Merge "ndk_crypto_fuzzer: Bug fix"
diff --git a/Android.bp b/Android.bp
index 37f6457..302e250 100644
--- a/Android.bp
+++ b/Android.bp
@@ -102,3 +102,27 @@
         },
     },
 }
+
+aidl_interface {
+    name: "av-audio-types-aidl",
+    unstable: true,
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        "aidl/android/media/audio/IHalAdapterVendorExtension.aidl",
+    ],
+    imports: [
+        "android.hardware.audio.core-V1",
+    ],
+    backend: {
+        // The C++ backend is disabled transitively due to use of FMQ by the audio core HAL.
+        cpp: {
+            enabled: false,
+        },
+        java: {
+            sdk_version: "module_current",
+        },
+    },
+}
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index 1f7083b..62cf827 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -12,3 +12,4 @@
                media/libmediatranscoding/
                services/mediatranscoding/
                media/libaudioclient/tests/
+               media/libaudiohal/tests/
diff --git a/aidl/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl/android/media/audio/IHalAdapterVendorExtension.aidl
new file mode 100644
index 0000000..b7a7678
--- /dev/null
+++ b/aidl/android/media/audio/IHalAdapterVendorExtension.aidl
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audio;
+
+import android.hardware.audio.core.VendorParameter;
+
+/**
+ * This interface is used by the HAL adapter of the Audio Server. Implementation
+ * is optional. Vendors may provide an implementation on the system_ext
+ * partition. The default instance of this interface, if provided, must be
+ * registered before the audio server connects to HAL modules.
+ *
+ * {@hide}
+ */
+interface IHalAdapterVendorExtension {
+    enum ParameterScope {
+        MODULE = 0,
+        STREAM = 1,
+    }
+
+    /**
+     * Parse raw parameter keys into vendor parameter ids.
+     *
+     * This method prepares arguments for a call to the 'getVendorParameters'
+     * method of an 'IModule' or an 'IStreamCommon' interface instance,
+     * depending on the provided scope.
+     *
+     * The client calls this method in order to prepare arguments for a call to
+     * the particular Core HAL interface. The result returned by the HAL is then
+     * processed using the 'processVendorParameters' method. It is not required
+     * to maintain a 1:1 correspondence between the provided raw keys and the
+     * elements of the parsed result. If the returned list is empty, the call to
+     * 'getVendorParameters' is skipped. The implementation can either ignore
+     * keys which it does not recognize, or throw an error. The latter is
+     * preferred as it can help in discovering malformed key names.
+     *
+     * @param scope The scope of all raw parameter keys.
+     * @param rawKeys Raw parameter keys, joined into a string using a semicolon
+     *                (';') as the delimiter.
+     * @return A list of vendor parameter IDs, see android.hardware.audio.core.VendorParameter.
+     * @throws EX_ILLEGAL_ARGUMENT If the implementation cannot parse the raw keys
+     *                             and prefers to signal an error.
+     */
+    @utf8InCpp String[] parseVendorParameterIds(
+            ParameterScope scope, in @utf8InCpp String rawKeys);
+
+    /**
+     * Parse raw parameter key-value pairs into vendor parameters.
+     *
+     * This method prepares arguments for a call to the 'setVendorParameters'
+     * method of an 'IModule' or an 'IStreamCommon' interface instance,
+     * depending on the provided scope.
+     *
+     * The vendor parameters returned via the 'syncParameters' argument are then
+     * used to call the 'setVendorParameters' method with 'async = false', and
+     * 'asyncParameters' is used in a subsequent call to the same method, but
+     * with 'async = true'. It is not required to maintain a 1:1 correspondence
+     * between the provided key-value pairs and the elements of parsed
+     * results. If any of the returned lists of vendor parameters is empty, then
+     * the corresponding call is skipped. The implementation can either ignore
+     * unrecognized keys and invalid values, or throw an
+     * error. The latter is preferred as it can help in discovering malformed
+     * key names and values.
+     *
+     * @param scope The scope of all raw key-value pairs.
+     * @param rawKeysAndValues Raw key-value pairs ("key=value"), joined into a string
+     *                         using a semicolon (';') as the delimiter.
+     * @param syncParameters A list of vendor parameters to be set synchronously.
+     * @param asyncParameters A list of vendor parameters to be set asynchronously.
+     * @throws EX_ILLEGAL_ARGUMENT If the implementation cannot parse raw key-value
+     *                             pairs and prefers to signal an error.
+     */
+    void parseVendorParameters(
+            ParameterScope scope, in @utf8InCpp String rawKeysAndValues,
+            out VendorParameter[] syncParameters, out VendorParameter[] asyncParameters);
+
+    /**
+     * Parse raw value of the parameter for BT A2DP reconfiguration.
+     *
+     * This method may return any number of vendor parameters (including zero)
+     * which will be passed to the 'IBluetoothA2dp.reconfigureOffload' method.
+     *
+     * @param rawValue An unparsed value of the legacy parameter.
+     * @return A list of vendor parameters.
+     * @throws EX_ILLEGAL_ARGUMENT If the implementation cannot parse the raw value.
+     */
+    VendorParameter[] parseBluetoothA2dpReconfigureOffload(in @utf8InCpp String rawValue);
+
+    /**
+     * Parse raw value of the parameter for BT LE reconfiguration.
+     *
+     * This method may return any number of vendor parameters (including zero)
+     * which will be passed to the 'IBluetoothLe.reconfigureOffload' method.
+     *
+     * @param rawValue An unparsed value of the legacy parameter.
+     * @return A list of vendor parameters.
+     * @throws EX_ILLEGAL_ARGUMENT If the implementation cannot parse the raw value.
+     */
+    VendorParameter[] parseBluetoothLeReconfigureOffload(in @utf8InCpp String rawValue);
+
+    /**
+     * Process vendor parameters returned by the Audio Core HAL.
+     *
+     * This processes the result returned from a call to the
+     * 'getVendorParameters' method of an 'IModule' or an 'IStreamCommon'
+     * interface instance, depending on the provided scope.
+     *
+     * See the 'parseVendorParameterIds' method for a description of the flow. It is not
+     * required to maintain a 1:1 correspondence between the elements of the
+     * provided list and the emitted key-value pairs. The returned string with
+     * raw key-value pairs is passed back to the framework.
+     *
+     * @param scope The scope of vendor parameters.
+     * @param parameters Vendor parameters, see android.hardware.audio.core.VendorParameter.
+     * @return Key-value pairs, separated by the "equals" sign ('='),
+     *         joined into a string using a semicolon (';') as the delimiter.
+     * @throws EX_ILLEGAL_ARGUMENT If the implementation cannot emit raw key-value
+     *                             pairs and prefers to signal an error.
+     */
+    @utf8InCpp String processVendorParameters(
+            ParameterScope scope, in VendorParameter[] parameters);
+}
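[Editor's note on the parsing flow documented above: the framework hands the implementation raw audio parameter strings in two shapes, a semicolon-delimited key list ("key1;key2") for gets and a semicolon-delimited "key=value" list for sets. The standalone C++ sketch below is not part of this change; it only illustrates how those documented string formats can be tokenized. The helper names are hypothetical, and a real implementation of IHalAdapterVendorExtension would additionally map each key to its vendor-specific VendorParameter parcelable.]

    // Illustrative only: splits the raw string formats described in
    // IHalAdapterVendorExtension ("key1;key2;key3" and "key1=v1;key2=v2").
    #include <string>
    #include <utility>
    #include <vector>

    // Hypothetical helper: split a semicolon-delimited list of raw keys.
    static std::vector<std::string> splitRawKeys(const std::string& rawKeys) {
        std::vector<std::string> keys;
        size_t start = 0;
        while (start <= rawKeys.size()) {
            size_t end = rawKeys.find(';', start);
            if (end == std::string::npos) end = rawKeys.size();
            if (end > start) keys.push_back(rawKeys.substr(start, end - start));
            start = end + 1;
        }
        return keys;
    }

    // Hypothetical helper: split "key=value" entries; entries without '=' are skipped here.
    static std::vector<std::pair<std::string, std::string>> splitRawKeyValuePairs(
            const std::string& rawKeysAndValues) {
        std::vector<std::pair<std::string, std::string>> pairs;
        for (const std::string& entry : splitRawKeys(rawKeysAndValues)) {
            size_t eq = entry.find('=');
            if (eq == std::string::npos) continue;  // or signal EX_ILLEGAL_ARGUMENT
            pairs.emplace_back(entry.substr(0, eq), entry.substr(eq + 1));
        }
        return pairs;
    }

Throwing EX_ILLEGAL_ARGUMENT for entries that fail to parse, as the interface documentation recommends, would replace the 'continue' above.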
diff --git a/apex/TEST_MAPPING b/apex/TEST_MAPPING
index 4b7c019..bb4f089 100644
--- a/apex/TEST_MAPPING
+++ b/apex/TEST_MAPPING
@@ -7,16 +7,16 @@
   "presubmit": [
     // The following tests validate codec and drm path.
     {
-      "name": "GtsMediaTestCases",
+      "name": "WvtsDeviceTestCases",
       "options" : [
         {
           "include-annotation": "android.platform.test.annotations.Presubmit"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+          "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
+          "include-filter": "com.google.android.media.wvts.WidevineH264PlaybackTests"
         }
       ]
     }
diff --git a/camera/Android.bp b/camera/Android.bp
index f27eb31..b3f70f4 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -142,14 +142,15 @@
 filegroup {
     name: "libcamera_client_aidl",
     srcs: [
+        "aidl/android/hardware/CameraExtensionSessionStats.aidl",
         "aidl/android/hardware/ICameraService.aidl",
         "aidl/android/hardware/ICameraServiceListener.aidl",
         "aidl/android/hardware/ICameraServiceProxy.aidl",
         "aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
         "aidl/android/hardware/camera2/ICameraDeviceUser.aidl",
-        "aidl/android/hardware/camera2/ICameraOfflineSession.aidl",
         "aidl/android/hardware/camera2/ICameraInjectionCallback.aidl",
         "aidl/android/hardware/camera2/ICameraInjectionSession.aidl",
+        "aidl/android/hardware/camera2/ICameraOfflineSession.aidl",
     ],
     path: "aidl",
 }
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index d1618e4..2244682 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -71,10 +71,11 @@
 }
 
 sp<Camera> Camera::connect(int cameraId, const String16& clientPackageName,
-        int clientUid, int clientPid, int targetSdkVersion, bool overrideToPortrait)
+        int clientUid, int clientPid, int targetSdkVersion, bool overrideToPortrait,
+        bool forceSlowJpegMode)
 {
     return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
-            clientPid, targetSdkVersion, overrideToPortrait);
+            clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode);
 }
 
 status_t Camera::reconnect()
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 0a5bc12..ef40f6c 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -103,7 +103,6 @@
 
 namespace {
     sp<::android::hardware::ICameraService> gCameraService;
-    const int                 kCameraServicePollDelay = 500000; // 0.5s
     const char*               kCameraServiceName      = "media.camera";
 
     Mutex                     gLock;
@@ -141,14 +140,10 @@
 
         sp<IServiceManager> sm = defaultServiceManager();
         sp<IBinder> binder;
-        do {
-            binder = sm->getService(String16(kCameraServiceName));
-            if (binder != 0) {
-                break;
-            }
-            ALOGW("CameraService not published, waiting...");
-            usleep(kCameraServicePollDelay);
-        } while(true);
+        binder = sm->waitForService(String16(kCameraServiceName));
+        if (binder == nullptr) {
+            return nullptr;
+        }
         if (gDeathNotifier == NULL) {
             gDeathNotifier = new DeathNotifier();
         }
@@ -163,7 +158,7 @@
 sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
                                                const String16& clientPackageName,
                                                int clientUid, int clientPid, int targetSdkVersion,
-                                               bool overrideToPortrait)
+                                               bool overrideToPortrait, bool forceSlowJpegMode)
 {
     ALOGV("%s: connect", __FUNCTION__);
     sp<TCam> c = new TCam(cameraId);
@@ -173,9 +168,11 @@
     binder::Status ret;
     if (cs != nullptr) {
         TCamConnectService fnConnectService = TCamTraits::fnConnectService;
-        ALOGI("Connect camera (legacy API) - overrideToPortrait %d", overrideToPortrait);
+        ALOGI("Connect camera (legacy API) - overrideToPortrait %d, forceSlowJpegMode %d",
+                overrideToPortrait, forceSlowJpegMode);
         ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
-                clientPid, targetSdkVersion, overrideToPortrait, /*out*/ &c->mCamera);
+                clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode,
+                 /*out*/ &c->mCamera);
     }
     if (ret.isOk() && c->mCamera != nullptr) {
         IInterface::asBinder(c->mCamera)->linkToDeath(c);
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index 26c612a..9e9793d 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -278,7 +278,9 @@
         mRequestCount(0),
         mResultErrorCount(0),
         mDeviceError(false),
-        mVideoStabilizationMode(-1) {}
+        mVideoStabilizationMode(-1),
+        mSessionIndex(0),
+        mCameraExtensionSessionStats() {}
 
 CameraSessionStats::CameraSessionStats(const String16& cameraId,
         int facing, int newCameraState, const String16& clientName,
@@ -297,7 +299,9 @@
                 mRequestCount(0),
                 mResultErrorCount(0),
                 mDeviceError(0),
-                mVideoStabilizationMode(-1) {}
+                mVideoStabilizationMode(-1),
+                mSessionIndex(0),
+                mCameraExtensionSessionStats() {}
 
 status_t CameraSessionStats::readFromParcel(const android::Parcel* parcel) {
     if (parcel == NULL) {
@@ -409,6 +413,18 @@
         return err;
     }
 
+    int32_t sessionIdx;
+    if ((err = parcel->readInt32(&sessionIdx)) != OK) {
+        ALOGE("%s: Failed to read session index from parcel", __FUNCTION__);
+        return err;
+    }
+
+    CameraExtensionSessionStats extStats{};
+    if ((err = extStats.readFromParcel(parcel)) != OK) {
+        ALOGE("%s: Failed to read extension session stats from parcel", __FUNCTION__);
+        return err;
+    }
+
     mCameraId = id;
     mFacing = facing;
     mNewCameraState = newCameraState;
@@ -426,6 +442,8 @@
     mStreamStats = std::move(streamStats);
     mUserTag = userTag;
     mVideoStabilizationMode = videoStabilizationMode;
+    mSessionIndex = sessionIdx;
+    mCameraExtensionSessionStats = extStats;
 
     return OK;
 }
@@ -523,6 +541,16 @@
         return err;
     }
 
+    if ((err = parcel->writeInt32(mSessionIndex)) != OK) {
+        ALOGE("%s: Failed to write session index!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = mCameraExtensionSessionStats.writeToParcel(parcel)) != OK) {
+        ALOGE("%s: Failed to write extension sessions stats!", __FUNCTION__);
+        return err;
+    }
+
     return OK;
 }
 
diff --git a/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
new file mode 100644
index 0000000..1c81831
--- /dev/null
+++ b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware;
+
+/**
+ * Metrics specific to Extension Sessions (see CameraExtensionSession) for logging.
+ *
+ * Each Extension Session is mapped to one camera session internally; its stats are sent to
+ * CameraServiceProxy with the IDLE/CLOSE calls.
+ * @hide
+ */
+parcelable CameraExtensionSessionStats {
+    /**
+     * Value should match {@code CameraExtensionCharacteristics#EXTENSION_*}
+     */
+    @Backing(type="int")
+    enum Type {
+        EXTENSION_NONE = -1,
+        EXTENSION_AUTOMATIC = 0,
+        EXTENSION_FACE_RETOUCH = 1,
+        EXTENSION_BOKEH = 2,
+        EXTENSION_HDR = 3,
+        EXTENSION_NIGHT = 4
+    }
+
+    /**
+     * Key to uniquely identify the session this stat is associated with. The first call to
+     * 'ICameraService.reportExtensionSessionStats' should set this to an empty string.
+     * 'ICameraService.reportExtensionSessionStats' will return the key that should be used in
+     * subsequent calls.
+     */
+    String key;
+
+    /**
+     * Camera ID for which the stats is being reported.
+     */
+    String cameraId;
+
+    /**
+     * Package name of the client using the camera
+     */
+    String clientName;
+
+
+    /**
+     * Type of extension session requested by the app. Note that EXTENSION_AUTOMATIC is reported
+     * as such.
+     */
+    Type type = Type.EXTENSION_NONE;
+
+    /**
+     * true if advanced extensions are being used, false otherwise
+     */
+    boolean isAdvanced = false;
+}
\ No newline at end of file
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 01baba1..f8e1631 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -30,6 +30,7 @@
 import android.hardware.ICameraServiceListener;
 import android.hardware.CameraInfo;
 import android.hardware.CameraStatus;
+import android.hardware.CameraExtensionSessionStats;
 
 /**
  * Binder interface for the native camera service running in mediaserver.
@@ -84,7 +85,8 @@
             String opPackageName,
             int clientUid, int clientPid,
             int targetSdkVersion,
-            boolean overrideToPortrait);
+            boolean overrideToPortrait,
+            boolean forceSlowJpegMode);
 
     /**
      * Open a camera device through the new camera API
@@ -213,6 +215,26 @@
      */
     oneway void notifyDeviceStateChange(long newState);
 
+    /**
+     * Report Extension-specific metrics to the camera service for logging. This should only be called
+     * by CameraExtensionSession to log extension metrics. All calls after the first must set
+     * CameraExtensionSessionStats.key to the value returned by this function.
+     *
+     * Each subsequent call fully overwrites the existing CameraExtensionSessionStats for the
+     * current session, so the caller is responsible for keeping the stats complete.
+     *
+     * Due to the cameraservice and cameraservice_proxy architecture, there is no guarantee that
+     * {@code stats} will be logged immediately (or at all). CameraService will log whatever
+     * extension stats it has at the time the camera session closes, which may be before the app
+     * process receives a session/device closed callback. Therefore, CameraExtensionSession
+     * should send metrics to cameraservice periodically, and cameraservice must handle calls
+     * to this function from sessions that have not been logged yet as well as from sessions that
+     * have already been closed.
+     *
+     * @return the key that must be used to report updates to previously reported stats.
+     */
+    String reportExtensionSessionStats(in CameraExtensionSessionStats stats);
+
     // Bitfield constants for notifyDeviceStateChange
     // All bits >= 32 are for custom vendor states
     // Written as ints since AIDL does not support long constants.
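[Editor's note: the key handshake documented for reportExtensionSessionStats reduces to sending the first report with an empty key, remembering the key the service returns, and attaching it to every later report for the same session, each report fully overwriting the previous one. The C++ sketch below is not part of this change and only illustrates that client-side sequence; it assumes the cpp-backend proxy generated from this AIDL, where the String return value is delivered through a String16 out-parameter, and the 'reportLatestStats' helper is hypothetical.]

    #include <android/hardware/CameraExtensionSessionStats.h>
    #include <android/hardware/ICameraService.h>
    #include <utils/String16.h>
    #include <utils/StrongPointer.h>

    using android::sp;
    using android::String16;
    using android::binder::Status;
    using android::hardware::CameraExtensionSessionStats;
    using android::hardware::ICameraService;

    // Hypothetical helper owned by an extension session: sends the latest stats
    // snapshot and keeps the key returned by the service for later overwrites.
    static void reportLatestStats(const sp<ICameraService>& cs,
                                  CameraExtensionSessionStats stats,
                                  String16* sessionKey /* empty before the first call */) {
        stats.key = *sessionKey;  // "" on the first call starts a new stats record
        String16 returnedKey;
        Status status = cs->reportExtensionSessionStats(stats, &returnedKey);
        if (status.isOk()) {
            *sessionKey = returnedKey;  // reuse for subsequent reports of this session
        }
    }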
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index be8a00f..4faa6b4 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -17,6 +17,7 @@
 package android.hardware;
 
 import android.hardware.CameraSessionStats;
+import android.hardware.CameraExtensionSessionStats;
 
 /**
  * Binder interface for the camera service proxy running in system_server.
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 8472562..13b705c 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -26,12 +26,15 @@
 
     srcs: ["main_cameraserver.cpp"],
 
+    defaults: [
+        "libcameraservice_deps",
+    ],
+
     header_libs: [
         "libmedia_headers",
     ],
 
     shared_libs: [
-        "libcameraservice",
         "liblog",
         "libutils",
         "libui",
@@ -40,15 +43,13 @@
         "libbinder_ndk",
         "libhidlbase",
         "android.hardware.camera.common@1.0",
-        "android.hardware.camera.provider@2.4",
-        "android.hardware.camera.provider@2.5",
-        "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
     ],
+    static_libs: [
+        "libcameraservice",
+    ],
     compile_multilib: "first",
     cflags: [
         "-Wall",
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 26c36a7..21b57af 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -58,7 +58,7 @@
     typedef ::android::hardware::ICameraClient TCamCallbacks;
     typedef ::android::binder::Status(::android::hardware::ICameraService::*TCamConnectService)
         (const sp<::android::hardware::ICameraClient>&,
-        int, const String16&, int, int, int, bool,
+        int, const String16&, int, int, int, bool, bool,
         /*out*/
         sp<::android::hardware::ICamera>*);
     static TCamConnectService     fnConnectService;
@@ -82,7 +82,7 @@
     static  sp<Camera>  connect(int cameraId,
                                 const String16& clientPackageName,
                                 int clientUid, int clientPid, int targetSdkVersion,
-                                bool overrideToPortrait);
+                                bool overrideToPortrait, bool forceSlowJpegMode);
 
             virtual     ~Camera();
 
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 9d0721b..b20dc1b 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -120,7 +120,7 @@
     static sp<TCam>      connect(int cameraId,
                                  const String16& clientPackageName,
                                  int clientUid, int clientPid, int targetSdkVersion,
-                                 bool overrideToPortrait);
+                                 bool overrideToPortrait, bool forceSlowJpegMode);
     virtual void         disconnect();
 
     void                 setListener(const sp<TCamListener>& listener);
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index 091a7ff..071bc73 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -20,6 +20,7 @@
 #include <binder/Parcelable.h>
 
 #include <camera/CameraMetadata.h>
+#include <android/hardware/CameraExtensionSessionStats.h>
 
 namespace android {
 namespace hardware {
@@ -158,6 +159,9 @@
     std::vector<CameraStreamStats> mStreamStats;
     String16 mUserTag;
     int mVideoStabilizationMode;
+    int mSessionIndex;
+
+    CameraExtensionSessionStats mCameraExtensionSessionStats;
 
     // Constructors
     CameraSessionStats();
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index bfd02b3..866dc72 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -182,6 +182,7 @@
     shared_libs: [
         "libcamera2ndk_vendor",
         "libcamera_metadata",
+        "libhidlbase",
         "libmediandk",
         "libnativewindow",
         "libutils",
@@ -191,6 +192,7 @@
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
+        "android.hidl.token@1.0",
     ],
     cflags: [
         "-D__ANDROID_VNDK__",
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 1bd3603..bd679e5 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -4487,8 +4487,8 @@
      * <p>The guaranteed stream combinations related to stream use case for a camera device with
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE</a>
      * capability is documented in the camera device
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">guideline</a>. The
-     * application is strongly recommended to use one of the guaranteed stream combinations.
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#stream-use-case-capability-additional-guaranteed-configurations">guideline</a>. The application is strongly recommended to use one of the guaranteed stream
+     * combinations.
      * If the application creates a session with a stream combination not in the guaranteed
      * list, or with mixed DEFAULT and non-DEFAULT use cases within the same session,
      * the camera device may ignore some stream use cases due to hardware constraints
@@ -9866,9 +9866,10 @@
      * </ul>
      * <p><a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#SCALER_AVAILABLE_STREAM_USE_CASES">CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES</a>
      * lists all of the supported stream use cases.</p>
-     * <p>Refer to <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for the
-     * mandatory stream combinations involving stream use cases, which can also be queried
-     * via <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">MandatoryStreamCombination</a>.</p>
+     * <p>Refer to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#stream-use-case-capability-additional-guaranteed-configurations">CameraDevice#stream-use-case-capability-additional-guaranteed-configurations</a>
+     * for the mandatory stream combinations involving stream use cases, which can also be
+     * queried via <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">MandatoryStreamCombination</a>.</p>
      */
     ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE           = 19,
 
@@ -10728,7 +10729,8 @@
      * <p>This camera device does not have enough capabilities to qualify as a <code>FULL</code> device or
      * better.</p>
      * <p>Only the stream configurations listed in the <code>LEGACY</code> and <code>LIMITED</code> tables in the
-     * {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
+     * {@link ACameraDevice_createCaptureSession }
+     * documentation are guaranteed to be supported.</p>
      * <p>All <code>LIMITED</code> devices support the <code>BACKWARDS_COMPATIBLE</code> capability, indicating basic
      * support for color image capture. The only exception is that the device may
      * alternatively support only the <code>DEPTH_OUTPUT</code> capability, if it can only output depth
@@ -10754,7 +10756,8 @@
     /**
      * <p>This camera device is capable of supporting advanced imaging applications.</p>
      * <p>The stream configurations listed in the <code>FULL</code>, <code>LEGACY</code> and <code>LIMITED</code> tables in the
-     * {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
+     * {@link ACameraDevice_createCaptureSession }
+     * documentation are guaranteed to be supported.</p>
      * <p>A <code>FULL</code> device will support below capabilities:</p>
      * <ul>
      * <li><code>BURST_CAPTURE</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
@@ -10781,7 +10784,9 @@
 
     /**
      * <p>This camera device is running in backward compatibility mode.</p>
-     * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are supported.</p>
+     * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the
+     * {@link ACameraDevice_createCaptureSession }
+     * documentation are supported.</p>
      * <p>A <code>LEGACY</code> device does not support per-frame control, manual sensor control, manual
      * post-processing, arbitrary cropping regions, and has relaxed performance constraints.
      * No additional capabilities beyond <code>BACKWARD_COMPATIBLE</code> will ever be listed by a
@@ -10803,7 +10808,9 @@
      * <p>This camera device is capable of YUV reprocessing and RAW data capture, in addition to
      * FULL-level capabilities.</p>
      * <p>The stream configurations listed in the <code>LEVEL_3</code>, <code>RAW</code>, <code>FULL</code>, <code>LEGACY</code> and
-     * <code>LIMITED</code> tables in the {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
+     * <code>LIMITED</code> tables in the
+     * {@link ACameraDevice_createCaptureSession }
+     * documentation are guaranteed to be supported.</p>
      * <p>The following additional capabilities are guaranteed to be supported:</p>
      * <ul>
      * <li><code>YUV_REPROCESSING</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 7f6ea9d..74c6cad 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -31,10 +31,13 @@
 #include <stdio.h>
 
 #include <android/log.h>
+#include <android/hidl/manager/1.2/IServiceManager.h>
+#include <android/hidl/token/1.0/ITokenManager.h>
 #include <camera/NdkCameraError.h>
 #include <camera/NdkCameraManager.h>
 #include <camera/NdkCameraDevice.h>
 #include <camera/NdkCameraCaptureSession.h>
+#include <hidl/ServiceManagement.h>
 #include <media/NdkImage.h>
 #include <media/NdkImageReader.h>
 #include <cutils/native_handle.h>
@@ -50,6 +53,8 @@
 static constexpr int kTestImageFormat = AIMAGE_FORMAT_YUV_420_888;
 
 using android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
+using android::hidl::manager::V1_0::IServiceManager;
+using android::hidl::token::V1_0::ITokenManager;
 using ConfiguredWindows = std::set<const native_handle_t *>;
 
 class CameraHelper {
@@ -981,11 +986,19 @@
 
 
 TEST_F(AImageReaderVendorTest, CreateWindowNativeHandle) {
+    auto transport = android::hardware::defaultServiceManager()->getTransport(ITokenManager::descriptor, "default");
+    if (transport.isOk() && transport == IServiceManager::Transport::EMPTY) {
+        GTEST_SKIP() << "This device no longer supports AImageReader_getWindowNativeHandle";
+    }
     testBasicTakePictures(/*prepareSurfaces*/ false);
     testBasicTakePictures(/*prepareSurfaces*/ true);
 }
 
 TEST_F(AImageReaderVendorTest, LogicalCameraPhysicalStream) {
+    auto transport = android::hardware::defaultServiceManager()->getTransport(ITokenManager::descriptor, "default");
+    if (transport.isOk() && transport == IServiceManager::Transport::EMPTY) {
+        GTEST_SKIP() << "This device no longer supports AImageReader_getWindowNativeHandle";
+    }
     for (auto & v2 : {true, false}) {
         testLogicalCameraPhysicalStream(false/*usePhysicalSettings*/, v2);
         testLogicalCameraPhysicalStream(true/*usePhysicalSettings*/, v2);
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index bdfb84a..6423709 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -211,7 +211,7 @@
                 String16("ZSLTest"), hardware::ICameraService::USE_CALLING_UID,
                 hardware::ICameraService::USE_CALLING_PID,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, &cameraDevice);
+                /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, &cameraDevice);
         EXPECT_TRUE(rc.isOk());
 
         CameraParameters params(cameraDevice->getParameters());
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index d41e6b6..f9ef98e 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -152,7 +152,7 @@
                             String16("CAMERAFUZZ"), hardware::ICameraService::USE_CALLING_UID,
                             hardware::ICameraService::USE_CALLING_PID,
                             /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
-                            /*overrideToPortrait*/false, &cameraDevice);
+                            /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, &cameraDevice);
     mCamera = Camera::create(cameraDevice);
     if (!mCamera) {
         return false;
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index cd4932d..f53fc0a 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -122,7 +122,7 @@
 static uint32_t gBitRate = 20000000;     // 20Mbps
 static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
 static uint32_t gBframes = 0;
-static PhysicalDisplayId gPhysicalDisplayId;
+static std::optional<PhysicalDisplayId> gPhysicalDisplayId;
 // Set by signal handler to stop recording.
 static volatile bool gStopRequested = false;
 
@@ -336,6 +336,24 @@
 }
 
 /*
+ * Gets the physical id of the display to record. If the user specified a physical
+ * display id, that id is used. Otherwise, the id of the default (first) display is used.
+ */
+static status_t getPhysicalDisplayId(PhysicalDisplayId& outDisplayId) {
+    if (gPhysicalDisplayId) {
+        outDisplayId = *gPhysicalDisplayId;
+        return NO_ERROR;
+    }
+
+    const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
+    if (ids.empty()) {
+        return INVALID_OPERATION;
+    }
+    outDisplayId = ids.front();
+    return NO_ERROR;
+}
+
+/*
  * Configures the virtual display.  When this completes, virtual display
  * frames will start arriving from the buffer producer.
  */
@@ -350,7 +368,12 @@
     setDisplayProjection(t, dpy, displayState);
     ui::LayerStack layerStack = ui::LayerStack::fromValue(std::rand());
     t.setDisplayLayerStack(dpy, layerStack);
-    *mirrorRoot = SurfaceComposerClient::getDefault()->mirrorDisplay(gPhysicalDisplayId);
+    PhysicalDisplayId displayId;
+    status_t err = getPhysicalDisplayId(displayId);
+    if (err != NO_ERROR) {
+        return err;
+    }
+    *mirrorRoot = SurfaceComposerClient::getDefault()->mirrorDisplay(displayId);
     if (*mirrorRoot == nullptr) {
         ALOGE("Failed to create a mirror for screenrecord");
         return UNKNOWN_ERROR;
@@ -486,6 +509,43 @@
 }
 
 /*
+ * Update the display projection if size or orientation has changed.
+ */
+void updateDisplayProjection(const sp<IBinder>& virtualDpy, ui::DisplayState& displayState) {
+    ATRACE_NAME("updateDisplayProjection");
+
+    PhysicalDisplayId displayId;
+    if (getPhysicalDisplayId(displayId) != NO_ERROR) {
+        fprintf(stderr, "ERROR: Failed to get display id\n");
+        return;
+    }
+
+    sp<IBinder> displayToken = SurfaceComposerClient::getPhysicalDisplayToken(displayId);
+    if (!displayToken) {
+        fprintf(stderr, "ERROR: failed to get display token\n");
+        return;
+    }
+
+    ui::DisplayState currentDisplayState;
+    if (SurfaceComposerClient::getDisplayState(displayToken, &currentDisplayState) != NO_ERROR) {
+        ALOGW("ERROR: failed to get display state\n");
+        return;
+    }
+
+    if (currentDisplayState.orientation != displayState.orientation ||
+        currentDisplayState.layerStackSpaceRect != displayState.layerStackSpaceRect) {
+        displayState = currentDisplayState;
+        ALOGD("display state changed, now has orientation %s, size (%d, %d)",
+              toCString(displayState.orientation), displayState.layerStackSpaceRect.getWidth(),
+              displayState.layerStackSpaceRect.getHeight());
+
+        SurfaceComposerClient::Transaction t;
+        setDisplayProjection(t, virtualDpy, currentDisplayState);
+        t.apply();
+    }
+}
+
+/*
  * Runs the MediaCodec encoder, sending the output to the MediaMuxer.  The
  * input frames are coming from the virtual display as fast as SurfaceFlinger
  * wants to send them.
@@ -494,9 +554,8 @@
  *
  * The muxer must *not* have been started before calling.
  */
-static status_t runEncoder(const sp<MediaCodec>& encoder,
-        AMediaMuxer *muxer, FILE* rawFp, const sp<IBinder>& display,
-        const sp<IBinder>& virtualDpy, ui::Rotation orientation) {
+static status_t runEncoder(const sp<MediaCodec>& encoder, AMediaMuxer* muxer, FILE* rawFp,
+                           const sp<IBinder>& virtualDpy, ui::DisplayState displayState) {
     static int kTimeout = 250000;   // be responsive on signal
     status_t err;
     ssize_t trackIdx = -1;
@@ -555,24 +614,7 @@
                 ALOGV("Got data in buffer %zu, size=%zu, pts=%" PRId64,
                         bufIndex, size, ptsUsec);
 
-                { // scope
-                    ATRACE_NAME("orientation");
-                    // Check orientation, update if it has changed.
-                    //
-                    // Polling for changes is inefficient and wrong, but the
-                    // useful stuff is hard to get at without a Dalvik VM.
-                    ui::DisplayState displayState;
-                    err = SurfaceComposerClient::getDisplayState(display, &displayState);
-                    if (err != NO_ERROR) {
-                        ALOGW("getDisplayState() failed: %d", err);
-                    } else if (orientation != displayState.orientation) {
-                        ALOGD("orientation changed, now %s", toCString(displayState.orientation));
-                        SurfaceComposerClient::Transaction t;
-                        setDisplayProjection(t, virtualDpy, displayState);
-                        t.apply();
-                        orientation = displayState.orientation;
-                    }
-                }
+                updateDisplayProjection(virtualDpy, displayState);
 
                 // If the virtual display isn't providing us with timestamps,
                 // use the current time.  This isn't great -- we could get
@@ -764,6 +806,38 @@
 };
 
 /*
+ * Computes the maximum width and height across all physical displays.
+ */
+static ui::Size getMaxDisplaySize() {
+    const std::vector<PhysicalDisplayId> physicalDisplayIds =
+            SurfaceComposerClient::getPhysicalDisplayIds();
+    if (physicalDisplayIds.empty()) {
+        fprintf(stderr, "ERROR: Failed to get physical display ids\n");
+        return {};
+    }
+
+    ui::Size result;
+    for (auto& displayId : physicalDisplayIds) {
+        sp<IBinder> displayToken = SurfaceComposerClient::getPhysicalDisplayToken(displayId);
+        if (!displayToken) {
+            fprintf(stderr, "ERROR: failed to get display token\n");
+            continue;
+        }
+
+        ui::DisplayState displayState;
+        status_t err = SurfaceComposerClient::getDisplayState(displayToken, &displayState);
+        if (err != NO_ERROR) {
+            fprintf(stderr, "ERROR: failed to get display state\n");
+            continue;
+        }
+
+        result.height = std::max(result.height, displayState.layerStackSpaceRect.getHeight());
+        result.width = std::max(result.width, displayState.layerStackSpaceRect.getWidth());
+    }
+    return result;
+}
+
+/*
  * Main "do work" start point.
  *
  * Configures codec, muxer, and virtual display, then starts moving bits
@@ -781,9 +855,15 @@
     sp<ProcessState> self = ProcessState::self();
     self->startThreadPool();
 
+    PhysicalDisplayId displayId;
+    err = getPhysicalDisplayId(displayId);
+    if (err != NO_ERROR) {
+        fprintf(stderr, "ERROR: Failed to get display id\n");
+        return err;
+    }
+
     // Get main display parameters.
-    sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(
-            gPhysicalDisplayId);
+    sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(displayId);
     if (display == nullptr) {
         fprintf(stderr, "ERROR: no display\n");
         return NAME_NOT_FOUND;
@@ -808,7 +888,8 @@
         return INVALID_OPERATION;
     }
 
-    const ui::Size& layerStackSpaceRect = displayState.layerStackSpaceRect;
+    const ui::Size layerStackSpaceRect =
+        gPhysicalDisplayId ? displayState.layerStackSpaceRect : getMaxDisplaySize();
     if (gVerbose) {
         printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
                 layerStackSpaceRect.getWidth(), layerStackSpaceRect.getHeight(),
@@ -973,8 +1054,7 @@
         }
     } else {
         // Main encoder loop.
-        err = runEncoder(recordingData.encoder, muxer, rawFp, display, recordingData.dpy,
-                         displayState.orientation);
+        err = runEncoder(recordingData.encoder, muxer, rawFp, recordingData.dpy, displayState);
         if (err != NO_ERROR) {
             fprintf(stderr, "Encoder failed (err=%d)\n", err);
             // fall through to cleanup
@@ -1175,14 +1255,6 @@
         { NULL,                 0,                  NULL, 0 }
     };
 
-    const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
-    if (ids.empty()) {
-        fprintf(stderr, "Failed to get ID for any displays\n");
-        return 1;
-    }
-
-    gPhysicalDisplayId = ids.front();
-
     while (true) {
         int optionIndex = 0;
         int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 9783855..2edc0fe 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -749,7 +749,8 @@
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP8)   ? asString_VP8Profile(pl.mProfile) :
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_HEVC)  ? asString_HEVCProfile(pl.mProfile) :
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP9)   ? asString_VP9Profile(pl.mProfile) :
-                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)   ? asString_AV1Profile(pl.mProfile) :"??";
+                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)   ? asString_AV1Profile(pl.mProfile) :
+                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_DOLBY_VISION) ? asString_DolbyVisionProfile(pl.mProfile) :"??";
                     const char *niceLevel =
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG2) ? asString_MPEG2Level(pl.mLevel) :
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_H263)  ? asString_H263Level(pl.mLevel) :
@@ -759,6 +760,7 @@
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_HEVC)  ? asString_HEVCTierLevel(pl.mLevel) :
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP9)   ? asString_VP9Level(pl.mLevel) :
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)   ? asString_AV1Level(pl.mLevel) :
+                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_DOLBY_VISION) ? asString_DolbyVisionLevel(pl.mLevel) :
                         "??";
 
                     list.add(AStringPrintf("% 5u/% 5u (%s/%s)",
diff --git a/drm/TEST_MAPPING b/drm/TEST_MAPPING
index b2d4d6e..a9b4b2a 100644
--- a/drm/TEST_MAPPING
+++ b/drm/TEST_MAPPING
@@ -2,16 +2,16 @@
   "presubmit": [
     // The following tests validate codec and drm path.
     {
-      "name": "GtsMediaTestCases",
+      "name": "WvtsDeviceTestCases",
       "options" : [
         {
           "include-annotation": "android.platform.test.annotations.Presubmit"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+          "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
+          "include-filter": "com.google.android.media.wvts.WidevineH264PlaybackTests"
         }
       ]
     }
diff --git a/drm/drmserver/Android.bp b/drm/drmserver/Android.bp
index ab25c65..cee44b9 100644
--- a/drm/drmserver/Android.bp
+++ b/drm/drmserver/Android.bp
@@ -31,7 +31,33 @@
     ],
 }
 
-cc_binary {
+prebuilt_etc {
+    name: "drmserver.zygote64_32.rc",
+    src: "drmserver.zygote64_32.rc",
+    sub_dir: "init/hw",
+}
+
+prebuilt_etc {
+    name: "drmserver.zygote64.rc",
+    src: "drmserver.zygote64.rc",
+    sub_dir: "init/hw",
+}
+
+soong_config_module_type {
+    name: "drmserver_cc_binary",
+    module_type: "cc_binary",
+    config_namespace: "ANDROID",
+    bool_variables: ["TARGET_DYNAMIC_64_32_DRMSERVER"],
+    properties: [
+        "compile_multilib",
+        "init_rc",
+        "multilib.lib32.suffix",
+        "multilib.lib64.suffix",
+        "required",
+    ],
+}
+
+drmserver_cc_binary {
     name: "drmserver",
 
     srcs: [
@@ -61,7 +87,27 @@
 
     compile_multilib: "prefer32",
 
-    init_rc: ["drmserver.rc"],
+    soong_config_variables: {
+        TARGET_DYNAMIC_64_32_DRMSERVER: {
+            compile_multilib: "both",
+            multilib: {
+                lib32: {
+                    suffix: "32",
+                },
+                lib64: {
+                    suffix: "64",
+                },
+            },
+            required: [
+                "drmserver.zygote64_32.rc",
+                "drmserver.zygote64.rc",
+            ],
+            init_rc: ["drmserver_dynamic.rc"],
+            conditions_default: {
+                init_rc: ["drmserver.rc"],
+            },
+        },
+    },
 }
 
 cc_fuzz {
@@ -80,7 +126,6 @@
     static_libs: [
         "libmediautils",
         "liblog",
-        "libdl",
         "libdrmframeworkcommon",
         "libselinux",
         "libstagefright_foundation",
@@ -98,4 +143,4 @@
              "android-drm-team@google.com",
          ],
      },
-}
\ No newline at end of file
+}
diff --git a/drm/drmserver/drmserver.zygote64.rc b/drm/drmserver/drmserver.zygote64.rc
new file mode 100644
index 0000000..60cd906
--- /dev/null
+++ b/drm/drmserver/drmserver.zygote64.rc
@@ -0,0 +1,6 @@
+service drm /system/bin/drmserver64
+    disabled
+    class main
+    user drm
+    group drm system inet drmrpc readproc
+    task_profiles ProcessCapacityHigh
diff --git a/drm/drmserver/drmserver.zygote64_32.rc b/drm/drmserver/drmserver.zygote64_32.rc
new file mode 100644
index 0000000..c881acf
--- /dev/null
+++ b/drm/drmserver/drmserver.zygote64_32.rc
@@ -0,0 +1,6 @@
+service drm /system/bin/drmserver32
+    disabled
+    class main
+    user drm
+    group drm system inet drmrpc readproc
+    task_profiles ProcessCapacityHigh
diff --git a/drm/drmserver/drmserver_dynamic.rc b/drm/drmserver/drmserver_dynamic.rc
new file mode 100644
index 0000000..bfaada1
--- /dev/null
+++ b/drm/drmserver/drmserver_dynamic.rc
@@ -0,0 +1,7 @@
+import /system/etc/init/hw/drmserver.${ro.zygote}.rc
+
+on property:drm.service.enabled=true
+    start drm
+
+on property:drm.service.enabled=1
+    start drm
diff --git a/drm/libmediadrm/DrmMetricsLogger.cpp b/drm/libmediadrm/DrmMetricsLogger.cpp
index bc004c8..ce4d730 100644
--- a/drm/libmediadrm/DrmMetricsLogger.cpp
+++ b/drm/libmediadrm/DrmMetricsLogger.cpp
@@ -41,49 +41,59 @@
 
 DrmMetricsLogger::~DrmMetricsLogger() {}
 
-int MediaErrorToJavaError(status_t err) {
+int MediaErrorToEnum(status_t err) {
+#define ERROR_BAD_VALUE (BAD_VALUE)
+#define ERROR_DEAD_OBJECT (DEAD_OBJECT)
 #define STATUS_CASE(status) \
-    case status: \
-        return J##status
+    case ERROR_##status: \
+        return ENUM_##status
 
     switch (err) {
-        STATUS_CASE(ERROR_DRM_UNKNOWN);
-        STATUS_CASE(ERROR_DRM_NO_LICENSE);
-        STATUS_CASE(ERROR_DRM_LICENSE_EXPIRED);
-        STATUS_CASE(ERROR_DRM_RESOURCE_BUSY);
-        STATUS_CASE(ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION);
-        STATUS_CASE(ERROR_DRM_SESSION_NOT_OPENED);
-        STATUS_CASE(ERROR_DRM_CANNOT_HANDLE);
-        STATUS_CASE(ERROR_DRM_INSUFFICIENT_SECURITY);
-        STATUS_CASE(ERROR_DRM_FRAME_TOO_LARGE);
-        STATUS_CASE(ERROR_DRM_SESSION_LOST_STATE);
-        STATUS_CASE(ERROR_DRM_CERTIFICATE_MALFORMED);
-        STATUS_CASE(ERROR_DRM_CERTIFICATE_MISSING);
-        STATUS_CASE(ERROR_DRM_CRYPTO_LIBRARY);
-        STATUS_CASE(ERROR_DRM_GENERIC_OEM);
-        STATUS_CASE(ERROR_DRM_GENERIC_PLUGIN);
-        STATUS_CASE(ERROR_DRM_INIT_DATA);
-        STATUS_CASE(ERROR_DRM_KEY_NOT_LOADED);
-        STATUS_CASE(ERROR_DRM_LICENSE_PARSE);
-        STATUS_CASE(ERROR_DRM_LICENSE_POLICY);
-        STATUS_CASE(ERROR_DRM_LICENSE_RELEASE);
-        STATUS_CASE(ERROR_DRM_LICENSE_REQUEST_REJECTED);
-        STATUS_CASE(ERROR_DRM_LICENSE_RESTORE);
-        STATUS_CASE(ERROR_DRM_LICENSE_STATE);
-        STATUS_CASE(ERROR_DRM_MEDIA_FRAMEWORK);
-        STATUS_CASE(ERROR_DRM_PROVISIONING_CERTIFICATE);
-        STATUS_CASE(ERROR_DRM_PROVISIONING_CONFIG);
-        STATUS_CASE(ERROR_DRM_PROVISIONING_PARSE);
-        STATUS_CASE(ERROR_DRM_PROVISIONING_REQUEST_REJECTED);
-        STATUS_CASE(ERROR_DRM_PROVISIONING_RETRY);
-        STATUS_CASE(ERROR_DRM_RESOURCE_CONTENTION);
-        STATUS_CASE(ERROR_DRM_SECURE_STOP_RELEASE);
-        STATUS_CASE(ERROR_DRM_STORAGE_READ);
-        STATUS_CASE(ERROR_DRM_STORAGE_WRITE);
-        STATUS_CASE(ERROR_DRM_ZERO_SUBSAMPLES);
+        STATUS_CASE(DRM_UNKNOWN);
+        STATUS_CASE(DRM_NO_LICENSE);
+        STATUS_CASE(DRM_LICENSE_EXPIRED);
+        STATUS_CASE(DRM_RESOURCE_BUSY);
+        STATUS_CASE(DRM_INSUFFICIENT_OUTPUT_PROTECTION);
+        STATUS_CASE(DRM_SESSION_NOT_OPENED);
+        STATUS_CASE(DRM_CANNOT_HANDLE);
+        STATUS_CASE(DRM_INSUFFICIENT_SECURITY);
+        STATUS_CASE(DRM_FRAME_TOO_LARGE);
+        STATUS_CASE(DRM_SESSION_LOST_STATE);
+        STATUS_CASE(DRM_CERTIFICATE_MALFORMED);
+        STATUS_CASE(DRM_CERTIFICATE_MISSING);
+        STATUS_CASE(DRM_CRYPTO_LIBRARY);
+        STATUS_CASE(DRM_GENERIC_OEM);
+        STATUS_CASE(DRM_GENERIC_PLUGIN);
+        STATUS_CASE(DRM_INIT_DATA);
+        STATUS_CASE(DRM_KEY_NOT_LOADED);
+        STATUS_CASE(DRM_LICENSE_PARSE);
+        STATUS_CASE(DRM_LICENSE_POLICY);
+        STATUS_CASE(DRM_LICENSE_RELEASE);
+        STATUS_CASE(DRM_LICENSE_REQUEST_REJECTED);
+        STATUS_CASE(DRM_LICENSE_RESTORE);
+        STATUS_CASE(DRM_LICENSE_STATE);
+        STATUS_CASE(DRM_MEDIA_FRAMEWORK);
+        STATUS_CASE(DRM_PROVISIONING_CERTIFICATE);
+        STATUS_CASE(DRM_PROVISIONING_CONFIG);
+        STATUS_CASE(DRM_PROVISIONING_PARSE);
+        STATUS_CASE(DRM_PROVISIONING_REQUEST_REJECTED);
+        STATUS_CASE(DRM_PROVISIONING_RETRY);
+        STATUS_CASE(DRM_RESOURCE_CONTENTION);
+        STATUS_CASE(DRM_SECURE_STOP_RELEASE);
+        STATUS_CASE(DRM_STORAGE_READ);
+        STATUS_CASE(DRM_STORAGE_WRITE);
+        STATUS_CASE(DRM_ZERO_SUBSAMPLES);
+        STATUS_CASE(DRM_INVALID_STATE);
+        STATUS_CASE(BAD_VALUE);
+        STATUS_CASE(DRM_NOT_PROVISIONED);
+        STATUS_CASE(DRM_DEVICE_REVOKED);
+        STATUS_CASE(DRM_DECRYPT);
+        STATUS_CASE(DEAD_OBJECT);
+#undef ERROR_BAD_VALUE
+#undef ERROR_DEAD_OBJECT
 #undef STATUS_CASE
     }
-    return static_cast<int>(err);
+    return ENUM_DRM_UNKNOWN;
 }
 
 int DrmPluginSecurityLevelToJavaSecurityLevel(DrmPlugin::SecurityLevel securityLevel) {
@@ -187,12 +197,12 @@
 
 DrmStatus DrmMetricsLogger::closeSession(Vector<uint8_t> const& sessionId) {
     std::vector<uint8_t> sid = toStdVec(sessionId);
-    {
+    DrmStatus status = mImpl->closeSession(sessionId);
+    if (status == OK) {
         const std::lock_guard<std::mutex> lock(mSessionMapMutex);
         mSessionMap.erase(sid);
-    }
-    DrmStatus status = mImpl->closeSession(sessionId);
-    if (status != OK) {
+    } else {
+        // TODO(b/275729711): reclaim sessions that failed to close
         reportMediaDrmErrored(status, __func__, sid);
     }
     return status;
@@ -582,7 +592,7 @@
         }
     }
     mediametrics_setCString(handle, "api", api);
-    mediametrics_setInt32(handle, "error_code", MediaErrorToJavaError(error_code));
+    mediametrics_setInt32(handle, "error_code", MediaErrorToEnum(error_code));
     mediametrics_setInt32(handle, "cdm_err", error_code.getCdmErr());
     mediametrics_setInt32(handle, "oem_err", error_code.getOemErr());
     mediametrics_setInt32(handle, "error_context", error_code.getContext());
diff --git a/drm/libmediadrm/DrmSessionManager.cpp b/drm/libmediadrm/DrmSessionManager.cpp
index 301538f..6744e25 100644
--- a/drm/libmediadrm/DrmSessionManager.cpp
+++ b/drm/libmediadrm/DrmSessionManager.cpp
@@ -36,13 +36,6 @@
 using aidl::android::media::MediaResourceParcel;
 using aidl::android::media::ClientInfoParcel;
 
-namespace {
-void ResourceManagerServiceDied(void* cookie) {
-    auto thiz = static_cast<DrmSessionManager*>(cookie);
-    thiz->binderDied();
-}
-}
-
 using ::ndk::ScopedAStatus;
 
 static String8 GetSessionIdString(const Vector<uint8_t> &sessionId) {
@@ -60,6 +53,12 @@
     return vec;
 }
 
+static Vector<uint8_t> toAndroidVec(const std::vector<uint8_t>& array) {
+    Vector<uint8_t> vec;
+    vec.appendArray(array.data(), array.size());
+    return vec;
+}
+
 static std::vector<MediaResourceParcel> toResourceVec(
         const Vector<uint8_t> &sessionId, int64_t value) {
     using Type = aidl::android::media::MediaResourceType;
@@ -72,11 +71,6 @@
     return resources;
 }
 
-static std::shared_ptr<IResourceManagerService> getResourceManagerService() {
-    ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
-    return IResourceManagerService::fromBinder(binder);
-}
-
 bool isEqualSessionId(const Vector<uint8_t> &sessionId1, const Vector<uint8_t> &sessionId2) {
     if (sessionId1.size() != sessionId2.size()) {
         return false;
@@ -96,16 +90,15 @@
 }
 
 DrmSessionManager::DrmSessionManager()
-    : DrmSessionManager(getResourceManagerService()) {
+    : DrmSessionManager(nullptr) {
 }
 
 DrmSessionManager::DrmSessionManager(const std::shared_ptr<IResourceManagerService> &service)
     : mService(service),
-      mInitialized(false),
-      mDeathRecipient(AIBinder_DeathRecipient_new(ResourceManagerServiceDied)) {
-    if (mService == NULL) {
-        ALOGE("Failed to init ResourceManagerService");
-    }
+      mDeathRecipient(::ndk::ScopedAIBinder_DeathRecipient(
+          AIBinder_DeathRecipient_new(ResourceManagerServiceDied))) {
+    // Register a callback that frees the cookie once it is unlinked from the DeathRecipient.
+    AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(), BinderUnlinkedCallback);
 }
 
 DrmSessionManager::~DrmSessionManager() {
@@ -114,14 +107,64 @@
     }
 }
 
-void DrmSessionManager::init() {
+status_t DrmSessionManager::init() {
     Mutex::Autolock lock(mLock);
-    if (mInitialized) {
+    getResourceManagerService_l();
+    if (mService == nullptr) {
+        ALOGE("Failed to init ResourceManagerService");
+        return DEAD_OBJECT;
+    }
+
+    return OK;
+}
+
+void DrmSessionManager::getResourceManagerService_l() {
+    if (mService != nullptr) {
         return;
     }
-    mInitialized = true;
-    if (mService != NULL) {
-        AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
+
+    // Get binder interface to resource manager.
+    ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
+    mService = IResourceManagerService::fromBinder(binder);
+    if (mService == nullptr) {
+        ALOGE("Failed to get ResourceManagerService");
+        return;
+    }
+
+    // Create the context that is passed as the cookie to the binder death notification.
+    // The context gets deleted in BinderUnlinkedCallback.
+    BinderDiedContext* context = new BinderDiedContext{
+        .mDrmSessionManager = wp<DrmSessionManager>::fromExisting(this)};
+    // Register for the callbacks by linking to death notification.
+    AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), context);
+
+    // If the RM was restarted, re-register all the resources.
+    if (mBinderDied) {
+        reRegisterAllResources_l();
+        mBinderDied = false;
+    }
+}
+
+void DrmSessionManager::reRegisterAllResources_l() {
+    if (mSessionMap.empty()) {
+        // Nothing to register.
+        ALOGV("No resources to add");
+        return;
+    }
+
+    if (mService == nullptr) {
+        ALOGW("Service isn't available");
+        return;
+    }
+
+    // Go through the session map and re-register all the resources for those sessions.
+    for (SessionInfoMap::const_iterator iter = mSessionMap.begin();
+         iter != mSessionMap.end(); ++iter) {
+        ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(iter->second.pid),
+                                    .uid = static_cast<int32_t>(iter->second.uid),
+                                    .id = iter->second.clientId};
+        mService->addResource(clientInfo, iter->second.drm,
+                              toResourceVec(toAndroidVec(iter->first), iter->second.resourceValue));
     }
 }
 
@@ -137,7 +180,7 @@
     }
 
     static int64_t clientId = 0;
-    mSessionMap[toStdVec(sessionId)] = (SessionInfo){pid, uid, clientId};
+    mSessionMap[toStdVec(sessionId)] = (SessionInfo){pid, uid, clientId, drm, INT64_MAX};
     ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(pid),
                                 .uid = static_cast<int32_t>(uid),
                                 .id = clientId++};
@@ -154,6 +197,7 @@
     }
 
     auto info = it->second;
+    info.resourceValue = -1;
     ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(info.pid),
                                 .uid = static_cast<int32_t>(info.uid),
                                 .id = info.clientId};
@@ -215,7 +259,31 @@
 void DrmSessionManager::binderDied() {
     ALOGW("ResourceManagerService died.");
     Mutex::Autolock lock(mLock);
-    mService.reset();
+    mService = nullptr;
+    mBinderDied = true;
+    // Start an async operation that reconnects with the ResourceManagerService
+    // and re-registers all the resources.
+    mGetServiceFuture = std::async(std::launch::async, [this] { getResourceManagerService(); });
+}
+
+void DrmSessionManager::ResourceManagerServiceDied(void* cookie) {
+    BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
+
+    // Validate the context and check if the DrmSessionManager object is still in scope.
+    if (context != nullptr) {
+        sp<DrmSessionManager> thiz = context->mDrmSessionManager.promote();
+        if (thiz != nullptr) {
+            thiz->binderDied();
+        } else {
+            ALOGI("DrmSessionManager is out of scope already");
+        }
+    }
+}
+
+void DrmSessionManager::BinderUnlinkedCallback(void* cookie) {
+    BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
+    // The context is no longer needed once the binder is unlinked, so delete it here.
+    delete context;
 }
 
 }  // namespace android
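For illustration, a minimal sketch of the cookie-based death-notification pattern that the change above adopts. The names MyManager, handleServiceDeath, linkToService, mRecipient and the callback names are hypothetical; only the AIBinder_* and wp<>::fromExisting calls mirror the code in this change.

    struct Cookie { wp<MyManager> owner; };  // weak ref: the owner may already be destroyed

    static void onServiceDied(void* cookie) {
        auto* c = static_cast<Cookie*>(cookie);
        if (sp<MyManager> mgr = c->owner.promote(); mgr != nullptr) {
            mgr->handleServiceDeath();  // only dereference while the owner is still alive
        }
    }

    static void onUnlinked(void* cookie) {
        delete static_cast<Cookie*>(cookie);  // the cookie is not used after unlinking
    }

    void MyManager::linkToService(const ::ndk::SpAIBinder& binder) {
        mRecipient = ::ndk::ScopedAIBinder_DeathRecipient(
                AIBinder_DeathRecipient_new(onServiceDied));
        AIBinder_DeathRecipient_setOnUnlinked(mRecipient.get(), onUnlinked);
        AIBinder_linkToDeath(binder.get(), mRecipient.get(),
                             new Cookie{wp<MyManager>::fromExisting(this)});
    }

The heap-allocated cookie plus the unlinked callback avoids use-after-free in both directions: the weak pointer protects against the manager being destroyed before the service dies, and deferring the delete to the unlinked callback protects against the cookie being freed while a death notification is still possible.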
diff --git a/drm/libmediadrm/TEST_MAPPING b/drm/libmediadrm/TEST_MAPPING
index bc15879..8d7be22 100644
--- a/drm/libmediadrm/TEST_MAPPING
+++ b/drm/libmediadrm/TEST_MAPPING
@@ -1,19 +1,19 @@
 {
   "presubmit": [
     {
-      "name": "GtsMediaTestCases",
+      "name": "WvtsDeviceTestCases",
       "options" : [
         {
 	  "include-annotation": "android.platform.test.annotations.Presubmit"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+          "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
         },
         {
-          "include-filter": "com.google.android.media.gts.MediaDrmTest"
+          "include-filter": "com.google.android.media.wvts.MediaDrmTest"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineDashPolicyTests"
+          "include-filter": "com.google.android.media.wvts.WidevineDashPolicyTests"
         }
       ]
     }
diff --git a/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h b/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h
index 7666f04..e72f7f7 100644
--- a/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h
+++ b/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h
@@ -25,41 +25,49 @@
 
 namespace android {
 
+// Keep enums in sync with frameworks/proto_logging/stats/enums/media/drm/enums.proto
+
 enum {
-    JERROR_DRM_UNKNOWN = 0,
-    JERROR_DRM_NO_LICENSE = 1,
-    JERROR_DRM_LICENSE_EXPIRED = 2,
-    JERROR_DRM_RESOURCE_BUSY = 3,
-    JERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION = 4,
-    JERROR_DRM_SESSION_NOT_OPENED = 5,
-    JERROR_DRM_CANNOT_HANDLE = 6,
-    JERROR_DRM_INSUFFICIENT_SECURITY = 7,
-    JERROR_DRM_FRAME_TOO_LARGE = 8,
-    JERROR_DRM_SESSION_LOST_STATE = 9,
-    JERROR_DRM_CERTIFICATE_MALFORMED = 10,
-    JERROR_DRM_CERTIFICATE_MISSING = 11,
-    JERROR_DRM_CRYPTO_LIBRARY = 12,
-    JERROR_DRM_GENERIC_OEM = 13,
-    JERROR_DRM_GENERIC_PLUGIN = 14,
-    JERROR_DRM_INIT_DATA = 15,
-    JERROR_DRM_KEY_NOT_LOADED = 16,
-    JERROR_DRM_LICENSE_PARSE = 17,
-    JERROR_DRM_LICENSE_POLICY = 18,
-    JERROR_DRM_LICENSE_RELEASE = 19,
-    JERROR_DRM_LICENSE_REQUEST_REJECTED = 20,
-    JERROR_DRM_LICENSE_RESTORE = 21,
-    JERROR_DRM_LICENSE_STATE = 22,
-    JERROR_DRM_MEDIA_FRAMEWORK = 23,
-    JERROR_DRM_PROVISIONING_CERTIFICATE = 24,
-    JERROR_DRM_PROVISIONING_CONFIG = 25,
-    JERROR_DRM_PROVISIONING_PARSE = 26,
-    JERROR_DRM_PROVISIONING_REQUEST_REJECTED = 27,
-    JERROR_DRM_PROVISIONING_RETRY = 28,
-    JERROR_DRM_RESOURCE_CONTENTION = 29,
-    JERROR_DRM_SECURE_STOP_RELEASE = 30,
-    JERROR_DRM_STORAGE_READ = 31,
-    JERROR_DRM_STORAGE_WRITE = 32,
-    JERROR_DRM_ZERO_SUBSAMPLES = 33,
+    ENUM_DRM_UNKNOWN = 0,
+    ENUM_DRM_NO_LICENSE = 1,
+    ENUM_DRM_LICENSE_EXPIRED = 2,
+    ENUM_DRM_RESOURCE_BUSY = 3,
+    ENUM_DRM_INSUFFICIENT_OUTPUT_PROTECTION = 4,
+    ENUM_DRM_SESSION_NOT_OPENED = 5,
+    ENUM_DRM_CANNOT_HANDLE = 6,
+    ENUM_DRM_INSUFFICIENT_SECURITY = 7,
+    ENUM_DRM_FRAME_TOO_LARGE = 8,
+    ENUM_DRM_SESSION_LOST_STATE = 9,
+    ENUM_DRM_CERTIFICATE_MALFORMED = 10,
+    ENUM_DRM_CERTIFICATE_MISSING = 11,
+    ENUM_DRM_CRYPTO_LIBRARY = 12,
+    ENUM_DRM_GENERIC_OEM = 13,
+    ENUM_DRM_GENERIC_PLUGIN = 14,
+    ENUM_DRM_INIT_DATA = 15,
+    ENUM_DRM_KEY_NOT_LOADED = 16,
+    ENUM_DRM_LICENSE_PARSE = 17,
+    ENUM_DRM_LICENSE_POLICY = 18,
+    ENUM_DRM_LICENSE_RELEASE = 19,
+    ENUM_DRM_LICENSE_REQUEST_REJECTED = 20,
+    ENUM_DRM_LICENSE_RESTORE = 21,
+    ENUM_DRM_LICENSE_STATE = 22,
+    ENUM_DRM_MEDIA_FRAMEWORK = 23,
+    ENUM_DRM_PROVISIONING_CERTIFICATE = 24,
+    ENUM_DRM_PROVISIONING_CONFIG = 25,
+    ENUM_DRM_PROVISIONING_PARSE = 26,
+    ENUM_DRM_PROVISIONING_REQUEST_REJECTED = 27,
+    ENUM_DRM_PROVISIONING_RETRY = 28,
+    ENUM_DRM_RESOURCE_CONTENTION = 29,
+    ENUM_DRM_SECURE_STOP_RELEASE = 30,
+    ENUM_DRM_STORAGE_READ = 31,
+    ENUM_DRM_STORAGE_WRITE = 32,
+    ENUM_DRM_ZERO_SUBSAMPLES = 33,
+    ENUM_DRM_INVALID_STATE = 34,
+    ENUM_BAD_VALUE = 35,
+    ENUM_DRM_NOT_PROVISIONED = 36,
+    ENUM_DRM_DEVICE_REVOKED = 37,
+    ENUM_DRM_DECRYPT = 38,
+    ENUM_DEAD_OBJECT = 39,
 };
 
 enum {
diff --git a/drm/libmediadrm/include/mediadrm/DrmSessionManager.h b/drm/libmediadrm/include/mediadrm/DrmSessionManager.h
index c56bf01..025261d 100644
--- a/drm/libmediadrm/include/mediadrm/DrmSessionManager.h
+++ b/drm/libmediadrm/include/mediadrm/DrmSessionManager.h
@@ -27,8 +27,10 @@
 #include <utils/threads.h>
 #include <utils/Vector.h>
 
+#include <future>
 #include <map>
 #include <memory>
+#include <set>
 #include <utility>
 #include <vector>
 
@@ -38,6 +40,7 @@
 
 using aidl::android::media::IResourceManagerClient;
 using aidl::android::media::IResourceManagerService;
+using aidl::android::media::MediaResourceParcel;
 
 bool isEqualSessionId(const Vector<uint8_t> &sessionId1, const Vector<uint8_t> &sessionId2);
 
@@ -45,6 +48,9 @@
     pid_t pid;
     uid_t uid;
     int64_t clientId;
+    std::shared_ptr<IResourceManagerClient> drm;
+    int64_t resourceValue;
+
 };
 
 typedef std::map<std::vector<uint8_t>, SessionInfo> SessionInfoMap;
@@ -66,20 +72,52 @@
     size_t getSessionCount() const;
     bool containsSession(const Vector<uint8_t>& sessionId) const;
 
-    // implements DeathRecipient
-    void binderDied();
-
 protected:
     virtual ~DrmSessionManager();
 
 private:
-    void init();
+    status_t init();
 
-    std::shared_ptr<IResourceManagerService> mService;
+    // To set up the binder interface with the resource manager service.
+    void getResourceManagerService() {
+        Mutex::Autolock lock(mLock);
+        getResourceManagerService_l();
+    }
+    void getResourceManagerService_l();
+
+    // Re-registers all the resources that are currently registered with
+    // the ResourceManagerService.
+    // This function is called right after the ResourceManagerService
+    // restarts, to make sure that the newly started instance
+    // knows about the current resource usage.
+    void reRegisterAllResources_l();
+
+    // For binder death handling
+    static void ResourceManagerServiceDied(void* cookie);
+    static void BinderUnlinkedCallback(void* cookie);
+    void binderDied();
+
+    // BinderDiedContext defines the cookie that is passed to the DeathRecipient.
+    // Since it can maintain more context than a raw pointer, we can
+    // validate that the DrmSessionManager is still in scope
+    // before dereferencing it upon the binder death.
+    struct BinderDiedContext {
+        wp<DrmSessionManager> mDrmSessionManager;
+    };
+
+    std::shared_ptr<IResourceManagerService> mService = nullptr;
     mutable Mutex mLock;
     SessionInfoMap mSessionMap;
-    bool mInitialized;
+    bool mBinderDied = false;
     ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+    /**
+     * Reconnecting with the ResourceManagerService, after its binder interface dies,
+     * is done asynchronously. The reconnection also makes sure that all the resources
+     * associated with this DrmSessionManager are re-added to the new instance
+     * of the ResourceManagerService, so that the resource state persists.
+     * We must store the returned future to guarantee a truly asynchronous operation.
+     */
+    std::future<void> mGetServiceFuture;
 
     DISALLOW_EVIL_CONSTRUCTORS(DrmSessionManager);
 };
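The comment about storing the future is the key detail: a std::future returned by std::async(std::launch::async, ...) blocks in its destructor until the task completes, so discarding it would make the "asynchronous" reconnect run synchronously inside binderDied(). A minimal sketch of the difference, where reconnect() is a hypothetical stand-in for getResourceManagerService():

    #include <future>

    // Effectively synchronous: the temporary future is destroyed at the end of the
    // statement, and its destructor waits for reconnect() to finish.
    std::async(std::launch::async, [this] { reconnect(); });

    // Truly asynchronous: the future is kept alive in a member, so the call returns
    // immediately and the task keeps running in the background.
    mReconnectFuture = std::async(std::launch::async, [this] { reconnect(); });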
diff --git a/include/drm/TEST_MAPPING b/include/drm/TEST_MAPPING
index 74fa50d..8595f12 100644
--- a/include/drm/TEST_MAPPING
+++ b/include/drm/TEST_MAPPING
@@ -1,16 +1,16 @@
 {
   "presubmit": [
     {
-      "name": "GtsMediaTestCases",
+      "name": "WvtsDeviceTestCases",
       "options" : [
         {
           "include-annotation": "android.platform.test.annotations.Presubmit"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+          "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
+          "include-filter": "com.google.android.media.wvts.WidevineH264PlaybackTests"
         }
       ]
     }
diff --git a/include/media/Interpolator.h b/include/media/Interpolator.h
index 71e7604..0ee8779 100644
--- a/include/media/Interpolator.h
+++ b/include/media/Interpolator.h
@@ -122,7 +122,7 @@
             // monotonic computation.
             // we use lazy computation here - if we precompute in
             // a single pass, duplicate secant computations may be avoided.
-            S sec, sec0, sec1;
+            S sec{}, sec0{}, sec1{};  // initialization not needed, used for clang-tidy
             if (!catmullRom || monotonic) {
                 sec = (high->second - low->second) / interval;
                 sec0 = low2 != this->end()
@@ -269,7 +269,7 @@
 
         // Note: We don't need to check size is within some bounds as
         // the Parcel read will fail if size is incorrectly specified too large.
-        float lastx;
+        float lastx = 0.f;  // initialization not needed, used for clang-tidy
         for (uint32_t i = 0; i < size; ++i) {
             float x = config.xy[i * 2];
             float y = config.xy[i * 2 + 1];
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index 9e51cff..cd5d354 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -21,16 +21,16 @@
     ],
     "presubmit": [
         {
-            "name": "GtsMediaTestCases",
+            "name": "WvtsDeviceTestCases",
             "options" : [
                 {
                     "include-annotation": "android.platform.test.annotations.Presubmit"
                 },
                 {
-                    "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+                    "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
                 },
                 {
-                    "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
+                    "include-filter": "com.google.android.media.wvts.WidevineH264PlaybackTests"
                 }
             ],
             "file_patterns": ["(?i)drm|crypto"]
@@ -44,53 +44,5 @@
             ],
             "file_patterns": ["(?i)drm|crypto"]
         }
-    ],
-
-    "platinum-postsubmit": [
-        // runs regularly, independent of changes in this tree.
-        // signals if changes elsewhere break media functionality
-        // @FlakyTest: in staged-postsubmit, but not postsubmit
-        {
-            "name": "CtsMediaCodecTestCases",
-            "options": [
-                {
-                    "include-filter": "android.media.codec.cts.EncodeDecodeTest"
-                }
-            ]
-        },
-        {
-            "name": "CtsMediaCodecTestCases",
-            "options": [
-                {
-                    "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
-                },
-                {
-                    "exclude-annotation": "androidx.test.filters.FlakyTest"
-                }
-            ]
-        }
-    ],
-
-    "staged-platinum-postsubmit": [
-        // runs every four hours
-        {
-            "name": "CtsMediaCodecTestCases",
-            "options": [
-                {
-                    "include-filter": "android.media.codec.cts.EncodeDecodeTest"
-                }
-            ]
-        },
-        {
-            "name": "CtsMediaCodecTestCases",
-            "options": [
-                {
-                    "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
-                }
-            ]
-        }
     ]
-
-    // TODO (b/229286407) Add EncodeDecodeTest and DecodeEditEncodeTest to
-    // platinum-postsubmit once issues in cuttlefish are fixed
 }
diff --git a/media/audioaidlconversion/AidlConversionCore.cpp b/media/audioaidlconversion/AidlConversionCore.cpp
new file mode 100644
index 0000000..948e35d
--- /dev/null
+++ b/media/audioaidlconversion/AidlConversionCore.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlConversionCore"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <media/AidlConversionCore.h>
+#include <media/AidlConversionCppNdk.h>
+
+namespace aidl {
+namespace android {
+
+using MicrophoneDirection = hardware::audio::core::IStreamIn::MicrophoneDirection;
+using ::android::BAD_VALUE;
+using ::android::OK;
+using ::android::status_t;
+using ::android::base::unexpected;
+
+ConversionResult<audio_microphone_direction_t>
+aidl2legacy_MicrophoneDirection_audio_microphone_direction_t(MicrophoneDirection aidl) {
+    switch (aidl) {
+        case MicrophoneDirection::UNSPECIFIED:
+            return MIC_DIRECTION_UNSPECIFIED;
+        case MicrophoneDirection::FRONT:
+            return MIC_DIRECTION_FRONT;
+        case MicrophoneDirection::BACK:
+            return MIC_DIRECTION_BACK;
+        case MicrophoneDirection::EXTERNAL:
+            return MIC_DIRECTION_EXTERNAL;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<MicrophoneDirection>
+legacy2aidl_audio_microphone_direction_t_MicrophoneDirection(audio_microphone_direction_t legacy) {
+    switch (legacy) {
+        case MIC_DIRECTION_UNSPECIFIED:
+            return MicrophoneDirection::UNSPECIFIED;
+        case MIC_DIRECTION_FRONT:
+            return MicrophoneDirection::FRONT;
+        case MIC_DIRECTION_BACK:
+            return MicrophoneDirection::BACK;
+        case MIC_DIRECTION_EXTERNAL:
+            return MicrophoneDirection::EXTERNAL;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+}  // namespace android
+}  // namespace aidl
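These converters return ConversionResult<T> (an android::base::expected-style wrapper), so callers either check ok()/value() explicitly or go through the VALUE_OR_RETURN macro family. A hedged usage sketch; applyDirection and its surroundings are hypothetical, only the converter name comes from the file above:

    using ::aidl::android::hardware::audio::core::IStreamIn;

    ::android::status_t applyDirection(IStreamIn::MicrophoneDirection aidlDir) {
        auto conv = ::aidl::android::
                aidl2legacy_MicrophoneDirection_audio_microphone_direction_t(aidlDir);
        if (!conv.ok()) {
            return ::android::BAD_VALUE;  // unrecognized enumerator
        }
        audio_microphone_direction_t legacyDir = conv.value();
        // ... hand legacyDir to the legacy stream-in API ...
        return ::android::OK;
    }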
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 951a0e7..4d3f9bd 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -14,8 +14,11 @@
  * limitations under the License.
  */
 
+#include <stdio.h>
+
 #include <algorithm>
 #include <map>
+#include <sstream>
 #include <utility>
 #include <vector>
 
@@ -48,6 +51,7 @@
 using ::android::status_t;
 using ::android::base::unexpected;
 
+using media::audio::common::AudioAttributes;
 using media::audio::common::AudioChannelLayout;
 using media::audio::common::AudioConfig;
 using media::audio::common::AudioConfigBase;
@@ -60,6 +64,7 @@
 using media::audio::common::AudioEncapsulationMetadataType;
 using media::audio::common::AudioEncapsulationMode;
 using media::audio::common::AudioEncapsulationType;
+using media::audio::common::AudioFlag;
 using media::audio::common::AudioFormatDescription;
 using media::audio::common::AudioFormatType;
 using media::audio::common::AudioGain;
@@ -93,6 +98,20 @@
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // Converters
 
+namespace {
+
+std::vector<std::string> splitString(const std::string& s, char separator) {
+    std::istringstream iss(s);
+    std::string t;
+    std::vector<std::string> result;
+    while (std::getline(iss, t, separator)) {
+        result.push_back(std::move(t));
+    }
+    return result;
+}
+
+}  // namespace
+
 ::android::status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize) {
     if (aidl.size() > maxSize - 1) {
         return BAD_VALUE;
@@ -260,12 +279,17 @@
 
         DEFINE_INPUT_LAYOUT(MONO),
         DEFINE_INPUT_LAYOUT(STEREO),
+        DEFINE_INPUT_LAYOUT(2POINT1),
         DEFINE_INPUT_LAYOUT(FRONT_BACK),
+        DEFINE_INPUT_LAYOUT(TRI),
+        DEFINE_INPUT_LAYOUT(3POINT1),
         // AUDIO_CHANNEL_IN_6 not supported
         DEFINE_INPUT_LAYOUT(2POINT0POINT2),
         DEFINE_INPUT_LAYOUT(2POINT1POINT2),
         DEFINE_INPUT_LAYOUT(3POINT0POINT2),
         DEFINE_INPUT_LAYOUT(3POINT1POINT2),
+        DEFINE_INPUT_LAYOUT(QUAD),
+        DEFINE_INPUT_LAYOUT(PENTA),
         DEFINE_INPUT_LAYOUT(5POINT1)
 #undef DEFINE_INPUT_LAYOUT
     };
@@ -570,7 +594,6 @@
                 GET_DEVICE_DESC_CONNECTION(BT_LE));
         return pairs;
     }();
-#undef GET_DEVICE_DESC_CONNECTION
     return pairs;
 }
 
@@ -998,55 +1021,161 @@
     }
 }
 
+AudioDeviceAddress::Tag suggestDeviceAddressTag(const AudioDeviceDescription& description) {
+    using Tag = AudioDeviceAddress::Tag;
+    if (std::string connection = description.connection;
+            connection == GET_DEVICE_DESC_CONNECTION(BT_A2DP) ||
+            // Note: BT LE Broadcast uses a "group id".
+            (description.type != AudioDeviceType::OUT_BROADCAST &&
+                    connection == GET_DEVICE_DESC_CONNECTION(BT_LE)) ||
+            connection == GET_DEVICE_DESC_CONNECTION(BT_SCO) ||
+            connection == GET_DEVICE_DESC_CONNECTION(WIRELESS)) {
+        return Tag::mac;
+    } else if (connection == GET_DEVICE_DESC_CONNECTION(IP_V4)) {
+        return Tag::ipv4;
+    } else if (connection == GET_DEVICE_DESC_CONNECTION(USB)) {
+        return Tag::alsa;
+    }
+    return Tag::id;
+}
+
 ::android::status_t aidl2legacy_AudioDevice_audio_device(
         const AudioDevice& aidl,
         audio_devices_t* legacyType, char* legacyAddress) {
-    *legacyType = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl.type));
-    return aidl2legacy_string(
-                    aidl.address.get<AudioDeviceAddress::id>(),
-                    legacyAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN);
+    std::string stringAddress;
+    RETURN_STATUS_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+                    aidl, legacyType, &stringAddress));
+    return aidl2legacy_string(stringAddress, legacyAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN);
 }
 
 ::android::status_t aidl2legacy_AudioDevice_audio_device(
         const AudioDevice& aidl,
         audio_devices_t* legacyType, String8* legacyAddress) {
-    *legacyType = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl.type));
-    *legacyAddress = VALUE_OR_RETURN_STATUS(aidl2legacy_string_view_String8(
-                    aidl.address.get<AudioDeviceAddress::id>()));
+    std::string stringAddress;
+    RETURN_STATUS_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+                    aidl, legacyType, &stringAddress));
+    *legacyAddress = VALUE_OR_RETURN_STATUS(aidl2legacy_string_view_String8(stringAddress));
     return OK;
 }
 
 ::android::status_t aidl2legacy_AudioDevice_audio_device(
         const AudioDevice& aidl,
         audio_devices_t* legacyType, std::string* legacyAddress) {
+    using Tag = AudioDeviceAddress::Tag;
     *legacyType = VALUE_OR_RETURN_STATUS(
             aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl.type));
-    *legacyAddress = aidl.address.get<AudioDeviceAddress::id>();
+    char addressBuffer[AUDIO_DEVICE_MAX_ADDRESS_LEN]{};
+    // 'aidl.address' can be empty even when the connection type is not.
+    // This happens for device ports that act as "blueprints". In this case
+    // we pass an empty string using the 'id' variant.
+    switch (aidl.address.getTag()) {
+        case Tag::mac: {
+            const std::vector<uint8_t>& mac = aidl.address.get<AudioDeviceAddress::mac>();
+            if (mac.size() != 6) return BAD_VALUE;
+            snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "%02X:%02X:%02X:%02X:%02X:%02X",
+                    mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
+        } break;
+        case Tag::ipv4: {
+            const std::vector<uint8_t>& ipv4 = aidl.address.get<AudioDeviceAddress::ipv4>();
+            if (ipv4.size() != 4) return BAD_VALUE;
+            snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "%u.%u.%u.%u",
+                    ipv4[0], ipv4[1], ipv4[2], ipv4[3]);
+        } break;
+        case Tag::ipv6: {
+            const std::vector<int32_t>& ipv6 = aidl.address.get<AudioDeviceAddress::ipv6>();
+            if (ipv6.size() != 8) return BAD_VALUE;
+            snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN,
+                    "%04X:%04X:%04X:%04X:%04X:%04X:%04X:%04X",
+                    ipv6[0], ipv6[1], ipv6[2], ipv6[3], ipv6[4], ipv6[5], ipv6[6], ipv6[7]);
+        } break;
+        case Tag::alsa: {
+            const std::vector<int32_t>& alsa = aidl.address.get<AudioDeviceAddress::alsa>();
+            if (alsa.size() != 2) return BAD_VALUE;
+            snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "card=%d;device=%d",
+                    alsa[0], alsa[1]);
+        } break;
+        case Tag::id: {
+            RETURN_STATUS_IF_ERROR(aidl2legacy_string(aidl.address.get<AudioDeviceAddress::id>(),
+                            addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN));
+        } break;
+    }
+    *legacyAddress = addressBuffer;
     return OK;
 }
 
 ConversionResult<AudioDevice> legacy2aidl_audio_device_AudioDevice(
         audio_devices_t legacyType, const char* legacyAddress) {
-    AudioDevice aidl;
-    aidl.type = VALUE_OR_RETURN(
-            legacy2aidl_audio_devices_t_AudioDeviceDescription(legacyType));
-    const std::string aidl_id = VALUE_OR_RETURN(
+    const std::string stringAddress = VALUE_OR_RETURN(
             legacy2aidl_string(legacyAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN));
-    aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::id>(aidl_id);
-    return aidl;
+    return legacy2aidl_audio_device_AudioDevice(legacyType, stringAddress);
 }
 
 ConversionResult<AudioDevice>
 legacy2aidl_audio_device_AudioDevice(
         audio_devices_t legacyType, const String8& legacyAddress) {
+    const std::string stringAddress = VALUE_OR_RETURN(legacy2aidl_String8_string(legacyAddress));
+    return legacy2aidl_audio_device_AudioDevice(legacyType, stringAddress);
+}
+
+ConversionResult<AudioDevice>
+legacy2aidl_audio_device_AudioDevice(
+        audio_devices_t legacyType, const std::string& legacyAddress) {
+    using Tag = AudioDeviceAddress::Tag;
     AudioDevice aidl;
     aidl.type = VALUE_OR_RETURN(
             legacy2aidl_audio_devices_t_AudioDeviceDescription(legacyType));
-    const std::string aidl_id = VALUE_OR_RETURN(
-            legacy2aidl_String8_string(legacyAddress));
-    aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::id>(aidl_id);
+    // 'legacyAddress' can be empty even when the connection type is not.
+    // This happens for device ports that act as "blueprints". In this case
+    // we pass an empty string using the 'id' variant.
+    if (!legacyAddress.empty()) {
+        switch (suggestDeviceAddressTag(aidl.type)) {
+            case Tag::mac: {
+                std::vector<uint8_t> mac(6);
+                int status = sscanf(legacyAddress.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
+                        &mac[0], &mac[1], &mac[2], &mac[3], &mac[4], &mac[5]);
+                if (status != mac.size()) {
+                    ALOGE("%s: malformed MAC address: \"%s\"", __func__, legacyAddress.c_str());
+                    return unexpected(BAD_VALUE);
+                }
+                aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::mac>(std::move(mac));
+            } break;
+            case Tag::ipv4: {
+                std::vector<uint8_t> ipv4(4);
+                int status = sscanf(legacyAddress.c_str(), "%hhu.%hhu.%hhu.%hhu",
+                        &ipv4[0], &ipv4[1], &ipv4[2], &ipv4[3]);
+                if (status != ipv4.size()) {
+                    ALOGE("%s: malformed IPv4 address: \"%s\"", __func__, legacyAddress.c_str());
+                    return unexpected(BAD_VALUE);
+                }
+                aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::ipv4>(std::move(ipv4));
+            } break;
+            case Tag::ipv6: {
+                std::vector<int32_t> ipv6(8);
+                int status = sscanf(legacyAddress.c_str(), "%X:%X:%X:%X:%X:%X:%X:%X",
+                        &ipv6[0], &ipv6[1], &ipv6[2], &ipv6[3], &ipv6[4], &ipv6[5], &ipv6[6],
+                        &ipv6[7]);
+                if (status != ipv6.size()) {
+                    ALOGE("%s: malformed IPv6 address: \"%s\"", __func__, legacyAddress.c_str());
+                    return unexpected(BAD_VALUE);
+                }
+                aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::ipv6>(std::move(ipv6));
+            } break;
+            case Tag::alsa: {
+                std::vector<int32_t> alsa(2);
+                int status = sscanf(legacyAddress.c_str(), "card=%d;device=%d", &alsa[0], &alsa[1]);
+                if (status != alsa.size()) {
+                    ALOGE("%s: malformed ALSA address: \"%s\"", __func__, legacyAddress.c_str());
+                    return unexpected(BAD_VALUE);
+                }
+                aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::alsa>(std::move(alsa));
+            } break;
+            case Tag::id: {
+                aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::id>(legacyAddress);
+            } break;
+        }
+    } else {
+        aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::id>(legacyAddress);
+    }
     return aidl;
 }
 
@@ -1692,6 +1821,156 @@
     return unexpected(BAD_VALUE);
 }
 
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_AudioFlag_audio_flags_mask_t(AudioFlag aidl) {
+    switch (aidl) {
+        case AudioFlag::NONE:
+            return AUDIO_FLAG_NONE;
+        case AudioFlag::AUDIBILITY_ENFORCED:
+            return AUDIO_FLAG_AUDIBILITY_ENFORCED;
+        // There is no AudioFlag::SECURE, see the comment in AudioFlag.aidl
+        //  return AUDIO_FLAG_SECURE;
+        case AudioFlag::SCO:
+            return AUDIO_FLAG_SCO;
+        case AudioFlag::BEACON:
+            return AUDIO_FLAG_BEACON;
+        case AudioFlag::HW_AV_SYNC:
+            return AUDIO_FLAG_HW_AV_SYNC;
+        case AudioFlag::HW_HOTWORD:
+            return AUDIO_FLAG_HW_HOTWORD;
+        case AudioFlag::BYPASS_INTERRUPTION_POLICY:
+            return AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY;
+        case AudioFlag::BYPASS_MUTE:
+            return AUDIO_FLAG_BYPASS_MUTE;
+        case AudioFlag::LOW_LATENCY:
+            return AUDIO_FLAG_LOW_LATENCY;
+        case AudioFlag::DEEP_BUFFER:
+            return AUDIO_FLAG_DEEP_BUFFER;
+        case AudioFlag::NO_MEDIA_PROJECTION:
+            return AUDIO_FLAG_NO_MEDIA_PROJECTION;
+        case AudioFlag::MUTE_HAPTIC:
+            return AUDIO_FLAG_MUTE_HAPTIC;
+        case AudioFlag::NO_SYSTEM_CAPTURE:
+            return AUDIO_FLAG_NO_SYSTEM_CAPTURE;
+        case AudioFlag::CAPTURE_PRIVATE:
+            return AUDIO_FLAG_CAPTURE_PRIVATE;
+        case AudioFlag::CONTENT_SPATIALIZED:
+            return AUDIO_FLAG_CONTENT_SPATIALIZED;
+        case AudioFlag::NEVER_SPATIALIZE:
+            return AUDIO_FLAG_NEVER_SPATIALIZE;
+        case AudioFlag::CALL_REDIRECTION:
+            return AUDIO_FLAG_CALL_REDIRECTION;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioFlag>
+legacy2aidl_audio_flags_mask_t_AudioFlag(audio_flags_mask_t legacy) {
+    switch (legacy) {
+        case AUDIO_FLAG_NONE:
+            return AudioFlag::NONE;
+        case AUDIO_FLAG_AUDIBILITY_ENFORCED:
+            return AudioFlag::AUDIBILITY_ENFORCED;
+        case AUDIO_FLAG_SECURE:
+            return unexpected(BAD_VALUE);
+        case AUDIO_FLAG_SCO:
+            return AudioFlag::SCO;
+        case AUDIO_FLAG_BEACON:
+            return AudioFlag::BEACON;
+        case AUDIO_FLAG_HW_AV_SYNC:
+            return AudioFlag::HW_AV_SYNC;
+        case AUDIO_FLAG_HW_HOTWORD:
+            return AudioFlag::HW_HOTWORD;
+        case AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY:
+            return AudioFlag::BYPASS_INTERRUPTION_POLICY;
+        case AUDIO_FLAG_BYPASS_MUTE:
+            return AudioFlag::BYPASS_MUTE;
+        case AUDIO_FLAG_LOW_LATENCY:
+            return AudioFlag::LOW_LATENCY;
+        case AUDIO_FLAG_DEEP_BUFFER:
+            return AudioFlag::DEEP_BUFFER;
+        case AUDIO_FLAG_NO_MEDIA_PROJECTION:
+            return AudioFlag::NO_MEDIA_PROJECTION;
+        case AUDIO_FLAG_MUTE_HAPTIC:
+            return AudioFlag::MUTE_HAPTIC;
+        case AUDIO_FLAG_NO_SYSTEM_CAPTURE:
+            return AudioFlag::NO_SYSTEM_CAPTURE;
+        case AUDIO_FLAG_CAPTURE_PRIVATE:
+            return AudioFlag::CAPTURE_PRIVATE;
+        case AUDIO_FLAG_CONTENT_SPATIALIZED:
+            return AudioFlag::CONTENT_SPATIALIZED;
+        case AUDIO_FLAG_NEVER_SPATIALIZE:
+            return AudioFlag::NEVER_SPATIALIZE;
+        case AUDIO_FLAG_CALL_REDIRECTION:
+            return AudioFlag::CALL_REDIRECTION;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_int32_t_audio_flags_mask_t_mask(int32_t aidl) {
+    return convertBitmask<audio_flags_mask_t, int32_t, audio_flags_mask_t, AudioFlag>(
+            aidl, aidl2legacy_AudioFlag_audio_flags_mask_t, indexToEnum_bitmask<AudioFlag>,
+            enumToMask_bitmask<audio_flags_mask_t, audio_flags_mask_t>);
+}
+
+ConversionResult<int32_t>
+legacy2aidl_audio_flags_mask_t_int32_t_mask(audio_flags_mask_t legacy) {
+    return convertBitmask<int32_t, audio_flags_mask_t, AudioFlag, audio_flags_mask_t>(
+            legacy, legacy2aidl_audio_flags_mask_t_AudioFlag,
+            indexToEnum_bitmask<audio_flags_mask_t>,
+            enumToMask_bitmask<int32_t, AudioFlag>);
+}
+
+ConversionResult<std::string>
+aidl2legacy_AudioTags_string(const std::vector<std::string>& aidl) {
+    std::ostringstream tagsBuffer;
+    bool hasValue = false;
+    for (const auto& tag : aidl) {
+        if (hasValue) {
+            tagsBuffer << AUDIO_ATTRIBUTES_TAGS_SEPARATOR;
+        }
+        if (strchr(tag.c_str(), AUDIO_ATTRIBUTES_TAGS_SEPARATOR) == nullptr) {
+            tagsBuffer << tag;
+            hasValue = true;
+        } else {
+            ALOGE("Tag is ill-formed: \"%s\"", tag.c_str());
+            return unexpected(BAD_VALUE);
+        }
+    }
+    return tagsBuffer.str();
+}
+
+ConversionResult<std::vector<std::string>>
+legacy2aidl_string_AudioTags(const std::string& legacy) {
+    return splitString(legacy, AUDIO_ATTRIBUTES_TAGS_SEPARATOR);
+}
+
+ConversionResult<audio_attributes_t>
+aidl2legacy_AudioAttributes_audio_attributes_t(const AudioAttributes& aidl) {
+    audio_attributes_t legacy;
+    legacy.content_type = VALUE_OR_RETURN(
+            aidl2legacy_AudioContentType_audio_content_type_t(aidl.contentType));
+    legacy.usage = VALUE_OR_RETURN(aidl2legacy_AudioUsage_audio_usage_t(aidl.usage));
+    legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSource_audio_source_t(aidl.source));
+    legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_flags_mask_t_mask(aidl.flags));
+    auto tagsString = VALUE_OR_RETURN(aidl2legacy_AudioTags_string(aidl.tags));
+    RETURN_IF_ERROR(aidl2legacy_string(tagsString, legacy.tags, sizeof(legacy.tags)));
+    return legacy;
+}
+
+ConversionResult<AudioAttributes>
+legacy2aidl_audio_attributes_t_AudioAttributes(const audio_attributes_t& legacy) {
+    AudioAttributes aidl;
+    aidl.contentType = VALUE_OR_RETURN(
+            legacy2aidl_audio_content_type_t_AudioContentType(legacy.content_type));
+    aidl.usage = VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.usage));
+    aidl.source = VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSource(legacy.source));
+    aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_flags_mask_t_int32_t_mask(legacy.flags));
+    auto tagsString = VALUE_OR_RETURN(legacy2aidl_string(legacy.tags, sizeof(legacy.tags)));
+    aidl.tags = VALUE_OR_RETURN(legacy2aidl_string_AudioTags(tagsString));
+    return aidl;
+}
 
 ConversionResult<audio_encapsulation_mode_t>
 aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(AudioEncapsulationMode aidl) {
@@ -3025,6 +3304,8 @@
 
 }  // namespace android
 
+#undef GET_DEVICE_DESC_CONNECTION
+
 #if defined(BACKEND_NDK)
 }  // aidl
 #endif
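Taken together, suggestDeviceAddressTag and the sscanf parsing above mean that a legacy (type, address-string) pair now round-trips through the structured AudioDeviceAddress variants instead of always using the free-form 'id' string. A hedged usage sketch; the device type and literals are illustrative only, and the usual conversion namespace and using-declarations are assumed to be in scope:

    // Legacy side: a BT A2DP output device identified by a MAC address string.
    audio_devices_t legacyType = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
    std::string legacyAddress = "00:11:22:33:44:55";

    auto aidlDevice = legacy2aidl_audio_device_AudioDevice(legacyType, legacyAddress);
    if (aidlDevice.ok()) {
        // The BT_A2DP connection suggests the 'mac' variant, so the address is stored
        // as six bytes rather than as a free-form 'id' string.
        const std::vector<uint8_t>& mac =
                aidlDevice.value().address.get<AudioDeviceAddress::mac>();
        (void)mac;

        // Converting back reproduces the legacy type and the canonical string form.
        audio_devices_t typeBack;
        std::string addressBack;
        aidl2legacy_AudioDevice_audio_device(aidlDevice.value(), &typeBack, &addressBack);
    }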
diff --git a/media/audioaidlconversion/AidlConversionNdk.cpp b/media/audioaidlconversion/AidlConversionNdk.cpp
index 71c547c..9b14a5e 100644
--- a/media/audioaidlconversion/AidlConversionNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionNdk.cpp
@@ -79,39 +79,42 @@
 }  // namespace
 
 // buffer_provider_t is not supported thus skipped
-ConversionResult<buffer_config_t> aidl2legacy_AudioConfigBase_buffer_config_t(
-        const media::audio::common::AudioConfigBase& aidl, bool isInput) {
+ConversionResult<buffer_config_t> aidl2legacy_AudioConfig_buffer_config_t(
+        const media::audio::common::AudioConfig& aidl, bool isInput) {
     buffer_config_t legacy;
 
-    legacy.samplingRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+    legacy.samplingRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.base.sampleRate));
     legacy.mask |= EFFECT_CONFIG_SMP_RATE;
 
     legacy.channels = VALUE_OR_RETURN(
-            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.channelMask, isInput));
+            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.base.channelMask, isInput));
     legacy.mask |= EFFECT_CONFIG_CHANNELS;
 
-    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormatDescription_audio_format_t(aidl.format));
+    legacy.format =
+            VALUE_OR_RETURN(aidl2legacy_AudioFormatDescription_audio_format_t(aidl.base.format));
     legacy.mask |= EFFECT_CONFIG_FORMAT;
+    legacy.buffer.frameCount = aidl.frameCount;
 
     // TODO: add accessMode and mask
     return legacy;
 }
 
-ConversionResult<media::audio::common::AudioConfigBase>
-legacy2aidl_buffer_config_t_AudioConfigBase(const buffer_config_t& legacy, bool isInput) {
-    media::audio::common::AudioConfigBase aidl;
+ConversionResult<media::audio::common::AudioConfig>
+legacy2aidl_buffer_config_t_AudioConfig(const buffer_config_t& legacy, bool isInput) {
+    media::audio::common::AudioConfig aidl;
 
     if (legacy.mask & EFFECT_CONFIG_SMP_RATE) {
-        aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.samplingRate));
+        aidl.base.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.samplingRate));
     }
     if (legacy.mask & EFFECT_CONFIG_CHANNELS) {
-        aidl.channelMask = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+        aidl.base.channelMask = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
                 static_cast<audio_channel_mask_t>(legacy.channels), isInput));
     }
     if (legacy.mask & EFFECT_CONFIG_FORMAT) {
-        aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormatDescription(
+        aidl.base.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormatDescription(
                 static_cast<audio_format_t>(legacy.format)));
     }
+    aidl.frameCount = legacy.buffer.frameCount;
 
     // TODO: add accessMode and mask
     return aidl;
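The switch from AudioConfigBase to AudioConfig lets the effect buffer_config_t carry frameCount in addition to the base fields; note that on the legacy-to-AIDL path only the fields flagged in legacy.mask are converted, while frameCount is always copied. A small hedged sketch with illustrative values, assuming the converter's namespace is in scope:

    buffer_config_t legacy{};
    legacy.samplingRate = 48000;
    legacy.channels = AUDIO_CHANNEL_OUT_STEREO;
    legacy.format = AUDIO_FORMAT_PCM_FLOAT;
    legacy.buffer.frameCount = 960;
    legacy.mask = EFFECT_CONFIG_SMP_RATE | EFFECT_CONFIG_CHANNELS | EFFECT_CONFIG_FORMAT;

    auto aidl = legacy2aidl_buffer_config_t_AudioConfig(legacy, false /*isInput*/);
    // On success, aidl.value().base.{sampleRate, channelMask, format} come from the
    // masked fields; aidl.value().frameCount is copied from legacy.buffer.frameCount.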
diff --git a/media/audioaidlconversion/AidlConversionNdkCpp.cpp b/media/audioaidlconversion/AidlConversionNdkCpp.cpp
new file mode 100644
index 0000000..ecd2e5e
--- /dev/null
+++ b/media/audioaidlconversion/AidlConversionNdkCpp.cpp
@@ -0,0 +1,187 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <regex>
+#include <type_traits>
+
+#define LOG_TAG "AidlConversionNdkCpp"
+#include <utils/Log.h>
+
+#include <android-base/expected.h>
+#include <android/binder_auto_utils.h>
+#include <android/binder_enums.h>
+#include <android/binder_parcel.h>
+#include <binder/Enums.h>
+#include <media/AidlConversionNdkCpp.h>
+#include <media/AidlConversionUtil.h>
+
+using aidl::android::aidl_utils::statusTFromBinderStatusT;
+
+namespace android {
+
+namespace {
+
+bool isVendorExtension(const std::string& s) {
+    // Per definition in AudioAttributes.aidl and {Playback|Record}TrackMetadata.aidl
+    static const std::regex vendorExtension("VX_[A-Z0-9]{3,}_[_A-Z0-9]+");
+    return std::regex_match(s.begin(), s.end(), vendorExtension);
+}
+
+inline bool isNotVendorExtension(const std::string& s) { return !isVendorExtension(s); }
+
+void filterOutNonVendorTagsInPlace(std::vector<std::string>& tags) {
+    if (std::find_if(tags.begin(), tags.end(), isNotVendorExtension) == tags.end()) {
+        return;
+    }
+    std::vector<std::string> temp;
+    temp.reserve(tags.size());
+    std::copy_if(tags.begin(), tags.end(), std::back_inserter(temp), isVendorExtension);
+    tags = std::move(temp);
+}
+
+// cpp2ndk and ndk2cpp are universal converters that work for any type;
+// however, they are not the most efficient way to convert due to the extra
+// marshaling / unmarshaling step.
+
+template<typename NdkType, typename CppType>
+ConversionResult<NdkType> cpp2ndk(const CppType& cpp) {
+    Parcel cppParcel;
+    RETURN_IF_ERROR(cpp.writeToParcel(&cppParcel));
+    ::ndk::ScopedAParcel ndkParcel(AParcel_create());
+    const int32_t ndkParcelBegin = AParcel_getDataPosition(ndkParcel.get());
+    RETURN_IF_ERROR(statusTFromBinderStatusT(AParcel_unmarshal(
+                            ndkParcel.get(), cppParcel.data(), cppParcel.dataSize())));
+    RETURN_IF_ERROR(statusTFromBinderStatusT(AParcel_setDataPosition(
+                            ndkParcel.get(), ndkParcelBegin)));
+    NdkType ndk;
+    RETURN_IF_ERROR(statusTFromBinderStatusT(ndk.readFromParcel(ndkParcel.get())));
+    return ndk;
+}
+
+template<typename CppType, typename NdkType>
+ConversionResult<CppType> ndk2cpp(const NdkType& ndk) {
+    ::ndk::ScopedAParcel ndkParcel(AParcel_create());
+    RETURN_IF_ERROR(statusTFromBinderStatusT(ndk.writeToParcel(ndkParcel.get())));
+    const int32_t ndkParcelDataSize = AParcel_getDataSize(ndkParcel.get());
+    if (ndkParcelDataSize < 0) {
+        return base::unexpected(BAD_VALUE);
+    }
+    // Parcel does not expose its data in a mutable form, so we have to use an intermediate buffer.
+    std::vector<uint8_t> parcelData(static_cast<size_t>(ndkParcelDataSize));
+    RETURN_IF_ERROR(statusTFromBinderStatusT(AParcel_marshal(
+                            ndkParcel.get(), parcelData.data(), 0, ndkParcelDataSize)));
+    Parcel cppParcel;
+    RETURN_IF_ERROR(cppParcel.setData(parcelData.data(), parcelData.size()));
+    CppType cpp;
+    RETURN_IF_ERROR(cpp.readFromParcel(&cppParcel));
+    return cpp;
+}
+
+// cpp2ndk_Enum and ndk2cpp_Enum are more efficient implementations specifically for enums.
+
+template<typename OutEnum, typename OutEnumRange, typename InEnum>
+        ConversionResult<OutEnum> convertEnum(const OutEnumRange& range, InEnum e) {
+    using InIntType = std::underlying_type_t<InEnum>;
+    static_assert(std::is_same_v<InIntType, std::underlying_type_t<OutEnum>>);
+
+    InIntType inEnumIndex = static_cast<InIntType>(e);
+    OutEnum outEnum = static_cast<OutEnum>(inEnumIndex);
+    if (std::find(range.begin(), range.end(), outEnum) == range.end()) {
+        return base::unexpected(BAD_VALUE);
+    }
+    return outEnum;
+}
+
+template<typename NdkEnum, typename CppEnum>
+        ConversionResult<NdkEnum> cpp2ndk_Enum(CppEnum cpp) {
+    return convertEnum<NdkEnum>(::ndk::enum_range<NdkEnum>(), cpp);
+}
+
+template<typename CppEnum, typename NdkEnum>
+        ConversionResult<CppEnum> ndk2cpp_Enum(NdkEnum ndk) {
+    return convertEnum<CppEnum>(enum_range<CppEnum>(), ndk);
+}
+
+}  // namespace
+
+#define GENERATE_CONVERTERS(packageName, className) \
+    GENERATE_CONVERTERS_IMPL(packageName, _, className)
+
+#define GENERATE_CONVERTERS_IMPL(packageName, prefix, className)        \
+    ConversionResult<::aidl::packageName::className> cpp2ndk##prefix##className( \
+            const ::packageName::className& cpp) {                      \
+        return cpp2ndk<::aidl::packageName::className>(cpp);            \
+    }                                                                   \
+    ConversionResult<::packageName::className> ndk2cpp##prefix##className( \
+            const ::aidl::packageName::className& ndk) {                \
+        return ndk2cpp<::packageName::className>(ndk);                  \
+    }
+
+#define GENERATE_ENUM_CONVERTERS(packageName, className)                \
+    ConversionResult<::aidl::packageName::className> cpp2ndk_##className( \
+            const ::packageName::className& cpp) {                      \
+        return cpp2ndk_Enum<::aidl::packageName::className>(cpp);       \
+    }                                                                   \
+    ConversionResult<::packageName::className> ndk2cpp_##className(     \
+            const ::aidl::packageName::className& ndk) {                \
+        return ndk2cpp_Enum<::packageName::className>(ndk);             \
+}
+
+GENERATE_CONVERTERS(android::media::audio::common, AudioFormatDescription);
+GENERATE_CONVERTERS_IMPL(android::media::audio::common, _Impl_, AudioHalEngineConfig);
+GENERATE_CONVERTERS(android::media::audio::common, AudioMMapPolicyInfo);
+GENERATE_ENUM_CONVERTERS(android::media::audio::common, AudioMMapPolicyType);
+GENERATE_ENUM_CONVERTERS(android::media::audio::common, AudioMode);
+GENERATE_CONVERTERS(android::media::audio::common, AudioPort);
+
+namespace {
+
+// Filter out all AudioAttributes tags that do not conform to the vendor extension pattern.
+template<typename T>
+void filterOutNonVendorTags(T& audioHalEngineConfig) {
+    for (auto& strategy : audioHalEngineConfig.productStrategies) {
+        for (auto& group : strategy.attributesGroups) {
+            for (auto& attr : group.attributes) {
+                filterOutNonVendorTagsInPlace(attr.tags);
+            }
+        }
+    }
+}
+
+}  // namespace
+
+ConversionResult<::aidl::android::media::audio::common::AudioHalEngineConfig>
+cpp2ndk_AudioHalEngineConfig(const ::android::media::audio::common::AudioHalEngineConfig& cpp) {
+    auto conv = cpp2ndk_Impl_AudioHalEngineConfig(cpp);
+    if (conv.ok()) {
+        filterOutNonVendorTags(conv.value());
+    }
+    return conv;
+}
+
+ConversionResult<::android::media::audio::common::AudioHalEngineConfig>
+ndk2cpp_AudioHalEngineConfig(
+        const ::aidl::android::media::audio::common::AudioHalEngineConfig& ndk) {
+    auto conv = ndk2cpp_Impl_AudioHalEngineConfig(ndk);
+    if (conv.ok()) {
+        filterOutNonVendorTags(conv.value());
+    }
+    return conv;
+}
+
+
+}  // namespace android
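Because the cpp2ndk / ndk2cpp helpers simply marshal through a Parcel/AParcel pair, the same pattern works for any parcelable that exists in both backends. A hedged usage sketch of the generated converters; the surrounding code and the port name are illustrative only:

    #include <media/AidlConversionNdkCpp.h>

    // An NDK-backend port, e.g. as received from the AIDL audio HAL.
    ::aidl::android::media::audio::common::AudioPort ndkPort;
    ndkPort.name = "primary output";

    // Convert to the CPP backend used elsewhere in the audio framework; the helper
    // round-trips the parcelable through an AParcel and a Parcel.
    auto cppPort = ::android::ndk2cpp_AudioPort(ndkPort);
    if (cppPort.ok()) {
        // cppPort.value() is a ::android::media::audio::common::AudioPort.
    }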
diff --git a/media/audioaidlconversion/Android.bp b/media/audioaidlconversion/Android.bp
index bdb3a2c..d3a5755 100644
--- a/media/audioaidlconversion/Android.bp
+++ b/media/audioaidlconversion/Android.bp
@@ -152,6 +152,37 @@
 }
 
 /**
+ * Only including AIDL core HAL conversion.
+ */
+cc_library {
+    name: "libaudio_aidl_conversion_core_ndk",
+    srcs: [
+        "AidlConversionCore.cpp",
+    ],
+    header_libs: [
+        "libaudio_aidl_conversion_common_util_ndk",
+    ],
+    export_header_lib_headers: [
+        "libaudio_aidl_conversion_common_util_ndk",
+    ],
+    defaults: [
+        "audio_aidl_conversion_common_default",
+        "latest_android_hardware_audio_common_ndk_shared",
+        "latest_android_hardware_audio_core_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+    ],
+    shared_libs: [
+        "libaudio_aidl_conversion_common_ndk",
+        "libbinder_ndk",
+        "libbase",
+    ],
+    cflags: [
+        "-DBACKEND_NDK",
+    ],
+    min_sdk_version: "31", // AParcelableHolder was introduced in API 31
+}
+
+/**
  * Only including AIDL effect HAL conversion.
  */
 cc_library {
@@ -181,3 +212,27 @@
     ],
     min_sdk_version: "31", //AParcelableHolder has been introduced in 31
 }
+
+/**
+ * Conversions between the NDK and CPP backends for common types.
+ */
+cc_library {
+    name: "libaudio_aidl_conversion_common_ndk_cpp",
+    srcs: [
+        "AidlConversionNdkCpp.cpp",
+    ],
+    defaults: [
+        "audio_aidl_conversion_common_default",
+        "audio_aidl_conversion_common_util_default",
+        "latest_android_media_audio_common_types_cpp_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+    ],
+    shared_libs: [
+        "libbinder_ndk",
+        "libbase",
+    ],
+    cflags: [
+        "-DBACKEND_CPP_NDK",
+    ],
+    min_sdk_version: "33", // AParcel_unmarshal was introduced in API 33
+}
diff --git a/media/audioaidlconversion/TEST_MAPPING b/media/audioaidlconversion/TEST_MAPPING
index a0c9759..216bc12 100644
--- a/media/audioaidlconversion/TEST_MAPPING
+++ b/media/audioaidlconversion/TEST_MAPPING
@@ -1,7 +1,13 @@
 {
   "presubmit": [
     {
-      "name": "audio_aidl_ndk_conversion_tests"
+      "name": "audio_aidl_conversion_tests",
+      "name": "audio_aidl_ndk_conversion_tests",
+      "name": "audio_aidl_ndk_cpp_conversion_tests"
     }
   ]
 }
diff --git a/media/audioaidlconversion/include/media/AidlConversionCore.h b/media/audioaidlconversion/include/media/AidlConversionCore.h
new file mode 100644
index 0000000..aaa2e53
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionCore.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+/**
+ * Can only handle conversion between AIDL (NDK backend) and legacy type.
+ */
+#include <aidl/android/hardware/audio/core/IStreamIn.h>
+#include <media/AidlConversionUtil.h>
+#include <system/audio.h>
+
+namespace aidl {
+namespace android {
+
+ConversionResult<audio_microphone_direction_t>
+aidl2legacy_MicrophoneDirection_audio_microphone_direction_t(
+        hardware::audio::core::IStreamIn::MicrophoneDirection aidl);
+ConversionResult<hardware::audio::core::IStreamIn::MicrophoneDirection>
+legacy2aidl_audio_microphone_direction_t_MicrophoneDirection(audio_microphone_direction_t legacy);
+
+}  // namespace android
+}  // namespace aidl
diff --git a/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h b/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
index 9100892..7268464 100644
--- a/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
+++ b/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
@@ -37,6 +37,7 @@
 #define PREFIX(f) <f>
 #endif
 
+#include PREFIX(android/media/audio/common/AudioAttributes.h)
 #include PREFIX(android/media/audio/common/AudioChannelLayout.h)
 #include PREFIX(android/media/audio/common/AudioConfig.h)
 #include PREFIX(android/media/audio/common/AudioConfigBase.h)
@@ -46,6 +47,7 @@
 #include PREFIX(android/media/audio/common/AudioEncapsulationMetadataType.h)
 #include PREFIX(android/media/audio/common/AudioEncapsulationMode.h)
 #include PREFIX(android/media/audio/common/AudioEncapsulationType.h)
+#include PREFIX(android/media/audio/common/AudioFlag.h)
 #include PREFIX(android/media/audio/common/AudioFormatDescription.h)
 #include PREFIX(android/media/audio/common/AudioGain.h)
 #include PREFIX(android/media/audio/common/AudioGainConfig.h)
@@ -190,6 +192,9 @@
 ConversionResult<media::audio::common::AudioDeviceDescription>
 legacy2aidl_audio_devices_t_AudioDeviceDescription(audio_devices_t legacy);
 
+media::audio::common::AudioDeviceAddress::Tag suggestDeviceAddressTag(
+        const media::audio::common::AudioDeviceDescription& description);
+
 ::android::status_t aidl2legacy_AudioDevice_audio_device(
         const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
         char* legacyAddress);
@@ -204,6 +209,8 @@
         audio_devices_t legacyType, const char* legacyAddress);
 ConversionResult<media::audio::common::AudioDevice> legacy2aidl_audio_device_AudioDevice(
         audio_devices_t legacyType, const ::android::String8& legacyAddress);
+ConversionResult<media::audio::common::AudioDevice> legacy2aidl_audio_device_AudioDevice(
+        audio_devices_t legacyType, const std::string& legacyAddress);
 
 ConversionResult<audio_extra_audio_descriptor>
 aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
@@ -283,6 +290,11 @@
 ConversionResult<media::audio::common::AudioOutputFlags>
 legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
 
+ConversionResult<audio_stream_type_t>
+aidl2legacy_AudioStreamType_audio_stream_type_t(media::audio::common::AudioStreamType aidl);
+ConversionResult<media::audio::common::AudioStreamType>
+legacy2aidl_audio_stream_type_t_AudioStreamType(audio_stream_type_t legacy);
+
 // This type is unnamed in the original definition, thus we name it here.
 using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
 ConversionResult<audio_port_config_mix_ext_usecase>
@@ -345,6 +357,26 @@
 ConversionResult<media::audio::common::AudioUsage> legacy2aidl_audio_usage_t_AudioUsage(
         audio_usage_t legacy);
 
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_AudioFlag_audio_flags_mask_t(media::audio::common::AudioFlag aidl);
+ConversionResult<media::audio::common::AudioFlag>
+legacy2aidl_audio_flags_mask_t_AudioFlag(audio_flags_mask_t legacy);
+
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_int32_t_audio_flags_mask_t_mask(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_audio_flags_mask_t_int32_t_mask(audio_flags_mask_t legacy);
+
+ConversionResult<std::string>
+aidl2legacy_AudioTags_string(const std::vector<std::string>& aidl);
+ConversionResult<std::vector<std::string>>
+legacy2aidl_string_AudioTags(const std::string& legacy);
+
+ConversionResult<audio_attributes_t>
+aidl2legacy_AudioAttributes_audio_attributes_t(const media::audio::common::AudioAttributes& aidl);
+ConversionResult<media::audio::common::AudioAttributes>
+legacy2aidl_audio_attributes_t_AudioAttributes(const audio_attributes_t& legacy);
+
 ConversionResult<audio_uuid_t> aidl2legacy_AudioUuid_audio_uuid_t(
         const media::audio::common::AudioUuid &aidl);
 ConversionResult<media::audio::common::AudioUuid> legacy2aidl_audio_uuid_t_AudioUuid(
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdk.h b/media/audioaidlconversion/include/media/AidlConversionNdk.h
index e92f1a9..813a728 100644
--- a/media/audioaidlconversion/include/media/AidlConversionNdk.h
+++ b/media/audioaidlconversion/include/media/AidlConversionNdk.h
@@ -34,9 +34,9 @@
 namespace aidl {
 namespace android {
 
-ConversionResult<buffer_config_t> aidl2legacy_AudioConfigBase_buffer_config_t(
-        const media::audio::common::AudioConfigBase& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioConfigBase> legacy2aidl_buffer_config_t_AudioConfigBase(
+ConversionResult<buffer_config_t> aidl2legacy_AudioConfig_buffer_config_t(
+        const media::audio::common::AudioConfig& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioConfig> legacy2aidl_buffer_config_t_AudioConfig(
         const buffer_config_t& legacy, bool isInput);
 
 ::android::status_t aidl2legacy_AudioAttributesTags(
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdkCpp.h b/media/audioaidlconversion/include/media/AidlConversionNdkCpp.h
new file mode 100644
index 0000000..f4822aa
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionNdkCpp.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+/**
+ * Conversions between the NDK and CPP backends for common types.
+ */
+#include <aidl/android/media/audio/common/AudioFormatDescription.h>
+#include <aidl/android/media/audio/common/AudioHalEngineConfig.h>
+#include <aidl/android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <aidl/android/media/audio/common/AudioMMapPolicyType.h>
+#include <aidl/android/media/audio/common/AudioMode.h>
+#include <aidl/android/media/audio/common/AudioPort.h>
+#include <android/media/audio/common/AudioFormatDescription.h>
+#include <android/media/audio/common/AudioHalEngineConfig.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <android/media/audio/common/AudioMode.h>
+#include <android/media/audio/common/AudioPort.h>
+#include <media/AidlConversionUtil.h>
+
+namespace android {
+
+#define DECLARE_CONVERTERS(packageName, className)                       \
+    ConversionResult<::aidl::packageName::className>                    \
+    cpp2ndk_##className(const ::packageName::className& cpp);           \
+    ConversionResult<::packageName::className>                          \
+    ndk2cpp_##className(const ::aidl::packageName::className& ndk);
+
+DECLARE_CONVERTERS(android::media::audio::common, AudioFormatDescription);
+DECLARE_CONVERTERS(android::media::audio::common, AudioHalEngineConfig);
+DECLARE_CONVERTERS(android::media::audio::common, AudioMMapPolicyInfo);
+DECLARE_CONVERTERS(android::media::audio::common, AudioMMapPolicyType);
+DECLARE_CONVERTERS(android::media::audio::common, AudioMode);
+DECLARE_CONVERTERS(android::media::audio::common, AudioPort);
+
+#undef DECLARE_CONVERTERS
+
+}  // namespace android
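
For illustration, a minimal round trip through the converters declared above could look like the
sketch below. It is not part of the patch; it relies on the ok()/value() accessors of
ConversionResult that the tests added later in this change also use, and on the NDK-backend field
names (type, pcm) shown in those tests.

    // Sketch: convert an NDK-backend AudioFormatDescription to the CPP backend and back.
    #include <media/AidlConversionNdkCpp.h>

    using NdkAfd = ::aidl::android::media::audio::common::AudioFormatDescription;
    using ::aidl::android::media::audio::common::AudioFormatType;
    using ::aidl::android::media::audio::common::PcmType;

    bool roundTripExample() {
        NdkAfd ndkDesc;
        ndkDesc.type = AudioFormatType::PCM;
        ndkDesc.pcm = PcmType::INT_16_BIT;
        auto cppDesc = ::android::ndk2cpp_AudioFormatDescription(ndkDesc);
        if (!cppDesc.ok()) return false;  // conversion failures are reported via ConversionResult
        auto ndkAgain = ::android::cpp2ndk_AudioFormatDescription(cppDesc.value());
        return ndkAgain.ok() && ndkAgain.value() == ndkDesc;
    }
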
diff --git a/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h b/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
index ed91e2c..656d76a 100644
--- a/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
+++ b/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
@@ -58,15 +58,15 @@
 ConversionResult<To> convertIntegral(From from) {
     // Special handling is required for signed vs. unsigned comparisons, since otherwise we may
     // have the signed converted to unsigned and produce wrong results.
-    if (std::is_signed_v<From> && !std::is_signed_v<To>) {
+    if constexpr (std::is_signed_v<From> && !std::is_signed_v<To>) {
         if (from < 0 || from > std::numeric_limits<To>::max()) {
             return ::android::base::unexpected(::android::BAD_VALUE);
         }
-    } else if (std::is_signed_v<To> && !std::is_signed_v<From>) {
+    } else if constexpr (std::is_signed_v<To> && !std::is_signed_v<From>) {
         if (from > std::numeric_limits<To>::max()) {
             return ::android::base::unexpected(::android::BAD_VALUE);
         }
-    } else {
+    } else /* constexpr */ {
         if (from < std::numeric_limits<To>::min() || from > std::numeric_limits<To>::max()) {
             return ::android::base::unexpected(::android::BAD_VALUE);
         }
@@ -119,6 +119,20 @@
 }
 
 /**
+ * A generic template that helps convert containers of convertible types without
+ * using an intermediate container.
+ */
+template<typename InputContainer, typename OutputContainer, typename Func>
+::android::status_t convertContainer(const InputContainer& input, OutputContainer* output,
+        const Func& itemConversion) {
+    auto ins = std::inserter(*output, output->begin());
+    for (const auto& item : input) {
+        *ins = VALUE_OR_RETURN_STATUS(itemConversion(item));
+    }
+    return ::android::OK;
+}
+
+/**
  * A generic template that helps convert containers of convertible types.
  */
 template<typename OutputContainer, typename InputContainer, typename Func>
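
A usage sketch for the new in-place overload above (not part of the patch; namespace qualifiers on
the conversion helpers are elided for brevity, and it assumes the
aidl2legacy_int32_t_audio_flags_mask_t_mask converter declared earlier in this change):

    // Convert a vector of AIDL flag masks into legacy masks, inserting the results
    // directly into the destination vector instead of building an intermediate container.
    std::vector<int32_t> aidlFlags{1, 2, 3};
    std::vector<audio_flags_mask_t> legacyFlags;
    ::android::status_t status = convertContainer(aidlFlags, &legacyFlags,
            aidl2legacy_int32_t_audio_flags_mask_t_mask);
    // On success, status == ::android::OK and legacyFlags holds one converted item per input.
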
@@ -208,6 +222,34 @@
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // Utilities for handling bitmasks.
+// Some AIDL enums are specified using bit indices, for example:
+//   `AidlEnum { FOO = 0, BAR = 1, BAZ = 2 }`
+// while the corresponding legacy types universally use actual bitmasks, for example:
+//   `enum legacy_enum_t { LEGACY_FOO = 1 << 0, LEGACY_BAR = 1 << 1, LEGACY_BAZ = 1 << 2 }`
+// There is also a third type used to store the resulting mask, which is combined
+// from individual bits. In AIDL this is typically an int (`int32_t`); in legacy types it is
+// often the enum type itself (although, strictly speaking, this is not correct since masks
+// are not declared as part of the enum type). The bit index value always has an integer type.
+//
+// `indexToEnum_index` constructs an instance of the enum from an index,
+// for example `AidlEnum::BAR` from `1`.
+// `indexToEnum_bitmask` produces a corresponding legacy bitmask enum instance,
+// for example, `LEGACY_BAR` (`2`) from `1`.
+// `enumToMask_bitmask` simply casts an enum type to a bitmask type.
+// `enumToMask_index` creates a mask from an enum type which specifies an index.
+//
+// All these functions can be plugged into `convertBitmask`. For example, to implement
+// conversion from `AidlEnum` to `legacy_enum_t`, with a mask stored in `int32_t`,
+// the following call needs to be made:
+//   convertBitmask<legacy_enum_t /*DestMask*/, int32_t /*SrcMask*/,
+//                  legacy_enum_t /*DestEnum*/, AidlEnum /*SrcEnum*/>(
+//     maskField /*int32_t*/, aidl2legacy_AidlEnum_legacy_enum_t /*enumConversion*/,
+//     indexToEnum_index<AidlEnum> /*srcIndexToEnum*/,
+//     enumToMask_bitmask<legacy_enum_t, legacy_enum_t> /*destEnumToMask*/)
+//
+// The only extra function needed is for mapping between corresponding enum values
+// of the AidlEnum and the legacy_enum_t. Note that the mapping is between values
+// of enums, for example, `AidlEnum::BAZ` maps to `LEGACY_BAZ` and vice versa.
 
 template<typename Enum>
 Enum indexToEnum_index(int index) {
@@ -389,6 +431,10 @@
         ?: statusTFromExceptionCode(status.getExceptionCode()); // a service-side error with a
                                                      // standard Java exception (fromExceptionCode)
 }
+
+static inline ::android::status_t statusTFromBinderStatusT(binder_status_t status) {
+    return statusTFromBinderStatus(::ndk::ScopedAStatus::fromStatus(status));
+}
 #endif
 
 /**
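
The index-versus-bitmask relationship described in the comment block above can be pictured with a
small standalone sketch. This is an illustration only, with hypothetical enums; it is not the
library's convertBitmask, whose helpers additionally route each bit through the per-value enum
conversion.

    #include <cstdint>

    enum class AidlEnum : int32_t { FOO = 0, BAR = 1, BAZ = 2 };   // values are bit indices
    enum legacy_enum_t : uint32_t {
        LEGACY_FOO = 1 << 0, LEGACY_BAR = 1 << 1, LEGACY_BAZ = 1 << 2
    };

    uint32_t exampleAidlMaskToLegacy(int32_t aidlMask) {
        uint32_t legacyMask = 0;
        for (int index = 0; index < 32; ++index) {
            if (((static_cast<uint32_t>(aidlMask) >> index) & 1u) == 0) continue;
            // The real helpers would apply indexToEnum_index<AidlEnum>(index), then the
            // per-value enum conversion, then enumToMask_bitmask; here the two enums line
            // up 1:1, so the legacy bit sits at the same position as the AIDL bit index.
            legacyMask |= 1u << index;
        }
        return legacyMask;
    }
    // For example, an AIDL mask with the FOO and BAZ bits set (0b101) maps to
    // LEGACY_FOO | LEGACY_BAZ, which is also 0b101 in this 1:1 layout.
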
diff --git a/media/audioaidlconversion/tests/Android.bp b/media/audioaidlconversion/tests/Android.bp
index de7c8a2..88b2cc9 100644
--- a/media/audioaidlconversion/tests/Android.bp
+++ b/media/audioaidlconversion/tests/Android.bp
@@ -44,3 +44,27 @@
         "-DBACKEND_NDK",
     ],
 }
+
+cc_test {
+    name: "audio_aidl_ndk_cpp_conversion_tests",
+
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_static",
+        "latest_android_media_audio_common_types_ndk_static",
+        "libaudio_aidl_conversion_tests_defaults",
+    ],
+    srcs: ["audio_aidl_ndk_cpp_conversion_tests.cpp"],
+    shared_libs: [
+        "libbinder",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+    static_libs: [
+        "libaudio_aidl_conversion_common_ndk_cpp",
+    ],
+    cflags: [
+        "-DBACKEND_CPP_NDK",
+    ],
+}
diff --git a/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp b/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
index c505e60..60727b4 100644
--- a/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
+++ b/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
@@ -19,6 +19,7 @@
 
 #include <gtest/gtest.h>
 
+#include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionNdk.h>
 
 namespace {
@@ -89,3 +90,48 @@
     ASSERT_EQ(1, convBack.value().tags.size());
     EXPECT_EQ(initial.tags[1], convBack.value().tags[0]);
 }
+
+class AudioTagsRoundTripTest : public testing::TestWithParam<std::vector<std::string>>
+{
+};
+TEST_P(AudioTagsRoundTripTest, Aidl2Legacy2Aidl) {
+    const auto& initial = GetParam();
+    auto conv = aidl2legacy_AudioTags_string(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_string_AudioTags(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+INSTANTIATE_TEST_SUITE_P(AudioTagsRoundTrip, AudioTagsRoundTripTest,
+        testing::Values(std::vector<std::string>{},
+                std::vector<std::string>{"VX_GOOGLE_41"},
+                std::vector<std::string>{"VX_GOOGLE_41", "VX_GOOGLE_42"}));
+
+TEST(AudioTags, NonVendorTagsAllowed) {
+    const std::string separator(1, AUDIO_ATTRIBUTES_TAGS_SEPARATOR);
+    const std::vector<std::string> initial{"random_string", "VX_GOOGLE_42"};
+    auto conv = aidl2legacy_AudioTags_string(initial);
+    ASSERT_TRUE(conv.ok());
+    EXPECT_EQ("random_string" + separator + "VX_GOOGLE_42", conv.value());
+}
+
+TEST(AudioTags, IllFormedAidlTag) {
+    const std::string separator(1, AUDIO_ATTRIBUTES_TAGS_SEPARATOR);
+    {
+        const std::vector<std::string> initial{"VX_GOOGLE" + separator + "42", "VX_GOOGLE_42"};
+        auto conv = aidl2legacy_AudioTags_string(initial);
+        if (conv.ok()) {
+            EXPECT_EQ("VX_GOOGLE_42", conv.value());
+        }
+        // Failing this conversion is also OK. The result depends on whether the conversion
+        // only passes through vendor tags.
+    }
+    {
+        const std::vector<std::string> initial{
+            "random_string", "random" + separator + "string", "VX_GOOGLE_42"};
+        auto conv = aidl2legacy_AudioTags_string(initial);
+        if (conv.ok()) {
+            EXPECT_EQ("VX_GOOGLE_42", conv.value());
+        }
+    }
+}
diff --git a/media/audioaidlconversion/tests/audio_aidl_ndk_cpp_conversion_tests.cpp b/media/audioaidlconversion/tests/audio_aidl_ndk_cpp_conversion_tests.cpp
new file mode 100644
index 0000000..206c35b
--- /dev/null
+++ b/media/audioaidlconversion/tests/audio_aidl_ndk_cpp_conversion_tests.cpp
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+#include <type_traits>
+
+#include <gtest/gtest.h>
+
+#include <media/AidlConversionNdkCpp.h>
+
+namespace {
+template<typename> struct mf_traits {};
+template<class T, class U> struct mf_traits<U T::*> {
+    using member_type = U;
+};
+}  // namespace
+
+// Provide value printers for types generated from AIDL.
+// They need to be in the same namespace as the types we intend to print.
+#define DEFINE_PRINTING_TEMPLATES()                                                               \
+    template <typename P>                                                                         \
+    std::enable_if_t<std::is_function_v<typename mf_traits<decltype(&P::toString)>::member_type>, \
+            std::ostream&> operator<<(std::ostream& os, const P& p) {                             \
+        return os << p.toString();                                                                \
+    }                                                                                             \
+    template <typename E>                                                                         \
+    std::enable_if_t<std::is_enum_v<E>, std::ostream&> operator<<(std::ostream& os, const E& e) { \
+        return os << toString(e);                                                                 \
+    }
+
+namespace aidl::android::media::audio::common {
+DEFINE_PRINTING_TEMPLATES();
+}  // namespace aidl::android::media::audio::common
+namespace android::hardware::audio::common {
+DEFINE_PRINTING_TEMPLATES();
+}  // namespace android::hardware::audio::common
+#undef DEFINE_PRINTING_TEMPLATES
+
+using namespace android;
+
+namespace {
+
+using namespace ::aidl::android::media::audio::common;
+
+AudioFormatDescription make_AudioFormatDescription(AudioFormatType type) {
+    AudioFormatDescription result;
+    result.type = type;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(PcmType pcm) {
+    auto result = make_AudioFormatDescription(AudioFormatType::PCM);
+    result.pcm = pcm;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(const std::string& encoding) {
+    AudioFormatDescription result;
+    result.encoding = encoding;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(PcmType transport, const std::string& encoding) {
+    auto result = make_AudioFormatDescription(encoding);
+    result.pcm = transport;
+    return result;
+}
+
+AudioFormatDescription make_AFD_Default() {
+    return AudioFormatDescription{};
+}
+
+AudioFormatDescription make_AFD_Invalid() {
+    return make_AudioFormatDescription(AudioFormatType::SYS_RESERVED_INVALID);
+}
+
+AudioFormatDescription make_AFD_Pcm16Bit() {
+    return make_AudioFormatDescription(PcmType::INT_16_BIT);
+}
+
+AudioFormatDescription make_AFD_Bitstream() {
+    return make_AudioFormatDescription("example");
+}
+
+AudioFormatDescription make_AFD_Encap() {
+    return make_AudioFormatDescription(PcmType::INT_16_BIT, "example.encap");
+}
+
+AudioFormatDescription make_AFD_Encap_with_Enc() {
+    auto afd = make_AFD_Encap();
+    afd.encoding += "+example";
+    return afd;
+}
+
+}  // namespace
+
+// There is no reason to write a test for every type which gets converted via a parcelable
+// since the conversion code is all the same.
+
+class AudioFormatDescriptionRoundTripTest :
+        public testing::TestWithParam<::aidl::android::media::audio::common::AudioFormatDescription>
+{
+};
+TEST_P(AudioFormatDescriptionRoundTripTest, Ndk2Cpp2Ndk) {
+    const auto& initial = GetParam();
+    auto conv = ndk2cpp_AudioFormatDescription(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = cpp2ndk_AudioFormatDescription(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+INSTANTIATE_TEST_SUITE_P(AudioFormatDescriptionRoundTrip, AudioFormatDescriptionRoundTripTest,
+        testing::Values(make_AFD_Invalid(), make_AFD_Default(), make_AFD_Pcm16Bit(),
+                make_AFD_Bitstream(), make_AFD_Encap(), make_AFD_Encap_with_Enc()));
+
+TEST(AudioPortRoundTripTest, Ndk2Cpp2Ndk) {
+    const AudioPort initial;
+    auto conv = ndk2cpp_AudioPort(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = cpp2ndk_AudioPort(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index 828d861..2030dc7 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -25,21 +25,31 @@
         "libmediametrics_headers",
     ],
 
-    shared_libs: [
-        "packagemanager_aidl-cpp",
+    defaults: [
+        "libaaudioservice_dependencies",
+        "libaudioflinger_dependencies",
+        "libaudiopolicyservice_dependencies",
+        "latest_android_media_audio_common_types_cpp_shared",
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+    ],
+
+    static_libs: [
         "libaaudioservice",
-        "libaudioclient",
         "libaudioflinger",
         "libaudiopolicyservice",
+        "libmedialogservice",
+        "libnbaio",
+    ],
+
+    shared_libs: [
+        "libaudioclient",
         "libaudioprocessing",
         "libbinder",
         "libcutils",
         "libhidlbase",
         "liblog",
         "libmedia",
-        "libmedialogservice",
         "libmediautils",
-        "libnbaio",
         "libnblog",
         "libpowermanager",
         "libutils",
@@ -59,9 +69,9 @@
         "frameworks/av/services/audiopolicy/engine/interface",
         "frameworks/av/services/audiopolicy/service",
         "frameworks/av/services/medialog",
+        "frameworks/av/services/oboeservice", // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
 
-        // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
-        "frameworks/av/services/oboeservice",
+
     ],
 
     init_rc: ["audioserver.rc"],
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index 59cad9d..e08bf43 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -542,15 +542,15 @@
     mCodecConfiguration->kf_max_dist = 3000;
     // Encoder determines optimal key frame placement automatically.
     mCodecConfiguration->kf_mode = AOM_KF_AUTO;
-    // Initial value of the buffer level in ms.
-    mCodecConfiguration->rc_buf_initial_sz = 500;
-    // Amount of data that the encoder should try to maintain in ms.
-    mCodecConfiguration->rc_buf_optimal_sz = 600;
     // The amount of data that may be buffered by the decoding
     // application in ms.
     mCodecConfiguration->rc_buf_sz = 1000;
 
     if (mBitrateControlMode == AOM_CBR) {
+        // Initial value of the buffer level in ms.
+        mCodecConfiguration->rc_buf_initial_sz = 500;
+        // Amount of data that the encoder should try to maintain in ms.
+        mCodecConfiguration->rc_buf_optimal_sz = 600;
         // Maximum amount of bits that can be subtracted from the target
         // bitrate - expressed as percentage of the target bitrate.
         mCodecConfiguration->rc_undershoot_pct = 100;
@@ -563,7 +563,7 @@
         mCodecConfiguration->rc_undershoot_pct = 100;
         // Maximum amount of bits that can be added to the target
         // bitrate - expressed as percentage of the target bitrate.
-        mCodecConfiguration->rc_overshoot_pct = 25;
+        mCodecConfiguration->rc_overshoot_pct = 100;
     }
 
     if (mIntf->getSyncFramePeriod() >= 0) {
@@ -576,6 +576,12 @@
     }
     if (mMaxQuantizer > 0) {
         mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
+    } else {
+        if (mBitrateControlMode == AOM_VBR) {
+            // For VBR, we limit MaxQP to 52 (down 11 steps) to maintain quality.
+            // The value 52 comes from experiments done with the standalone libaom app.
+            mCodecConfiguration->rc_max_quantizer = 52;
+        }
     }
 
     mCodecContext = new aom_codec_ctx_t;
@@ -630,11 +636,11 @@
         return;
     }
 
-    std::shared_ptr<const C2GraphicView> rView;
+    std::shared_ptr<C2GraphicView> rView;
     std::shared_ptr<C2Buffer> inputBuffer;
     if (!work->input.buffers.empty()) {
         inputBuffer = work->input.buffers[0];
-        rView = std::make_shared<const C2GraphicView>(
+        rView = std::make_shared<C2GraphicView>(
                 inputBuffer->data().graphicBlocks().front().map().get());
         if (rView->error() != C2_OK) {
             ALOGE("graphic view map err = %d", rView->error());
@@ -672,6 +678,10 @@
         return;
     }
 
+    // (b/279387842)
+    // Workaround for incorrect crop size in view when using surface mode.
+    rView->setCrop_be(C2Rect(mSize->width, mSize->height));
+
     if (!mHeadersReceived) {
         Av1Config av1_config;
         constexpr uint32_t header_length = 2048;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index 5d2856a..e424860 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -334,7 +334,10 @@
         // By default needsUpdate = false in case the supplied level does meet
         // the requirements. For Level 1b, we want to update the level anyway,
         // so we set it to true in that case.
-        bool needsUpdate = (me.v.level == LEVEL_AVC_1B);
+        bool needsUpdate = false;
+        if (me.v.level == LEVEL_AVC_1B || !me.F(me.v.level).supportsAtAll(me.v.level)) {
+            needsUpdate = true;
+        }
         for (const LevelLimits &limit : kLimits) {
             if (mbs <= limit.mbs && mbsPerSec <= limit.mbsPerSec &&
                     bitrate.v.value <= limit.bitrate) {
@@ -356,7 +359,7 @@
                 needsUpdate = true;
             }
         }
-        if (!found) {
+        if (!found || me.v.level > LEVEL_AVC_5) {
             // We set to the highest supported level.
             me.set().level = LEVEL_AVC_5;
         }
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 182edfb..591d56d 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -188,12 +188,6 @@
     return onStop();
 }
 
-static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
-    work->worklets.front()->output.flags = work->input.flags;
-    work->worklets.front()->output.buffers.clear();
-    work->worklets.front()->output.ordinal = work->input.ordinal;
-}
-
 void C2SoftFlacEnc::process(
         const std::unique_ptr<C2Work> &work,
         const std::shared_ptr<C2BlockPool> &pool) {
@@ -245,12 +239,10 @@
         mWroteHeader = true;
     }
 
-    const uint32_t sampleRate = mIntf->getSampleRate();
     const uint32_t channelCount = mIntf->getChannelCount();
     const bool inputFloat = mIntf->getPcmEncodingInfo() == C2Config::PCM_FLOAT;
     const unsigned sampleSize = inputFloat ? sizeof(float) : sizeof(int16_t);
     const unsigned frameSize = channelCount * sampleSize;
-    const uint64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
 
     size_t outCapacity = inSize;
     outCapacity += mBlockSize * frameSize;
@@ -270,6 +262,33 @@
         return;
     }
 
+    class FillWork {
+    public:
+        FillWork(uint32_t flags, C2WorkOrdinalStruct ordinal,
+                 const std::shared_ptr<C2Buffer> &buffer)
+            : mFlags(flags), mOrdinal(ordinal), mBuffer(buffer) {}
+        ~FillWork() = default;
+
+        void operator()(const std::unique_ptr<C2Work> &work) {
+            work->worklets.front()->output.flags = (C2FrameData::flags_t)mFlags;
+            work->worklets.front()->output.buffers.clear();
+            work->worklets.front()->output.ordinal = mOrdinal;
+            work->workletsProcessed = 1u;
+            work->result = C2_OK;
+            if (mBuffer) {
+                work->worklets.front()->output.buffers.push_back(mBuffer);
+            }
+            ALOGV("timestamp = %lld, index = %lld, w/%s buffer",
+                  mOrdinal.timestamp.peekll(), mOrdinal.frameIndex.peekll(),
+                  mBuffer ? "" : "o");
+        }
+
+    private:
+        const uint32_t mFlags;
+        const C2WorkOrdinalStruct mOrdinal;
+        const std::shared_ptr<C2Buffer> mBuffer;
+    };
+
     mEncoderWriteData = true;
     mEncoderReturnedNbBytes = 0;
     size_t inPos = 0;
@@ -308,14 +327,33 @@
         mOutputBlock.reset();
         return;
     }
-    fillEmptyWork(work);
-    if (mEncoderReturnedNbBytes != 0) {
-        std::shared_ptr<C2Buffer> buffer = createLinearBuffer(std::move(mOutputBlock), 0, mEncoderReturnedNbBytes);
-        work->worklets.front()->output.buffers.push_back(buffer);
-        work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp;
-    } else {
-        ALOGV("encoder process_interleaved returned without data to write");
+
+    // cloneAndSend creates a clone of the work when more than one encoded frame is produced
+    while (mOutputBuffers.size() > 1) {
+        const OutputBuffer& front = mOutputBuffers.front();
+        C2WorkOrdinalStruct ordinal = work->input.ordinal;
+        ordinal.frameIndex = front.frameIndex;
+        ordinal.timestamp = front.timestampUs;
+        cloneAndSend(work->input.ordinal.frameIndex.peeku(), work,
+                     FillWork(C2FrameData::FLAG_INCOMPLETE, ordinal, front.buffer));
+        mOutputBuffers.pop_front();
     }
+
+    std::shared_ptr<C2Buffer> buffer;
+    C2WorkOrdinalStruct ordinal = work->input.ordinal;
+    if (mOutputBuffers.size() == 1) {
+        const OutputBuffer& front = mOutputBuffers.front();
+        ordinal.frameIndex = front.frameIndex;
+        ordinal.timestamp = front.timestampUs;
+        buffer = front.buffer;
+        mOutputBuffers.pop_front();
+    }
+    // Finish the response for the overall transaction.
+    // This includes any final frame that the encoder produced during this request.
+    // This response is required even if no data was encoded.
+    FillWork((C2FrameData::flags_t)(eos ? C2FrameData::FLAG_END_OF_STREAM : 0),
+             ordinal, buffer)(work);
+
     mOutputBlock = nullptr;
     if (eos) {
         mSignalledOutputEos = true;
@@ -349,6 +387,8 @@
     // write encoded data
     C2WriteView wView = mOutputBlock->map().get();
     uint8_t* outData = wView.data();
+    const uint32_t sampleRate = mIntf->getSampleRate();
+    const uint64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
     ALOGV("writing %zu bytes of encoded data on output", bytes);
     // increment mProcessedSamples to maintain audio synchronization during
     // play back
@@ -359,7 +399,12 @@
         return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
     }
     memcpy(outData + mEncoderReturnedNbBytes, buffer, bytes);
+
+    std::shared_ptr<C2Buffer> c2Buffer =
+        createLinearBuffer(mOutputBlock, mEncoderReturnedNbBytes, bytes);
+    mOutputBuffers.push_back({c2Buffer, mAnchorTimeStamp + outTimeStamp, current_frame});
     mEncoderReturnedNbBytes += bytes;
+
     return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
 }
 
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
index b3f01d5..a971ab5 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.h
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -79,6 +79,12 @@
     unsigned mHeaderOffset;
     bool mWroteHeader;
     char mHeader[FLAC_HEADER_SIZE];
+    struct OutputBuffer {
+        std::shared_ptr<C2Buffer> buffer;
+        c2_cntr64_t timestampUs;
+        std::uint64_t frameIndex;
+    };
+    std::list<OutputBuffer> mOutputBuffers;
 
     C2_DO_NOT_COPY(C2SoftFlacEnc);
 };
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 914167d..5f5f05d 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -29,6 +29,14 @@
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 
+// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
+#if LIBYUV_VERSION >= 1780
+#include <algorithm>
+#define HAVE_LIBYUV_I410_I210_TO_AB30 1
+#else
+#define HAVE_LIBYUV_I410_I210_TO_AB30 0
+#endif
+
 namespace android {
 
 // codecname set and passed in as a compile flag from Android.bp
@@ -744,6 +752,19 @@
     return true;
 }
 
+bool C2SoftGav1Dec::fillMonochromeRow(int value) {
+    const size_t tmpSize = mWidth;
+    const bool needFill = tmpSize > mTmpFrameBufferSize;
+    if (!allocTmpFrameBuffer(tmpSize)) {
+        ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+        return false;
+    }
+    if (needFill) {
+        std::fill_n(mTmpFrameBuffer.get(), tmpSize, value);
+    }
+    return true;
+}
+
 bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
                                  const std::unique_ptr<C2Work> &work) {
   if (!(work && pool)) return false;
@@ -765,6 +786,16 @@
     return false;
   }
 
+#if LIBYUV_VERSION < 1871
+  if (buffer->bitdepth > 10) {
+    ALOGE("bitdepth %d is not supported", buffer->bitdepth);
+    mSignalledError = true;
+    work->workletsProcessed = 1u;
+    work->result = C2_CORRUPTED;
+    return false;
+  }
+#endif
+
   const int width = buffer->displayed_width[0];
   const int height = buffer->displayed_height[0];
   if (width != mWidth || height != mHeight) {
@@ -808,7 +839,7 @@
   std::shared_ptr<C2GraphicBlock> block;
   uint32_t format = HAL_PIXEL_FORMAT_YV12;
   std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
-  if (buffer->bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+  if (buffer->bitdepth >= 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
     IntfImpl::Lock lock = mIntf->lock();
     codedColorAspects = mIntf->getColorAspects_l();
     bool allowRGBA1010102 = false;
@@ -818,14 +849,29 @@
       allowRGBA1010102 = true;
     }
     format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+#if !HAVE_LIBYUV_I410_I210_TO_AB30
     if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
-        (buffer->image_format != libgav1::kImageFormatYuv420)) {
-        ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
+        (buffer->image_format != libgav1::kImageFormatYuv420) &&
+        (buffer->bitdepth == 10)) {
+        ALOGE("Only YUV420 output is supported for 10-bit when targeting RGBA_1010102");
       mSignalledError = true;
       work->result = C2_OMITTED;
       work->workletsProcessed = 1u;
       return false;
     }
+#endif
+  }
+  if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_RGBA_1010102 &&
+      (buffer->image_format == libgav1::kImageFormatYuv422 ||
+       buffer->image_format == libgav1::kImageFormatYuv444)) {
+      // There are no 12-bit color conversion functions from YUV422/YUV444 to
+      // RGBA_1010102. Use 8-bit YV12 in this case.
+      format = HAL_PIXEL_FORMAT_YV12;
+  }
+  if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+      // There are no 12-bit color conversion functions to P010. Use 8-bit YV12
+      // in this case.
+      format = HAL_PIXEL_FORMAT_YV12;
   }
 
   if (mHalPixelFormat != format) {
@@ -880,7 +926,41 @@
   size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
   size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
 
-  if (buffer->bitdepth == 10) {
+  if (buffer->bitdepth == 12) {
+#if LIBYUV_VERSION >= 1871
+      const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
+      const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
+      const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
+      size_t srcYStride = buffer->stride[0] / 2;
+      size_t srcUStride = buffer->stride[1] / 2;
+      size_t srcVStride = buffer->stride[2] / 2;
+      if (isMonochrome) {
+          if (!fillMonochromeRow(2048)) {
+              setError(work, C2_NO_MEMORY);
+              return false;
+          }
+          srcU = srcV = mTmpFrameBuffer.get();
+          srcUStride = srcVStride = 0;
+      }
+      if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+          libyuv::I012ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                                   dstY, dstYStride, &libyuv::kYuvV2020Constants,
+                                   mWidth, mHeight);
+      } else if (isMonochrome || buffer->image_format == libgav1::kImageFormatYuv420) {
+          libyuv::I012ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      } else if (buffer->image_format == libgav1::kImageFormatYuv444) {
+          libyuv::I412ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      } else {
+          libyuv::I212ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      }
+#endif  // LIBYUV_VERSION >= 1871
+  } else if (buffer->bitdepth == 10) {
     const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
     const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
     const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
@@ -889,11 +969,35 @@
     size_t srcVStride = buffer->stride[2] / 2;
 
     if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
-        convertYUV420Planar16ToY410OrRGBA1010102(
-                (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
-                srcUStride, srcVStride,
-                dstYStride / sizeof(uint32_t), mWidth, mHeight,
-                std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
+        bool processed = false;
+#if HAVE_LIBYUV_I410_I210_TO_AB30
+        if (buffer->image_format == libgav1::kImageFormatYuv444) {
+            libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                                     dstY, dstYStride, &libyuv::kYuvV2020Constants,
+                                     mWidth, mHeight);
+            processed = true;
+        } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
+            libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                                     dstY, dstYStride, &libyuv::kYuvV2020Constants,
+                                     mWidth, mHeight);
+            processed = true;
+        }
+#endif  // HAVE_LIBYUV_I410_I210_TO_AB30
+        if (!processed) {
+            if (isMonochrome) {
+                if (!fillMonochromeRow(512)) {
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+                srcU = srcV = mTmpFrameBuffer.get();
+                srcUStride = srcVStride = 0;
+            }
+            convertYUV420Planar16ToY410OrRGBA1010102(
+                    (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
+                    srcUStride, srcVStride,
+                    dstYStride / sizeof(uint32_t), mWidth, mHeight,
+                    std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
+        }
     } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
         dstYStride /= 2;
         dstUStride /= 2;
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index c3b27ea..0e09fcc 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -105,6 +105,7 @@
   // Sets |work->result| and mSignalledError. Returns false.
   void setError(const std::unique_ptr<C2Work> &work, c2_status_t error);
   bool allocTmpFrameBuffer(size_t size);
+  bool fillMonochromeRow(int value);
   bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
                     const std::unique_ptr<C2Work>& work);
   c2_status_t drainInternal(uint32_t drainMode,
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index 9c26c02..ec1dd14 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -341,6 +341,9 @@
         // By default needsUpdate = false in case the supplied level does meet
         // the requirements.
         bool needsUpdate = false;
+        if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+            needsUpdate = true;
+        }
         for (const LevelLimits &limit : kLimits) {
             if (samples <= limit.samples && samplesPerSec <= limit.samplesPerSec &&
                     bitrate.v.value <= limit.bitrate) {
@@ -362,7 +365,7 @@
                 needsUpdate = true;
             }
         }
-        if (!found) {
+        if (!found || me.v.level > LEVEL_HEVC_MAIN_5_2) {
             // We set to the highest supported level.
             me.set().level = LEVEL_HEVC_MAIN_5_2;
         }
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index d5e8c56..acc42e9 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -49,6 +49,8 @@
 const char *MEDIA_MIMETYPE_VIDEO = MEDIA_MIMETYPE_VIDEO_H263;
 #endif
 
+constexpr float VBV_DELAY = 5.0f;
+
 } // namespace
 
 class C2SoftMpeg4Enc::IntfImpl : public SimpleInterface<void>::BaseParams {
@@ -131,7 +133,7 @@
                             C2Config::LEVEL_MP4V_1,
                             C2Config::LEVEL_MP4V_2})
                 })
-                .withSetter(ProfileLevelSetter)
+                .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
                 .build());
 #else
         addParameter(
@@ -148,7 +150,7 @@
                             C2Config::LEVEL_H263_40,
                             C2Config::LEVEL_H263_45})
                 })
-                .withSetter(ProfileLevelSetter)
+                .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
                 .build());
 #endif
     }
@@ -179,7 +181,10 @@
 
     static C2R ProfileLevelSetter(
             bool mayBlock,
-            C2P<C2StreamProfileLevelInfo::output> &me) {
+            C2P<C2StreamProfileLevelInfo::output> &me,
+            const C2P<C2StreamPictureSizeInfo::input> &size,
+            const C2P<C2StreamFrameRateInfo::output> &frameRate,
+            const C2P<C2StreamBitrateInfo::output> &bitrate) {
         (void)mayBlock;
         if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
 #ifdef MPEG4
@@ -188,13 +193,91 @@
             me.set().profile = PROFILE_H263_BASELINE;
 #endif
         }
-        if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+
+        struct LevelLimits {
+            C2Config::level_t level;
+            uint32_t sampleRate;
+            uint32_t width;
+            uint32_t height;
+            uint32_t frameRate;
+            uint32_t bitrate;
+            uint32_t vbvSize;
+        };
+
+        constexpr LevelLimits kLimits[] = {
 #ifdef MPEG4
-            me.set().level = LEVEL_MP4V_2;
+            { LEVEL_MP4V_0, 380160, 176, 144, 15, 64000, 163840 },
+            // { LEVEL_MP4V_0B, 380160, 176, 144, 15, 128000, 163840 },
+            { LEVEL_MP4V_1, 380160, 176, 144, 30, 64000, 163840 },
+            { LEVEL_MP4V_2, 1520640, 352, 288, 30, 128000, 655360 },
 #else
-            me.set().level = LEVEL_H263_45;
+            // HRD Buffer Size = (B + BPPmaxKb * 1024) bits,
+            // where (BPPmaxKb * 1024) is the maximum number of bits per picture
+            // that has been negotiated for use in the bitstream (Sec 3.6 of T-Rec-H.263),
+            // and B = 4 * Rmax / PCF, where Rmax is the maximum bit rate and PCF is the
+            // picture clock frequency.
+            { LEVEL_H263_10, 380160, 176, 144, 15, 64000, 74077 },
+            { LEVEL_H263_45, 380160, 176, 144, 15, 128000, 82619 },
+            { LEVEL_H263_20, 1520640, 352, 288, 30, 128000, 279227 },
+            { LEVEL_H263_30, 3041280, 352, 288, 30, 384000, 313395 },
+            { LEVEL_H263_40, 3041280, 352, 288, 30, 2048000, 535483 },
+            // { LEVEL_H263_50, 5068800, 352, 288, 60, 4096000, 808823 },
 #endif
+        };
+
+        auto mbs = ((size.v.width + 15) / 16) * ((size.v.height + 15) / 16);
+        auto sampleRate = mbs * frameRate.v.value * 16 * 16;
+        auto vbvSize = bitrate.v.value * VBV_DELAY;
+
+        // Check if the supplied level meets the MB / bitrate requirements. If
+        // not, update the level with the lowest level meeting the requirements.
+        bool found = false;
+
+        // By default needsUpdate = false in case the supplied level does meet
+        // the requirements.
+        bool needsUpdate = false;
+#ifdef MPEG4
+        // For Level 0b, we want to update the level anyway, as the library does
+        // not seem to accept this value.
+        if (me.v.level == LEVEL_MP4V_0B) {
+            needsUpdate = true;
         }
+#endif
+        if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+            needsUpdate = true;
+        }
+        for (const LevelLimits &limit : kLimits) {
+            if (sampleRate <= limit.sampleRate && size.v.width <= limit.width &&
+                    vbvSize <= limit.vbvSize && size.v.height <= limit.height &&
+                    bitrate.v.value <= limit.bitrate && frameRate.v.value <= limit.frameRate) {
+                // This is the lowest level that meets the requirements, and if
+                // we haven't seen the supplied level yet, that means we don't
+                // need the update.
+                if (needsUpdate) {
+                    ALOGD("Given level %x does not cover current configuration: "
+                          "adjusting to %x", me.v.level, limit.level);
+                    me.set().level = limit.level;
+                }
+                found = true;
+                break;
+            }
+            if (me.v.level == limit.level) {
+                // We break out of the loop when the lowest feasible level is
+                // found. The fact that we're here means that our level doesn't
+                // meet the requirement and needs to be updated.
+                needsUpdate = true;
+            }
+        }
+        // If not found or exceeds max level, set to the highest supported level.
+#ifdef MPEG4
+        if (!found || me.v.level > LEVEL_MP4V_2) {
+            me.set().level = LEVEL_MP4V_2;
+        }
+#else
+        if (!found || (me.v.level != LEVEL_H263_45 && me.v.level > LEVEL_H263_40)) {
+            me.set().level = LEVEL_H263_40;
+        }
+#endif
         return C2R::Ok();
     }
 
@@ -325,7 +408,7 @@
     mEncParams->encHeight[0] = mSize->height;
     mEncParams->encFrameRate[0] = mFrameRate->value + 0.5;
     mEncParams->rcType = VBR_1;
-    mEncParams->vbvDelay = 5.0f;
+    mEncParams->vbvDelay = VBV_DELAY;
     mEncParams->profile_level = CORE_PROFILE_LEVEL2;
     mEncParams->packetSize = 32;
     mEncParams->rvlcEnable = PV_OFF;
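
As a cross-check of the vbvSize column in the H.263 table above, the standalone sketch below (not
part of the patch) reproduces the table values under two assumptions that are not stated in the
code: BPPmaxKb is 64 kbit for the QCIF levels and 256 kbit for the CIF levels, and the picture
clock frequency is 30000/1001 Hz with the division truncated.

    #include <cstdint>
    #include <cstdio>

    // HRD buffer size = B + BPPmaxKb * 1024 bits, with B = 4 * Rmax / PCF (truncated).
    constexpr uint32_t hrdBufferBits(uint32_t rmaxBps, uint32_t bppMaxKb) {
        return static_cast<uint32_t>(4ull * rmaxBps * 1001 / 30000) + bppMaxKb * 1024;
    }

    int main() {
        std::printf("H263_10: %u\n", hrdBufferBits(64000, 64));      // 74077
        std::printf("H263_45: %u\n", hrdBufferBits(128000, 64));     // 82619
        std::printf("H263_20: %u\n", hrdBufferBits(128000, 256));    // 279227
        std::printf("H263_30: %u\n", hrdBufferBits(384000, 256));    // 313395
        std::printf("H263_40: %u\n", hrdBufferBits(2048000, 256));   // 535483
        return 0;
    }

The setter then compares vbvSize = bitrate * VBV_DELAY, i.e. five seconds of payload at the
configured bitrate, against these per-level limits.
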
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 5700e5d..e903069 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -263,6 +263,9 @@
     // By default needsUpdate = false in case the supplied level does meet
     // the requirements.
     bool needsUpdate = false;
+    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+        needsUpdate = true;
+    }
     for (const LevelLimits& limit : kLimits) {
         if (samples <= limit.samples && samplesPerSec <= limit.samplesPerSec &&
             bitrate.v.value <= limit.bitrate && dimension <= limit.dimension) {
diff --git a/media/codec2/core/include/C2Buffer.h b/media/codec2/core/include/C2Buffer.h
index abe343b..7e1df91 100644
--- a/media/codec2/core/include/C2Buffer.h
+++ b/media/codec2/core/include/C2Buffer.h
@@ -101,6 +101,8 @@
 
     /**
      * Returns a file descriptor that can be used to wait for this fence in a select system call.
+     * \note If the fence holds multiple file descriptors, a file descriptor for the
+     *  merged fence is returned.
      * \note The returned file descriptor, if valid, must be closed by the caller.
      *
      * This can be used in e.g. poll() system calls. This file becomes readable (POLLIN) when the
@@ -129,6 +131,7 @@
     std::shared_ptr<Impl> mImpl;
     C2Fence(std::shared_ptr<Impl> impl);
     friend struct _C2FenceFactory;
+    friend std::vector<int> ExtractFdsFromCodec2SyncFence(const C2Fence& fence);
 };
 
 /**
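
The poll()/select() usage mentioned in the documentation above can be illustrated with a short
sketch. This is not part of the patch; it assumes the accessor that returns the descriptor is
named fd(), and it follows the documented requirement that the caller closes the returned
(possibly merged) descriptor.

    #include <poll.h>
    #include <unistd.h>

    #include <C2Buffer.h>

    // Wait up to 100 ms for the fence to signal via its file descriptor.
    bool waitOnFenceFd(const C2Fence &fence) {
        int fd = fence.fd();            // assumed accessor name, for illustration only
        if (fd < 0) {
            return true;                // no waitable descriptor; treat as signaled here
        }
        struct pollfd pfd = {fd, POLLIN, 0};
        int ret = poll(&pfd, 1, 100 /* ms */);
        close(fd);                      // the returned descriptor must be closed by the caller
        return ret > 0 && (pfd.revents & POLLIN) != 0;
    }
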
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 417b261..9a3399d 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -2016,7 +2016,8 @@
 constexpr char C2_PARAMKEY_MAX_CODED_CHANNEL_COUNT[] = "coded.max-channel-count";
 
 /**
- * Audio channel mask. Used by decoder to express audio channel mask of decoded content.
+ * Audio channel mask. Used by decoder to express audio channel mask of decoded content,
+ * or by encoder for the channel mask of the encoded content once decoded.
  * Channel representation is specified according to the Java android.media.AudioFormat
  * CHANNEL_OUT_* constants.
  */
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 9359e29..97c0806 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -610,16 +610,9 @@
 
 // Codec2Client
 Codec2Client::Codec2Client(sp<Base> const& base,
+                           sp<IConfigurable> const& configurable,
                            size_t serviceIndex)
-      : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IConfigurable>> transResult =
-                        base->getConfigurable();
-                return transResult.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult) :
-                        nullptr;
-            }()
-        },
+      : Configurable{configurable},
         mBase1_0{base},
         mBase1_1{Base1_1::castFrom(base)},
         mBase1_2{Base1_2::castFrom(base)},
@@ -1003,7 +996,11 @@
     CHECK(baseStore) << "Codec2 service \"" << name << "\""
                         " inaccessible for unknown reasons.";
     LOG(VERBOSE) << "Client to Codec2 service \"" << name << "\" created";
-    return std::make_shared<Codec2Client>(baseStore, index);
+    Return<sp<IConfigurable>> transResult = baseStore->getConfigurable();
+    CHECK(transResult.isOk()) << "Codec2 service \"" << name << "\""
+                                " does not have IConfigurable.";
+    sp<IConfigurable> configurable = static_cast<sp<IConfigurable>>(transResult);
+    return std::make_shared<Codec2Client>(baseStore, configurable, index);
 }
 
 c2_status_t Codec2Client::ForAllServices(
@@ -1523,8 +1520,8 @@
     uint64_t consumerUsage = kDefaultConsumerUsage;
     {
         if (surface) {
-            int usage = 0;
-            status_t err = surface->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &usage);
+            uint64_t usage = 0;
+            status_t err = surface->getConsumerUsage(&usage);
             if (err != NO_ERROR) {
                 ALOGD("setOutputSurface -- failed to get consumer usage bits (%d/%s). ignoring",
                         err, asString(err));
@@ -1537,8 +1534,7 @@
                 // they do not exist inside of C2 scope. Any buffer usage shall be communicated
                 // through the sideband channel.
 
-                // do an unsigned conversion as bit-31 may be 1
-                consumerUsage = (uint32_t)usage | kDefaultConsumerUsage;
+                consumerUsage = usage | kDefaultConsumerUsage;
             }
         }
 
@@ -1562,6 +1558,8 @@
                     static_cast<uint64_t>(blockPoolId),
                     bqId == 0 ? nullHgbp : igbp);
 
+    mOutputBufferQueue->expireOldWaiters();
+
     if (!transStatus.isOk()) {
         LOG(ERROR) << "setOutputSurface -- transaction failed.";
         return C2_TRANSACTION_FAILED;
@@ -1607,6 +1605,7 @@
                        << status << ".";
         }
     }
+    mOutputBufferQueue->expireOldWaiters();
 }
 
 c2_status_t Codec2Client::Component::connectToInputSurface(
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index efbf179..5267394 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -146,6 +146,8 @@
     typedef ::android::hardware::media::c2::V1_2::IComponentStore Base1_2;
     typedef Base1_0 Base;
 
+    typedef ::android::hardware::media::c2::V1_0::IConfigurable IConfigurable;
+
     struct Listener;
 
     typedef Codec2ConfigurableClient Configurable;
@@ -230,8 +232,11 @@
     static std::shared_ptr<InputSurface> CreateInputSurface(
             char const* serviceName = nullptr);
 
-    // base cannot be null.
-    Codec2Client(sp<Base> const& base, size_t serviceIndex);
+    // Neither base nor configurable can be null.
+    Codec2Client(
+            sp<Base> const& base,
+            sp<IConfigurable> const& configurable,
+            size_t serviceIndex);
 
 protected:
     sp<Base1_0> mBase1_0;
diff --git a/media/codec2/hal/client/include/codec2/hidl/output.h b/media/codec2/hal/client/include/codec2/hidl/output.h
index 35a0224..2e89c3b 100644
--- a/media/codec2/hal/client/include/codec2/hidl/output.h
+++ b/media/codec2/hal/client/include/codec2/hidl/output.h
@@ -51,6 +51,10 @@
                    int maxDequeueBufferCount,
                    std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj);
 
+    // If there are waiters trying to allocate from the old surface, wake them up
+    // and expire them.
+    void expireOldWaiters();
+
     // Stop using the current output surface. Pending buffer operations will no
     // longer be performed.
     void stop();
@@ -90,6 +94,8 @@
     std::weak_ptr<_C2BlockPoolData> mPoolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
     std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
     bool mStopped;
+    std::mutex mOldMutex;
+    std::shared_ptr<C2SurfaceSyncMemory> mOldMem;
 
     bool registerBuffer(const C2ConstGraphicBlock& block);
 };
diff --git a/media/codec2/hal/client/output.cpp b/media/codec2/hal/client/output.cpp
index ce706cc..4eebd1c 100644
--- a/media/codec2/hal/client/output.cpp
+++ b/media/codec2/hal/client/output.cpp
@@ -217,6 +217,7 @@
     sp<GraphicBuffer> buffers[BufferQueueDefs::NUM_BUFFER_SLOTS];
     std::weak_ptr<_C2BlockPoolData>
             poolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
+    std::shared_ptr<C2SurfaceSyncMemory> oldMem;
     {
         std::scoped_lock<std::mutex> l(mMutex);
         bool stopped = mStopped;
@@ -238,7 +239,7 @@
             }
             return false;
         }
-        std::shared_ptr<C2SurfaceSyncMemory> oldMem = mSyncMem;
+        oldMem = mSyncMem;
         C2SyncVariables *oldSync = mSyncMem ? mSyncMem->mem() : nullptr;
         if (oldSync) {
             oldSync->lock();
@@ -314,11 +315,26 @@
             newSync->unlock();
         }
     }
+    {
+        std::scoped_lock<std::mutex> l(mOldMutex);
+        mOldMem = oldMem;
+    }
     ALOGD("remote graphic buffer migration %zu/%zu",
           success, tryNum);
     return true;
 }
 
+void OutputBufferQueue::expireOldWaiters() {
+    std::scoped_lock<std::mutex> l(mOldMutex);
+    if (mOldMem) {
+        C2SyncVariables *oldSync = mOldMem->mem();
+        if (oldSync) {
+            oldSync->notifyAll();
+        }
+        mOldMem.reset();
+    }
+}
+
 void OutputBufferQueue::stop() {
     std::scoped_lock<std::mutex> l(mMutex);
     mStopped = true;
diff --git a/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp b/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
index c77eb22..b5383ad 100644
--- a/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
+++ b/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
@@ -710,10 +710,6 @@
                 layerSettings.source.buffer.fence = Fence::NO_FENCE;
                 layerSettings.source.buffer.textureName = textureName;
                 layerSettings.source.buffer.usePremultipliedAlpha = false;
-                layerSettings.source.buffer.isY410BT2020 =
-                    (layerSettings.sourceDataspace == ui::Dataspace::BT2020_ITU_PQ ||
-                     layerSettings.sourceDataspace == ui::Dataspace::BT2020_ITU_HLG) &&
-                    format == HAL_PIXEL_FORMAT_RGBA_1010102;
                 layerSettings.source.buffer.maxMasteringLuminance =
                     (hdrStaticInfo && *hdrStaticInfo &&
                      hdrStaticInfo->mastering.maxLuminance > 0 &&
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index ecd5463..e4daf5c 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -71,10 +71,11 @@
         "libstagefright_codecbase",
         "libstagefright_foundation",
         "libstagefright_omx",
-	"libstagefright_surface_utils",
+        "libstagefright_surface_utils",
         "libstagefright_xmlparser",
         "libui",
         "libutils",
+        "server_configurable_flags",
     ],
 
     export_shared_lib_headers: [
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index eb1b4b5..5e53acc 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -1828,6 +1828,7 @@
         mCallback->onError(err2, ACTION_CODE_FATAL);
         return;
     }
+
     err2 = mChannel->start(inputFormat, outputFormat, buffersBoundToCodec);
     if (err2 != OK) {
         mCallback->onError(err2, ACTION_CODE_FATAL);
@@ -2131,6 +2132,25 @@
         RevertOutputFormatIfNeeded(outputFormat, config->mOutputFormat);
     }
 
+    std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
+    status_t err = mChannel->prepareInitialInputBuffers(&clientInputBuffers);
+    if (err != OK) {
+        if (err == NO_MEMORY) {
+            // NO_MEMORY happens here when all the buffers are still
+            // with the codec. That is not an error: the condition is momentary
+            // and the buffers are sent to the client as soon as the codec
+            // releases them.
+            ALOGI("Resuming with all input buffers still with codec");
+        } else {
+            ALOGE("Resume request for Input Buffers failed");
+            mCallback->onError(err, ACTION_CODE_FATAL);
+            return;
+        }
+    }
+
+    // Channel start should be called after prepareInitialInputBuffers.
+    // Calling it earlier can cause a failure during preparation if buffers
+    // are sent to the client from onWorkDone before preparation completes.
     (void)mChannel->start(nullptr, nullptr, [&]{
         Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
         const std::unique_ptr<Config> &config = *configLocked;
@@ -2148,14 +2168,6 @@
         state->set(RUNNING);
     }
 
-    std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
-    status_t err = mChannel->prepareInitialInputBuffers(&clientInputBuffers);
-    // FIXME(b/237656746)
-    if (err != OK && err != NO_MEMORY) {
-        ALOGE("Resume request for Input Buffers failed");
-        mCallback->onError(err, ACTION_CODE_FATAL);
-        return;
-    }
     mChannel->requestInitialInputBuffers(std::move(clientInputBuffers));
 }
 
@@ -2557,43 +2569,6 @@
 }
 
 void CCodec::initiateReleaseIfStuck() {
-    bool tunneled = false;
-    bool isMediaTypeKnown = false;
-    {
-        static const std::set<std::string> kKnownMediaTypes{
-            MIMETYPE_VIDEO_VP8,
-            MIMETYPE_VIDEO_VP9,
-            MIMETYPE_VIDEO_AV1,
-            MIMETYPE_VIDEO_AVC,
-            MIMETYPE_VIDEO_HEVC,
-            MIMETYPE_VIDEO_MPEG4,
-            MIMETYPE_VIDEO_H263,
-            MIMETYPE_VIDEO_MPEG2,
-            MIMETYPE_VIDEO_RAW,
-            MIMETYPE_VIDEO_DOLBY_VISION,
-
-            MIMETYPE_AUDIO_AMR_NB,
-            MIMETYPE_AUDIO_AMR_WB,
-            MIMETYPE_AUDIO_MPEG,
-            MIMETYPE_AUDIO_AAC,
-            MIMETYPE_AUDIO_QCELP,
-            MIMETYPE_AUDIO_VORBIS,
-            MIMETYPE_AUDIO_OPUS,
-            MIMETYPE_AUDIO_G711_ALAW,
-            MIMETYPE_AUDIO_G711_MLAW,
-            MIMETYPE_AUDIO_RAW,
-            MIMETYPE_AUDIO_FLAC,
-            MIMETYPE_AUDIO_MSGSM,
-            MIMETYPE_AUDIO_AC3,
-            MIMETYPE_AUDIO_EAC3,
-
-            MIMETYPE_IMAGE_ANDROID_HEIC,
-        };
-        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-        const std::unique_ptr<Config> &config = *configLocked;
-        tunneled = config->mTunneled;
-        isMediaTypeKnown = (kKnownMediaTypes.count(config->mCodingMediaType) != 0);
-    }
     std::string name;
     bool pendingDeadline = false;
     {
@@ -2605,16 +2580,6 @@
             pendingDeadline = true;
         }
     }
-    if (!tunneled && isMediaTypeKnown && name.empty()) {
-        constexpr std::chrono::steady_clock::duration kWorkDurationThreshold = 3s;
-        std::chrono::steady_clock::duration elapsed = mChannel->elapsed();
-        if (elapsed >= kWorkDurationThreshold) {
-            name = "queue";
-        }
-        if (elapsed > 0s) {
-            pendingDeadline = true;
-        }
-    }
     if (name.empty()) {
         // We're not stuck.
         if (pendingDeadline) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index e800ccd..51082d1 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -34,6 +34,7 @@
 
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/drm/1.0/types.h>
+#include <android-base/parseint.h>
 #include <android-base/properties.h>
 #include <android-base/stringprintf.h>
 #include <binder/MemoryBase.h>
@@ -52,6 +53,7 @@
 #include <media/stagefright/SurfaceUtils.h>
 #include <media/MediaCodecBuffer.h>
 #include <mediadrm/ICrypto.h>
+#include <server_configurable_flags/get_flags.h>
 #include <system/window.h>
 
 #include "CCodecBufferChannel.h"
@@ -75,7 +77,6 @@
 namespace {
 
 constexpr size_t kSmoothnessFactor = 4;
-constexpr size_t kRenderingDepth = 3;
 
 // This is for keeping IGBP's buffer dropping logic in legacy mode other
 // than making it non-blocking. Do not change this value.
@@ -147,10 +148,12 @@
       mCCodecCallback(callback),
       mFrameIndex(0u),
       mFirstValidFrameIndex(0u),
+      mIsSurfaceToDisplay(false),
+      mHasPresentFenceTimes(false),
+      mRenderingDepth(3u),
       mMetaMode(MODE_NONE),
       mInputMetEos(false),
       mSendEncryptedInfoBuffer(false) {
-    mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor + kRenderingDepth;
     {
         Mutexed<Input>::Locked input(mInput);
         input->buffers.reset(new DummyInputBuffers(""));
@@ -165,11 +168,16 @@
         Mutexed<Output>::Locked output(mOutput);
         output->outputDelay = 0u;
         output->numSlots = kSmoothnessFactor;
+        output->bounded = false;
     }
     {
         Mutexed<BlockPools>::Locked pools(mBlockPools);
         pools->outputPoolId = C2BlockPool::BASIC_LINEAR;
     }
+    std::string value = server_configurable_flags::GetServerConfigurableFlag(
+            "media_native", "ccodec_rendering_depth", "3");
+    android::base::ParseInt(value, &mRenderingDepth);
+    mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor + mRenderingDepth;
 }
 
 CCodecBufferChannel::~CCodecBufferChannel() {
@@ -426,7 +434,8 @@
         size_t offset,
         const CryptoPlugin::SubSample *subSamples,
         size_t numSubSamples,
-        const sp<MediaCodecBuffer> &buffer) {
+        const sp<MediaCodecBuffer> &buffer,
+        AString* errorDetailMsg) {
     static const C2MemoryUsage kSecureUsage{C2MemoryUsage::READ_PROTECTED, 0};
     static const C2MemoryUsage kDefaultReadWriteUsage{
         C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
@@ -456,7 +465,6 @@
     ssize_t result = -1;
     ssize_t codecDataOffset = 0;
     if (mCrypto) {
-        AString errorDetailMsg;
         int32_t heapSeqNum = getHeapSeqNum(memory);
         hardware::drm::V1_0::SharedBuffer src{(uint32_t)heapSeqNum, offset, size};
         hardware::drm::V1_0::DestinationBuffer dst;
@@ -470,7 +478,7 @@
         }
         result = mCrypto->decrypt(
                 key, iv, mode, pattern, src, 0, subSamples, numSubSamples,
-                dst, &errorDetailMsg);
+                dst, errorDetailMsg);
         if (result < 0) {
             ALOGI("[%s] attachEncryptedBuffer: decrypt failed: result = %zd", mName, result);
             return result;
@@ -515,7 +523,9 @@
                     result = (ssize_t)_bytesWritten;
                     detailedError = _detailedError;
                 });
-
+        if (errorDetailMsg) {
+            errorDetailMsg->setTo(detailedError.c_str(), detailedError.size());
+        }
         if (!returnVoid.isOk() || status != CasStatus::OK || result < 0) {
             ALOGI("[%s] descramble failed, trans=%s, status=%d, result=%zd",
                     mName, returnVoid.description().c_str(), status, result);
@@ -723,7 +733,7 @@
         Mutexed<Output>::Locked output(mOutput);
         if (!output->buffers ||
                 output->buffers->hasPending() ||
-                output->buffers->numActiveSlots() >= output->numSlots) {
+                (!output->bounded && output->buffers->numActiveSlots() >= output->numSlots)) {
             return;
         }
     }
@@ -986,20 +996,36 @@
 
     int64_t mediaTimeUs = 0;
     (void)buffer->meta()->findInt64("timeUs", &mediaTimeUs);
-    trackReleasedFrame(qbo, mediaTimeUs, timestampNs);
-    processRenderedFrames(qbo.frameTimestamps);
+    if (mIsSurfaceToDisplay) {
+        trackReleasedFrame(qbo, mediaTimeUs, timestampNs);
+        processRenderedFrames(qbo.frameTimestamps);
+    } else {
+        // When the surface is an intermediate surface, onFrameRendered is triggered immediately
+        // when the frame is queued to the non-display surface
+        mCCodecCallback->onOutputFramesRendered(mediaTimeUs, timestampNs);
+    }
 
     return OK;
 }
 
 void CCodecBufferChannel::initializeFrameTrackingFor(ANativeWindow * window) {
+    mTrackedFrames.clear();
+
+    int isSurfaceToDisplay = 0;
+    window->query(window, NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &isSurfaceToDisplay);
+    mIsSurfaceToDisplay = isSurfaceToDisplay == 1;
+    // No frame tracking is needed if we're not sending frames to the display
+    if (!mIsSurfaceToDisplay) {
+        // Return early so we don't call into SurfaceFlinger (requiring permissions)
+        return;
+    }
+
     int hasPresentFenceTimes = 0;
     window->query(window, NATIVE_WINDOW_FRAME_TIMESTAMPS_SUPPORTS_PRESENT, &hasPresentFenceTimes);
     mHasPresentFenceTimes = hasPresentFenceTimes == 1;
-    if (mHasPresentFenceTimes) {
+    if (!mHasPresentFenceTimes) {
         ALOGI("Using latch times for frame rendered signals - present fences not supported");
     }
-    mTrackedFrames.clear();
 }
 
 void CCodecBufferChannel::trackReleasedFrame(const IGraphicBufferProducer::QueueBufferOutput& qbo,
@@ -1366,7 +1392,7 @@
         {
             Mutexed<OutputSurface>::Locked output(mOutputSurface);
             maxDequeueCount = output->maxDequeueBuffers = numOutputSlots +
-                    reorderDepth.value + kRenderingDepth;
+                    reorderDepth.value + mRenderingDepth;
             outputSurface = output->surface ?
                     output->surface->getIGraphicBufferProducer() : nullptr;
             if (outputSurface) {
@@ -1489,6 +1515,7 @@
         Mutexed<Output>::Locked output(mOutput);
         output->outputDelay = outputDelayValue;
         output->numSlots = numOutputSlots;
+        output->bounded = bool(outputSurface);
         if (graphic) {
             if (outputSurface || !buffersBoundToCodec) {
                 output->buffers.reset(new GraphicOutputBuffers(mName));
@@ -1571,7 +1598,8 @@
         watcher->inputDelay(inputDelayValue)
                 .pipelineDelay(pipelineDelayValue)
                 .outputDelay(outputDelayValue)
-                .smoothnessFactor(kSmoothnessFactor);
+                .smoothnessFactor(kSmoothnessFactor)
+                .tunneled(mTunneled);
         watcher->flush();
     }
 
@@ -1781,8 +1809,6 @@
             output->buffers->flushStash();
         }
     }
-    // reset the frames that are being tracked for onFrameRendered callbacks
-    mTrackedFrames.clear();
 }
 
 void CCodecBufferChannel::onWorkDone(
@@ -1982,6 +2008,7 @@
             newInputDelay.value_or(input->inputDelay) +
             newPipelineDelay.value_or(input->pipelineDelay) +
             kSmoothnessFactor;
+        input->inputDelay = newInputDelay.value_or(input->inputDelay);
         if (input->buffers->isArrayMode()) {
             if (input->numSlots >= newNumSlots) {
                 input->numExtraSlots = 0;
@@ -2035,7 +2062,7 @@
         {
             Mutexed<OutputSurface>::Locked output(mOutputSurface);
             maxDequeueCount = output->maxDequeueBuffers =
-                    numOutputSlots + reorderDepth + kRenderingDepth;
+                    numOutputSlots + reorderDepth + mRenderingDepth;
             if (output->surface) {
                 output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
             }
@@ -2076,7 +2103,10 @@
     // csd cannot be re-ordered and will always arrive first.
     if (initData != nullptr) {
         Mutexed<Output>::Locked output(mOutput);
-        if (output->buffers && outputFormat) {
+        if (!output->buffers) {
+            return false;
+        }
+        if (outputFormat) {
             output->buffers->updateSkipCutBuffer(outputFormat);
             output->buffers->setFormat(outputFormat);
         }
@@ -2085,12 +2115,15 @@
         }
         size_t index;
         sp<MediaCodecBuffer> outBuffer;
-        if (output->buffers && output->buffers->registerCsd(initData, &index, &outBuffer) == OK) {
+        if (output->buffers->registerCsd(initData, &index, &outBuffer) == OK) {
             outBuffer->meta()->setInt64("timeUs", timestamp.peek());
             outBuffer->meta()->setInt32("flags", BUFFER_FLAG_CODEC_CONFIG);
             ALOGV("[%s] onWorkDone: csd index = %zu [%p]", mName, index, outBuffer.get());
 
-            output.unlock();
+            // TRICKY: we want popped buffers reported in order, so we send
+            // the callback while holding the lock here. This assumes that
+            // onOutputBufferAvailable() does not block. onOutputBufferAvailable()
+            // callbacks are always sent with the Output lock held.
             mCallback->onOutputBufferAvailable(index, outBuffer);
         } else {
             ALOGD("[%s] onWorkDone: unable to register csd", mName);
@@ -2180,7 +2213,10 @@
         case OutputBuffers::DISCARD:
             break;
         case OutputBuffers::NOTIFY_CLIENT:
-            output.unlock();
+            // TRICKY: we want popped buffers reported in order, so we send
+            // the callback while holding the lock here. This assumes that
+            // onOutputBufferAvailable() does not block. onOutputBufferAvailable()
+            // callbacks are always sent with the Output lock held.
             mCallback->onOutputBufferAvailable(index, outBuffer);
             break;
         case OutputBuffers::REALLOCATE:
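
A minimal sketch (not part of the patch) of how the rendering depth introduced above is resolved at run time. It assumes the same flag namespace and default as the constructor hunk: GetServerConfigurableFlag() returns the default string when the flag is unset, and ParseInt() leaves the output untouched when parsing fails.

    #include <string>
    #include <android-base/parseint.h>
    #include <server_configurable_flags/get_flags.h>

    // Resolves the depth that feeds maxDequeueBuffers = kSmoothnessFactor + depth.
    static int ResolveRenderingDepth() {
        int renderingDepth = 3;  // fallback when the flag is unset or malformed
        const std::string value = server_configurable_flags::GetServerConfigurableFlag(
                "media_native", "ccodec_rendering_depth", "3");
        android::base::ParseInt(value, &renderingDepth);
        return renderingDepth;
    }
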
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index e2e55b9..2d87aa9 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -86,7 +86,8 @@
             size_t offset,
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
-            const sp<MediaCodecBuffer> &buffer) override;
+            const sp<MediaCodecBuffer> &buffer,
+            AString* errorDetailMsg) override;
     virtual status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
     virtual void pollForRenderedBuffers() override;
@@ -320,6 +321,9 @@
         std::unique_ptr<OutputBuffers> buffers;
         size_t numSlots;
         uint32_t outputDelay;
+        // true iff the underlying block pool is bounded; for example,
+        // a BufferQueue-based block pool would be bounded by the BufferQueue.
+        bool bounded;
     };
     Mutexed<Output> mOutput;
     Mutexed<std::list<std::unique_ptr<C2Work>>> mFlushedConfigs;
@@ -330,6 +334,7 @@
     sp<MemoryDealer> makeMemoryDealer(size_t heapSize);
 
     std::deque<TrackedFrame> mTrackedFrames;
+    bool mIsSurfaceToDisplay;
     bool mHasPresentFenceTimes;
 
     struct OutputSurface {
@@ -339,6 +344,7 @@
         std::map<uint64_t, int> rotation;
     };
     Mutexed<OutputSurface> mOutputSurface;
+    int mRenderingDepth;
 
     struct BlockPools {
         C2Allocator::id_t inputAllocatorId;
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index cfadc95..6d49fa8 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -285,6 +285,12 @@
         }
     }
 
+    // Updates or adds a mapper for a "sdkkey"
+    void updateConfigMappersForKey(const SdkKey& key,
+            const std::vector<ConfigMapper>& vec_cm) {
+        mConfigMappers.insert_or_assign(key, vec_cm);
+    }
+
     /**
      * Returns all paths for a specific domain.
      *
@@ -619,30 +625,30 @@
         .limitTo(D::OUTPUT & D::READ));
 
     deprecated(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO, "value")
-        .limitTo(D::VIDEO & D::PARAM & D::INPUT));
+        .limitTo(D::VIDEO & D::PARAM & D::INPUT & (D::CONFIG | D::PARAM)));
 
     deprecated(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO, "value")
-        .limitTo(D::VIDEO & D::OUTPUT));
+        .limitTo(D::VIDEO & D::OUTPUT & D::READ));
 
     add(ConfigMapper(
             std::string(C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO) + ".type",
             C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO, "type")
-        .limitTo(D::VIDEO & D::PARAM & D::INPUT));
+        .limitTo(D::VIDEO & D::PARAM & D::INPUT & (D::CONFIG | D::PARAM)));
 
     add(ConfigMapper(
             std::string(C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO) + ".data",
             C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO, "data")
-        .limitTo(D::VIDEO & D::PARAM & D::INPUT));
+        .limitTo(D::VIDEO & D::PARAM & D::INPUT & (D::CONFIG | D::PARAM)));
 
     add(ConfigMapper(
             std::string(C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO) + ".type",
             C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO, "type")
-        .limitTo(D::VIDEO & D::OUTPUT));
+        .limitTo(D::VIDEO & D::OUTPUT & D::READ));
 
     add(ConfigMapper(
             std::string(C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO) + ".data",
             C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO, "data")
-        .limitTo(D::VIDEO & D::OUTPUT));
+        .limitTo(D::VIDEO & D::OUTPUT & D::READ));
 
     add(ConfigMapper(C2_PARAMKEY_TEMPORAL_LAYERING, C2_PARAMKEY_TEMPORAL_LAYERING, "")
         .limitTo(D::ENCODER & D::VIDEO & D::OUTPUT));
@@ -934,6 +940,9 @@
     add(ConfigMapper(KEY_CHANNEL_MASK, C2_PARAMKEY_CHANNEL_MASK, "value")
         .limitTo(D::AUDIO & D::DECODER & D::READ));
 
+    add(ConfigMapper(KEY_CHANNEL_MASK, C2_PARAMKEY_CHANNEL_MASK, "value")
+        .limitTo(D::AUDIO & D::ENCODER & D::CONFIG));
+
     add(ConfigMapper(KEY_AAC_SBR_MODE, C2_PARAMKEY_AAC_SBR_MODE, "value")
         .limitTo(D::AUDIO & D::ENCODER & (D::CONFIG | D::PARAM | D::READ))
         .withMapper([](C2Value v) -> C2Value {
@@ -1914,6 +1923,67 @@
         const sp<AMessage> &sdkParams, Domain configDomain,
         c2_blocking_t blocking,
         std::vector<std::unique_ptr<C2Param>> *configUpdate) const {
+    // Update the mappers if we know more about this format.
+    // AV1 10b or 8b encoding request.
+    AString mime;
+    int32_t requestedSdkProfile = -1;
+    if ((mDomain == (IS_VIDEO | IS_ENCODER)) &&
+            sdkParams->findString(KEY_MIME, &mime) &&
+            mime == MIMETYPE_VIDEO_AV1) {
+
+        sdkParams->findInt32(KEY_PROFILE, &requestedSdkProfile);
+        bool is10bAv1EncodeRequested = (requestedSdkProfile == AV1ProfileMain10);
+
+        int32_t bitDepth = (is10bAv1EncodeRequested) ? 10 : 8;
+        // We always initialize with an 8b mapper. Update this only if needed.
+        if (bitDepth != 8) {
+            std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
+                C2Mapper::GetBitDepthProfileLevelMapper(mCodingMediaType, bitDepth);
+            mStandardParams->updateConfigMappersForKey(StandardParams::SdkKey(KEY_PROFILE),
+                {
+                    ConfigMapper(KEY_PROFILE, C2_PARAMKEY_PROFILE_LEVEL, "profile")
+                    .limitTo(Domain::CODED)
+                    .withMappers([mapper](C2Value v) -> C2Value {
+                        C2Config::profile_t c2 = PROFILE_UNUSED;
+                        int32_t sdk;
+                        if (mapper && v.get(&sdk) && mapper->mapProfile(sdk, &c2)) {
+                            return c2;
+                        }
+                        return PROFILE_UNUSED;
+                        }, [mapper](C2Value v) -> C2Value {
+                        C2Config::profile_t c2;
+                        int32_t sdk;
+                        using C2ValueType =
+                            typename _c2_reduce_enum_to_underlying_type<decltype(c2)>::type;
+                        if (mapper && v.get((C2ValueType*)&c2) && mapper->mapProfile(c2, &sdk)) {
+                            return sdk;
+                        }
+                        return C2Value();
+                })});
+            mStandardParams->updateConfigMappersForKey(StandardParams::SdkKey(KEY_LEVEL),
+                {
+                    ConfigMapper(KEY_LEVEL, C2_PARAMKEY_PROFILE_LEVEL, "level")
+                    .limitTo(Domain::CODED)
+                    .withMappers([mapper](C2Value v) -> C2Value {
+                        C2Config::level_t c2 = LEVEL_UNUSED;
+                        int32_t sdk;
+                        if (mapper && v.get(&sdk) && mapper->mapLevel(sdk, &c2)) {
+                            return c2;
+                        }
+                        return LEVEL_UNUSED;
+                        }, [mapper](C2Value v) -> C2Value {
+                        C2Config::level_t c2;
+                        int32_t sdk;
+                        using C2ValueType =
+                            typename _c2_reduce_enum_to_underlying_type<decltype(c2)>::type;
+                        if (mapper && v.get((C2ValueType*)&c2) && mapper->mapLevel(c2, &sdk)) {
+                            return sdk;
+                        }
+                        return C2Value();
+                })});
+        }
+    }
+
     ReflectedParamUpdater::Dict params = getReflectedFormat(sdkParams, configDomain);
 
     std::vector<C2Param::Index> indices;
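
A hedged sketch (not part of the patch) of the client-side configuration that exercises the AV1 10-bit branch above. The key and constant names are the standard MediaCodec SDK ones; the include paths are assumptions.

    #include <media/stagefright/foundation/AMessage.h>
    #include <media/stagefright/MediaCodecConstants.h>

    using namespace android;

    static sp<AMessage> MakeAv1TenBitFormat() {
        sp<AMessage> format = new AMessage;
        format->setString(KEY_MIME, MIMETYPE_VIDEO_AV1);
        format->setInt32(KEY_PROFILE, AV1ProfileMain10);  // requests a 10-bit AV1 encode
        // With the change above, KEY_PROFILE and KEY_LEVEL are translated through the
        // 10-bit mapper from GetBitDepthProfileLevelMapper() instead of the default
        // 8-bit mapper the config was initialized with.
        return format;
    }
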
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 82c31a8..228ad7e 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -478,19 +478,56 @@
                 mInitCheck = NO_INIT;
                 return;
             case C2PlanarLayout::TYPE_RGB:
-                ALOGD("Converter: unrecognized color format "
-                        "(client %d component %d) for RGB layout",
-                        mClientColorFormat, mComponentColorFormat);
-                mInitCheck = NO_INIT;
+                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGB;
                 // TODO: support MediaImage layout
-                return;
+                switch (mClientColorFormat) {
+                    case COLOR_FormatSurface:
+                    case COLOR_FormatRGBFlexible:
+                    case COLOR_Format24bitBGR888:
+                    case COLOR_Format24bitRGB888:
+                        ALOGD("Converter: accept color format "
+                                "(client %d component %d) for RGB layout",
+                                mClientColorFormat, mComponentColorFormat);
+                        break;
+                    default:
+                        ALOGD("Converter: unrecognized color format "
+                                "(client %d component %d) for RGB layout",
+                                mClientColorFormat, mComponentColorFormat);
+                        mInitCheck = BAD_VALUE;
+                        return;
+                }
+                if (layout.numPlanes != 3) {
+                    ALOGD("Converter: %d planes for RGB layout", layout.numPlanes);
+                    mInitCheck = BAD_VALUE;
+                    return;
+                }
+                break;
             case C2PlanarLayout::TYPE_RGBA:
-                ALOGD("Converter: unrecognized color format "
-                        "(client %d component %d) for RGBA layout",
-                        mClientColorFormat, mComponentColorFormat);
-                mInitCheck = NO_INIT;
+                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGBA;
                 // TODO: support MediaImage layout
-                return;
+                switch (mClientColorFormat) {
+                    case COLOR_FormatSurface:
+                    case COLOR_FormatRGBAFlexible:
+                    case COLOR_Format32bitABGR8888:
+                    case COLOR_Format32bitARGB8888:
+                    case COLOR_Format32bitBGRA8888:
+                        ALOGD("Converter: accept color format "
+                                "(client %d component %d) for RGBA layout",
+                                mClientColorFormat, mComponentColorFormat);
+                        break;
+                    default:
+                        ALOGD("Converter: unrecognized color format "
+                                "(client %d component %d) for RGBA layout",
+                                mClientColorFormat, mComponentColorFormat);
+                        mInitCheck = BAD_VALUE;
+                        return;
+                }
+                if (layout.numPlanes != 4) {
+                    ALOGD("Converter: %d planes for RGBA layout", layout.numPlanes);
+                    mInitCheck = BAD_VALUE;
+                    return;
+                }
+                break;
             default:
                 mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
                 if (layout.numPlanes == 1) {
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
index bc9197c..fa70a28 100644
--- a/media/codec2/sfplugin/PipelineWatcher.cpp
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -45,6 +45,11 @@
     return *this;
 }
 
+PipelineWatcher &PipelineWatcher::tunneled(bool value) {
+    mTunneled = value;
+    return *this;
+}
+
 void PipelineWatcher::onWorkQueued(
         uint64_t frameIndex,
         std::vector<std::shared_ptr<C2Buffer>> &&buffers,
@@ -87,8 +92,13 @@
     ALOGV("onWorkDone(frameIndex=%llu)", (unsigned long long)frameIndex);
     auto it = mFramesInPipeline.find(frameIndex);
     if (it == mFramesInPipeline.end()) {
-        ALOGD("onWorkDone: frameIndex not found (%llu); ignored",
-              (unsigned long long)frameIndex);
+        if (!mTunneled) {
+            ALOGD("onWorkDone: frameIndex not found (%llu); ignored",
+                  (unsigned long long)frameIndex);
+        } else {
+            ALOGV("onWorkDone: frameIndex not found (%llu); ignored",
+                  (unsigned long long)frameIndex);
+        }
         return;
     }
     (void)mFramesInPipeline.erase(it);
diff --git a/media/codec2/sfplugin/PipelineWatcher.h b/media/codec2/sfplugin/PipelineWatcher.h
index 1e23147..b29c7cd 100644
--- a/media/codec2/sfplugin/PipelineWatcher.h
+++ b/media/codec2/sfplugin/PipelineWatcher.h
@@ -37,7 +37,8 @@
         : mInputDelay(0),
           mPipelineDelay(0),
           mOutputDelay(0),
-          mSmoothnessFactor(0) {}
+          mSmoothnessFactor(0),
+          mTunneled(false) {}
     ~PipelineWatcher() = default;
 
     /**
@@ -65,6 +66,12 @@
     PipelineWatcher &smoothnessFactor(uint32_t value);
 
     /**
+     * \param value the new tunneled value
+     * \return  this object
+     */
+    PipelineWatcher &tunneled(bool value);
+
+    /**
      * Client queued a work item to the component.
      *
      * \param frameIndex  input frame index of this work
@@ -122,6 +129,7 @@
     uint32_t mPipelineDelay;
     uint32_t mOutputDelay;
     uint32_t mSmoothnessFactor;
+    bool mTunneled;
 
     struct Frame {
         Frame(std::vector<std::shared_ptr<C2Buffer>> &&b,
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index c606d6f..9297520 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -119,8 +119,8 @@
     { C2Color::PRIMARIES_BT601_525,    ColorAspects::PrimariesBT601_6_525 },
     { C2Color::PRIMARIES_GENERIC_FILM, ColorAspects::PrimariesGenericFilm },
     { C2Color::PRIMARIES_BT2020,       ColorAspects::PrimariesBT2020 },
-//    { C2Color::PRIMARIES_RP431,        ColorAspects::Primaries... },
-//    { C2Color::PRIMARIES_EG432,        ColorAspects::Primaries... },
+    { C2Color::PRIMARIES_RP431,        ColorAspects::PrimariesRP431 },
+    { C2Color::PRIMARIES_EG432,        ColorAspects::PrimariesEG432 },
 //    { C2Color::PRIMARIES_EBU3213,      ColorAspects::Primaries... },
     { C2Color::PRIMARIES_OTHER,        ColorAspects::PrimariesOther },
 };
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index aa908a8..0344fd3 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -190,7 +190,6 @@
         if (timeoutMs > INT_MAX) {
             timeoutMs = INT_MAX;
         }
-
         switch (mFence->wait((int)timeoutMs)) {
             case NO_ERROR:
                 return C2_OK;
@@ -202,7 +201,7 @@
     }
 
     virtual bool valid() const {
-        return mFence->getStatus() != Fence::Status::Invalid;
+        return (mFence && (mFence->getStatus() != Fence::Status::Invalid));
     }
 
     virtual bool ready() const {
@@ -213,6 +212,14 @@
         return mFence->dup();
     }
 
+    std::vector<int> fds() const {
+        std::vector<int> retFds;
+        for (int index = 0; index < mListFences.size(); index++) {
+            retFds.push_back(mListFences[index]->dup());
+        }
+        return retFds;
+    }
+
     virtual bool isHW() const {
         return true;
     }
@@ -222,39 +229,95 @@
     }
 
     virtual native_handle_t *createNativeHandle() const {
-        native_handle_t* nh = native_handle_create(1, 1);
+        std::vector<int> nativeFds = fds();
+        nativeFds.push_back(fd());
+        native_handle_t* nh = native_handle_create(nativeFds.size(), 1);
         if (!nh) {
             ALOGE("Failed to allocate native handle for sync fence");
+            for (int fd : nativeFds) {
+                close(fd);
+            }
             return nullptr;
         }
-        nh->data[0] = fd();
-        nh->data[1] = type();
+
+        for (int i = 0; i < nativeFds.size(); i++) {
+            nh->data[i] = nativeFds[i];
+        }
+        nh->data[nativeFds.size()] = type();
         return nh;
     }
 
     virtual ~SyncFenceImpl() {};
 
     SyncFenceImpl(int fenceFd) :
-            mFence(sp<Fence>::make(fenceFd)) {}
+        mFence(sp<Fence>::make(fenceFd)) {
+        mListFences.clear();
+        if (mFence) {
+            mListFences.push_back(mFence);
+        }
+    }
+
+    SyncFenceImpl(const std::vector<int>& fenceFds, int mergedFd) {
+        mListFences.clear();
+
+        for (int fenceFd : fenceFds) {
+            if (fenceFd < 0) {
+                continue;
+            } else {
+                mListFences.push_back(sp<Fence>::make(fenceFd));
+                if (!mListFences.back()) {
+                    mFence.clear();
+                    break;
+                }
+                if (mergedFd == -1) {
+                    mFence = (mFence == nullptr) ? (mListFences.back()) :
+                        (Fence::merge("syncFence", mFence, mListFences.back()));
+                }
+            }
+        }
+        if (mergedFd != -1) {
+            mFence = sp<Fence>::make(mergedFd);
+        }
+        if (!mFence) {
+            mListFences.clear();
+        }
+    }
 
     static std::shared_ptr<SyncFenceImpl> CreateFromNativeHandle(const native_handle_t* nh) {
-        if (!nh || nh->numFds != 1 || nh->numInts != 1) {
+        if (!nh || nh->numFds < 1 || nh->numInts < 1) {
             ALOGE("Invalid handle for sync fence");
             return nullptr;
         }
-        int fd = dup(nh->data[0]);
-        std::shared_ptr<SyncFenceImpl> p = std::make_shared<SyncFenceImpl>(fd);
+        std::vector<int> fds;
+        for (int i = 0; i < nh->numFds-1; i++) {
+            fds.push_back(dup(nh->data[i]));
+        }
+        // For a legacy single-fd handle the fds vector above is empty, so read data[0] directly.
+        std::shared_ptr<SyncFenceImpl> p = (nh->numFds == 1)?
+                (std::make_shared<SyncFenceImpl>(dup(nh->data[0]))):
+                (std::make_shared<SyncFenceImpl>(fds, (dup(nh->data[nh->numFds-1]))));
         if (!p) {
             ALOGE("Failed to allocate sync fence impl");
-            close(fd);
+            for (int fd : fds) {
+                close(fd);
+            }
         }
         return p;
     }
 
 private:
-    const sp<Fence> mFence;
+    std::vector<sp<Fence>> mListFences;
+    sp<Fence> mFence;  // merged fence, set when mListFences is non-empty
 };
 
+std::vector<int> ExtractFdsFromCodec2SyncFence(const C2Fence& fence) {
+    std::vector<int> retFds;
+    if ((fence.mImpl) && (fence.mImpl->type() == C2Fence::Impl::SYNC_FENCE)) {
+        retFds = static_cast<_C2FenceFactory::SyncFenceImpl *>(fence.mImpl.get())->fds();
+    }
+    return retFds;
+}
+
 C2Fence _C2FenceFactory::CreateSyncFence(int fenceFd) {
     std::shared_ptr<C2Fence::Impl> p;
     if (fenceFd >= 0) {
@@ -262,8 +325,7 @@
         if (!p) {
             ALOGE("Failed to allocate sync fence impl");
             close(fenceFd);
-        }
-        if (!p->valid()) {
+        } else if (!p->valid()) {
             p.reset();
         }
     } else {
@@ -272,6 +334,25 @@
     return C2Fence(p);
 }
 
+C2Fence _C2FenceFactory::CreateMultipleFdSyncFence(const std::vector<int>& fenceFds) {
+    std::shared_ptr<C2Fence::Impl> p;
+    if (fenceFds.size() > 0) {
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fenceFds, -1);
+        if (!p) {
+            ALOGE("Failed to allocate sync fence impl closing FDs");
+            for (int fenceFd : fenceFds) {
+                close(fenceFd);
+            }
+        } else if (!p->valid()) {
+            ALOGE("Invalid sync fence created");
+            p.reset();
+        }
+    } else {
+        ALOGE("Create sync fence from invalid fd list of size 0");
+    }
+    return C2Fence(p);
+}
+
 native_handle_t* _C2FenceFactory::CreateNativeHandle(const C2Fence& fence) {
     return fence.mImpl? fence.mImpl->createNativeHandle() : nullptr;
 }
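
With the change above, the sync-fence native handle now carries every individual fence fd followed by the merged fd, plus one int for the fence type. A hypothetical helper (not part of the patch) that splits those fields back out, mirroring the read side in CreateFromNativeHandle():

    #include <vector>
    #include <cutils/native_handle.h>

    struct UnpackedSyncFence {
        std::vector<int> fenceFds;  // individual fence fds: data[0] .. data[numFds - 2]
        int mergedFd;               // merged fence fd:      data[numFds - 1]
        int type;                   // fence type tag, stored in the first int slot
    };

    static bool UnpackSyncFenceHandle(const native_handle_t* nh, UnpackedSyncFence* out) {
        if (!nh || nh->numFds < 1 || nh->numInts < 1 || !out) {
            return false;
        }
        out->fenceFds.assign(nh->data, nh->data + nh->numFds - 1);
        out->mergedFd = nh->data[nh->numFds - 1];
        out->type = nh->data[nh->numFds];  // ints follow the fds in data[]
        return true;
    }
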
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index d7a9764..f6f97da 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -15,7 +15,7 @@
  */
 
 #define LOG_TAG "C2Store"
-#define LOG_NDEBUG 0
+// #define LOG_NDEBUG 0
 #include <utils/Log.h>
 
 #include <C2AllocatorBlob.h>
diff --git a/media/codec2/vndk/include/C2FenceFactory.h b/media/codec2/vndk/include/C2FenceFactory.h
index 4944115..ef25c47 100644
--- a/media/codec2/vndk/include/C2FenceFactory.h
+++ b/media/codec2/vndk/include/C2FenceFactory.h
@@ -20,6 +20,16 @@
 
 #include <C2Buffer.h>
 
+/*
+ * Create a list of fds from a fence.
+ *
+ * \param fence   C2Fence object from which the associated
+ *                file descriptors need to be extracted
+ * \return a vector of fds, or an empty vector if none are associated
+ */
+
+std::vector<int> ExtractFdsFromCodec2SyncFence(const C2Fence& fence);
+
 class C2SurfaceSyncMemory;
 
 /**
@@ -48,6 +58,14 @@
      */
     static C2Fence CreateSyncFence(int fenceFd);
 
+    /*
+     * Create C2Fence from list of fence file fds.
+     *
+     * \param fenceFds          Vector of file descriptors for the fences.
+     *                          They will be owned and closed by the returned fence object.
+     */
+    static C2Fence CreateMultipleFdSyncFence(const std::vector<int>& fenceFds);
+
     /**
      * Create a native handle from fence for marshalling
      *
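
A hedged usage sketch (not part of the patch) for the two new entry points declared above, assuming the factory takes ownership of the incoming fds and that each fd returned by ExtractFdsFromCodec2SyncFence() is a dup() the caller must close:

    #include <unistd.h>
    #include <vector>
    #include <C2FenceFactory.h>

    void ExampleMultiFdFence(int fd1, int fd2) {
        // Build a single C2Fence backed by both underlying sync fences.
        C2Fence fence = _C2FenceFactory::CreateMultipleFdSyncFence({fd1, fd2});

        // Pull the per-fence fds back out, e.g. for marshalling; each is a dup().
        std::vector<int> fds = ExtractFdsFromCodec2SyncFence(fence);
        for (int fd : fds) {
            close(fd);
        }
    }
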
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 13e430a..7648c76 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -134,7 +134,11 @@
      * The call was successful.
      */
     AAUDIO_OK,
-    AAUDIO_ERROR_BASE = -900, // TODO review
+
+    /**
+     * Reserved. This should not be returned.
+     */
+    AAUDIO_ERROR_BASE = -900,
 
     /**
      * The audio device was disconnected. This could occur, for example, when headphones
@@ -150,6 +154,10 @@
      */
     AAUDIO_ERROR_ILLEGAL_ARGUMENT,
     // reserved
+
+    /**
+     * An internal error occurred.
+     */
     AAUDIO_ERROR_INTERNAL = AAUDIO_ERROR_ILLEGAL_ARGUMENT + 2,
 
     /**
@@ -158,7 +166,9 @@
     AAUDIO_ERROR_INVALID_STATE,
     // reserved
     // reserved
-    /* The server rejected the handle used to identify the stream.
+
+    /**
+     * The server rejected the handle used to identify the stream.
      */
     AAUDIO_ERROR_INVALID_HANDLE = AAUDIO_ERROR_INVALID_STATE + 3,
     // reserved
@@ -174,6 +184,10 @@
      * or a timestamp is not available.
      */
     AAUDIO_ERROR_UNAVAILABLE,
+
+    /**
+     * Reserved. This should not be returned.
+     */
     AAUDIO_ERROR_NO_FREE_HANDLES,
 
     /**
@@ -191,6 +205,10 @@
      * An operation took longer than expected.
      */
     AAUDIO_ERROR_TIMEOUT,
+
+    /**
+     * A queue is full; the operation would block.
+     */
     AAUDIO_ERROR_WOULD_BLOCK,
 
     /**
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 84c715f..93ac966 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -132,10 +132,6 @@
 
     request.getConfiguration().setBufferCapacity(builder.getBufferCapacity());
 
-    request.getConfiguration().setHardwareSamplesPerFrame(builder.getHardwareSamplesPerFrame());
-    request.getConfiguration().setHardwareSampleRate(builder.getHardwareSampleRate());
-    request.getConfiguration().setHardwareFormat(builder.getHardwareFormat());
-
     mDeviceChannelCount = getSamplesPerFrame(); // Assume it will be the same. Update if not.
 
     mServiceStreamHandleInfo = mServiceInterface.openStream(request, configurationOutput);
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
index a3ce58c..611ddcd 100644
--- a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
@@ -135,10 +135,9 @@
     int coefficientIndex = 0;
     double phase = 0.0; // ranges from 0.0 to 1.0, fraction between samples
     // Stretch the sinc function for low pass filtering.
-    const float cutoffScaler = normalizedCutoff *
-            ((outputRate < inputRate)
-             ? ((float)outputRate / inputRate)
-             : ((float)inputRate / outputRate));
+    const float cutoffScaler = (outputRate < inputRate)
+             ? (normalizedCutoff * (float)outputRate / inputRate)
+             : 1.0f; // Do not filter when upsampling.
     const int numTapsHalf = getNumTaps() / 2; // numTaps must be even.
     const float numTapsHalfInverse = 1.0f / numTapsHalf;
     for (int i = 0; i < numRows; i++) {
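
A minimal sketch (not part of the patch) of the new cutoff scaling. Downsampling scales the cutoff by the rate ratio; upsampling now applies no extra low-pass filtering:

    static float CutoffScaler(float normalizedCutoff, int inputRate, int outputRate) {
        return (outputRate < inputRate)
                ? normalizedCutoff * (float)outputRate / inputRate
                : 1.0f;  // do not filter when upsampling
    }
    // Example with the default cutoff of 0.70:
    //   CutoffScaler(0.70f, 48000, 44100) ~= 0.643  (downsampling)
    //   CutoffScaler(0.70f, 44100, 48000) == 1.0f   (upsampling)
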
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
index 717f3fd..9e47335 100644
--- a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
@@ -111,6 +111,9 @@
          * Set lower to reduce aliasing.
          * Default is 0.70.
          *
+         * Note that this value is ignored when upsampling, which is when
+         * the outputRate is higher than the inputRate.
+         *
          * @param normalizedCutoff anti-aliasing filter cutoff
          * @return address of this builder for chaining calls
          */
@@ -227,6 +230,10 @@
 
     /**
      * Generate the filter coefficients in optimal order.
+     *
+     * Note that normalizedCutoff is ignored when upsampling, which is when
+     * the outputRate is higher than the inputRate.
+     *
      * @param inputRate sample rate of the input stream
      * @param outputRate  sample rate of the output stream
      * @param numRows number of rows in the array that contain a set of tap coefficients
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index fc8ba9e..67ee42e 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -132,10 +132,11 @@
             AAudioConvert_contentTypeToInternal(builder.getContentType());
     const audio_usage_t usage =
             AAudioConvert_usageToInternal(builder.getUsage());
-    const audio_flags_mask_t attributesFlags =
-        AAudioConvert_allowCapturePolicyToAudioFlagsMask(builder.getAllowedCapturePolicy(),
-                                                         builder.getSpatializationBehavior(),
-                                                         builder.isContentSpatialized());
+    const audio_flags_mask_t attributesFlags = AAudio_computeAudioFlagsMask(
+                                                            builder.getAllowedCapturePolicy(),
+                                                            builder.getSpatializationBehavior(),
+                                                            builder.isContentSpatialized(),
+                                                            flags);
 
     const audio_attributes_t attributes = {
             .content_type = contentType,
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index 8920e53..e8324a8 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -255,10 +255,11 @@
     return (audio_source_t) preset; // same value
 }
 
-audio_flags_mask_t AAudioConvert_allowCapturePolicyToAudioFlagsMask(
+audio_flags_mask_t AAudio_computeAudioFlagsMask(
         aaudio_allowed_capture_policy_t policy,
         aaudio_spatialization_behavior_t spatializationBehavior,
-        bool isContentSpatialized) {
+        bool isContentSpatialized,
+        audio_output_flags_t outputFlags) {
     audio_flags_mask_t flagsMask = AUDIO_FLAG_NONE;
     switch (policy) {
         case AAUDIO_UNSPECIFIED:
@@ -295,6 +296,15 @@
         flagsMask = static_cast<audio_flags_mask_t>(flagsMask | AUDIO_FLAG_CONTENT_SPATIALIZED);
     }
 
+    if ((outputFlags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) != 0) {
+        flagsMask = static_cast<audio_flags_mask_t>(flagsMask | AUDIO_FLAG_HW_AV_SYNC);
+    }
+    if ((outputFlags & AUDIO_OUTPUT_FLAG_FAST) != 0) {
+        flagsMask = static_cast<audio_flags_mask_t>(flagsMask | AUDIO_FLAG_LOW_LATENCY);
+    } else if ((outputFlags & AUDIO_OUTPUT_FLAG_DEEP_BUFFER) != 0) {
+        flagsMask = static_cast<audio_flags_mask_t>(flagsMask | AUDIO_FLAG_DEEP_BUFFER);
+    }
+
     return flagsMask;
 }
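
A hedged sketch (not part of the patch) of how the new output-flag handling composes. The constants are the standard AAudio/audio HAL ones and the include path is an assumption; when both FAST and DEEP_BUFFER are requested, the FAST branch above wins:

    #include <system/audio.h>
    #include "utility/AAudioUtilities.h"

    static audio_flags_mask_t ExampleFlagsMask() {
        const audio_output_flags_t outputFlags = static_cast<audio_output_flags_t>(
                AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_DEEP_BUFFER);
        // The result contains AUDIO_FLAG_LOW_LATENCY but not AUDIO_FLAG_DEEP_BUFFER.
        return AAudio_computeAudioFlagsMask(AAUDIO_ALLOW_CAPTURE_BY_ALL,
                                            AAUDIO_SPATIALIZATION_BEHAVIOR_AUTO,
                                            false /*isContentSpatialized*/,
                                            outputFlags);
    }
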
 
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index d8e5b00..d44bbab 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -94,10 +94,11 @@
  * That is done somewhere else.
  * @return internal audio flags mask
  */
-audio_flags_mask_t AAudioConvert_allowCapturePolicyToAudioFlagsMask(
+audio_flags_mask_t AAudio_computeAudioFlagsMask(
         aaudio_allowed_capture_policy_t policy,
         aaudio_spatialization_behavior_t spatializationBehavior,
-        bool isContentSpatialized);
+        bool isContentSpatialized,
+        audio_output_flags_t outputFlags);
 
 audio_flags_mask_t AAudioConvert_privacySensitiveToAudioFlagsMask(
         bool privacySensitive);
diff --git a/media/libaaudio/tests/test_resampler.cpp b/media/libaaudio/tests/test_resampler.cpp
index 1e4f59c..13e4a20 100644
--- a/media/libaaudio/tests/test_resampler.cpp
+++ b/media/libaaudio/tests/test_resampler.cpp
@@ -101,14 +101,20 @@
         }
     }
 
+    // Flush out remaining frames from the flowgraph
+    while (!mcResampler->isWriteNeeded()) {
+        mcResampler->readNextFrame(output);
+        output++;
+        numRead++;
+    }
+
     ASSERT_LE(numRead, kNumOutputSamples);
     // Some frames are lost priming the FIR filter.
-    const int kMaxAlgorithmicFrameLoss = 16;
+    const int kMaxAlgorithmicFrameLoss = 5;
     EXPECT_GT(numRead, kNumOutputSamples - kMaxAlgorithmicFrameLoss);
 
     int sinkZeroCrossingCount = countZeroCrossingsWithHysteresis(outputBuffer.get(), numRead);
-    // Some cycles may get chopped off at the end.
-    const int kMaxZeroCrossingDelta = 3;
+    const int kMaxZeroCrossingDelta = std::max(sinkRate / sourceRate / 2, 1);
     EXPECT_LE(abs(sourceZeroCrossingCount - sinkZeroCrossingCount), kMaxZeroCrossingDelta);
 
     // Detect glitches by looking for spikes in the second derivative.
@@ -136,8 +142,7 @@
 
 
 TEST(test_resampler, resampler_scan_all) {
-    // TODO Add 64000, 88200, 96000 when they work. Failing now.
-    const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000};
+    const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000, 64000, 88200, 96000};
     const MultiChannelResampler::Quality qualities[] =
     {
         MultiChannelResampler::Quality::Fastest,
@@ -193,10 +198,9 @@
     checkResampler(11025, 44100, MultiChannelResampler::Quality::Best);
 }
 
-// TODO This fails because the output is very low.
-//TEST(test_resampler, resampler_11025_88200_best) {
-//    checkResampler(11025, 88200, MultiChannelResampler::Quality::Best);
-//}
+TEST(test_resampler, resampler_11025_88200_best) {
+    checkResampler(11025, 88200, MultiChannelResampler::Quality::Best);
+}
 
 TEST(test_resampler, resampler_16000_48000_best) {
     checkResampler(16000, 48000, MultiChannelResampler::Quality::Best);
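
The new zero-crossing tolerance relies on integer division, so it only widens for large upsampling ratios. A quick compile-time check (not part of the patch) of the values it yields for two of the tested rate pairs:

    #include <algorithm>

    static_assert(std::max(88200 / 11025 / 2, 1) == 4, "11025 -> 88200 tolerates 4");
    static_assert(std::max(48000 / 44100 / 2, 1) == 1, "44100 -> 48000 clamps to 1");
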
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
index b32667e..bd10e44 100644
--- a/media/libaudioclient/AidlConversion.cpp
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -480,129 +480,6 @@
     return aidl;
 }
 
-ConversionResult<audio_flags_mask_t>
-aidl2legacy_AudioFlag_audio_flags_mask_t(media::AudioFlag aidl) {
-    switch (aidl) {
-        case media::AudioFlag::AUDIBILITY_ENFORCED:
-            return AUDIO_FLAG_AUDIBILITY_ENFORCED;
-        case media::AudioFlag::SECURE:
-            return AUDIO_FLAG_SECURE;
-        case media::AudioFlag::SCO:
-            return AUDIO_FLAG_SCO;
-        case media::AudioFlag::BEACON:
-            return AUDIO_FLAG_BEACON;
-        case media::AudioFlag::HW_AV_SYNC:
-            return AUDIO_FLAG_HW_AV_SYNC;
-        case media::AudioFlag::HW_HOTWORD:
-            return AUDIO_FLAG_HW_HOTWORD;
-        case media::AudioFlag::BYPASS_INTERRUPTION_POLICY:
-            return AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY;
-        case media::AudioFlag::BYPASS_MUTE:
-            return AUDIO_FLAG_BYPASS_MUTE;
-        case media::AudioFlag::LOW_LATENCY:
-            return AUDIO_FLAG_LOW_LATENCY;
-        case media::AudioFlag::DEEP_BUFFER:
-            return AUDIO_FLAG_DEEP_BUFFER;
-        case media::AudioFlag::NO_MEDIA_PROJECTION:
-            return AUDIO_FLAG_NO_MEDIA_PROJECTION;
-        case media::AudioFlag::MUTE_HAPTIC:
-            return AUDIO_FLAG_MUTE_HAPTIC;
-        case media::AudioFlag::NO_SYSTEM_CAPTURE:
-            return AUDIO_FLAG_NO_SYSTEM_CAPTURE;
-        case media::AudioFlag::CAPTURE_PRIVATE:
-            return AUDIO_FLAG_CAPTURE_PRIVATE;
-        case media::AudioFlag::CONTENT_SPATIALIZED:
-            return AUDIO_FLAG_CONTENT_SPATIALIZED;
-        case media::AudioFlag::NEVER_SPATIALIZE:
-            return AUDIO_FLAG_NEVER_SPATIALIZE;
-        case media::AudioFlag::CALL_REDIRECTION:
-            return AUDIO_FLAG_CALL_REDIRECTION;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<media::AudioFlag>
-legacy2aidl_audio_flags_mask_t_AudioFlag(audio_flags_mask_t legacy) {
-    switch (legacy) {
-        case AUDIO_FLAG_NONE:
-            return unexpected(BAD_VALUE);
-        case AUDIO_FLAG_AUDIBILITY_ENFORCED:
-            return media::AudioFlag::AUDIBILITY_ENFORCED;
-        case AUDIO_FLAG_SECURE:
-            return media::AudioFlag::SECURE;
-        case AUDIO_FLAG_SCO:
-            return media::AudioFlag::SCO;
-        case AUDIO_FLAG_BEACON:
-            return media::AudioFlag::BEACON;
-        case AUDIO_FLAG_HW_AV_SYNC:
-            return media::AudioFlag::HW_AV_SYNC;
-        case AUDIO_FLAG_HW_HOTWORD:
-            return media::AudioFlag::HW_HOTWORD;
-        case AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY:
-            return media::AudioFlag::BYPASS_INTERRUPTION_POLICY;
-        case AUDIO_FLAG_BYPASS_MUTE:
-            return media::AudioFlag::BYPASS_MUTE;
-        case AUDIO_FLAG_LOW_LATENCY:
-            return media::AudioFlag::LOW_LATENCY;
-        case AUDIO_FLAG_DEEP_BUFFER:
-            return media::AudioFlag::DEEP_BUFFER;
-        case AUDIO_FLAG_NO_MEDIA_PROJECTION:
-            return media::AudioFlag::NO_MEDIA_PROJECTION;
-        case AUDIO_FLAG_MUTE_HAPTIC:
-            return media::AudioFlag::MUTE_HAPTIC;
-        case AUDIO_FLAG_NO_SYSTEM_CAPTURE:
-            return media::AudioFlag::NO_SYSTEM_CAPTURE;
-        case AUDIO_FLAG_CAPTURE_PRIVATE:
-            return media::AudioFlag::CAPTURE_PRIVATE;
-        case AUDIO_FLAG_CONTENT_SPATIALIZED:
-            return media::AudioFlag::CONTENT_SPATIALIZED;
-        case AUDIO_FLAG_NEVER_SPATIALIZE:
-            return media::AudioFlag::NEVER_SPATIALIZE;
-        case AUDIO_FLAG_CALL_REDIRECTION:
-            return media::AudioFlag::CALL_REDIRECTION;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<audio_flags_mask_t>
-aidl2legacy_int32_t_audio_flags_mask_t_mask(int32_t aidl) {
-    return convertBitmask<audio_flags_mask_t, int32_t, audio_flags_mask_t, media::AudioFlag>(
-            aidl, aidl2legacy_AudioFlag_audio_flags_mask_t, indexToEnum_index<media::AudioFlag>,
-            enumToMask_bitmask<audio_flags_mask_t, audio_flags_mask_t>);
-}
-
-ConversionResult<int32_t>
-legacy2aidl_audio_flags_mask_t_int32_t_mask(audio_flags_mask_t legacy) {
-    return convertBitmask<int32_t, audio_flags_mask_t, media::AudioFlag, audio_flags_mask_t>(
-            legacy, legacy2aidl_audio_flags_mask_t_AudioFlag,
-            indexToEnum_bitmask<audio_flags_mask_t>,
-            enumToMask_index<int32_t, media::AudioFlag>);
-}
-
-ConversionResult<audio_attributes_t>
-aidl2legacy_AudioAttributesInternal_audio_attributes_t(const media::AudioAttributesInternal& aidl) {
-    audio_attributes_t legacy;
-    legacy.content_type = VALUE_OR_RETURN(
-            aidl2legacy_AudioContentType_audio_content_type_t(aidl.contentType));
-    legacy.usage = VALUE_OR_RETURN(aidl2legacy_AudioUsage_audio_usage_t(aidl.usage));
-    legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSource_audio_source_t(aidl.source));
-    legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_flags_mask_t_mask(aidl.flags));
-    RETURN_IF_ERROR(aidl2legacy_string(aidl.tags, legacy.tags, sizeof(legacy.tags)));
-    return legacy;
-}
-
-ConversionResult<media::AudioAttributesInternal>
-legacy2aidl_audio_attributes_t_AudioAttributesInternal(const audio_attributes_t& legacy) {
-    media::AudioAttributesInternal aidl;
-    aidl.contentType = VALUE_OR_RETURN(
-            legacy2aidl_audio_content_type_t_AudioContentType(legacy.content_type));
-    aidl.usage = VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.usage));
-    aidl.source = VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSource(legacy.source));
-    aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_flags_mask_t_int32_t_mask(legacy.flags));
-    aidl.tags = VALUE_OR_RETURN(legacy2aidl_string(legacy.tags, sizeof(legacy.tags)));
-    return aidl;
-}
-
 ConversionResult<sp<IMemory>>
 aidl2legacy_SharedFileRegion_IMemory(const media::SharedFileRegion& aidl) {
     sp<IMemory> legacy;
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 2bbfb76..01e3d53 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -281,15 +281,15 @@
     double_loadable: true,
     local_include_dir: "aidl",
     srcs: [
-        "aidl/android/media/AudioAttributesInternal.aidl",
         "aidl/android/media/AudioClient.aidl",
         "aidl/android/media/AudioDirectMode.aidl",
-        "aidl/android/media/AudioFlag.aidl",
         "aidl/android/media/AudioGainSys.aidl",
         "aidl/android/media/AudioHalVersion.aidl",
+        "aidl/android/media/AudioHwModule.aidl",
         "aidl/android/media/AudioIoConfigEvent.aidl",
         "aidl/android/media/AudioIoDescriptor.aidl",
         "aidl/android/media/AudioPatchFw.aidl",
+        "aidl/android/media/AudioPolicyConfig.aidl",
         "aidl/android/media/AudioPortFw.aidl",
         "aidl/android/media/AudioPortSys.aidl",
         "aidl/android/media/AudioPortConfigFw.aidl",
@@ -300,11 +300,14 @@
         "aidl/android/media/AudioPortRole.aidl",
         "aidl/android/media/AudioPortType.aidl",
         "aidl/android/media/AudioProfileSys.aidl",
+        "aidl/android/media/AudioRoute.aidl",
         "aidl/android/media/AudioTimestampInternal.aidl",
         "aidl/android/media/AudioUniqueIdUse.aidl",
         "aidl/android/media/AudioVibratorInfo.aidl",
+        "aidl/android/media/DeviceConnectedState.aidl",
         "aidl/android/media/EffectDescriptor.aidl",
         "aidl/android/media/TrackSecondaryOutputInfo.aidl",
+        "aidl/android/media/SurroundSoundConfig.aidl",
     ],
     imports: [
         "android.media.audio.common.types-V2",
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 7116b82..871318f 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -262,6 +262,12 @@
     return af->setMode(mode);
 }
 
+status_t AudioSystem::setSimulateDeviceConnections(bool enabled) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == 0) return PERMISSION_DENIED;
+    return af->setSimulateDeviceConnections(enabled);
+}
+
 status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
@@ -1054,8 +1060,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return NO_INIT;
 
-    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
     int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
     AudioConfig configAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_config_t_AudioConfig(*config, false /*isInput*/));
@@ -1094,7 +1100,7 @@
     *isSpatialized = responseAidl.isSpatialized;
     *isBitPerfect = responseAidl.isBitPerfect;
     *attr = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(responseAidl.attr));
+            aidl2legacy_AudioAttributes_audio_attributes_t(responseAidl.attr));
 
     return OK;
 }
@@ -1159,8 +1165,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return NO_INIT;
 
-    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
     int32_t inputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(*input));
     int32_t riidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_unique_id_t_int32_t(riid));
     int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
@@ -1281,8 +1287,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(attr));
     int32_t indexAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(index));
     AudioDeviceDescription deviceAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
@@ -1296,8 +1302,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(attr));
     AudioDeviceDescription deviceAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
     int32_t indexAidl;
@@ -1311,8 +1317,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(attr));
     int32_t indexAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->getMaxVolumeIndexForAttributes(attrAidl, &indexAidl)));
@@ -1324,8 +1330,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(attr));
     int32_t indexAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->getMinVolumeIndexForAttributes(attrAidl, &indexAidl)));
@@ -1357,8 +1363,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioAttributesInternal aaAidl = VALUE_OR_RETURN_STATUS(
-             legacy2aidl_audio_attributes_t_AudioAttributesInternal(aa));
+    media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
+             legacy2aidl_audio_attributes_t_AudioAttributes(aa));
     std::vector<AudioDevice> retAidl;
     RETURN_STATUS_IF_ERROR(
             statusTFromBinderStatus(aps->getDevicesForAttributes(aaAidl, forVolume, &retAidl)));
@@ -1570,6 +1576,15 @@
     return OK;
 }
 
+status_t AudioSystem::listDeclaredDevicePorts(media::AudioPortRole role,
+                                              std::vector<media::AudioPortFw>* result) {
+    if (result == nullptr) return BAD_VALUE;
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) return PERMISSION_DENIED;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aps->listDeclaredDevicePorts(role, result)));
+    return OK;
+}
+
 status_t AudioSystem::getAudioPort(struct audio_port_v7* port) {
     if (port == nullptr) {
         return BAD_VALUE;
@@ -1867,8 +1882,8 @@
 
     media::AudioPortConfigFw sourceAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_port_config_AudioPortConfigFw(*source));
-    media::AudioAttributesInternal attributesAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attributes));
+    media::audio::common::AudioAttributes attributesAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(*attributes));
     int32_t portIdAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->startAudioSource(sourceAidl, attributesAidl, &portIdAidl)));
@@ -2153,8 +2168,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioAttributesInternal aaAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(aa));
+    media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(aa));
     int32_t productStrategyAidl;
 
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -2183,8 +2198,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioAttributesInternal aaAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(aa));
+    media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(aa));
     int32_t volumeGroupAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->getVolumeGroupFromAudioAttributes(aaAidl, fallbackOnDefault, &volumeGroupAidl)));
@@ -2389,8 +2404,8 @@
     audio_attributes_t attributes = attr != nullptr ? *attr : AUDIO_ATTRIBUTES_INITIALIZER;
     audio_config_t configuration = config != nullptr ? *config : AUDIO_CONFIG_INITIALIZER;
 
-    std::optional<media::AudioAttributesInternal> attrAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(attributes));
+    std::optional<media::audio::common::AudioAttributes> attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(attributes));
     std::optional<AudioConfig> configAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_config_t_AudioConfig(configuration, false /*isInput*/));
     std::vector<AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
@@ -2427,8 +2442,8 @@
         return PERMISSION_DENIED;
     }
 
-    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
     AudioConfig configAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_config_t_AudioConfig(*config, false /*isInput*/));
 
@@ -2451,8 +2466,8 @@
         return PERMISSION_DENIED;
     }
 
-    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
 
     std::vector<media::audio::common::AudioProfile> audioProfilesAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -2507,6 +2522,14 @@
     return af->supportsBluetoothVariableLatency(support);
 }
 
+status_t AudioSystem::getAudioPolicyConfig(media::AudioPolicyConfig *config) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->getAudioPolicyConfig(config);
+}
+
 class CaptureStateListenerImpl : public media::BnCaptureStateListener,
                                  public IBinder::DeathRecipient {
 public:
@@ -2623,8 +2646,8 @@
         return PERMISSION_DENIED;
     }
 
-    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
     media::AudioMixerAttributesInternal mixerAttrAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal(*mixerAttr));
     int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
@@ -2643,8 +2666,8 @@
         return PERMISSION_DENIED;
     }
 
-    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
     int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
     std::optional<media::AudioMixerAttributesInternal> _aidlReturn;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -2666,8 +2689,8 @@
         return PERMISSION_DENIED;
     }
 
-    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
     int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
     int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
     return statusTFromBinderStatus(
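The AudioSystem.cpp hunks above add two new client-side entry points, listDeclaredDevicePorts() and getAudioPolicyConfig(). A minimal, hedged usage sketch for the former (header paths and the helper name are illustrative assumptions, not part of this patch):

// Sketch only: enumerate device ports declared in the policy configuration,
// including currently detached ones, via the new AudioSystem wrapper above.
#include <vector>
#include <media/AudioSystem.h>            // assumed include path
#include <android/media/AudioPortFw.h>    // assumed generated AIDL header
#include <android/media/AudioPortRole.h>  // assumed generated AIDL header

android::status_t countDeclaredSinkPorts(size_t* count) {
    std::vector<android::media::AudioPortFw> ports;
    // SINK limits the query to output devices; NONE would return both roles.
    const android::status_t res = android::AudioSystem::listDeclaredDevicePorts(
            android::media::AudioPortRole::SINK, &ports);
    if (res != android::OK) return res;
    *count = ports.size();
    return android::OK;
}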
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 9386b9b..359c140 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -178,8 +178,8 @@
     auto result = [&]() -> ConversionResult<bool> {
         media::audio::common::AudioConfigBase configAidl = VALUE_OR_RETURN(
                 legacy2aidl_audio_config_base_t_AudioConfigBase(config, false /*isInput*/));
-        media::AudioAttributesInternal attributesAidl = VALUE_OR_RETURN(
-                legacy2aidl_audio_attributes_t_AudioAttributesInternal(attributes));
+        media::audio::common::AudioAttributes attributesAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_attributes_t_AudioAttributes(attributes));
         bool retAidl;
         RETURN_IF_ERROR(aidl_utils::statusTFromBinderStatus(
                 aps->isDirectOutputSupported(configAidl, attributesAidl, &retAidl)));
@@ -1616,13 +1616,16 @@
     }
 
     AutoMutex lock(mLock);
+    // IAudioTrack::stop() isn't synchronous; we don't know when presentation completes
+    if (mState == STATE_STOPPED || mState == STATE_FLUSHED) {
+        *position = 0;
+        return NO_ERROR;
+    }
     // FIXME: offloaded and direct tracks call into the HAL for render positions
     // for compressed/synced data; however, we use proxy position for pure linear pcm data
     // as we do not know the capability of the HAL for pcm position support and standby.
     // There may be some latency differences between the HAL position and the proxy position.
     if (isOffloadedOrDirect_l() && !isPurePcmData_l()) {
-        uint32_t dspFrames = 0;
-
         if (isOffloaded_l() && ((mState == STATE_PAUSED) || (mState == STATE_PAUSED_STOPPING))) {
             ALOGV("%s(%d): called in paused state, return cached position %u",
                 __func__, mPortId, mPausedPosition);
@@ -1630,13 +1633,15 @@
             return NO_ERROR;
         }
 
+        uint32_t dspFrames = 0;
         if (mOutput != AUDIO_IO_HANDLE_NONE) {
             uint32_t halFrames; // actually unused
-            (void) AudioSystem::getRenderPosition(mOutput, &halFrames, &dspFrames);
             // FIXME: on getRenderPosition() error, we return OK with frame position 0.
+            if (AudioSystem::getRenderPosition(mOutput, &halFrames, &dspFrames) != NO_ERROR) {
+                *position = 0;
+                return NO_ERROR;
+            }
         }
-        // FIXME: dspFrames may not be zero in (mState == STATE_STOPPED || mState == STATE_FLUSHED)
-        // due to hardware latency. We leave this behavior for now.
         *position = dspFrames;
     } else {
         if (mCblk->mFlags & CBLK_INVALID) {
@@ -1644,11 +1649,9 @@
             // FIXME: for compatibility with the Java API we ignore the restoreTrack_l()
             // error here (e.g. DEAD_OBJECT) and return OK with the last recorded server position.
         }
-
-        // IAudioTrack::stop() isn't synchronous; we don't know when presentation completes
-        *position = (mState == STATE_STOPPED || mState == STATE_FLUSHED) ?
-                0 : updateAndGetPosition_l().value();
+        *position = updateAndGetPosition_l().value();
     }
+
     return NO_ERROR;
 }
 
@@ -1702,13 +1705,21 @@
 
 status_t AudioTrack::setOutputDevice(audio_port_handle_t deviceId) {
     AutoMutex lock(mLock);
-    ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
-            __func__, mPortId, deviceId, mSelectedDeviceId);
+    ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d mRoutedDeviceId=%d",
+            __func__, mPortId, deviceId, mSelectedDeviceId, mRoutedDeviceId);
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
-        if (mStatus == NO_ERROR) {
-            android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
-            mProxy->interrupt();
+        if (mStatus == NO_ERROR && mSelectedDeviceId != mRoutedDeviceId) {
+            if (isPlaying_l()) {
+                android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                mProxy->interrupt();
+            } else {
+                // if the track is idle, try to restore now and
+                // defer to next start if not possible
+                if (restoreTrack_l("setOutputDevice") != OK) {
+                    android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                }
+            }
         }
     }
     return NO_ERROR;
@@ -2185,7 +2196,6 @@
         // obtainBuffer() is called with mutex unlocked, so keep extra references to these fields to
         // keep them from going away if another thread re-creates the track during obtainBuffer()
         sp<AudioTrackClientProxy> proxy;
-        sp<IMemory> iMem;
 
         {   // start of lock scope
             AutoMutex lock(mLock);
@@ -2211,8 +2221,9 @@
             }
 
             // Keep the extra references
+            mProxyObtainBufferRef = mProxy;
             proxy = mProxy;
-            iMem = mCblkMemory;
+            mCblkMemoryObtainBufferRef = mCblkMemory;
 
             if (mState == STATE_STOPPING) {
                 status = -EINTR;
@@ -2260,6 +2271,8 @@
     buffer.mFrameCount = stepCount;
     buffer.mRaw = audioBuffer->raw;
 
+    sp<IMemory> tempMemory;
+    sp<AudioTrackClientProxy> tempProxy;
     AutoMutex lock(mLock);
     if (audioBuffer->sequence != mSequence) {
         // This Buffer came from a different IAudioTrack instance, so ignore the releaseBuffer
@@ -2269,7 +2282,12 @@
     }
     mReleased += stepCount;
     mInUnderrun = false;
-    mProxy->releaseBuffer(&buffer);
+    mProxyObtainBufferRef->releaseBuffer(&buffer);
+    // The extra references to the shared memory and the proxy taken in `obtainBuffer` are not
+    // used after calling `releaseBuffer`. Move them to temporary strong pointers so that they
+    // are cleared outside `releaseBuffer`.
+    tempMemory = std::move(mCblkMemoryObtainBufferRef);
+    tempProxy = std::move(mProxyObtainBufferRef);
 
     // restart track if it was disabled by audioflinger due to previous underrun
     restartIfDisabled();
@@ -2507,11 +2525,22 @@
         timeout.tv_sec = WAIT_STREAM_END_TIMEOUT_SEC;
         timeout.tv_nsec = 0;
 
+        // Use timestamp progress to safeguard we don't falsely time out.
+        AudioTimestamp timestamp{};
+        const bool isTimestampValid = getTimestamp(timestamp) == OK;
+        const auto frameCount = isTimestampValid ? timestamp.mPosition : 0;
+
         status_t status = proxy->waitStreamEndDone(&timeout);
         switch (status) {
+        case TIMED_OUT:
+            if (isTimestampValid
+                    && getTimestamp(timestamp) == OK && frameCount != timestamp.mPosition) {
+                ALOGD("%s: waitStreamEndDone retrying", __func__);
+                break;  // we retry again (and recheck possible state change).
+            }
+            [[fallthrough]];
         case NO_ERROR:
         case DEAD_OBJECT:
-        case TIMED_OUT:
             if (status != DEAD_OBJECT) {
                 // for DEAD_OBJECT, we do not send a EVENT_STREAM_END after stop();
                 // instead, the application should handle the EVENT_NEW_IAUDIOTRACK.
@@ -2529,6 +2558,7 @@
                 }
             }
             if (waitStreamEnd && status != DEAD_OBJECT) {
+               ALOGV("%s: waitStreamEndDone complete", __func__);
                return NS_INACTIVE;
             }
             break;
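The AudioTrack::getPosition() hunks above move the stopped/flushed check to the top of the function and make a getRenderPosition() failure report position 0. A hedged sketch of the client-visible behaviour (helper name assumed, not part of this patch):

// Sketch only: after stop(), getPosition() now reports 0 for both offloaded
// and PCM tracks, instead of a possibly stale server/DSP position.
#include <media/AudioTrack.h>  // assumed include path

uint32_t positionAfterStop(const android::sp<android::AudioTrack>& track) {
    track->stop();
    uint32_t pos = 0;
    if (track->getPosition(&pos) != android::NO_ERROR) {
        return 0;
    }
    // With the early return added above, pos is expected to be 0 here.
    return pos;
}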
diff --git a/media/libaudioclient/AudioVolumeGroup.cpp b/media/libaudioclient/AudioVolumeGroup.cpp
index 978599e..c4ca5b9 100644
--- a/media/libaudioclient/AudioVolumeGroup.cpp
+++ b/media/libaudioclient/AudioVolumeGroup.cpp
@@ -49,9 +49,9 @@
     aidl.groupId = VALUE_OR_RETURN(legacy2aidl_volume_group_t_int32_t(legacy.getId()));
     aidl.name = legacy.getName();
     aidl.audioAttributes = VALUE_OR_RETURN(
-            convertContainer<std::vector<media::AudioAttributesInternal>>(
+            convertContainer<std::vector<media::audio::common::AudioAttributes>>(
                     legacy.getAudioAttributes(),
-                    legacy2aidl_audio_attributes_t_AudioAttributesInternal));
+                    legacy2aidl_audio_attributes_t_AudioAttributes));
     aidl.streams = VALUE_OR_RETURN(
             convertContainer<std::vector<AudioStreamType>>(legacy.getStreamTypes(),
             legacy2aidl_audio_stream_type_t_AudioStreamType));
@@ -65,7 +65,7 @@
             VALUE_OR_RETURN(aidl2legacy_int32_t_volume_group_t(aidl.groupId)),
             VALUE_OR_RETURN(convertContainer<AttributesVector>(
                     aidl.audioAttributes,
-                    aidl2legacy_AudioAttributesInternal_audio_attributes_t)),
+                    aidl2legacy_AudioAttributes_audio_attributes_t)),
             VALUE_OR_RETURN(convertContainer<StreamTypeVector>(
                     aidl.streams,
                     aidl2legacy_AudioStreamType_audio_stream_type_t))
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index bbc39e8..7caaaaf 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -56,7 +56,7 @@
 
 ConversionResult<media::CreateTrackRequest> IAudioFlinger::CreateTrackInput::toAidl() const {
     media::CreateTrackRequest aidl;
-    aidl.attr = VALUE_OR_RETURN(legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    aidl.attr = VALUE_OR_RETURN(legacy2aidl_audio_attributes_t_AudioAttributes(attr));
     // Do not be misled by 'Input'--this is an input to 'createTrack', which creates output tracks.
     aidl.config = VALUE_OR_RETURN(legacy2aidl_audio_config_t_AudioConfig(
                     config, false /*isInput*/));
@@ -77,7 +77,7 @@
 ConversionResult<IAudioFlinger::CreateTrackInput>
 IAudioFlinger::CreateTrackInput::fromAidl(const media::CreateTrackRequest& aidl) {
     IAudioFlinger::CreateTrackInput legacy;
-    legacy.attr = VALUE_OR_RETURN(aidl2legacy_AudioAttributesInternal_audio_attributes_t(aidl.attr));
+    legacy.attr = VALUE_OR_RETURN(aidl2legacy_AudioAttributes_audio_attributes_t(aidl.attr));
     // Do not be misled by 'Input'--this is an input to 'createTrack', which creates output tracks.
     legacy.config = VALUE_OR_RETURN(
             aidl2legacy_AudioConfig_audio_config_t(aidl.config, false /*isInput*/));
@@ -153,7 +153,7 @@
 ConversionResult<media::CreateRecordRequest>
 IAudioFlinger::CreateRecordInput::toAidl() const {
     media::CreateRecordRequest aidl;
-    aidl.attr = VALUE_OR_RETURN(legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    aidl.attr = VALUE_OR_RETURN(legacy2aidl_audio_attributes_t_AudioAttributes(attr));
     aidl.config = VALUE_OR_RETURN(
             legacy2aidl_audio_config_base_t_AudioConfigBase(config, true /*isInput*/));
     aidl.clientInfo = VALUE_OR_RETURN(legacy2aidl_AudioClient_AudioClient(clientInfo));
@@ -174,7 +174,7 @@
         const media::CreateRecordRequest& aidl) {
     IAudioFlinger::CreateRecordInput legacy;
     legacy.attr = VALUE_OR_RETURN(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(aidl.attr));
+            aidl2legacy_AudioAttributes_audio_attributes_t(aidl.attr));
     legacy.config = VALUE_OR_RETURN(
             aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.config, true /*isInput*/));
     legacy.clientInfo = VALUE_OR_RETURN(aidl2legacy_AudioClient_AudioClient(aidl.clientInfo));
@@ -810,10 +810,14 @@
 }
 
 status_t AudioFlingerClientAdapter::setDeviceConnectedState(
-        const struct audio_port_v7 *port, bool connected) {
+        const struct audio_port_v7 *port, media::DeviceConnectedState state) {
     media::AudioPortFw aidlPort = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_port_v7_AudioPortFw(*port));
-    return statusTFromBinderStatus(mDelegate->setDeviceConnectedState(aidlPort, connected));
+    return statusTFromBinderStatus(mDelegate->setDeviceConnectedState(aidlPort, state));
+}
+
+status_t AudioFlingerClientAdapter::setSimulateDeviceConnections(bool enabled) {
+    return statusTFromBinderStatus(mDelegate->setSimulateDeviceConnections(enabled));
 }
 
 status_t AudioFlingerClientAdapter::setRequestedLatencyMode(
@@ -883,6 +887,16 @@
     return statusTFromBinderStatus(mDelegate->invalidateTracks(portIdsAidl));
 }
 
+status_t AudioFlingerClientAdapter::getAudioPolicyConfig(media::AudioPolicyConfig *config) {
+    if (config == nullptr) {
+        return BAD_VALUE;
+    }
+
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mDelegate->getAudioPolicyConfig(config)));
+
+    return NO_ERROR;
+}
+
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
 AudioFlingerServerAdapter::AudioFlingerServerAdapter(
@@ -1365,9 +1379,13 @@
 }
 
 Status AudioFlingerServerAdapter::setDeviceConnectedState(
-        const media::AudioPortFw& port, bool connected) {
+        const media::AudioPortFw& port, media::DeviceConnectedState state) {
     audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPortFw_audio_port_v7(port));
-    return Status::fromStatusT(mDelegate->setDeviceConnectedState(&portLegacy, connected));
+    return Status::fromStatusT(mDelegate->setDeviceConnectedState(&portLegacy, state));
+}
+
+Status AudioFlingerServerAdapter::setSimulateDeviceConnections(bool enabled) {
+    return Status::fromStatusT(mDelegate->setSimulateDeviceConnections(enabled));
 }
 
 Status AudioFlingerServerAdapter::setRequestedLatencyMode(
@@ -1421,4 +1439,8 @@
     return Status::ok();
 }
 
+Status AudioFlingerServerAdapter::getAudioPolicyConfig(media::AudioPolicyConfig* _aidl_return) {
+    return Status::fromStatusT(mDelegate->getAudioPolicyConfig(_aidl_return));
+}
+
 } // namespace android
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index f0b4d11..9c4ccb8 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -1306,6 +1306,7 @@
         streamType = AUDIO_STREAM_DTMF;
     }
     attr = AudioSystem::streamTypeToAttributes(streamType);
+    attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_LOW_LATENCY);
 
     const size_t frameCount = mProcessSize;
     status_t status = mpAudioTrack->set(
@@ -1314,7 +1315,7 @@
             AUDIO_FORMAT_PCM_16_BIT,
             AUDIO_CHANNEL_OUT_MONO,
             frameCount,
-            AUDIO_OUTPUT_FLAG_FAST,
+            AUDIO_OUTPUT_FLAG_NONE,
             wp<AudioTrack::IAudioTrackCallback>::fromExisting(this),
             0,    // notificationFrames
             0,    // sharedBuffer
diff --git a/media/libaudioclient/VolumeGroupAttributes.cpp b/media/libaudioclient/VolumeGroupAttributes.cpp
index 530e73f..938e574 100644
--- a/media/libaudioclient/VolumeGroupAttributes.cpp
+++ b/media/libaudioclient/VolumeGroupAttributes.cpp
@@ -48,7 +48,7 @@
 legacy2aidl_VolumeGroupAttributes_AudioAttributesEx(const VolumeGroupAttributes& legacy) {
     media::AudioAttributesEx aidl;
     aidl.attributes = VALUE_OR_RETURN(
-            legacy2aidl_audio_attributes_t_AudioAttributesInternal(legacy.getAttributes()));
+            legacy2aidl_audio_attributes_t_AudioAttributes(legacy.getAttributes()));
     aidl.streamType = VALUE_OR_RETURN(
             legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.getStreamType()));
     aidl.groupId = VALUE_OR_RETURN(legacy2aidl_volume_group_t_int32_t(legacy.getGroupId()));
@@ -60,7 +60,7 @@
     return VolumeGroupAttributes(VALUE_OR_RETURN(aidl2legacy_int32_t_volume_group_t(aidl.groupId)),
                            VALUE_OR_RETURN(aidl2legacy_AudioStreamType_audio_stream_type_t(
                                    aidl.streamType)),
-                           VALUE_OR_RETURN(aidl2legacy_AudioAttributesInternal_audio_attributes_t(
+                           VALUE_OR_RETURN(aidl2legacy_AudioAttributes_audio_attributes_t(
                                    aidl.attributes)));
 }
 
diff --git a/media/libaudioclient/aidl/android/media/AudioAttributesEx.aidl b/media/libaudioclient/aidl/android/media/AudioAttributesEx.aidl
index 335866f..7827bdb 100644
--- a/media/libaudioclient/aidl/android/media/AudioAttributesEx.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioAttributesEx.aidl
@@ -16,7 +16,7 @@
 
 package android.media;
 
-import android.media.AudioAttributesInternal;
+import android.media.audio.common.AudioAttributes;
 import android.media.audio.common.AudioStreamType;
 
 /**
@@ -24,7 +24,7 @@
  * {@hide}
  */
 parcelable AudioAttributesEx {
-    AudioAttributesInternal attributes;
+    AudioAttributes attributes;
     AudioStreamType streamType;
     /** Interpreted as volume_group_t. */
     int groupId;
diff --git a/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl b/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl
deleted file mode 100644
index 2e74206..0000000
--- a/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.audio.common.AudioContentType;
-import android.media.audio.common.AudioSource;
-import android.media.audio.common.AudioUsage;
-
-/**
- * The "Internal" suffix of this type name is to disambiguate it from the
- * android.media.AudioAttributes SDK type.
- * {@hide}
- */
-parcelable AudioAttributesInternal {
-    AudioContentType contentType;
-    AudioUsage usage;
-    AudioSource source;
-    // Bitmask, indexed by AudioFlag.
-    int flags;
-    @utf8InCpp String tags; /* UTF8 */
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioFlag.aidl b/media/libaudioclient/aidl/android/media/AudioFlag.aidl
deleted file mode 100644
index acf4e6d..0000000
--- a/media/libaudioclient/aidl/android/media/AudioFlag.aidl
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioFlag {
-    AUDIBILITY_ENFORCED = 0,
-    SECURE = 1,
-    SCO = 2,
-    BEACON = 3,
-    HW_AV_SYNC = 4,
-    HW_HOTWORD = 5,
-    BYPASS_INTERRUPTION_POLICY = 6,
-    BYPASS_MUTE = 7,
-    LOW_LATENCY = 8,
-    DEEP_BUFFER = 9,
-    NO_MEDIA_PROJECTION = 10,
-    MUTE_HAPTIC = 11,
-    NO_SYSTEM_CAPTURE = 12,
-    CAPTURE_PRIVATE = 13,
-    CONTENT_SPATIALIZED = 14,
-    NEVER_SPATIALIZE = 15,
-    CALL_REDIRECTION = 16,
-}
diff --git a/services/audioflinger/StateQueueInstantiations.cpp b/media/libaudioclient/aidl/android/media/AudioHwModule.aidl
similarity index 60%
rename from services/audioflinger/StateQueueInstantiations.cpp
rename to media/libaudioclient/aidl/android/media/AudioHwModule.aidl
index 6f4505e..9251400 100644
--- a/services/audioflinger/StateQueueInstantiations.cpp
+++ b/media/libaudioclient/aidl/android/media/AudioHwModule.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012 The Android Open Source Project
+ * Copyright (C) 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,16 +14,18 @@
  * limitations under the License.
  */
 
-#include "Configuration.h"
-#include "FastMixerState.h"
-#include "FastCaptureState.h"
-#include "StateQueue.h"
+package android.media;
 
-// FIXME hack for gcc
+import android.media.audio.common.AudioPort;
+import android.media.AudioRoute;
 
-namespace android {
-
-template class StateQueue<FastMixerState>;      // typedef FastMixerStateQueue
-template class StateQueue<FastCaptureState>;    // typedef FastCaptureStateQueue
-
+/*
+ * A representation of a HAL module configuration.
+ * {@hide}
+ */
+parcelable AudioHwModule {
+    int /* audio_module_handle_t */ handle;
+    @utf8InCpp String name;
+    AudioPort[] ports;
+    AudioRoute[] routes;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioPolicyConfig.aidl b/media/libaudioclient/aidl/android/media/AudioPolicyConfig.aidl
new file mode 100644
index 0000000..87767c2
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPolicyConfig.aidl
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioHwModule;
+import android.media.SurroundSoundConfig;
+import android.media.audio.common.AudioHalEngineConfig;
+import android.media.audio.common.AudioMode;
+
+/*
+ * Audio policy configuration. Functionally replaces the APM XML file.
+ * {@hide}
+ */
+parcelable AudioPolicyConfig {
+    AudioHwModule[] modules;
+    AudioMode[] supportedModes;
+    SurroundSoundConfig surroundSoundConfig;
+    AudioHalEngineConfig engineConfig;
+}
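The new AudioPolicyConfig parcelable is what the getAudioPolicyConfig() method (added to IAudioFlingerService later in this change) returns. A hedged sketch of fetching it through the AudioSystem wrapper introduced above (header paths and the helper name assumed):

// Sketch only: retrieve the AIDL-provided policy configuration and inspect
// the number of HAL modules it declares.
#include <media/AudioSystem.h>                // assumed include path
#include <android/media/AudioPolicyConfig.h>  // assumed generated AIDL header

android::status_t getPolicyModuleCount(size_t* moduleCount) {
    android::media::AudioPolicyConfig config;
    const android::status_t res = android::AudioSystem::getAudioPolicyConfig(&config);
    // Per the interface comment, this is only implemented for the AIDL HAL.
    if (res != android::OK) return res;
    *moduleCount = config.modules.size();
    return android::OK;
}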
diff --git a/media/libaudioclient/aidl/android/media/AudioRoute.aidl b/media/libaudioclient/aidl/android/media/AudioRoute.aidl
new file mode 100644
index 0000000..5ee2161
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioRoute.aidl
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * TODO(b/280077672): This is a temporary copy of the stable
+ * android.hardware.audio.core.AudioRoute. Interfaces from the Core API do not
+ * support the CPP backend. This copy will be removed either by moving the
+ * AudioRoute from core to a.m.a.common or by switching the framework internal
+ * interfaces to the NDK backend.
+ * {@hide}
+ */
+parcelable AudioRoute {
+    /**
+     * The list of IDs of source audio ports ('AudioPort.id').
+     * There must be at least one source in a valid route and all IDs must be
+     * unique.
+     */
+    int[] sourcePortIds;
+    /** The ID of the sink audio port ('AudioPort.id'). */
+    int sinkPortId;
+    /** If set, only one source can be active, mixing is not supported. */
+    boolean isExclusive;
+}
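AudioRoute is a plain parcelable, so framework code can build and validate instances directly. A hedged sketch that checks the invariants documented above (generated header path assumed):

// Sketch only: a valid route needs at least one source port id and all ids
// must be unique, as stated in the comments of the AIDL above.
#include <cstdint>
#include <set>
#include <android/media/AudioRoute.h>  // assumed generated AIDL header

bool isValidRoute(const android::media::AudioRoute& route) {
    if (route.sourcePortIds.empty()) return false;
    const std::set<int32_t> unique(route.sourcePortIds.begin(), route.sourcePortIds.end());
    return unique.size() == route.sourcePortIds.size();
}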
diff --git a/media/libaudioclient/aidl/android/media/AudioVolumeGroup.aidl b/media/libaudioclient/aidl/android/media/AudioVolumeGroup.aidl
index b95a1d3..424f8b8 100644
--- a/media/libaudioclient/aidl/android/media/AudioVolumeGroup.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioVolumeGroup.aidl
@@ -16,7 +16,7 @@
 
 package android.media;
 
-import android.media.AudioAttributesInternal;
+import android.media.audio.common.AudioAttributes;
 import android.media.audio.common.AudioStreamType;
 
 /**
@@ -26,6 +26,6 @@
     /** Interpreted as volume_group_t. */
     int groupId;
     @utf8InCpp String name;
-    AudioAttributesInternal[] audioAttributes;
+    AudioAttributes[] audioAttributes;
     AudioStreamType[] streams;
 }
diff --git a/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl b/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
index b938a3e..57e8f42 100644
--- a/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
@@ -16,8 +16,8 @@
 
 package android.media;
 
-import android.media.AudioAttributesInternal;
 import android.media.AudioClient;
+import android.media.audio.common.AudioAttributes;
 import android.media.audio.common.AudioConfigBase;
 
 /**
@@ -28,7 +28,7 @@
  * {@hide}
  */
 parcelable CreateRecordRequest {
-    AudioAttributesInternal attr;
+    AudioAttributes attr;
     AudioConfigBase config;
     AudioClient clientInfo;
     /** Interpreted as audio_unique_id_t. */
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl b/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl
index 212221e..24e6a6c 100644
--- a/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl
@@ -16,7 +16,7 @@
 
 package android.media;
 
-import android.media.AudioAttributesInternal;
+import android.media.audio.common.AudioAttributes;
 import android.media.AudioClient;
 import android.media.IAudioTrackCallback;
 import android.media.SharedFileRegion;
@@ -30,7 +30,7 @@
  * {@hide}
  */
 parcelable CreateTrackRequest {
-    AudioAttributesInternal attr;
+    AudioAttributes attr;
     AudioConfig config;
     AudioClient clientInfo;
     @nullable SharedFileRegion sharedBuffer;
diff --git a/services/audioflinger/TypedLogger.cpp b/media/libaudioclient/aidl/android/media/DeviceConnectedState.aidl
similarity index 63%
rename from services/audioflinger/TypedLogger.cpp
rename to media/libaudioclient/aidl/android/media/DeviceConnectedState.aidl
index 57c206b..e401384 100644
--- a/services/audioflinger/TypedLogger.cpp
+++ b/media/libaudioclient/aidl/android/media/DeviceConnectedState.aidl
@@ -1,12 +1,11 @@
 /*
- *
- * Copyright 2017, The Android Open Source Project
+ * Copyright (C) 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ *      http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -14,14 +13,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package android.media;
 
-#define LOG_TAG "AudioFlinger"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-
-#include <pthread.h>
-#include "TypedLogger.h"
-
-namespace android {
-thread_local NBLog::Writer *tlNBLogWriter;
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum DeviceConnectedState {
+    CONNECTED = 0,
+    DISCONNECTED = 1,
+    PREPARE_TO_DISCONNECT = 2,
 }
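DeviceConnectedState replaces the bool 'connected' argument of setDeviceConnectedState() and adds the PREPARE_TO_DISCONNECT state, which had no bool equivalent. A hedged migration sketch for a caller (header paths and the helper name assumed):

// Sketch only: announce an upcoming disconnect before reporting the device
// as disconnected, using the new enum instead of the old bool.
#include <media/IAudioFlinger.h>                 // assumed include path
#include <android/media/DeviceConnectedState.h>  // assumed generated AIDL header

android::status_t announceDisconnect(const android::sp<android::IAudioFlinger>& af,
                                     const struct audio_port_v7* port) {
    using android::media::DeviceConnectedState;
    // Previously: af->setDeviceConnectedState(port, false /*connected*/);
    const android::status_t res =
            af->setDeviceConnectedState(port, DeviceConnectedState::PREPARE_TO_DISCONNECT);
    if (res != android::OK) return res;
    return af->setDeviceConnectedState(port, DeviceConnectedState::DISCONNECTED);
}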
diff --git a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
index 9d44bb0..b814b85 100644
--- a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
@@ -16,9 +16,9 @@
 
 package android.media;
 
+import android.media.audio.common.AudioAttributes;
 import android.media.audio.common.AudioConfigBase;
 import android.media.audio.common.AudioStreamType;
-import android.media.AudioAttributesInternal;
 /**
  * {@hide}
  */
@@ -38,5 +38,5 @@
     AudioConfigBase configBase;
     boolean isBitPerfect;
     /** The corrected audio attributes. **/
-    AudioAttributesInternal attr;
+    AudioAttributes attr;
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 568c865..6412810 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -17,6 +17,7 @@
 package android.media;
 
 import android.media.AudioPatchFw;
+import android.media.AudioPolicyConfig;
 import android.media.AudioPortFw;
 import android.media.AudioPortConfigFw;
 import android.media.AudioUniqueIdUse;
@@ -27,6 +28,7 @@
 import android.media.CreateRecordResponse;
 import android.media.CreateTrackRequest;
 import android.media.CreateTrackResponse;
+import android.media.DeviceConnectedState;
 import android.media.OpenInputRequest;
 import android.media.OpenInputResponse;
 import android.media.OpenOutputRequest;
@@ -227,7 +229,10 @@
 
     int getAAudioHardwareBurstMinUsec();
 
-    void setDeviceConnectedState(in AudioPortFw devicePort, boolean connected);
+    void setDeviceConnectedState(in AudioPortFw devicePort, DeviceConnectedState state);
+
+    // Used for tests only. Requires AIDL HAL to work.
+    void setSimulateDeviceConnections(boolean enabled);
 
     /**
      * Requests a given latency mode (See AudioLatencyMode.aidl) on an output stream.
@@ -277,6 +282,12 @@
      */
     void invalidateTracks(in int[] /* audio_port_handle_t[] */ portIds);
 
+    /**
+     * Only implemented when the AIDL audio HAL is used. Provides the APM
+     * configuration which used to be in the XML file.
+     */
+    AudioPolicyConfig getAudioPolicyConfig();
+
     // When adding a new method, please review and update
     // IAudioFlinger.h AudioFlingerServerAdapter::Delegate::TransactionCode
     // AudioFlinger.cpp AudioFlinger::onTransactWrapper()
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index fa6c733..3e9b27f 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -18,8 +18,6 @@
 
 import android.content.AttributionSourceState;
 
-import android.media.AudioAttributesEx;
-import android.media.AudioAttributesInternal;
 import android.media.AudioDirectMode;
 import android.media.AudioMix;
 import android.media.AudioMixerAttributesInternal;
@@ -43,6 +41,7 @@
 import android.media.ICaptureStateListener;
 import android.media.INativeSpatializerCallback;
 import android.media.SoundTriggerSession;
+import android.media.audio.common.AudioAttributes;
 import android.media.audio.common.AudioConfig;
 import android.media.audio.common.AudioConfigBase;
 import android.media.audio.common.AudioDevice;
@@ -85,7 +84,7 @@
 
     int /* audio_io_handle_t */ getOutput(AudioStreamType stream);
 
-    GetOutputForAttrResponse getOutputForAttr(in AudioAttributesInternal attr,
+    GetOutputForAttrResponse getOutputForAttr(in AudioAttributes attr,
                                               int /* audio_session_t */ session,
                                               in AttributionSourceState attributionSource,
                                               in AudioConfig config,
@@ -98,7 +97,7 @@
 
     void releaseOutput(int /* audio_port_handle_t */ portId);
 
-    GetInputForAttrResponse getInputForAttr(in AudioAttributesInternal attr,
+    GetInputForAttrResponse getInputForAttr(in AudioAttributes attr,
                                             int /* audio_io_handle_t */ input,
                                             int /* audio_unique_id_t */ riid,
                                             int /* audio_session_t */ session,
@@ -125,20 +124,20 @@
     int getStreamVolumeIndex(AudioStreamType stream,
                              in AudioDeviceDescription device);
 
-    void setVolumeIndexForAttributes(in AudioAttributesInternal attr,
+    void setVolumeIndexForAttributes(in AudioAttributes attr,
                                      in AudioDeviceDescription device,
                                      int index);
 
-    int getVolumeIndexForAttributes(in AudioAttributesInternal attr,
+    int getVolumeIndexForAttributes(in AudioAttributes attr,
                                     in AudioDeviceDescription device);
 
-    int getMaxVolumeIndexForAttributes(in AudioAttributesInternal attr);
+    int getMaxVolumeIndexForAttributes(in AudioAttributes attr);
 
-    int getMinVolumeIndexForAttributes(in AudioAttributesInternal attr);
+    int getMinVolumeIndexForAttributes(in AudioAttributes attr);
 
     int /* product_strategy_t */ getStrategyForStream(AudioStreamType stream);
 
-    AudioDevice[] getDevicesForAttributes(in AudioAttributesInternal attr, boolean forVolume);
+    AudioDevice[] getDevicesForAttributes(in AudioAttributes attr, boolean forVolume);
 
     int /* audio_io_handle_t */ getOutputForEffect(in EffectDescriptor desc);
 
@@ -200,10 +199,12 @@
      * Check if direct playback is possible for given format, sample rate, channel mask and flags.
      */
     boolean isDirectOutputSupported(in AudioConfigBase config,
-                                    in AudioAttributesInternal attributes);
+                                    in AudioAttributes attributes);
 
     /**
-     * List available audio ports and their attributes. Returns the generation.
+     * List currently attached audio ports and their attributes. Returns the generation.
+     * The generation is incremented each time anything changes in the port
+     * configuration.
      *
      * On input, count represents the maximum length of the returned array.
      * On output, count is the total number of elements, which may be larger than the array size.
@@ -215,6 +216,13 @@
                        inout Int count,
                        out AudioPortFw[] ports);
 
+    /**
+     * List all device ports declared in the configuration (including currently detached ones).
+     * 'role' can be 'NONE' to get both input and output devices,
+     * 'SINK' for output devices, and 'SOURCE' for input devices.
+     */
+    AudioPortFw[] listDeclaredDevicePorts(AudioPortRole role);
+
     /** Get attributes for the audio port with the given id (AudioPort.hal.id field). */
     AudioPortFw getAudioPort(int /* audio_port_handle_t */ portId);
 
@@ -263,7 +271,7 @@
     void removeUserIdDeviceAffinities(int userId);
 
     int /* audio_port_handle_t */ startAudioSource(in AudioPortConfigFw source,
-                                                   in AudioAttributesInternal attributes);
+                                                   in AudioAttributes attributes);
 
     void stopAudioSource(int /* audio_port_handle_t */ portId);
 
@@ -324,10 +332,10 @@
 
     AudioProductStrategy[] listAudioProductStrategies();
     int /* product_strategy_t */ getProductStrategyFromAudioAttributes(
-            in AudioAttributesInternal aa, boolean fallbackOnDefault);
+            in AudioAttributes aa, boolean fallbackOnDefault);
 
     AudioVolumeGroup[] listAudioVolumeGroups();
-    int /* volume_group_t */ getVolumeGroupFromAudioAttributes(in AudioAttributesInternal aa,
+    int /* volume_group_t */ getVolumeGroupFromAudioAttributes(in AudioAttributes aa,
                                                                boolean fallbackOnDefault);
 
     void setRttEnabled(boolean enabled);
@@ -388,21 +396,21 @@
      * supported criteria. For instance, supplying no argument will tell if spatialization is
      * supported or not in general.
      */
-    boolean canBeSpatialized(in @nullable AudioAttributesInternal attr,
+    boolean canBeSpatialized(in @nullable AudioAttributes attr,
                              in @nullable AudioConfig config,
                              in AudioDevice[] devices);
 
     /**
      * Query how the direct playback is currently supported on the device.
      */
-    AudioDirectMode getDirectPlaybackSupport(in AudioAttributesInternal attr,
+    AudioDirectMode getDirectPlaybackSupport(in AudioAttributes attr,
                                               in AudioConfig config);
 
     /**
      * Query audio profiles available for direct playback on the current output device(s)
      * for the specified audio attributes.
      */
-    AudioProfile[] getDirectProfilesForAttributes(in AudioAttributesInternal attr);
+    AudioProfile[] getDirectProfilesForAttributes(in AudioAttributes attr);
 
     /**
      * Return a list of AudioMixerAttributes that can be used to set preferred mixer attributes
@@ -426,7 +434,7 @@
      *            playback is routed to the given device.
      * @param mixerAttr the preferred mixer attributes.
      */
-    void setPreferredMixerAttributes(in AudioAttributesInternal attr,
+    void setPreferredMixerAttributes(in AudioAttributes attr,
                                      int /* audio_port_handle_t */ portId,
                                      int /* uid_t */ uid,
                                      in AudioMixerAttributesInternal mixerAttr);
@@ -440,7 +448,7 @@
      * @param portId the port id of the device to be routed.
      */
     @nullable AudioMixerAttributesInternal getPreferredMixerAttributes(
-            in AudioAttributesInternal attr,
+            in AudioAttributes attr,
             int /* audio_port_handle_t */ portId);
 
     /**
@@ -453,7 +461,7 @@
      *            preferred mixer attributes. The preferred mixer attributes will only be cleared
      *            if the uid is the same as the owner of current preferred mixer attributes.
      */
-    void clearPreferredMixerAttributes(in AudioAttributesInternal attr,
+    void clearPreferredMixerAttributes(in AudioAttributes attr,
                                        int /* audio_port_handle_t */ portId,
                                        int /* uid_t */ uid);
 
diff --git a/media/libaudioclient/aidl/android/media/ISoundDose.aidl b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
index 0e2a5ab..6cb22ef 100644
--- a/media/libaudioclient/aidl/android/media/ISoundDose.aidl
+++ b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
@@ -49,13 +49,11 @@
     oneway void updateAttenuation(float attenuationDB, int device);
 
     /**
-     * Disable the calculation of sound dose. This has the effect that no MEL
-     * values will be computed on the framework side. The MEL returned from
-     * the IHalSoundDoseCallbacks will be ignored.
-     * Should only be called once at startup if the AudioService does not
-     * support CSD.
+     * Enables/disables the calculation of sound dose. When disabled, no MEL
+     * values are computed on the framework side and the MEL values returned
+     * from the IHalSoundDoseCallbacks are ignored.
      */
-    oneway void disableCsd();
+    oneway void setCsdEnabled(boolean enabled);
 
     /* -------------------------- Test API methods --------------------------
     /** Get the currently used RS2 upper bound. */
diff --git a/media/libaudioclient/aidl/android/media/SurroundSoundConfig.aidl b/media/libaudioclient/aidl/android/media/SurroundSoundConfig.aidl
new file mode 100644
index 0000000..f83fdef
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/SurroundSoundConfig.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.audio.common.AudioFormatDescription;
+
+/**
+ * TODO(b/280077672): This is a temporary copy of the stable
+ * android.hardware.audio.core.SurroundSoundConfig parcelable.
+ * Interfaces from the Core API do not support the CPP backend. This copy will
+ * be removed either by moving the SurroundSoundConfig from core to a.m.a.common or by
+ * switching the framework internal interfaces to the NDK backend.
+ * {@hide}
+ */
+parcelable SurroundSoundConfig {
+    parcelable SurroundFormatFamily {
+        /**
+         * A primaryFormat shall get an entry in the Surround Settings dialog on TV
+         * devices. There must be a corresponding Java ENCODING_... constant
+         * defined in AudioFormat.java, and a display name defined in
+         * AudioFormat.toDisplayName.
+         */
+        AudioFormatDescription primaryFormat;
+        /**
+         * List of formats that shall be equivalent to the primaryFormat from the
+         * users' point of view and don't need a dedicated Surround Settings
+         * dialog entry.
+         */
+        AudioFormatDescription[] subFormats;
+    }
+    SurroundFormatFamily[] formatFamilies;
+}
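The nested SurroundFormatFamily parcelable maps one primary format to its interchangeable sub-formats. A hedged sketch of assembling a single-family config with the generated C++ types (header paths and the helper name assumed):

// Sketch only: one surround format family with no sub-formats.
#include <android/media/SurroundSoundConfig.h>                  // assumed header
#include <android/media/audio/common/AudioFormatDescription.h>  // assumed header

android::media::SurroundSoundConfig makeSingleFamilyConfig(
        const android::media::audio::common::AudioFormatDescription& primary) {
    android::media::SurroundSoundConfig::SurroundFormatFamily family;
    family.primaryFormat = primary;  // subFormats left empty in this sketch
    android::media::SurroundSoundConfig config;
    config.formatFamilies.push_back(family);
    return config;
}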
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
new file mode 100644
index 0000000..1ca3042
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_defaults {
+    name: "libaudioclient_aidl_fuzzer_defaults",
+    static_libs: [
+        "android.hardware.audio.common@7.0-enums",
+        "effect-aidl-cpp",
+        "liblog",
+        "libbinder_random_parcel",
+        "libbase",
+        "libcgrouprc",
+        "libcgrouprc_format",
+        "libcutils",
+        "libjsoncpp",
+        "libmediametricsservice",
+        "libmedia_helper",
+        "libprocessgroup",
+        "shared-file-region-aidl-cpp",
+        "libfakeservicemanager"
+    ],
+    shared_libs: [
+        "libaudioclient",
+        "libaudioflinger",
+        "libmediautils",
+        "libnblog",
+        "libaudioprocessing",
+        "libnbaio",
+        "libpowermanager",
+        "libvibrator",
+        "packagemanager_aidl-cpp",
+        "android.hardware.audio.common-util",
+        "audioclient-types-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "av-types-aidl-cpp",
+        "capture_state_listener-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
+        "libaudiomanager",
+        "libaudiopolicy",
+        "libaudioutils",
+        "libdl",
+        "libutils",
+        "libxml2",
+        "mediametricsservice-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libvndksupport",
+        "libmediametrics",
+        "libbinder_ndk",
+        "libbinder",
+        "libfakeservicemanager",
+        "libactivitymanager_aidl",
+        "libheadtracking",
+        "libaudiopolicyservice",
+        "libsensorprivacy",
+        "libaudiopolicymanagerdefault",
+        "libaudiohal",
+        "libhidlbase",
+        "libpermission",
+        "libaudiohal@7.0",
+    ],
+    header_libs: [
+        "libaudiopolicymanager_interface_headers",
+        "libbinder_headers",
+        "libaudiofoundation_headers",
+        "libmedia_headers",
+        "libaudiohal_headers",
+        "libaudioflinger_headers",
+        "mediautils_headers",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+        hotlists: ["4593311"],
+        description: "The fuzzer targets the APIs of libaudioflinger",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
+    },
+}
+
+cc_fuzz {
+    name: "audioflinger_aidl_fuzzer",
+    srcs: ["audioflinger_aidl_fuzzer.cpp"],
+    defaults: ["libaudioclient_aidl_fuzzer_defaults"],
+}
diff --git a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
new file mode 100644
index 0000000..fac5f53
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
@@ -0,0 +1,218 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <AudioFlinger.h>
+#include <ISchedulingPolicyService.h>
+#include <fakeservicemanager/FakeServiceManager.h>
+#include <android-base/logging.h>
+#include <android/binder_interface_utils.h>
+#include <android/binder_process.h>
+#include <android/media/IAudioPolicyService.h>
+#include <binder/IActivityManager.h>
+#include <binder/IPermissionController.h>
+#include <binder/IServiceManager.h>
+#include <binder/PermissionController.h>
+#include <fuzzbinder/libbinder_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/IAudioFlinger.h>
+#include <mediautils/SchedulingPolicyService.h>
+#include <sensorprivacy/SensorPrivacyManager.h>
+#include <service/AudioPolicyService.h>
+
+using namespace android;
+using namespace android::binder;
+using android::fuzzService;
+
+static sp<media::IAudioFlingerService> gAudioFlingerService;
+
+class FuzzerSchedulingPolicyService : public BnInterface<ISchedulingPolicyService> {
+    int32_t requestPriority(int32_t /*pid_t*/, int32_t /*tid*/, int32_t /*prio*/, bool /*isForApp*/,
+                            bool /*asynchronous*/) {
+        return 0;
+    }
+
+    int32_t requestCpusetBoost(bool /*enable*/, const sp<IBinder>& /*client*/) { return 0; }
+};
+
+class FuzzerPermissionController : public BnInterface<IPermissionController> {
+  public:
+    bool checkPermission(const String16& /*permission*/, int32_t /*pid*/, int32_t /*uid*/) {
+        return true;
+    }
+    int32_t noteOp(const String16& /*op*/, int32_t /*uid*/, const String16& /*packageName*/) {
+        return 0;
+    }
+    void getPackagesForUid(const uid_t /*uid*/, Vector<String16>& /*packages*/) {}
+    bool isRuntimePermission(const String16& /*permission*/) { return true; }
+    int32_t getPackageUid(const String16& /*package*/, int /*flags*/) { return 0; }
+};
+
+class FuzzerSensorPrivacyManager : public BnInterface<hardware::ISensorPrivacyManager> {
+  public:
+    Status supportsSensorToggle(int32_t /*toggleType*/, int32_t /*sensor*/,
+                                bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status addSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    Status addToggleSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status removeSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    Status removeToggleSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    Status isSensorPrivacyEnabled(bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status isCombinedToggleSensorPrivacyEnabled(int32_t /*sensor*/,
+                                                bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status isToggleSensorPrivacyEnabled(int32_t /*toggleType*/, int32_t /*sensor*/,
+                                        bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status setSensorPrivacy(bool /*enable*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status setToggleSensorPrivacy(int32_t /*userId*/, int32_t /*source*/, int32_t /*sensor*/,
+                                  bool /*enable*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status setToggleSensorPrivacyForProfileGroup(int32_t /*userId*/, int32_t /*source*/,
+                                                 int32_t /*sensor*/, bool /*enable*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+};
+
+class FuzzerActivityManager : public BnInterface<IActivityManager> {
+  public:
+    int32_t openContentUri(const String16& /*stringUri*/) override { return 0; }
+
+    status_t registerUidObserver(const sp<IUidObserver>& /*observer*/, const int32_t /*event*/,
+                                 const int32_t /*cutpoint*/,
+                                 const String16& /*callingPackage*/) override {
+        return OK;
+    }
+
+    status_t unregisterUidObserver(const sp<IUidObserver>& /*observer*/) override { return OK; }
+
+    bool isUidActive(const uid_t /*uid*/, const String16& /*callingPackage*/) override {
+        return true;
+    }
+
+    int32_t getUidProcessState(const uid_t /*uid*/, const String16& /*callingPackage*/) override {
+        return ActivityManager::PROCESS_STATE_UNKNOWN;
+    }
+
+    status_t checkPermission(const String16& /*permission*/, const pid_t /*pid*/,
+                             const uid_t /*uid*/, int32_t* /*outResult*/) override {
+        return NO_ERROR;
+    }
+
+    status_t registerUidObserverForUids(const sp<IUidObserver>& /*observer*/ ,
+                                        const int32_t /*event*/ ,
+                                        const int32_t /*cutpoint*/ ,
+                                        const String16& /*callingPackage*/ ,
+                                        const int32_t uids[] ,
+                                        size_t /*nUids*/ ,
+                                        /*out*/ sp<IBinder>& /*observerToken*/ ) {
+        (void)uids;
+        return OK;
+    }
+
+    status_t addUidToObserver(const sp<IBinder>& /*observerToken*/ ,
+                              const String16& /*callingPackage*/ ,
+                              int32_t /*uid*/ ) override {
+        return NO_ERROR;
+    }
+
+    status_t removeUidFromObserver(const sp<IBinder>& /*observerToken*/ ,
+                                   const String16& /*callingPackage*/ ,
+                                   int32_t /*uid*/ ) override {
+        return NO_ERROR;
+    }
+
+    status_t logFgsApiBegin(int32_t /*apiType*/ , int32_t /*appUid*/ ,
+                            int32_t /*appPid*/ ) override {
+        return NO_ERROR;
+    }
+    status_t logFgsApiEnd(int32_t /*apiType*/ , int32_t /*appUid*/ ,
+                          int32_t /*appPid*/ ) override {
+        return NO_ERROR;
+    }
+    status_t logFgsApiStateChanged(int32_t /*apiType*/ , int32_t /*state*/ ,
+                                   int32_t /*appUid*/ ,
+                                   int32_t /*appPid*/ ) override {
+        return NO_ERROR;
+    }
+};
+
+extern "C" int LLVMFuzzerInitialize(int* /* argc */, char*** /* argv */) {
+    /* Create a FakeServiceManager instance and add required services */
+    sp<FakeServiceManager> fakeServiceManager = new FakeServiceManager();
+    setDefaultServiceManager(fakeServiceManager);
+    ABinderProcess_setThreadPoolMaxThreadCount(0);
+    sp<FuzzerActivityManager> am = new FuzzerActivityManager();
+    fakeServiceManager->addService(String16("activity"), IInterface::asBinder(am));
+
+    sp<FuzzerSensorPrivacyManager> sensorPrivacyManager = new FuzzerSensorPrivacyManager();
+    fakeServiceManager->addService(String16("sensor_privacy"),
+                                   IInterface::asBinder(sensorPrivacyManager));
+    sp<FuzzerPermissionController> permissionController = new FuzzerPermissionController();
+    fakeServiceManager->addService(String16("permission"),
+                                   IInterface::asBinder(permissionController));
+
+    sp<FuzzerSchedulingPolicyService> schedulingService = new FuzzerSchedulingPolicyService();
+    fakeServiceManager->addService(String16("scheduling_policy"),
+                                   IInterface::asBinder(schedulingService));
+
+    const auto audioFlingerObj = sp<AudioFlinger>::make();
+    const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlingerObj);
+
+    fakeServiceManager->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+                                   IInterface::asBinder(afAdapter), false /* allowIsolated */,
+                                   IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
+
+    const auto audioPolicyService = sp<AudioPolicyService>::make();
+    fakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
+                                   false /* allowIsolated */,
+                                   IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
+
+    sp<IBinder> binder =
+            fakeServiceManager->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
+    gAudioFlingerService = interface_cast<media::IAudioFlingerService>(binder);
+    return 0;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    if (!gAudioFlingerService) {
+        return 0;
+    }
+
+    fuzzService(media::IAudioFlingerService::asBinder(gAudioFlingerService),
+                FuzzedDataProvider(data, size));
+
+    return 0;
+}
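
The pattern above — registering fake dependencies in a FakeServiceManager during LLVMFuzzerInitialize() and then letting fuzzService() replay arbitrary transactions — generalizes to other binder services. A minimal sketch of that pattern for a hypothetical service; the FooService name and the header paths are assumptions, not part of this change:

    // Minimal sketch of the same service-fuzzing pattern (hypothetical service).
    #include <fuzzbinder/libbinder_driver.h>            // fuzzService() (assumed path)
    #include <fakeservicemanager/FakeServiceManager.h>  // assumed path
    #include <fuzzer/FuzzedDataProvider.h>
    #include <binder/Binder.h>
    #include <binder/IServiceManager.h>
    #include <utils/String16.h>

    using namespace android;

    namespace {
    // Stand-in for the Bn* implementation that would actually be fuzzed.
    class FooService : public BBinder {};
    sp<IBinder> gFooService;
    }  // namespace

    extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
        // Route defaultServiceManager() lookups to an in-process fake so that the
        // service under test never reaches the real servicemanager.
        sp<FakeServiceManager> fakeSm = new FakeServiceManager();
        setDefaultServiceManager(fakeSm);
        gFooService = new FooService();
        fakeSm->addService(String16("foo"), gFooService);
        return 0;
    }

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        if (!gFooService) return 0;
        // fuzzService() interprets 'data' as a sequence of transaction codes and
        // parcels and replays them against the binder object.
        fuzzService(gFooService, FuzzedDataProvider(data, size));
        return 0;
    }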
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index b1feb60..6080314 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -80,5 +80,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libaudioflinger",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/libaudioclient/include/media/AidlConversion.h b/media/libaudioclient/include/media/AidlConversion.h
index 5bd0114..10f6d4a 100644
--- a/media/libaudioclient/include/media/AidlConversion.h
+++ b/media/libaudioclient/include/media/AidlConversion.h
@@ -21,10 +21,8 @@
 
 #include <system/audio.h>
 
-#include <android/media/AudioAttributesInternal.h>
 #include <android/media/AudioClient.h>
 #include <android/media/AudioDirectMode.h>
-#include <android/media/AudioFlag.h>
 #include <android/media/AudioIoConfigEvent.h>
 #include <android/media/AudioIoDescriptor.h>
 #include <android/media/AudioPortFw.h>
@@ -72,11 +70,6 @@
         media::audio::common::AudioPortDeviceExt* aidl,
         media::AudioPortDeviceExtSys* aidlDeviceExt);
 
-ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
-        media::audio::common::AudioStreamType aidl);
-ConversionResult<media::audio::common::AudioStreamType>
-legacy2aidl_audio_stream_type_t_AudioStreamType(audio_stream_type_t legacy);
-
 ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortMixExt(
         const media::audio::common::AudioPortMixExt& aidl, media::AudioPortRole role,
         const media::AudioPortMixExtSys& aidlMixExt);
@@ -110,21 +103,6 @@
 ConversionResult<media::AudioClient> legacy2aidl_AudioClient_AudioClient(
         const AudioClient& legacy);
 
-ConversionResult<audio_flags_mask_t>
-aidl2legacy_AudioFlag_audio_flags_mask_t(media::AudioFlag aidl);
-ConversionResult<media::AudioFlag>
-legacy2aidl_audio_flags_mask_t_AudioFlag(audio_flags_mask_t legacy);
-
-ConversionResult<audio_flags_mask_t>
-aidl2legacy_int32_t_audio_flags_mask_t_mask(int32_t aidl);
-ConversionResult<int32_t>
-legacy2aidl_audio_flags_mask_t_int32_t_mask(audio_flags_mask_t legacy);
-
-ConversionResult<audio_attributes_t>
-aidl2legacy_AudioAttributesInternal_audio_attributes_t(const media::AudioAttributesInternal& aidl);
-ConversionResult<media::AudioAttributesInternal>
-legacy2aidl_audio_attributes_t_AudioAttributesInternal(const audio_attributes_t& legacy);
-
 ConversionResult<sp<IMemory>>
 aidl2legacy_SharedFileRegion_IMemory(const media::SharedFileRegion& aidl);
 ConversionResult<media::SharedFileRegion>
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index d033d4f..8f8c9dd 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -23,6 +23,8 @@
 #include <vector>
 
 #include <android/content/AttributionSourceState.h>
+#include <android/media/AudioPolicyConfig.h>
+#include <android/media/AudioPortFw.h>
 #include <android/media/AudioVibratorInfo.h>
 #include <android/media/BnAudioFlingerClient.h>
 #include <android/media/BnAudioPolicyServiceClient.h>
@@ -126,6 +128,9 @@
     // set audio mode in audio hardware
     static status_t setMode(audio_mode_t mode);
 
+    // test API: switch HALs into a mode that simulates external device connections
+    static status_t setSimulateDeviceConnections(bool enabled);
+
     // returns true in *state if tracks are active on the specified stream or have been active
     // in the past inPastMs milliseconds
     static status_t isStreamActive(audio_stream_type_t stream, bool *state, uint32_t inPastMs);
@@ -425,6 +430,9 @@
                                    struct audio_port_v7 *ports,
                                    unsigned int *generation);
 
+    static status_t listDeclaredDevicePorts(media::AudioPortRole role,
+                                            std::vector<media::AudioPortFw>* result);
+
     /* Get attributes for a given audio port. On input, the port
      * only needs the 'id' field to be filled in. */
     static status_t getAudioPort(struct audio_port_v7 *port);
@@ -656,6 +664,8 @@
                                                   audio_port_handle_t portId,
                                                   uid_t uid);
 
+    static status_t getAudioPolicyConfig(media::AudioPolicyConfig *config);
+
     // A listener for capture state changes.
     class CaptureStateListener : public virtual RefBase {
     public:
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 31f81be..8f712db 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1126,6 +1126,9 @@
 
             bool isPlaying() {
                 AutoMutex lock(mLock);
+                return isPlaying_l();
+            }
+            bool isPlaying_l() {
                 return mState == STATE_ACTIVE || mState == STATE_STOPPING;
             }
 
@@ -1262,6 +1265,11 @@
     audio_track_cblk_t*     mCblk;                  // re-load after mLock.unlock()
     audio_io_handle_t       mOutput = AUDIO_IO_HANDLE_NONE; // from AudioSystem::getOutputForAttr()
 
+    // References to the shared memory and proxy, held between obtainBuffer and releaseBuffer
+    // to keep the shared memory valid while the obtained data is being processed.
+    sp<IMemory>               mCblkMemoryObtainBufferRef GUARDED_BY(mLock);
+    sp<AudioTrackClientProxy> mProxyObtainBufferRef GUARDED_BY(mLock);
+
     sp<AudioTrackThread>    mAudioTrackThread;
     bool                    mThreadCanCallJava;
 
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 02d0511..2e2ef65 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -361,7 +361,10 @@
 
     virtual int32_t getAAudioHardwareBurstMinUsec() = 0;
 
-    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port,
+                                             media::DeviceConnectedState state) = 0;
+
+    virtual status_t setSimulateDeviceConnections(bool enabled) = 0;
 
     virtual status_t setRequestedLatencyMode(
             audio_io_handle_t output, audio_latency_mode_t mode) = 0;
@@ -379,6 +382,8 @@
     virtual status_t isBluetoothVariableLatencyEnabled(bool* enabled) = 0;
 
     virtual status_t supportsBluetoothVariableLatency(bool* support) = 0;
+
+    virtual status_t getAudioPolicyConfig(media::AudioPolicyConfig* output) = 0;
 };
 
 /**
@@ -479,7 +484,9 @@
             std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) override;
     int32_t getAAudioMixerBurstCount() override;
     int32_t getAAudioHardwareBurstMinUsec() override;
-    status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) override;
+    status_t setDeviceConnectedState(const struct audio_port_v7 *port,
+                                     media::DeviceConnectedState state) override;
+    status_t setSimulateDeviceConnections(bool enabled) override;
     status_t setRequestedLatencyMode(audio_io_handle_t output,
             audio_latency_mode_t mode) override;
     status_t getSupportedLatencyModes(
@@ -490,6 +497,7 @@
     status_t getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
                                    sp<media::ISoundDose>* soundDose) override;
     status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
+    status_t getAudioPolicyConfig(media::AudioPolicyConfig* output) override;
 
 private:
     const sp<media::IAudioFlingerService> mDelegate;
@@ -578,6 +586,7 @@
             GET_AAUDIO_MIXER_BURST_COUNT = media::BnAudioFlingerService::TRANSACTION_getAAudioMixerBurstCount,
             GET_AAUDIO_HARDWARE_BURST_MIN_USEC = media::BnAudioFlingerService::TRANSACTION_getAAudioHardwareBurstMinUsec,
             SET_DEVICE_CONNECTED_STATE = media::BnAudioFlingerService::TRANSACTION_setDeviceConnectedState,
+            SET_SIMULATE_DEVICE_CONNECTIONS = media::BnAudioFlingerService::TRANSACTION_setSimulateDeviceConnections,
             SET_REQUESTED_LATENCY_MODE = media::BnAudioFlingerService::TRANSACTION_setRequestedLatencyMode,
             GET_SUPPORTED_LATENCY_MODES = media::BnAudioFlingerService::TRANSACTION_getSupportedLatencyModes,
             SET_BLUETOOTH_VARIABLE_LATENCY_ENABLED =
@@ -588,6 +597,8 @@
                     media::BnAudioFlingerService::TRANSACTION_supportsBluetoothVariableLatency,
             GET_SOUND_DOSE_INTERFACE = media::BnAudioFlingerService::TRANSACTION_getSoundDoseInterface,
             INVALIDATE_TRACKS = media::BnAudioFlingerService::TRANSACTION_invalidateTracks,
+            GET_AUDIO_POLICY_CONFIG =
+                    media::BnAudioFlingerService::TRANSACTION_getAudioPolicyConfig,
         };
 
     protected:
@@ -707,7 +718,9 @@
             std::vector<media::audio::common::AudioMMapPolicyInfo> *_aidl_return) override;
     Status getAAudioMixerBurstCount(int32_t* _aidl_return) override;
     Status getAAudioHardwareBurstMinUsec(int32_t* _aidl_return) override;
-    Status setDeviceConnectedState(const media::AudioPortFw& port, bool connected) override;
+    Status setDeviceConnectedState(const media::AudioPortFw& port,
+                                   media::DeviceConnectedState state) override;
+    Status setSimulateDeviceConnections(bool enabled) override;
     Status setRequestedLatencyMode(
             int output, media::audio::common::AudioLatencyMode mode) override;
     Status getSupportedLatencyModes(int output,
@@ -718,6 +731,7 @@
     Status getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
                                  sp<media::ISoundDose>* _aidl_return) override;
     Status invalidateTracks(const std::vector<int32_t>& portIds) override;
+    Status getAudioPolicyConfig(media::AudioPolicyConfig* _aidl_return) override;
 private:
     const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
 };
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index 2189521..1e8dcca 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -14,6 +14,12 @@
         "-Wall",
         "-Werror",
     ],
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
     sanitize: {
         misc_undefined: [
             "unsigned-integer-overflow",
@@ -22,37 +28,35 @@
     },
 }
 
-cc_test {
-    name: "audio_aidl_conversion_tests",
+cc_defaults {
+    name: "audio_aidl_conversion_test_defaults",
     defaults: [
         "libaudioclient_tests_defaults",
         "latest_android_media_audio_common_types_cpp_static",
     ],
-    srcs: ["audio_aidl_legacy_conversion_tests.cpp"],
-    shared_libs: [
-        "libbinder",
-        "libcutils",
-        "liblog",
-        "libutils",
-    ],
     static_libs: [
-        "libaudioclient_aidl_conversion",
-        "libaudio_aidl_conversion_common_cpp",
         "audioclient-types-aidl-cpp",
         "av-types-aidl-cpp",
+        "libaudio_aidl_conversion_common_cpp",
+        "libaudioclient_aidl_conversion",
         "libstagefright_foundation",
     ],
 }
 
 cc_test {
+    name: "audio_aidl_conversion_tests",
+    defaults: [
+        "audio_aidl_conversion_test_defaults",
+    ],
+    srcs: ["audio_aidl_legacy_conversion_tests.cpp"],
+}
+
+cc_test {
     name: "audio_aidl_status_tests",
     defaults: ["libaudioclient_tests_defaults"],
     srcs: ["audio_aidl_status_tests.cpp"],
     shared_libs: [
         "libaudioclient_aidl_conversion",
-        "libbinder",
-        "libcutils",
-        "libutils",
     ],
 }
 
@@ -70,9 +74,6 @@
     shared_libs: [
         "framework-permission-aidl-cpp",
         "libaudioclient",
-        "libbinder",
-        "libcutils",
-        "libutils",
     ],
     data: ["track_test_input_*.txt"],
 }
@@ -89,35 +90,23 @@
         "libmediametrics_headers",
     ],
     shared_libs: [
-        "libaudioclient",
-        "libbinder",
-        "libcutils",
-        "libutils",
         "framework-permission-aidl-cpp",
+        "libaudioclient",
     ],
     data: ["record_test_input_*.txt"],
 }
 
 cc_defaults {
     name: "libaudioclient_gtests_defaults",
-    cflags: [
-        "-Wall",
-        "-Werror",
-    ],
     defaults: [
-        "latest_android_media_audio_common_types_cpp_static",
+        "audio_aidl_conversion_test_defaults",
     ],
     shared_libs: [
         "capture_state_listener-aidl-cpp",
         "framework-permission-aidl-cpp",
-        "libaudioclient_aidl_conversion",
-        "libaudio_aidl_conversion_common_cpp",
         "libbase",
-        "libbinder",
         "libcgrouprc",
-        "libcutils",
         "libdl",
-        "liblog",
         "libmedia",
         "libmediametrics",
         "libmediautils",
@@ -125,8 +114,6 @@
         "libnblog",
         "libprocessgroup",
         "libshmemcompat",
-        "libstagefright_foundation",
-        "libutils",
         "libxml2",
         "mediametricsservice-aidl-cpp",
         "packagemanager_aidl-cpp",
@@ -148,7 +135,6 @@
     ],
     data: ["bbb*.raw"],
     test_config_template: "audio_test_template.xml",
-    test_suites: ["device-tests"],
 }
 
 cc_test {
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index f651a37..976a532 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -15,6 +15,7 @@
  */
 
 #include <iostream>
+#include <string>
 
 #include <gtest/gtest.h>
 
@@ -32,6 +33,7 @@
 using media::AudioPortType;
 using media::audio::common::AudioChannelLayout;
 using media::audio::common::AudioDevice;
+using media::audio::common::AudioDeviceAddress;
 using media::audio::common::AudioDeviceDescription;
 using media::audio::common::AudioDeviceType;
 using media::audio::common::AudioEncapsulationMetadataType;
@@ -92,6 +94,11 @@
             AudioChannelLayout::LAYOUT_STEREO);
 }
 
+AudioChannelLayout make_ACL_Tri() {
+    return AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+            AudioChannelLayout::LAYOUT_TRI);
+}
+
 AudioChannelLayout make_ACL_LayoutArbitrary() {
     return AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
             // Use channels that exist both for input and output,
@@ -131,6 +138,14 @@
     return make_AudioDeviceDescription(AudioDeviceType::IN_DEFAULT);
 }
 
+AudioDeviceDescription make_ADD_MicIn() {
+    return make_AudioDeviceDescription(AudioDeviceType::IN_MICROPHONE);
+}
+
+AudioDeviceDescription make_ADD_RSubmixIn() {
+    return make_AudioDeviceDescription(AudioDeviceType::IN_SUBMIX);
+}
+
 AudioDeviceDescription make_ADD_DefaultOut() {
     return make_AudioDeviceDescription(AudioDeviceType::OUT_DEFAULT);
 }
@@ -145,6 +160,39 @@
                                        AudioDeviceDescription::CONNECTION_BT_SCO());
 }
 
+AudioDeviceDescription make_ADD_BtA2dpHeadphone() {
+    return make_AudioDeviceDescription(AudioDeviceType::OUT_HEADPHONE,
+                                       AudioDeviceDescription::CONNECTION_BT_A2DP());
+}
+
+AudioDeviceDescription make_ADD_BtLeHeadset() {
+    return make_AudioDeviceDescription(AudioDeviceType::OUT_HEADSET,
+                                       AudioDeviceDescription::CONNECTION_BT_LE());
+}
+
+AudioDeviceDescription make_ADD_BtLeBroadcast() {
+    return make_AudioDeviceDescription(AudioDeviceType::OUT_BROADCAST,
+                                       AudioDeviceDescription::CONNECTION_BT_LE());
+}
+
+AudioDeviceDescription make_ADD_IpV4Device() {
+    return make_AudioDeviceDescription(AudioDeviceType::OUT_DEVICE,
+                                       AudioDeviceDescription::CONNECTION_IP_V4());
+}
+
+AudioDeviceDescription make_ADD_UsbHeadset() {
+    return make_AudioDeviceDescription(AudioDeviceType::OUT_HEADSET,
+                                       AudioDeviceDescription::CONNECTION_USB());
+}
+
+AudioDevice make_AudioDevice(const AudioDeviceDescription& type,
+                             const AudioDeviceAddress& address) {
+    AudioDevice result;
+    result.type = type;
+    result.address = address;
+    return result;
+}
+
 AudioFormatDescription make_AudioFormatDescription(AudioFormatType type) {
     AudioFormatDescription result;
     result.type = type;
@@ -268,8 +316,8 @@
         AudioChannelLayoutRoundTrip, AudioChannelLayoutRoundTripTest,
         testing::Combine(
                 testing::Values(AudioChannelLayout{}, make_ACL_Invalid(), make_ACL_Stereo(),
-                                make_ACL_LayoutArbitrary(), make_ACL_ChannelIndex2(),
-                                make_ACL_ChannelIndexArbitrary(),
+                                make_ACL_Tri(), make_ACL_LayoutArbitrary(),
+                                make_ACL_ChannelIndex2(), make_ACL_ChannelIndexArbitrary(),
                                 AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
                                         AudioChannelLayout::CHANNEL_FRONT_LEFT),
                                 AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
@@ -390,6 +438,48 @@
                                          make_ADD_DefaultOut(), make_ADD_WiredHeadset(),
                                          make_ADD_BtScoHeadset()));
 
+class AudioDeviceRoundTripTest : public testing::TestWithParam<AudioDevice> {};
+TEST_P(AudioDeviceRoundTripTest, Aidl2Legacy2Aidl) {
+    const auto initial = GetParam();
+    audio_devices_t legacyType;
+    String8 legacyAddress;
+    status_t status = aidl2legacy_AudioDevice_audio_device(initial, &legacyType, &legacyAddress);
+    ASSERT_EQ(OK, status);
+    auto convBack = legacy2aidl_audio_device_AudioDevice(legacyType, legacyAddress);
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+INSTANTIATE_TEST_SUITE_P(
+        AudioDeviceRoundTrip, AudioDeviceRoundTripTest,
+        testing::Values(
+                make_AudioDevice(make_ADD_MicIn(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("bottom")),
+                make_AudioDevice(make_ADD_RSubmixIn(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("1:2-in-3")),
+                // The case of a "blueprint" device port for an external device.
+                make_AudioDevice(make_ADD_BtScoHeadset(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("")),
+                make_AudioDevice(make_ADD_BtScoHeadset(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::mac>(
+                                         std::vector<uint8_t>{1, 2, 3, 4, 5, 6})),
+                // Another "blueprint"
+                make_AudioDevice(make_ADD_BtA2dpHeadphone(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("")),
+                make_AudioDevice(make_ADD_BtA2dpHeadphone(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::mac>(
+                                         std::vector<uint8_t>{1, 2, 3, 4, 5, 6})),
+                make_AudioDevice(make_ADD_BtLeHeadset(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::mac>(
+                                         std::vector<uint8_t>{1, 2, 3, 4, 5, 6})),
+                make_AudioDevice(make_ADD_BtLeBroadcast(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("42")),
+                make_AudioDevice(make_ADD_IpV4Device(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::ipv4>(
+                                         std::vector<uint8_t>{192, 168, 0, 1})),
+                make_AudioDevice(make_ADD_UsbHeadset(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::alsa>(
+                                         std::vector<int32_t>{1, 2}))));
+
 class AudioFormatDescriptionRoundTripTest : public testing::TestWithParam<AudioFormatDescription> {
 };
 TEST_P(AudioFormatDescriptionRoundTripTest, Aidl2Legacy2Aidl) {
diff --git a/media/libaudioclient/tests/audioclient_serialization_tests.cpp b/media/libaudioclient/tests/audioclient_serialization_tests.cpp
index d1e3d16..707b9b3 100644
--- a/media/libaudioclient/tests/audioclient_serialization_tests.cpp
+++ b/media/libaudioclient/tests/audioclient_serialization_tests.cpp
@@ -103,7 +103,7 @@
     attr.usage = kUsages[rand() % kUsages.size()];
     attr.source = kInputSources[rand() % kInputSources.size()];
     // attr.flags -> [0, (1 << (CAPTURE_PRIVATE + 1) - 1)]
-    attr.flags = static_cast<audio_flags_mask_t>(rand() & 0x3fff);
+    attr.flags = static_cast<audio_flags_mask_t>(rand() & 0x3ffd);  // exclude AUDIO_FLAG_SECURE
     sprintf(attr.tags, "%s",
             CreateRandomString((int)rand() % (AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1)).c_str());
 }
@@ -119,6 +119,7 @@
 TEST_F(SerializationTest, AudioProductStrategyBinderization) {
     for (int j = 0; j < 512; j++) {
         const std::string name{"Test APSBinderization for seed::" + std::to_string(mSeed)};
+        SCOPED_TRACE(name);
         std::vector<VolumeGroupAttributes> volumeGroupAttrVector;
         for (auto i = 0; i < 16; i++) {
             audio_attributes_t attributes;
@@ -132,20 +133,19 @@
         AudioProductStrategy aps{name, volumeGroupAttrVector, psId};
 
         Parcel p;
-        EXPECT_EQ(NO_ERROR, aps.writeToParcel(&p)) << name;
+        EXPECT_EQ(NO_ERROR, aps.writeToParcel(&p));
 
         AudioProductStrategy apsCopy;
         p.setDataPosition(0);
-        EXPECT_EQ(NO_ERROR, apsCopy.readFromParcel(&p)) << name;
-        EXPECT_EQ(apsCopy.getName(), name) << name;
-        EXPECT_EQ(apsCopy.getId(), psId) << name;
+        EXPECT_EQ(NO_ERROR, apsCopy.readFromParcel(&p));
+        EXPECT_EQ(apsCopy.getName(), name);
+        EXPECT_EQ(apsCopy.getId(), psId);
         auto avec = apsCopy.getVolumeGroupAttributes();
-        EXPECT_EQ(avec.size(), volumeGroupAttrVector.size()) << name;
-        for (int i = 0; i < volumeGroupAttrVector.size(); i++) {
-            EXPECT_EQ(avec[i].getGroupId(), volumeGroupAttrVector[i].getGroupId()) << name;
-            EXPECT_EQ(avec[i].getStreamType(), volumeGroupAttrVector[i].getStreamType()) << name;
-            EXPECT_TRUE(avec[i].getAttributes() == volumeGroupAttrVector[i].getAttributes())
-                    << name;
+        EXPECT_EQ(avec.size(), volumeGroupAttrVector.size());
+        for (int i = 0; i < std::min(avec.size(), volumeGroupAttrVector.size()); i++) {
+            EXPECT_EQ(avec[i].getGroupId(), volumeGroupAttrVector[i].getGroupId());
+            EXPECT_EQ(avec[i].getStreamType(), volumeGroupAttrVector[i].getStreamType());
+            EXPECT_TRUE(avec[i].getAttributes() == volumeGroupAttrVector[i].getAttributes());
         }
     }
 }
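
For reference, the mask change above removes exactly one bit: assuming the audio_flags_mask_t values from system/audio.h (AUDIO_FLAG_SECURE == 0x2, AUDIO_FLAG_CAPTURE_PRIVATE == 0x2000), 0x3fff covers all flags up to and including CAPTURE_PRIVATE, and 0x3ffd is the same mask with the SECURE bit cleared:

    // Worked out with static_asserts; the numeric values are taken from system/audio.h
    // and are stated here as an assumption, not introduced by this change.
    #include <system/audio.h>

    static_assert(AUDIO_FLAG_SECURE == 0x2);
    static_assert(AUDIO_FLAG_CAPTURE_PRIVATE == 0x2000);
    static_assert(((AUDIO_FLAG_CAPTURE_PRIVATE << 1) - 1) == 0x3fff);  // all flags
    static_assert((0x3fff & ~AUDIO_FLAG_SECURE) == 0x3ffd);            // minus SECURE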
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index 2b9b4fa..45baa94 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -18,12 +18,19 @@
 
 #include <string.h>
 
+#include <set>
+
 #include <gtest/gtest.h>
+#include <media/AidlConversionCppNdk.h>
 #include <media/IAudioFlinger.h>
 #include <utils/Log.h>
 
 #include "audio_test_utils.h"
 
+using android::media::audio::common::AudioDeviceAddress;
+using android::media::audio::common::AudioDeviceDescription;
+using android::media::audio::common::AudioDeviceType;
+using android::media::audio::common::AudioPortExt;
 using namespace android;
 
 void anyPatchContainsInputDevice(audio_port_handle_t deviceId, bool& res) {
@@ -579,3 +586,106 @@
     EXPECT_EQ(NO_ERROR, AudioSystem::setUserIdDeviceAffinities(userId, outputDevices));
     EXPECT_EQ(NO_ERROR, AudioSystem::removeUserIdDeviceAffinities(userId));
 }
+
+namespace {
+
+class WithSimulatedDeviceConnections {
+  public:
+    WithSimulatedDeviceConnections()
+        : mIsSupported(AudioSystem::setSimulateDeviceConnections(true) == OK) {}
+    ~WithSimulatedDeviceConnections() {
+        if (mIsSupported) {
+            if (status_t status = AudioSystem::setSimulateDeviceConnections(false); status != OK) {
+                ALOGE("Error restoring device connections simulation state: %d", status);
+            }
+        }
+    }
+    bool isSupported() const { return mIsSupported; }
+
+  private:
+    const bool mIsSupported;
+};
+
+android::media::audio::common::AudioPort GenerateUniqueDeviceAddress(
+        const android::media::audio::common::AudioPort& port) {
+    static int nextId = 0;
+    using Tag = AudioDeviceAddress::Tag;
+    AudioDeviceAddress address;
+    switch (suggestDeviceAddressTag(port.ext.get<AudioPortExt::Tag::device>().device.type)) {
+        case Tag::id:
+            address = AudioDeviceAddress::make<Tag::id>(std::to_string(++nextId));
+            break;
+        case Tag::mac:
+            address = AudioDeviceAddress::make<Tag::mac>(
+                    std::vector<uint8_t>{1, 2, 3, 4, 5, static_cast<uint8_t>(++nextId & 0xff)});
+            break;
+        case Tag::ipv4:
+            address = AudioDeviceAddress::make<Tag::ipv4>(
+                    std::vector<uint8_t>{192, 168, 0, static_cast<uint8_t>(++nextId & 0xff)});
+            break;
+        case Tag::ipv6:
+            address = AudioDeviceAddress::make<Tag::ipv6>(std::vector<int32_t>{
+                    0xfc00, 0x0123, 0x4567, 0x89ab, 0xcdef, 0, 0, ++nextId & 0xffff});
+            break;
+        case Tag::alsa:
+            address = AudioDeviceAddress::make<Tag::alsa>(std::vector<int32_t>{1, ++nextId});
+            break;
+    }
+    android::media::audio::common::AudioPort result = port;
+    result.ext.get<AudioPortExt::Tag::device>().device.address = std::move(address);
+    return result;
+}
+
+}  // namespace
+
+TEST_F(AudioSystemTest, SetDeviceConnectedState) {
+    WithSimulatedDeviceConnections connSim;
+    if (!connSim.isSupported()) {
+        GTEST_SKIP() << "Simulation of external device connections not supported";
+    }
+    std::vector<media::AudioPortFw> ports;
+    ASSERT_EQ(OK, AudioSystem::listDeclaredDevicePorts(media::AudioPortRole::NONE, &ports));
+    if (ports.empty()) {
+        GTEST_SKIP() << "No ports returned by the audio system";
+    }
+    const std::set<AudioDeviceType> typesToUse{
+            AudioDeviceType::IN_DEVICE,       AudioDeviceType::IN_HEADSET,
+            AudioDeviceType::IN_MICROPHONE,   AudioDeviceType::OUT_DEVICE,
+            AudioDeviceType::OUT_HEADPHONE,   AudioDeviceType::OUT_HEADSET,
+            AudioDeviceType::OUT_HEARING_AID, AudioDeviceType::OUT_SPEAKER};
+    std::vector<media::AudioPortFw> externalDevicePorts;
+    for (const auto& port : ports) {
+        if (const auto& device = port.hal.ext.get<AudioPortExt::device>().device;
+            !device.type.connection.empty() && typesToUse.count(device.type.type)) {
+            externalDevicePorts.push_back(port);
+        }
+    }
+    if (externalDevicePorts.empty()) {
+        GTEST_SKIP() << "No ports for considered non-attached devices";
+    }
+    for (auto& port : externalDevicePorts) {
+        android::media::audio::common::AudioPort aidlPort = GenerateUniqueDeviceAddress(port.hal);
+        SCOPED_TRACE(aidlPort.toString());
+        audio_devices_t type;
+        char address[AUDIO_DEVICE_MAX_ADDRESS_LEN];
+        status_t status = aidl2legacy_AudioDevice_audio_device(
+                aidlPort.ext.get<AudioPortExt::Tag::device>().device, &type, address);
+        ASSERT_EQ(OK, status);
+        audio_policy_dev_state_t deviceState = AudioSystem::getDeviceConnectionState(type, address);
+        EXPECT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, deviceState);
+        if (deviceState != AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) continue;
+        // !!! Instead of the default format, use each format from 'ext.encodedFormats'
+        // !!! if they are not empty
+        status = AudioSystem::setDeviceConnectionState(AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                       aidlPort, AUDIO_FORMAT_DEFAULT);
+        EXPECT_EQ(OK, status);
+        if (status != OK) continue;
+        deviceState = AudioSystem::getDeviceConnectionState(type, address);
+        EXPECT_EQ(AUDIO_POLICY_DEVICE_STATE_AVAILABLE, deviceState);
+        status = AudioSystem::setDeviceConnectionState(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                       aidlPort, AUDIO_FORMAT_DEFAULT);
+        EXPECT_EQ(OK, status);
+        deviceState = AudioSystem::getDeviceConnectionState(type, address);
+        EXPECT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, deviceState);
+    }
+}
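
listDeclaredDevicePorts() together with the 'connection' field is also how a client can tell external (dynamically attached) device ports apart from built-in ones; a condensed sketch of that discovery step, extracted from the test loop above, with a hypothetical helper name:

    #include <vector>
    #include <android/media/AudioPortFw.h>
    #include <media/AudioSystem.h>

    using ::android::media::audio::common::AudioPortExt;

    std::vector<android::media::AudioPortFw> findExternalDevicePorts() {
        std::vector<android::media::AudioPortFw> ports;
        if (android::AudioSystem::listDeclaredDevicePorts(
                    android::media::AudioPortRole::NONE, &ports) != android::OK) {
            return {};
        }
        std::vector<android::media::AudioPortFw> external;
        for (const auto& port : ports) {
            // A non-empty 'connection' marks a dynamically attached device
            // (BT, USB, IP, ...) as opposed to a built-in speaker or microphone.
            const auto& device = port.hal.ext.get<AudioPortExt::Tag::device>().device;
            if (!device.type.connection.empty()) {
                external.push_back(port);
            }
        }
        return external;
    }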
diff --git a/media/libaudiofoundation/AudioProfile.cpp b/media/libaudiofoundation/AudioProfile.cpp
index 2170cd8..999e263 100644
--- a/media/libaudiofoundation/AudioProfile.cpp
+++ b/media/libaudiofoundation/AudioProfile.cpp
@@ -327,6 +327,24 @@
     return false;
 }
 
+const SampleRateSet AudioProfileVector::getSampleRatesFor(audio_format_t format) const {
+    for (const auto& profile : *this) {
+        if (profile->getFormat() == format) {
+            return profile->getSampleRates();
+        }
+    }
+    return {};
+}
+
+const ChannelMaskSet AudioProfileVector::getChannelMasksFor(audio_format_t format) const {
+    for (const auto& profile : *this) {
+        if (profile->getFormat() == format) {
+            return profile->getChannels();
+        }
+    }
+    return {};
+}
+
 bool AudioProfileVector::contains(const sp<AudioProfile>& profile, bool ignoreDynamicFlags) const
 {
     for (const auto& audioProfile : *this) {
diff --git a/media/libaudiofoundation/include/media/AudioProfile.h b/media/libaudiofoundation/include/media/AudioProfile.h
index 79dfd12..a668afe 100644
--- a/media/libaudiofoundation/include/media/AudioProfile.h
+++ b/media/libaudiofoundation/include/media/AudioProfile.h
@@ -139,6 +139,9 @@
     bool hasDynamicProfile() const;
     bool hasDynamicRateFor(audio_format_t format) const;
 
+    const SampleRateSet getSampleRatesFor(audio_format_t format) const;
+    const ChannelMaskSet getChannelMasksFor(audio_format_t format) const;
+
     bool contains(const sp<AudioProfile>& profile, bool ignoreDynamicFlags = false) const;
 
     virtual void dump(std::string *dst, int spaces) const;
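
A brief usage sketch for the two new accessors, assuming a caller that wants to pick a concrete configuration for a known format; the helper and its policy of choosing the highest rate are illustrative, not part of this change:

    #include <media/AudioProfile.h>
    #include <system/audio.h>

    using namespace android;

    // Returns false if no profile declares 'format' or the profile is fully dynamic.
    bool pickConfigForFormat(const AudioProfileVector& profiles, audio_format_t format,
                             uint32_t* sampleRate, audio_channel_mask_t* channelMask) {
        const SampleRateSet rates = profiles.getSampleRatesFor(format);
        const ChannelMaskSet masks = profiles.getChannelMasksFor(format);
        if (rates.empty() || masks.empty()) {
            return false;
        }
        *sampleRate = *rates.rbegin();   // e.g. prefer the highest declared rate
        *channelMask = *masks.begin();
        return true;
    }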
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index f47dd0b..3c05b0b 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -74,5 +74,19 @@
 cc_library_headers {
     name: "libaudiohal_headers",
 
+    header_libs: [
+        "libeffectsconfig_headers",
+    ],
+
+    export_header_lib_headers: ["libeffectsconfig_headers"],
+
     export_include_dirs: ["include"],
 }
+
+cc_library_headers {
+    name: "libaudiohalimpl_headers",
+
+    header_libs: ["libaudiohal_headers"],
+    export_header_lib_headers: ["libaudiohal_headers"],
+    export_include_dirs: ["impl"],
+}
diff --git a/media/libaudiohal/TEST_MAPPING b/media/libaudiohal/TEST_MAPPING
index 5d3fb0a..90f481b 100644
--- a/media/libaudiohal/TEST_MAPPING
+++ b/media/libaudiohal/TEST_MAPPING
@@ -16,6 +16,9 @@
           "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
         }
       ]
+    },
+    {
+      "name": "CoreAudioHalAidlTest"
     }
   ]
 }
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index ff817d4..1689365 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -35,6 +35,7 @@
         "android.hidl.allocator@1.0",
         "android.hidl.memory@1.0",
         "libaudiohal_deathhandler",
+        "libeffectsconfig",
         "libhidlbase",
         "libhidlmemory",
     ],
@@ -244,20 +245,48 @@
     ]
 }
 
-cc_library_shared {
-    name: "libaudiohal@aidl",
+cc_defaults {
+    name: "libaudiohal_aidl_default",
     defaults: [
-        "libaudiohal_default",
         "latest_android_hardware_audio_common_ndk_shared",
         "latest_android_hardware_audio_core_ndk_shared",
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_effect_ndk_shared",
         "latest_android_media_audio_common_types_ndk_shared",
     ],
+    shared_libs: [
+        "android.hardware.common-V2-ndk",
+        "android.hardware.common.fmq-V1-ndk",
+        "av-audio-types-aidl-ndk",
+        "libaudio_aidl_conversion_common_cpp",
+        "libaudio_aidl_conversion_common_ndk",
+        "libaudio_aidl_conversion_common_ndk_cpp",
+        "libaudio_aidl_conversion_core_ndk",
+        "libaudio_aidl_conversion_effect_ndk",
+        "libaudioaidlcommon",
+        "libbinder_ndk",
+    ],
+    header_libs: [
+        "libaudio_system_headers",
+        "libeffectsconfig_headers",
+    ],
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+        "-Wthread-safety",
+        "-DBACKEND_CPP_NDK",
+    ],
+}
+
+cc_library_shared {
+    name: "libaudiohal@aidl",
+    defaults: [
+        "libaudiohal_default",
+        "libaudiohal_aidl_default",
+    ],
     srcs: [
-        "DeviceHalAidl.cpp",
         "DevicesFactoryHalEntry.cpp",
-        "DevicesFactoryHalAidl.cpp",
         "EffectConversionHelperAidl.cpp",
         "EffectBufferHalAidl.cpp",
         "EffectHalAidl.cpp",
@@ -279,27 +308,22 @@
         "effectsAidlConversion/AidlConversionVisualizer.cpp",
         "EffectsFactoryHalAidl.cpp",
         "EffectsFactoryHalEntry.cpp",
+        ":audio_effectproxy_src_files",
+        ":core_audio_hal_aidl_src_files",
+    ],
+}
+
+filegroup {
+    name: "core_audio_hal_aidl_src_files",
+    srcs: [
+        "ConversionHelperAidl.cpp",
+        "DeviceHalAidl.cpp",
+        "DevicesFactoryHalAidl.cpp",
         "StreamHalAidl.cpp",
     ],
-    static_libs: [
-        "android.hardware.common-V2-ndk",
-        "android.hardware.common.fmq-V1-ndk",
-    ],
-    shared_libs: [
-        "libbinder_ndk",
-        "libaudio_aidl_conversion_common_cpp",
-        "libaudio_aidl_conversion_common_ndk",
-        "libaudio_aidl_conversion_effect_ndk",
-        "libaudioaidlcommon",
-    ],
-    header_libs: [
-        "libaudio_system_headers",
-    ],
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-        "-Wthread-safety",
-        "-DBACKEND_CPP_NDK",
-    ],
+}
+
+filegroup {
+    name: "audio_effectproxy_src_files",
+    srcs: ["EffectProxy.cpp"],
 }
diff --git a/media/libaudiohal/impl/ConversionHelperAidl.cpp b/media/libaudiohal/impl/ConversionHelperAidl.cpp
new file mode 100644
index 0000000..7197bf2
--- /dev/null
+++ b/media/libaudiohal/impl/ConversionHelperAidl.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "ConversionHelperAidl"
+
+#include <memory>
+
+#include <media/AidlConversionUtil.h>
+#include <utils/Log.h>
+
+#include "ConversionHelperAidl.h"
+
+using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::hardware::audio::core::VendorParameter;
+using aidl::android::media::audio::IHalAdapterVendorExtension;
+
+namespace android {
+
+status_t parseAndGetVendorParameters(
+        std::shared_ptr<IHalAdapterVendorExtension> vendorExt,
+        const VendorParametersRecipient& recipient,
+        const AudioParameter& parameterKeys,
+        String8* values) {
+    using ParameterScope = IHalAdapterVendorExtension::ParameterScope;
+    if (parameterKeys.size() == 0) return OK;
+    const String8 rawKeys = parameterKeys.keysToString();
+    if (vendorExt == nullptr) {
+        ALOGW("%s: unknown parameters, ignored: \"%s\"", __func__, rawKeys.c_str());
+        return OK;
+    }
+
+    std::vector<std::string> parameterIds;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(vendorExt->parseVendorParameterIds(
+                            ParameterScope(recipient.index()),
+                            std::string(rawKeys.c_str()), &parameterIds)));
+    if (parameterIds.empty()) return OK;
+
+    std::vector<VendorParameter> parameters;
+    if (recipient.index() == static_cast<int>(ParameterScope::MODULE)) {
+        auto module = std::get<static_cast<int>(ParameterScope::MODULE)>(recipient);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(module->getVendorParameters(
+                                parameterIds, &parameters)));
+    } else if (recipient.index() == static_cast<int>(ParameterScope::STREAM)) {
+        auto stream = std::get<static_cast<int>(ParameterScope::STREAM)>(recipient);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(stream->getVendorParameters(
+                                parameterIds, &parameters)));
+    } else {
+        LOG_ALWAYS_FATAL("%s: unexpected recipient variant index: %zu",
+                __func__, recipient.index());
+    }
+    if (!parameters.empty()) {
+        std::string vendorParameters;
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(vendorExt->processVendorParameters(
+                                ParameterScope(recipient.index()),
+                                parameters, &vendorParameters)));
+        // Re-parse the vendor-provided string to ensure that it is correct.
+        AudioParameter reparse(String8(vendorParameters.c_str()));
+        if (reparse.size() != 0) {
+            if (!values->empty()) {
+                values->append(";");
+            }
+            values->append(reparse.toString().c_str());
+        }
+    }
+    return OK;
+}
+
+status_t parseAndSetVendorParameters(
+        std::shared_ptr<IHalAdapterVendorExtension> vendorExt,
+        const VendorParametersRecipient& recipient,
+        const AudioParameter& parameters) {
+    using ParameterScope = IHalAdapterVendorExtension::ParameterScope;
+    if (parameters.size() == 0) return OK;
+    const String8 rawKeysAndValues = parameters.toString();
+    if (vendorExt == nullptr) {
+        ALOGW("%s: unknown parameters, ignored: \"%s\"", __func__, rawKeysAndValues.c_str());
+        return OK;
+    }
+
+    std::vector<VendorParameter> syncParameters, asyncParameters;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(vendorExt->parseVendorParameters(
+                            ParameterScope(recipient.index()),
+                            std::string(rawKeysAndValues.c_str()),
+                            &syncParameters, &asyncParameters)));
+    if (recipient.index() == static_cast<int>(ParameterScope::MODULE)) {
+        auto module = std::get<static_cast<int>(ParameterScope::MODULE)>(recipient);
+        if (!syncParameters.empty()) {
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(module->setVendorParameters(
+                                    syncParameters, false /*async*/)));
+        }
+        if (!asyncParameters.empty()) {
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(module->setVendorParameters(
+                                    asyncParameters, true /*async*/)));
+        }
+    } else if (recipient.index() == static_cast<int>(ParameterScope::STREAM)) {
+        auto stream = std::get<static_cast<int>(ParameterScope::STREAM)>(recipient);
+        if (!syncParameters.empty()) {
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(stream->setVendorParameters(
+                                    syncParameters, false /*async*/)));
+        }
+        if (!asyncParameters.empty()) {
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(stream->setVendorParameters(
+                                    asyncParameters, true /*async*/)));
+        }
+    } else {
+        LOG_ALWAYS_FATAL("%s: unexpected recipient variant index: %zu",
+                __func__, recipient.index());
+    }
+    return OK;
+}
+
+} // namespace android
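
A sketch of how a HAL adapter is expected to call the helper above for the "set" direction, mirroring the DeviceHalAidl::setParameters() change later in this patch; the free-standing function name is hypothetical:

    #include <media/AudioParameter.h>
    #include "ConversionHelperAidl.h"

    namespace android {

    status_t setVendorParametersSketch(
            const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension>& vext,
            const std::shared_ptr<::aidl::android::hardware::audio::core::IModule>& module,
            const String8& kvPairs) {
        AudioParameter parameters(kvPairs);
        // Framework-known keys would be filtered out and handled before this point;
        // whatever remains is parsed by the vendor extension and applied to the module
        // (passing an IModule selects ParameterScope::MODULE via the variant index).
        return parseAndSetVendorParameters(vext, module, parameters);
    }

    }  // namespace android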
diff --git a/media/libaudiohal/impl/ConversionHelperAidl.h b/media/libaudiohal/impl/ConversionHelperAidl.h
index db6b6cf..0fadd9c 100644
--- a/media/libaudiohal/impl/ConversionHelperAidl.h
+++ b/media/libaudiohal/impl/ConversionHelperAidl.h
@@ -18,8 +18,15 @@
 
 #include <string>
 #include <string_view>
+#include <variant>
 #include <vector>
 
+#include <aidl/android/hardware/audio/core/IModule.h>
+#include <aidl/android/hardware/audio/core/IStreamCommon.h>
+#include <aidl/android/media/audio/IHalAdapterVendorExtension.h>
+#include <android-base/expected.h>
+#include <error/Result.h>
+#include <media/AudioParameter.h>
 #include <utils/String16.h>
 #include <utils/Vector.h>
 
@@ -51,4 +58,38 @@
     const std::string mClassName;
 };
 
+// 'action' must accept a value of type 'T' and return 'status_t'.
+// The function returns 'true' if the parameter was found and the action succeeded.
+// The function returns 'false' if the parameter was not found.
+// Any errors are propagated; an error thus implies that the parameter was found.
+template<typename T, typename F>
+error::Result<bool> filterOutAndProcessParameter(
+        AudioParameter& parameters, const String8& key, const F& action) {
+    if (parameters.containsKey(key)) {
+        T value;
+        status_t status = parameters.get(key, value);
+        if (status == OK) {
+            parameters.remove(key);
+            status = action(value);
+            if (status == OK) return true;
+        }
+        return base::unexpected(status);
+    }
+    return false;
+}
+
+// Must use the same order of elements as IHalAdapterVendorExtension::ParameterScope.
+using VendorParametersRecipient = std::variant<
+        std::shared_ptr<::aidl::android::hardware::audio::core::IModule>,
+        std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon>>;
+status_t parseAndGetVendorParameters(
+        std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> vendorExt,
+        const VendorParametersRecipient& recipient,
+        const AudioParameter& parameterKeys,
+        String8* values);
+status_t parseAndSetVendorParameters(
+        std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> vendorExt,
+        const VendorParametersRecipient& recipient,
+        const AudioParameter& parameters);
+
 }  // namespace android
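
A sketch of consuming one framework-known key through filterOutAndProcessParameter<> before the remaining keys are handed to the vendor extension; the key choice and the callback are illustrative, and the error::Result<bool> is unpacked via its ok()/value()/error() accessors:

    #include <media/AudioParameter.h>
    #include "ConversionHelperAidl.h"

    namespace android {

    // Illustrative only: consume the screen state key, leaving all other keys in
    // 'parameters' for later processing by the vendor extension.
    status_t handleScreenStateSketch(AudioParameter& parameters, bool* screenOn) {
        auto result = filterOutAndProcessParameter<String8>(
                parameters, String8(AudioParameter::keyScreenState),
                [&](const String8& value) -> status_t {
                    *screenOn = (value == AudioParameter::valueOn);
                    return OK;
                });
        if (!result.ok()) return result.error();  // key present, but handling failed
        // result.value(): true  -> key found, removed from 'parameters', handled
        //                 false -> key absent, 'parameters' untouched
        return OK;
    }

    }  // namespace android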
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 0d6bb3f..ae15190 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -25,42 +25,60 @@
 #include <aidl/android/hardware/audio/core/StreamDescriptor.h>
 #include <error/expected_utils.h>
 #include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdkCpp.h>
 #include <media/AidlConversionUtil.h>
 #include <mediautils/TimeCheck.h>
 #include <Utils.h>
 #include <utils/Log.h>
 
 #include "DeviceHalAidl.h"
+#include "EffectHalAidl.h"
 #include "StreamHalAidl.h"
 
 using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::media::audio::common::Boolean;
+using aidl::android::media::audio::common::AudioChannelLayout;
 using aidl::android::media::audio::common::AudioConfig;
 using aidl::android::media::audio::common::AudioDevice;
+using aidl::android::media::audio::common::AudioDeviceAddress;
 using aidl::android::media::audio::common::AudioDeviceType;
+using aidl::android::media::audio::common::AudioFormatType;
 using aidl::android::media::audio::common::AudioInputFlags;
 using aidl::android::media::audio::common::AudioIoFlags;
 using aidl::android::media::audio::common::AudioLatencyMode;
+using aidl::android::media::audio::common::AudioMMapPolicy;
+using aidl::android::media::audio::common::AudioMMapPolicyInfo;
+using aidl::android::media::audio::common::AudioMMapPolicyType;
 using aidl::android::media::audio::common::AudioMode;
 using aidl::android::media::audio::common::AudioOutputFlags;
 using aidl::android::media::audio::common::AudioPort;
 using aidl::android::media::audio::common::AudioPortConfig;
 using aidl::android::media::audio::common::AudioPortDeviceExt;
+using aidl::android::media::audio::common::AudioPortExt;
 using aidl::android::media::audio::common::AudioPortMixExt;
 using aidl::android::media::audio::common::AudioPortMixExtUseCase;
-using aidl::android::media::audio::common::AudioPortExt;
+using aidl::android::media::audio::common::AudioProfile;
 using aidl::android::media::audio::common::AudioSource;
-using aidl::android::media::audio::common::Int;
 using aidl::android::media::audio::common::Float;
-using aidl::android::hardware::audio::common::getFrameSizeInBytes;
-using aidl::android::hardware::audio::common::isBitPositionFlagSet;
-using aidl::android::hardware::audio::common::makeBitPositionFlagMask;
+using aidl::android::media::audio::common::Int;
 using aidl::android::media::audio::common::MicrophoneDynamicInfo;
 using aidl::android::media::audio::common::MicrophoneInfo;
+using aidl::android::media::audio::IHalAdapterVendorExtension;
+using aidl::android::hardware::audio::common::getFrameSizeInBytes;
+using aidl::android::hardware::audio::common::isBitPositionFlagSet;
+using aidl::android::hardware::audio::common::isDefaultAudioFormat;
+using aidl::android::hardware::audio::common::makeBitPositionFlagMask;
 using aidl::android::hardware::audio::common::RecordTrackMetadata;
 using aidl::android::hardware::audio::core::AudioPatch;
+using aidl::android::hardware::audio::core::AudioRoute;
+using aidl::android::hardware::audio::core::IBluetooth;
+using aidl::android::hardware::audio::core::IBluetoothA2dp;
+using aidl::android::hardware::audio::core::IBluetoothLe;
 using aidl::android::hardware::audio::core::IModule;
 using aidl::android::hardware::audio::core::ITelephony;
+using aidl::android::hardware::audio::core::ModuleDebug;
 using aidl::android::hardware::audio::core::StreamDescriptor;
+using aidl::android::hardware::audio::core::VendorParameter;
 
 namespace android {
 
@@ -84,8 +102,70 @@
     portConfig->format = config.base.format;
 }
 
+// Note: these converters are for types defined in different AIDL files. Although these
+// AIDL files are copies of each other, formally they define different types,
+// thus we don't use a conversion via a parcelable.
+ConversionResult<media::AudioRoute> ndk2cpp_AudioRoute(const AudioRoute& ndk) {
+    media::AudioRoute cpp;
+    cpp.sourcePortIds.insert(
+            cpp.sourcePortIds.end(), ndk.sourcePortIds.begin(), ndk.sourcePortIds.end());
+    cpp.sinkPortId = ndk.sinkPortId;
+    cpp.isExclusive = ndk.isExclusive;
+    return cpp;
+}
+
+template<typename T>
+std::shared_ptr<T> retrieveSubInterface(const std::shared_ptr<IModule>& module,
+        ::ndk::ScopedAStatus (IModule::*getT)(std::shared_ptr<T>*)) {
+    if (module != nullptr) {
+        std::shared_ptr<T> instance;
+        if (auto status = (module.get()->*getT)(&instance); status.isOk()) {
+            return instance;
+        }
+    }
+    return nullptr;
+}
+
 }  // namespace
 
+DeviceHalAidl::DeviceHalAidl(const std::string& instance, const std::shared_ptr<IModule>& module,
+                             const std::shared_ptr<IHalAdapterVendorExtension>& vext)
+        : ConversionHelperAidl("DeviceHalAidl"),
+          mInstance(instance), mModule(module), mVendorExt(vext),
+          mTelephony(retrieveSubInterface<ITelephony>(module, &IModule::getTelephony)),
+          mBluetooth(retrieveSubInterface<IBluetooth>(module, &IModule::getBluetooth)),
+          mBluetoothA2dp(retrieveSubInterface<IBluetoothA2dp>(module, &IModule::getBluetoothA2dp)),
+          mBluetoothLe(retrieveSubInterface<IBluetoothLe>(module, &IModule::getBluetoothLe)) {
+}
+
+status_t DeviceHalAidl::getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) {
+    return ::aidl::android::convertContainer(mPorts, ports,
+            [](const Ports::value_type& pair) { return ndk2cpp_AudioPort(pair.second); });
+}
+
+status_t DeviceHalAidl::getAudioRoutes(std::vector<media::AudioRoute> *routes) {
+    *routes = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::convertContainer<std::vector<media::AudioRoute>>(
+                    mRoutes, ndk2cpp_AudioRoute));
+    return OK;
+}
+
+status_t DeviceHalAidl::getSupportedModes(std::vector<media::audio::common::AudioMode> *modes) {
+    TIME_CHECK();
+    if (modes == nullptr) {
+        return BAD_VALUE;
+    }
+    if (mModule == nullptr) return NO_INIT;
+    if (mTelephony == nullptr) return INVALID_OPERATION;
+    std::vector<AudioMode> aidlModes;
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(mTelephony->getSupportedAudioModes(&aidlModes)));
+    *modes = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::convertContainer<std::vector<media::audio::common::AudioMode>>(
+                    aidlModes, ndk2cpp_AudioMode));
+    return OK;
+}
+
 status_t DeviceHalAidl::getSupportedDevices(uint32_t*) {
     // Obsolete.
     return INVALID_OPERATION;
@@ -95,8 +175,7 @@
     TIME_CHECK();
     if (mModule == nullptr) return NO_INIT;
     std::vector<AudioPort> ports;
-    RETURN_STATUS_IF_ERROR(
-            statusTFromBinderStatus(mModule->getAudioPorts(&ports)));
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->getAudioPorts(&ports)));
     ALOGW_IF(ports.empty(), "%s: module %s returned an empty list of audio ports",
             __func__, mInstance.c_str());
     std::transform(ports.begin(), ports.end(), std::inserter(mPorts, mPorts.end()),
@@ -116,12 +195,16 @@
     }
     ALOGI("%s: module %s default port ids: input %d, output %d",
             __func__, mInstance.c_str(), mDefaultInputPortId, mDefaultOutputPortId);
+    RETURN_STATUS_IF_ERROR(updateRoutes());
     std::vector<AudioPortConfig> portConfigs;
     RETURN_STATUS_IF_ERROR(
             statusTFromBinderStatus(mModule->getAudioPortConfigs(&portConfigs)));  // OK if empty
     std::transform(portConfigs.begin(), portConfigs.end(),
             std::inserter(mPortConfigs, mPortConfigs.end()),
             [](const auto& p) { return std::make_pair(p.id, p); });
+    std::transform(mPortConfigs.begin(), mPortConfigs.end(),
+            std::inserter(mInitialPortConfigIds, mInitialPortConfigIds.end()),
+            [](const auto& pcPair) { return pcPair.first; });
     std::vector<AudioPatch> patches;
     RETURN_STATUS_IF_ERROR(
             statusTFromBinderStatus(mModule->getAudioPatches(&patches)));  // OK if empty
@@ -134,17 +217,14 @@
 status_t DeviceHalAidl::setVoiceVolume(float volume) {
     TIME_CHECK();
     if (!mModule) return NO_INIT;
-    std::shared_ptr<ITelephony> telephony;
-    if (ndk::ScopedAStatus status = mModule->getTelephony(&telephony);
-            status.isOk() && telephony != nullptr) {
-        ITelephony::TelecomConfig inConfig{ .voiceVolume = Float{volume} }, outConfig;
-        RETURN_STATUS_IF_ERROR(
-                statusTFromBinderStatus(telephony->setTelecomConfig(inConfig, &outConfig)));
-        ALOGW_IF(outConfig.voiceVolume.has_value() && volume != outConfig.voiceVolume.value().value,
-                "%s: the resulting voice volume %f is not the same as requested %f",
-                __func__, outConfig.voiceVolume.value().value, volume);
-    }
-    return INVALID_OPERATION;
+    if (mTelephony == nullptr) return INVALID_OPERATION;
+    ITelephony::TelecomConfig inConfig{ .voiceVolume = Float{volume} }, outConfig;
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(mTelephony->setTelecomConfig(inConfig, &outConfig)));
+    ALOGW_IF(outConfig.voiceVolume.has_value() && volume != outConfig.voiceVolume.value().value,
+            "%s: the resulting voice volume %f is not the same as requested %f",
+            __func__, outConfig.voiceVolume.value().value, volume);
+    return OK;
 }
 
 status_t DeviceHalAidl::setMasterVolume(float volume) {
@@ -163,10 +243,8 @@
     TIME_CHECK();
     if (!mModule) return NO_INIT;
     AudioMode audioMode = VALUE_OR_FATAL(::aidl::android::legacy2aidl_audio_mode_t_AudioMode(mode));
-    std::shared_ptr<ITelephony> telephony;
-    if (ndk::ScopedAStatus status = mModule->getTelephony(&telephony);
-            status.isOk() && telephony != nullptr) {
-        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(telephony->switchAudioMode(audioMode)));
+    if (mTelephony != nullptr) {
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mTelephony->switchAudioMode(audioMode)));
     }
     return statusTFromBinderStatus(mModule->updateAudioMode(audioMode));
 }
@@ -195,19 +273,41 @@
     return statusTFromBinderStatus(mModule->getMasterMute(state));
 }
 
-status_t DeviceHalAidl::setParameters(const String8& kvPairs __unused) {
-    TIME_CHECK();
+status_t DeviceHalAidl::setParameters(const String8& kvPairs) {
     if (!mModule) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    AudioParameter parameters(kvPairs);
+    ALOGD("%s: parameters: \"%s\"", __func__, parameters.toString().c_str());
+
+    if (status_t status = filterAndUpdateBtA2dpParameters(parameters); status != OK) {
+        ALOGW("%s: filtering or updating BT A2DP parameters failed: %d", __func__, status);
+    }
+    if (status_t status = filterAndUpdateBtHfpParameters(parameters); status != OK) {
+        ALOGW("%s: filtering or updating BT HFP parameters failed: %d", __func__, status);
+    }
+    if (status_t status = filterAndUpdateBtLeParameters(parameters); status != OK) {
+        ALOGW("%s: filtering or updating BT LE parameters failed: %d", __func__, status);
+    }
+    if (status_t status = filterAndUpdateBtScoParameters(parameters); status != OK) {
+        ALOGW("%s: filtering or updating BT SCO parameters failed: %d", __func__, status);
+    }
+    if (status_t status = filterAndUpdateScreenParameters(parameters); status != OK) {
+        ALOGW("%s: filtering or updating screen parameters failed: %d", __func__, status);
+    }
+    return parseAndSetVendorParameters(mVendorExt, mModule, parameters);
 }
 
-status_t DeviceHalAidl::getParameters(const String8& keys __unused, String8 *values) {
+status_t DeviceHalAidl::getParameters(const String8& keys, String8 *values) {
     TIME_CHECK();
-    values->clear();
     if (!mModule) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    if (values == nullptr) {
+        return BAD_VALUE;
+    }
+    AudioParameter parameterKeys(keys), result;
+    if (status_t status = filterAndRetrieveBtA2dpParameters(parameterKeys, &result); status != OK) {
+        ALOGW("%s: filtering or retrieving BT A2DP parameters failed: %d", __func__, status);
+    }
+    *values = result.toString();
+    return parseAndGetVendorParameters(mVendorExt, mModule, parameterKeys, values);
 }
 
 namespace {
@@ -257,9 +357,9 @@
     AudioPortConfig mixPortConfig;
     Cleanups cleanups;
     audio_config writableConfig = *config;
-    int32_t nominalLatency;
+    AudioPatch aidlPatch;
     RETURN_STATUS_IF_ERROR(prepareToOpenStream(0 /*handle*/, aidlDevice, aidlFlags, aidlSource,
-                    &writableConfig, &cleanups, &aidlConfig, &mixPortConfig, &nominalLatency));
+                    &writableConfig, &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
     *size = aidlConfig.frameCount *
             getFrameSizeInBytes(aidlConfig.base.format, aidlConfig.base.channelMask);
     // Do not disarm cleanups to release temporary port configs.
@@ -270,36 +370,40 @@
         int32_t aidlHandle, const AudioDevice& aidlDevice, const AudioIoFlags& aidlFlags,
         AudioSource aidlSource, struct audio_config* config,
         Cleanups* cleanups, AudioConfig* aidlConfig, AudioPortConfig* mixPortConfig,
-        int32_t* nominalLatency) {
+        AudioPatch* aidlPatch) {
+    ALOGD("%p %s::%s: handle %d, device %s, flags %s, source %s, config %s, mix port config %s",
+            this, getClassName().c_str(), __func__, aidlHandle, aidlDevice.toString().c_str(),
+            aidlFlags.toString().c_str(), toString(aidlSource).c_str(),
+            aidlConfig->toString().c_str(), mixPortConfig->toString().c_str());
+    resetUnusedPatchesAndPortConfigs();
     const bool isInput = aidlFlags.getTag() == AudioIoFlags::Tag::input;
     // Find / create AudioPortConfigs for the device port and the mix port,
     // then find / create a patch between them, and open a stream on the mix port.
     AudioPortConfig devicePortConfig;
     bool created = false;
-    RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(aidlDevice, &devicePortConfig, &created));
+    RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(aidlDevice, aidlConfig,
+                                                  &devicePortConfig, &created));
     if (created) {
         cleanups->emplace_front(this, &DeviceHalAidl::resetPortConfig, devicePortConfig.id);
     }
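+    // Restrict the mix port search to ports that are routable to the selected device port.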
     RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(*aidlConfig, aidlFlags, aidlHandle, aidlSource,
-                    mixPortConfig, &created));
+                    std::set<int32_t>{devicePortConfig.portId}, mixPortConfig, &created));
     if (created) {
         cleanups->emplace_front(this, &DeviceHalAidl::resetPortConfig, mixPortConfig->id);
     }
     setConfigFromPortConfig(aidlConfig, *mixPortConfig);
-    AudioPatch patch;
     if (isInput) {
         RETURN_STATUS_IF_ERROR(findOrCreatePatch(
-                        {devicePortConfig.id}, {mixPortConfig->id}, &patch, &created));
+                        {devicePortConfig.id}, {mixPortConfig->id}, aidlPatch, &created));
     } else {
         RETURN_STATUS_IF_ERROR(findOrCreatePatch(
-                        {mixPortConfig->id}, {devicePortConfig.id}, &patch, &created));
+                        {mixPortConfig->id}, {devicePortConfig.id}, aidlPatch, &created));
     }
     if (created) {
-        cleanups->emplace_front(this, &DeviceHalAidl::resetPatch, patch.id);
+        cleanups->emplace_front(this, &DeviceHalAidl::resetPatch, aidlPatch->id);
     }
-    *nominalLatency = patch.latenciesMs[0];
     if (aidlConfig->frameCount <= 0) {
-        aidlConfig->frameCount = patch.minimumStreamBufferSizeFrames;
+        aidlConfig->frameCount = aidlPatch->minimumStreamBufferSizeFrames;
     }
     *config = VALUE_OR_RETURN_STATUS(
             ::aidl::android::aidl2legacy_AudioConfig_audio_config_t(*aidlConfig, isInput));
@@ -445,10 +549,10 @@
     AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::output>(aidlOutputFlags);
     AudioPortConfig mixPortConfig;
     Cleanups cleanups;
-    int32_t nominalLatency;
+    AudioPatch aidlPatch;
     RETURN_STATUS_IF_ERROR(prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags,
                     AudioSource::SYS_RESERVED_INVALID /*only needed for input*/,
-                    config, &cleanups, &aidlConfig, &mixPortConfig, &nominalLatency));
+                    config, &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
     ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamArguments args;
     args.portConfigId = mixPortConfig.id;
     const bool isOffload = isBitPositionFlagSet(
@@ -472,8 +576,9 @@
                 __func__, ret.desc.toString().c_str());
         return NO_INIT;
     }
-    *outStream = sp<StreamOutHalAidl>::make(*config, std::move(context), nominalLatency,
-            std::move(ret.stream), this /*callbackBroker*/);
+    *outStream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
+            std::move(ret.stream), mVendorExt, this /*callbackBroker*/);
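+    // Remember the patch backing this stream so it can be reset once the stream is released.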
+    mStreams.insert(std::pair(*outStream, aidlPatch.id));
     void* cbCookie = (*outStream).get();
     {
         std::lock_guard l(mLock);
@@ -510,9 +615,9 @@
             ::aidl::android::legacy2aidl_audio_source_t_AudioSource(source));
     AudioPortConfig mixPortConfig;
     Cleanups cleanups;
-    int32_t nominalLatency;
+    AudioPatch aidlPatch;
     RETURN_STATUS_IF_ERROR(prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags, aidlSource,
-                    config, &cleanups, &aidlConfig, &mixPortConfig, &nominalLatency));
+                    config, &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
     ::aidl::android::hardware::audio::core::IModule::OpenInputStreamArguments args;
     args.portConfigId = mixPortConfig.id;
     RecordTrackMetadata aidlTrackMetadata{
@@ -532,8 +637,9 @@
                 __func__, ret.desc.toString().c_str());
         return NO_INIT;
     }
-    *inStream = sp<StreamInHalAidl>::make(*config, std::move(context), nominalLatency,
-            std::move(ret.stream), this /*micInfoProvider*/);
+    *inStream = sp<StreamInHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
+            std::move(ret.stream), mVendorExt, this /*micInfoProvider*/);
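+    // Associate the input stream with its patch so the patch can be cleaned up after release.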
+    mStreams.insert(std::pair(*inStream, aidlPatch.id));
     cleanups.disarmAll();
     return OK;
 }
@@ -601,20 +707,41 @@
             __func__, ::android::internal::ToString(aidlSources).c_str(),
             ::android::internal::ToString(aidlSinks).c_str());
     auto fillPortConfigs = [&](
-            const std::vector<AudioPortConfig>& configs, std::vector<int32_t>* ids) -> status_t {
+            const std::vector<AudioPortConfig>& configs,
+            const std::set<int32_t>& destinationPortIds,
+            std::vector<int32_t>* ids, std::set<int32_t>* portIds) -> status_t {
         for (const auto& s : configs) {
             AudioPortConfig portConfig;
             bool created = false;
-            RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(s, &portConfig, &created));
+            RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(
+                            s, destinationPortIds, &portConfig, &created));
             if (created) {
                 cleanups.emplace_front(this, &DeviceHalAidl::resetPortConfig, portConfig.id);
             }
             ids->push_back(portConfig.id);
+            if (portIds != nullptr) {
+                portIds->insert(portConfig.portId);
+            }
         }
         return OK;
     };
-    RETURN_STATUS_IF_ERROR(fillPortConfigs(aidlSources, &aidlPatch.sourcePortConfigIds));
-    RETURN_STATUS_IF_ERROR(fillPortConfigs(aidlSinks, &aidlPatch.sinkPortConfigIds));
+    // When looking up port configs, the destinationPortId is only used for mix ports.
+    // Thus, we process device port configs first, and look up the destination port ID from them.
+    bool sourceIsDevice = std::any_of(aidlSources.begin(), aidlSources.end(),
+            [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
+    const std::vector<AudioPortConfig>& devicePortConfigs =
+            sourceIsDevice ? aidlSources : aidlSinks;
+    std::vector<int32_t>* devicePortConfigIds =
+            sourceIsDevice ? &aidlPatch.sourcePortConfigIds : &aidlPatch.sinkPortConfigIds;
+    const std::vector<AudioPortConfig>& mixPortConfigs =
+            sourceIsDevice ? aidlSinks : aidlSources;
+    std::vector<int32_t>* mixPortConfigIds =
+            sourceIsDevice ? &aidlPatch.sinkPortConfigIds : &aidlPatch.sourcePortConfigIds;
+    std::set<int32_t> devicePortIds;
+    RETURN_STATUS_IF_ERROR(fillPortConfigs(
+                    devicePortConfigs, std::set<int32_t>(), devicePortConfigIds, &devicePortIds));
+    RETURN_STATUS_IF_ERROR(fillPortConfigs(
+                    mixPortConfigs, devicePortIds, mixPortConfigIds, nullptr));
     if (existingPatchIt != mPatches.end()) {
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
                         mModule->setAudioPatch(aidlPatch, &aidlPatch)));
@@ -649,22 +776,68 @@
     return OK;
 }
 
-status_t DeviceHalAidl::getAudioPort(struct audio_port* port __unused) {
-    TIME_CHECK();
-    ALOGE("%s not implemented yet", __func__);
-    return INVALID_OPERATION;
-}
-
-status_t DeviceHalAidl::getAudioPort(struct audio_port_v7 *port __unused) {
-    TIME_CHECK();
-    ALOGE("%s not implemented yet", __func__);
-    return INVALID_OPERATION;
-}
-
-status_t DeviceHalAidl::setAudioPortConfig(const struct audio_port_config* config __unused) {
+status_t DeviceHalAidl::getAudioPort(struct audio_port* port) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mModule) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
+    if (port == nullptr) {
+        return BAD_VALUE;
+    }
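+    // Convert to audio_port_v7, query it, and convert the result back into the legacy struct.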
+    audio_port_v7 portV7;
+    audio_populate_audio_port_v7(port, &portV7);
+    RETURN_STATUS_IF_ERROR(getAudioPort(&portV7));
+    return audio_populate_audio_port(&portV7, port) ? OK : BAD_VALUE;
+}
+
+status_t DeviceHalAidl::getAudioPort(struct audio_port_v7 *port) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    if (port == nullptr) {
+        return BAD_VALUE;
+    }
+    bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(port->role, port->type)) ==
+            ::aidl::android::AudioPortDirection::INPUT;
+    auto aidlPort = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
+    if (aidlPort.ext.getTag() != AudioPortExt::device) {
+        ALOGE("%s: provided port is not a device port (module %s): %s",
+                __func__, mInstance.c_str(), aidlPort.toString().c_str());
+        return BAD_VALUE;
+    }
+    const auto& matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
+    // It seems that we don't have to call the HAL since all valid ports have been added
+    // either during initialization, or while handling the connection of an external device.
+    auto portsIt = findPort(matchDevice);
+    if (portsIt == mPorts.end()) {
+        ALOGE("%s: device port for device %s is not found in the module %s",
+                __func__, matchDevice.toString().c_str(), mInstance.c_str());
+        return BAD_VALUE;
+    }
+    const int32_t fwkId = aidlPort.id;
+    aidlPort = portsIt->second;
+    aidlPort.id = fwkId;
+    *port = VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_AudioPort_audio_port_v7(
+                    aidlPort, isInput));
+    return OK;
+}
+
+status_t DeviceHalAidl::setAudioPortConfig(const struct audio_port_config* config) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    if (config == nullptr) {
+        return BAD_VALUE;
+    }
+    bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
+                    config->role, config->type)) == ::aidl::android::AudioPortDirection::INPUT;
+    AudioPortConfig requestedPortConfig = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
+                    *config, isInput, 0 /*portId*/));
+    AudioPortConfig portConfig;
+    bool created = false;
+    RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(
+                    requestedPortConfig, std::set<int32_t>(), &portConfig, &created));
     return OK;
 }
 
@@ -710,45 +883,102 @@
     return OK;
 }
 
-status_t DeviceHalAidl::addDeviceEffect(audio_port_handle_t device __unused,
-        sp<EffectHalInterface> effect) {
+status_t DeviceHalAidl::addDeviceEffect(
+        const struct audio_port_config *device, sp<EffectHalInterface> effect) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
     if (!effect) {
         return BAD_VALUE;
     }
-    TIME_CHECK();
-    if (!mModule) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
+    bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
+                    device->role, device->type)) == ::aidl::android::AudioPortDirection::INPUT;
+    auto requestedPortConfig = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
+                    *device, isInput, 0));
+    if (requestedPortConfig.ext.getTag() != AudioPortExt::Tag::device) {
+        ALOGE("%s: provided port config is not a device port config: %s",
+                __func__, requestedPortConfig.toString().c_str());
+        return BAD_VALUE;
+    }
+    AudioPortConfig devicePortConfig;
+    bool created;
+    RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(
+                    requestedPortConfig, {} /*destinationPortIds*/, &devicePortConfig, &created));
+    Cleanups cleanups;
+    if (created) {
+        cleanups.emplace_front(this, &DeviceHalAidl::resetPortConfig, devicePortConfig.id);
+    }
+    auto aidlEffect = sp<effect::EffectHalAidl>::cast(effect);
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->addDeviceEffect(
+                            devicePortConfig.id, aidlEffect->getIEffect())));
+    cleanups.disarmAll();
     return OK;
 }
-status_t DeviceHalAidl::removeDeviceEffect(audio_port_handle_t device __unused,
-                            sp<EffectHalInterface> effect) {
+status_t DeviceHalAidl::removeDeviceEffect(
+        const struct audio_port_config *device, sp<EffectHalInterface> effect) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
     if (!effect) {
         return BAD_VALUE;
     }
-    TIME_CHECK();
-    if (!mModule) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
+                    device->role, device->type)) == ::aidl::android::AudioPortDirection::INPUT;
+    auto requestedPortConfig = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
+                    *device, isInput, 0));
+    if (requestedPortConfig.ext.getTag() != AudioPortExt::Tag::device) {
+        ALOGE("%s: provided port config is not a device port config: %s",
+                __func__, requestedPortConfig.toString().c_str());
+        return BAD_VALUE;
+    }
+    auto existingPortConfigIt = findPortConfig(
+            requestedPortConfig.ext.get<AudioPortExt::Tag::device>().device);
+    if (existingPortConfigIt == mPortConfigs.end()) {
+        ALOGE("%s: could not find a configured device port for the config %s",
+                __func__, requestedPortConfig.toString().c_str());
+        return BAD_VALUE;
+    }
+    auto aidlEffect = sp<effect::EffectHalAidl>::cast(effect);
+    return statusTFromBinderStatus(mModule->removeDeviceEffect(
+                    existingPortConfigIt->first, aidlEffect->getIEffect()));
 }
 
 status_t DeviceHalAidl::getMmapPolicyInfos(
-        media::audio::common::AudioMMapPolicyType policyType __unused,
-        std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos __unused) {
+        media::audio::common::AudioMMapPolicyType policyType,
+        std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos) {
     TIME_CHECK();
-    ALOGE("%s not implemented yet", __func__);
+    AudioMMapPolicyType mmapPolicyType = VALUE_OR_RETURN_STATUS(
+            cpp2ndk_AudioMMapPolicyType(policyType));
+
+    std::vector<AudioMMapPolicyInfo> mmapPolicyInfos;
+
+    if (status_t status = statusTFromBinderStatus(
+            mModule->getMmapPolicyInfos(mmapPolicyType, &mmapPolicyInfos)); status != OK) {
+        return status;
+    }
+
+    *policyInfos = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<media::audio::common::AudioMMapPolicyInfo>>(
+                mmapPolicyInfos, ndk2cpp_AudioMMapPolicyInfo));
     return OK;
 }
 
 int32_t DeviceHalAidl::getAAudioMixerBurstCount() {
     TIME_CHECK();
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    int32_t mixerBurstCount = 0;
+    if (mModule->getAAudioMixerBurstCount(&mixerBurstCount).isOk()) {
+        return mixerBurstCount;
+    }
+    return 0;
 }
 
 int32_t DeviceHalAidl::getAAudioHardwareBurstMinUsec() {
     TIME_CHECK();
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    int32_t hardwareBurstMinUsec = 0;
+    if (mModule->getAAudioHardwareBurstMinUsec(&hardwareBurstMinUsec).isOk()) {
+        return hardwareBurstMinUsec;
+    }
+    return 0;
 }
 
 error::Result<audio_hw_sync_t> DeviceHalAidl::getHwAvSync() {
@@ -764,12 +994,15 @@
     TIME_CHECK();
     if (!mModule) return NO_INIT;
     return mModule->dump(fd, Args(args).args(), args.size());
-};
+}
 
-int32_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports __unused) {
+int32_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports) {
     TIME_CHECK();
-    ALOGE("%s not implemented yet", __func__);
-    return INVALID_OPERATION;
+    if (!mModule) return NO_INIT;
+    if (supports == nullptr) {
+        return BAD_VALUE;
+    }
+    return statusTFromBinderStatus(mModule->supportsVariableLatency(supports));
 }
 
 status_t DeviceHalAidl::getSoundDoseInterface(const std::string& module,
@@ -792,6 +1025,93 @@
     return OK;
 }
 
+status_t DeviceHalAidl::prepareToDisconnectExternalDevice(const struct audio_port_v7* port) {
+    // There is no AIDL API defined for `prepareToDisconnectExternalDevice`.
+    // Call `setConnectedState` instead.
+    // TODO(b/279824103): call prepareToDisconnectExternalDevice when it is added.
+    const status_t status = setConnectedState(port, false /*connected*/);
+    if (status == NO_ERROR) {
+        mDeviceDisconnectionNotified.insert(port->id);
+    }
+    return status;
+}
+
+status_t DeviceHalAidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    if (port == nullptr) {
+        return BAD_VALUE;
+    }
+    if (!connected && mDeviceDisconnectionNotified.erase(port->id) > 0) {
+        // On device disconnection, APM first calls `prepareToDisconnectExternalDevice`
+        // and then calls `setConnectedState`. Since there is no AIDL API for
+        // `prepareToDisconnectExternalDevice` yet, `setConnectedState` has already been
+        // invoked from `prepareToDisconnectExternalDevice`. Do not call the HAL again if
+        // that call succeeded. Also remove the cached entry here so the cache does not
+        // grow over a long run.
+        return NO_ERROR;
+    }
+    bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(port->role, port->type)) ==
+            ::aidl::android::AudioPortDirection::INPUT;
+    AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
+    if (aidlPort.ext.getTag() != AudioPortExt::device) {
+        ALOGE("%s: provided port is not a device port (module %s): %s",
+                __func__, mInstance.c_str(), aidlPort.toString().c_str());
+        return BAD_VALUE;
+    }
+    if (connected) {
+        AudioDevice matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
+        // Reset the device address to find the "template" port.
+        matchDevice.address = AudioDeviceAddress::make<AudioDeviceAddress::id>();
+        auto portsIt = findPort(matchDevice);
+        if (portsIt == mPorts.end()) {
+            ALOGW("%s: device port for device %s is not found in the module %s",
+                    __func__, matchDevice.toString().c_str(), mInstance.c_str());
+            return BAD_VALUE;
+        }
+        // Use the ID of the "template" port, but take everything else from the provided port.
+        aidlPort.id = portsIt->first;
+        AudioPort connectedPort;
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
+                                aidlPort, &connectedPort)));
+        const auto [it, inserted] = mPorts.insert(std::make_pair(connectedPort.id, connectedPort));
+        LOG_ALWAYS_FATAL_IF(!inserted,
+                "%s: module %s, duplicate port ID received from HAL: %s, existing port: %s",
+                __func__, mInstance.c_str(), connectedPort.toString().c_str(),
+                it->second.toString().c_str());
+    } else {  // !connected
+        AudioDevice matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
+        auto portsIt = findPort(matchDevice);
+        if (portsIt == mPorts.end()) {
+            ALOGW("%s: device port for device %s is not found in the module %s",
+                    __func__, matchDevice.toString().c_str(), mInstance.c_str());
+            return BAD_VALUE;
+        }
+        // Any streams opened on the external device must be closed by this time,
+        // thus we can clean up patches and port configs that were created for them.
+        resetUnusedPatchesAndPortConfigs();
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->disconnectExternalDevice(
+                                portsIt->second.id)));
+        mPorts.erase(portsIt);
+    }
+    return updateRoutes();
+}
+
+status_t DeviceHalAidl::setSimulateDeviceConnections(bool enabled) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    ModuleDebug debug{ .simulateDeviceConnections = enabled };
+    status_t status = statusTFromBinderStatus(mModule->setModuleDebug(debug));
+    // This is important to log as it affects HAL behavior.
+    if (status == OK) {
+        ALOGI("%s: set enabled: %d", __func__, enabled);
+    } else {
+        ALOGW("%s: set enabled to %d failed: %d", __func__, enabled, status);
+    }
+    return status;
+}
+
 bool DeviceHalAidl::audioDeviceMatches(const AudioDevice& device, const AudioPort& p) {
     if (p.ext.getTag() != AudioPortExt::Tag::device) return false;
     return p.ext.get<AudioPortExt::Tag::device>().device == device;
@@ -807,8 +1127,8 @@
     return p.ext.get<AudioPortExt::Tag::device>().device == device;
 }
 
-status_t DeviceHalAidl::createPortConfig(
-        const AudioPortConfig& requestedPortConfig, PortConfigs::iterator* result) {
+status_t DeviceHalAidl::createOrUpdatePortConfig(
+        const AudioPortConfig& requestedPortConfig, PortConfigs::iterator* result, bool* created) {
     TIME_CHECK();
     AudioPortConfig appliedPortConfig;
     bool applied = false;
@@ -823,11 +1143,230 @@
             return NO_INIT;
         }
     }
-    auto id = appliedPortConfig.id;
-    auto [it, inserted] = mPortConfigs.emplace(std::move(id), std::move(appliedPortConfig));
-    LOG_ALWAYS_FATAL_IF(!inserted, "%s: port config with id %d already exists",
-            __func__, it->first);
+
+    int32_t id = appliedPortConfig.id;
+    if (requestedPortConfig.id != 0 && requestedPortConfig.id != id) {
+        LOG_ALWAYS_FATAL("%s: requested port config id %d changed to %d", __func__,
+                requestedPortConfig.id, id);
+    }
+
+    auto [it, inserted] = mPortConfigs.insert_or_assign(std::move(id),
+            std::move(appliedPortConfig));
     *result = it;
+    *created = inserted;
+    return OK;
+}
+
+status_t DeviceHalAidl::filterAndRetrieveBtA2dpParameters(
+        AudioParameter &keys, AudioParameter *result) {
+    TIME_CHECK();
+    if (String8 key = String8(AudioParameter::keyReconfigA2dpSupported); keys.containsKey(key)) {
+        keys.remove(key);
+        bool supports;
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                        mBluetoothA2dp->supportsOffloadReconfiguration(&supports)));
+        result->addInt(key, supports ? 1 : 0);
+    }
+    return OK;
+}
+
+status_t DeviceHalAidl::filterAndUpdateBtA2dpParameters(AudioParameter &parameters) {
+    TIME_CHECK();
+    std::optional<bool> a2dpEnabled;
+    std::optional<std::vector<VendorParameter>> reconfigureOffload;
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyBtA2dpSuspended),
+                    [&a2dpEnabled](const String8& trueOrFalse) {
+                        if (trueOrFalse == AudioParameter::valueTrue) {
+                            a2dpEnabled = false;  // 'suspended' == true
+                            return OK;
+                        } else if (trueOrFalse == AudioParameter::valueFalse) {
+                            a2dpEnabled = true;  // 'suspended' == false
+                            return OK;
+                        }
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyBtA2dpSuspended, trueOrFalse.c_str());
+                        return BAD_VALUE;
+                    }));
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyReconfigA2dp),
+                    [&](const String8& value) -> status_t {
+                        if (mVendorExt != nullptr) {
+                            std::vector<VendorParameter> result;
+                            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                                    mVendorExt->parseBluetoothA2dpReconfigureOffload(
+                                            std::string(value.c_str()), &result)));
+                            reconfigureOffload = std::move(result);
+                        } else {
+                            reconfigureOffload = std::vector<VendorParameter>();
+                        }
+                        return OK;
+                    }));
+    if (mBluetoothA2dp != nullptr && a2dpEnabled.has_value()) {
+        return statusTFromBinderStatus(mBluetoothA2dp->setEnabled(a2dpEnabled.value()));
+    }
+    if (mBluetoothA2dp != nullptr && reconfigureOffload.has_value()) {
+        return statusTFromBinderStatus(mBluetoothA2dp->reconfigureOffload(
+                        reconfigureOffload.value()));
+    }
+    return OK;
+}
+
+status_t DeviceHalAidl::filterAndUpdateBtHfpParameters(AudioParameter &parameters) {
+    TIME_CHECK();
+    IBluetooth::HfpConfig hfpConfig;
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyBtHfpEnable),
+                    [&hfpConfig](const String8& trueOrFalse) {
+                        if (trueOrFalse == AudioParameter::valueTrue) {
+                            hfpConfig.isEnabled = Boolean{ .value = true };
+                            return OK;
+                        } else if (trueOrFalse == AudioParameter::valueFalse) {
+                            hfpConfig.isEnabled = Boolean{ .value = false };
+                            return OK;
+                        }
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyBtHfpEnable, trueOrFalse.c_str());
+                        return BAD_VALUE;
+                    }));
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                    parameters, String8(AudioParameter::keyBtHfpSamplingRate),
+                    [&hfpConfig](int sampleRate) {
+                        return sampleRate > 0 ?
+                                hfpConfig.sampleRate = Int{ .value = sampleRate }, OK : BAD_VALUE;
+                    }));
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                    parameters, String8(AudioParameter::keyBtHfpVolume),
+                    [&hfpConfig](int volume0to15) {
+                        if (volume0to15 >= 0 && volume0to15 <= 15) {
+                            hfpConfig.volume = Float{ .value = volume0to15 / 15.0f };
+                            return OK;
+                        }
+                        return BAD_VALUE;
+                    }));
+    if (mBluetooth != nullptr && hfpConfig != IBluetooth::HfpConfig{}) {
+        IBluetooth::HfpConfig newHfpConfig;
+        return statusTFromBinderStatus(mBluetooth->setHfpConfig(hfpConfig, &newHfpConfig));
+    }
+    return OK;
+}
+
+status_t DeviceHalAidl::filterAndUpdateBtLeParameters(AudioParameter &parameters) {
+    TIME_CHECK();
+    std::optional<bool> leEnabled;
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyBtLeSuspended),
+                    [&leEnabled](const String8& trueOrFalse) {
+                        if (trueOrFalse == AudioParameter::valueTrue) {
+                            leEnabled = false;  // 'suspended' == true
+                            return OK;
+                        } else if (trueOrFalse == AudioParameter::valueFalse) {
+                            leEnabled = true;  // 'suspended' == false
+                            return OK;
+                        }
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyBtLeSuspended, trueOrFalse.c_str());
+                        return BAD_VALUE;
+                    }));
+    if (mBluetoothLe != nullptr && leEnabled.has_value()) {
+        return statusTFromBinderStatus(mBluetoothLe->setEnabled(leEnabled.value()));
+    }
+    return OK;
+}
+
+status_t DeviceHalAidl::filterAndUpdateBtScoParameters(AudioParameter &parameters) {
+    TIME_CHECK();
+    IBluetooth::ScoConfig scoConfig;
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyBtSco),
+                    [&scoConfig](const String8& onOrOff) {
+                        if (onOrOff == AudioParameter::valueOn) {
+                            scoConfig.isEnabled = Boolean{ .value = true };
+                            return OK;
+                        } else if (onOrOff == AudioParameter::valueOff) {
+                            scoConfig.isEnabled = Boolean{ .value = false };
+                            return OK;
+                        }
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyBtSco, onOrOff.c_str());
+                        return BAD_VALUE;
+                    }));
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyBtScoHeadsetName),
+                    [&scoConfig](const String8& name) {
+                        scoConfig.debugName = name;
+                        return OK;
+                    }));
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyBtNrec),
+                    [&scoConfig](const String8& onOrOff) {
+                        if (onOrOff == AudioParameter::valueOn) {
+                            scoConfig.isNrecEnabled = Boolean{ .value = true };
+                            return OK;
+                        } else if (onOrOff == AudioParameter::valueOff) {
+                            scoConfig.isNrecEnabled = Boolean{ .value = false };
+                            return OK;
+                        }
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyBtNrec, onOrOff.c_str());
+                        return BAD_VALUE;
+                    }));
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyBtScoWb),
+                    [&scoConfig](const String8& onOrOff) {
+                        if (onOrOff == AudioParameter::valueOn) {
+                            scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO_WB;
+                            return OK;
+                        } else if (onOrOff == AudioParameter::valueOff) {
+                            scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO;
+                            return OK;
+                        }
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyBtScoWb, onOrOff.c_str());
+                        return BAD_VALUE;
+                    }));
+    if (mBluetooth != nullptr && scoConfig != IBluetooth::ScoConfig{}) {
+        IBluetooth::ScoConfig newScoConfig;
+        return statusTFromBinderStatus(mBluetooth->setScoConfig(scoConfig, &newScoConfig));
+    }
+    return OK;
+}
+
+status_t DeviceHalAidl::filterAndUpdateScreenParameters(AudioParameter &parameters) {
+    TIME_CHECK();
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyScreenState),
+                    [&](const String8& onOrOff) -> status_t {
+                        std::optional<bool> isTurnedOn;
+                        if (onOrOff == AudioParameter::valueOn) {
+                            isTurnedOn = true;
+                        } else if (onOrOff == AudioParameter::valueOff) {
+                            isTurnedOn = false;
+                        }
+                        if (!isTurnedOn.has_value()) {
+                            ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                    AudioParameter::keyScreenState, onOrOff.c_str());
+                            return BAD_VALUE;
+                        }
+                        return statusTFromBinderStatus(
+                                mModule->updateScreenState(isTurnedOn.value()));
+                    }));
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                    parameters, String8(AudioParameter::keyScreenRotation),
+            [&](int rotationDegrees) -> status_t {
+                IModule::ScreenRotation rotation;
+                switch (rotationDegrees) {
+                    case 0: rotation = IModule::ScreenRotation::DEG_0; break;
+                    case 90: rotation = IModule::ScreenRotation::DEG_90; break;
+                    case 180: rotation = IModule::ScreenRotation::DEG_180; break;
+                    case 270: rotation = IModule::ScreenRotation::DEG_270; break;
+                    default:
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value %d",
+                                AudioParameter::keyScreenRotation, rotationDegrees);
+                        return BAD_VALUE;
+                }
+                return statusTFromBinderStatus(mModule->updateScreenRotation(rotation));
+            }));
     return OK;
 }
 
@@ -862,7 +1401,7 @@
     return OK;
 }
 
-status_t DeviceHalAidl::findOrCreatePortConfig(const AudioDevice& device,
+status_t DeviceHalAidl::findOrCreatePortConfig(const AudioDevice& device, const AudioConfig* config,
         AudioPortConfig* portConfig, bool* created) {
     auto portConfigIt = findPortConfig(device);
     if (portConfigIt == mPortConfigs.end()) {
@@ -874,8 +1413,11 @@
         }
         AudioPortConfig requestedPortConfig;
         requestedPortConfig.portId = portsIt->first;
-        RETURN_STATUS_IF_ERROR(createPortConfig(requestedPortConfig, &portConfigIt));
-        *created = true;
+        if (config != nullptr) {
+            setPortConfigFromConfig(&requestedPortConfig, *config);
+        }
+        RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig, &portConfigIt,
+                created));
     } else {
         *created = false;
     }
@@ -885,7 +1427,8 @@
 
 status_t DeviceHalAidl::findOrCreatePortConfig(
         const AudioConfig& config, const std::optional<AudioIoFlags>& flags, int32_t ioHandle,
-        AudioSource source, AudioPortConfig* portConfig, bool* created) {
+        AudioSource source, const std::set<int32_t>& destinationPortIds,
+        AudioPortConfig* portConfig, bool* created) {
     // These flags get removed one by one in this order when retrying port finding.
     static const std::vector<AudioInputFlags> kOptionalInputFlags{
         AudioInputFlags::FAST, AudioInputFlags::RAW };
@@ -893,7 +1436,7 @@
     if (portConfigIt == mPortConfigs.end() && flags.has_value()) {
         auto optionalInputFlagsIt = kOptionalInputFlags.begin();
         AudioIoFlags matchFlags = flags.value();
-        auto portsIt = findPort(config, matchFlags);
+        auto portsIt = findPort(config, matchFlags, destinationPortIds);
         while (portsIt == mPorts.end() && matchFlags.getTag() == AudioIoFlags::Tag::input
                 && optionalInputFlagsIt != kOptionalInputFlags.end()) {
             if (!isBitPositionFlagSet(
@@ -903,7 +1446,7 @@
             }
             matchFlags.set<AudioIoFlags::Tag::input>(matchFlags.get<AudioIoFlags::Tag::input>() &
                     ~makeBitPositionFlagMask(*optionalInputFlagsIt++));
-            portsIt = findPort(config, matchFlags);
+            portsIt = findPort(config, matchFlags, destinationPortIds);
             ALOGI("%s: mix port for config %s, flags %s was not found in the module %s, "
                     "retried with flags %s", __func__, config.toString().c_str(),
                     flags.value().toString().c_str(), mInstance.c_str(),
@@ -924,22 +1467,37 @@
             requestedPortConfig.ext.get<AudioPortExt::Tag::mix>().usecase =
                     AudioPortMixExtUseCase::make<AudioPortMixExtUseCase::Tag::source>(source);
         }
-        RETURN_STATUS_IF_ERROR(createPortConfig(requestedPortConfig, &portConfigIt));
-        *created = true;
+        RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig, &portConfigIt,
+                created));
     } else if (!flags.has_value()) {
         ALOGW("%s: mix port config for %s, handle %d not found in the module %s, "
                 "and was not created as flags are not specified",
                 __func__, config.toString().c_str(), ioHandle, mInstance.c_str());
         return BAD_VALUE;
     } else {
-        *created = false;
+        AudioPortConfig requestedPortConfig = portConfigIt->second;
+        if (requestedPortConfig.ext.getTag() == AudioPortExt::Tag::mix) {
+            AudioPortMixExt& mixExt = requestedPortConfig.ext.get<AudioPortExt::Tag::mix>();
+            if (mixExt.usecase.getTag() == AudioPortMixExtUseCase::Tag::source &&
+                    source != AudioSource::SYS_RESERVED_INVALID) {
+                mixExt.usecase.get<AudioPortMixExtUseCase::Tag::source>() = source;
+            }
+        }
+
+        if (requestedPortConfig != portConfigIt->second) {
+            RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig, &portConfigIt,
+                    created));
+        } else {
+            *created = false;
+        }
     }
     *portConfig = portConfigIt->second;
     return OK;
 }
 
 status_t DeviceHalAidl::findOrCreatePortConfig(
-        const AudioPortConfig& requestedPortConfig, AudioPortConfig* portConfig, bool* created) {
+        const AudioPortConfig& requestedPortConfig, const std::set<int32_t>& destinationPortIds,
+        AudioPortConfig* portConfig, bool* created) {
     using Tag = AudioPortExt::Tag;
     if (requestedPortConfig.ext.getTag() == Tag::mix) {
         if (const auto& p = requestedPortConfig;
@@ -956,10 +1514,12 @@
                 requestedPortConfig.ext.get<Tag::mix>().usecase.
                 get<AudioPortMixExtUseCase::Tag::source>() : AudioSource::SYS_RESERVED_INVALID;
         return findOrCreatePortConfig(config, requestedPortConfig.flags,
-                requestedPortConfig.ext.get<Tag::mix>().handle, source, portConfig, created);
+                requestedPortConfig.ext.get<Tag::mix>().handle, source, destinationPortIds,
+                portConfig, created);
     } else if (requestedPortConfig.ext.getTag() == Tag::device) {
         return findOrCreatePortConfig(
-                requestedPortConfig.ext.get<Tag::device>().device, portConfig, created);
+                requestedPortConfig.ext.get<Tag::device>().device, nullptr /*config*/,
+                portConfig, created);
     }
     ALOGW("%s: unsupported audio port config: %s",
             __func__, requestedPortConfig.toString().c_str());
@@ -989,20 +1549,56 @@
 }
 
 DeviceHalAidl::Ports::iterator DeviceHalAidl::findPort(
-            const AudioConfig& config, const AudioIoFlags& flags) {
+            const AudioConfig& config, const AudioIoFlags& flags,
+            const std::set<int32_t>& destinationPortIds) {
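+    // Config fields left unspecified (default format, 'none' channel mask, zero sample rate)
+    // act as wildcards when matching a port's profiles.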
+    auto belongsToProfile = [&config](const AudioProfile& prof) {
+        return (isDefaultAudioFormat(config.base.format) || prof.format == config.base.format) &&
+                (config.base.channelMask.getTag() == AudioChannelLayout::none ||
+                        std::find(prof.channelMasks.begin(), prof.channelMasks.end(),
+                                config.base.channelMask) != prof.channelMasks.end()) &&
+                (config.base.sampleRate == 0 ||
+                        std::find(prof.sampleRates.begin(), prof.sampleRates.end(),
+                                config.base.sampleRate) != prof.sampleRates.end());
+    };
+    static const std::vector<AudioOutputFlags> kOptionalOutputFlags{AudioOutputFlags::BIT_PERFECT};
+    int optionalFlags = 0;
+    auto flagMatches = [&flags, &optionalFlags](const AudioIoFlags& portFlags) {
+        // Ports should be able to match if the optional flags are not requested.
+        return portFlags == flags ||
+               (portFlags.getTag() == AudioIoFlags::Tag::output &&
+                        AudioIoFlags::make<AudioIoFlags::Tag::output>(
+                                portFlags.get<AudioIoFlags::Tag::output>() &
+                                        ~optionalFlags) == flags);
+    };
     auto matcher = [&](const auto& pair) {
         const auto& p = pair.second;
         return p.ext.getTag() == AudioPortExt::Tag::mix &&
-                p.flags == flags &&
-                std::find_if(p.profiles.begin(), p.profiles.end(),
-                        [&](const auto& prof) {
-                            return prof.format == config.base.format &&
-                                    std::find(prof.channelMasks.begin(), prof.channelMasks.end(),
-                                            config.base.channelMask) != prof.channelMasks.end() &&
-                                    std::find(prof.sampleRates.begin(), prof.sampleRates.end(),
-                                            config.base.sampleRate) != prof.sampleRates.end();
-                        }) != p.profiles.end(); };
-    return std::find_if(mPorts.begin(), mPorts.end(), matcher);
+                flagMatches(p.flags) &&
+                (destinationPortIds.empty() ||
+                        std::any_of(destinationPortIds.begin(), destinationPortIds.end(),
+                                [&](const int32_t destId) { return mRoutingMatrix.count(
+                                            std::make_pair(p.id, destId)) != 0; })) &&
+                (p.profiles.empty() ||
+                        std::find_if(p.profiles.begin(), p.profiles.end(), belongsToProfile) !=
+                        p.profiles.end()); };
+    auto result = std::find_if(mPorts.begin(), mPorts.end(), matcher);
+    if (result == mPorts.end() && flags.getTag() == AudioIoFlags::Tag::output) {
+        auto optionalOutputFlagsIt = kOptionalOutputFlags.begin();
+        while (result == mPorts.end() && optionalOutputFlagsIt != kOptionalOutputFlags.end()) {
+            if (isBitPositionFlagSet(
+                        flags.get<AudioIoFlags::Tag::output>(), *optionalOutputFlagsIt)) {
+                // If the flag is set by the request, it must be matched.
+                ++optionalOutputFlagsIt;
+                continue;
+            }
+            optionalFlags |= makeBitPositionFlagMask(*optionalOutputFlagsIt++);
+            result = std::find_if(mPorts.begin(), mPorts.end(), matcher);
+            ALOGI("%s: port for config %s, flags %s was not found in the module %s, "
+                  "retried with excluding optional flags %#x", __func__, config.toString().c_str(),
+                    flags.toString().c_str(), mInstance.c_str(), optionalFlags);
+        }
+    }
+    return result;
 }
 
 DeviceHalAidl::PortConfigs::iterator DeviceHalAidl::findPortConfig(const AudioDevice& device) {
@@ -1026,34 +1622,7 @@
                         (!flags.has_value() || p.flags.value() == flags.value()) &&
                         p.ext.template get<Tag::mix>().handle == ioHandle; });
 }
-/*
-DeviceHalAidl::PortConfigs::iterator DeviceHalAidl::findPortConfig(
-        const AudioPortConfig& portConfig) {
-    using Tag = AudioPortExt::Tag;
-    if (portConfig.ext.getTag() == Tag::mix) {
-        return std::find_if(mPortConfigs.begin(), mPortConfigs.end(),
-                [&](const auto& pair) {
-                    const auto& p = pair.second;
-                    LOG_ALWAYS_FATAL_IF(p.ext.getTag() == Tag::mix &&
-                            !p.sampleRate.has_value() || !p.channelMask.has_value() ||
-                            !p.format.has_value() || !p.flags.has_value(),
-                            "%s: stored mix port config is not fully specified: %s",
-                            __func__, p.toString().c_str());
-                    return p.ext.getTag() == Tag::mix &&
-                            (!portConfig.sampleRate.has_value() ||
-                                    p.sampleRate == portConfig.sampleRate) &&
-                            (!portConfig.channelMask.has_value() ||
-                                    p.channelMask == portConfig.channelMask) &&
-                            (!portConfig.format.has_value() || p.format == portConfig.format) &&
-                            (!portConfig.flags.has_value() || p.flags == portConfig.flags) &&
-                            p.ext.template get<Tag::mix>().handle ==
-                            portConfig.ext.template get<Tag::mix>().handle; });
-    } else if (portConfig.ext.getTag() == Tag::device) {
-        return findPortConfig(portConfig.ext.get<Tag::device>().device);
-    }
-    return mPortConfigs.end();
-}
-*/
+
 void DeviceHalAidl::resetPatch(int32_t patchId) {
     if (auto it = mPatches.find(patchId); it != mPatches.end()) {
         mPatches.erase(it);
@@ -1081,6 +1650,58 @@
     ALOGE("%s: port config id %d not found", __func__, portConfigId);
 }
 
+void DeviceHalAidl::resetUnusedPatches() {
+    // Since patches can be created independently of streams via 'createAudioPatch',
+    // here we only clean up patches for released streams.
+    for (auto it = mStreams.begin(); it != mStreams.end(); ) {
+        if (auto streamSp = it->first.promote(); streamSp) {
+            ++it;
+        } else {
+            resetPatch(it->second);
+            it = mStreams.erase(it);
+        }
+    }
+}
+
+void DeviceHalAidl::resetUnusedPatchesAndPortConfigs() {
+    resetUnusedPatches();
+    resetUnusedPortConfigs();
+}
+
+void DeviceHalAidl::resetUnusedPortConfigs() {
+    // The assumption is that port configs are used to create patches
+    // (or to open streams, but that involves creation of patches, too). Thus,
+    // orphaned port configs can and should be reset.
+    std::set<int32_t> portConfigIds;
+    std::transform(mPortConfigs.begin(), mPortConfigs.end(),
+            std::inserter(portConfigIds, portConfigIds.end()),
+            [](const auto& pcPair) { return pcPair.first; });
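+    // Keep port configs that are referenced by patches or that existed at initialization time.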
+    for (const auto& p : mPatches) {
+        for (int32_t id : p.second.sourcePortConfigIds) portConfigIds.erase(id);
+        for (int32_t id : p.second.sinkPortConfigIds) portConfigIds.erase(id);
+    }
+    for (int32_t id : mInitialPortConfigIds) {
+        portConfigIds.erase(id);
+    }
+    for (int32_t id : portConfigIds) resetPortConfig(id);
+}
+
+status_t DeviceHalAidl::updateRoutes() {
+    TIME_CHECK();
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(mModule->getAudioRoutes(&mRoutes)));
+    ALOGW_IF(mRoutes.empty(), "%s: module %s returned an empty list of audio routes",
+            __func__, mInstance.c_str());
+    mRoutingMatrix.clear();
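+    // Store reachability in both directions so either port of a pair can be used as the key.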
+    for (const auto& r : mRoutes) {
+        for (auto portId : r.sourcePortIds) {
+            mRoutingMatrix.emplace(r.sinkPortId, portId);
+            mRoutingMatrix.emplace(portId, r.sinkPortId);
+        }
+    }
+    return OK;
+}
+
 void DeviceHalAidl::clearCallbacks(void* cookie) {
     std::lock_guard l(mLock);
     mCallbacks.erase(cookie);
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index 9687ec9..74a8b51 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -20,11 +20,13 @@
 #include <set>
 #include <vector>
 
+#include <aidl/android/media/audio/IHalAdapterVendorExtension.h>
 #include <aidl/android/hardware/audio/core/BpModule.h>
 #include <aidl/android/hardware/audio/core/sounddose/BpSoundDose.h>
 #include <android-base/thread_annotations.h>
 #include <media/audiohal/DeviceHalInterface.h>
 #include <media/audiohal/EffectHalInterface.h>
+#include <media/audiohal/StreamHalInterface.h>
 
 #include "ConversionHelperAidl.h"
 
@@ -68,6 +70,12 @@
 class DeviceHalAidl : public DeviceHalInterface, public ConversionHelperAidl,
                       public CallbackBroker, public MicrophoneInfoProvider {
   public:
+    status_t getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) override;
+
+    status_t getAudioRoutes(std::vector<media::AudioRoute> *routes) override;
+
+    status_t getSupportedModes(std::vector<media::audio::common::AudioMode> *modes) override;
+
     // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
     status_t getSupportedDevices(uint32_t *devices) override;
 
@@ -139,11 +147,13 @@
     status_t setAudioPortConfig(const struct audio_port_config* config) override;
 
     // List microphones
-    status_t getMicrophones(std::vector<audio_microphone_characteristic_t>* microphones);
+    status_t getMicrophones(std::vector<audio_microphone_characteristic_t>* microphones) override;
 
-    status_t addDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
+    status_t addDeviceEffect(
+            const struct audio_port_config *device, sp<EffectHalInterface> effect) override;
 
-    status_t removeDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
+    status_t removeDeviceEffect(
+            const struct audio_port_config *device, sp<EffectHalInterface> effect) override;
 
     status_t getMmapPolicyInfos(media::audio::common::AudioMMapPolicyType policyType __unused,
                                 std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos
@@ -155,13 +165,19 @@
 
     error::Result<audio_hw_sync_t> getHwAvSync() override;
 
-    status_t dump(int __unused, const Vector<String16>& __unused) override;
-
     int32_t supportsBluetoothVariableLatency(bool* supports __unused) override;
 
     status_t getSoundDoseInterface(const std::string& module,
                                    ::ndk::SpAIBinder* soundDoseBinder) override;
 
+    status_t prepareToDisconnectExternalDevice(const struct audio_port_v7 *port) override;
+
+    status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
+
+    status_t setSimulateDeviceConnections(bool enabled) override;
+
+    status_t dump(int __unused, const Vector<String16>& __unused) override;
+
   private:
     friend class sp<DeviceHalAidl>;
 
@@ -180,13 +196,18 @@
     using PortConfigs = std::map<int32_t /*port config ID*/,
             ::aidl::android::media::audio::common::AudioPortConfig>;
     using Ports = std::map<int32_t /*port ID*/, ::aidl::android::media::audio::common::AudioPort>;
+    using Routes = std::vector<::aidl::android::hardware::audio::core::AudioRoute>;
+    // Answers the question "is port ID 'first' reachable from port ID 'second'?"
+    // A set of pairs rather than a map, since lookups know both IDs; the matrix is symmetric.
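+    // Lookup example: mRoutingMatrix.count({mixPortId, devicePortId}) != 0 tests routability.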
+    using RoutingMatrix = std::set<std::pair<int32_t, int32_t>>;
+    using Streams = std::map<wp<StreamHalInterface>, int32_t /*patch ID*/>;
     class Cleanups;
 
     // Must not be constructed directly by clients.
     DeviceHalAidl(
             const std::string& instance,
-            const std::shared_ptr<::aidl::android::hardware::audio::core::IModule>& module)
-            : ConversionHelperAidl("DeviceHalAidl"), mInstance(instance), mModule(module) {}
+            const std::shared_ptr<::aidl::android::hardware::audio::core::IModule>& module,
+            const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension>& vext);
 
     ~DeviceHalAidl() override = default;
 
@@ -194,9 +215,15 @@
             const ::aidl::android::media::audio::common::AudioPort& p);
     bool audioDeviceMatches(const ::aidl::android::media::audio::common::AudioDevice& device,
             const ::aidl::android::media::audio::common::AudioPortConfig& p);
-    status_t createPortConfig(
+    status_t createOrUpdatePortConfig(
             const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
-            PortConfigs::iterator* result);
+            PortConfigs::iterator* result, bool *created);
+    status_t filterAndRetrieveBtA2dpParameters(AudioParameter &keys, AudioParameter *result);
+    status_t filterAndUpdateBtA2dpParameters(AudioParameter &parameters);
+    status_t filterAndUpdateBtHfpParameters(AudioParameter &parameters);
+    status_t filterAndUpdateBtLeParameters(AudioParameter &parameters);
+    status_t filterAndUpdateBtScoParameters(AudioParameter &parameters);
+    status_t filterAndUpdateScreenParameters(AudioParameter &parameters);
     status_t findOrCreatePatch(
         const std::set<int32_t>& sourcePortConfigIds,
         const std::set<int32_t>& sinkPortConfigIds,
@@ -206,6 +233,7 @@
         ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
     status_t findOrCreatePortConfig(
             const ::aidl::android::media::audio::common::AudioDevice& device,
+            const ::aidl::android::media::audio::common::AudioConfig* config,
             ::aidl::android::media::audio::common::AudioPortConfig* portConfig,
             bool* created);
     status_t findOrCreatePortConfig(
@@ -213,25 +241,25 @@
             const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
             int32_t ioHandle,
             ::aidl::android::media::audio::common::AudioSource aidlSource,
+            const std::set<int32_t>& destinationPortIds,
             ::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
     status_t findOrCreatePortConfig(
         const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
+        const std::set<int32_t>& destinationPortIds,
         ::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
     Patches::iterator findPatch(const std::set<int32_t>& sourcePortConfigIds,
             const std::set<int32_t>& sinkPortConfigIds);
     Ports::iterator findPort(const ::aidl::android::media::audio::common::AudioDevice& device);
     Ports::iterator findPort(
             const ::aidl::android::media::audio::common::AudioConfig& config,
-            const ::aidl::android::media::audio::common::AudioIoFlags& flags);
+            const ::aidl::android::media::audio::common::AudioIoFlags& flags,
+            const std::set<int32_t>& destinationPortIds);
     PortConfigs::iterator findPortConfig(
             const ::aidl::android::media::audio::common::AudioDevice& device);
     PortConfigs::iterator findPortConfig(
             const ::aidl::android::media::audio::common::AudioConfig& config,
             const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
             int32_t ioHandle);
-    // Currently unused but may be useful for implementing setAudioPortConfig
-    // PortConfigs::iterator findPortConfig(
-    //         const ::aidl::android::media::audio::common::AudioPortConfig& portConfig);
     status_t prepareToOpenStream(
         int32_t aidlHandle,
         const ::aidl::android::media::audio::common::AudioDevice& aidlDevice,
@@ -241,9 +269,13 @@
         Cleanups* cleanups,
         ::aidl::android::media::audio::common::AudioConfig* aidlConfig,
         ::aidl::android::media::audio::common::AudioPortConfig* mixPortConfig,
-        int32_t* nominalLatency);
+        ::aidl::android::hardware::audio::core::AudioPatch* aidlPatch);
     void resetPatch(int32_t patchId);
     void resetPortConfig(int32_t portConfigId);
+    void resetUnusedPatches();
+    void resetUnusedPatchesAndPortConfigs();
+    void resetUnusedPortConfigs();
+    status_t updateRoutes();
 
     // CallbackBroker implementation
     void clearCallbacks(void* cookie) override;
@@ -265,16 +297,26 @@
 
     const std::string mInstance;
     const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
+    const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
+    const std::shared_ptr<::aidl::android::hardware::audio::core::ITelephony> mTelephony;
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetooth> mBluetooth;
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothA2dp> mBluetoothA2dp;
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothLe> mBluetoothLe;
     std::shared_ptr<::aidl::android::hardware::audio::core::sounddose::ISoundDose>
         mSoundDose = nullptr;
     Ports mPorts;
     int32_t mDefaultInputPortId = -1;
     int32_t mDefaultOutputPortId = -1;
     PortConfigs mPortConfigs;
+    std::set<int32_t> mInitialPortConfigIds;
     Patches mPatches;
+    Routes mRoutes;
+    RoutingMatrix mRoutingMatrix;
+    Streams mStreams;
     Microphones mMicrophones;
     std::mutex mLock;
     std::map<void*, Callbacks> mCallbacks GUARDED_BY(mLock);
+    std::set<audio_port_handle_t> mDeviceDisconnectionNotified;
 };
 
 } // namespace android
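
Note on the RoutingMatrix type introduced in this header: reachability between two port IDs is kept as a symmetric set of ID pairs rather than a map, since a lookup always knows both IDs. A minimal standalone sketch of how such a matrix could be built from routes and queried (the Route shape and the function names below are illustrative assumptions, not part of this change):

    // Illustrative sketch only; not the DeviceHalAidl implementation.
    #include <cstdint>
    #include <set>
    #include <utility>
    #include <vector>

    struct Route { std::vector<int32_t> sourcePortIds; int32_t sinkPortId; };  // assumed shape
    using RoutingMatrix = std::set<std::pair<int32_t, int32_t>>;

    RoutingMatrix makeRoutingMatrix(const std::vector<Route>& routes) {
        RoutingMatrix matrix;
        for (const auto& route : routes) {
            for (int32_t source : route.sourcePortIds) {
                // Insert both orderings so that lookups stay symmetric.
                matrix.emplace(source, route.sinkPortId);
                matrix.emplace(route.sinkPortId, source);
            }
        }
        return matrix;
    }

    bool isReachable(const RoutingMatrix& matrix, int32_t first, int32_t second) {
        return matrix.count({first, second}) > 0;
    }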
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index e0b1afb..f96d419 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -32,6 +32,7 @@
 #include <util/CoreUtils.h>
 
 #include "DeviceHalHidl.h"
+#include "EffectHalHidl.h"
 #include "ParameterUtils.h"
 #include "StreamHalHidl.h"
 
@@ -104,6 +105,20 @@
     }
 }
 
+status_t DeviceHalHidl::getAudioPorts(
+        std::vector<media::audio::common::AudioPort> *ports __unused) {
+    return INVALID_OPERATION;
+}
+
+status_t DeviceHalHidl::getAudioRoutes(std::vector<media::AudioRoute> *routes __unused) {
+    return INVALID_OPERATION;
+}
+
+status_t DeviceHalHidl::getSupportedModes(
+        std::vector<media::audio::common::AudioMode> *modes __unused) {
+    return INVALID_OPERATION;
+}
+
 status_t DeviceHalHidl::getSupportedDevices(uint32_t*) {
     // Obsolete.
     return INVALID_OPERATION;
@@ -430,6 +445,7 @@
 
 template <typename HalPort>
 status_t DeviceHalHidl::getAudioPortImpl(HalPort *port) {
+    using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::AudioPort;
     if (mDevice == 0) return NO_INIT;
     AudioPort hidlPort;
     HidlUtils::audioPortFromHal(*port, &hidlPort);
@@ -472,6 +488,7 @@
 }
 
 status_t DeviceHalHidl::setAudioPortConfig(const struct audio_port_config *config) {
+    using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::AudioPortConfig;
     TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     AudioPortConfig hidlConfig;
@@ -507,37 +524,59 @@
 
 #if MAJOR_VERSION >= 6
 status_t DeviceHalHidl::addDeviceEffect(
-        audio_port_handle_t device, sp<EffectHalInterface> effect) {
+        const struct audio_port_config *device, sp<EffectHalInterface> effect) {
     TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
+    auto hidlEffect = sp<effect::EffectHalHidl>::cast(effect);
     return processReturn("addDeviceEffect", mDevice->addDeviceEffect(
-            static_cast<AudioPortHandle>(device), effect->effectId()));
+            static_cast<AudioPortHandle>(device->id), hidlEffect->effectId()));
 }
 #else
 status_t DeviceHalHidl::addDeviceEffect(
-        audio_port_handle_t device __unused, sp<EffectHalInterface> effect __unused) {
+        const struct audio_port_config *device __unused, sp<EffectHalInterface> effect __unused) {
     return INVALID_OPERATION;
 }
 #endif
 
 #if MAJOR_VERSION >= 6
 status_t DeviceHalHidl::removeDeviceEffect(
-        audio_port_handle_t device, sp<EffectHalInterface> effect) {
+        const struct audio_port_config *device, sp<EffectHalInterface> effect) {
     TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
+    auto hidlEffect = sp<effect::EffectHalHidl>::cast(effect);
     return processReturn("removeDeviceEffect", mDevice->removeDeviceEffect(
-            static_cast<AudioPortHandle>(device), effect->effectId()));
+            static_cast<AudioPortHandle>(device->id), hidlEffect->effectId()));
 }
 #else
 status_t DeviceHalHidl::removeDeviceEffect(
-        audio_port_handle_t device __unused, sp<EffectHalInterface> effect __unused) {
+        const struct audio_port_config *device __unused, sp<EffectHalInterface> effect __unused) {
     return INVALID_OPERATION;
 }
 #endif
 
+status_t DeviceHalHidl::prepareToDisconnectExternalDevice(const struct audio_port_v7* port) {
+    // The HIDL HAL has no API for notifying the HAL to prepare for a device connection
+    // state change, so call `setConnectedState` directly.
+    const status_t status = setConnectedState(port, false /*connected*/);
+    if (status == NO_ERROR) {
+        // Cache the port id so that it won't disconnect twice.
+        mDeviceDisconnectionNotified.insert(port->id);
+    }
+    return status;
+}
+
 status_t DeviceHalHidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+    using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::AudioPort;
     TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
+    if (!connected && mDeviceDisconnectionNotified.erase(port->id) > 0) {
+        // On device disconnection, APM first calls `prepareToDisconnectExternalDevice` and
+        // then calls `setConnectedState`. However, the HIDL HAL has no API for
+        // `prepareToDisconnectExternalDevice`, so this wrapper already called `setConnectedState`
+        // from there. Do not call into the HAL again if that previous call succeeded. Also
+        // erase the cached entry here so the cache does not grow without bound over a long run.
+        return NO_ERROR;
+    }
 #if MAJOR_VERSION == 7 && MINOR_VERSION == 1
     if (supportsSetConnectedState7_1) {
         AudioPort hidlPort;
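
The two methods above cooperate to avoid disconnecting the same device twice: prepareToDisconnectExternalDevice performs the disconnect (since HIDL has no dedicated "prepare" API) and caches the port ID, and a later setConnectedState(port, false) becomes a no-op when it finds and erases that ID. A condensed, standalone sketch of this deduplication pattern with simplified, hypothetical types (an illustration, not the HAL wrapper itself):

    // Simplified sketch of the disconnect-deduplication pattern (hypothetical types).
    #include <cstdint>
    #include <set>

    class DisconnectDeduper {
      public:
        // Called first by the policy manager; disconnects and remembers the port ID.
        bool prepareToDisconnect(int32_t portId) {
            const bool ok = setConnectedInHal(portId, false);
            if (ok) mNotified.insert(portId);
            return ok;
        }
        // Called afterwards; skips the HAL call if the prepare step already disconnected the port.
        bool setConnected(int32_t portId, bool connected) {
            if (!connected && mNotified.erase(portId) > 0) {
                return true;  // already disconnected by prepareToDisconnect()
            }
            return setConnectedInHal(portId, connected);
        }
      private:
        bool setConnectedInHal(int32_t, bool) { return true; }  // stand-in for the real HAL call
        std::set<int32_t> mNotified;
    };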
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 30fbd6d..989c1f5 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -29,6 +29,12 @@
 class DeviceHalHidl : public DeviceHalInterface, public CoreConversionHelperHidl
 {
   public:
+    status_t getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) override;
+
+    status_t getAudioRoutes(std::vector<media::AudioRoute> *routes) override;
+
+    status_t getSupportedModes(std::vector<media::audio::common::AudioMode> *modes) override;
+
     // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
     status_t getSupportedDevices(uint32_t *devices) override;
 
@@ -99,8 +105,10 @@
     // List microphones
     status_t getMicrophones(std::vector<audio_microphone_characteristic_t>* microphones) override;
 
-    status_t addDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
-    status_t removeDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
+    status_t addDeviceEffect(
+            const struct audio_port_config *device, sp<EffectHalInterface> effect) override;
+    status_t removeDeviceEffect(
+            const struct audio_port_config *device, sp<EffectHalInterface> effect) override;
 
     status_t getMmapPolicyInfos(
             media::audio::common::AudioMMapPolicyType policyType __unused,
@@ -126,6 +134,11 @@
 
     status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
 
+    status_t setSimulateDeviceConnections(bool enabled __unused) override {
+        // Only supported by AIDL HALs.
+        return INVALID_OPERATION;
+    }
+
     error::Result<audio_hw_sync_t> getHwAvSync() override;
 
     status_t dump(int fd, const Vector<String16>& args) override;
@@ -133,6 +146,8 @@
     status_t getSoundDoseInterface(const std::string& module,
                                    ::ndk::SpAIBinder* soundDoseBinder) override;
 
+    status_t prepareToDisconnectExternalDevice(const struct audio_port_v7* port) override;
+
   private:
     friend class DevicesFactoryHalHidl;
     sp<::android::hardware::audio::CPP_VERSION::IDevice> mDevice;
@@ -141,6 +156,7 @@
     bool supportsSetConnectedState7_1 = true;
     class SoundDoseWrapper;
     const std::unique_ptr<SoundDoseWrapper> mSoundDoseWrapper;
+    std::set<audio_port_handle_t> mDeviceDisconnectionNotified;
 
     // Can not be constructed directly by clients.
     explicit DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device);
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
index 2eaaf5d..f00b1a0 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
@@ -14,26 +14,68 @@
  * limitations under the License.
  */
 
+#include <memory>
+
 #define LOG_TAG "DevicesFactoryHalAidl"
 //#define LOG_NDEBUG 0
 
 #include <aidl/android/hardware/audio/core/IModule.h>
 #include <android/binder_manager.h>
 #include <binder/IServiceManager.h>
-#include <memory>
+#include <media/AidlConversionNdkCpp.h>
+#include <media/AidlConversionUtil.h>
 #include <utils/Log.h>
 
 #include "DeviceHalAidl.h"
 #include "DevicesFactoryHalAidl.h"
 
-using namespace ::aidl::android::hardware::audio::core;
-using ::android::detail::AudioHalVersionInfo;
+using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::hardware::audio::core::IConfig;
+using aidl::android::hardware::audio::core::IModule;
+using aidl::android::hardware::audio::core::SurroundSoundConfig;
+using aidl::android::media::audio::common::AudioHalEngineConfig;
+using aidl::android::media::audio::IHalAdapterVendorExtension;
+using android::detail::AudioHalVersionInfo;
 
 namespace android {
 
-DevicesFactoryHalAidl::DevicesFactoryHalAidl(std::shared_ptr<IConfig> iconfig)
-    : mIConfig(std::move(iconfig)) {
-    ALOG_ASSERT(iconfig != nullptr, "Provided default IConfig service is NULL");
+namespace {
+
+ConversionResult<media::SurroundSoundConfig::SurroundFormatFamily>
+ndk2cpp_SurroundSoundConfigFormatFamily(const SurroundSoundConfig::SurroundFormatFamily& ndk) {
+    media::SurroundSoundConfig::SurroundFormatFamily cpp;
+    cpp.primaryFormat = VALUE_OR_RETURN(ndk2cpp_AudioFormatDescription(ndk.primaryFormat));
+    cpp.subFormats = VALUE_OR_RETURN(::aidl::android::convertContainer<std::vector<
+            media::audio::common::AudioFormatDescription>>(ndk.subFormats,
+                    ndk2cpp_AudioFormatDescription));
+    return cpp;
+}
+
+ConversionResult<media::SurroundSoundConfig>
+ndk2cpp_SurroundSoundConfig(const SurroundSoundConfig& ndk) {
+    media::SurroundSoundConfig cpp;
+    cpp.formatFamilies = VALUE_OR_RETURN(::aidl::android::convertContainer<std::vector<
+            media::SurroundSoundConfig::SurroundFormatFamily>>(ndk.formatFamilies,
+                    ndk2cpp_SurroundSoundConfigFormatFamily));
+    return cpp;
+}
+
+}  // namespace
+
+DevicesFactoryHalAidl::DevicesFactoryHalAidl(std::shared_ptr<IConfig> config)
+    : mConfig(std::move(config)) {
+}
+
+status_t DevicesFactoryHalAidl::getDeviceNames(std::vector<std::string> *names) {
+    if (names == nullptr) {
+        return BAD_VALUE;
+    }
+    AServiceManager_forEachDeclaredInstance(IModule::descriptor, static_cast<void*>(names),
+            [](const char* instance, void* context) {
+                if (strcmp(instance, "default") == 0) instance = "primary";
+                static_cast<decltype(names)>(context)->push_back(instance);
+            });
+    return OK;
 }
 
 // Opens a device with the specified name. To close the device, it is
@@ -42,22 +84,16 @@
     if (name == nullptr || device == nullptr) {
         return BAD_VALUE;
     }
-
     std::shared_ptr<IModule> service;
-    // FIXME: Normally we will list available HAL modules and connect to them,
-    // however currently we still get the list of module names from the config.
-    // Since the example service does not have all modules, the SM will wait
-    // for the missing ones forever.
-    if (strcmp(name, "primary") == 0 || strcmp(name, "r_submix") == 0 || strcmp(name, "usb") == 0) {
-        if (strcmp(name, "primary") == 0) name = "default";
-        auto serviceName = std::string(IModule::descriptor) + "/" + name;
-        service = IModule::fromBinder(
-                ndk::SpAIBinder(AServiceManager_waitForService(serviceName.c_str())));
-        ALOGE_IF(service == nullptr, "%s fromBinder %s failed", __func__, serviceName.c_str());
+    if (strcmp(name, "primary") == 0) name = "default";
+    auto serviceName = std::string(IModule::descriptor) + "/" + name;
+    service = IModule::fromBinder(
+            ndk::SpAIBinder(AServiceManager_waitForService(serviceName.c_str())));
+    if (service == nullptr) {
+        ALOGE("%s fromBinder %s failed", __func__, serviceName.c_str());
+        return NO_INIT;
     }
-    // If the service is a nullptr, the device will not be really functional,
-    // but will not crash either.
-    *device = sp<DeviceHalAidl>::make(name, service);
+    *device = sp<DeviceHalAidl>::make(name, service, getVendorExtension());
     return OK;
 }
 
@@ -97,18 +133,42 @@
 
 AudioHalVersionInfo DevicesFactoryHalAidl::getHalVersion() const {
     int32_t versionNumber = 0;
-    if (mIConfig != 0) {
-        if (ndk::ScopedAStatus status = mIConfig->getInterfaceVersion(&versionNumber);
-                !status.isOk()) {
-            ALOGE("%s getInterfaceVersion failed: %s", __func__, status.getDescription().c_str());
-        }
-    } else {
-        ALOGW("%s no IConfig instance", __func__);
+    if (ndk::ScopedAStatus status = mConfig->getInterfaceVersion(&versionNumber); !status.isOk()) {
+        ALOGE("%s getInterfaceVersion failed: %s", __func__, status.getDescription().c_str());
     }
     // AIDL does not have minor version, fill 0 for all versions
     return AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, versionNumber);
 }
 
+status_t DevicesFactoryHalAidl::getSurroundSoundConfig(media::SurroundSoundConfig *config) {
+    SurroundSoundConfig ndkConfig;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mConfig->getSurroundSoundConfig(&ndkConfig)));
+    *config = VALUE_OR_RETURN_STATUS(ndk2cpp_SurroundSoundConfig(ndkConfig));
+    return OK;
+}
+
+status_t DevicesFactoryHalAidl::getEngineConfig(
+        media::audio::common::AudioHalEngineConfig *config) {
+    AudioHalEngineConfig ndkConfig;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mConfig->getEngineConfig(&ndkConfig)));
+    *config = VALUE_OR_RETURN_STATUS(ndk2cpp_AudioHalEngineConfig(ndkConfig));
+    return OK;
+}
+
+std::shared_ptr<IHalAdapterVendorExtension> DevicesFactoryHalAidl::getVendorExtension() {
+    if (!mVendorExt.has_value()) {
+        auto serviceName = std::string(IHalAdapterVendorExtension::descriptor) + "/default";
+        if (AServiceManager_isDeclared(serviceName.c_str())) {
+            mVendorExt = std::shared_ptr<IHalAdapterVendorExtension>(
+                    IHalAdapterVendorExtension::fromBinder(ndk::SpAIBinder(
+                                    AServiceManager_waitForService(serviceName.c_str()))));
+        } else {
+            mVendorExt = nullptr;
+        }
+    }
+    return mVendorExt.value();
+}
+
 // Main entry-point to the shared library.
 extern "C" __attribute__((visibility("default"))) void* createIDevicesFactoryImpl() {
     auto serviceName = std::string(IConfig::descriptor) + "/default";
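
A hypothetical caller-side sketch of the new factory methods (not part of this change): enumerate the declared AIDL module instances via getDeviceNames and open each of them; the HIDL factory simply reports INVALID_OPERATION for the enumeration.

    // Hypothetical usage sketch; header paths and calls follow the interfaces shown above.
    #include <string>
    #include <vector>

    #include <media/audiohal/DeviceHalInterface.h>
    #include <media/audiohal/DevicesFactoryHalInterface.h>
    #include <utils/Errors.h>
    #include <utils/StrongPointer.h>

    static android::status_t openAllDeclaredModules(
            const android::sp<android::DevicesFactoryHalInterface>& factory) {
        std::vector<std::string> names;
        if (android::status_t status = factory->getDeviceNames(&names); status != android::OK) {
            return status;  // e.g. INVALID_OPERATION from the HIDL factory
        }
        for (const auto& name : names) {
            android::sp<android::DeviceHalInterface> device;
            if (android::status_t status = factory->openDevice(name.c_str(), &device);
                    status != android::OK) {
                return status;
            }
            // Use 'device' here, e.g. device->initCheck().
        }
        return android::OK;
    }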
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.h b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
index cb627bc..97e3796 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
@@ -17,6 +17,7 @@
 #pragma once
 
 #include <aidl/android/hardware/audio/core/IConfig.h>
+#include <aidl/android/media/audio/IHalAdapterVendorExtension.h>
 #include <media/audiohal/DevicesFactoryHalInterface.h>
 #include <utils/RefBase.h>
 
@@ -26,7 +27,9 @@
 {
   public:
     explicit DevicesFactoryHalAidl(
-            std::shared_ptr<::aidl::android::hardware::audio::core::IConfig> iConfig);
+            std::shared_ptr<::aidl::android::hardware::audio::core::IConfig> config);
+
+    status_t getDeviceNames(std::vector<std::string> *names) override;
 
     // Opens a device with the specified name. To close the device, it is
     // necessary to release references to the returned object.
@@ -38,8 +41,17 @@
 
     android::detail::AudioHalVersionInfo getHalVersion() const override;
 
+    status_t getSurroundSoundConfig(media::SurroundSoundConfig *config) override;
+
+    status_t getEngineConfig(media::audio::common::AudioHalEngineConfig *config) override;
+
   private:
-    const std::shared_ptr<::aidl::android::hardware::audio::core::IConfig> mIConfig;
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IConfig> mConfig;
+    std::optional<std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension>>
+            mVendorExt;
+
+    std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> getVendorExtension();
+
     ~DevicesFactoryHalAidl() = default;
 };
 
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
index 9f06f83..eef60b5 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
@@ -106,6 +106,10 @@
 }
 #endif
 
+status_t DevicesFactoryHalHidl::getDeviceNames(std::vector<std::string> *names __unused) {
+    return INVALID_OPERATION;
+}
+
 status_t DevicesFactoryHalHidl::openDevice(const char *name, sp<DeviceHalInterface> *device) {
     auto factories = copyDeviceFactories();
     if (factories.empty()) return NO_INIT;
@@ -232,6 +236,16 @@
     return AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, MAJOR_VERSION, MINOR_VERSION);
 }
 
+status_t DevicesFactoryHalHidl::getSurroundSoundConfig(
+        media::SurroundSoundConfig *config __unused) {
+    return INVALID_OPERATION;
+}
+
+status_t DevicesFactoryHalHidl::getEngineConfig(
+        media::audio::common::AudioHalEngineConfig *config __unused) {
+    return INVALID_OPERATION;
+}
+
 // Main entry-point to the shared library.
 extern "C" __attribute__((visibility("default"))) void* createIDevicesFactoryImpl() {
     auto service = hardware::audio::CPP_VERSION::IDevicesFactory::getService();
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.h b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
index 5294728..3285af7 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
@@ -37,6 +37,8 @@
     explicit DevicesFactoryHalHidl(sp<IDevicesFactory> devicesFactory);
     void onFirstRef() override;
 
+    status_t getDeviceNames(std::vector<std::string> *names) override;
+
     // Opens a device with the specified name. To close the device, it is
     // necessary to release references to the returned object.
     status_t openDevice(const char *name, sp<DeviceHalInterface> *device) override;
@@ -47,6 +49,10 @@
 
     android::detail::AudioHalVersionInfo getHalVersion() const override;
 
+    status_t getSurroundSoundConfig(media::SurroundSoundConfig *config) override;
+
+    status_t getEngineConfig(media::audio::common::AudioHalEngineConfig *config) override;
+
   private:
     friend class ServiceNotificationListener;
     void addDeviceFactory(sp<IDevicesFactory> factory, bool needToNotify);
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
index 519b871..8a582a5 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <cstddef>
 #include <cstdint>
 #include <cstring>
 #include <optional>
@@ -29,6 +30,7 @@
 #include <utils/Log.h>
 
 #include "EffectConversionHelperAidl.h"
+#include "EffectProxy.h"
 
 namespace android {
 namespace effect {
@@ -37,10 +39,13 @@
 using ::aidl::android::hardware::audio::effect::CommandId;
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::Flags;
+using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::State;
 using ::aidl::android::media::audio::common::AudioDeviceDescription;
 using ::aidl::android::media::audio::common::AudioMode;
 using ::aidl::android::media::audio::common::AudioSource;
+using ::android::hardware::EventFlag;
 using android::effect::utils::EffectParamReader;
 using android::effect::utils::EffectParamWriter;
 
@@ -67,12 +72,13 @@
 
 EffectConversionHelperAidl::EffectConversionHelperAidl(
         std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
-        int32_t sessionId, int32_t ioId, const Descriptor& desc)
+        int32_t sessionId, int32_t ioId, const Descriptor& desc, bool isProxy)
     : mSessionId(sessionId),
       mIoId(ioId),
       mDesc(desc),
       mEffect(std::move(effect)),
-      mIsInputStream(mDesc.common.flags.type == Flags::Type::PRE_PROC) {
+      mIsInputStream(mDesc.common.flags.type == Flags::Type::PRE_PROC),
+      mIsProxyEffect(isProxy) {
     mCommon.session = sessionId;
     mCommon.ioHandle = ioId;
     mCommon.input = mCommon.output = kDefaultAudioConfig;
@@ -96,8 +102,8 @@
         return BAD_VALUE;
     }
 
-    return *(status_t*)pReplyData =
-                   statusTFromBinderStatus(mEffect->open(mCommon, std::nullopt, &mOpenReturn));
+    // Do nothing for EFFECT_CMD_INIT; IEffect.open() is called when handling EFFECT_CMD_SET_CONFIG.
+    return *(status_t*)pReplyData = OK;
 }
 
 status_t EffectConversionHelperAidl::handleSetParameter(uint32_t cmdSize, const void* pCmdData,
@@ -122,6 +128,8 @@
 status_t EffectConversionHelperAidl::handleGetParameter(uint32_t cmdSize, const void* pCmdData,
                                                         uint32_t* replySize, void* pReplyData) {
     if (cmdSize < sizeof(effect_param_t) || !pCmdData || !replySize || !pReplyData) {
+        ALOGE("%s illegal cmdSize %u pCmdData %p replySize %p replyData %p", __func__, cmdSize,
+              pCmdData, replySize, pReplyData);
         return BAD_VALUE;
     }
 
@@ -154,22 +162,60 @@
     }
 
     effect_config_t* config = (effect_config_t*)pCmdData;
-    Parameter::Common aidlCommon = {
-            .session = mSessionId,
-            .ioHandle = mIoId,
-            .input = {.base = VALUE_OR_RETURN_STATUS(
-                              ::aidl::android::legacy2aidl_buffer_config_t_AudioConfigBase(
-                                      config->inputCfg, mIsInputStream))},
-            .output = {.base = VALUE_OR_RETURN_STATUS(
-                               ::aidl::android::legacy2aidl_buffer_config_t_AudioConfigBase(
-                                       config->outputCfg, mIsInputStream))}};
+    Parameter::Common common = {
+            .input =
+                    VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_buffer_config_t_AudioConfig(
+                            config->inputCfg, mIsInputStream)),
+            .output =
+                    VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_buffer_config_t_AudioConfig(
+                            config->outputCfg, mIsInputStream)),
+            .session = mCommon.session,
+            .ioHandle = mCommon.ioHandle};
 
-    Parameter aidlParam = UNION_MAKE(Parameter, common, aidlCommon);
+    State state;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
+    // If the buffer/ioHandle configuration changes for an already opened effect, close and re-open it.
+    if (state != State::INIT && mCommon != common) {
+        ALOGI("%s at state %s, closing effect", __func__,
+              android::internal::ToString(state).c_str());
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->close()));
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
+        mStatusQ.reset();
+        mInputQ.reset();
+        mOutputQ.reset();
+    }
 
-    status_t ret = statusTFromBinderStatus(mEffect->setParameter(aidlParam));
-    EffectParamWriter writer(*(effect_param_t*)pReplyData);
-    writer.setStatus(ret);
-    return ret;
+    if (state == State::INIT) {
+        ALOGI("%s at state %s, opening effect with input %s output %s", __func__,
+              android::internal::ToString(state).c_str(), common.input.toString().c_str(),
+              common.output.toString().c_str());
+        IEffect::OpenEffectReturn openReturn;
+        RETURN_STATUS_IF_ERROR(
+                statusTFromBinderStatus(mEffect->open(common, std::nullopt, &openReturn)));
+
+        if (mIsProxyEffect) {
+            mStatusQ = std::static_pointer_cast<EffectProxy>(mEffect)->getStatusMQ();
+            mInputQ = std::static_pointer_cast<EffectProxy>(mEffect)->getInputMQ();
+            mOutputQ = std::static_pointer_cast<EffectProxy>(mEffect)->getOutputMQ();
+        } else {
+            mStatusQ = std::make_shared<StatusMQ>(openReturn.statusMQ);
+            mInputQ = std::make_shared<DataMQ>(openReturn.inputDataMQ);
+            mOutputQ = std::make_shared<DataMQ>(openReturn.outputDataMQ);
+        }
+
+        if (status_t status = updateEventFlags(); status != OK) {
+            ALOGV("%s closing at status %d", __func__, status);
+            mEffect->close();
+            return status;
+        }
+        mCommon = common;
+    } else if (mCommon != common) {
+        ALOGI("%s at state %s, setParameter", __func__, android::internal::ToString(state).c_str());
+        Parameter aidlParam = UNION_MAKE(Parameter, common, mCommon);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
+    }
+
+    return *static_cast<int32_t*>(pReplyData) = OK;
 }
 
 status_t EffectConversionHelperAidl::handleGetConfig(uint32_t cmdSize __unused,
@@ -187,11 +233,9 @@
     const auto& common = param.get<Parameter::common>();
     effect_config_t* pConfig = (effect_config_t*)pReplyData;
     pConfig->inputCfg = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::aidl2legacy_AudioConfigBase_buffer_config_t(common.input.base, true));
-    pConfig->outputCfg =
-            VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_AudioConfigBase_buffer_config_t(
-                    common.output.base, false));
-    mCommon = common;
+            ::aidl::android::aidl2legacy_AudioConfig_buffer_config_t(common.input, true));
+    pConfig->outputCfg = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioConfig_buffer_config_t(common.output, false));
     return OK;
 }
 
@@ -273,17 +317,25 @@
             mEffect->setParameter(Parameter::make<Parameter::deviceDescription>(aidlDevices))));
     return *static_cast<int32_t*>(pReplyData) = OK;
 }
+
 status_t EffectConversionHelperAidl::handleSetVolume(uint32_t cmdSize, const void* pCmdData,
-                                                     uint32_t* replySize __unused,
-                                                     void* pReplyData __unused) {
+                                                     uint32_t* replySize, void* pReplyData) {
     if (cmdSize != 2 * sizeof(uint32_t) || !pCmdData) {
         ALOGE("%s parameter invalid %u %p", __func__, cmdSize, pCmdData);
         return BAD_VALUE;
     }
-    Parameter::VolumeStereo volume = {.left = (float)(*(uint32_t*)pCmdData) / (1 << 24),
-                                      .right = (float)(*(uint32_t*)pCmdData + 1) / (1 << 24)};
+
+    constexpr uint32_t unityGain = 1 << 24;
+    Parameter::VolumeStereo volume = {.left = (float)(*(uint32_t*)pCmdData) / unityGain,
+                                      .right = (float)(*((uint32_t*)pCmdData + 1)) / unityGain};
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             mEffect->setParameter(Parameter::make<Parameter::volumeStereo>(volume))));
+
+    // write unity gain back if volume was successfully set
+    if (replySize && *replySize == 2 * sizeof(uint32_t) && pReplyData) {
+        constexpr uint32_t vol_ret[2] = {unityGain, unityGain};
+        memcpy(pReplyData, vol_ret, sizeof(vol_ret));
+    }
     return OK;
 }
 
@@ -294,7 +346,22 @@
               pReplyData);
         return BAD_VALUE;
     }
-    // TODO: handle this after effectproxy implemented in libaudiohal
+    effect_offload_param_t* offload = (effect_offload_param_t*)pCmdData;
+    // send to proxy to update active sub-effect
+    if (mIsProxyEffect) {
+        ALOGI("%s offload param offload %s ioHandle %d", __func__,
+              offload->isOffload ? "true" : "false", offload->ioHandle);
+        mCommon.ioHandle = offload->ioHandle;
+        const auto& effectProxy = std::static_pointer_cast<EffectProxy>(mEffect);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(effectProxy->setOffloadParam(offload)));
+        // update the FMQs if the effect instance is already open
+        if (State state; effectProxy->getState(&state).isOk() && state != State::INIT) {
+            mStatusQ = effectProxy->getStatusMQ();
+            mInputQ = effectProxy->getInputMQ();
+            mOutputQ = effectProxy->getOutputMQ();
+            updateEventFlags();
+        }
+    }
     return *static_cast<int32_t*>(pReplyData) = OK;
 }
 
@@ -338,5 +405,30 @@
     return visualizerMeasure(replySize, pReplyData);
 }
 
+status_t EffectConversionHelperAidl::updateEventFlags() {
+    status_t status = BAD_VALUE;
+    EventFlag* efGroup = nullptr;
+    if (mStatusQ && mStatusQ->isValid()) {
+        status = EventFlag::createEventFlag(mStatusQ->getEventFlagWord(), &efGroup);
+        if (status != OK || !efGroup) {
+            ALOGE("%s: create EventFlagGroup failed, ret %d, egGroup %p", __func__, status,
+                  efGroup);
+            status = (status == OK) ? BAD_VALUE : status;
+        }
+    } else if (isBypassing()) {
+        // For an effect with the bypass (no processing) flag, it's okay not to have a statusQ.
+        return OK;
+    }
+
+    mEfGroup.reset(efGroup, EventFlagDeleter());
+    return status;
+}
+
+bool EffectConversionHelperAidl::isBypassing() const {
+    return mEffect &&
+           (mDesc.common.flags.bypass ||
+            (mIsProxyEffect && std::static_pointer_cast<EffectProxy>(mEffect)->isBypassing()));
+}
+
 }  // namespace effect
 }  // namespace android
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.h b/media/libaudiohal/impl/EffectConversionHelperAidl.h
index 54df1b8..7c8f11b 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.h
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.h
@@ -19,6 +19,7 @@
 #include <utils/Errors.h>
 
 #include <aidl/android/hardware/audio/effect/BpEffect.h>
+#include <fmq/AidlMessageQueue.h>
 #include <system/audio_effect.h>
 #include <system/audio_effects/audio_effects_utils.h>
 
@@ -30,10 +31,17 @@
     status_t handleCommand(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData, uint32_t* replySize,
                            void* pReplyData);
     virtual ~EffectConversionHelperAidl() {}
-    const ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn&
-    getEffectReturnParam() const {
-        return mOpenReturn;
-    }
+
+    using StatusMQ = ::android::AidlMessageQueue<
+            ::aidl::android::hardware::audio::effect::IEffect::Status,
+            ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
+    using DataMQ = ::android::AidlMessageQueue<
+            float, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
+    std::shared_ptr<StatusMQ> getStatusMQ() { return mStatusQ; }
+    std::shared_ptr<DataMQ> getInputMQ() { return mInputQ; }
+    std::shared_ptr<DataMQ> getOutputMQ() { return mOutputQ; }
+    std::shared_ptr<android::hardware::EventFlag> getEventFlagGroup() { return mEfGroup; }
+    bool isBypassing() const;
 
   protected:
     const int32_t mSessionId;
@@ -42,13 +50,12 @@
     const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> mEffect;
     // whether the effect is instantiated on an input stream
     const bool mIsInputStream;
-    ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn mOpenReturn;
     ::aidl::android::hardware::audio::effect::Parameter::Common mCommon;
 
     EffectConversionHelperAidl(
             std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
             int32_t sessionId, int32_t ioId,
-            const ::aidl::android::hardware::audio::effect::Descriptor& desc);
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc, bool isProxy);
 
     status_t handleSetParameter(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
                                 void* pReplyData);
@@ -59,6 +66,7 @@
     const aidl::android::media::audio::common::AudioFormatDescription kDefaultFormatDescription = {
             .type = aidl::android::media::audio::common::AudioFormatType::PCM,
             .pcm = aidl::android::media::audio::common::PcmType::FLOAT_32_BIT};
+    const bool mIsProxyEffect;
 
     static constexpr int kDefaultframeCount = 0x100;
 
@@ -75,6 +83,20 @@
                                                                    uint32_t* /* replySize */,
                                                                    void* /* pReplyData */);
     static const std::map<uint32_t /* effect_command_e */, CommandHandler> mCommandHandlerMap;
+    // data and status FMQ
+    std::shared_ptr<StatusMQ> mStatusQ = nullptr;
+    std::shared_ptr<DataMQ> mInputQ = nullptr, mOutputQ = nullptr;
+
+
+    struct EventFlagDeleter {
+        void operator()(::android::hardware::EventFlag* flag) const {
+            if (flag) {
+                ::android::hardware::EventFlag::deleteEventFlag(&flag);
+            }
+        }
+    };
+    std::shared_ptr<android::hardware::EventFlag> mEfGroup = nullptr;
+    status_t updateEventFlags();
 
     status_t handleInit(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
                         void* pReplyData);
@@ -112,6 +134,7 @@
     virtual status_t visualizerMeasure(uint32_t* replySize __unused, void* pReplyData __unused) {
         return BAD_VALUE;
     }
+
 };
 
 }  // namespace effect
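
The mEfGroup member above pairs a shared_ptr with a custom deleter so that EventFlag::deleteEventFlag runs exactly once, when the last owner releases the flag. A generic, standalone sketch of that custom-deleter pattern using a stand-in resource type (not the real EventFlag API):

    // Generic sketch of the shared_ptr-with-custom-deleter pattern (stand-in resource type).
    #include <cstdio>
    #include <memory>

    struct Resource { int handle; };

    void destroyResource(Resource** res) {  // mirrors the EventFlag::deleteEventFlag(&flag) shape
        delete *res;
        *res = nullptr;
    }

    struct ResourceDeleter {
        void operator()(Resource* res) const {
            if (res) {
                destroyResource(&res);
            }
        }
    };

    int main() {
        std::shared_ptr<Resource> res(new Resource{7}, ResourceDeleter());
        std::printf("handle = %d\n", res->handle);
        return 0;  // the deleter runs here, when the last reference goes away
    }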
diff --git a/media/libaudiohal/impl/EffectConversionHelperHidl.h b/media/libaudiohal/impl/EffectConversionHelperHidl.h
index 4371d12..ed696bf 100644
--- a/media/libaudiohal/impl/EffectConversionHelperHidl.h
+++ b/media/libaudiohal/impl/EffectConversionHelperHidl.h
@@ -19,9 +19,9 @@
 
 #include "ConversionHelperHidl.h"
 
-#include PATH(android/hardware/audio/effect/FILE_VERSION/types.h)
+#include PATH(android/hardware/audio/effect/COMMON_TYPES_FILE_VERSION/types.h)
 
-using EffectResult = ::android::hardware::audio::effect::CPP_VERSION::Result;
+using EffectResult = ::android::hardware::audio::effect::COMMON_TYPES_CPP_VERSION::Result;
 
 namespace android {
 
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index 0c19ac8..ae4a530 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -31,6 +31,7 @@
 #include <utils/Log.h>
 
 #include "EffectHalAidl.h"
+#include "EffectProxy.h"
 
 #include <aidl/android/hardware/audio/effect/IEffect.h>
 
@@ -60,20 +61,25 @@
 namespace effect {
 
 EffectHalAidl::EffectHalAidl(const std::shared_ptr<IFactory>& factory,
-                             const std::shared_ptr<IEffect>& effect, uint64_t effectId,
-                             int32_t sessionId, int32_t ioId, const Descriptor& desc)
+                             const std::shared_ptr<IEffect>& effect,
+                             int32_t sessionId, int32_t ioId, const Descriptor& desc,
+                             bool isProxyEffect)
     : mFactory(factory),
       mEffect(effect),
-      mEffectId(effectId),
       mSessionId(sessionId),
       mIoId(ioId),
-      mDesc(desc) {
+      mDesc(desc),
+      mIsProxyEffect(isProxyEffect) {
     createAidlConversion(effect, sessionId, ioId, desc);
 }
 
 EffectHalAidl::~EffectHalAidl() {
-    if (mFactory) {
-        mFactory->destroyEffect(mEffect);
+    if (mEffect) {
+        if (mIsProxyEffect) {
+            std::static_pointer_cast<EffectProxy>(mEffect)->destroy();
+        } else if (mFactory) {
+            mFactory->destroyEffect(mEffect);
+        }
     }
 }
 
@@ -85,64 +91,64 @@
     ALOGI("%s create UUID %s", __func__, typeUuid.toString().c_str());
     if (typeUuid ==
         ::aidl::android::hardware::audio::effect::getEffectTypeUuidAcousticEchoCanceler()) {
-        mConversion =
-                std::make_unique<android::effect::AidlConversionAec>(effect, sessionId, ioId, desc);
+        mConversion = std::make_unique<android::effect::AidlConversionAec>(effect, sessionId, ioId,
+                                                                           desc, mIsProxyEffect);
     } else if (typeUuid == ::aidl::android::hardware::audio::effect::
                                    getEffectTypeUuidAutomaticGainControlV1()) {
         mConversion = std::make_unique<android::effect::AidlConversionAgc1>(effect, sessionId, ioId,
-                                                                            desc);
+                                                                            desc, mIsProxyEffect);
     } else if (typeUuid == ::aidl::android::hardware::audio::effect::
                                    getEffectTypeUuidAutomaticGainControlV2()) {
         mConversion = std::make_unique<android::effect::AidlConversionAgc2>(effect, sessionId, ioId,
-                                                                            desc);
+                                                                            desc, mIsProxyEffect);
     } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidBassBoost()) {
-        mConversion = std::make_unique<android::effect::AidlConversionBassBoost>(effect, sessionId,
-                                                                                 ioId, desc);
+        mConversion = std::make_unique<android::effect::AidlConversionBassBoost>(
+                effect, sessionId, ioId, desc, mIsProxyEffect);
     } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidDownmix()) {
-        mConversion = std::make_unique<android::effect::AidlConversionDownmix>(effect, sessionId,
-                                                                               ioId, desc);
+        mConversion = std::make_unique<android::effect::AidlConversionDownmix>(
+                effect, sessionId, ioId, desc, mIsProxyEffect);
     } else if (typeUuid ==
                ::aidl::android::hardware::audio::effect::getEffectTypeUuidDynamicsProcessing()) {
-        mConversion =
-                std::make_unique<android::effect::AidlConversionDp>(effect, sessionId, ioId, desc);
+        mConversion = std::make_unique<android::effect::AidlConversionDp>(effect, sessionId, ioId,
+                                                                          desc, mIsProxyEffect);
     } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidEnvReverb()) {
-        mConversion = std::make_unique<android::effect::AidlConversionEnvReverb>(effect, sessionId,
-                                                                                 ioId, desc);
+        mConversion = std::make_unique<android::effect::AidlConversionEnvReverb>(
+                effect, sessionId, ioId, desc, mIsProxyEffect);
     } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidEqualizer()) {
-        mConversion =
-                std::make_unique<android::effect::AidlConversionEq>(effect, sessionId, ioId, desc);
+        mConversion = std::make_unique<android::effect::AidlConversionEq>(effect, sessionId, ioId,
+                                                                          desc, mIsProxyEffect);
     } else if (typeUuid ==
                ::aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator()) {
         mConversion = std::make_unique<android::effect::AidlConversionHapticGenerator>(
-                effect, sessionId, ioId, desc);
+                effect, sessionId, ioId, desc, mIsProxyEffect);
     } else if (typeUuid ==
                ::aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer()) {
         mConversion = std::make_unique<android::effect::AidlConversionLoudnessEnhancer>(
-                effect, sessionId, ioId, desc);
+                effect, sessionId, ioId, desc, mIsProxyEffect);
     } else if (typeUuid ==
                ::aidl::android::hardware::audio::effect::getEffectTypeUuidNoiseSuppression()) {
         mConversion = std::make_unique<android::effect::AidlConversionNoiseSuppression>(
-                effect, sessionId, ioId, desc);
+                effect, sessionId, ioId, desc, mIsProxyEffect);
     } else if (typeUuid ==
                ::aidl::android::hardware::audio::effect::getEffectTypeUuidPresetReverb()) {
         mConversion = std::make_unique<android::effect::AidlConversionPresetReverb>(
-                effect, sessionId, ioId, desc);
+                effect, sessionId, ioId, desc, mIsProxyEffect);
     } else if (typeUuid ==
                ::aidl::android::hardware::audio::effect::getEffectTypeUuidSpatializer()) {
         mConversion = std::make_unique<android::effect::AidlConversionSpatializer>(
-                effect, sessionId, ioId, desc);
+                effect, sessionId, ioId, desc, mIsProxyEffect);
     } else if (typeUuid ==
                ::aidl::android::hardware::audio::effect::getEffectTypeUuidVirtualizer()) {
         mConversion = std::make_unique<android::effect::AidlConversionVirtualizer>(
-                effect, sessionId, ioId, desc);
+                effect, sessionId, ioId, desc, mIsProxyEffect);
     } else if (typeUuid ==
                ::aidl::android::hardware::audio::effect::getEffectTypeUuidVisualizer()) {
-        mConversion = std::make_unique<android::effect::AidlConversionVisualizer>(effect, sessionId,
-                                                                                  ioId, desc);
+        mConversion = std::make_unique<android::effect::AidlConversionVisualizer>(
+                effect, sessionId, ioId, desc, mIsProxyEffect);
     } else {
         // For unknown UUID, use vendor extension implementation
         mConversion = std::make_unique<android::effect::AidlConversionVendorExtension>(
-                effect, sessionId, ioId, desc);
+                effect, sessionId, ioId, desc, mIsProxyEffect);
     }
     return OK;
 }
@@ -157,37 +163,57 @@
     return OK;
 }
 
-
 // write to input FMQ here, wait for statusMQ STATUS_OK, and read from output FMQ
 status_t EffectHalAidl::process() {
-    size_t available = mInputQ->availableToWrite();
+    auto statusQ = mConversion->getStatusMQ();
+    auto inputQ = mConversion->getInputMQ();
+    auto outputQ = mConversion->getOutputMQ();
+    auto efGroup = mConversion->getEventFlagGroup();
+    if (mConversion->isBypassing()) {
+        return OK;
+    }
+    if (!statusQ || !statusQ->isValid() || !inputQ || !inputQ->isValid() || !outputQ ||
+        !outputQ->isValid() || !efGroup) {
+        ALOGE("%s invalid FMQ [Status %d I %d O %d] efGroup %p", __func__,
+              statusQ ? statusQ->isValid() : 0, inputQ ? inputQ->isValid() : 0,
+              outputQ ? outputQ->isValid() : 0, efGroup.get());
+        return INVALID_OPERATION;
+    }
+
+    size_t available = inputQ->availableToWrite();
     size_t floatsToWrite = std::min(available, mInBuffer->getSize() / sizeof(float));
     if (floatsToWrite == 0) {
-        ALOGW("%s not able to write, floats in buffer %zu, space in FMQ %zu", __func__,
+        ALOGE("%s not able to write, floats in buffer %zu, space in FMQ %zu", __func__,
               mInBuffer->getSize() / sizeof(float), available);
         return INVALID_OPERATION;
     }
-    if (!mInputQ->write((float*)mInBuffer->ptr(), floatsToWrite)) {
-        ALOGW("%s failed to write %zu into inputQ", __func__, floatsToWrite);
+    if (!mInBuffer->audioBuffer() ||
+        !inputQ->write((float*)mInBuffer->audioBuffer()->f32, floatsToWrite)) {
+        ALOGE("%s failed to write %zu floats from audiobuffer %p to inputQ [avail %zu]", __func__,
+              floatsToWrite, mInBuffer->audioBuffer(), inputQ->availableToWrite());
         return INVALID_OPERATION;
     }
+    efGroup->wake(aidl::android::hardware::audio::effect::kEventFlagNotEmpty);
 
     IEffect::Status retStatus{};
-    if (!mStatusQ->readBlocking(&retStatus, 1) || retStatus.status != OK ||
+    if (!statusQ->readBlocking(&retStatus, 1) || retStatus.status != OK ||
         (size_t)retStatus.fmqConsumed != floatsToWrite || retStatus.fmqProduced == 0) {
-        ALOGW("%s read status failed: %s", __func__, retStatus.toString().c_str());
+        ALOGE("%s read status failed: %s", __func__, retStatus.toString().c_str());
         return INVALID_OPERATION;
     }
 
-    available = mOutputQ->availableToRead();
+    available = outputQ->availableToRead();
     size_t floatsToRead = std::min(available, mOutBuffer->getSize() / sizeof(float));
     if (floatsToRead == 0) {
-        ALOGW("%s not able to read, buffer space %zu, floats in FMQ %zu", __func__,
+        ALOGE("%s not able to read, buffer space %zu, floats in FMQ %zu", __func__,
               mOutBuffer->getSize() / sizeof(float), available);
         return INVALID_OPERATION;
     }
-    if (!mOutputQ->read((float*)mOutBuffer->ptr(), floatsToRead)) {
-        ALOGW("%s failed to read %zu from outputQ", __func__, floatsToRead);
+    // always read floating point data for AIDL
+    if (!mOutBuffer->audioBuffer() ||
+        !outputQ->read(mOutBuffer->audioBuffer()->f32, floatsToRead)) {
+        ALOGE("%s failed to read %zu from outputQ to audioBuffer %p", __func__, floatsToRead,
+              mOutBuffer->audioBuffer());
         return INVALID_OPERATION;
     }
 
@@ -210,20 +236,7 @@
         return INVALID_OPERATION;
     }
 
-    status_t ret = mConversion->handleCommand(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
-    // update FMQs when effect open successfully
-    if (ret == OK && cmdCode == EFFECT_CMD_INIT) {
-        const auto& retParam = mConversion->getEffectReturnParam();
-        mStatusQ = std::make_unique<StatusMQ>(retParam.statusMQ);
-        mInputQ = std::make_unique<DataMQ>(retParam.inputDataMQ);
-        mOutputQ = std::make_unique<DataMQ>(retParam.outputDataMQ);
-        if (!mStatusQ->isValid() || !mInputQ->isValid() || !mOutputQ->isValid()) {
-            ALOGE("%s return with invalid FMQ", __func__);
-            return NO_INIT;
-        }
-    }
-
-    return ret;
+    return mConversion->handleCommand(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
 }
 
 status_t EffectHalAidl::getDescriptor(effect_descriptor_t* pDescriptor) {
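
For reference, the happy path of the FMQ round trip implemented by process() above, with error handling stripped (an outline of the code in this change, not additional code):

    // Outline of EffectHalAidl::process(), happy path only:
    //   1. n = min(inputQ->availableToWrite(), input buffer size in floats)
    //   2. inputQ->write(input floats, n)
    //   3. efGroup->wake(kEventFlagNotEmpty)      // tell the HAL that data is ready
    //   4. statusQ->readBlocking(&status, 1)      // expect status.fmqConsumed == n
    //   5. outputQ->read(output floats, min(outputQ->availableToRead(), output buffer size))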
diff --git a/media/libaudiohal/impl/EffectHalAidl.h b/media/libaudiohal/impl/EffectHalAidl.h
index 194150d..1b7a3d6 100644
--- a/media/libaudiohal/impl/EffectHalAidl.h
+++ b/media/libaudiohal/impl/EffectHalAidl.h
@@ -23,6 +23,7 @@
 #include <fmq/AidlMessageQueue.h>
 #include <media/audiohal/EffectHalInterface.h>
 #include <system/audio_effect.h>
+#include <system/audio_effects/aidl_effects_utils.h>
 
 #include "EffectConversionHelperAidl.h"
 
@@ -31,11 +32,6 @@
 
 class EffectHalAidl : public EffectHalInterface {
   public:
-    using StatusMQ = ::android::AidlMessageQueue<
-            ::aidl::android::hardware::audio::effect::IEffect::Status,
-            ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
-    using DataMQ = ::android::AidlMessageQueue<
-            float, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
 
     // Set the input buffer.
     status_t setInBuffer(const sp<EffectBufferHalInterface>& buffer) override;
@@ -60,13 +56,8 @@
     // Free resources on the remote side.
     status_t close() override;
 
-    // Whether it's a local implementation.
-    bool isLocal() const override { return false; }
-
     status_t dump(int fd) override;
 
-    uint64_t effectId() const override { return mEffectId; }
-
     const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> getIEffect() const {
         return mEffect;
     }
@@ -79,16 +70,14 @@
 
     const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory> mFactory;
     const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> mEffect;
-    const uint64_t mEffectId;
     const int32_t mSessionId;
     const int32_t mIoId;
     const ::aidl::android::hardware::audio::effect::Descriptor mDesc;
+    const bool mIsProxyEffect;
+
     std::unique_ptr<EffectConversionHelperAidl> mConversion;
-    std::unique_ptr<StatusMQ> mStatusQ;
-    std::unique_ptr<DataMQ> mInputQ, mOutputQ;
 
     sp<EffectBufferHalInterface> mInBuffer, mOutBuffer;
-    effect_config_t mConfig;
 
     status_t createAidlConversion(
             std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
@@ -98,9 +87,11 @@
     EffectHalAidl(
             const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory>& factory,
             const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& effect,
-            uint64_t effectId, int32_t sessionId, int32_t ioId,
-            const ::aidl::android::hardware::audio::effect::Descriptor& desc);
+            int32_t sessionId, int32_t ioId,
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc,
+            bool isProxyEffect);
     bool setEffectReverse(bool reverse);
+    bool needUpdateReturnParam(uint32_t cmdCode);
 
     // The destructor automatically releases the effect.
     virtual ~EffectHalAidl();
diff --git a/media/libaudiohal/impl/EffectHalHidl.h b/media/libaudiohal/impl/EffectHalHidl.h
index 94dcd7e..dda21ed 100644
--- a/media/libaudiohal/impl/EffectHalHidl.h
+++ b/media/libaudiohal/impl/EffectHalHidl.h
@@ -17,7 +17,7 @@
 #ifndef ANDROID_HARDWARE_EFFECT_HAL_HIDL_H
 #define ANDROID_HARDWARE_EFFECT_HAL_HIDL_H
 
-#include PATH(android/hardware/audio/effect/FILE_VERSION/IEffect.h)
+#include PATH(android/hardware/audio/effect/COMMON_TYPES_FILE_VERSION/IEffect.h)
 #include <media/audiohal/EffectHalInterface.h>
 #include <fmq/EventFlag.h>
 #include <fmq/MessageQueue.h>
@@ -31,7 +31,7 @@
 namespace android {
 namespace effect {
 
-using namespace ::android::hardware::audio::effect::CPP_VERSION;
+using namespace ::android::hardware::audio::effect::COMMON_TYPES_CPP_VERSION;
 
 class EffectHalHidl : public EffectHalInterface, public EffectConversionHelperHidl
 {
@@ -59,12 +59,9 @@
     // Free resources on the remote side.
     virtual status_t close();
 
-    // Whether it's a local implementation.
-    virtual bool isLocal() const { return false; }
-
     virtual status_t dump(int fd);
 
-    virtual uint64_t effectId() const { return mEffectId; }
+    uint64_t effectId() const { return mEffectId; }
 
   private:
     friend class EffectsFactoryHalHidl;
diff --git a/media/libaudiohal/impl/EffectProxy.cpp b/media/libaudiohal/impl/EffectProxy.cpp
new file mode 100644
index 0000000..fac03b7
--- /dev/null
+++ b/media/libaudiohal/impl/EffectProxy.cpp
@@ -0,0 +1,301 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <iterator>
+#include <memory>
+#define LOG_TAG "EffectProxy"
+// #define LOG_NDEBUG 0
+
+#include <fmq/AidlMessageQueue.h>
+#include <system/audio_aidl_utils.h>
+#include <utils/Log.h>
+
+#include "EffectProxy.h"
+
+using ::aidl::android::hardware::audio::effect::Capability;
+using ::aidl::android::hardware::audio::effect::CommandId;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::Flags;
+using ::aidl::android::hardware::audio::effect::IEffect;
+using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::media::audio::common::AudioUuid;
+
+namespace android::effect {
+
+EffectProxy::EffectProxy(const AudioUuid& uuid, const std::vector<Descriptor>& descriptors,
+                         const std::shared_ptr<IFactory>& factory)
+    : mDescriptorCommon(buildDescriptorCommon(uuid, descriptors)),
+      mSubEffects(
+              [](const std::vector<Descriptor>& descs, const std::shared_ptr<IFactory>& factory) {
+                  std::vector<SubEffect> subEffects;
+                  ALOG_ASSERT(factory, "invalid EffectFactory handle");
+                  ndk::ScopedAStatus status = ndk::ScopedAStatus::ok();
+                  for (const auto& desc : descs) {
+                      SubEffect sub({.descriptor = desc});
+                      status = factory->createEffect(desc.common.id.uuid, &sub.handle);
+                      if (!status.isOk() || !sub.handle) {
+                          sub.handle = nullptr;
+                          ALOGW("%s create sub-effect %s failed", __func__,
+                                ::android::audio::utils::toString(desc.common.id.uuid).c_str());
+                      }
+                      subEffects.emplace_back(sub);
+                  }
+                  return subEffects;
+              }(descriptors, factory)),
+      mFactory(factory) {}
+
+EffectProxy::~EffectProxy() {
+    close();
+    destroy();
+    mSubEffects.clear();
+}
+
+ndk::ScopedAStatus EffectProxy::destroy() {
+    ALOGV("%s: %s", __func__,
+          ::android::audio::utils::toString(mDescriptorCommon.id.type).c_str());
+    return runWithAllSubEffects([&](std::shared_ptr<IEffect>& effect) {
+        ndk::ScopedAStatus status = mFactory->destroyEffect(effect);
+        if (status.isOk()) {
+            effect.reset();
+        }
+        return status;
+    });
+}
+
+ndk::ScopedAStatus EffectProxy::setOffloadParam(const effect_offload_param_t* offload) {
+    const auto& itor = std::find_if(mSubEffects.begin(), mSubEffects.end(), [&](const auto& sub) {
+        const auto& desc = sub.descriptor;
+        return offload->isOffload ==
+               (desc.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL);
+    });
+    if (itor == mSubEffects.end()) {
+        ALOGE("%s no %soffload sub-effect found", __func__, offload->isOffload ? "" : "non-");
+        mActiveSubIdx = 0;
+        return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_NULL_POINTER,
+                                                                "noActiveEffectFound");
+    }
+
+    mActiveSubIdx = std::distance(mSubEffects.begin(), itor);
+    ALOGV("%s: active %soffload sub-effect %zu descriptor: %s", __func__,
+          offload->isOffload ? "" : "non-", mActiveSubIdx,
+          ::android::audio::utils::toString(mSubEffects[mActiveSubIdx].descriptor.common.id.uuid)
+                  .c_str());
+    return ndk::ScopedAStatus::ok();
+}
+
+// EffectProxy goes over all sub-effects and calls the IEffect interfaces
+ndk::ScopedAStatus EffectProxy::open(const Parameter::Common& common,
+                                     const std::optional<Parameter::Specific>& specific,
+                                     IEffect::OpenEffectReturn* ret __unused) {
+    ndk::ScopedAStatus status = ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+            EX_ILLEGAL_ARGUMENT, "nullEffectHandle");
+    for (auto& sub : mSubEffects) {
+        IEffect::OpenEffectReturn openReturn;
+        if (!sub.handle || !(status = sub.handle->open(common, specific, &openReturn)).isOk()) {
+            ALOGE("%s: failed to open %p UUID %s", __func__, sub.handle.get(),
+                  ::android::audio::utils::toString(sub.descriptor.common.id.uuid).c_str());
+            break;
+        }
+        sub.effectMq.statusQ = std::make_shared<StatusMQ>(openReturn.statusMQ);
+        sub.effectMq.inputQ = std::make_shared<DataMQ>(openReturn.inputDataMQ);
+        sub.effectMq.outputQ = std::make_shared<DataMQ>(openReturn.outputDataMQ);
+    }
+
+    // close all opened sub-effects on failure
+    if (!status.isOk()) {
+        ALOGE("%s: closing all sub-effects with error %s", __func__,
+              status.getDescription().c_str());
+        close();
+    }
+
+    return status;
+}
+
+ndk::ScopedAStatus EffectProxy::close() {
+    return runWithAllSubEffects([&](std::shared_ptr<IEffect>& effect) {
+        return effect->close();
+    });
+}
+
+ndk::ScopedAStatus EffectProxy::getDescriptor(Descriptor* desc) {
+    desc->common = mDescriptorCommon;
+    desc->capability = mSubEffects[mActiveSubIdx].descriptor.capability;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectProxy::buildDescriptor(const AudioUuid& uuid,
+                                                const std::vector<Descriptor>& subEffectDescs,
+                                                Descriptor* desc) {
+    if (!desc) {
+        ALOGE("%s: null descriptor pointer", __func__);
+        return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_NULL_POINTER, "nullptr");
+    }
+
+    if (subEffectDescs.size() < 2) {
+        ALOGE("%s: proxy need at least 2 sub-effects, got %zu", __func__, subEffectDescs.size());
+        return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                "needMoreSubEffects");
+    }
+
+    desc->common = buildDescriptorCommon(uuid, subEffectDescs);
+    return ndk::ScopedAStatus::ok();
+}
+
+Descriptor::Common EffectProxy::buildDescriptorCommon(
+        const AudioUuid& uuid, const std::vector<Descriptor>& subEffectDescs) {
+    Descriptor::Common common;
+
+    for (const auto& desc : subEffectDescs) {
+        if (desc.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL) {
+            common.flags.hwAcceleratorMode = Flags::HardwareAccelerator::TUNNEL;
+        }
+
+        // set indication if any sub-effect indication was set
+        common.flags.offloadIndication |= desc.common.flags.offloadIndication;
+        common.flags.deviceIndication |= desc.common.flags.deviceIndication;
+        common.flags.audioModeIndication |= desc.common.flags.audioModeIndication;
+        common.flags.audioSourceIndication |= desc.common.flags.audioSourceIndication;
+    }
+
+    // initial flag values before we know which sub-effect to activate (via setOffloadParam);
+    // same as the HIDL EffectProxy flags
+    common.flags.type = Flags::Type::INSERT;
+    common.flags.insert = Flags::Insert::LAST;
+    common.flags.volume = Flags::Volume::CTRL;
+
+    // copy the type UUID from any sub-effect; all sub-effects must have the same type
+    common.id.type = subEffectDescs[0].common.id.type;
+    // replace implementation UUID with proxy UUID.
+    common.id.uuid = uuid;
+    common.id.proxy = std::nullopt;
+    common.name = "Proxy";
+    common.implementor = "AOSP";
+    return common;
+}
+
+// Handle the active sub-effect first; only send to the other sub-effects on success
+ndk::ScopedAStatus EffectProxy::command(CommandId id) {
+    return runWithActiveSubEffectThenOthers(
+            [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+                return effect->command(id);
+            });
+}
+
+// Return the active sub-effect state
+ndk::ScopedAStatus EffectProxy::getState(State* state) {
+    return runWithActiveSubEffect(
+            [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+                return effect->getState(state);
+            });
+}
+
+// Handle the active sub-effect first; only send to the other sub-effects on success
+ndk::ScopedAStatus EffectProxy::setParameter(const Parameter& param) {
+    return runWithActiveSubEffectThenOthers(
+            [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+                return effect->setParameter(param);
+            });
+}
+
+// Return the active sub-effect parameter
+ndk::ScopedAStatus EffectProxy::getParameter(const Parameter::Id& id, Parameter* param) {
+    return runWithActiveSubEffect(
+            [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+                return effect->getParameter(id, param);
+            });
+}
+
+ndk::ScopedAStatus EffectProxy::runWithActiveSubEffectThenOthers(
+        std::function<ndk::ScopedAStatus(const std::shared_ptr<IEffect>&)> const& func) {
+    ndk::ScopedAStatus status = runWithActiveSubEffect(func);
+    if (!status.isOk()) {
+        ALOGE("%s active sub-effect return error %s", __func__, status.getDescription().c_str());
+    }
+
+    // proceed with the other sub-effects; the active one was already handled above
+    for (size_t i = 0; i < mSubEffects.size(); i++) {
+        if (i == mActiveSubIdx || !mSubEffects[i].handle) {
+            ALOGE_IF(!mSubEffects[i].handle, "%s null sub-effect interface for %s", __func__,
+                     mSubEffects[i].descriptor.common.id.uuid.toString().c_str());
+            continue;
+        }
+        func(mSubEffects[i].handle);
+    }
+    return status;
+}
+
+ndk::ScopedAStatus EffectProxy::runWithActiveSubEffect(
+        std::function<ndk::ScopedAStatus(const std::shared_ptr<IEffect>&)> const& func) {
+    if (!mSubEffects[mActiveSubIdx].handle) {
+        ALOGE("%s null active sub-effect interface, active %s", __func__,
+              mSubEffects[mActiveSubIdx].descriptor.toString().c_str());
+        return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_NULL_POINTER,
+                                                                "activeSubEffectNull");
+    }
+    return func(mSubEffects[mActiveSubIdx].handle);
+}
+
+ndk::ScopedAStatus EffectProxy::runWithAllSubEffects(
+        std::function<ndk::ScopedAStatus(std::shared_ptr<IEffect>&)> const& func) {
+    ndk::ScopedAStatus status = ndk::ScopedAStatus::ok();
+    // run the function with every sub-effect; return the last error if any call fails
+    for (auto& sub : mSubEffects) {
+        if (!sub.handle) {
+            ALOGW("%s null sub-effect interface %s", __func__, sub.descriptor.toString().c_str());
+            continue;
+        }
+        ndk::ScopedAStatus temp = func(sub.handle);
+        if (!temp.isOk()) {
+            status = ndk::ScopedAStatus::fromStatus(temp.getStatus());
+        }
+    }
+    return status;
+}
+
+bool EffectProxy::isBypassing() const {
+    return mSubEffects[mActiveSubIdx].descriptor.common.flags.bypass;
+}
+
+binder_status_t EffectProxy::dump(int fd, const char** args, uint32_t numArgs) {
+    const std::string dumpString = toString();
+    write(fd, dumpString.c_str(), dumpString.size());
+
+    return runWithAllSubEffects([&](std::shared_ptr<IEffect>& effect) {
+               return ndk::ScopedAStatus::fromStatus(effect->dump(fd, args, numArgs));
+           })
+            .getStatus();
+}
+
+std::string EffectProxy::toString(size_t level) const {
+    std::string prefixSpace(level, ' ');
+    std::string ss = prefixSpace + "EffectProxy:\n";
+    prefixSpace += " ";
+    base::StringAppendF(&ss, "%sDescriptorCommon: %s\n", prefixSpace.c_str(),
+                        mDescriptorCommon.toString().c_str());
+    base::StringAppendF(&ss, "%sActiveSubIdx: %zu\n", prefixSpace.c_str(), mActiveSubIdx);
+    base::StringAppendF(&ss, "%sAllSubEffects:\n", prefixSpace.c_str());
+    for (size_t i = 0; i < mSubEffects.size(); i++) {
+        base::StringAppendF(&ss, "%s[%zu] - Handle: %p, %s\n", prefixSpace.c_str(), i,
+                            mSubEffects[i].handle.get(),
+                            mSubEffects[i].descriptor.toString().c_str());
+    }
+    return ss;
+}
+
+} // namespace android::effect
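
A minimal usage sketch of the class added above, not part of the change itself: it shows how a caller could create an EffectProxy, select the active sub-effect with setOffloadParam(), open it and start processing. The function name runProxySketch and the surrounding plumbing are illustrative assumptions; only the EffectProxy calls mirror the implementation above.

#include <memory>
#include <optional>
#include <vector>

#include <system/audio_effect.h>

#include "EffectProxy.h"

using ::aidl::android::hardware::audio::effect::CommandId;
using ::aidl::android::hardware::audio::effect::Descriptor;
using ::aidl::android::hardware::audio::effect::IEffect;
using ::aidl::android::hardware::audio::effect::IFactory;
using ::aidl::android::hardware::audio::effect::Parameter;
using ::aidl::android::media::audio::common::AudioUuid;

// Illustrative only: create a proxy, pick the active sub-effect, open it and start processing.
ndk::ScopedAStatus runProxySketch(const AudioUuid& proxyUuid,
                                  const std::vector<Descriptor>& subEffectDescs,
                                  const std::shared_ptr<IFactory>& factory,
                                  const Parameter::Common& common) {
    auto proxy = ndk::SharedRefBase::make<android::effect::EffectProxy>(proxyUuid, subEffectDescs,
                                                                        factory);
    // A non-offloaded output selects the non-TUNNEL sub-effect as the active one.
    effect_offload_param_t offload = {.isOffload = false, .ioHandle = 0};
    if (auto status = proxy->setOffloadParam(&offload); !status.isOk()) return status;

    // open() fans out to every sub-effect; command() goes to the active one first.
    IEffect::OpenEffectReturn ret;
    if (auto status = proxy->open(common, std::nullopt, &ret); !status.isOk()) return status;
    return proxy->command(CommandId::START);
}
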
diff --git a/media/libaudiohal/impl/EffectProxy.h b/media/libaudiohal/impl/EffectProxy.h
new file mode 100644
index 0000000..18e1567
--- /dev/null
+++ b/media/libaudiohal/impl/EffectProxy.h
@@ -0,0 +1,151 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+#include <memory>
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+#include <aidl/android/hardware/audio/effect/BnFactory.h>
+#include <fmq/AidlMessageQueue.h>
+#include <system/audio_effect.h>
+
+namespace android {
+namespace effect {
+
+/**
+ * EffectProxy is the proxy for one or more AIDL effect implementations (sub-effects) of the same
+ * type. The audio framework uses EffectProxy as a composite implementation of all sub-effect
+ * implementations.
+ *
+ * At any given time, each proxy has exactly one active sub-effect which consumes and produces
+ * data. All setter commands and parameters (except the legacy EFFECT_CMD_OFFLOAD, which is
+ * handled by the audio framework directly) are passed through to all sub-effects; the getter
+ * commands and parameters are only passed through to the active sub-effect.
+ *
+ */
+class EffectProxy final : public ::aidl::android::hardware::audio::effect::BnEffect {
+  public:
+    EffectProxy(
+            const ::aidl::android::media::audio::common::AudioUuid& uuid,
+            const std::vector<::aidl::android::hardware::audio::effect::Descriptor>& descriptors,
+            const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory>& factory);
+
+    /**
+     * Handle offload parameter setting from framework.
+     */
+    ndk::ScopedAStatus setOffloadParam(const effect_offload_param_t* offload);
+
+    /**
+     * Destroy all sub-effects via AIDL IFactory.
+     */
+    ndk::ScopedAStatus destroy();
+
+    // IEffect interfaces override
+    ndk::ScopedAStatus open(
+            const ::aidl::android::hardware::audio::effect::Parameter::Common& common,
+            const std::optional<::aidl::android::hardware::audio::effect::Parameter::Specific>&
+                    specific,
+            ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn* ret) override;
+    ndk::ScopedAStatus close() override;
+    ndk::ScopedAStatus getDescriptor(
+            ::aidl::android::hardware::audio::effect::Descriptor* desc) override;
+    ndk::ScopedAStatus command(::aidl::android::hardware::audio::effect::CommandId id) override;
+    ndk::ScopedAStatus getState(::aidl::android::hardware::audio::effect::State* state) override;
+    ndk::ScopedAStatus setParameter(
+            const ::aidl::android::hardware::audio::effect::Parameter& param) override;
+    ndk::ScopedAStatus getParameter(
+            const ::aidl::android::hardware::audio::effect::Parameter::Id& id,
+            ::aidl::android::hardware::audio::effect::Parameter* param) override;
+
+    static ndk::ScopedAStatus buildDescriptor(
+            const ::aidl::android::media::audio::common::AudioUuid& uuid,
+            const std::vector<::aidl::android::hardware::audio::effect::Descriptor>& subEffectDescs,
+            ::aidl::android::hardware::audio::effect::Descriptor* desc);
+
+    /**
+     * Get a const reference to the active sub-effect's open return parameters.
+     * Always use these accessors to get the effect open return parameters (FMQs) after a
+     * successful setOffloadParam() call.
+     */
+    using StatusMQ = ::android::AidlMessageQueue<
+            ::aidl::android::hardware::audio::effect::IEffect::Status,
+            ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
+    using DataMQ = ::android::AidlMessageQueue<
+            float, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
+    const std::shared_ptr<StatusMQ>& getStatusMQ() const {
+        return mSubEffects[mActiveSubIdx].effectMq.statusQ;
+    }
+    const std::shared_ptr<DataMQ>& getInputMQ() const {
+        return mSubEffects[mActiveSubIdx].effectMq.inputQ;
+    }
+    const std::shared_ptr<DataMQ>& getOutputMQ() const {
+        return mSubEffects[mActiveSubIdx].effectMq.outputQ;
+    }
+
+    bool isBypassing() const;
+
+    // call dump for all sub-effects
+    binder_status_t dump(int fd, const char** args, uint32_t numArgs) override;
+
+    std::string toString(size_t indent = 0) const;
+
+  private:
+    // Common part of the proxy descriptor: copied from one sub-effect, with the implementation
+    // UUID replaced by the proxy UUID. The capability part comes from the active sub-effect.
+    const ::aidl::android::hardware::audio::effect::Descriptor::Common mDescriptorCommon;
+
+    struct EffectMQ {
+        std::shared_ptr<StatusMQ> statusQ;
+        std::shared_ptr<DataMQ> inputQ, outputQ;
+    };
+    struct SubEffect {
+        const ::aidl::android::hardware::audio::effect::Descriptor descriptor;
+        std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> handle;
+        EffectMQ effectMq;
+    };
+    std::vector<SubEffect> mSubEffects;
+
+    const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory> mFactory;
+
+    // Index of the active sub-effect; the first one (index 0) is used by default.
+    // It's safe to assume there are always at least two SubEffects in mSubEffects.
+    size_t mActiveSubIdx = 0;
+
+    ndk::ScopedAStatus runWithActiveSubEffectThenOthers(
+            std::function<ndk::ScopedAStatus(
+                    const std::shared_ptr<
+                            ::aidl::android::hardware::audio::effect::IEffect>&)> const& func);
+
+    ndk::ScopedAStatus runWithActiveSubEffect(
+            std::function<ndk::ScopedAStatus(const std::shared_ptr<IEffect>&)> const& func);
+
+    ndk::ScopedAStatus runWithAllSubEffects(
+            std::function<ndk::ScopedAStatus(std::shared_ptr<IEffect>&)> const& func);
+
+    // build Descriptor.Common with all sub-effect descriptors
+    static ::aidl::android::hardware::audio::effect::Descriptor::Common buildDescriptorCommon(
+            const ::aidl::android::media::audio::common::AudioUuid& uuid,
+            const std::vector<::aidl::android::hardware::audio::effect::Descriptor>&
+                    subEffectDescs);
+
+    // close and release all sub-effects
+    ~EffectProxy();
+};
+
+} // namespace effect
+} // namespace android
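
A hedged sketch of the data path the getStatusMQ()/getInputMQ()/getOutputMQ() accessors enable after a successful setOffloadParam() and open(). processOneBurst is a hypothetical helper, and the synchronous poll is a simplification of the real status-queue handshake.

#include <memory>
#include <vector>

#include "EffectProxy.h"

// Illustrative only: push one burst of samples through the active sub-effect's FMQs.
void processOneBurst(const std::shared_ptr<android::effect::EffectProxy>& proxy,
                     const std::vector<float>& input, std::vector<float>* output) {
    const auto& statusQ = proxy->getStatusMQ();
    const auto& inputQ = proxy->getInputMQ();
    const auto& outputQ = proxy->getOutputMQ();
    if (!statusQ || !inputQ || !outputQ) return;  // proxy not opened yet

    inputQ->write(input.data(), input.size());
    // The sub-effect reports how much it consumed/produced on the status queue.
    ::aidl::android::hardware::audio::effect::IEffect::Status status{};
    if (statusQ->read(&status, 1) && status.fmqProduced > 0) {
        output->resize(status.fmqProduced);
        outputQ->read(output->data(), output->size());
    }
}
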
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index f289f24..57395fa 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -15,39 +15,103 @@
  */
 
 #include <algorithm>
+#include <cstddef>
 #include <cstdint>
+#include <iterator>
 #include <memory>
 #define LOG_TAG "EffectsFactoryHalAidl"
 //#define LOG_NDEBUG 0
 
 #include <error/expected_utils.h>
+#include <aidl/android/media/audio/common/AudioStreamType.h>
 #include <android/binder_manager.h>
 #include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionEffect.h>
 #include <system/audio.h>
+#include <system/audio_aidl_utils.h>
 #include <utils/Log.h>
 
 #include "EffectBufferHalAidl.h"
 #include "EffectHalAidl.h"
+#include "EffectProxy.h"
 #include "EffectsFactoryHalAidl.h"
 
 using ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid;
-using aidl::android::aidl_utils::statusTFromBinderStatus;
-using aidl::android::hardware::audio::effect::IFactory;
-using aidl::android::media::audio::common::AudioUuid;
-using android::detail::AudioHalVersionInfo;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::Processing;
+using ::aidl::android::media::audio::common::AudioSource;
+using ::aidl::android::media::audio::common::AudioStreamType;
+using ::aidl::android::media::audio::common::AudioUuid;
+using ::android::audio::utils::toString;
+using ::android::base::unexpected;
+using ::android::detail::AudioHalVersionInfo;
 
 namespace android {
 namespace effect {
 
 EffectsFactoryHalAidl::EffectsFactoryHalAidl(std::shared_ptr<IFactory> effectsFactory)
     : mFactory(effectsFactory),
-      mHalVersion(AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, [this]() {
-          int32_t majorVersion = 0;
-          return (mFactory && mFactory->getInterfaceVersion(&majorVersion).isOk()) ? majorVersion
-                                                                                   : 0;
-      }())) {
-    ALOG_ASSERT(effectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
+      mHalVersion(AudioHalVersionInfo(
+              AudioHalVersionInfo::Type::AIDL,
+              [this]() {
+                  int32_t majorVersion = 0;
+                  return (mFactory && mFactory->getInterfaceVersion(&majorVersion).isOk())
+                                 ? majorVersion
+                                 : 0;
+              }())),
+      mHalDescList([this]() {
+          std::vector<Descriptor> list;
+          if (mFactory) {
+              mFactory->queryEffects(std::nullopt, std::nullopt, std::nullopt, &list).isOk();
+          }
+          return list;
+      }()),
+      mProxyUuidDescriptorMap([this]() {
+          std::map<AudioUuid, std::vector<Descriptor>> proxyUuidMap;
+          for (const auto& desc : mHalDescList) {
+              if (desc.common.id.proxy.has_value()) {
+                  const auto& uuid = desc.common.id.proxy.value();
+                  if (proxyUuidMap.count(uuid) == 0) {
+                      proxyUuidMap.insert({uuid, {desc}});
+                  } else {
+                      proxyUuidMap[uuid].emplace_back(desc);
+                  }
+              }
+          }
+          return proxyUuidMap;
+      }()),
+      mProxyDescList([this]() {
+          std::vector<Descriptor> list;
+          for (const auto& proxy : mProxyUuidDescriptorMap) {
+              if (Descriptor desc;
+                  EffectProxy::buildDescriptor(proxy.first /* uuid */,
+                                               proxy.second /* sub-effect descriptor list */,
+                                               &desc /* proxy descriptor */)
+                          .isOk()) {
+                  list.emplace_back(std::move(desc));
+              }
+          }
+          return list;
+      }()),
+      mNonProxyDescList([this]() {
+          std::vector<Descriptor> list;
+          std::copy_if(mHalDescList.begin(), mHalDescList.end(), std::back_inserter(list),
+                       [](const Descriptor& desc) { return !desc.common.id.proxy.has_value(); });
+          return list;
+      }()),
+      mEffectCount(mNonProxyDescList.size() + mProxyDescList.size()),
+      mAidlProcessings([this]() -> std::vector<Processing> {
+          std::vector<Processing> processings;
+          if (!mFactory || !mFactory->queryProcessing(std::nullopt, &processings).isOk()) {
+              ALOGE("%s queryProcessing failed", __func__);
+          }
+          return processings;
+      }()) {
+    ALOG_ASSERT(mFactory != nullptr, "Provided IEffectsFactory service is NULL");
+    ALOGI("%s with %zu nonProxyEffects and %zu proxyEffects", __func__, mNonProxyDescList.size(),
+          mProxyDescList.size());
 }
 
 status_t EffectsFactoryHalAidl::queryNumberEffects(uint32_t *pNumEffects) {
@@ -55,12 +119,9 @@
         return BAD_VALUE;
     }
 
-    {
-        std::lock_guard lg(mLock);
-        RETURN_STATUS_IF_ERROR(queryEffectList_l());
-        *pNumEffects = mDescList->size();
-    }
-    ALOGI("%s %d", __func__, *pNumEffects);
+    *pNumEffects = mEffectCount;
+    ALOGD("%s %u non %zu proxyMap %zu proxyDesc %zu", __func__, *pNumEffects,
+          mNonProxyDescList.size(), mProxyUuidDescriptorMap.size(), mProxyDescList.size());
     return OK;
 }
 
@@ -69,42 +130,43 @@
         return BAD_VALUE;
     }
 
-    std::lock_guard lg(mLock);
-    RETURN_STATUS_IF_ERROR(queryEffectList_l());
-
-    auto listSize = mDescList->size();
-    if (index >= listSize) {
-        ALOGE("%s index %d exceed size DescList %zd", __func__, index, listSize);
+    if (index >= mEffectCount) {
+        ALOGE("%s index %d exceed max number %zu", __func__, index, mEffectCount);
         return INVALID_OPERATION;
     }
 
-    *pDescriptor = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::aidl2legacy_Descriptor_effect_descriptor(mDescList->at(index)));
+    if (index >= mNonProxyDescList.size()) {
+        *pDescriptor =
+                VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_Descriptor_effect_descriptor(
+                        mProxyDescList.at(index - mNonProxyDescList.size())));
+    } else {
+        *pDescriptor =
+                VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_Descriptor_effect_descriptor(
+                        mNonProxyDescList.at(index)));
+    }
     return OK;
 }
 
 status_t EffectsFactoryHalAidl::getDescriptor(const effect_uuid_t* halUuid,
                                               effect_descriptor_t* pDescriptor) {
-    if (halUuid == nullptr || pDescriptor == nullptr) {
+    if (halUuid == nullptr) {
         return BAD_VALUE;
     }
 
-    AudioUuid uuid = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halUuid));
-    std::lock_guard lg(mLock);
-    return getHalDescriptorWithImplUuid_l(uuid, pDescriptor);
+    AudioUuid uuid =
+            VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halUuid));
+    return getHalDescriptorWithImplUuid(uuid, pDescriptor);
 }
 
 status_t EffectsFactoryHalAidl::getDescriptors(const effect_uuid_t* halType,
                                                std::vector<effect_descriptor_t>* descriptors) {
-    if (halType == nullptr || descriptors == nullptr) {
+    if (halType == nullptr) {
         return BAD_VALUE;
     }
 
-    AudioUuid type = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halType));
-    std::lock_guard lg(mLock);
-    return getHalDescriptorWithTypeUuid_l(type, descriptors);
+    AudioUuid type =
+            VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halType));
+    return getHalDescriptorWithTypeUuid(type, descriptors);
 }
 
 status_t EffectsFactoryHalAidl::createEffect(const effect_uuid_t* uuid, int32_t sessionId,
@@ -116,33 +178,43 @@
     if (sessionId == AUDIO_SESSION_DEVICE && ioId == AUDIO_IO_HANDLE_NONE) {
         return INVALID_OPERATION;
     }
-
     ALOGI("%s session %d ioId %d", __func__, sessionId, ioId);
 
-    AudioUuid aidlUuid = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
+    AudioUuid aidlUuid =
+            VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
     std::shared_ptr<IEffect> aidlEffect;
-    Descriptor desc;
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mFactory->createEffect(aidlUuid, &aidlEffect)));
+    // Use the EffectProxy wrapper instead of IFactory to create proxy effects
+    const bool isProxy = isProxyEffect(aidlUuid);
+    if (isProxy) {
+        aidlEffect = ndk::SharedRefBase::make<EffectProxy>(
+                aidlUuid, mProxyUuidDescriptorMap.at(aidlUuid) /* sub-effect descriptor list */,
+                mFactory);
+        mProxyList.emplace_back(std::static_pointer_cast<EffectProxy>(aidlEffect));
+    } else {
+        RETURN_STATUS_IF_ERROR(
+                statusTFromBinderStatus(mFactory->createEffect(aidlUuid, &aidlEffect)));
+    }
     if (aidlEffect == nullptr) {
-        ALOGE("%s IFactory::createFactory failed UUID %s", __func__, aidlUuid.toString().c_str());
+        ALOGE("%s failed to create effect with UUID: %s", __func__, toString(aidlUuid).c_str());
         return NAME_NOT_FOUND;
     }
+    Descriptor desc;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aidlEffect->getDescriptor(&desc)));
 
-    uint64_t effectId;
-    {
-        std::lock_guard lg(mLock);
-        effectId = ++mEffectIdCounter;
-    }
-
-    *effect = sp<EffectHalAidl>::make(mFactory, aidlEffect, effectId, sessionId, ioId, desc);
+    *effect = sp<EffectHalAidl>::make(mFactory, aidlEffect, sessionId, ioId, desc, isProxy);
     return OK;
 }
 
 status_t EffectsFactoryHalAidl::dumpEffects(int fd) {
-    // TODO: add proxy dump here because AIDL service EffectFactory doesn't have proxy handle
-    return mFactory->dump(fd, nullptr, 0);
+    status_t ret = OK;
+    // record any error and keep dumping as many effects as possible
+    for (const auto& proxy : mProxyList) {
+        if (status_t temp = BAD_VALUE; proxy && (temp = proxy->dump(fd, nullptr, 0)) != OK) {
+            ret = temp;
+        }
+    }
+    RETURN_STATUS_IF_ERROR(mFactory->dump(fd, nullptr, 0));
+    return ret;
 }
 
 status_t EffectsFactoryHalAidl::allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) {
@@ -160,56 +232,42 @@
     return mHalVersion;
 }
 
-status_t EffectsFactoryHalAidl::queryEffectList_l() {
-    if (!mDescList) {
-        std::vector<Descriptor> list;
-        auto status = mFactory->queryEffects(std::nullopt, std::nullopt, std::nullopt, &list);
-        if (!status.isOk()) {
-            ALOGE("%s IFactory::queryEffects failed %s", __func__, status.getDescription().c_str());
-            return status.getStatus();
-        }
-
-        mDescList = std::make_unique<std::vector<Descriptor>>(list);
-    }
-    return OK;
-}
-
-status_t EffectsFactoryHalAidl::getHalDescriptorWithImplUuid_l(const AudioUuid& uuid,
-                                                               effect_descriptor_t* pDescriptor) {
+status_t EffectsFactoryHalAidl::getHalDescriptorWithImplUuid(const AudioUuid& uuid,
+                                                             effect_descriptor_t* pDescriptor) {
     if (pDescriptor == nullptr) {
         return BAD_VALUE;
     }
-    if (!mDescList) {
-        RETURN_STATUS_IF_ERROR(queryEffectList_l());
-    }
 
-    auto matchIt = std::find_if(mDescList->begin(), mDescList->end(),
-                                 [&](const auto& desc) { return desc.common.id.uuid == uuid; });
-    if (matchIt == mDescList->end()) {
-        ALOGE("%s UUID %s not found", __func__, uuid.toString().c_str());
+    const auto& list = isProxyEffect(uuid) ? mProxyDescList : mNonProxyDescList;
+    auto matchIt = std::find_if(list.begin(), list.end(),
+                                [&](const auto& desc) { return desc.common.id.uuid == uuid; });
+    if (matchIt == list.end()) {
+        ALOGE("%s UUID not found in HAL and proxy list %s", __func__, toString(uuid).c_str());
         return BAD_VALUE;
     }
+    ALOGI("%s UUID impl found %s", __func__, toString(uuid).c_str());
 
     *pDescriptor = VALUE_OR_RETURN_STATUS(
             ::aidl::android::aidl2legacy_Descriptor_effect_descriptor(*matchIt));
     return OK;
 }
 
-status_t EffectsFactoryHalAidl::getHalDescriptorWithTypeUuid_l(
+status_t EffectsFactoryHalAidl::getHalDescriptorWithTypeUuid(
         const AudioUuid& type, std::vector<effect_descriptor_t>* descriptors) {
     if (descriptors == nullptr) {
         return BAD_VALUE;
     }
-    if (!mDescList) {
-        RETURN_STATUS_IF_ERROR(queryEffectList_l());
-    }
+
     std::vector<Descriptor> result;
-    std::copy_if(mDescList->begin(), mDescList->end(), std::back_inserter(result),
+    std::copy_if(mNonProxyDescList.begin(), mNonProxyDescList.end(), std::back_inserter(result),
                  [&](auto& desc) { return desc.common.id.type == type; });
-    if (result.size() == 0) {
-        ALOGE("%s type UUID %s not found", __func__, type.toString().c_str());
+    std::copy_if(mProxyDescList.begin(), mProxyDescList.end(), std::back_inserter(result),
+                 [&](auto& desc) { return desc.common.id.type == type; });
+    if (result.empty()) {
+        ALOGW("%s UUID type not found in HAL and proxy list %s", __func__, toString(type).c_str());
         return BAD_VALUE;
     }
+    ALOGI("%s UUID type found %zu \n %s", __func__, result.size(), toString(type).c_str());
 
     *descriptors = VALUE_OR_RETURN_STATUS(
             aidl::android::convertContainer<std::vector<effect_descriptor_t>>(
@@ -217,6 +275,87 @@
     return OK;
 }
 
+bool EffectsFactoryHalAidl::isProxyEffect(const AudioUuid& uuid) const {
+    return 0 != mProxyUuidDescriptorMap.count(uuid);
+}
+
+std::shared_ptr<const effectsConfig::Processings> EffectsFactoryHalAidl::getProcessings() const {
+
+    auto getConfigEffectWithDescriptor =
+            [](const auto& desc) -> std::shared_ptr<const effectsConfig::Effect> {
+        effectsConfig::Effect effect = {.name = desc.common.name, .isProxy = false};
+        if (const auto uuid =
+                    ::aidl::android::aidl2legacy_AudioUuid_audio_uuid_t(desc.common.id.uuid);
+            uuid.ok()) {
+            static_cast<effectsConfig::EffectImpl&>(effect).uuid = uuid.value();
+            return std::make_shared<const effectsConfig::Effect>(effect);
+        } else {
+            return nullptr;
+        }
+    };
+
+    auto getConfigProcessingWithAidlProcessing =
+            [&](const auto& aidlProcess, std::vector<effectsConfig::InputStream>& preprocess,
+                std::vector<effectsConfig::OutputStream>& postprocess) {
+                if (aidlProcess.type.getTag() == Processing::Type::streamType) {
+                    AudioStreamType aidlType =
+                            aidlProcess.type.template get<Processing::Type::streamType>();
+                    const auto type =
+                            ::aidl::android::aidl2legacy_AudioStreamType_audio_stream_type_t(
+                                    aidlType);
+                    if (!type.ok()) {
+                        return;
+                    }
+
+                    std::vector<std::shared_ptr<const effectsConfig::Effect>> effects;
+                    std::transform(aidlProcess.ids.begin(), aidlProcess.ids.end(),
+                                   std::back_inserter(effects), getConfigEffectWithDescriptor);
+                    effectsConfig::OutputStream stream = {.type = type.value(),
+                                                          .effects = std::move(effects)};
+                    postprocess.emplace_back(stream);
+                } else if (aidlProcess.type.getTag() == Processing::Type::source) {
+                    AudioSource aidlType =
+                            aidlProcess.type.template get<Processing::Type::source>();
+                    const auto type =
+                            ::aidl::android::aidl2legacy_AudioSource_audio_source_t(aidlType);
+                    if (!type.ok()) {
+                        return;
+                    }
+
+                    std::vector<std::shared_ptr<const effectsConfig::Effect>> effects;
+                    std::transform(aidlProcess.ids.begin(), aidlProcess.ids.end(),
+                                   std::back_inserter(effects), getConfigEffectWithDescriptor);
+                    effectsConfig::InputStream stream = {.type = type.value(),
+                                                         .effects = std::move(effects)};
+                    preprocess.emplace_back(stream);
+                }
+            };
+
+    static std::shared_ptr<const effectsConfig::Processings> processings(
+            [&]() -> std::shared_ptr<const effectsConfig::Processings> {
+                std::vector<effectsConfig::InputStream> preprocess;
+                std::vector<effectsConfig::OutputStream> postprocess;
+                for (const auto& processing : mAidlProcessings) {
+                    getConfigProcessingWithAidlProcessing(processing, preprocess, postprocess);
+                }
+
+                if (0 == preprocess.size() && 0 == postprocess.size()) {
+                    return nullptr;
+                }
+
+                return std::make_shared<const effectsConfig::Processings>(
+                        effectsConfig::Processings({.preprocess = std::move(preprocess),
+                                                    .postprocess = std::move(postprocess)}));
+            }());
+
+    return processings;
+}
+
+// Return 0 for AIDL, as the AIDL interface is not aware of the configuration file.
+::android::error::Result<size_t> EffectsFactoryHalAidl::getSkippedElements() const {
+    return 0;
+}
+
 } // namespace effect
 
 // When a shared library is built from a static library, even explicit
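
The factory now reports non-proxy descriptors first and proxy descriptors after them; the small helper below is hypothetical (not part of the patch) and only mirrors the indexing convention used by getDescriptor(index) above.

#include <cstddef>
#include <optional>
#include <vector>

// Illustrative only: map a flat effect index onto the two descriptor lists, non-proxy first.
template <typename Descriptor>
std::optional<Descriptor> descriptorAt(const std::vector<Descriptor>& nonProxyDescs,
                                       const std::vector<Descriptor>& proxyDescs, size_t index) {
    if (index < nonProxyDescs.size()) {
        return nonProxyDescs[index];        // [0, nonProxy.size())
    }
    index -= nonProxyDescs.size();
    if (index < proxyDescs.size()) {
        return proxyDescs[index];           // [nonProxy.size(), effectCount)
    }
    return std::nullopt;                    // index >= effectCount
}
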
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.h b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
index 9c3643b..73089b0 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
@@ -17,14 +17,17 @@
 #pragma once
 
 #include <cstddef>
+#include <list>
 #include <memory>
-#include <mutex>
 
 #include <aidl/android/hardware/audio/effect/IFactory.h>
+#include <aidl/android/hardware/audio/effect/Processing.h>
 #include <android-base/thread_annotations.h>
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <system/thread_defs.h>
 
+#include "EffectProxy.h"
+
 namespace android {
 namespace effect {
 
@@ -60,24 +63,40 @@
 
     detail::AudioHalVersionInfo getHalVersion() const override;
 
-    // for TIME_CHECK
-    const std::string getClassName() const { return "EffectHalAidl"; }
+    std::shared_ptr<const effectsConfig::Processings> getProcessings() const override;
+
+    ::android::error::Result<size_t> getSkippedElements() const override;
 
   private:
-    std::mutex mLock;
     const std::shared_ptr<IFactory> mFactory;
-    uint64_t mEffectIdCounter GUARDED_BY(mLock) = 0; // Align with HIDL (0 is INVALID_ID)
-    std::unique_ptr<std::vector<Descriptor>> mDescList GUARDED_BY(mLock) = nullptr;
     const detail::AudioHalVersionInfo mHalVersion;
+    // Full list of HAL effect descriptors
+    const std::vector<Descriptor> mHalDescList;
+    // Map of proxy UUID (key) to the Descriptor of sub-effects
+    const std::map<::aidl::android::media::audio::common::AudioUuid, std::vector<Descriptor>>
+            mProxyUuidDescriptorMap;
+    // Proxy descriptor list; built after mProxyUuidDescriptorMap since it needs all sub-effects
+    const std::vector<Descriptor> mProxyDescList;
+    // List of non-proxy effects
+    const std::vector<Descriptor> mNonProxyDescList;
+    // total number of effects including proxy effects
+    const size_t mEffectCount;
+    // Query result of pre and post processing from effect factory
+    const std::vector<Processing> mAidlProcessings;
+
+    // list of the EffectProxy instances
+    std::list<std::shared_ptr<EffectProxy>> mProxyList;
 
     virtual ~EffectsFactoryHalAidl() = default;
-    status_t queryEffectList_l() REQUIRES(mLock);
-    status_t getHalDescriptorWithImplUuid_l(
-            const aidl::android::media::audio::common::AudioUuid& uuid,
-            effect_descriptor_t* pDescriptor) REQUIRES(mLock);
-    status_t getHalDescriptorWithTypeUuid_l(
-            const aidl::android::media::audio::common::AudioUuid& type,
-            std::vector<effect_descriptor_t>* descriptors) REQUIRES(mLock);
+    status_t getHalDescriptorWithImplUuid(
+            const ::aidl::android::media::audio::common::AudioUuid& uuid,
+            effect_descriptor_t* pDescriptor);
+
+    status_t getHalDescriptorWithTypeUuid(
+            const ::aidl::android::media::audio::common::AudioUuid& type,
+            std::vector<effect_descriptor_t>* descriptors);
+
+    bool isProxyEffect(const aidl::android::media::audio::common::AudioUuid& uuid) const;
 };
 
 } // namespace effect
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
index 172ebdf..210c4b5 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
@@ -33,10 +33,11 @@
 
 #include "android/media/AudioHalVersion.h"
 
+using ::android::base::unexpected;
 using ::android::detail::AudioHalVersionInfo;
+using ::android::hardware::Return;
 using ::android::hardware::audio::common::CPP_VERSION::implementation::UuidUtils;
 using ::android::hardware::audio::effect::CPP_VERSION::implementation::EffectUtils;
-using ::android::hardware::Return;
 
 namespace android {
 namespace effect {
@@ -78,9 +79,11 @@
 }
 
 EffectsFactoryHalHidl::EffectsFactoryHalHidl(sp<IEffectsFactory> effectsFactory)
-        : EffectConversionHelperHidl("EffectsFactory"), mCache(new EffectDescriptorCache) {
-    ALOG_ASSERT(effectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
-    mEffectsFactory = std::move(effectsFactory);
+    : EffectConversionHelperHidl("EffectsFactory"),
+      mEffectsFactory(std::move(effectsFactory)),
+      mCache(new EffectDescriptorCache),
+      mParsingResult(effectsConfig::parse()) {
+    ALOG_ASSERT(mEffectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
 }
 
 status_t EffectsFactoryHalHidl::queryNumberEffects(uint32_t *pNumEffects) {
@@ -228,6 +231,17 @@
     return AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, MAJOR_VERSION, MINOR_VERSION);
 }
 
+std::shared_ptr<const effectsConfig::Processings> EffectsFactoryHalHidl::getProcessings() const {
+    return mParsingResult.parsedConfig;
+}
+
+::android::error::Result<size_t> EffectsFactoryHalHidl::getSkippedElements() const {
+    if (!mParsingResult.parsedConfig) {
+        return ::android::base::unexpected(BAD_VALUE);
+    }
+    return mParsingResult.nbSkippedElement;
+}
+
 } // namespace effect
 
 // When a shared library is built from a static library, even explicit
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.h b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
index 9875154..4110ba3 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
@@ -17,6 +17,7 @@
 #pragma once
 
 #include <memory>
+#include <vector>
 
 #include PATH(android/hardware/audio/effect/FILE_VERSION/IEffectsFactory.h)
 #include <media/audiohal/EffectsFactoryHalInterface.h>
@@ -62,9 +63,19 @@
 
     android::detail::AudioHalVersionInfo getHalVersion() const override;
 
+    std::shared_ptr<const effectsConfig::Processings> getProcessings() const override;
+
+    ::android::error::Result<size_t> getSkippedElements() const override;
+
   private:
-    sp<IEffectsFactory> mEffectsFactory;
-    std::unique_ptr<EffectDescriptorCache> mCache;
+    const sp<IEffectsFactory> mEffectsFactory;
+    const std::unique_ptr<EffectDescriptorCache> mCache;
+    /**
+     * Configuration file parsing result, used by getProcessings() and getSkippedElements().
+     * It holds the parsed configuration data as well as the number of elements skipped due to
+     * parsing errors.
+     */
+    const effectsConfig::ParsingResult mParsingResult;
 };
 
 } // namespace effect
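
Both factory implementations now expose the pre/post processing configuration through getProcessings(). A hedged sketch of a caller follows; countConfiguredProcessingEffects is an invented helper name, and it assumes the Processings struct layout (preprocess/postprocess streams with effect lists) shown in the AIDL conversion above.

#include <cstddef>

#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <utils/StrongPointer.h>

// Illustrative only: count the effects attached to automatic pre/post processing streams.
size_t countConfiguredProcessingEffects(
        const android::sp<android::EffectsFactoryHalInterface>& factory) {
    const auto processings = factory->getProcessings();
    if (processings == nullptr) return 0;  // no configuration available, or the query failed
    size_t count = 0;
    for (const auto& stream : processings->preprocess) count += stream.effects.size();
    for (const auto& stream : processings->postprocess) count += stream.effects.size();
    return count;
}
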
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index cbc1578..80e19a0 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -22,6 +22,7 @@
 
 #include <audio_utils/clock.h>
 #include <media/AidlConversion.h>
+#include <media/AidlConversionCore.h>
 #include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionUtil.h>
@@ -31,6 +32,7 @@
 #include <utils/Log.h>
 
 #include "DeviceHalAidl.h"
+#include "EffectHalAidl.h"
 #include "StreamHalAidl.h"
 
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
@@ -40,7 +42,9 @@
 using ::aidl::android::hardware::audio::core::IStreamIn;
 using ::aidl::android::hardware::audio::core::IStreamOut;
 using ::aidl::android::hardware::audio::core::StreamDescriptor;
+using ::aidl::android::hardware::audio::core::MmapBufferDescriptor;
 using ::aidl::android::media::audio::common::MicrophoneDynamicInfo;
+using ::aidl::android::media::audio::IHalAdapterVendorExtension;
 
 namespace android {
 
@@ -71,12 +75,14 @@
 StreamHalAidl::StreamHalAidl(
         std::string_view className, bool isInput, const audio_config& config,
         int32_t nominalLatency, StreamContextAidl&& context,
-        const std::shared_ptr<IStreamCommon>& stream)
+        const std::shared_ptr<IStreamCommon>& stream,
+        const std::shared_ptr<IHalAdapterVendorExtension>& vext)
         : ConversionHelperAidl(className),
           mIsInput(isInput),
           mConfig(configToBase(config)),
           mContext(std::move(context)),
-          mStream(stream) {
+          mStream(stream),
+          mVendorExt(vext) {
     {
         std::lock_guard l(mLock);
         mLastReply.latencyMs = nominalLatency;
@@ -120,34 +126,9 @@
     return OK;
 }
 
-namespace {
-
-// 'action' must accept a value of type 'T' and return 'status_t'.
-// The function returns 'true' if the parameter was found, and the action has succeeded.
-// The function returns 'false' if the parameter was not found.
-// Any errors get propagated, if there are errors it means the parameter was found.
-template<typename T, typename F>
-error::Result<bool> filterOutAndProcessParameter(
-        AudioParameter& parameters, const String8& key, const F& action) {
-    if (parameters.containsKey(key)) {
-        T value;
-        status_t status = parameters.get(key, value);
-        if (status == OK) {
-            parameters.remove(key);
-            status = action(value);
-            if (status == OK) return true;
-        }
-        return base::unexpected(status);
-    }
-    return false;
-}
-
-}  // namespace
-
 status_t StreamHalAidl::setParameters(const String8& kvPairs) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-
     AudioParameter parameters(kvPairs);
     ALOGD("%s: parameters: %s", __func__, parameters.toString().c_str());
 
@@ -156,19 +137,18 @@
             [&](int hwAvSyncId) {
                 return statusTFromBinderStatus(mStream->updateHwAvSyncId(hwAvSyncId));
             }));
-
-    ALOGW_IF(parameters.size() != 0, "%s: unknown parameters, ignored: %s",
-            __func__, parameters.toString().c_str());
-    return OK;
+    return parseAndSetVendorParameters(mVendorExt, mStream, parameters);
 }
 
 status_t StreamHalAidl::getParameters(const String8& keys __unused, String8 *values) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    values->clear();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    if (values == nullptr) {
+        return BAD_VALUE;
+    }
+    AudioParameter parameterKeys(keys), result;
+    *values = result.toString();
+    return parseAndGetVendorParameters(mVendorExt, mStream, parameterKeys, values);
 }
 
 status_t StreamHalAidl::getFrameSize(size_t *size) {
@@ -183,20 +163,26 @@
     return OK;
 }
 
-status_t StreamHalAidl::addEffect(sp<EffectHalInterface> effect __unused) {
+status_t StreamHalAidl::addEffect(sp<EffectHalInterface> effect) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    if (effect == nullptr) {
+        return BAD_VALUE;
+    }
+    auto aidlEffect = sp<effect::EffectHalAidl>::cast(effect);
+    return statusTFromBinderStatus(mStream->addEffect(aidlEffect->getIEffect()));
 }
 
-status_t StreamHalAidl::removeEffect(sp<EffectHalInterface> effect __unused) {
+status_t StreamHalAidl::removeEffect(sp<EffectHalInterface> effect) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    if (effect == nullptr) {
+        return BAD_VALUE;
+    }
+    auto aidlEffect = sp<effect::EffectHalAidl>::cast(effect);
+    return statusTFromBinderStatus(mStream->removeEffect(aidlEffect->getIEffect()));
 }
 
 status_t StreamHalAidl::standby() {
@@ -255,16 +241,23 @@
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    const auto state = getState();
+    StreamDescriptor::Reply reply;
+    if (state == StreamDescriptor::State::STANDBY) {
+        if (status_t status = sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true);
+                status != OK) {
+            return status;
+        }
+        return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true);
+    }
+
+    return INVALID_OPERATION;
 }
 
 status_t StreamHalAidl::stop() {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
-    TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    return standby();
 }
 
 status_t StreamHalAidl::getLatency(uint32_t *latency) {
@@ -290,6 +283,20 @@
     return OK;
 }
 
+status_t StreamHalAidl::getHardwarePosition(int64_t *frames, int64_t *timestamp) {
+    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    if (!mStream) return NO_INIT;
+    StreamDescriptor::Reply reply;
+    // TODO: switch to updateCountersIfNeeded once we sort out mWorkerTid initialization
+    if (status_t status = sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), &reply, true);
+            status != OK) {
+        return status;
+    }
+    *frames = reply.hardware.frames;
+    *timestamp = reply.hardware.timeNs;
+    return OK;
+}
+
 status_t StreamHalAidl::getXruns(int32_t *frames) {
     ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
     if (!mStream) return NO_INIT;
@@ -414,24 +421,39 @@
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    return statusTFromBinderStatus(mStream->prepareToClose());
 }
 
 status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
-                                  struct audio_mmap_buffer_info *info __unused) {
+                                         struct audio_mmap_buffer_info *info) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
+    if (!mContext.isMmapped()) {
+        return BAD_VALUE;
+    }
+    const MmapBufferDescriptor& bufferDescriptor = mContext.getMmapBufferDescriptor();
+    info->shared_memory_fd = bufferDescriptor.sharedMemory.fd.get();
+    info->buffer_size_frames = mContext.getBufferSizeFrames();
+    info->burst_size_frames = bufferDescriptor.burstSizeFrames;
+    info->flags = static_cast<audio_mmap_buffer_flag>(bufferDescriptor.flags);
+
     return OK;
 }
 
-status_t StreamHalAidl::getMmapPosition(struct audio_mmap_position *position __unused) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+status_t StreamHalAidl::getMmapPosition(struct audio_mmap_position *position) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
+    if (!mContext.isMmapped()) {
+        return BAD_VALUE;
+    }
+    int64_t aidlPosition = 0, aidlTimestamp = 0;
+    if (status_t status = getHardwarePosition(&aidlPosition, &aidlTimestamp); status != OK) {
+        return status;
+    }
+
+    position->time_nanoseconds = aidlTimestamp;
+    position->position_frames = static_cast<int32_t>(aidlPosition);
     return OK;
 }
 
@@ -440,19 +462,6 @@
     return OK;
 }
 
-status_t StreamHalAidl::getHalPid(pid_t *pid __unused) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
-    TIME_CHECK();
-    if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
-}
-
-bool StreamHalAidl::requestHalThreadPriority(pid_t threadPid __unused, pid_t threadId __unused) {
-    // Obsolete, must be done by the HAL module.
-    return true;
-}
-
 status_t StreamHalAidl::legacyCreateAudioPatch(const struct audio_port_config& port __unused,
                                                std::optional<audio_source_t> source __unused,
                                                audio_devices_t type __unused) {
@@ -533,9 +542,11 @@
 
 StreamOutHalAidl::StreamOutHalAidl(
         const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
-        const std::shared_ptr<IStreamOut>& stream, const sp<CallbackBroker>& callbackBroker)
+        const std::shared_ptr<IStreamOut>& stream,
+        const std::shared_ptr<IHalAdapterVendorExtension>& vext,
+        const sp<CallbackBroker>& callbackBroker)
         : StreamHalAidl("StreamOutHalAidl", false /*isInput*/, config, nominalLatency,
-                std::move(context), getStreamCommon(stream)),
+                std::move(context), getStreamCommon(stream), vext),
           mStream(stream), mCallbackBroker(callbackBroker) {
     // Initialize the offload metadata
     mOffloadMetadata.sampleRate = static_cast<int32_t>(config.sample_rate);
@@ -555,10 +566,10 @@
     if (!mStream) return NO_INIT;
 
     AudioParameter parameters(kvPairs);
-    ALOGD("%s parameters: %s", __func__, parameters.toString().c_str());
+    ALOGD("%s: parameters: \"%s\"", __func__, parameters.toString().c_str());
 
     if (status_t status = filterAndUpdateOffloadMetadata(parameters); status != OK) {
-        ALOGW("%s filtering or updating offload metadata failed: %d", __func__, status);
+        ALOGW("%s: filtering or updating offload metadata failed: %d", __func__, status);
     }
 
     return StreamHalAidl::setParameters(parameters.toString());
@@ -574,11 +585,10 @@
     return statusTFromBinderStatus(mStream->setHwVolume({left, right}));
 }
 
-status_t StreamOutHalAidl::selectPresentation(int presentationId __unused, int programId __unused) {
+status_t StreamOutHalAidl::selectPresentation(int presentationId, int programId) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    return statusTFromBinderStatus(mStream->selectPresentation(presentationId, programId));
 }
 
 status_t StreamOutHalAidl::write(const void *buffer, size_t bytes, size_t *written) {
@@ -684,48 +694,61 @@
     return statusTFromBinderStatus(mStream->updateMetadata(aidlMetadata));
 }
 
-status_t StreamOutHalAidl::getDualMonoMode(audio_dual_mono_mode_t* mode __unused) {
+status_t StreamOutHalAidl::getDualMonoMode(audio_dual_mono_mode_t* mode) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
+    if (mode == nullptr) {
+        return BAD_VALUE;
+    }
+    ::aidl::android::media::audio::common::AudioDualMonoMode aidlMode;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mStream->getDualMonoMode(&aidlMode)));
+    *mode = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(aidlMode));
     return OK;
 }
 
-status_t StreamOutHalAidl::setDualMonoMode(audio_dual_mono_mode_t mode __unused) {
+status_t StreamOutHalAidl::setDualMonoMode(audio_dual_mono_mode_t mode) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
+    ::aidl::android::media::audio::common::AudioDualMonoMode aidlMode = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(mode));
+    return statusTFromBinderStatus(mStream->setDualMonoMode(aidlMode));
+}
+
+status_t StreamOutHalAidl::getAudioDescriptionMixLevel(float* leveldB) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    if (leveldB == nullptr) {
+        return BAD_VALUE;
+    }
+    return statusTFromBinderStatus(mStream->getAudioDescriptionMixLevel(leveldB));
+}
+
+status_t StreamOutHalAidl::setAudioDescriptionMixLevel(float leveldB) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    return statusTFromBinderStatus(mStream->setAudioDescriptionMixLevel(leveldB));
+}
+
+status_t StreamOutHalAidl::getPlaybackRateParameters(audio_playback_rate_t* playbackRate) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    if (playbackRate == nullptr) {
+        return BAD_VALUE;
+    }
+    ::aidl::android::media::audio::common::AudioPlaybackRate aidlRate;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mStream->getPlaybackRateParameters(&aidlRate)));
+    *playbackRate = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(aidlRate));
     return OK;
 }
 
-status_t StreamOutHalAidl::getAudioDescriptionMixLevel(float* leveldB __unused) {
+status_t StreamOutHalAidl::setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
-}
-
-status_t StreamOutHalAidl::setAudioDescriptionMixLevel(float leveldB __unused) {
-    TIME_CHECK();
-    if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
-}
-
-status_t StreamOutHalAidl::getPlaybackRateParameters(
-        audio_playback_rate_t* playbackRate __unused) {
-    TIME_CHECK();
-    if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return BAD_VALUE;
-}
-
-status_t StreamOutHalAidl::setPlaybackRateParameters(
-        const audio_playback_rate_t& playbackRate __unused) {
-    TIME_CHECK();
-    if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return BAD_VALUE;
+    ::aidl::android::media::audio::common::AudioPlaybackRate aidlRate = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(playbackRate));
+    return statusTFromBinderStatus(mStream->setPlaybackRateParameters(aidlRate));
 }
 
 status_t StreamOutHalAidl::setEventCallback(
@@ -738,18 +761,27 @@
     return OK;
 }
 
-status_t StreamOutHalAidl::setLatencyMode(audio_latency_mode_t mode __unused) {
+status_t StreamOutHalAidl::setLatencyMode(audio_latency_mode_t mode) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    ::aidl::android::media::audio::common::AudioLatencyMode aidlMode = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_latency_mode_t_AudioLatencyMode(mode));
+    return statusTFromBinderStatus(mStream->setLatencyMode(aidlMode));
 };
 
-status_t StreamOutHalAidl::getRecommendedLatencyModes(
-        std::vector<audio_latency_mode_t> *modes __unused) {
+status_t StreamOutHalAidl::getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
+    if (modes == nullptr) {
+        return BAD_VALUE;
+    }
+    std::vector<::aidl::android::media::audio::common::AudioLatencyMode> aidlModes;
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(mStream->getRecommendedLatencyModes(&aidlModes)));
+    *modes = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::convertContainer<std::vector<audio_latency_mode_t>>(
+                    aidlModes,
+                    ::aidl::android::aidl2legacy_AudioLatencyMode_audio_latency_mode_t));
     return OK;
 };
 
@@ -840,16 +872,17 @@
 
 StreamInHalAidl::StreamInHalAidl(
         const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
-        const std::shared_ptr<IStreamIn>& stream, const sp<MicrophoneInfoProvider>& micInfoProvider)
+        const std::shared_ptr<IStreamIn>& stream,
+        const std::shared_ptr<IHalAdapterVendorExtension>& vext,
+        const sp<MicrophoneInfoProvider>& micInfoProvider)
         : StreamHalAidl("StreamInHalAidl", true /*isInput*/, config, nominalLatency,
-                std::move(context), getStreamCommon(stream)),
+                std::move(context), getStreamCommon(stream), vext),
           mStream(stream), mMicInfoProvider(micInfoProvider) {}
 
-status_t StreamInHalAidl::setGain(float gain __unused) {
+status_t StreamInHalAidl::setGain(float gain) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
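+    // Assumption noted here: setHwGain accepts a list of channel gains; the single legacy gain value is wrapped into it.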
+    return statusTFromBinderStatus(mStream->setHwGain({gain}));
 }
 
 status_t StreamInHalAidl::read(void *buffer, size_t bytes, size_t *read) {
@@ -922,19 +955,20 @@
     return statusTFromBinderStatus(mStream->updateMetadata(aidlMetadata));
 }
 
-status_t StreamInHalAidl::setPreferredMicrophoneDirection(
-            audio_microphone_direction_t direction __unused) {
+status_t StreamInHalAidl::setPreferredMicrophoneDirection(audio_microphone_direction_t direction) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    ::aidl::android::hardware::audio::core::IStreamIn::MicrophoneDirection aidlDirection =
+              VALUE_OR_RETURN_STATUS(
+                      ::aidl::android::legacy2aidl_audio_microphone_direction_t_MicrophoneDirection(
+                              direction));
+    return statusTFromBinderStatus(mStream->setMicrophoneDirection(aidlDirection));
 }
 
-status_t StreamInHalAidl::setPreferredMicrophoneFieldDimension(float zoom __unused) {
+status_t StreamInHalAidl::setPreferredMicrophoneFieldDimension(float zoom) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    return statusTFromBinderStatus(mStream->setMicrophoneFieldDimension(zoom));
 }
 
 } // namespace android
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index 157e8bb..3b369bd 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -25,15 +25,19 @@
 #include <aidl/android/hardware/audio/core/BpStreamCommon.h>
 #include <aidl/android/hardware/audio/core/BpStreamIn.h>
 #include <aidl/android/hardware/audio/core/BpStreamOut.h>
+#include <aidl/android/hardware/audio/core/MmapBufferDescriptor.h>
+#include <aidl/android/media/audio/IHalAdapterVendorExtension.h>
 #include <fmq/AidlMessageQueue.h>
 #include <media/audiohal/EffectHalInterface.h>
 #include <media/audiohal/StreamHalInterface.h>
+#include <media/AidlConversionUtil.h>
 #include <media/AudioParameter.h>
 
 #include "ConversionHelperAidl.h"
 #include "StreamPowerLog.h"
 
 using ::aidl::android::hardware::audio::common::AudioOffloadMetadata;
+using ::aidl::android::hardware::audio::core::MmapBufferDescriptor;
 
 namespace android {
 
@@ -46,22 +50,26 @@
     typedef AidlMessageQueue<int8_t,
             ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> DataMQ;
 
-    explicit StreamContextAidl(
-            const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor,
+    StreamContextAidl(
+            ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor,
             bool isAsynchronous)
         : mFrameSizeBytes(descriptor.frameSizeBytes),
           mCommandMQ(new CommandMQ(descriptor.command)),
           mReplyMQ(new ReplyMQ(descriptor.reply)),
           mBufferSizeFrames(descriptor.bufferSizeFrames),
           mDataMQ(maybeCreateDataMQ(descriptor)),
-          mIsAsynchronous(isAsynchronous) {}
+          mIsAsynchronous(isAsynchronous),
+          mIsMmapped(isMmapped(descriptor)),
+          mMmapBufferDescriptor(maybeGetMmapBuffer(descriptor)) {}
     StreamContextAidl(StreamContextAidl&& other) :
             mFrameSizeBytes(other.mFrameSizeBytes),
             mCommandMQ(std::move(other.mCommandMQ)),
             mReplyMQ(std::move(other.mReplyMQ)),
             mBufferSizeFrames(other.mBufferSizeFrames),
             mDataMQ(std::move(other.mDataMQ)),
-            mIsAsynchronous(other.mIsAsynchronous) {}
+            mIsAsynchronous(other.mIsAsynchronous),
+            mIsMmapped(other.mIsMmapped),
+            mMmapBufferDescriptor(std::move(other.mMmapBufferDescriptor)) {}
     StreamContextAidl& operator=(StreamContextAidl&& other) {
         mFrameSizeBytes = other.mFrameSizeBytes;
         mCommandMQ = std::move(other.mCommandMQ);
@@ -69,16 +77,19 @@
         mBufferSizeFrames = other.mBufferSizeFrames;
         mDataMQ = std::move(other.mDataMQ);
         mIsAsynchronous = other.mIsAsynchronous;
+        mIsMmapped = other.mIsMmapped;
+        mMmapBufferDescriptor = std::move(other.mMmapBufferDescriptor);
         return *this;
     }
     bool isValid() const {
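+        // The data MQ is optional (MMAP streams do not use it); when MMAP is used, the shared memory fd must be valid instead.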
         return mFrameSizeBytes != 0 &&
                 mCommandMQ != nullptr && mCommandMQ->isValid() &&
                 mReplyMQ != nullptr && mReplyMQ->isValid() &&
-                (mDataMQ != nullptr || (
+                (mDataMQ == nullptr || (
                         mDataMQ->isValid() &&
                         mDataMQ->getQuantumCount() * mDataMQ->getQuantumSize() >=
-                        mFrameSizeBytes * mBufferSizeFrames));
+                        mFrameSizeBytes * mBufferSizeFrames)) &&
+                (!mIsMmapped || mMmapBufferDescriptor.sharedMemory.fd.get() >= 0);
     }
     size_t getBufferSizeBytes() const { return mFrameSizeBytes * mBufferSizeFrames; }
     size_t getBufferSizeFrames() const { return mBufferSizeFrames; }
@@ -87,6 +98,8 @@
     size_t getFrameSizeBytes() const { return mFrameSizeBytes; }
     ReplyMQ* getReplyMQ() const { return mReplyMQ.get(); }
     bool isAsynchronous() const { return mIsAsynchronous; }
+    bool isMmapped() const { return mIsMmapped; }
+    const MmapBufferDescriptor& getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
 
   private:
     static std::unique_ptr<DataMQ> maybeCreateDataMQ(
@@ -97,6 +110,19 @@
         }
         return nullptr;
     }
+    static bool isMmapped(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
+        using Tag = ::aidl::android::hardware::audio::core::StreamDescriptor::AudioBuffer::Tag;
+        return descriptor.audio.getTag() == Tag::mmap;
+    }
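+    // The stream descriptor is taken by non-const reference so the MMAP shared memory fd can be moved out rather than duplicated.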
+    static MmapBufferDescriptor maybeGetMmapBuffer(
+            ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
+        using Tag = ::aidl::android::hardware::audio::core::StreamDescriptor::AudioBuffer::Tag;
+        if (descriptor.audio.getTag() == Tag::mmap) {
+            return std::move(descriptor.audio.get<Tag::mmap>());
+        }
+        return {};
+    }
 
     size_t mFrameSizeBytes;
     std::unique_ptr<CommandMQ> mCommandMQ;
@@ -104,6 +130,8 @@
     size_t mBufferSizeFrames;
     std::unique_ptr<DataMQ> mDataMQ;
     bool mIsAsynchronous;
+    bool mIsMmapped;
+    MmapBufferDescriptor mMmapBufferDescriptor;
 };
 
 class StreamHalAidl : public virtual StreamHalInterface, public ConversionHelperAidl {
@@ -159,6 +187,9 @@
     status_t legacyReleaseAudioPatch() override;
 
   protected:
+    // For tests: allows constructing instances directly via 'sp'.
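+    // (Friendship lets 'sp' reach the protected constructor.)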
+    friend class sp<StreamHalAidl>;
+
     template<class T>
     static std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> getStreamCommon(
             const std::shared_ptr<T>& stream);
@@ -169,18 +200,17 @@
             const audio_config& config,
             int32_t nominalLatency,
             StreamContextAidl&& context,
-            const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon>& stream);
+            const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon>& stream,
+            const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension>& vext);
 
     ~StreamHalAidl() override;
 
-    status_t getHalPid(pid_t *pid);
-
-    bool requestHalThreadPriority(pid_t threadPid, pid_t threadId);
-
     status_t getLatency(uint32_t *latency);
 
     status_t getObservablePosition(int64_t *frames, int64_t *timestamp);
 
+    status_t getHardwarePosition(int64_t *frames, int64_t *timestamp);
+
     status_t getXruns(int32_t *frames);
 
     status_t transfer(void *buffer, size_t bytes, size_t *transferred);
@@ -223,6 +253,7 @@
             ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr);
 
     const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> mStream;
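+    // Optional vendor extension used for translating legacy string parameters; may be null.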
+    const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
     std::mutex mLock;
     ::aidl::android::hardware::audio::core::StreamDescriptor::Reply mLastReply GUARDED_BY(mLock);
     // mStreamPowerLog is used for audio signal power logging.
@@ -325,6 +356,7 @@
     StreamOutHalAidl(
             const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
             const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut>& stream,
+            const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension>& vext,
             const sp<CallbackBroker>& callbackBroker);
 
     ~StreamOutHalAidl() override;
@@ -377,6 +409,7 @@
     StreamInHalAidl(
             const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
             const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamIn>& stream,
+            const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension>& vext,
             const sp<MicrophoneInfoProvider>& micInfoProvider);
 
     ~StreamInHalAidl() override = default;
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 192790c..72eadc6 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -33,6 +33,7 @@
 #include <util/CoreUtils.h>
 
 #include "DeviceHalHidl.h"
+#include "EffectHalHidl.h"
 #include "ParameterUtils.h"
 #include "StreamHalHidl.h"
 
@@ -144,13 +145,15 @@
 status_t StreamHalHidl::addEffect(sp<EffectHalInterface> effect) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return processReturn("addEffect", mStream->addEffect(effect->effectId()));
+    auto hidlEffect = sp<effect::EffectHalHidl>::cast(effect);
+    return processReturn("addEffect", mStream->addEffect(hidlEffect->effectId()));
 }
 
 status_t StreamHalHidl::removeEffect(sp<EffectHalInterface> effect) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return processReturn("removeEffect", mStream->removeEffect(effect->effectId()));
+    auto hidlEffect = sp<effect::EffectHalHidl>::cast(effect);
+    return processReturn("removeEffect", mStream->removeEffect(hidlEffect->effectId()));
 }
 
 status_t StreamHalHidl::standby() {
@@ -979,9 +982,11 @@
 }
 
 status_t StreamOutHalHidl::exit() {
-    // FIXME this is using hard-coded strings but in the future, this functionality will be
-    //       converted to use audio HAL extensions required to support tunneling
-    return setParameters(String8("exiting=1"));
+    // Signal to HALs that use intermediate pipes that the stream is exiting, so they can close the pipes.
+    AudioParameter param;
+    param.addInt(String8(AudioParameter::keyExiting), 1);
+    param.add(String8(AudioParameter::keyClosing), String8(AudioParameter::valueTrue));
+    return setParameters(param.toString());
 }
 
 StreamInHalHidl::StreamInHalHidl(
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.h
index 3ee419a..61dd36a 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.h
@@ -26,8 +26,9 @@
   public:
     AidlConversionAec(std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
                       int32_t sessionId, int32_t ioId,
-                      const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+                      const ::aidl::android::hardware::audio::effect::Descriptor& desc,
+                      bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionAec() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.h
index b0509fd..364b473 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.h
@@ -26,8 +26,9 @@
   public:
     AidlConversionAgc1(std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
                        int32_t sessionId, int32_t ioId,
-                       const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+                       const ::aidl::android::hardware::audio::effect::Descriptor& desc,
+                       bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionAgc1() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.h
index 8f7eac7..df9a9ec 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.h
@@ -26,8 +26,9 @@
   public:
     AidlConversionAgc2(std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
                        int32_t sessionId, int32_t ioId,
-                       const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+                       const ::aidl::android::hardware::audio::effect::Descriptor& desc,
+                       bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionAgc2() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.h
index 9664aa1..424b837 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.h
@@ -27,8 +27,8 @@
     AidlConversionBassBoost(
             std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
             int32_t sessionId, int32_t ioId,
-            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc, bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionBassBoost() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.h
index 8b28ca3..f963f66 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.h
@@ -26,8 +26,9 @@
   public:
     AidlConversionDownmix(std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
                           int32_t sessionId, int32_t ioId,
-                          const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+                          const ::aidl::android::hardware::audio::effect::Descriptor& desc,
+                          bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionDownmix() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h
index c5d5a54..62714c3 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h
@@ -26,8 +26,9 @@
   public:
     AidlConversionDp(std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
                      int32_t sessionId, int32_t ioId,
-                     const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+                     const ::aidl::android::hardware::audio::effect::Descriptor& desc,
+                     bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionDp() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.h
index 8b92374..95042eb 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.h
@@ -27,8 +27,8 @@
     AidlConversionEnvReverb(
             std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
             int32_t sessionId, int32_t ioId,
-            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc, bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionEnvReverb() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
index 45b98a1..fc867c7 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
@@ -161,6 +161,9 @@
             return param.writeToValue(&bands);
         }
         case EQ_PARAM_LEVEL_RANGE: {
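+            // Not every descriptor provides an Equalizer capability range; skip the query in that case.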
+            if (mDesc.capability.range.getTag() != Range::equalizer) {
+                return OK;
+            }
             const auto& ranges = mDesc.capability.range.get<Range::equalizer>();
             for (const auto& r : ranges) {
                 if (r.min.getTag() == Equalizer::bandLevels &&
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h
index f94556c..53566e2 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h
@@ -25,9 +25,10 @@
 class AidlConversionEq : public EffectConversionHelperAidl {
   public:
     AidlConversionEq(std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
-                      int32_t sessionId, int32_t ioId,
-                      const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+                     int32_t sessionId, int32_t ioId,
+                     const ::aidl::android::hardware::audio::effect::Descriptor& desc,
+                     bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionEq() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.h
index 03114a5..9890bfb 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.h
@@ -27,8 +27,8 @@
     AidlConversionHapticGenerator(
             std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
             int32_t sessionId, int32_t ioId,
-            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc, bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionHapticGenerator() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.h
index c0402f9..2ce14a6 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.h
@@ -27,8 +27,8 @@
     AidlConversionLoudnessEnhancer(
             std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
             int32_t sessionId, int32_t ioId,
-            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc, bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionLoudnessEnhancer() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.h
index f51e13a..fac121d 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.h
@@ -27,8 +27,8 @@
     AidlConversionNoiseSuppression(
             std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
             int32_t sessionId, int32_t ioId,
-            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc, bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionNoiseSuppression() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.h
index 397d6e6..b975d72 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.h
@@ -27,8 +27,8 @@
     AidlConversionPresetReverb(
             std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
             int32_t sessionId, int32_t ioId,
-            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc, bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionPresetReverb() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h
index c44567c..7c60b14 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h
@@ -27,8 +27,8 @@
     AidlConversionSpatializer(
             std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
             int32_t sessionId, int32_t ioId,
-            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc, bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionSpatializer() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.h
index fd22e5c..16bfeba 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.h
@@ -27,8 +27,8 @@
     AidlConversionVendorExtension(
             std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
             int32_t sessionId, int32_t ioId,
-            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc, bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionVendorExtension() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.h
index 91c0fcd..359d884 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.h
@@ -27,8 +27,8 @@
     AidlConversionVirtualizer(
             std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
             int32_t sessionId, int32_t ioId,
-            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc, bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionVirtualizer() {}
 
   private:
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
index 2d5af59..b4440ee 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
@@ -52,6 +52,7 @@
     Parameter aidlParam;
     switch (type) {
         case VISUALIZER_PARAM_CAPTURE_SIZE: {
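+            // Cache the requested capture size for later use by this conversion helper.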
+            mCaptureSize = value;
             aidlParam = MAKE_SPECIFIC_PARAMETER(Visualizer, visualizer, captureSamples, value);
             break;
         }
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.h
index e380bc6..bc9320f 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.h
@@ -27,8 +27,8 @@
     AidlConversionVisualizer(
             std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
             int32_t sessionId, int32_t ioId,
-            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
-        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc, bool isProxyEffect)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc, isProxyEffect) {}
     ~AidlConversionVisualizer() {}
 
   private:
diff --git a/media/libaudiohal/include/media/audiohal/AudioHalVersionInfo.h b/media/libaudiohal/include/media/audiohal/AudioHalVersionInfo.h
index 6e09463..2323ed6 100644
--- a/media/libaudiohal/include/media/audiohal/AudioHalVersionInfo.h
+++ b/media/libaudiohal/include/media/audiohal/AudioHalVersionInfo.h
@@ -30,6 +30,8 @@
         minor = halMinor;
     }
 
+    bool isHidl() const { return type == Type::HIDL; }
+
     Type getType() const { return type; }
 
     int getMajorVersion() const { return major; }
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 2df2f5d..a965709 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -19,6 +19,9 @@
 
 #include <android/media/audio/common/AudioMMapPolicyInfo.h>
 #include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <android/media/audio/common/AudioMode.h>
+#include <android/media/audio/common/AudioPort.h>
+#include <android/media/AudioRoute.h>
 #include <error/Result.h>
 #include <media/audiohal/EffectHalInterface.h>
 #include <system/audio.h>
@@ -38,6 +41,12 @@
 class DeviceHalInterface : public virtual RefBase
 {
   public:
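+    // Gets the audio ports (device and mix ports) declared by the HAL module.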
+    virtual status_t getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) = 0;
+
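+    // Gets the audio routes between ports declared by the HAL module.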
+    virtual status_t getAudioRoutes(std::vector<media::AudioRoute> *routes) = 0;
+
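+    // Gets the audio modes (e.g. normal, in call) supported by the HAL module.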
+    virtual status_t getSupportedModes(std::vector<media::audio::common::AudioMode> *modes) = 0;
+
     // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
     virtual status_t getSupportedDevices(uint32_t *devices) = 0;
 
@@ -123,22 +132,23 @@
             std::vector<audio_microphone_characteristic_t>* microphones) = 0;
 
     virtual status_t addDeviceEffect(
-            audio_port_handle_t device, sp<EffectHalInterface> effect) = 0;
+            const struct audio_port_config *device, sp<EffectHalInterface> effect) = 0;
     virtual status_t removeDeviceEffect(
-            audio_port_handle_t device, sp<EffectHalInterface> effect) = 0;
+            const struct audio_port_config *device, sp<EffectHalInterface> effect) = 0;
 
     virtual status_t getMmapPolicyInfos(
             media::audio::common::AudioMMapPolicyType policyType,
             std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) = 0;
     virtual int32_t getAAudioMixerBurstCount() = 0;
     virtual int32_t getAAudioHardwareBurstMinUsec() = 0;
+
     virtual int32_t supportsBluetoothVariableLatency(bool* supports) = 0;
 
     // Update the connection status of an external device.
-    virtual status_t setConnectedState(const struct audio_port_v7* port, bool connected) {
-        ALOGE("%s override me port %p connected %d", __func__, port, connected);
-        return OK;
-    }
+    virtual status_t setConnectedState(const struct audio_port_v7* port, bool connected) = 0;
+
+    // Enables simulation of external device connections at the HAL level.
+    virtual status_t setSimulateDeviceConnections(bool enabled) = 0;
 
     virtual error::Result<audio_hw_sync_t> getHwAvSync() = 0;
 
@@ -148,6 +158,8 @@
     virtual status_t getSoundDoseInterface(const std::string& module,
                                            ::ndk::SpAIBinder* soundDoseBinder) = 0;
 
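+    // Notifies the HAL that the specified external device is about to be disconnected.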
+    virtual status_t prepareToDisconnectExternalDevice(const struct audio_port_v7* port) = 0;
+
   protected:
     // Subclasses can not be constructed directly by clients.
     DeviceHalInterface() {}
diff --git a/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h b/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
index be3a723..8397e9b 100644
--- a/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
@@ -16,6 +16,8 @@
 
 #pragma once
 
+#include <android/media/audio/common/AudioHalEngineConfig.h>
+#include <android/media/SurroundSoundConfig.h>
 #include <media/audiohal/DeviceHalInterface.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
@@ -34,6 +36,8 @@
 class DevicesFactoryHalInterface : public RefBase
 {
   public:
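+    // Returns the names of the HAL modules ("devices") that can be opened.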
+    virtual status_t getDeviceNames(std::vector<std::string> *names) = 0;
+
     // Opens a device with the specified name. To close the device, it is
     // necessary to release references to the returned object.
     virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device) = 0;
@@ -46,6 +50,10 @@
 
     virtual android::detail::AudioHalVersionInfo getHalVersion() const = 0;
 
+    virtual status_t getSurroundSoundConfig(media::SurroundSoundConfig *config) = 0;
+
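+    // Returns the audio policy engine configuration provided by the HAL.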
+    virtual status_t getEngineConfig(media::audio::common::AudioHalEngineConfig *config) = 0;
+
     static sp<DevicesFactoryHalInterface> create();
 
   protected:
diff --git a/media/libaudiohal/include/media/audiohal/EffectHalInterface.h b/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
index 2969c92..cf8d7f0 100644
--- a/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
@@ -52,20 +52,14 @@
     // Free resources on the remote side.
     virtual status_t close() = 0;
 
-    // Whether it's a local implementation.
-    virtual bool isLocal() const = 0;
-
     virtual status_t dump(int fd) = 0;
 
-    // Unique effect ID to use with the core HAL.
-    virtual uint64_t effectId() const = 0;
-
   protected:
     // Subclasses can not be constructed directly by clients.
-    EffectHalInterface() {}
+    EffectHalInterface() = default;
 
     // The destructor automatically releases the effect.
-    virtual ~EffectHalInterface() {}
+    virtual ~EffectHalInterface() = default;
 };
 
 } // namespace android
diff --git a/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h b/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
index d740fe9..832df18 100644
--- a/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
@@ -15,8 +15,10 @@
  */
 
 #pragma once
+#include <vector>
 
 #include <media/audiohal/EffectHalInterface.h>
+#include <media/EffectsConfig.h>
 #include <system/audio_effect.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
@@ -33,21 +35,24 @@
     virtual status_t queryNumberEffects(uint32_t *pNumEffects) = 0;
 
     // Returns a descriptor of the next available effect.
-    virtual status_t getDescriptor(uint32_t index,
-            effect_descriptor_t *pDescriptor) = 0;
+    virtual status_t getDescriptor(uint32_t index, effect_descriptor_t* pDescriptor) = 0;
 
-    virtual status_t getDescriptor(const effect_uuid_t *pEffectUuid,
-            effect_descriptor_t *pDescriptor) = 0;
+    virtual status_t getDescriptor(const effect_uuid_t* pEffectUuid,
+                                   effect_descriptor_t* pDescriptor) = 0;
 
     virtual status_t getDescriptors(const effect_uuid_t *pEffectType,
                                     std::vector<effect_descriptor_t> *descriptors) = 0;
 
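+    // Returns the pre/post processing chains parsed from the effects configuration.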
+    virtual std::shared_ptr<const effectsConfig::Processings> getProcessings() const = 0;
+
+    // Returns a status_t if the parser reported an error, or the number of skipped elements when parsing succeeded (always 0 for AIDL).
+    virtual error::Result<size_t> getSkippedElements() const = 0;
+
     // Creates an effect engine of the specified type.
     // To release the effect engine, it is necessary to release references
     // to the returned effect object.
-    virtual status_t createEffect(const effect_uuid_t *pEffectUuid,
-            int32_t sessionId, int32_t ioId, int32_t deviceId,
-            sp<EffectHalInterface> *effect) = 0;
+    virtual status_t createEffect(const effect_uuid_t* pEffectUuid, int32_t sessionId, int32_t ioId,
+                                  int32_t deviceId, sp<EffectHalInterface>* effect) = 0;
 
     virtual status_t dumpEffects(int fd) = 0;
 
diff --git a/media/libaudiohal/tests/Android.bp b/media/libaudiohal/tests/Android.bp
index 2f78dd0..8f011c8 100644
--- a/media/libaudiohal/tests/Android.bp
+++ b/media/libaudiohal/tests/Android.bp
@@ -20,36 +20,46 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
+cc_defaults {
+    name: "libaudiohal_aidl_test_default",
+    test_suites: ["device-tests"],
+    defaults: [
+        "libaudiohal_default",
+        "libaudiohal_aidl_default",
+    ],
+    shared_libs: [
+        "libaudiohal",
+    ],
+}
+
+cc_test {
+    name: "CoreAudioHalAidlTest",
+    srcs: [
+        "CoreAudioHalAidl_test.cpp",
+        ":core_audio_hal_aidl_src_files",
+    ],
+    defaults: ["libaudiohal_aidl_test_default"],
+    header_libs: ["libaudiohalimpl_headers"],
+}
+
 cc_test {
     name: "EffectsFactoryHalInterfaceTest",
-    test_suites: ["device-tests"],
-
-    srcs: [
-        "EffectsFactoryHalInterface_test.cpp",
-    ],
-
-    defaults: [
-        "latest_android_media_audio_common_types_ndk_shared",
-    ],
-
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-        "-Wthread-safety",
-        "-DBACKEND_NDK",
-    ],
-
+    srcs: ["EffectsFactoryHalInterface_test.cpp"],
+    defaults: ["libaudiohal_aidl_test_default"],
     shared_libs: [
-        "audioclient-types-aidl-cpp",
-        "libaudio_aidl_conversion_common_ndk",
-        "libaudiohal",
-        "liblog",
-        "libutils",
         "libvibrator",
     ],
+}
 
-    header_libs: [
-        "libaudiohal_headers",
+cc_test {
+    name: "EffectProxyTest",
+    srcs: [
+        "EffectProxy_test.cpp",
+        ":audio_effectproxy_src_files",
     ],
+    defaults: [
+        "libaudiohal_aidl_test_default",
+        "use_libaidlvintf_gtest_helper_static",
+    ],
+    header_libs: ["libaudiohalimpl_headers"],
 }
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
new file mode 100644
index 0000000..ea20794
--- /dev/null
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -0,0 +1,607 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#define LOG_TAG "CoreAudioHalAidlTest"
+#include <gtest/gtest.h>
+
+#include <DeviceHalAidl.h>
+#include <StreamHalAidl.h>
+#include <aidl/android/hardware/audio/core/BnModule.h>
+#include <aidl/android/hardware/audio/core/BnStreamCommon.h>
+#include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>
+#include <aidl/android/media/audio/common/Int.h>
+#include <utils/Log.h>
+
+namespace {
+
+using ::aidl::android::hardware::audio::core::VendorParameter;
+
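+// Records vendor parameter traffic (retrieved ids, sync and async sets) so tests can verify it.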
+class VendorParameterMock {
+  public:
+    const std::vector<std::string>& getRetrievedParameterIds() const { return mGetParameterIds; }
+    const std::vector<VendorParameter>& getAsyncParameters() const { return mAsyncParameters; }
+    const std::vector<VendorParameter>& getSyncParameters() const { return mSyncParameters; }
+
+  protected:
+    ndk::ScopedAStatus getVendorParametersImpl(const std::vector<std::string>& in_parameterIds) {
+        mGetParameterIds.insert(mGetParameterIds.end(), in_parameterIds.begin(),
+                                in_parameterIds.end());
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setVendorParametersImpl(const std::vector<VendorParameter>& in_parameters,
+                                               bool async) {
+        if (async) {
+            mAsyncParameters.insert(mAsyncParameters.end(), in_parameters.begin(),
+                                    in_parameters.end());
+        } else {
+            mSyncParameters.insert(mSyncParameters.end(), in_parameters.begin(),
+                                   in_parameters.end());
+        }
+        return ndk::ScopedAStatus::ok();
+    }
+
+  private:
+    std::vector<std::string> mGetParameterIds;
+    std::vector<VendorParameter> mAsyncParameters;
+    std::vector<VendorParameter> mSyncParameters;
+};
+
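+// A no-op IModule implementation that additionally records screen state, screen rotation, and vendor parameters.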
+class ModuleMock : public ::aidl::android::hardware::audio::core::BnModule,
+                   public VendorParameterMock {
+  public:
+    bool isScreenTurnedOn() const { return mIsScreenTurnedOn; }
+    ScreenRotation getScreenRotation() const { return mScreenRotation; }
+
+  private:
+    ndk::ScopedAStatus setModuleDebug(
+            const ::aidl::android::hardware::audio::core::ModuleDebug&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getTelephony(
+            std::shared_ptr<::aidl::android::hardware::audio::core::ITelephony>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getBluetooth(
+            std::shared_ptr<::aidl::android::hardware::audio::core::IBluetooth>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getBluetoothA2dp(
+            std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothA2dp>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getBluetoothLe(
+            std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothLe>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus connectExternalDevice(
+            const ::aidl::android::media::audio::common::AudioPort&,
+            ::aidl::android::media::audio::common::AudioPort*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus disconnectExternalDevice(int32_t) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioPatches(
+            std::vector<::aidl::android::hardware::audio::core::AudioPatch>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioPort(int32_t,
+                                    ::aidl::android::media::audio::common::AudioPort*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioPortConfigs(
+            std::vector<::aidl::android::media::audio::common::AudioPortConfig>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioPorts(
+            std::vector<::aidl::android::media::audio::common::AudioPort>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioRoutes(
+            std::vector<::aidl::android::hardware::audio::core::AudioRoute>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioRoutesForAudioPort(
+            int32_t, std::vector<::aidl::android::hardware::audio::core::AudioRoute>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus openInputStream(const OpenInputStreamArguments&,
+                                       OpenInputStreamReturn*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus openOutputStream(const OpenOutputStreamArguments&,
+                                        OpenOutputStreamReturn*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getSupportedPlaybackRateFactors(SupportedPlaybackRateFactors*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setAudioPatch(const ::aidl::android::hardware::audio::core::AudioPatch&,
+                                     ::aidl::android::hardware::audio::core::AudioPatch*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setAudioPortConfig(
+            const ::aidl::android::media::audio::common::AudioPortConfig&,
+            ::aidl::android::media::audio::common::AudioPortConfig*, bool*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus resetAudioPatch(int32_t) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus resetAudioPortConfig(int32_t) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getMasterMute(bool*) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus setMasterMute(bool) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getMasterVolume(float*) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus setMasterVolume(float) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getMicMute(bool*) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus setMicMute(bool) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getMicrophones(
+            std::vector<::aidl::android::media::audio::common::MicrophoneInfo>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus updateAudioMode(::aidl::android::media::audio::common::AudioMode) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus updateScreenRotation(ScreenRotation in_rotation) override {
+        mScreenRotation = in_rotation;
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus updateScreenState(bool in_isTurnedOn) override {
+        mIsScreenTurnedOn = in_isTurnedOn;
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getSoundDose(
+            std::shared_ptr<::aidl::android::hardware::audio::core::sounddose::ISoundDose>*)
+            override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus generateHwAvSyncId(int32_t*) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds,
+                                           std::vector<VendorParameter>*) override {
+        return getVendorParametersImpl(in_parameterIds);
+    }
+    ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
+                                           bool async) override {
+        return setVendorParametersImpl(in_parameters, async);
+    }
+    ndk::ScopedAStatus addDeviceEffect(
+            int32_t,
+            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus removeDeviceEffect(
+            int32_t,
+            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getMmapPolicyInfos(
+            ::aidl::android::media::audio::common::AudioMMapPolicyType,
+            std::vector<::aidl::android::media::audio::common::AudioMMapPolicyInfo>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus supportsVariableLatency(bool*) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getAAudioMixerBurstCount(int32_t*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAAudioHardwareBurstMinUsec(int32_t*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+
+    bool mIsScreenTurnedOn = false;
+    ScreenRotation mScreenRotation = ScreenRotation::DEG_0;
+};
+
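+// A no-op IStreamCommon implementation that records vendor parameter traffic.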
+class StreamCommonMock : public ::aidl::android::hardware::audio::core::BnStreamCommon,
+                         public VendorParameterMock {
+    ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus prepareToClose() override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus updateHwAvSyncId(int32_t) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds,
+                                           std::vector<VendorParameter>*) override {
+        return getVendorParametersImpl(in_parameterIds);
+    }
+    ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
+                                           bool async) override {
+        return setVendorParametersImpl(in_parameters, async);
+    }
+    ndk::ScopedAStatus addEffect(
+            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus removeEffect(
+            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+};
+
+VendorParameter makeVendorParameter(const std::string& id, int value) {
+    VendorParameter result{.id = id};
+    // Note: in real life, a parcelable type defined by the vendor must be used;
+    // Int is used here just for test purposes.
+    ::aidl::android::media::audio::common::Int vendorValue{.value = value};
+    result.ext.setParcelable(std::move(vendorValue));
+    return result;
+}
+
+android::status_t parseVendorParameter(const VendorParameter& param, int* value) {
+    std::optional<::aidl::android::media::audio::common::Int> vendorValue;
+    RETURN_STATUS_IF_ERROR(param.ext.getParcelable(&vendorValue));
+    if (!vendorValue.has_value()) return android::BAD_VALUE;
+    *value = vendorValue.value().value;
+    return android::OK;
+}
+
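+// A vendor extension stand-in that maps the legacy test keys onto module/stream vendor parameter ids and back.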
+class TestHalAdapterVendorExtension
+    : public ::aidl::android::media::audio::BnHalAdapterVendorExtension {
+  public:
+    static const std::string kLegacyParameterKey;
+    static const std::string kLegacyAsyncParameterKey;
+    static const std::string kModuleVendorParameterId;
+    static const std::string kStreamVendorParameterId;
+
+  private:
+    ndk::ScopedAStatus parseVendorParameterIds(ParameterScope in_scope,
+                                               const std::string& in_rawKeys,
+                                               std::vector<std::string>* _aidl_return) override {
+        android::AudioParameter keys(android::String8(in_rawKeys.c_str()));
+        for (size_t i = 0; i < keys.size(); ++i) {
+            android::String8 key;
+            if (android::status_t status = keys.getAt(i, key); status != android::OK) {
+                return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+            }
+            switch (in_scope) {
+                case ParameterScope::MODULE:
+                    if (key == android::String8(kLegacyParameterKey.c_str()) ||
+                        key == android::String8(kLegacyAsyncParameterKey.c_str())) {
+                        _aidl_return->push_back(kModuleVendorParameterId);
+                    } else {
+                        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+                    }
+                    break;
+                case ParameterScope::STREAM:
+                    if (key == android::String8(kLegacyParameterKey.c_str()) ||
+                        key == android::String8(kLegacyAsyncParameterKey.c_str())) {
+                        _aidl_return->push_back(kStreamVendorParameterId);
+                    } else {
+                        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+                    }
+                    break;
+            }
+        }
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus parseVendorParameters(
+            ParameterScope in_scope, const std::string& in_rawKeysAndValues,
+            std::vector<VendorParameter>* out_syncParameters,
+            std::vector<VendorParameter>* out_asyncParameters) override {
+        android::AudioParameter legacy(android::String8(in_rawKeysAndValues.c_str()));
+        for (size_t i = 0; i < legacy.size(); ++i) {
+            android::String8 key;
+            if (android::status_t status = legacy.getAt(i, key); status != android::OK) {
+                return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+            }
+            int value;
+            if (android::status_t status = legacy.getInt(key, value); status != android::OK) {
+                return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+            }
+            std::string parameterId;
+            switch (in_scope) {
+                case ParameterScope::MODULE:
+                    parameterId = kModuleVendorParameterId;
+                    break;
+                case ParameterScope::STREAM:
+                    parameterId = kStreamVendorParameterId;
+                    break;
+            }
+            if (key == android::String8(kLegacyParameterKey.c_str())) {
+                out_syncParameters->push_back(makeVendorParameter(parameterId, value));
+            } else if (key == android::String8(kLegacyAsyncParameterKey.c_str())) {
+                out_asyncParameters->push_back(makeVendorParameter(parameterId, value));
+            } else {
+                return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+            }
+        }
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus parseBluetoothA2dpReconfigureOffload(
+            const std::string&, std::vector<VendorParameter>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus parseBluetoothLeReconfigureOffload(const std::string&,
+                                                          std::vector<VendorParameter>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus processVendorParameters(ParameterScope in_scope,
+                                               const std::vector<VendorParameter>& in_parameters,
+                                               std::string* _aidl_return) override {
+        android::AudioParameter legacy;
+        for (const auto& vendorParam : in_parameters) {
+            if ((in_scope == ParameterScope::MODULE &&
+                 vendorParam.id == kModuleVendorParameterId) ||
+                (in_scope == ParameterScope::STREAM &&
+                 vendorParam.id == kStreamVendorParameterId)) {
+                int value;
+                if (android::status_t status = parseVendorParameter(vendorParam, &value);
+                    status != android::OK) {
+                    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+                }
+                legacy.addInt(android::String8(kLegacyParameterKey.c_str()), value);
+            }
+        }
+        *_aidl_return = legacy.toString().c_str();
+        return ndk::ScopedAStatus::ok();
+    }
+};
+
+const std::string TestHalAdapterVendorExtension::kLegacyParameterKey = "aosp_test_param";
+const std::string TestHalAdapterVendorExtension::kLegacyAsyncParameterKey = "aosp_test_param_async";
+// Note: in real life, there is no need to explicitly separate "module" and "stream"
+// parameters; it is done here just for test purposes.
+const std::string TestHalAdapterVendorExtension::kModuleVendorParameterId =
+        "aosp.test.module.parameter";
+const std::string TestHalAdapterVendorExtension::kStreamVendorParameterId =
+        "aosp.test.stream.parameter";
+
+android::String8 createParameterString(const std::string& key, const std::string& value) {
+    android::AudioParameter params;
+    params.add(android::String8(key.c_str()), android::String8(value.c_str()));
+    return params.toString();
+}
+
+android::String8 createParameterString(const std::string& key, int value) {
+    android::AudioParameter params;
+    params.addInt(android::String8(key.c_str()), value);
+    return params.toString();
+}
+
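+// Trait used below to detect, via SFINAE, types that expose a toString() member function.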
+template <typename>
+struct mf_traits {};
+template <class T, class U>
+struct mf_traits<U T::*> {
+    using member_type = U;
+};
+
+}  // namespace
+
+// Provide value printers for types generated from AIDL.
+// They need to be in the same namespace as the types we intend to print.
+namespace aidl::android::hardware::audio::core {
+template <typename P>
+std::enable_if_t<std::is_function_v<typename mf_traits<decltype(&P::toString)>::member_type>,
+                 std::ostream&>
+operator<<(std::ostream& os, const P& p) {
+    return os << p.toString();
+}
+template <typename E>
+std::enable_if_t<std::is_enum_v<E>, std::ostream&> operator<<(std::ostream& os, const E& e) {
+    return os << toString(e);
+}
+}  // namespace aidl::android::hardware::audio::core
+
+using namespace android;
+
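+// Tests for DeviceHalAidl: they run against a ModuleMock instance and exercise the legacy
+// String8 parameter API of the adaptation layer.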
+class DeviceHalAidlTest : public testing::Test {
+  public:
+    void SetUp() override {
+        mModule = ndk::SharedRefBase::make<ModuleMock>();
+        mDevice = sp<DeviceHalAidl>::make("test", mModule, nullptr /*vext*/);
+    }
+    void TearDown() override {
+        mDevice.clear();
+        mModule.reset();
+    }
+
+  protected:
+    std::shared_ptr<ModuleMock> mModule;
+    sp<DeviceHalAidl> mDevice;
+};
+
+TEST_F(DeviceHalAidlTest, ScreenState) {
+    EXPECT_FALSE(mModule->isScreenTurnedOn());
+    EXPECT_EQ(OK, mDevice->setParameters(createParameterString(AudioParameter::keyScreenState,
+                                                               AudioParameter::valueOn)));
+    EXPECT_TRUE(mModule->isScreenTurnedOn());
+    EXPECT_EQ(OK, mDevice->setParameters(createParameterString(AudioParameter::keyScreenState,
+                                                               AudioParameter::valueOff)));
+    EXPECT_FALSE(mModule->isScreenTurnedOn());
+    // The adaptation layer only logs a warning.
+    EXPECT_EQ(OK, mDevice->setParameters(
+                          createParameterString(AudioParameter::keyScreenState, "blah")));
+    EXPECT_FALSE(mModule->isScreenTurnedOn());
+}
+
+TEST_F(DeviceHalAidlTest, ScreenRotation) {
+    using ScreenRotation = ::aidl::android::hardware::audio::core::IModule::ScreenRotation;
+    EXPECT_EQ(ScreenRotation::DEG_0, mModule->getScreenRotation());
+    EXPECT_EQ(OK,
+              mDevice->setParameters(createParameterString(AudioParameter::keyScreenRotation, 90)));
+    EXPECT_EQ(ScreenRotation::DEG_90, mModule->getScreenRotation());
+    EXPECT_EQ(OK,
+              mDevice->setParameters(createParameterString(AudioParameter::keyScreenRotation, 0)));
+    EXPECT_EQ(ScreenRotation::DEG_0, mModule->getScreenRotation());
+    // The adaptation layer only logs a warning.
+    EXPECT_EQ(OK,
+              mDevice->setParameters(createParameterString(AudioParameter::keyScreenRotation, 42)));
+    EXPECT_EQ(ScreenRotation::DEG_0, mModule->getScreenRotation());
+}
+
+// Without a vendor extension, any unrecognized parameters must be ignored.
+TEST_F(DeviceHalAidlTest, VendorParameterIgnored) {
+    EXPECT_EQ(0UL, mModule->getAsyncParameters().size());
+    EXPECT_EQ(0UL, mModule->getSyncParameters().size());
+    EXPECT_EQ(OK, mDevice->setParameters(createParameterString("random_name", "random_value")));
+    EXPECT_EQ(0UL, mModule->getAsyncParameters().size());
+    EXPECT_EQ(0UL, mModule->getSyncParameters().size());
+
+    EXPECT_EQ(0UL, mModule->getRetrievedParameterIds().size());
+    String8 values;
+    EXPECT_EQ(OK, mDevice->getParameters(String8("random_name"), &values));
+    EXPECT_EQ(0UL, mModule->getRetrievedParameterIds().size());
+    EXPECT_TRUE(values.empty());
+}
+
+class DeviceHalAidlVendorParametersTest : public testing::Test {
+  public:
+    void SetUp() override {
+        mModule = ndk::SharedRefBase::make<ModuleMock>();
+        mVendorExt = ndk::SharedRefBase::make<TestHalAdapterVendorExtension>();
+        mDevice = sp<DeviceHalAidl>::make("test", mModule, mVendorExt);
+    }
+    void TearDown() override {
+        mDevice.clear();
+        mVendorExt.reset();
+        mModule.reset();
+    }
+
+  protected:
+    std::shared_ptr<ModuleMock> mModule;
+    std::shared_ptr<TestHalAdapterVendorExtension> mVendorExt;
+    sp<DeviceHalAidl> mDevice;
+};
+
+TEST_F(DeviceHalAidlVendorParametersTest, GetVendorParameter) {
+    EXPECT_EQ(0UL, mModule->getRetrievedParameterIds().size());
+    String8 values;
+    EXPECT_EQ(OK, mDevice->getParameters(
+                          String8(TestHalAdapterVendorExtension::kLegacyParameterKey.c_str()),
+                          &values));
+    EXPECT_EQ(1UL, mModule->getRetrievedParameterIds().size());
+    if (mModule->getRetrievedParameterIds().size() >= 1) {
+        EXPECT_EQ(TestHalAdapterVendorExtension::kModuleVendorParameterId,
+                  mModule->getRetrievedParameterIds()[0]);
+    }
+}
+
+TEST_F(DeviceHalAidlVendorParametersTest, SetVendorParameter) {
+    EXPECT_EQ(0UL, mModule->getAsyncParameters().size());
+    EXPECT_EQ(0UL, mModule->getSyncParameters().size());
+    EXPECT_EQ(OK, mDevice->setParameters(createParameterString(
+                          TestHalAdapterVendorExtension::kLegacyParameterKey, 42)));
+    EXPECT_EQ(0UL, mModule->getAsyncParameters().size());
+    EXPECT_EQ(1UL, mModule->getSyncParameters().size());
+    EXPECT_EQ(OK, mDevice->setParameters(createParameterString(
+                          TestHalAdapterVendorExtension::kLegacyAsyncParameterKey, 43)));
+    EXPECT_EQ(1UL, mModule->getAsyncParameters().size());
+    EXPECT_EQ(1UL, mModule->getSyncParameters().size());
+    if (mModule->getSyncParameters().size() >= 1) {
+        EXPECT_EQ(TestHalAdapterVendorExtension::kModuleVendorParameterId,
+                  mModule->getSyncParameters()[0].id);
+        int value{};
+        EXPECT_EQ(android::OK, parseVendorParameter(mModule->getSyncParameters()[0], &value));
+        EXPECT_EQ(42, value);
+    }
+    if (mModule->getAsyncParameters().size() >= 1) {
+        EXPECT_EQ(TestHalAdapterVendorExtension::kModuleVendorParameterId,
+                  mModule->getAsyncParameters()[0].id);
+        int value{};
+        EXPECT_EQ(android::OK, parseVendorParameter(mModule->getAsyncParameters()[0], &value));
+        EXPECT_EQ(43, value);
+    }
+}
+
+TEST_F(DeviceHalAidlVendorParametersTest, SetInvalidVendorParameters) {
+    android::AudioParameter legacy;
+    legacy.addInt(android::String8(TestHalAdapterVendorExtension::kLegacyParameterKey.c_str()), 42);
+    legacy.addInt(android::String8(TestHalAdapterVendorExtension::kLegacyAsyncParameterKey.c_str()),
+                  43);
+    legacy.addInt(android::String8("random_name"), 44);
+    EXPECT_EQ(0UL, mModule->getAsyncParameters().size());
+    EXPECT_EQ(0UL, mModule->getSyncParameters().size());
+    // TestHalAdapterVendorExtension throws an error for unknown parameters.
+    EXPECT_EQ(android::BAD_VALUE, mDevice->setParameters(legacy.toString()));
+    EXPECT_EQ(0UL, mModule->getAsyncParameters().size());
+    EXPECT_EQ(0UL, mModule->getSyncParameters().size());
+}
+
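+// Same vendor parameter checks as above, but through StreamHalAidl and a StreamCommonMock.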
+class StreamHalAidlVendorParametersTest : public testing::Test {
+  public:
+    void SetUp() override {
+        mStreamCommon = ndk::SharedRefBase::make<StreamCommonMock>();
+        mVendorExt = ndk::SharedRefBase::make<TestHalAdapterVendorExtension>();
+        struct audio_config config = AUDIO_CONFIG_INITIALIZER;
+        ::aidl::android::hardware::audio::core::StreamDescriptor descriptor;
+        mStream = sp<StreamHalAidl>::make("test", false /*isInput*/, config, 0 /*nominalLatency*/,
+                                          StreamContextAidl(descriptor, false /*isAsynchronous*/),
+                                          mStreamCommon, mVendorExt);
+    }
+    void TearDown() override {
+        mStream.clear();
+        mVendorExt.reset();
+        mStreamCommon.reset();
+    }
+
+  protected:
+    std::shared_ptr<StreamCommonMock> mStreamCommon;
+    std::shared_ptr<TestHalAdapterVendorExtension> mVendorExt;
+    sp<StreamHalAidl> mStream;
+};
+
+TEST_F(StreamHalAidlVendorParametersTest, GetVendorParameter) {
+    EXPECT_EQ(0UL, mStreamCommon->getRetrievedParameterIds().size());
+    String8 values;
+    EXPECT_EQ(OK, mStream->getParameters(
+                          String8(TestHalAdapterVendorExtension::kLegacyParameterKey.c_str()),
+                          &values));
+    EXPECT_EQ(1UL, mStreamCommon->getRetrievedParameterIds().size());
+    if (mStreamCommon->getRetrievedParameterIds().size() >= 1) {
+        EXPECT_EQ(TestHalAdapterVendorExtension::kStreamVendorParameterId,
+                  mStreamCommon->getRetrievedParameterIds()[0]);
+    }
+}
+
+TEST_F(StreamHalAidlVendorParametersTest, SetVendorParameter) {
+    EXPECT_EQ(0UL, mStreamCommon->getAsyncParameters().size());
+    EXPECT_EQ(0UL, mStreamCommon->getSyncParameters().size());
+    EXPECT_EQ(OK, mStream->setParameters(createParameterString(
+                          TestHalAdapterVendorExtension::kLegacyParameterKey, 42)));
+    EXPECT_EQ(0UL, mStreamCommon->getAsyncParameters().size());
+    EXPECT_EQ(1UL, mStreamCommon->getSyncParameters().size());
+    EXPECT_EQ(OK, mStream->setParameters(createParameterString(
+                          TestHalAdapterVendorExtension::kLegacyAsyncParameterKey, 43)));
+    EXPECT_EQ(1UL, mStreamCommon->getAsyncParameters().size());
+    EXPECT_EQ(1UL, mStreamCommon->getSyncParameters().size());
+    if (mStreamCommon->getSyncParameters().size() >= 1) {
+        EXPECT_EQ(TestHalAdapterVendorExtension::kStreamVendorParameterId,
+                  mStreamCommon->getSyncParameters()[0].id);
+        int value{};
+        EXPECT_EQ(android::OK, parseVendorParameter(mStreamCommon->getSyncParameters()[0], &value));
+        EXPECT_EQ(42, value);
+    }
+    if (mStreamCommon->getAsyncParameters().size() >= 1) {
+        EXPECT_EQ(TestHalAdapterVendorExtension::kStreamVendorParameterId,
+                  mStreamCommon->getAsyncParameters()[0].id);
+        int value{};
+        EXPECT_EQ(android::OK,
+                  parseVendorParameter(mStreamCommon->getAsyncParameters()[0], &value));
+        EXPECT_EQ(43, value);
+    }
+}
+
+TEST_F(StreamHalAidlVendorParametersTest, SetInvalidVendorParameters) {
+    android::AudioParameter legacy;
+    legacy.addInt(android::String8(TestHalAdapterVendorExtension::kLegacyParameterKey.c_str()), 42);
+    legacy.addInt(android::String8(TestHalAdapterVendorExtension::kLegacyAsyncParameterKey.c_str()),
+                  43);
+    legacy.addInt(android::String8("random_name"), 44);
+    EXPECT_EQ(0UL, mStreamCommon->getAsyncParameters().size());
+    EXPECT_EQ(0UL, mStreamCommon->getSyncParameters().size());
+    // TestHalAdapterVendorExtension throws an error for unknown parameters.
+    EXPECT_EQ(android::BAD_VALUE, mStream->setParameters(legacy.toString()));
+    EXPECT_EQ(0UL, mStreamCommon->getAsyncParameters().size());
+    EXPECT_EQ(0UL, mStreamCommon->getSyncParameters().size());
+}
diff --git a/media/libaudiohal/tests/EffectProxy_test.cpp b/media/libaudiohal/tests/EffectProxy_test.cpp
new file mode 100644
index 0000000..8668e85
--- /dev/null
+++ b/media/libaudiohal/tests/EffectProxy_test.cpp
@@ -0,0 +1,293 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#include <cstddef>
+#include <cstdint>
+#include <memory>
+#include <utility>
+#define LOG_TAG "EffectProxyTest"
+
+#include <aidl/android/media/audio/common/AudioUuid.h>
+#include <aidl/Vintf.h>
+#include <android/binder_manager.h>
+#include <gtest/gtest.h>
+#include <utils/RefBase.h>
+
+#include "EffectProxy.h"
+
+/**
+ * This test suite depends on the audio effect AIDL service.
+ */
+namespace android {
+
+using ::aidl::android::hardware::audio::effect::CommandId;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::Flags;
+using ::aidl::android::hardware::audio::effect::IEffect;
+using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::media::audio::common::AudioChannelLayout;
+using ::aidl::android::media::audio::common::AudioFormatDescription;
+using ::aidl::android::media::audio::common::AudioFormatType;
+using ::aidl::android::media::audio::common::AudioUuid;
+using ::aidl::android::media::audio::common::PcmType;
+using ::android::effect::EffectProxy;
+
+class EffectProxyTest : public testing::Test {
+  public:
+    void SetUp() override {
+        auto serviceName = android::getAidlHalInstanceNames(IFactory::descriptor);
+        // Only test the first instance in case more than one EffectFactory service exists.
+        ASSERT_NE(0ul, serviceName.size());
+        mFactory = IFactory::fromBinder(
+                ndk::SpAIBinder(AServiceManager_waitForService(serviceName[0].c_str())));
+        ASSERT_NE(nullptr, mFactory);
+        mFactory->queryEffects(std::nullopt, std::nullopt, std::nullopt, &mDescs);
+        for (const auto& desc : mDescs) {
+            if (desc.common.id.proxy.has_value()) {
+                mProxyDescs[desc.common.id.proxy.value()].emplace_back(desc);
+            }
+        }
+    }
+
+    void TearDown() override {}
+
+    const AudioFormatDescription kDefaultFormatDescription = {
+            .type = AudioFormatType::PCM, .pcm = PcmType::FLOAT_32_BIT, .encoding = ""};
+
+    Parameter::Common createParamCommon(
+            int session = 0, int ioHandle = -1, int iSampleRate = 48000, int oSampleRate = 48000,
+            long iFrameCount = 0x100, long oFrameCount = 0x100,
+            AudioChannelLayout inputChannelLayout =
+                    AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+                            AudioChannelLayout::LAYOUT_STEREO),
+            AudioChannelLayout outputChannelLayout =
+                    AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+                            AudioChannelLayout::LAYOUT_STEREO)) {
+        Parameter::Common common;
+        common.session = session;
+        common.ioHandle = ioHandle;
+
+        auto& input = common.input;
+        auto& output = common.output;
+        input.base.sampleRate = iSampleRate;
+        input.base.channelMask = inputChannelLayout;
+        input.base.format = kDefaultFormatDescription;
+        input.frameCount = iFrameCount;
+        output.base.sampleRate = oSampleRate;
+        output.base.channelMask = outputChannelLayout;
+        output.base.format = kDefaultFormatDescription;
+        output.frameCount = oFrameCount;
+        return common;
+    }
+
+    enum TupleIndex { HANDLE, DESCRIPTOR };
+    using EffectProxyTuple = std::tuple<std::shared_ptr<EffectProxy>, std::vector<Descriptor>>;
+
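+    // Create one EffectProxy per proxy UUID found during SetUp, passing it all of the
+    // sub-effect descriptors collected for that UUID.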
+    std::map<AudioUuid, EffectProxyTuple> createAllProxies() {
+        std::map<AudioUuid, EffectProxyTuple> proxyMap;
+        for (const auto& itor : mProxyDescs) {
+            const auto& uuid = itor.first;
+            if (proxyMap.end() == proxyMap.find(uuid)) {
+                std::get<TupleIndex::HANDLE>(proxyMap[uuid]) =
+                        ndk::SharedRefBase::make<EffectProxy>(itor.first, itor.second, mFactory);
+            }
+        }
+        return proxyMap;
+    }
+
+    std::shared_ptr<IFactory> mFactory;
+    std::vector<Descriptor> mDescs;
+    std::map<const AudioUuid, std::vector<Descriptor>> mProxyDescs;
+};
+
+TEST_F(EffectProxyTest, createProxy) {
+    auto proxyMap = createAllProxies();
+    // If any descriptors define a proxy, then proxyMap cannot be empty.
+    EXPECT_EQ(mProxyDescs.size() == 0, proxyMap.size() == 0);
+}
+
+TEST_F(EffectProxyTest, addSubEffectsCreateAndDestroy) {
+    auto proxyMap = createAllProxies();
+
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+TEST_F(EffectProxyTest, addSubEffectsCreateOpenCloseDestroy) {
+    auto proxyMap = createAllProxies();
+
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+// Add sub-effects, then set the active sub-effect with different offload settings.
+TEST_F(EffectProxyTest, setOffloadParam) {
+    auto proxyMap = createAllProxies();
+
+    // Setting the offload parameter should succeed regardless of the flag values.
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+        effect_offload_param_t offloadParam{false, 0};
+        EXPECT_TRUE(proxy->setOffloadParam(&offloadParam).isOk());
+        offloadParam.isOffload = true;
+        EXPECT_TRUE(proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+TEST_F(EffectProxyTest, destroyWithoutCreate) {
+    auto proxyMap = createAllProxies();
+
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+TEST_F(EffectProxyTest, closeWithoutOpen) {
+    auto proxyMap = createAllProxies();
+
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+// Add sub-effects, set the active sub-effect, create, open, and send commands; expect success.
+TEST_F(EffectProxyTest, normalSequence) {
+    auto proxyMap = createAllProxies();
+
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    Parameter::VolumeStereo volumeStereo({.left = .1f, .right = -0.8f});
+    Parameter param = Parameter::make<Parameter::volumeStereo>(volumeStereo);
+    Parameter::Id id = Parameter::Id::make<Parameter::Id::commonTag>(Parameter::volumeStereo);
+    State state;
+    for (const auto& itor : proxyMap) {
+        Parameter expect;
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        effect_offload_param_t offloadParam{true, 0};
+        EXPECT_TRUE(proxy->setOffloadParam(&offloadParam).isOk());
+
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+
+        EXPECT_TRUE(proxy->setParameter(param).isOk());
+        EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+        EXPECT_EQ(expect, param) << " EXPECTED: " << expect.toString()
+                                 << "\nACTUAL: " << param.toString();
+
+        EXPECT_TRUE(proxy->command(CommandId::START).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::PROCESSING, state);
+
+        EXPECT_TRUE(proxy->command(CommandId::STOP).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::IDLE, state);
+
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+// setParameter, change active sub-effect, verify with getParameter
+TEST_F(EffectProxyTest, changeActiveSubAndVerifyParameter) {
+    auto proxyMap = createAllProxies();
+
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    Parameter::VolumeStereo volumeStereo({.left = .5f, .right = .8f});
+    Parameter param = Parameter::make<Parameter::volumeStereo>(volumeStereo);
+    Parameter::Id id = Parameter::Id::make<Parameter::Id::commonTag>(Parameter::volumeStereo);
+    for (const auto& itor : proxyMap) {
+        Parameter expect;
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+        EXPECT_TRUE(proxy->setParameter(param).isOk());
+        EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+        EXPECT_EQ(expect, param);
+
+        effect_offload_param_t offloadParam{false, 0};
+        EXPECT_TRUE(proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+        EXPECT_EQ(expect, param);
+
+        offloadParam.isOffload = true;
+        EXPECT_TRUE(proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+        EXPECT_EQ(expect, param);
+
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+// send command, change active sub-effect, then verify the state with getState
+TEST_F(EffectProxyTest, changeActiveSubAndVerifyState) {
+    auto proxyMap = createAllProxies();
+
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    State state;
+    for (const auto& itor : proxyMap) {
+        Parameter expect;
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::INIT, state);
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::IDLE, state);
+        EXPECT_TRUE(proxy->command(CommandId::START).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::PROCESSING, state);
+
+        effect_offload_param_t offloadParam{false, 0};
+        EXPECT_TRUE(proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::PROCESSING, state);
+
+        offloadParam.isOffload = true;
+        EXPECT_TRUE(proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::PROCESSING, state);
+
+        EXPECT_TRUE(proxy->command(CommandId::STOP).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::IDLE, state);
+
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::INIT, state);
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+} // namespace android
diff --git a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
index c076ccc..63f895f 100644
--- a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
+++ b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
@@ -15,6 +15,7 @@
  */
 
 //#define LOG_NDEBUG 0
+#include <algorithm>
 #include <cstddef>
 #include <cstdint>
 #include <cstring>
@@ -92,6 +93,47 @@
     }
 }
 
+TEST(libAudioHalTest, getProcessings) {
+    auto factory = EffectsFactoryHalInterface::create();
+    ASSERT_NE(nullptr, factory);
+
+    const auto &processings = factory->getProcessings();
+    if (processings) {
+        EXPECT_NE(0UL, processings->preprocess.size() + processings->postprocess.size() +
+                               processings->deviceprocess.size());
+
+        auto processingChecker = [](const auto& processings) {
+            if (processings.size() != 0) {
+                // Any processing chain needs at least one effect inside.
+                std::for_each(processings.begin(), processings.end(), [](const auto& process) {
+                    EXPECT_NE(0ul, process.effects.size());
+                    // Any effect should have a valid name string and must not be a proxy.
+                    for (const auto& effect : process.effects) {
+                        SCOPED_TRACE("Effect: {" +
+                                     (effect == nullptr
+                                              ? "NULL}"
+                                              : ("{name: " + effect->name + ", isproxy: " +
+                                                 (effect->isProxy ? "true" : "false") + ", sw: " +
+                                                 (effect->libSw ? "non-null" : "null") + ", hw: " +
+                                                 (effect->libHw ? "non-null" : "null") + "}")));
+                        EXPECT_NE(nullptr, effect);
+                        EXPECT_NE("", effect->name);
+                        EXPECT_EQ(false, effect->isProxy);
+                        EXPECT_EQ(nullptr, effect->libSw);
+                        EXPECT_EQ(nullptr, effect->libHw);
+                    }
+                });
+            }
+        };
+
+        processingChecker(processings->preprocess);
+        processingChecker(processings->postprocess);
+        processingChecker(processings->deviceprocess);
+    } else {
+        GTEST_SKIP() << "no processing found, skipping the test";
+    }
+}
+
 TEST(libAudioHalTest, getHalVersion) {
     auto factory = EffectsFactoryHalInterface::create();
     ASSERT_NE(nullptr, factory);
diff --git a/media/libaudioprocessing/Android.bp b/media/libaudioprocessing/Android.bp
index 309765a..6160d7d 100644
--- a/media/libaudioprocessing/Android.bp
+++ b/media/libaudioprocessing/Android.bp
@@ -72,6 +72,10 @@
     ],
 
     whole_static_libs: ["libaudioprocessing_base"],
+
+    export_shared_lib_headers: [
+        "libvibrator",
+    ],
 }
 
 cc_library_static {
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 6a39108..57b860d 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -193,7 +193,7 @@
 
     // See if we should use our built-in non-effect downmixer.
     if (mMixerInFormat == AUDIO_FORMAT_PCM_FLOAT
-            && mMixerChannelMask == AUDIO_CHANNEL_OUT_STEREO
+            && ChannelMixBufferProvider::isOutputChannelMaskSupported(mMixerChannelMask)
             && audio_channel_mask_get_representation(channelMask)
                     == AUDIO_CHANNEL_REPRESENTATION_POSITION) {
         mDownmixerBufferProvider.reset(new ChannelMixBufferProvider(channelMask,
diff --git a/media/libaudioprocessing/AudioMixerBase.cpp b/media/libaudioprocessing/AudioMixerBase.cpp
index 427bd55..3d11d92 100644
--- a/media/libaudioprocessing/AudioMixerBase.cpp
+++ b/media/libaudioprocessing/AudioMixerBase.cpp
@@ -471,9 +471,9 @@
                         &track->mVolume[param - VOLUME0],
                         &track->mPrevVolume[param - VOLUME0],
                         &track->mVolumeInc[param - VOLUME0])) {
-                    ALOGV("setParameter(%s, VOLUME%d: %04x)",
-                            target == VOLUME ? "VOLUME" : "RAMP_VOLUME", param - VOLUME0,
-                                    track->volume[param - VOLUME0]);
+                    ALOGV("setParameter(%s, VOLUME%d: %f)",
+                          target == VOLUME ? "VOLUME" : "RAMP_VOLUME", param - VOLUME0,
+                          track->mVolume[param - VOLUME0]);
                     invalidate();
                 }
             } else {
@@ -648,7 +648,7 @@
 
         if (t->volumeInc[0]|t->volumeInc[1]) {
             volumeRamp = true;
-        } else if (!t->doesResample() && t->volumeRL == 0) {
+        } else if (!t->doesResample() && t->isVolumeMuted()) {
             n |= NEEDS_MUTE;
         }
         t->needs = n;
@@ -748,7 +748,7 @@
 
         for (const int name : mEnabled) {
             const std::shared_ptr<TrackBase> &t = mTracks[name];
-            if (!t->doesResample() && t->volumeRL == 0) {
+            if (!t->doesResample() && t->isVolumeMuted()) {
                 t->needs |= NEEDS_MUTE;
                 t->hook = &TrackBase::track__nop;
             } else {
diff --git a/media/libaudioprocessing/BufferProviders.cpp b/media/libaudioprocessing/BufferProviders.cpp
index a9944fb..9f19f7b 100644
--- a/media/libaudioprocessing/BufferProviders.cpp
+++ b/media/libaudioprocessing/BufferProviders.cpp
@@ -373,18 +373,23 @@
                 audio_bytes_per_sample(format)
                     * audio_channel_count_from_out_mask(outputChannelMask),
                 bufferFrameCount)
+        , mChannelMix{format == AUDIO_FORMAT_PCM_FLOAT
+                ? audio_utils::channels::IChannelMix::create(outputChannelMask) : nullptr}
+        , mIsValid{mChannelMix && mChannelMix->setInputChannelMask(inputChannelMask)}
 {
     ALOGV("ChannelMixBufferProvider(%p)(%#x, %#x, %#x)",
             this, format, inputChannelMask, outputChannelMask);
-    if (outputChannelMask == AUDIO_CHANNEL_OUT_STEREO && format == AUDIO_FORMAT_PCM_FLOAT) {
-        mIsValid = mChannelMix.setInputChannelMask(inputChannelMask);
-    }
 }
 
 void ChannelMixBufferProvider::copyFrames(void *dst, const void *src, size_t frames)
 {
-    mChannelMix.process(static_cast<const float *>(src), static_cast<float *>(dst),
-            frames, false /* accumulate */);
+    if (mIsValid) {
+        mChannelMix->process(static_cast<const float *>(src), static_cast<float *>(dst),
+                frames, false /* accumulate */);
+    } else {
+        // The caller should fall back to a different BufferProvider when this one is not valid.
+        ALOGE("%s: Use without being valid!", __func__);
+    }
 }
 
 ReformatBufferProvider::ReformatBufferProvider(int32_t channelCount,
diff --git a/media/libaudioprocessing/include/media/AudioMixerBase.h b/media/libaudioprocessing/include/media/AudioMixerBase.h
index 4bd85d8..b44ff20 100644
--- a/media/libaudioprocessing/include/media/AudioMixerBase.h
+++ b/media/libaudioprocessing/include/media/AudioMixerBase.h
@@ -299,6 +299,16 @@
 
         uint32_t       mInputFrameSize; // The track input frame size, used for tee buffer
 
+        // Consider the volume muted only if all channel volumes (floating point) are 0.f.
+        inline bool isVolumeMuted() const {
+            for (const auto volume : mVolume) {
+                if (volume != 0) {
+                    return false;
+                }
+            }
+            return true;
+        }
+
       protected:
 
         // hooks
diff --git a/media/libaudioprocessing/include/media/BufferProviders.h b/media/libaudioprocessing/include/media/BufferProviders.h
index 7a41002..8d18010 100644
--- a/media/libaudioprocessing/include/media/BufferProviders.h
+++ b/media/libaudioprocessing/include/media/BufferProviders.h
@@ -142,9 +142,14 @@
 
     bool isValid() const { return mIsValid; }
 
+    // Used by AudioMixer to check whether the built-in channel mixer supports the mixer
+    // output channel mask before creating a ChannelMixBufferProvider.
+    static bool isOutputChannelMaskSupported(audio_channel_mask_t outputChannelMask) {
+        return audio_utils::channels::IChannelMix::isOutputChannelMaskSupported(
+                outputChannelMask);
+    }
+
 protected:
-    audio_utils::channels::ChannelMix mChannelMix;
-    bool mIsValid = false;
+    const std::shared_ptr<audio_utils::channels::IChannelMix> mChannelMix;
+    const bool mIsValid;
 };
 
 // RemixBufferProvider derives from CopyBufferProvider to perform an
diff --git a/media/libaudiousecasevalidation/UsecaseValidator.cpp b/media/libaudiousecasevalidation/UsecaseValidator.cpp
index 0e5a824..bf532de 100644
--- a/media/libaudiousecasevalidation/UsecaseValidator.cpp
+++ b/media/libaudiousecasevalidation/UsecaseValidator.cpp
@@ -99,10 +99,8 @@
 
         audio_attributes_t attrRet = attributes;
 
-        // Check if attribute usage media or unknown has been set.
-        bool isUsageValid = this->isUsageValid(attributes);
-
-        if (isUsageValid && m_lookup.isGameStream(streamId)) {
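+        // Rewrite the usage only when the usage, content type, and flags all qualify and the
+        // stream is registered as a game stream.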
+        if (isUsageValid(attributes.usage) && isContentTypeValid(attributes.content_type)
+                && areFlagsValid(attributes.flags) && m_lookup.isGameStream(streamId)) {
             ALOGI("%s update usage: %d to AUDIO_USAGE_GAME for output: %d pid: %d package: %s",
                     __func__, attributes.usage, streamId, attributionSource.pid,
                     attributionSource.packageName.value_or("").c_str());
@@ -117,9 +115,9 @@
     /**
      * Check if attribute usage valid.
      */
-    bool isUsageValid(const audio_attributes_t& attr) {
-        ALOGV("isUsageValid attr.usage: %d", attr.usage);
-        switch (attr.usage) {
+    bool isUsageValid(audio_usage_t usage) {
+        ALOGV("isUsageValid usage: %d", usage);
+        switch (usage) {
             case AUDIO_USAGE_MEDIA:
             case AUDIO_USAGE_UNKNOWN:
                 return true;
@@ -129,6 +127,30 @@
         return false;
     }
 
+    bool isContentTypeValid(audio_content_type_t contentType) {
+        ALOGV("isContentTypeValid contentType: %d", contentType);
+        switch (contentType) {
+            case AUDIO_CONTENT_TYPE_MUSIC:
+            case AUDIO_CONTENT_TYPE_MOVIE:
+            case AUDIO_CONTENT_TYPE_UNKNOWN:
+                return true;
+            default:
+                break;
+        }
+        return false;
+    }
+
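+    // Only streams with the low latency flag qualify; SCO, audibility enforced, and beacon
+    // flags always disqualify.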
+    bool areFlagsValid(audio_flags_mask_t flags) {
+        ALOGV("areFlagsValid flags: %#x", flags);
+        if ((flags & (AUDIO_FLAG_SCO|AUDIO_FLAG_AUDIBILITY_ENFORCED|AUDIO_FLAG_BEACON)) != 0) {
+            return false;
+        }
+        if ((flags & AUDIO_FLAG_LOW_LATENCY) != 0) {
+            return true;
+        }
+        return false;
+    }
+
  protected:
     UsecaseLookup m_lookup;
 };
diff --git a/media/libaudiousecasevalidation/tests/UsecaseValidator-test.cpp b/media/libaudiousecasevalidation/tests/UsecaseValidator-test.cpp
index d92c8ba..5768a9b 100644
--- a/media/libaudiousecasevalidation/tests/UsecaseValidator-test.cpp
+++ b/media/libaudiousecasevalidation/tests/UsecaseValidator-test.cpp
@@ -52,10 +52,8 @@
  */
 error::Result<audio_attributes_t> UsecaseValidatorTest::testStartClient(audio_io_handle_t streamId,
         audio_port_handle_t portId,
-        audio_usage_t usage) {
+        audio_attributes_t attributes) {
     content::AttributionSourceState attributionSource;
-    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
-    attributes.usage = usage;
 
     return m_validator->startClient(streamId, portId, attributionSource, attributes, NULL);
 }
@@ -141,11 +139,14 @@
     mediaPortId = testCreatePortId(mediaStreamId);
     EXPECT_NE(mediaPortId, 0);
 
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    attributes.usage = AUDIO_USAGE_GAME;
     // Start client on game stream.
-    testStartClient(gameStreamId, gamePortId, AUDIO_USAGE_GAME);
+    testStartClient(gameStreamId, gamePortId, attributes);
 
+    attributes.usage = AUDIO_USAGE_MEDIA;
     // Start client on media stream.
-    testStartClient(mediaStreamId, mediaPortId, AUDIO_USAGE_MEDIA);
+    testStartClient(mediaStreamId, mediaPortId, attributes);
 
     // Unregister media stream before stopClient.
     EXPECT_EQ(m_validator->unregisterStream(gameStreamId), 0);
@@ -175,18 +176,23 @@
     voiceCommPortId = testCreatePortId(gameStreamId);
     EXPECT_NE(voiceCommPortId, 0);
 
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
     // Verify attributes on game stream.
-    auto attr = testStartClient(gameStreamId, gamePortId, AUDIO_USAGE_GAME);
+    attributes.usage = AUDIO_USAGE_GAME;
+    auto attr = testStartClient(gameStreamId, gamePortId, attributes);
     EXPECT_EQ(attr.value().usage, AUDIO_USAGE_GAME);
 
-    attr = testStartClient(gameStreamId, voiceCommPortId, AUDIO_USAGE_VOICE_COMMUNICATION);
+    attributes.usage = AUDIO_USAGE_VOICE_COMMUNICATION;
+    attr = testStartClient(gameStreamId, voiceCommPortId, attributes);
     EXPECT_EQ(attr.value().usage, AUDIO_USAGE_VOICE_COMMUNICATION);
 
     // Verify attributes on media stream.
-    attr = testStartClient(mediaStreamId, mediaPortId, AUDIO_USAGE_MEDIA);
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    attr = testStartClient(mediaStreamId, mediaPortId, attributes);
     EXPECT_EQ(attr.value().usage, AUDIO_USAGE_MEDIA);
 
-    attr = testStartClient(mediaStreamId, unknownPortId, AUDIO_USAGE_UNKNOWN);
+    attributes.usage = AUDIO_USAGE_UNKNOWN;
+    attr = testStartClient(mediaStreamId, unknownPortId, attributes);
     EXPECT_EQ(attr.value().usage, AUDIO_USAGE_UNKNOWN);
 
     // Stop client on game and media stream.
@@ -215,16 +221,79 @@
     unknownPortId = testCreatePortId(gameStreamId);
     EXPECT_NE(unknownPortId, 0);
 
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    attributes.flags = AUDIO_FLAG_LOW_LATENCY;
     // Verify attributes on game stream.
-    auto attr = testStartClient(gameStreamId, mediaPortId, AUDIO_USAGE_MEDIA);
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    auto attr = testStartClient(gameStreamId, mediaPortId, attributes);
     EXPECT_EQ(attr.value().usage, AUDIO_USAGE_GAME);
 
-    attr = testStartClient(gameStreamId, unknownPortId, AUDIO_USAGE_UNKNOWN);
+    attributes.usage = AUDIO_USAGE_UNKNOWN;
+    attr = testStartClient(gameStreamId, unknownPortId, attributes);
     EXPECT_EQ(attr.value().usage, AUDIO_USAGE_GAME);
 
     // Unregister game stream.
     EXPECT_EQ(m_validator->unregisterStream(gameStreamId), 0);
 }
 
+/**
+ * Verify that the attributes usage does not change for non-low-latency clients.
+ */
+TEST_F(UsecaseValidatorTest, testAttributesUsageUnChangedIfNotLowLatency) {
+    audio_io_handle_t gameStreamId;
+    audio_port_handle_t mediaPortId, unknownPortId;
+
+    // Register game and media stream.
+    gameStreamId = testRegisterStream(true);
+    EXPECT_NE(gameStreamId, 0);
+
+    // Assign portId.
+    mediaPortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(mediaPortId, 0);
+    unknownPortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(unknownPortId, 0);
+
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    // Verify attributes on game stream.
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    auto attr = testStartClient(gameStreamId, mediaPortId, attributes);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_MEDIA);
+
+    attributes.usage = AUDIO_USAGE_UNKNOWN;
+    attr = testStartClient(gameStreamId, unknownPortId, attributes);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_UNKNOWN);
+
+    // Unregister game stream.
+    EXPECT_EQ(m_validator->unregisterStream(gameStreamId), 0);
+}
+
+/**
+ * Verify that the attributes usage does not change when the content type is speech.
+ */
+TEST_F(UsecaseValidatorTest, testAttributesUsageUnChangedIfSpeech) {
+    audio_io_handle_t gameStreamId;
+    audio_port_handle_t mediaPortId, unknownPortId;
+
+    // Register game and media stream.
+    gameStreamId = testRegisterStream(true);
+    EXPECT_NE(gameStreamId, 0);
+
+    // Assign portId.
+    mediaPortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(mediaPortId, 0);
+    unknownPortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(unknownPortId, 0);
+
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    // Verify attributes on game stream.
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    attributes.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+    auto attr = testStartClient(gameStreamId, mediaPortId, attributes);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_MEDIA);
+
+    // Unregister game stream.
+    EXPECT_EQ(m_validator->unregisterStream(gameStreamId), 0);
+}
+
 }  // namespace media
 }  // namespace android
diff --git a/media/libaudiousecasevalidation/tests/UsecaseValidator-test.h b/media/libaudiousecasevalidation/tests/UsecaseValidator-test.h
index 3159ab4..8cbd0f0 100644
--- a/media/libaudiousecasevalidation/tests/UsecaseValidator-test.h
+++ b/media/libaudiousecasevalidation/tests/UsecaseValidator-test.h
@@ -69,7 +69,7 @@
     audio_port_handle_t testCreatePortId(audio_io_handle_t streamId);
     error::Result<audio_attributes_t> testStartClient(audio_io_handle_t streamId,
                                                       audio_port_handle_t portId,
-                                                      audio_usage_t usage);
+                                                      audio_attributes_t attributes);
     error::Result<audio_attributes_t> testVerifyAudioAttributes(audio_io_handle_t streamId,
                                                                 audio_usage_t usage);
 
diff --git a/media/libeffects/config/Android.bp b/media/libeffects/config/Android.bp
index b02dcb6..293a9c2 100644
--- a/media/libeffects/config/Android.bp
+++ b/media/libeffects/config/Android.bp
@@ -27,8 +27,21 @@
         "libcutils",
     ],
 
-    header_libs: ["libaudio_system_headers"],
-    export_header_lib_headers: ["libaudio_system_headers"],
+    header_libs: [
+        "libaudio_system_headers",
+        "liberror_headers",
+    ],
+
+    export_header_lib_headers: [
+        "libaudio_system_headers",
+        "liberror_headers",
+    ],
+
+    export_include_dirs: ["include"],
+}
+
+cc_library_headers {
+    name: "libeffectsconfig_headers",
 
     export_include_dirs: ["include"],
 }
diff --git a/media/libeffects/config/include/media/EffectsConfig.h b/media/libeffects/config/include/media/EffectsConfig.h
index 57d4dd7..09a060d 100644
--- a/media/libeffects/config/include/media/EffectsConfig.h
+++ b/media/libeffects/config/include/media/EffectsConfig.h
@@ -22,8 +22,10 @@
  * @see audio_effects_conf_V2_0.xsd for documentation on each structure
  */
 
+#include <error/Result.h>
 #include <system/audio_effect.h>
 
+#include <cstddef>
 #include <map>
 #include <memory>
 #include <string>
@@ -47,26 +49,27 @@
     std::string name;
     std::string path;
 };
-using Libraries = std::vector<Library>;
+using Libraries = std::vector<std::shared_ptr<const Library>>;
 
 struct EffectImpl {
-    Library* library; //< Only valid as long as the associated library vector is unmodified
+    //< Only valid as long as the associated library vector is unmodified
+    std::shared_ptr<const Library> library;
     effect_uuid_t uuid;
 };
 
 struct Effect : public EffectImpl {
     std::string name;
     bool isProxy;
-    EffectImpl libSw; //< Only valid if isProxy
-    EffectImpl libHw; //< Only valid if isProxy
+    std::shared_ptr<EffectImpl> libSw; //< Only valid if isProxy
+    std::shared_ptr<EffectImpl> libHw; //< Only valid if isProxy
 };
 
-using Effects = std::vector<Effect>;
+using Effects = std::vector<std::shared_ptr<const Effect>>;
 
 template <class Type>
 struct Stream {
     Type type;
-    std::vector<std::reference_wrapper<Effect>> effects;
+    Effects effects;
 };
 using OutputStream = Stream<audio_stream_type_t>;
 using InputStream = Stream<audio_source_t>;
@@ -75,6 +78,12 @@
     std::string address;
 };
 
+struct Processings {
+    std::vector<InputStream> preprocess;
+    std::vector<OutputStream> postprocess;
+    std::vector<DeviceEffects> deviceprocess;
+};
+
 /** Parsed configuration.
  * Intended to be a transient structure only used for deserialization.
  * Note: Everything is copied in the configuration from the xml dom.
@@ -82,19 +91,16 @@
  *       consider keeping a private handle on the xml dom and replace all strings by dom pointers.
  *       Or even better, use SAX parsing to avoid the allocations all together.
  */
-struct Config {
+struct Config : public Processings {
     float version;
     Libraries libraries;
     Effects effects;
-    std::vector<OutputStream> postprocess;
-    std::vector<InputStream> preprocess;
-    std::vector<DeviceEffects> deviceprocess;
 };
 
 /** Result of `parse(const char*)` */
 struct ParsingResult {
     /** Parsed config, nullptr if the xml lib could not load the file */
-    std::unique_ptr<Config> parsedConfig;
+    std::shared_ptr<const Config> parsedConfig;
     size_t nbSkippedElement; //< Number of skipped invalid library, effect or processing chain
     const std::string configPath; //< Path to the loaded configuration
 };
diff --git a/media/libeffects/config/src/EffectsConfig.cpp b/media/libeffects/config/src/EffectsConfig.cpp
index 1696233..2ff057e 100644
--- a/media/libeffects/config/src/EffectsConfig.cpp
+++ b/media/libeffects/config/src/EffectsConfig.cpp
@@ -19,6 +19,7 @@
 #include <algorithm>
 #include <cstdint>
 #include <functional>
+#include <memory>
 #include <string>
 #include <unistd.h>
 
@@ -149,7 +150,10 @@
         ALOGE("library must have a name and a path: %s", dump(xmlLibrary));
         return false;
     }
-    libraries->push_back({name, path});
+
+    // need this temp variable because `struct Library` doesn't have a constructor
+    Library lib({.name = name, .path = path});
+    libraries->push_back(std::make_shared<const Library>(lib));
     return true;
 }
 
@@ -157,10 +161,10 @@
  * @return nullptr if not found, the element address if found.
  */
 template <class T>
-T* findByName(const char* name, std::vector<T>& collection) {
+T findByName(const char* name, std::vector<T>& collection) {
     auto it = find_if(begin(collection), end(collection),
-                         [name] (auto& item) { return item.name == name; });
-    return it != end(collection) ? &*it : nullptr;
+                      [name](auto& item) { return item && item->name == name; });
+    return it != end(collection) ? *it : nullptr;
 }
 
 /** Parse an effect from an xml element describing it.
@@ -187,7 +191,7 @@
         }
 
         // Convert library name to a pointer to the previously loaded library
-        auto* library = findByName(libraryName, libraries);
+        auto library = findByName(libraryName, libraries);
         if (library == nullptr) {
             ALOGE("Could not find library referenced in: %s", dump(xmlImpl));
             return false;
@@ -211,20 +215,25 @@
         effect.isProxy = true;
 
         // Function to parse libhw and libsw
-        auto parseProxy = [&xmlEffect, &parseImpl](const char* tag, EffectImpl& proxyLib) {
+        auto parseProxy = [&xmlEffect, &parseImpl](const char* tag,
+                                                   const std::shared_ptr<EffectImpl>& proxyLib) {
             auto* xmlProxyLib = xmlEffect.FirstChildElement(tag);
             if (xmlProxyLib == nullptr) {
                 ALOGE("effectProxy must contain a <%s>: %s", tag, dump(xmlEffect));
                 return false;
             }
-            return parseImpl(*xmlProxyLib, proxyLib);
+            return parseImpl(*xmlProxyLib, *proxyLib);
         };
+        effect.libSw = std::make_shared<EffectImpl>();
+        effect.libHw = std::make_shared<EffectImpl>();
         if (!parseProxy("libhw", effect.libHw) || !parseProxy("libsw", effect.libSw)) {
+            effect.libSw.reset();
+            effect.libHw.reset();
             return false;
         }
     }
 
-    effects->push_back(std::move(effect));
+    effects->push_back(std::make_shared<const Effect>(effect));
     return true;
 }
 
@@ -250,12 +259,12 @@
             ALOGE("<stream|device>/apply must have reference an effect: %s", dump(xmlApply));
             return false;
         }
-        auto* effect = findByName(effectName, effects);
+        auto effect = findByName(effectName, effects);
         if (effect == nullptr) {
             ALOGE("Could not find effect referenced in: %s", dump(xmlApply));
             return false;
         }
-        stream.effects.emplace_back(*effect);
+        stream.effects.emplace_back(effect);
     }
     streams->push_back(std::move(stream));
     return true;
@@ -286,7 +295,7 @@
         return {nullptr, 0, std::move(path)};
     }
 
-    auto config = std::make_unique<Config>();
+    auto config = std::make_shared<Config>();
     size_t nbSkippedElements = 0;
     auto registerFailure = [&nbSkippedElements](bool result) {
         nbSkippedElements += result ? 0 : 1;
diff --git a/media/libeffects/downmix/EffectDownmix.cpp b/media/libeffects/downmix/EffectDownmix.cpp
index d8f5787..b921537 100644
--- a/media/libeffects/downmix/EffectDownmix.cpp
+++ b/media/libeffects/downmix/EffectDownmix.cpp
@@ -40,7 +40,7 @@
     downmix_type_t type;
     bool apply_volume_correction;
     uint8_t input_channel_count;
-    android::audio_utils::channels::ChannelMix channelMix;
+    android::audio_utils::channels::ChannelMix<AUDIO_CHANNEL_OUT_STEREO> channelMix;
 };
 
 typedef struct downmix_module_s {
@@ -259,7 +259,7 @@
     ret = Downmix_Init(module);
     if (ret < 0) {
         ALOGW("DownmixLib_Create() init failed");
-        free(module);
+        delete module;
         return ret;
     }
 
@@ -582,7 +582,7 @@
     ALOGV("Downmix_Init module %p", pDwmModule);
     int ret = 0;
 
-    memset(&pDwmModule->context, 0, sizeof(downmix_object_t));
+    pDwmModule->context = downmix_object_t{};  // value-initialize (contains a class with a vtable).
 
     pDwmModule->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
     pDwmModule->config.inputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
diff --git a/media/libeffects/downmix/aidl/DownmixContext.h b/media/libeffects/downmix/aidl/DownmixContext.h
index 9a9f2da..1571c38 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.h
+++ b/media/libeffects/downmix/aidl/DownmixContext.h
@@ -56,7 +56,7 @@
     DownmixState mState;
     Downmix::Type mType;
     ::aidl::android::media::audio::common::AudioChannelLayout mChMask;
-    ::android::audio_utils::channels::ChannelMix mChannelMix;
+    ::android::audio_utils::channels::ChannelMix<AUDIO_CHANNEL_OUT_STEREO> mChannelMix;
 
     // Common Params
     void init_params(const Parameter::Common& common);
diff --git a/media/libeffects/downmix/benchmark/downmix_benchmark.cpp b/media/libeffects/downmix/benchmark/downmix_benchmark.cpp
index d9d40ed..c4e0d65 100644
--- a/media/libeffects/downmix/benchmark/downmix_benchmark.cpp
+++ b/media/libeffects/downmix/benchmark/downmix_benchmark.cpp
@@ -60,34 +60,35 @@
 static constexpr size_t kFrameCount = 1000;
 
 /*
-Pixel 4XL
-$ adb shell /data/benchmarktest/downmix_benchmark/vendor/downmix_benchmark
+Pixel 7
+$ atest downmix_benchmark
 
 --------------------------------------------------------
 Benchmark              Time             CPU   Iterations
 --------------------------------------------------------
-BM_Downmix/0        3638 ns         3624 ns       197517 AUDIO_CHANNEL_OUT_MONO
-BM_Downmix/1        4040 ns         4024 ns       178766
-BM_Downmix/2        4759 ns         4740 ns       134741 AUDIO_CHANNEL_OUT_STEREO
-BM_Downmix/3        6042 ns         6017 ns       129546 AUDIO_CHANNEL_OUT_2POINT1
-BM_Downmix/4        6897 ns         6868 ns        96316 AUDIO_CHANNEL_OUT_2POINT0POINT2
-BM_Downmix/5        2117 ns         2109 ns       331705 AUDIO_CHANNEL_OUT_QUAD
-BM_Downmix/6        2097 ns         2088 ns       335421 AUDIO_CHANNEL_OUT_QUAD_SIDE
-BM_Downmix/7        7291 ns         7263 ns        96256 AUDIO_CHANNEL_OUT_SURROUND
-BM_Downmix/8        8246 ns         8206 ns        84318 AUDIO_CHANNEL_OUT_2POINT1POINT2
-BM_Downmix/9        8341 ns         8303 ns        84298 AUDIO_CHANNEL_OUT_3POINT0POINT2
-BM_Downmix/10       7549 ns         7517 ns        84293 AUDIO_CHANNEL_OUT_PENTA
-BM_Downmix/11       9395 ns         9354 ns        75209 AUDIO_CHANNEL_OUT_3POINT1POINT2
-BM_Downmix/12       3267 ns         3253 ns       215596 AUDIO_CHANNEL_OUT_5POINT1
-BM_Downmix/13       3178 ns         3163 ns       220132 AUDIO_CHANNEL_OUT_5POINT1_SIDE
-BM_Downmix/14      10245 ns        10199 ns        67486 AUDIO_CHANNEL_OUT_6POINT1
-BM_Downmix/15      10975 ns        10929 ns        61359 AUDIO_CHANNEL_OUT_5POINT1POINT2
-BM_Downmix/16       3796 ns         3780 ns       184728 AUDIO_CHANNEL_OUT_7POINT1
-BM_Downmix/17      13562 ns        13503 ns        51823 AUDIO_CHANNEL_OUT_5POINT1POINT4
-BM_Downmix/18      13573 ns        13516 ns        51800 AUDIO_CHANNEL_OUT_7POINT1POINT2
-BM_Downmix/19      15502 ns        15435 ns        47147 AUDIO_CHANNEL_OUT_7POINT1POINT4
-BM_Downmix/20      16693 ns        16624 ns        42109 AUDIO_CHANNEL_OUT_13POINT_360RA
-BM_Downmix/21      28267 ns        28116 ns        24982 AUDIO_CHANNEL_OUT_22POINT2
+downmix_benchmark:
+  #BM_Downmix/0     2216 ns    2208 ns       308323
+  #BM_Downmix/1     2237 ns    2228 ns       314730
+  #BM_Downmix/2      270 ns     268 ns      2681469
+  #BM_Downmix/3     3016 ns    2999 ns       234146
+  #BM_Downmix/4     3331 ns    3313 ns       212026
+  #BM_Downmix/5      816 ns     809 ns       864395
+  #BM_Downmix/6      813 ns     809 ns       863876
+  #BM_Downmix/7     3336 ns    3319 ns       211938
+  #BM_Downmix/8     3786 ns    3762 ns       185047
+  #BM_Downmix/9     3810 ns    3797 ns       186840
+  #BM_Downmix/10    3767 ns    3746 ns       187015
+  #BM_Downmix/11    4212 ns    4191 ns       166119
+  #BM_Downmix/12    1245 ns    1231 ns       574388
+  #BM_Downmix/13    1234 ns    1228 ns       574743
+  #BM_Downmix/14    4795 ns    4771 ns       147157
+  #BM_Downmix/15    1334 ns    1327 ns       527728
+  #BM_Downmix/16    1346 ns    1332 ns       525444
+  #BM_Downmix/17    2144 ns    2121 ns       333343
+  #BM_Downmix/18    2133 ns    2118 ns       330391
+  #BM_Downmix/19    2527 ns    2513 ns       278553
+  #BM_Downmix/20    8148 ns    8113 ns        86136
+  #BM_Downmix/21    6332 ns    6301 ns       111134
 */
 
 static void BM_Downmix(benchmark::State& state) {
diff --git a/media/libeffects/dynamicsproc/Android.bp b/media/libeffects/dynamicsproc/Android.bp
index 736a086..7838117 100644
--- a/media/libeffects/dynamicsproc/Android.bp
+++ b/media/libeffects/dynamicsproc/Android.bp
@@ -92,6 +92,10 @@
         "dynamicsprocessingdefaults",
     ],
 
+    static_libs: [
+        "libaudioaidlranges",
+    ],
+
     visibility: [
         "//hardware/interfaces/audio/aidl/default",
     ],
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index e508d48..1fed9a5 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -62,32 +62,135 @@
 
 const std::string DynamicsProcessingImpl::kEffectName = "DynamicsProcessing";
 
-const DynamicsProcessing::EqBandConfig DynamicsProcessingImpl::kEqBandConfigMin =
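+// Capability ranges for the DynamicsProcessing parameters: each Range::DynamicsProcessingRange
+// bounds one parameter tag with a min/max pair.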
+static const Range::DynamicsProcessingRange kEngineConfigRange = {
+        .min = DynamicsProcessing::make<
+                DynamicsProcessing::engineArchitecture>(DynamicsProcessing::EngineArchitecture(
+                {.resolutionPreference =
+                         DynamicsProcessing::ResolutionPreference::FAVOR_FREQUENCY_RESOLUTION,
+                 .preferredProcessingDurationMs = 1.0f,
+                 .preEqStage = {.inUse = false, .bandCount = 0},
+                 .postEqStage = {.inUse = false, .bandCount = 0},
+                 .mbcStage = {.inUse = false, .bandCount = 0},
+                 .limiterInUse = false})),
+        .max = DynamicsProcessing::make<
+                DynamicsProcessing::engineArchitecture>(DynamicsProcessing::EngineArchitecture(
+                {.resolutionPreference =
+                         DynamicsProcessing::ResolutionPreference::FAVOR_TIME_RESOLUTION,
+                 .preferredProcessingDurationMs = 1000.0f,
+                 .preEqStage = {.inUse = true, .bandCount = 128},
+                 .postEqStage = {.inUse = true, .bandCount = 128},
+                 .mbcStage = {.inUse = true, .bandCount = 128},
+                 .limiterInUse = true}))};
+
+static const DynamicsProcessing::ChannelConfig kChannelConfigMin =
+        DynamicsProcessing::ChannelConfig({.channel = 0, .enable = false});
+
+static const DynamicsProcessing::ChannelConfig kChannelConfigMax =
+        DynamicsProcessing::ChannelConfig(
+                {.channel = std::numeric_limits<int>::max(), .enable = true});
+
+static const Range::DynamicsProcessingRange kPreEqChannelConfigRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::preEq>({kChannelConfigMin}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::preEq>({kChannelConfigMax})};
+
+static const Range::DynamicsProcessingRange kPostEqChannelConfigRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::postEq>({kChannelConfigMin}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::postEq>({kChannelConfigMax})};
+
+static const Range::DynamicsProcessingRange kMbcChannelConfigRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::mbc>({kChannelConfigMin}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::mbc>({kChannelConfigMax})};
+
+static const DynamicsProcessing::EqBandConfig kEqBandConfigMin =
         DynamicsProcessing::EqBandConfig({.channel = 0,
                                           .band = 0,
                                           .enable = false,
-                                          .cutoffFrequencyHz = 220,
-                                          .gainDb = std::numeric_limits<float>::min()});
-const DynamicsProcessing::EqBandConfig DynamicsProcessingImpl::kEqBandConfigMax =
+                                          .cutoffFrequencyHz = 20,
+                                          .gainDb = -200});
+
+static const DynamicsProcessing::EqBandConfig kEqBandConfigMax =
         DynamicsProcessing::EqBandConfig({.channel = std::numeric_limits<int>::max(),
                                           .band = std::numeric_limits<int>::max(),
                                           .enable = true,
                                           .cutoffFrequencyHz = 20000,
-                                          .gainDb = std::numeric_limits<float>::max()});
-const Range::DynamicsProcessingRange DynamicsProcessingImpl::kPreEqBandRange = {
-        .min = DynamicsProcessing::make<DynamicsProcessing::preEqBand>(
-                {DynamicsProcessingImpl::kEqBandConfigMin}),
-        .max = DynamicsProcessing::make<DynamicsProcessing::preEqBand>(
-                {DynamicsProcessingImpl::kEqBandConfigMax})};
-const Range::DynamicsProcessingRange DynamicsProcessingImpl::kPostEqBandRange = {
-        .min = DynamicsProcessing::make<DynamicsProcessing::postEqBand>(
-                {DynamicsProcessingImpl::kEqBandConfigMin}),
-        .max = DynamicsProcessing::make<DynamicsProcessing::postEqBand>(
-                {DynamicsProcessingImpl::kEqBandConfigMax})};
-const Range DynamicsProcessingImpl::kRange =
-        Range::make<Range::dynamicsProcessing>({DynamicsProcessingImpl::kPreEqBandRange});
+                                          .gainDb = 200});
 
-const Capability DynamicsProcessingImpl::kCapability = {.range = {DynamicsProcessingImpl::kRange}};
+static const Range::DynamicsProcessingRange kPreEqBandConfigRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::preEqBand>({kEqBandConfigMin}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::preEqBand>({kEqBandConfigMax})};
+
+static const Range::DynamicsProcessingRange kPostEqBandConfigRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::postEqBand>({kEqBandConfigMin}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::postEqBand>({kEqBandConfigMax})};
+
+static const Range::DynamicsProcessingRange kMbcBandConfigRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::mbcBand>(
+                {DynamicsProcessing::MbcBandConfig(
+                        {.channel = 0,
+                         .band = 0,
+                         .enable = false,
+                         .cutoffFrequencyHz = 20,
+                         .attackTimeMs = 0,
+                         .releaseTimeMs = 0,
+                         .ratio = 1,
+                         .thresholdDb = -200,
+                         .kneeWidthDb = 0,
+                         .noiseGateThresholdDb = -200,
+                         .expanderRatio = 1,
+                         .preGainDb = -200,
+                         .postGainDb = -200})}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::mbcBand>(
+                {DynamicsProcessing::MbcBandConfig(
+                        {.channel = std::numeric_limits<int>::max(),
+                         .band = std::numeric_limits<int>::max(),
+                         .enable = true,
+                         .cutoffFrequencyHz = 20000,
+                         .attackTimeMs = 60000,
+                         .releaseTimeMs = 60000,
+                         .ratio = 50,
+                         .thresholdDb = 200,
+                         .kneeWidthDb = 100,
+                         .noiseGateThresholdDb = 200,
+                         .expanderRatio = 50,
+                         .preGainDb = 200,
+                         .postGainDb = 200})})};
+
+static const Range::DynamicsProcessingRange kInputGainRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::inputGain>(
+                {DynamicsProcessing::InputGain(
+                        {.channel = 0, .gainDb = -200.0f})}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::inputGain>(
+                {DynamicsProcessing::InputGain({.channel = std::numeric_limits<int>::max(),
+                                                .gainDb = 200.0f})})};
+
+static const Range::DynamicsProcessingRange kLimiterRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::limiter>(
+                {DynamicsProcessing::LimiterConfig(
+                        {.channel = 0,
+                         .enable = false,
+                         .linkGroup = std::numeric_limits<int>::min(),
+                         .attackTimeMs = 0,
+                         .releaseTimeMs = 0,
+                         .ratio = 1,
+                         .thresholdDb = -200,
+                         .postGainDb = -200})}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::limiter>(
+                {DynamicsProcessing::LimiterConfig(
+                        {.channel = std::numeric_limits<int>::max(),
+                         .enable = true,
+                         .linkGroup = std::numeric_limits<int>::max(),
+                         .attackTimeMs = 60000,
+                         .releaseTimeMs = 60000,
+                         .ratio = 50,
+                         .thresholdDb = 200,
+                         .postGainDb = 200})})};
+
+const std::vector<Range::DynamicsProcessingRange> kRanges = {
+        kEngineConfigRange,     kPreEqChannelConfigRange, kPostEqChannelConfigRange,
+        kMbcChannelConfigRange, kPreEqBandConfigRange,    kPostEqBandConfigRange,
+        kMbcBandConfigRange,    kInputGainRange,          kLimiterRange};
+
+const Capability DynamicsProcessingImpl::kCapability = {.range = kRanges};
 
 const Descriptor DynamicsProcessingImpl::kDescriptor = {
         .common = {.id = {.type = getEffectTypeUuidDynamicsProcessing(),
@@ -158,14 +261,19 @@
     }
 }
 
+bool DynamicsProcessingImpl::isParamInRange(const Parameter::Specific& specific) {
+    auto& dp = specific.get<Parameter::Specific::dynamicsProcessing>();
+    return DynamicsProcessingRanges::isParamInRange(dp, kRanges);
+}
+
 ndk::ScopedAStatus DynamicsProcessingImpl::setParameterSpecific(
         const Parameter::Specific& specific) {
     RETURN_IF(Parameter::Specific::dynamicsProcessing != specific.getTag(), EX_ILLEGAL_ARGUMENT,
               "EffectNotSupported");
     RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
 
+    RETURN_IF(!isParamInRange(specific), EX_ILLEGAL_ARGUMENT, "outOfRange");
     auto& param = specific.get<Parameter::Specific::dynamicsProcessing>();
-    // TODO: check range here, dynamicsProcessing need customized method for nested parameters.
     auto tag = param.getTag();
 
     switch (tag) {
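The parameter bounds for DynamicsProcessing now live in the single kRanges table above: one min/max pair per union tag (engine architecture, per-stage channel configs, per-stage band configs, input gain, limiter), reused both for kCapability and for the new isParamInRange() check, presumably backed by the DynamicsProcessingRanges helper in the newly linked libaudioaidlranges. Note that the old EQ band minimum used std::numeric_limits<float>::min(), which is the smallest positive float rather than the most negative one, so the explicit -200/200 dB bounds are a correction as much as a consolidation. A minimal sketch of how one min/max pair can be read, assuming the field-by-field comparison operators the NDK AIDL backend generates for the DynamicsProcessing union (hypothetical helper, not the actual library code):

    // Illustration only: a range constrains a value only when both carry
    // the same union tag; otherwise it simply does not apply.
    static bool inRange(const DynamicsProcessing& value,
                        const Range::DynamicsProcessingRange& range) {
        if (value.getTag() != range.min.getTag() ||
            value.getTag() != range.max.getTag()) {
            return true;
        }
        return !(value < range.min) && !(range.max < value);
    }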
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
index d094c69..1e1e72e 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
@@ -18,8 +18,9 @@
 
 #include <aidl/android/hardware/audio/effect/BnEffect.h>
 
-#include "effect-impl/EffectImpl.h"
 #include "DynamicsProcessingContext.h"
+#include "EffectRangeSpecific.h"
+#include "effect-impl/EffectImpl.h"
 
 namespace aidl::android::hardware::audio::effect {
 
@@ -51,14 +52,10 @@
     std::string getEffectName() override { return kEffectName; }
 
   private:
-    static const DynamicsProcessing::EqBandConfig kEqBandConfigMin;
-    static const DynamicsProcessing::EqBandConfig kEqBandConfigMax;
-    static const Range::DynamicsProcessingRange kPreEqBandRange;
-    static const Range::DynamicsProcessingRange kPostEqBandRange;
-    static const Range kRange;
     std::shared_ptr<DynamicsProcessingContext> mContext;
     ndk::ScopedAStatus getParameterDynamicsProcessing(const DynamicsProcessing::Tag& tag,
                                                       Parameter::Specific* specific);
+    bool isParamInRange(const Parameter::Specific& specific);
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index 69ff522..9d77135 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -16,11 +16,11 @@
 
 #define LOG_TAG "AHAL_DPLibEffectsContext"
 
-#include "DynamicsProcessing.h"
 #include "DynamicsProcessingContext.h"
+#include "DynamicsProcessing.h"
 
-#include <functional>
 #include <sys/param.h>
+#include <algorithm>
+#include <functional>
 #include <unordered_set>
 
 namespace aidl::android::hardware::audio::effect {
@@ -83,7 +83,7 @@
     if (block < minBlockSize) {
         block = minBlockSize;
     } else if (!powerof2(block)) {
-        //find next highest power of 2.
+        // find next highest power of 2.
         block = 1 << (32 - __builtin_clz(block));
     }
     mDpFreq->configure(block, block >> 1, sampleRate);
@@ -91,9 +91,6 @@
 
 RetCode DynamicsProcessingContext::setEngineArchitecture(
         const DynamicsProcessing::EngineArchitecture& engineArchitecture) {
-    RETURN_VALUE_IF(!validateEngineConfig(engineArchitecture), RetCode::ERROR_ILLEGAL_PARAMETER,
-                    "illegalEngineConfig");
-
     std::lock_guard lg(mMutex);
     if (!mEngineInited || mEngineArchitecture != engineArchitecture) {
         if (engineArchitecture.resolutionPreference ==
@@ -134,10 +131,12 @@
 RetCode DynamicsProcessingContext::setPreEqBand(
         const std::vector<DynamicsProcessing::EqBandConfig>& bands) {
     std::lock_guard lg(mMutex);
-    RETURN_VALUE_IF(!mEngineArchitecture.postEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
-                    "postEqNotInUse");
-    return setBands_l<DynamicsProcessing::EqBandConfig>(
-            bands, mEngineArchitecture.preEqStage.bandCount, StageType::PREEQ);
+    RETURN_VALUE_IF(!mEngineArchitecture.preEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
+                    "preEqNotInUse");
+    RETURN_VALUE_IF(
+            !validateBandConfig(bands, mChannelCount, mEngineArchitecture.preEqStage.bandCount),
+            RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
+    return setBands_l<DynamicsProcessing::EqBandConfig>(bands, StageType::PREEQ);
 }
 
 RetCode DynamicsProcessingContext::setPostEqBand(
@@ -145,8 +144,10 @@
     std::lock_guard lg(mMutex);
     RETURN_VALUE_IF(!mEngineArchitecture.postEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
                     "postEqNotInUse");
-    return setBands_l<DynamicsProcessing::EqBandConfig>(
-            bands, mEngineArchitecture.postEqStage.bandCount, StageType::POSTEQ);
+    RETURN_VALUE_IF(
+            !validateBandConfig(bands, mChannelCount, mEngineArchitecture.postEqStage.bandCount),
+            RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
+    return setBands_l<DynamicsProcessing::EqBandConfig>(bands, StageType::POSTEQ);
 }
 
 RetCode DynamicsProcessingContext::setMbcBand(
@@ -154,8 +155,10 @@
     std::lock_guard lg(mMutex);
     RETURN_VALUE_IF(!mEngineArchitecture.mbcStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
                     "mbcNotInUse");
-    return setBands_l<DynamicsProcessing::MbcBandConfig>(
-            bands, mEngineArchitecture.preEqStage.bandCount, StageType::MBC);
+    RETURN_VALUE_IF(
+            !validateBandConfig(bands, mChannelCount, mEngineArchitecture.mbcStage.bandCount),
+            RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
+    return setBands_l<DynamicsProcessing::MbcBandConfig>(bands, StageType::MBC);
 }
 
 RetCode DynamicsProcessingContext::setLimiter(
@@ -163,13 +166,17 @@
     std::lock_guard lg(mMutex);
     RETURN_VALUE_IF(!mEngineArchitecture.limiterInUse, RetCode::ERROR_ILLEGAL_PARAMETER,
                     "limiterNotInUse");
-    return setBands_l<DynamicsProcessing::LimiterConfig>(limiters, -1, StageType::LIMITER);
+    RETURN_VALUE_IF(!validateLimiterConfig(limiters, mChannelCount),
+                    RetCode::ERROR_ILLEGAL_PARAMETER, "limiterConfigNotValid");
+    return setBands_l<DynamicsProcessing::LimiterConfig>(limiters, StageType::LIMITER);
 }
 
 RetCode DynamicsProcessingContext::setInputGain(
         const std::vector<DynamicsProcessing::InputGain>& inputGains) {
     std::lock_guard lg(mMutex);
-    return setBands_l<DynamicsProcessing::InputGain>(inputGains, -1, StageType::INPUTGAIN);
+    RETURN_VALUE_IF(!validateInputGainConfig(inputGains, mChannelCount),
+                    RetCode::ERROR_ILLEGAL_PARAMETER, "inputGainNotValid");
+    return setBands_l<DynamicsProcessing::InputGain>(inputGains, StageType::INPUTGAIN);
 }
 
 DynamicsProcessing::EngineArchitecture DynamicsProcessingContext::getEngineArchitecture() {
@@ -406,45 +413,33 @@
     return eqBands;
 }
 
-/**
- * When StageEnablement is in use, bandCount needs to be positive.
- */
-bool DynamicsProcessingContext::validateStageEnablement(
-        const DynamicsProcessing::StageEnablement& enablement) {
-    return !enablement.inUse || (enablement.inUse && enablement.bandCount > 0);
-}
-
-bool DynamicsProcessingContext::validateEngineConfig(
-        const DynamicsProcessing::EngineArchitecture& engine) {
-    return engine.preferredProcessingDurationMs >= 0 &&
-           validateStageEnablement(engine.preEqStage) &&
-           validateStageEnablement(engine.postEqStage) && validateStageEnablement(engine.mbcStage);
-}
-
-bool DynamicsProcessingContext::validateEqBandConfig(const DynamicsProcessing::EqBandConfig& band,
-                                                     int maxChannel, int maxBand) {
-    return validateChannel(band.channel, maxChannel) && validateBand(band.band, maxBand);
-}
-
-bool DynamicsProcessingContext::validateMbcBandConfig(const DynamicsProcessing::MbcBandConfig& band,
-                                                      int maxChannel, int maxBand) {
-    return validateChannel(band.channel, maxChannel) && validateBand(band.band, maxBand) &&
-           validateTime(band.attackTimeMs) && validateTime(band.releaseTimeMs) &&
-           validateRatio(band.ratio) && validateBandDb(band.thresholdDb) &&
-           validateBandDb(band.kneeWidthDb) && validateBandDb(band.noiseGateThresholdDb) &&
-           validateRatio(band.expanderRatio);
+template <typename T>
+bool DynamicsProcessingContext::validateBandConfig(const std::vector<T>& bands, int maxChannel,
+                                                   int maxBand) {
+    std::vector<float> freqs(bands.size(), -1);
+    for (const auto& band : bands) {
+        if (!validateChannel(band.channel, maxChannel)) return false;
+        if (!validateBand(band.band, maxBand)) return false;
+        // Band indices are checked against maxBand, but freqs is sized by
+        // bands.size(); reject indices that would write past its end.
+        if (band.band >= static_cast<int>(freqs.size())) return false;
+        freqs[band.band] = band.cutoffFrequencyHz;
+    }
+    if (std::count(freqs.begin(), freqs.end(), -1)) return false;
+    return std::is_sorted(freqs.begin(), freqs.end());
 }
 
 bool DynamicsProcessingContext::validateLimiterConfig(
-        const DynamicsProcessing::LimiterConfig& limiter, int maxChannel) {
-    return validateChannel(limiter.channel, maxChannel) && validateTime(limiter.attackTimeMs) &&
-           validateTime(limiter.releaseTimeMs) && validateRatio(limiter.ratio) &&
-           validateBandDb(limiter.thresholdDb);
+        const std::vector<DynamicsProcessing::LimiterConfig>& cfgs, int maxChannel) {
+    for (const auto& cfg : cfgs) {
+        if (!validateChannel(cfg.channel, maxChannel)) return false;
+    }
+    return true;
 }
 
-bool DynamicsProcessingContext::validateInputGainConfig(const DynamicsProcessing::InputGain& gain,
-                                                        int maxChannel) {
-    return validateChannel(gain.channel, maxChannel);
+bool DynamicsProcessingContext::validateInputGainConfig(
+        const std::vector<DynamicsProcessing::InputGain>& cfgs, int maxChannel) {
+    for (const auto& cfg : cfgs) {
+        if (!validateChannel(cfg.channel, maxChannel)) return false;
+    }
+    return true;
 }
 
 template <typename D>
@@ -483,7 +478,6 @@
 }
 
 RetCode DynamicsProcessingContext::setDpChannelBand_l(const std::any& anyConfig, StageType type,
-                                                      int maxCh, int maxBand,
                                                       std::set<std::pair<int, int>>& chBandSet) {
     RETURN_VALUE_IF(!anyConfig.has_value(), RetCode::ERROR_ILLEGAL_PARAMETER, "bandInvalid");
     RetCode ret = RetCode::SUCCESS;
@@ -494,8 +488,6 @@
         case StageType::POSTEQ: {
             dp_fx::DPEq* dp;
             const auto& config = std::any_cast<DynamicsProcessing::EqBandConfig>(anyConfig);
-            RETURN_VALUE_IF(!validateEqBandConfig(config, maxCh, maxBand),
-                            RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
             RETURN_VALUE_IF(
                     nullptr == (dp = getEqWithType_l(type, config.channel)) || !dp->isEnabled(),
                     RetCode::ERROR_ILLEGAL_PARAMETER, "dpEqNotExist");
@@ -508,8 +500,6 @@
         case StageType::MBC: {
             dp_fx::DPMbc* dp;
             const auto& config = std::any_cast<DynamicsProcessing::MbcBandConfig>(anyConfig);
-            RETURN_VALUE_IF(!validateMbcBandConfig(config, maxCh, maxBand),
-                            RetCode::ERROR_ILLEGAL_PARAMETER, "mbcBandNotValid");
             RETURN_VALUE_IF(nullptr == (dp = getMbc_l(config.channel)) || !dp->isEnabled(),
                             RetCode::ERROR_ILLEGAL_PARAMETER, "dpMbcNotExist");
             dp_fx::DPMbcBand band;
@@ -524,8 +514,6 @@
         case StageType::LIMITER: {
             dp_fx::DPChannel* dp;
             const auto& config = std::any_cast<DynamicsProcessing::LimiterConfig>(anyConfig);
-            RETURN_VALUE_IF(!validateLimiterConfig(config, maxCh),
-                            RetCode::ERROR_ILLEGAL_PARAMETER, "limiterBandNotValid");
             RETURN_VALUE_IF(nullptr == (dp = getChannel_l(config.channel)),
                             RetCode::ERROR_ILLEGAL_PARAMETER, "dpChNotExist");
             dp_fx::DPLimiter limiter;
@@ -539,8 +527,6 @@
         case StageType::INPUTGAIN: {
             dp_fx::DPChannel* dp;
             const auto& config = std::any_cast<DynamicsProcessing::InputGain>(anyConfig);
-            RETURN_VALUE_IF(!validateInputGainConfig(config, maxCh),
-                            RetCode::ERROR_ILLEGAL_PARAMETER, "inputGainNotValid");
             RETURN_VALUE_IF(nullptr == (dp = getChannel_l(config.channel)),
                             RetCode::ERROR_ILLEGAL_PARAMETER, "dpChNotExist");
             dp->setInputGain(config.gainDb);
@@ -555,14 +541,12 @@
 }
 
 template <typename T /* BandConfig */>
-RetCode DynamicsProcessingContext::setBands_l(
-        const std::vector<T>& bands, int maxBand, StageType type) {
+RetCode DynamicsProcessingContext::setBands_l(const std::vector<T>& bands, StageType type) {
     RetCode ret = RetCode::SUCCESS;
     std::set<std::pair<int /* channel */, int /* band */>> bandSet;
 
     for (const auto& it : bands) {
-        if (RetCode::SUCCESS !=
-            setDpChannelBand_l(std::make_any<T>(it), type, mChannelCount, maxBand, bandSet)) {
+        if (RetCode::SUCCESS != setDpChannelBand_l(std::make_any<T>(it), type, bandSet)) {
             LOG(WARNING) << __func__ << " skipping band " << it.toString();
             ret = RetCode::ERROR_ILLEGAL_PARAMETER;
             continue;
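Band validation is now done up front on the whole vector rather than per element: every entry must name a valid (channel, band) pair, every band index must be covered (a leftover -1 sentinel in freqs betrays a gap), and the cutoff frequencies must be non-decreasing in band order. A small illustration with made-up values, in the same brace-initialization style used elsewhere in this change:

    // Hypothetical 3-band pre-EQ configuration that passes validateBandConfig():
    // band indices 0..2 each appear once and the cutoffs rise monotonically.
    std::vector<DynamicsProcessing::EqBandConfig> bands = {
            DynamicsProcessing::EqBandConfig(
                    {.channel = 0, .band = 0, .enable = true, .cutoffFrequencyHz = 200, .gainDb = 0}),
            DynamicsProcessing::EqBandConfig(
                    {.channel = 0, .band = 1, .enable = true, .cutoffFrequencyHz = 2000, .gainDb = 0}),
            DynamicsProcessing::EqBandConfig(
                    {.channel = 0, .band = 2, .enable = true, .cutoffFrequencyHz = 8000, .gainDb = 0}),
    };
    // Dropping the band-1 entry, or swapping the 200 Hz and 2000 Hz cutoffs,
    // makes the same call return false.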
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
index 8be784e..b8539f6 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
@@ -103,28 +103,22 @@
     RetCode setDpChannels_l(const std::vector<DynamicsProcessing::ChannelConfig>& channels,
                             bool stageInUse, StageType type) REQUIRES(mMutex);
     template <typename T /* BandConfig */>
-    RetCode setBands_l(const std::vector<T>& bands, int maxBand, StageType type) REQUIRES(mMutex);
-    RetCode setDpChannelBand_l(const std::any& anyConfig, StageType type, int maxCh, int maxBand,
+    RetCode setBands_l(const std::vector<T>& bands, StageType type) REQUIRES(mMutex);
+    RetCode setDpChannelBand_l(const std::any& anyConfig, StageType type,
                                std::set<std::pair<int, int>>& chBandSet) REQUIRES(mMutex);
 
     std::vector<DynamicsProcessing::EqBandConfig> getEqBandConfigs(StageType type);
     std::vector<DynamicsProcessing::ChannelConfig> getChannelConfig(StageType type);
 
-    bool validateStageEnablement(const DynamicsProcessing::StageEnablement& enablement);
-    bool validateEngineConfig(const DynamicsProcessing::EngineArchitecture& engine);
-    bool validateEqBandConfig(const DynamicsProcessing::EqBandConfig& band, int maxChannel,
-                              int maxBand);
-    bool validateMbcBandConfig(const DynamicsProcessing::MbcBandConfig& band, int maxChannel,
-                               int maxBand);
-    bool validateLimiterConfig(const DynamicsProcessing::LimiterConfig& limiter, int maxChannel);
-    bool validateInputGainConfig(const DynamicsProcessing::InputGain& gain, int maxChannel);
+    template <typename T /* BandConfig */>
+    bool validateBandConfig(const std::vector<T>& bands, int maxChannel, int maxBand);
+    bool validateLimiterConfig(const std::vector<DynamicsProcessing::LimiterConfig>& cfgs,
+                               int maxChannel);
+    bool validateInputGainConfig(const std::vector<DynamicsProcessing::InputGain>& cfgs,
+                                 int maxChannel);
 
-    inline bool validateCutoffFrequency(float freq);
     inline bool validateChannel(int ch, int maxCh) { return ch >= 0 && ch < maxCh; }
     inline bool validateBand(int band, int maxBand) { return band >= 0 && band < maxBand; }
-    inline bool validateTime(int time) { return time >= 0; }
-    inline bool validateRatio(int ratio) { return ratio >= 0; }
-    inline bool validateBandDb(int db) { return db <= 0; }
 };
 
 }  // namespace aidl::android::hardware::audio::effect
\ No newline at end of file
diff --git a/media/libeffects/factory/Android.bp b/media/libeffects/factory/Android.bp
index 22838a3..d94093e 100644
--- a/media/libeffects/factory/Android.bp
+++ b/media/libeffects/factory/Android.bp
@@ -39,6 +39,7 @@
     header_libs: [
         "libaudioeffects",
         "libeffects_headers",
+        "liberror_headers",
     ],
     export_header_lib_headers: ["libeffects_headers"],
 }
@@ -56,7 +57,6 @@
         "-Werror",
     ],
 
-
     shared_libs: [
         "libeffectsconfig",
         "libeffects",
diff --git a/media/libeffects/factory/EffectsConfigLoader.c b/media/libeffects/factory/EffectsConfigLoader.c
index e23530e..a1de7b3 100644
--- a/media/libeffects/factory/EffectsConfigLoader.c
+++ b/media/libeffects/factory/EffectsConfigLoader.c
@@ -137,7 +137,7 @@
                  kLibraryPathRoot[i],
                  lib_name);
         if (F_OK == access(path, 0)) {
-            strcpy(lib_path_out, path);
+            strlcpy(lib_path_out, path, PATH_MAX);
             ALOGW_IF(strncmp(lib_path_out, lib_path_in, PATH_MAX) != 0,
                 "checkLibraryPath() corrected library path %s to %s", lib_path_in, lib_path_out);
             return 0;
diff --git a/media/libeffects/factory/EffectsXmlConfigLoader.cpp b/media/libeffects/factory/EffectsXmlConfigLoader.cpp
index 30a9007..9bff136 100644
--- a/media/libeffects/factory/EffectsXmlConfigLoader.cpp
+++ b/media/libeffects/factory/EffectsXmlConfigLoader.cpp
@@ -64,7 +64,7 @@
 
     std::string absolutePath;
     if (!resolveLibrary(relativePath, &absolutePath)) {
-        ALOGE("Could not find library in effect directories: %s", relativePath);
+        ALOGE("%s Could not find library in effect directories: %s", __func__, relativePath);
         libEntry->path = strdup(relativePath);
         return false;
     }
@@ -75,20 +75,20 @@
     std::unique_ptr<void, decltype(dlclose)*> libHandle(dlopen(path, RTLD_NOW),
                                                        dlclose);
     if (libHandle == nullptr) {
-        ALOGE("Could not dlopen library %s: %s", path, dlerror());
+        ALOGE("%s Could not dlopen library %s: %s", __func__, path, dlerror());
         return false;
     }
 
     auto* description = static_cast<audio_effect_library_t*>(
           dlsym(libHandle.get(), AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR));
     if (description == nullptr) {
-        ALOGE("Invalid effect library, failed not find symbol '%s' in %s: %s",
+        ALOGE("%s Invalid effect library, failed not find symbol '%s' in %s: %s", __func__,
               AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR, path, dlerror());
         return false;
     }
 
     if (description->tag != AUDIO_EFFECT_LIBRARY_TAG) {
-        ALOGE("Bad tag %#08x in description structure, expected %#08x for library %s",
+        ALOGE("%s Bad tag %#08x in description structure, expected %#08x for library %s", __func__,
               description->tag, AUDIO_EFFECT_LIBRARY_TAG, path);
         return false;
     }
@@ -96,8 +96,8 @@
     uint32_t majorVersion = EFFECT_API_VERSION_MAJOR(description->version);
     uint32_t expectedMajorVersion = EFFECT_API_VERSION_MAJOR(EFFECT_LIBRARY_API_VERSION_CURRENT);
     if (majorVersion != expectedMajorVersion) {
-        ALOGE("Unsupported major version %#08x, expected %#08x for library %s",
-              majorVersion, expectedMajorVersion, path);
+        ALOGE("%s Unsupported major version %#08x, expected %#08x for library %s",
+              __func__, majorVersion, expectedMajorVersion, path);
         return false;
     }
 
@@ -155,14 +155,13 @@
 {
     size_t nbSkippedElement = 0;
     for (auto& library : libs) {
-
         // Construct a lib entry
         auto libEntry = makeUniqueC<lib_entry_t>();
-        libEntry->name = strdup(library.name.c_str());
+        libEntry->name = strdup(library->name.c_str());
         libEntry->effects = nullptr;
         pthread_mutex_init(&libEntry->lock, nullptr);
 
-        if (!loadLibrary(library.path.c_str(), libEntry.get())) {
+        if (!loadLibrary(library->path.c_str(), libEntry.get())) {
             // Register library load failure
             listPush(std::move(libEntry), libFailedList);
             ++nbSkippedElement;
@@ -209,24 +208,24 @@
     UniqueCPtr<effect_descriptor_t> effectDesc;
 };
 
-LoadEffectResult loadEffect(const EffectImpl& effect, const std::string& name,
-                            list_elem_t* libList) {
+LoadEffectResult loadEffect(const std::shared_ptr<const EffectImpl>& effect,
+                            const std::string& name, list_elem_t* libList) {
     LoadEffectResult result;
 
     // Find the effect library
-    result.lib = findLibrary(effect.library->name.c_str(), libList);
+    result.lib = findLibrary(effect->library->name.c_str(), libList);
     if (result.lib == nullptr) {
-        ALOGE("Could not find library %s to load effect %s",
-              effect.library->name.c_str(), name.c_str());
+        ALOGE("%s Could not find library %s to load effect %s",
+              __func__, effect->library->name.c_str(), name.c_str());
         return result;
     }
 
     result.effectDesc = makeUniqueC<effect_descriptor_t>();
 
     // Get the effect descriptor
-    if (result.lib->desc->get_descriptor(&effect.uuid, result.effectDesc.get()) != 0) {
+    if (result.lib->desc->get_descriptor(&effect->uuid, result.effectDesc.get()) != 0) {
         ALOGE("Error querying effect %s on lib %s",
-              uuidToString(effect.uuid), result.lib->name);
+              uuidToString(effect->uuid), result.lib->name);
         result.effectDesc.reset();
         return result;
     }
@@ -241,14 +240,15 @@
     // Check effect is supported
     uint32_t expectedMajorVersion = EFFECT_API_VERSION_MAJOR(EFFECT_CONTROL_API_VERSION);
     if (EFFECT_API_VERSION_MAJOR(result.effectDesc->apiVersion) != expectedMajorVersion) {
-        ALOGE("Bad API version %#08x for effect %s in lib %s, expected major %#08x",
+        ALOGE("%s Bad API version %#08x for effect %s in lib %s, expected major %#08x", __func__,
               result.effectDesc->apiVersion, name.c_str(), result.lib->name, expectedMajorVersion);
         return result;
     }
 
     lib_entry_t *_;
-    if (findEffect(nullptr, &effect.uuid, &_, nullptr) == 0) {
-        ALOGE("Effect %s uuid %s already exist", uuidToString(effect.uuid), name.c_str());
+    if (findEffect(nullptr, &effect->uuid, &_, nullptr) == 0) {
+        ALOGE("%s Effect %s uuid %s already exist", __func__, uuidToString(effect->uuid),
+              name.c_str());
         return result;
     }
 
@@ -261,8 +261,11 @@
     size_t nbSkippedElement = 0;
 
     for (auto& effect : effects) {
+        if (!effect) {
+            continue;
+        }
 
-        auto effectLoadResult = loadEffect(effect, effect.name, libList);
+        auto effectLoadResult = loadEffect(effect, effect->name, libList);
         if (!effectLoadResult.success) {
             if (effectLoadResult.effectDesc != nullptr) {
                 listPush(std::move(effectLoadResult.effectDesc), skippedEffects);
@@ -271,9 +274,9 @@
             continue;
         }
 
-        if (effect.isProxy) {
-            auto swEffectLoadResult = loadEffect(effect.libSw, effect.name + " libsw", libList);
-            auto hwEffectLoadResult = loadEffect(effect.libHw, effect.name + " libhw", libList);
+        if (effect->isProxy) {
+            auto swEffectLoadResult = loadEffect(effect->libSw, effect->name + " libsw", libList);
+            auto hwEffectLoadResult = loadEffect(effect->libHw, effect->name + " libhw", libList);
             if (!swEffectLoadResult.success || !hwEffectLoadResult.success) {
                 // Push the main effect in the skipped list even if only a subeffect is invalid
                 // as the main effect is not usable without its subeffects.
@@ -287,7 +290,7 @@
             // get_descriptor call, we replace it with the corresponding
             // sw effect descriptor, but keep the Proxy UUID
             *effectLoadResult.effectDesc = *swEffectLoadResult.effectDesc;
-            effectLoadResult.effectDesc->uuid = effect.uuid;
+            effectLoadResult.effectDesc->uuid = effect->uuid;
 
             effectLoadResult.effectDesc->flags |= EFFECT_FLAG_OFFLOAD_SUPPORTED;
 
@@ -326,8 +329,8 @@
                                loadEffects(result.parsedConfig->effects, gLibraryList,
                                            &gSkippedEffects, &gSubEffectList);
 
-    ALOGE_IF(result.nbSkippedElement != 0, "%zu errors during loading of configuration: %s",
-             result.nbSkippedElement,
+    ALOGE_IF(result.nbSkippedElement != 0, "%s %zu errors during loading of configuration: %s",
+             __func__, result.nbSkippedElement,
              result.configPath.empty() ? "No config file found" : result.configPath.c_str());
 
     return result.nbSkippedElement;
diff --git a/media/libeffects/factory/test/DumpConfig.cpp b/media/libeffects/factory/test/DumpConfig.cpp
index 0a156b4..331826f 100644
--- a/media/libeffects/factory/test/DumpConfig.cpp
+++ b/media/libeffects/factory/test/DumpConfig.cpp
@@ -14,54 +14,49 @@
  * limitations under the License.
  */
 
+#include <getopt.h>
+
 #include <media/EffectsFactoryApi.h>
-#include <unistd.h>
 #include "EffectsXmlConfigLoader.h"
 #include "EffectsConfigLoader.h"
 
 int main(int argc, char* argv[]) {
-    const char* path = nullptr;
-    bool legacyFormat;
+    const char* const short_opts = "lx:h";
+    const option long_opts[] = {{"legacy", no_argument, nullptr, 'l'},
+                                {"xml", optional_argument, nullptr, 'x'},
+                                {"help", no_argument, nullptr, 'h'}};
 
-    if (argc == 2 && strcmp(argv[1], "--legacy") == 0) {
-        legacyFormat = true;
-        fprintf(stderr, "Dumping legacy effect config file\n");
-    } else if ((argc == 2 || argc == 3) && strcmp(argv[1], "--xml") == 0) {
-        legacyFormat = false;
-        if (argc == 3) {
-            fprintf(stderr, "Dumping XML effect config file: %s\n", path);
-        } else {
-            fprintf(stderr, "Dumping default XML effect config file.\n");
+    const auto opt = getopt_long(argc, argv, short_opts, long_opts, nullptr);
+    switch (opt) {
+        case 'l': { // -l or --legacy
+            printf("Dumping legacy effect config file\n");
+            if (EffectLoadEffectConfig() < 0) {
+                fprintf(stderr, "loadEffectConfig failed, see logcat for detail.\n");
+                return 1;
+            }
+            return EffectDumpEffects(STDOUT_FILENO);
         }
-    } else {
-        fprintf(stderr, "Invalid arguments.\n"
-                        "Usage: %s [--legacy|--xml [FILE]]\n", argv[0]);
-        return 1;
-    }
-
-    if (!legacyFormat) {
-        ssize_t ret = EffectLoadXmlEffectConfig(path);
-        if (ret < 0) {
-            fprintf(stderr, "loadXmlEffectConfig failed, see logcat for detail.\n");
-            return 2;
+        case 'x': { // -x or --xml
+            printf("Dumping effect config file: %s\n", (optarg == NULL) ? "default" : optarg);
+            ssize_t ret = EffectLoadXmlEffectConfig(optarg);
+            if (ret < 0) {
+                fprintf(stderr, "loadXmlEffectConfig failed, see logcat for detail.\n");
+                return 1;
+            }
+            if (ret > 0) {
+                printf("Partially failed to load config. Skipped %zu elements.\n",
+                        (size_t)ret);
+            }
+            return EffectDumpEffects(STDOUT_FILENO);
         }
-        if (ret > 0) {
-            fprintf(stderr, "Partially failed to load config. Skipped %zu elements, "
-                    "see logcat for detail.\n", (size_t)ret);
+        case 'h': // -h or --help
+        default: {
+            printf("Usage: %s\n"
+                   "--legacy (or -l):        Legacy audio effect config file to load\n"
+                   "--xml (or -x) <FILE>:    Audio effect config file to load\n"
+                   "--help (or -h):          Show this help\n",
+                   argv[0]);
+            return 0;
         }
     }
-
-    if (legacyFormat) {
-        auto ret = EffectLoadEffectConfig();
-        if (ret < 0) {
-            fprintf(stderr, "loadEffectConfig failed, see logcat for detail.\n");
-            return 3;
-        }
-        fprintf(stderr, "legacy loadEffectConfig has probably succeed, see logcat to make sure.\n");
-    }
-
-    if (EffectDumpEffects(STDOUT_FILENO) != 0) {
-        fprintf(stderr, "Effect dump failed, see logcat for detail.\n");
-        return 4;
-    }
 }
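With the switch to getopt_long(), the dump utility handles exactly one mode flag per invocation. Typical runs look like the following (the binary name is illustrative; use whatever this module actually builds):

    dump_effect_config --legacy                           # load audio_effects.conf, then dump
    dump_effect_config --xml                              # load the default audio_effects.xml, then dump
    dump_effect_config -x /vendor/etc/audio_effects.xml   # load a specific XML config, then dump

Any other input, including -h/--help, prints the usage text and returns 0. Because --xml takes an optional_argument, a file passed to the long form must be attached as --xml=FILE; the short form -x FILE works as shown.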
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
index 4eea04f..bfc5059 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
@@ -166,9 +166,9 @@
              * Bypass mode or everything off, so copy the input to the output
              */
             if (pToProcess != pProcessed) {
-                Copy_Float(pToProcess,                          /* Source */
-                           pProcessed,                          /* Destination */
-                           (LVM_INT16)(NrChannels * NrFrames)); /* Copy all samples */
+                Copy_Float(pToProcess,   /* Source */
+                           pProcessed,   /* Destination */
+                           SampleCount); /* Copy all samples */
             }
 
             /*
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index d026e2b..0db7a73 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -15,9 +15,11 @@
  */
 
 #include <cstddef>
+#include <cstdio>
 
 #define LOG_TAG "BundleContext"
 #include <android-base/logging.h>
+#include <audio_utils/power.h>
 #include <Utils.h>
 
 #include "BundleContext.h"
@@ -34,7 +36,7 @@
     std::lock_guard lg(mMutex);
     // init with pre-defined preset NORMAL
     for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-        mBandGaindB[i] = lvm::kSoftPresets[0 /* normal */][i];
+        mBandGainMdB[i] = lvm::kSoftPresets[0 /* normal */][i] * 100;
     }
 
     // allocate lvm instance
@@ -212,7 +214,7 @@
 
         if (eqEnabled) {
             for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-                float bandFactor = mBandGaindB[i] / 15.0;
+                float bandFactor = mBandGainMdB[i] / 1500.0;
                 float bandCoefficient = lvm::kBandEnergyCoefficient[i];
                 float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
                 if (bandEnergy > 0) energyContribution += bandEnergy;
@@ -221,8 +223,8 @@
             // cross EQ coefficients
             float bandFactorSum = 0;
             for (int i = 0; i < lvm::MAX_NUM_BANDS - 1; i++) {
-                float bandFactor1 = mBandGaindB[i] / 15.0;
-                float bandFactor2 = mBandGaindB[i + 1] / 15.0;
+                float bandFactor1 = mBandGainMdB[i] / 1500.0;
+                float bandFactor2 = mBandGainMdB[i + 1] / 1500.0;
 
                 if (bandFactor1 > 0 && bandFactor2 > 0) {
                     float crossEnergy =
@@ -244,7 +246,7 @@
 
             if (eqEnabled) {
                 for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-                    float bandFactor = mBandGaindB[i] / 15.0;
+                    float bandFactor = mBandGainMdB[i] / 1500.0;
                     float bandCrossCoefficient = lvm::kBassBoostEnergyCrossCoefficient[i];
                     float bandEnergy = boostFactor * bandFactor * bandCrossCoefficient;
                     if (bandEnergy > 0) energyBassBoost += bandEnergy;
@@ -397,15 +399,10 @@
     return db_fix;
 }
 
-// TODO: replace with more generic approach, like: audio_utils_power_from_amplitude
-int16_t BundleContext::VolToDb(uint32_t vol) const {
-    int16_t dB;
-
-    dB = LVC_ToDB_s32Tos16(vol << 7);
-    dB = (dB + 8) >> 4;
-    dB = (dB < -96) ? -96 : dB;
-
-    return dB;
+/* static */
+float BundleContext::VolToDb(float vol) {
+    float dB = audio_utils_power_from_amplitude(vol);
+    return std::max(dB, -96.f);
 }
 
 RetCode BundleContext::setVolumeStereo(const Parameter::VolumeStereo& volume) {
@@ -413,11 +410,12 @@
     LVM_ReturnStatus_en status = LVM_SUCCESS;
 
     // Convert volume to dB
-    int leftdB = VolToDb(volume.left);
-    int rightdB = VolToDb(volume.right);
-    int maxdB = std::max(leftdB, rightdB);
-    int pandB = rightdB - leftdB;
-    setVolumeLevel(maxdB * 100);
+    float leftdB = VolToDb(volume.left);
+    float rightdB = VolToDb(volume.right);
+
+    float maxdB = std::max(leftdB, rightdB);
+    float pandB = rightdB - leftdB;
+    setVolumeLevel(maxdB);
     LOG(DEBUG) << __func__ << " pandB: " << pandB << " maxdB " << maxdB;
 
     {
@@ -441,8 +439,8 @@
     std::vector<Equalizer::BandLevel> bandLevels;
     bandLevels.reserve(lvm::MAX_NUM_BANDS);
     for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-        bandLevels.emplace_back(
-                Equalizer::BandLevel{static_cast<int32_t>(i), lvm::kSoftPresets[presetIdx][i]});
+        bandLevels.emplace_back(Equalizer::BandLevel{static_cast<int32_t>(i),
+                                                     lvm::kSoftPresets[presetIdx][i] * 100});
     }
 
     RetCode ret = updateControlParameter(bandLevels);
@@ -472,7 +470,8 @@
     std::vector<Equalizer::BandLevel> bandLevels;
     bandLevels.reserve(lvm::MAX_NUM_BANDS);
     for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-        bandLevels.emplace_back(Equalizer::BandLevel{static_cast<int32_t>(i), mBandGaindB[i]});
+        bandLevels.emplace_back(
+                Equalizer::BandLevel{static_cast<int32_t>(i), mBandGainMdB[i]});
     }
     return bandLevels;
 }
@@ -506,7 +505,7 @@
     RETURN_VALUE_IF(!isBandLevelIndexInRange(bandLevels), RetCode::ERROR_ILLEGAL_PARAMETER,
                     "indexOutOfRange");
 
-    std::array<int, lvm::MAX_NUM_BANDS> tempLevel;
+    std::array<int, lvm::MAX_NUM_BANDS> tempLevel(mBandGainMdB);
     for (const auto& it : bandLevels) {
         tempLevel[it.index] = it.levelMb;
     }
@@ -520,14 +519,16 @@
         for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
             params.pEQNB_BandDefinition[i].Frequency = lvm::kPresetsFrequencies[i];
             params.pEQNB_BandDefinition[i].QFactor = lvm::kPresetsQFactors[i];
-            params.pEQNB_BandDefinition[i].Gain = tempLevel[i];
+            params.pEQNB_BandDefinition[i].Gain =
+                    tempLevel[i] > 0 ? (tempLevel[i] + 50) / 100 : (tempLevel[i] - 50) / 100;
         }
 
         RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
                         RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
     }
-    mBandGaindB = tempLevel;
-    LOG(INFO) << __func__ << " update bandGain to " << ::android::internal::ToString(mBandGaindB);
+    mBandGainMdB = tempLevel;
+    LOG(DEBUG) << __func__ << " update bandGain to " << ::android::internal::ToString(mBandGainMdB)
+               << "mdB";
 
     return RetCode::SUCCESS;
 }
@@ -551,18 +552,18 @@
     return limitLevel();
 }
 
-RetCode BundleContext::setVolumeLevel(int level) {
+RetCode BundleContext::setVolumeLevel(float level) {
     if (mMuteEnabled) {
-        mLevelSaved = level / 100;
+        mLevelSaved = level;
     } else {
-        mVolume = level / 100;
+        mVolume = level;
     }
     LOG(INFO) << __func__ << " success with level " << level;
     return limitLevel();
 }
 
-int BundleContext::getVolumeLevel() const {
-    return (mMuteEnabled ? mLevelSaved * 100 : mVolume * 100);
+float BundleContext::getVolumeLevel() const {
+    return (mMuteEnabled ? mLevelSaved : mVolume);
 }
 
 RetCode BundleContext::setVolumeMute(bool mute) {
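The volume path through the bundle now stays in floating-point dB end to end instead of bouncing through integer hundredths. Assuming audio_utils_power_from_amplitude() returns 20*log10(|amplitude|), as its use here relies on, the conversion behaves roughly as follows:

    VolToDb(1.0f)   ->   0 dB
    VolToDb(0.25f)  ->  about -12 dB
    VolToDb(0.0f)   ->  -96 dB   (clamped by std::max(dB, -96.f))

Likewise the equalizer band gains are now stored in millibels and only rounded to whole dB where they are handed to LVM, e.g. +1250 mB -> (1250 + 50) / 100 = 13 dB and -1250 mB -> (-1250 - 50) / 100 = -13 dB (integer division, rounding halves away from zero).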
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index 47d5e5a..62bb6e4 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -80,8 +80,8 @@
     RetCode setBassBoostStrength(int strength);
     int getBassBoostStrength() const { return mBassStrengthSaved; }
 
-    RetCode setVolumeLevel(int level);
-    int getVolumeLevel() const;
+    RetCode setVolumeLevel(float level);
+    float getVolumeLevel() const;
 
     RetCode setVolumeMute(bool mute);
     int getVolumeMute() const { return mMuteEnabled; }
@@ -135,20 +135,20 @@
     int mBassStrengthSaved = 0;
     // Equalizer
     int mCurPresetIdx = lvm::PRESET_CUSTOM; /* Current preset being used */
-    std::array<int, lvm::MAX_NUM_BANDS> mBandGaindB;
+    std::array<int, lvm::MAX_NUM_BANDS> mBandGainMdB; /* band gain in millibels */
     // Virtualizer
     int mVirtStrengthSaved = 0; /* Conversion between Get/Set */
     bool mVirtualizerTempDisabled = false;
     ::aidl::android::media::audio::common::AudioDeviceDescription mForceDevice;
     // Volume
-    int mLevelSaved = 0; /* for when mute is set, level must be saved */
-    int mVolume = 0;
+    float mLevelSaved = 0; /* for when mute is set, level must be saved */
+    float mVolume = 0;
     bool mMuteEnabled = false; /* Must store as mute = -96dB level */
 
     void initControlParameter(LVM_ControlParams_t& params) const;
     void initHeadroomParameter(LVM_HeadroomParams_t& params) const;
     RetCode limitLevel();
-    int16_t VolToDb(uint32_t vol) const;
+    static float VolToDb(float vol);
     LVM_INT16 LVC_ToDB_s32Tos16(LVM_INT32 Lin_fix) const;
     RetCode updateControlParameter(const std::vector<Equalizer::BandLevel>& bandLevels);
     bool isBandLevelIndexInRange(const std::vector<Equalizer::BandLevel>& bandLevels) const;
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
index b3371a3..143329d 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
@@ -73,9 +73,9 @@
         MAKE_RANGE(Equalizer, preset, 0, MAX_NUM_PRESETS - 1),
         MAKE_RANGE(Equalizer, bandLevels,
                    std::vector<Equalizer::BandLevel>{
-                           Equalizer::BandLevel({.index = 0, .levelMb = -15})},
+                           Equalizer::BandLevel({.index = 0, .levelMb = -1500})},
                    std::vector<Equalizer::BandLevel>{
-                           Equalizer::BandLevel({.index = MAX_NUM_BANDS - 1, .levelMb = 15})}),
+                           Equalizer::BandLevel({.index = MAX_NUM_BANDS - 1, .levelMb = 1500})}),
         /* capability definition */
         MAKE_RANGE(Equalizer, bandFrequencies, kEqBandFrequency, kEqBandFrequency),
         MAKE_RANGE(Equalizer, presets, kEqPresets, kEqPresets),
@@ -85,9 +85,7 @@
 static const std::string kEqualizerEffectName = "EqualizerBundle";
 static const Descriptor kEqualizerDesc = {
         .common = {.id = {.type = getEffectTypeUuidEqualizer(),
-                          .uuid = getEffectImplUuidEqualizerBundle(),
-                          .proxy = getEffectImplUuidEqualizerProxy()},
-
+                          .uuid = getEffectImplUuidEqualizerBundle()},
                    .flags = {.type = Flags::Type::INSERT,
                              .insert = Flags::Insert::FIRST,
                              .volume = Flags::Volume::CTRL},
@@ -102,8 +100,7 @@
 static const std::string kBassBoostEffectName = "Dynamic Bass Boost";
 static const Descriptor kBassBoostDesc = {
         .common = {.id = {.type = getEffectTypeUuidBassBoost(),
-                          .uuid = getEffectImplUuidBassBoostBundle(),
-                          .proxy = getEffectImplUuidBassBoostProxy()},
+                          .uuid = getEffectImplUuidBassBoostBundle()},
                    .flags = {.type = Flags::Type::INSERT,
                              .insert = Flags::Insert::FIRST,
                              .volume = Flags::Volume::CTRL,
@@ -121,8 +118,7 @@
 
 static const Descriptor kVirtualizerDesc = {
         .common = {.id = {.type = getEffectTypeUuidVirtualizer(),
-                          .uuid = getEffectImplUuidVirtualizerBundle(),
-                          .proxy = getEffectImplUuidVirtualizerProxy()},
+                          .uuid = getEffectImplUuidVirtualizerBundle()},
                    .flags = {.type = Flags::Type::INSERT,
                              .insert = Flags::Insert::LAST,
                              .volume = Flags::Volume::CTRL,
@@ -139,8 +135,7 @@
 static const std::string kVolumeEffectName = "Volume";
 static const Descriptor kVolumeDesc = {
         .common = {.id = {.type = getEffectTypeUuidVolume(),
-                          .uuid = getEffectImplUuidVolumeBundle(),
-                          .proxy = std::nullopt},
+                          .uuid = getEffectImplUuidVolumeBundle()},
                    .flags = {.type = Flags::Type::INSERT,
                              .insert = Flags::Insert::LAST,
                              .volume = Flags::Volume::CTRL},
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
index cd9fb60..eb7ab1a 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
@@ -355,7 +355,7 @@
     auto tag = id.get<Volume::Id::commonTag>();
     switch (tag) {
         case Volume::levelDb: {
-            volParam.set<Volume::levelDb>(mContext->getVolumeLevel());
+            volParam.set<Volume::levelDb>(static_cast<int>(mContext->getVolumeLevel()));
             break;
         }
         case Volume::mute: {
@@ -384,6 +384,7 @@
 
     if (id.getTag() == Virtualizer::Id::speakerAnglesPayload) {
         auto angles = mContext->getSpeakerAngles(id.get<Virtualizer::Id::speakerAnglesPayload>());
+        RETURN_IF(angles.size() == 0, EX_ILLEGAL_ARGUMENT, "getSpeakerAnglesFailed");
         Virtualizer param = Virtualizer::make<Virtualizer::speakerAngles>(angles);
         specific->set<Parameter::Specific::virtualizer>(param);
         return ndk::ScopedAStatus::ok();
diff --git a/media/libeffects/spatializer/benchmarks/spatializer_benchmark.cpp b/media/libeffects/spatializer/benchmarks/spatializer_benchmark.cpp
index e8ac480..e2177db 100644
--- a/media/libeffects/spatializer/benchmarks/spatializer_benchmark.cpp
+++ b/media/libeffects/spatializer/benchmarks/spatializer_benchmark.cpp
@@ -31,6 +31,7 @@
                 (audio_effect_library_t*)dlsym(effectLib, AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR);
         if (effectInterface == nullptr) {
             ALOGE("dlsym failed: %s", dlerror());
+            dlclose(effectLib);
             exit(-1);
         }
         symbol = (audio_effect_library_t)(*effectInterface);
diff --git a/media/libeffects/spatializer/tests/SpatializerTest.cpp b/media/libeffects/spatializer/tests/SpatializerTest.cpp
index 110fbb1..3db42b6 100644
--- a/media/libeffects/spatializer/tests/SpatializerTest.cpp
+++ b/media/libeffects/spatializer/tests/SpatializerTest.cpp
@@ -30,6 +30,7 @@
                 (audio_effect_library_t*)dlsym(effectLib, AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR);
         if (effectInterface == nullptr) {
             ALOGE("dlsym failed: %s", dlerror());
+            dlclose(effectLib);
             exit(-1);
         }
         symbol = (audio_effect_library_t)(*effectInterface);
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 2ba1fc3..6834b7d 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -476,35 +476,37 @@
 }
 
 bool HeifDecoderImpl::setOutputColor(HeifColorFormat heifColor) {
-    if (heifColor == (HeifColorFormat)mOutputColor) {
-        return true;
-    }
-
+    android_pixel_format_t outputColor;
     switch(heifColor) {
         case kHeifColorFormat_RGB565:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_RGB_565;
+            outputColor = HAL_PIXEL_FORMAT_RGB_565;
             break;
         }
         case kHeifColorFormat_RGBA_8888:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_RGBA_8888;
+            outputColor = HAL_PIXEL_FORMAT_RGBA_8888;
             break;
         }
         case kHeifColorFormat_BGRA_8888:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_BGRA_8888;
+            outputColor = HAL_PIXEL_FORMAT_BGRA_8888;
             break;
         }
         case kHeifColorFormat_RGBA_1010102:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_RGBA_1010102;
+            outputColor = HAL_PIXEL_FORMAT_RGBA_1010102;
             break;
         }
         default:
             ALOGE("Unsupported output color format %d", heifColor);
             return false;
     }
+    if (outputColor == mOutputColor) {
+        return true;
+    }
+
+    mOutputColor = outputColor;
 
     if (mFrameDecoded) {
         return reinit(nullptr);
diff --git a/media/libmedia/CharacterEncodingDetector.cpp b/media/libmedia/CharacterEncodingDetector.cpp
index 64ba977..e33cc0f 100644
--- a/media/libmedia/CharacterEncodingDetector.cpp
+++ b/media/libmedia/CharacterEncodingDetector.cpp
@@ -393,10 +393,6 @@
         while (true) {
             // demerit the current encoding for each "special" character found after conversion.
             // The amount of demerit is somewhat arbitrarily chosen.
-            int inchar;
-            if (source != sourceLimit) {
-                inchar = (source[0] << 8) + source[1];
-            }
             UChar32 c = ucnv_getNextUChar(conv, &source, sourceLimit, &status);
             if (!U_SUCCESS(status)) {
                 break;
diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp
index c43ef66..f498453 100644
--- a/media/libmedia/IMediaDeathNotifier.cpp
+++ b/media/libmedia/IMediaDeathNotifier.cpp
@@ -38,16 +38,10 @@
     Mutex::Autolock _l(sServiceLock);
     if (sMediaPlayerService == 0) {
         sp<IServiceManager> sm = defaultServiceManager();
-        sp<IBinder> binder;
-        do {
-            binder = sm->getService(String16("media.player"));
-            if (binder != 0) {
-                break;
-            }
-            ALOGW("Media player service not published, waiting...");
-            usleep(500000); // 0.5 s
-        } while (true);
-
+        sp<IBinder> binder = sm->waitForService(String16("media.player"));
+        if (binder == nullptr) {
+            return nullptr;
+        }
         if (sDeathNotifier == NULL) {
             sDeathNotifier = new DeathNotifier();
         }
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index 8a3b84e..86427ed 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -27,40 +27,6 @@
 #include <utils/String8.h>
 #include <utils/KeyedVector.h>
 
-// The binder is supposed to propagate the scheduler group across
-// the binder interface so that remote calls are executed with
-// the same priority as local calls. This is currently not working
-// so this change puts in a temporary hack to fix the issue with
-// metadata retrieval which can be a huge CPU hit if done on a
-// foreground thread.
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-
-#undef LOG_TAG
-#define LOG_TAG "IMediaMetadataRetriever"
-#include <utils/Log.h>
-#include <cutils/sched_policy.h>
-
-namespace android {
-
-static void sendSchedPolicy(Parcel& data)
-{
-    SchedPolicy policy;
-    get_sched_policy(gettid(), &policy);
-    data.writeInt32(policy);
-}
-
-static void setSchedPolicy(const Parcel& data)
-{
-    SchedPolicy policy = (SchedPolicy) data.readInt32();
-    set_sched_policy(gettid(), policy);
-}
-static void restoreSchedPolicy()
-{
-    set_sched_policy(gettid(), SP_FOREGROUND);
-}
-}; // end namespace android
-#endif
-
 namespace android {
 
 enum {
@@ -157,9 +123,6 @@
         data.writeInt32(option);
         data.writeInt32(colorFormat);
         data.writeInt32(metaOnly);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-        sendSchedPolicy(data);
-#endif
         remote()->transact(GET_FRAME_AT_TIME, data, &reply);
         status_t ret = reply.readInt32();
         if (ret != NO_ERROR) {
@@ -178,9 +141,6 @@
         data.writeInt32(colorFormat);
         data.writeInt32(metaOnly);
         data.writeInt32(thumbnail);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-        sendSchedPolicy(data);
-#endif
         remote()->transact(GET_IMAGE_AT_INDEX, data, &reply);
         status_t ret = reply.readInt32();
         if (ret != NO_ERROR) {
@@ -202,9 +162,6 @@
         data.writeInt32(top);
         data.writeInt32(right);
         data.writeInt32(bottom);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-        sendSchedPolicy(data);
-#endif
         remote()->transact(GET_IMAGE_RECT_AT_INDEX, data, &reply);
         status_t ret = reply.readInt32();
         if (ret != NO_ERROR) {
@@ -223,9 +180,6 @@
         data.writeInt32(index);
         data.writeInt32(colorFormat);
         data.writeInt32(metaOnly);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-        sendSchedPolicy(data);
-#endif
         remote()->transact(GET_FRAME_AT_INDEX, data, &reply);
         status_t ret = reply.readInt32();
         if (ret != NO_ERROR) {
@@ -238,9 +192,6 @@
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-        sendSchedPolicy(data);
-#endif
         remote()->transact(EXTRACT_ALBUM_ART, data, &reply);
         status_t ret = reply.readInt32();
         if (ret != NO_ERROR) {
@@ -253,9 +204,6 @@
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-        sendSchedPolicy(data);
-#endif
         data.writeInt32(keyCode);
         remote()->transact(EXTRACT_METADATA, data, &reply);
         status_t ret = reply.readInt32();
@@ -366,9 +314,6 @@
             bool metaOnly = (data.readInt32() != 0);
             ALOGV("getTimeAtTime: time(%" PRId64 " us), option(%d), colorFormat(%d), metaOnly(%d)",
                     timeUs, option, colorFormat, metaOnly);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            setSchedPolicy(data);
-#endif
             sp<IMemory> bitmap = getFrameAtTime(timeUs, option, colorFormat, metaOnly);
             if (bitmap != 0) {  // Don't send NULL across the binder interface
                 reply->writeInt32(NO_ERROR);
@@ -376,9 +321,6 @@
             } else {
                 reply->writeInt32(UNKNOWN_ERROR);
             }
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            restoreSchedPolicy();
-#endif
             return NO_ERROR;
         } break;
         case GET_IMAGE_AT_INDEX: {
@@ -389,9 +331,6 @@
             bool thumbnail = (data.readInt32() != 0);
             ALOGV("getImageAtIndex: index(%d), colorFormat(%d), metaOnly(%d), thumbnail(%d)",
                     index, colorFormat, metaOnly, thumbnail);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            setSchedPolicy(data);
-#endif
             sp<IMemory> bitmap = getImageAtIndex(index, colorFormat, metaOnly, thumbnail);
             if (bitmap != 0) {  // Don't send NULL across the binder interface
                 reply->writeInt32(NO_ERROR);
@@ -399,9 +338,6 @@
             } else {
                 reply->writeInt32(UNKNOWN_ERROR);
             }
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            restoreSchedPolicy();
-#endif
             return NO_ERROR;
         } break;
 
@@ -415,9 +351,6 @@
             int bottom = data.readInt32();
             ALOGV("getImageRectAtIndex: index(%d), colorFormat(%d), rect {%d, %d, %d, %d}",
                     index, colorFormat, left, top, right, bottom);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            setSchedPolicy(data);
-#endif
             sp<IMemory> bitmap = getImageRectAtIndex(
                     index, colorFormat, left, top, right, bottom);
             if (bitmap != 0) {  // Don't send NULL across the binder interface
@@ -426,9 +359,6 @@
             } else {
                 reply->writeInt32(UNKNOWN_ERROR);
             }
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            restoreSchedPolicy();
-#endif
             return NO_ERROR;
         } break;
 
@@ -439,9 +369,6 @@
             bool metaOnly = (data.readInt32() != 0);
             ALOGV("getFrameAtIndex: index(%d), colorFormat(%d), metaOnly(%d)",
                     index, colorFormat, metaOnly);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            setSchedPolicy(data);
-#endif
             sp<IMemory> frame = getFrameAtIndex(index, colorFormat, metaOnly);
             if (frame != nullptr) {  // Don't send NULL across the binder interface
                 reply->writeInt32(NO_ERROR);
@@ -449,16 +376,10 @@
             } else {
                 reply->writeInt32(UNKNOWN_ERROR);
             }
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            restoreSchedPolicy();
-#endif
             return NO_ERROR;
         } break;
         case EXTRACT_ALBUM_ART: {
             CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            setSchedPolicy(data);
-#endif
             sp<IMemory> albumArt = extractAlbumArt();
             if (albumArt != 0) {  // Don't send NULL across the binder interface
                 reply->writeInt32(NO_ERROR);
@@ -466,16 +387,10 @@
             } else {
                 reply->writeInt32(UNKNOWN_ERROR);
             }
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            restoreSchedPolicy();
-#endif
             return NO_ERROR;
         } break;
         case EXTRACT_METADATA: {
             CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            setSchedPolicy(data);
-#endif
             int keyCode = data.readInt32();
             const char* value = extractMetadata(keyCode);
             if (value != NULL) {  // Don't send NULL across the binder interface
@@ -484,9 +399,6 @@
             } else {
                 reply->writeInt32(UNKNOWN_ERROR);
             }
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            restoreSchedPolicy();
-#endif
             return NO_ERROR;
         } break;
         default:
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 2ae76b3..40fd022 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -41,14 +41,10 @@
     if (sService == 0) {
         sp<IServiceManager> sm = defaultServiceManager();
         sp<IBinder> binder;
-        do {
-            binder = sm->getService(String16("media.player"));
-            if (binder != 0) {
-                break;
-            }
-            ALOGW("MediaPlayerService not published, waiting...");
-            usleep(500000); // 0.5 s
-        } while (true);
+        binder = sm->waitForService(String16("media.player"));
+        if (binder == nullptr) {
+            return nullptr;
+        }
         if (sDeathNotifier == NULL) {
             sDeathNotifier = new DeathNotifier();
         }
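
For reference, a minimal sketch of the lookup pattern this change adopts: IServiceManager::waitForService() blocks inside servicemanager until "media.player" is published, so the caller no longer busy-polls and can treat a null binder as a hard failure. The function name and log tag below are illustrative only.

    // Sketch (not part of the change): obtain media.player via waitForService.
    #define LOG_TAG "MediaPlayerLookupSketch"
    #include <binder/IServiceManager.h>
    #include <utils/Log.h>
    #include <utils/String16.h>

    android::sp<android::IBinder> getMediaPlayerBinder() {
        using namespace android;
        sp<IServiceManager> sm = defaultServiceManager();
        // Blocks until the service is registered; returns nullptr on failure.
        sp<IBinder> binder = sm->waitForService(String16("media.player"));
        if (binder == nullptr) {
            ALOGW("media.player service could not be obtained");
        }
        return binder;
    }
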
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index c66861b..649f813 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -49,8 +49,9 @@
         "liblog",
     ],
     header_libs: [
-        "libmedia_helper_headers",
         "libaudio_system_headers",
+        "libhardware_headers",
+        "libmedia_helper_headers",
     ],
     export_header_lib_headers: [
         "libmedia_helper_headers",
diff --git a/media/libmediahelper/AudioParameter.cpp b/media/libmediahelper/AudioParameter.cpp
index 9a8156e..3832e90 100644
--- a/media/libmediahelper/AudioParameter.cpp
+++ b/media/libmediahelper/AudioParameter.cpp
@@ -20,6 +20,7 @@
 #include <utils/Log.h>
 
 #include <media/AudioParameter.h>
+#include <hardware/audio.h>
 #include <system/audio.h>
 
 namespace android {
@@ -32,7 +33,16 @@
 const char * const AudioParameter::keyFrameCount = AUDIO_PARAMETER_STREAM_FRAME_COUNT;
 const char * const AudioParameter::keyInputSource = AUDIO_PARAMETER_STREAM_INPUT_SOURCE;
 const char * const AudioParameter::keyScreenState = AUDIO_PARAMETER_KEY_SCREEN_STATE;
+const char * const AudioParameter::keyScreenRotation = AUDIO_PARAMETER_KEY_ROTATION;
+const char * const AudioParameter::keyClosing = AUDIO_PARAMETER_KEY_CLOSING;
+const char * const AudioParameter::keyExiting = AUDIO_PARAMETER_KEY_EXITING;
+const char * const AudioParameter::keyBtSco = AUDIO_PARAMETER_KEY_BT_SCO;
+const char * const AudioParameter::keyBtScoHeadsetName = AUDIO_PARAMETER_KEY_BT_SCO_HEADSET_NAME;
 const char * const AudioParameter::keyBtNrec = AUDIO_PARAMETER_KEY_BT_NREC;
+const char * const AudioParameter::keyBtScoWb = AUDIO_PARAMETER_KEY_BT_SCO_WB;
+const char * const AudioParameter::keyBtHfpEnable = AUDIO_PARAMETER_KEY_HFP_ENABLE;
+const char * const AudioParameter::keyBtHfpSamplingRate = AUDIO_PARAMETER_KEY_HFP_SET_SAMPLING_RATE;
+const char * const AudioParameter::keyBtHfpVolume = AUDIO_PARAMETER_KEY_HFP_VOLUME;
 const char * const AudioParameter::keyHwAvSync = AUDIO_PARAMETER_HW_AV_SYNC;
 const char * const AudioParameter::keyPresentationId = AUDIO_PARAMETER_STREAM_PRESENTATION_ID;
 const char * const AudioParameter::keyProgramId = AUDIO_PARAMETER_STREAM_PROGRAM_ID;
@@ -50,9 +60,13 @@
         AUDIO_PARAMETER_STREAM_SUP_SAMPLING_RATES;
 const char * const AudioParameter::valueOn = AUDIO_PARAMETER_VALUE_ON;
 const char * const AudioParameter::valueOff = AUDIO_PARAMETER_VALUE_OFF;
+const char * const AudioParameter::valueTrue = AUDIO_PARAMETER_VALUE_TRUE;
+const char * const AudioParameter::valueFalse = AUDIO_PARAMETER_VALUE_FALSE;
 const char * const AudioParameter::valueListSeparator = AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
+const char * const AudioParameter::keyBtA2dpSuspended = AUDIO_PARAMETER_KEY_BT_A2DP_SUSPENDED;
 const char * const AudioParameter::keyReconfigA2dp = AUDIO_PARAMETER_RECONFIG_A2DP;
 const char * const AudioParameter::keyReconfigA2dpSupported = AUDIO_PARAMETER_A2DP_RECONFIG_SUPPORTED;
+const char * const AudioParameter::keyBtLeSuspended = AUDIO_PARAMETER_KEY_BT_LE_SUSPENDED;
 // const char * const AudioParameter::keyDeviceSupportedEncapsulationModes =
 //        AUDIO_PARAMETER_DEVICE_SUP_ENCAPSULATION_MODES;
 // const char * const AudioParameter::keyDeviceSupportedEncapsulationMetadataTypes =
diff --git a/media/libmediahelper/include/media/AudioParameter.h b/media/libmediahelper/include/media/AudioParameter.h
index 41aff7c..3eee854 100644
--- a/media/libmediahelper/include/media/AudioParameter.h
+++ b/media/libmediahelper/include/media/AudioParameter.h
@@ -41,6 +41,7 @@
     //  keyInputSource: to change audio input source, value is an int in audio_source_t
     //     (defined in media/mediarecorder.h)
     //  keyScreenState: either "on" or "off"
+    //  keyScreenRotation: one of: 0, 90, 180, 270
     static const char * const keyRouting;
     static const char * const keySamplingRate;
     static const char * const keyFormat;
@@ -48,12 +49,29 @@
     static const char * const keyFrameCount;
     static const char * const keyInputSource;
     static const char * const keyScreenState;
+    static const char * const keyScreenRotation;
 
+    // keyClosing: "true" on AudioFlinger Thread preExit.  Used by A2DP HAL.
+    // keyExiting: "1" on AudioFlinger Thread preExit.  Used by remote_submix and A2DP HAL.
+    static const char * const keyClosing;
+    static const char * const keyExiting;
+
+    //  keyBtSco: Whether BT SCO is 'on' or 'off'
+    //  keyBtScoHeadsetName: BT SCO headset name (for debugging)
     //  keyBtNrec: BT SCO Noise Reduction + Echo Cancellation parameters
+    //  keyBtScoWb: BT SCO NR wideband mode
+    //  keyHfp...: Parameters of the Hands-Free Profile
+    static const char * const keyBtSco;
+    static const char * const keyBtScoHeadsetName;
+    static const char * const keyBtNrec;
+    static const char * const keyBtScoWb;
+    static const char * const keyBtHfpEnable;
+    static const char * const keyBtHfpSamplingRate;
+    static const char * const keyBtHfpVolume;
+
     //  keyHwAvSync: get HW synchronization source identifier from a device
     //  keyMonoOutput: Enable mono audio playback
     //  keyStreamHwAvSync: set HW synchronization source identifier on a stream
-    static const char * const keyBtNrec;
     static const char * const keyHwAvSync;
     static const char * const keyMonoOutput;
     static const char * const keyStreamHwAvSync;
@@ -84,13 +102,19 @@
 
     static const char * const valueOn;
     static const char * const valueOff;
+    static const char * const valueTrue;
+    static const char * const valueFalse;
 
     static const char * const valueListSeparator;
 
+    // keyBtA2dpSuspended: 'true' or 'false'
     // keyReconfigA2dp: Ask HwModule to reconfigure A2DP offloaded codec
     // keyReconfigA2dpSupported: Query if HwModule supports A2DP offload codec config
+    // keyBtLeSuspended: 'true' or 'false'
+    static const char * const keyBtA2dpSuspended;
     static const char * const keyReconfigA2dp;
     static const char * const keyReconfigA2dpSupported;
+    static const char * const keyBtLeSuspended;
 
     // For querying device supported encapsulation capabilities. All returned values are integer,
     // which are bit fields composed from using encapsulation capability values as position bits.
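
The new keys are meant to be combined with the valueTrue/valueFalse constants through the usual AudioParameter key/value mechanism. A hedged sketch of how a caller might build the BT A2DP suspend parameter string (the helper name is hypothetical; the literal key and value text come from hardware/audio.h):

    // Sketch: build the BT A2DP suspend key/value pair with the new constants.
    #include <media/AudioParameter.h>
    #include <utils/String8.h>

    android::String8 makeA2dpSuspendString(bool suspended) {
        using namespace android;
        AudioParameter param;
        param.add(String8(AudioParameter::keyBtA2dpSuspended),
                  String8(suspended ? AudioParameter::valueTrue
                                    : AudioParameter::valueFalse));
        // The resulting "key=value" string is what eventually reaches the HAL's
        // setParameters() call.
        return param.toString();
    }
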
diff --git a/media/libmediametrics/MediaMetrics.cpp b/media/libmediametrics/MediaMetrics.cpp
index a3c2f1a..2240223 100644
--- a/media/libmediametrics/MediaMetrics.cpp
+++ b/media/libmediametrics/MediaMetrics.cpp
@@ -86,6 +86,11 @@
     if (item != NULL) item->setRate(attr, count, duration);
 }
 
+void mediametrics_setString(mediametrics_handle_t handle, attr_t attr,
+                                 const std::string &string) {
+    mediametrics_setCString(handle, attr, string.c_str());
+}
+
 void mediametrics_setCString(mediametrics_handle_t handle, attr_t attr,
                                  const char *value) {
     Item *item = (Item *) handle;
@@ -152,6 +157,14 @@
     return item->getRate(attr, count, duration, rate);
 }
 
+bool mediametrics_getString(mediametrics_handle_t handle, attr_t attr,
+                                 std::string *string) {
+    Item *item = (Item *) handle;
+    if (item == NULL) return false;
+
+    return item->getString(attr, string);
+}
+
 // NB: caller owns the string that comes back, is responsible for freeing it
 bool mediametrics_getCString(mediametrics_handle_t handle, attr_t attr,
                                  char **value) {
diff --git a/media/libmediametrics/include/media/MediaMetrics.h b/media/libmediametrics/include/media/MediaMetrics.h
index 76abe86..58612a3 100644
--- a/media/libmediametrics/include/media/MediaMetrics.h
+++ b/media/libmediametrics/include/media/MediaMetrics.h
@@ -50,7 +50,7 @@
 void mediametrics_setRate(mediametrics_handle_t handle, attr_t attr,
                           int64_t count, int64_t duration);
 void mediametrics_setCString(mediametrics_handle_t handle, attr_t attr,
-                            const char * value);
+                             const char * value);
 
 // fused get/add/set; if attr wasn't there, it's a simple set.
 // these do not provide atomicity or mutual exclusion, only simpler code sequences.
@@ -95,4 +95,11 @@
 
 __END_DECLS
 
+#ifdef __cplusplus
+#include <string>
+void mediametrics_setString(mediametrics_handle_t handle, attr_t attr,
+                            const std::string &value);
+bool mediametrics_getString(mediametrics_handle_t handle, attr_t attr, std::string *value);
+#endif // __cplusplus
+
 #endif
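
A minimal sketch of how C++ code might use the new std::string overloads, assuming the usual mediametrics_create()/mediametrics_delete() lifecycle from the same header; the metrics key and attribute names below are placeholders, not identifiers defined by this change:

    // Sketch only: exercise the C++-only std::string helpers declared above.
    #include <string>
    #include <media/MediaMetrics.h>

    void recordCodecNameSketch(const std::string &name) {
        // "sample.key" / "codec-name" are hypothetical identifiers.
        mediametrics_handle_t handle = mediametrics_create("sample.key");
        mediametrics_setString(handle, "codec-name", name);

        std::string readBack;
        if (mediametrics_getString(handle, "codec-name", &readBack)) {
            // readBack holds a copy of the stored value; unlike getCString(),
            // there is nothing for the caller to free.
        }
        mediametrics_delete(handle);
    }
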
diff --git a/media/libmediametrics/include/media/MediaMetricsItem.h b/media/libmediametrics/include/media/MediaMetricsItem.h
index de56665..03834d4 100644
--- a/media/libmediametrics/include/media/MediaMetricsItem.h
+++ b/media/libmediametrics/include/media/MediaMetricsItem.h
@@ -1048,6 +1048,9 @@
         }
         return true;
     }
+    bool getString(const char *key, std::string *value) const {
+        return get(key, value);
+    }
     // Caller owns the returned string
     bool getCString(const char *key, char **value) const {
         std::string s;
@@ -1057,9 +1060,6 @@
         }
         return false;
     }
-    bool getString(const char *key, std::string *value) const {
-        return get(key, value);
-    }
 
     const Prop::Elem* get(const char *key) const {
         const Prop *prop = findProp(key);
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index bdf1cbc..3f08be5 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -388,7 +388,9 @@
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP9)
                             ? asString_VP9Profile(pl.mProfile) :
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)
-                            ? asString_AV1Profile(pl.mProfile) : "??";
+                            ? asString_AV1Profile(pl.mProfile) :
+                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_DOLBY_VISION)
+                            ? asString_DolbyVisionProfile(pl.mProfile) : "??";
                     const char *niceLevel =
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG2)
                             ? asString_MPEG2Level(pl.mLevel) :
@@ -405,7 +407,9 @@
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP9)
                             ? asString_VP9Level(pl.mLevel) :
                         mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)
-                            ? asString_AV1Level(pl.mLevel) : "??";
+                            ? asString_AV1Level(pl.mLevel) :
+                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_DOLBY_VISION)
+                            ? asString_DolbyVisionLevel(pl.mLevel) : "??";
 
                     list.add(AStringPrintf("% 5u/% 5u (%s/%s)",
                             pl.mProfile, pl.mLevel, niceProfile, niceLevel));
@@ -1837,7 +1841,6 @@
     } else {
         mAttributes = NULL;
     }
-
     setMinBufferCount();
 }
 
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index ec79b99..cb2007d 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -1254,10 +1254,6 @@
         case OUTPUT_FORMAT_MPEG_4:
         case OUTPUT_FORMAT_WEBM:
         {
-            bool isMPEG4 = true;
-            if (mOutputFormat == OUTPUT_FORMAT_WEBM) {
-                isMPEG4 = false;
-            }
             sp<MetaData> meta = new MetaData;
             setupMPEG4orWEBMMetaData(&meta);
             status = mWriter->start(meta.get());
@@ -1986,6 +1982,10 @@
             format->setString("mime", MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
             break;
 
+        case VIDEO_ENCODER_AV1:
+            format->setString("mime", MEDIA_MIMETYPE_VIDEO_AV1);
+            break;
+
         default:
             CHECK(!"Should not be here, unsupported video encoding.");
             break;
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index 5abac81..5e95c87 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -46,6 +46,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libmediaplayerservice",
+        vector: "remote",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
diff --git a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
index 366956c..e8556dd 100644
--- a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
+++ b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
@@ -28,11 +28,13 @@
 
 namespace android {
 
+const int32_t INVALID_DISPLAY_ID = -1;
+
 AWakeLock::AWakeLock() :
     mPowerManager(NULL),
     mWakeLockToken(NULL),
     mWakeLockCount(0),
-    mDeathRecipient(new PMDeathRecipient(this)) {}
+    mDeathRecipient(new PMDeathRecipient(this)){}
 
 AWakeLock::~AWakeLock() {
     if (mPowerManager != NULL) {
@@ -59,10 +61,15 @@
         if (mPowerManager != NULL) {
             sp<IBinder> binder = new BBinder();
             int64_t token = IPCThreadState::self()->clearCallingIdentity();
-            binder::Status status = mPowerManager->acquireWakeLockAsync(
-                    binder, POWERMANAGER_PARTIAL_WAKE_LOCK,
-                    String16("AWakeLock"), String16("media"),
-                    {} /* workSource */, {} /* historyTag */);
+            binder::Status status = mPowerManager->acquireWakeLock(
+                binder,
+                /*flags=*/ POWERMANAGER_PARTIAL_WAKE_LOCK,
+                /*tag=*/ String16("AWakeLock"),
+                /*packageName=*/ String16("media"),
+                /*ws=*/ {},
+                /*historyTag=*/ {},
+                /*displayId=*/ INVALID_DISPLAY_ID,
+                /*callback=*/ NULL);
             IPCThreadState::self()->restoreCallingIdentity(token);
             if (status.isOk()) {
                 mWakeLockToken = binder;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index e5f2b2b..485923f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -2224,6 +2224,11 @@
                 -ret, strerror(-ret));
             return ret;
         }
+        if (mVideoDecoder != NULL) {
+            sp<AMessage> params = new AMessage();
+            params->setInt32("android._video-scaling", mode);
+            mVideoDecoder->setParameters(params);
+        }
     }
     return OK;
 }
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 8da09c4..f4143da 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -459,6 +459,14 @@
         codecParams->setFloat("operating-rate", decodeFrameRate * mPlaybackSpeed);
         mCodec->setParameters(codecParams);
     }
+
+    int32_t videoScalingMode;
+    if (params->findInt32("android._video-scaling", &videoScalingMode)
+            && mCodec != NULL) {
+        sp<AMessage> codecParams = new AMessage();
+        codecParams->setInt32("android._video-scaling", videoScalingMode);
+        mCodec->setParameters(codecParams);
+    }
 }
 
 void NuPlayer::Decoder::onSetRenderer(const sp<Renderer> &renderer) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 9dae16e..bcb4756 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -157,7 +157,8 @@
       mTotalBuffersQueued(0),
       mLastAudioBufferDrained(0),
       mUseAudioCallback(false),
-      mWakeLock(new AWakeLock()) {
+      mWakeLock(new AWakeLock()),
+      mNeedVideoClearAnchor(false) {
     CHECK(mediaClock != NULL);
     mPlaybackRate = mPlaybackSettings.mSpeed;
     mMediaClock->setPlaybackRate(mPlaybackRate);
@@ -234,6 +235,10 @@
             return err;
         }
     }
+
+    if (!mHasAudio && mHasVideo) {
+        mNeedVideoClearAnchor = true;
+    }
     mPlaybackSettings = rate;
     mPlaybackRate = rate.mSpeed;
     mMediaClock->setPlaybackRate(mPlaybackRate);
@@ -327,7 +332,6 @@
             mNextVideoTimeMediaUs = -1;
         }
 
-        mMediaClock->clearAnchor();
         mVideoLateByUs = 0;
         mSyncQueues = false;
     }
@@ -1371,6 +1375,10 @@
 
     {
         Mutex::Autolock autoLock(mLock);
+        if (mNeedVideoClearAnchor && !mHasAudio) {
+            mNeedVideoClearAnchor = false;
+            clearAnchorTime();
+        }
         if (mAnchorTimeMediaUs < 0) {
             mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs);
             mAnchorTimeMediaUs = mediaTimeUs;
@@ -1525,6 +1533,8 @@
                         mNextVideoTimeMediaUs + kDefaultVideoFrameIntervalUs);
             }
         }
+    } else {
+        mHasVideo = false;
     }
 }
 
@@ -1686,6 +1696,7 @@
         } else {
             notifyComplete = mNotifyCompleteVideo;
             mNotifyCompleteVideo = false;
+            mHasVideo = false;
         }
 
         // If we're currently syncing the queues, i.e. dropping audio while
@@ -1698,7 +1709,17 @@
         // is flushed.
         syncQueuesDone_l();
     }
-    clearAnchorTime();
+
+    if (audio && mDrainVideoQueuePending) {
+        // Audio should not clear the anchor (MediaClock) directly, because the
+        // video path's postDrainVideoQueue has armed a kWhatDrainVideoQueue
+        // message on the MediaClock timer; clearing the anchor without updating
+        // it again immediately could block that message from being posted.
+        // So postpone the clear to the video path, which updates the anchor
+        // right after clearing it.
+        mNeedVideoClearAnchor = true;
+    } else {
+        clearAnchorTime();
+    }
 
     ALOGV("flushing %s", audio ? "audio" : "video");
     if (audio) {
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
index 2ca040f..f0c0a35 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
@@ -332,6 +332,9 @@
     int64_t getDurationUsIfPlayedAtSampleRate(uint32_t numFrames);
 
     DISALLOW_EVIL_CONSTRUCTORS(Renderer);
+
+private:
+    bool mNeedVideoClearAnchor;
 };
 
 } // namespace android
diff --git a/media/libnbaio/Android.bp b/media/libnbaio/Android.bp
index 89e9806..434ae00 100644
--- a/media/libnbaio/Android.bp
+++ b/media/libnbaio/Android.bp
@@ -49,7 +49,7 @@
     defaults: ["libnbaio_mono_defaults"],
 }
 
-cc_library_shared {
+cc_library {
     name: "libnbaio",
     defaults: ["libnbaio_mono_defaults"],
     srcs: [
diff --git a/media/libnbaio/AudioStreamOutSink.cpp b/media/libnbaio/AudioStreamOutSink.cpp
index 8c54a3a..0ab5874 100644
--- a/media/libnbaio/AudioStreamOutSink.cpp
+++ b/media/libnbaio/AudioStreamOutSink.cpp
@@ -103,19 +103,24 @@
 void AudioStreamOutSink::startMelComputation(const sp<audio_utils::MelProcessor>& processor)
 {
     ALOGV("%s start mel computation for device %d", __func__, processor->getDeviceId());
-    mMelProcessor = processor;
-    // update format for MEL computation
+
+    mMelProcessor.store(processor);
     if (processor) {
-        processor->updateAudioFormat(mFormat.mSampleRate,  mFormat.mChannelCount, mFormat.mFormat);
+        // update format for MEL computation
+        processor->updateAudioFormat(mFormat.mSampleRate,
+                                     mFormat.mChannelCount,
+                                     mFormat.mFormat);
+        processor->resume();
     }
+
 }
 
 void AudioStreamOutSink::stopMelComputation()
 {
     auto melProcessor = mMelProcessor.load();
     if (melProcessor != nullptr) {
-        ALOGV("%s stop mel computation for device %d", __func__, melProcessor->getDeviceId());
-        mMelProcessor = nullptr;
+        ALOGV("%s pause mel computation for device %d", __func__, melProcessor->getDeviceId());
+        melProcessor->pause();
     }
 }
 
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 505775b..a91b24a 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -293,6 +293,8 @@
         }
     }
 
+    void setSurfaceParameters(const sp<AMessage> &msg);
+
 private:
     // Handles an OMX message. Returns true iff message was handled.
     bool onOMXMessage(const sp<AMessage> &msg);
@@ -6511,6 +6513,59 @@
     postFillThisBuffer(eligible);
 }
 
+void ACodec::BaseState::setSurfaceParameters(const sp<AMessage> &msg) {
+    sp<AMessage> params;
+    CHECK(msg->findMessage("params", &params));
+
+    status_t err = mCodec->setSurfaceParameters(params);
+    if (err != OK) {
+        ALOGE("[%s] Unable to set input surface parameters (err %d)",
+                mCodec->mComponentName.c_str(),
+                err);
+        return;
+    }
+
+    int64_t timeOffsetUs;
+    if (params->findInt64(PARAMETER_KEY_OFFSET_TIME, &timeOffsetUs)) {
+        params->removeEntryAt(params->findEntryByName(PARAMETER_KEY_OFFSET_TIME));
+
+        if (params->countEntries() == 0) {
+            msg->removeEntryAt(msg->findEntryByName("params"));
+            return;
+        }
+    }
+
+    int64_t skipFramesBeforeUs;
+    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
+        params->removeEntryAt(params->findEntryByName("skip-frames-before"));
+
+        if (params->countEntries() == 0) {
+            msg->removeEntryAt(msg->findEntryByName("params"));
+            return;
+        }
+    }
+
+    int32_t dropInputFrames;
+    if (params->findInt32(PARAMETER_KEY_SUSPEND, &dropInputFrames)) {
+        params->removeEntryAt(params->findEntryByName(PARAMETER_KEY_SUSPEND));
+
+        if (params->countEntries() == 0) {
+            msg->removeEntryAt(msg->findEntryByName("params"));
+            return;
+        }
+    }
+
+    int64_t stopTimeUs;
+    if (params->findInt64("stop-time-us", &stopTimeUs)) {
+        params->removeEntryAt(params->findEntryByName("stop-time-us"));
+
+        if (params->countEntries() == 0) {
+            msg->removeEntryAt(msg->findEntryByName("params"));
+            return;
+        }
+    }
+}
+
 bool ACodec::BaseState::onOMXFillBufferDone(
         IOMX::buffer_id bufferID,
         size_t rangeOffset, size_t rangeLength,
@@ -7411,6 +7466,13 @@
 bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatSetParameters:
+        {
+            BaseState::setSurfaceParameters(msg);
+            if (msg->countEntries() > 0) {
+                mCodec->deferMessage(msg);
+            }
+            return true;
+        }
         case kWhatShutdown:
         {
             mCodec->deferMessage(msg);
@@ -7487,6 +7549,13 @@
 bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatSetParameters:
+        {
+            BaseState::setSurfaceParameters(msg);
+            if (msg->countEntries() > 0) {
+                mCodec->deferMessage(msg);
+            }
+            return true;
+        }
         case kWhatShutdown:
         {
             mCodec->deferMessage(msg);
@@ -7766,27 +7835,7 @@
     return handled;
 }
 
-status_t ACodec::setParameters(const sp<AMessage> &params) {
-    int32_t videoBitrate;
-    if (params->findInt32("video-bitrate", &videoBitrate)) {
-        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
-        InitOMXParams(&configParams);
-        configParams.nPortIndex = kPortIndexOutput;
-        configParams.nEncodeBitrate = videoBitrate;
-
-        status_t err = mOMXNode->setConfig(
-                OMX_IndexConfigVideoBitrate,
-                &configParams,
-                sizeof(configParams));
-
-        if (err != OK) {
-            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
-                   videoBitrate, err);
-
-            return err;
-        }
-    }
-
+status_t ACodec::setSurfaceParameters(const sp<AMessage> &params) {
     int64_t timeOffsetUs;
     if (params->findInt64(PARAMETER_KEY_OFFSET_TIME, &timeOffsetUs)) {
         if (mGraphicBufferSource == NULL) {
@@ -7874,9 +7923,41 @@
         mInputFormat->setInt64("android._stop-time-offset-us", stopTimeOffsetUs);
     }
 
+    return OK;
+}
+
+status_t ACodec::setParameters(const sp<AMessage> &params) {
+    status_t err;
+
+    int32_t videoBitrate;
+    if (params->findInt32("video-bitrate", &videoBitrate)) {
+        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
+        InitOMXParams(&configParams);
+        configParams.nPortIndex = kPortIndexOutput;
+        configParams.nEncodeBitrate = videoBitrate;
+
+        err = mOMXNode->setConfig(
+                OMX_IndexConfigVideoBitrate,
+                &configParams,
+                sizeof(configParams));
+
+        if (err != OK) {
+            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
+                   videoBitrate, err);
+
+            return err;
+        }
+    }
+
+    err = setSurfaceParameters(params);
+    if (err != OK) {
+        ALOGE("Failed to set input surface parameters (err %d)", err);
+        return err;
+    }
+
     int32_t tmp;
     if (params->findInt32("request-sync", &tmp)) {
-        status_t err = requestIDRFrame();
+        err = requestIDRFrame();
 
         if (err != OK) {
             ALOGE("Requesting a sync frame failed w/ err %d", err);
@@ -7891,7 +7972,7 @@
         rateFloat = (float) rateInt; // 16MHz (FLINTMAX) is OK for upper bound.
     }
     if (rateFloat > 0) {
-        status_t err = setOperatingRate(rateFloat, mIsVideo);
+        err = setOperatingRate(rateFloat, mIsVideo);
         if (err != OK) {
             ALOGI("Failed to set parameter 'operating-rate' (err %d)", err);
         }
@@ -7900,7 +7981,7 @@
     int32_t intraRefreshPeriod = 0;
     if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
             && intraRefreshPeriod > 0) {
-        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
+        err = setIntraRefreshPeriod(intraRefreshPeriod, false);
         if (err != OK) {
             ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                     mComponentName.c_str());
@@ -7910,7 +7991,7 @@
 
     int32_t lowLatency = 0;
     if (params->findInt32("low-latency", &lowLatency)) {
-        status_t err = setLowLatency(lowLatency);
+        err = setLowLatency(lowLatency);
         if (err != OK) {
             return err;
         }
@@ -7918,7 +7999,7 @@
 
     int32_t latency = 0;
     if (params->findInt32("latency", &latency) && latency > 0) {
-        status_t err = setLatency(latency);
+        err = setLatency(latency);
         if (err != OK) {
             ALOGI("[%s] failed setLatency. Failure is fine since this key is optional",
                     mComponentName.c_str());
@@ -7930,7 +8011,7 @@
     if (params->findInt32("audio-presentation-presentation-id", &presentationId)) {
         int32_t programId = -1;
         params->findInt32("audio-presentation-program-id", &programId);
-        status_t err = setAudioPresentation(presentationId, programId);
+        err = setAudioPresentation(presentationId, programId);
         if (err != OK) {
             ALOGI("[%s] failed setAudioPresentation. Failure is fine since this key is optional",
                     mComponentName.c_str());
@@ -8003,7 +8084,7 @@
     {
         int32_t tunnelPeek = 0;
         if (params->findInt32(TUNNEL_PEEK_KEY, &tunnelPeek)) {
-            status_t err = setTunnelPeek(tunnelPeek);
+            err = setTunnelPeek(tunnelPeek);
             if (err != OK) {
                 return err;
             }
@@ -8012,7 +8093,7 @@
     {
         int32_t tunnelPeekSetLegacy = 0;
         if (params->findInt32(TUNNEL_PEEK_SET_LEGACY_KEY, &tunnelPeekSetLegacy)) {
-            status_t err = setTunnelPeekLegacy(tunnelPeekSetLegacy);
+            err = setTunnelPeekLegacy(tunnelPeekSetLegacy);
             if (err != OK) {
                 return err;
             }
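
With setParameters() split as above, the input-surface keys handled by setSurfaceParameters() can take effect while the codec is still moving through the Loaded->Idle and Idle->Executing states, and any remaining entries are deferred as before. A hedged sketch of a client using the same PARAMETER_KEY_* constants (the helper is illustrative, not part of this change):

    // Sketch: ask an encoder fed by an input surface to drop incoming frames.
    #include <media/stagefright/MediaCodec.h>
    #include <media/stagefright/MediaCodecConstants.h>
    #include <media/stagefright/foundation/AMessage.h>

    void suspendInputSurfaceSketch(const android::sp<android::MediaCodec> &codec) {
        using namespace android;
        sp<AMessage> params = new AMessage;
        params->setInt32(PARAMETER_KEY_SUSPEND, 1);      // "drop-input-frames"
        params->setInt64(PARAMETER_KEY_OFFSET_TIME, 0);  // "time-offset-us"
        codec->setParameters(params);
    }
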
diff --git a/media/libstagefright/ACodecBufferChannel.cpp b/media/libstagefright/ACodecBufferChannel.cpp
index 57f26d0..8f2bed2 100644
--- a/media/libstagefright/ACodecBufferChannel.cpp
+++ b/media/libstagefright/ACodecBufferChannel.cpp
@@ -347,7 +347,8 @@
         size_t offset,
         const CryptoPlugin::SubSample *subSamples,
         size_t numSubSamples,
-        const sp<MediaCodecBuffer> &buffer) {
+        const sp<MediaCodecBuffer> &buffer,
+        AString* errorDetailMsg) {
     std::shared_ptr<const std::vector<const BufferInfo>> array(
             std::atomic_load(&mInputBuffers));
     BufferInfoIterator it = findClientBuffer(array, buffer);
@@ -371,7 +372,6 @@
     ssize_t result = -1;
     ssize_t codecDataOffset = 0;
     if (mCrypto != NULL) {
-        AString errorDetailMsg;
         hardware::drm::V1_0::DestinationBuffer destination;
         if (secure) {
             destination.type = DrmBufferType::NATIVE_HANDLE;
@@ -387,7 +387,7 @@
 
         result = mCrypto->decrypt(key, iv, mode, pattern,
                 source, it->mClientBuffer->offset(),
-                subSamples, numSubSamples, destination, &errorDetailMsg);
+                subSamples, numSubSamples, destination, errorDetailMsg);
 
         if (result < 0) {
             return result;
@@ -441,7 +441,9 @@
                     result = (ssize_t)_bytesWritten;
                     detailedError = _detailedError;
                 });
-
+        if (errorDetailMsg) {
+            errorDetailMsg->setTo(detailedError.c_str(), detailedError.size());
+        }
         if (!returnVoid.isOk() || status != Status::OK || result < 0) {
             ALOGE("descramble failed, trans=%s, status=%d, result=%zd",
                     returnVoid.description().c_str(), status, result);
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index ddc0f2f..a26fcbe 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -238,6 +238,7 @@
         "CallbackMediaSource.cpp",
         "CameraSource.cpp",
         "CameraSourceTimeLapse.cpp",
+        "CodecErrorLog.cpp",
         "CryptoAsync.cpp",
         "FrameDecoder.cpp",
         "HevcUtils.cpp",
@@ -269,6 +270,7 @@
         "SurfaceUtils.cpp",
         "ThrottledSource.cpp",
         "Utils.cpp",
+        "VideoRenderQualityTracker.cpp",
         "VideoFrameSchedulerBase.cpp",
         "VideoFrameScheduler.cpp",
     ],
@@ -312,6 +314,7 @@
         "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
         "packagemanager_aidl-cpp",
+        "server_configurable_flags",
     ],
 
     static_libs: [
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 88e3362..967c316 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -150,7 +150,8 @@
 
     if (camera == 0) {
         mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                /*forceSlowJpegMode*/false);
         if (mCamera == 0) return -EBUSY;
         mCameraFlags &= ~FLAGS_HOT_CAMERA;
     } else {
diff --git a/media/libstagefright/CodecErrorLog.cpp b/media/libstagefright/CodecErrorLog.cpp
new file mode 100644
index 0000000..9785623
--- /dev/null
+++ b/media/libstagefright/CodecErrorLog.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecErrorLog"
+
+#include <log/log.h>
+#include <media/stagefright/CodecErrorLog.h>
+
+namespace android {
+
+void CodecErrorLog::log(const char *tag, const char *message) {
+    std::unique_lock lock(mLock);
+    ALOG(LOG_ERROR, tag, "%s", message);
+    mStream << message << std::endl;
+}
+
+void CodecErrorLog::log(const char *tag, const std::string &message) {
+    log(tag, message.c_str());
+}
+
+std::string CodecErrorLog::extract() {
+    std::unique_lock lock(mLock);
+    std::string msg = mStream.str();
+    mStream.str("");
+    return msg;
+}
+
+void CodecErrorLog::clear() {
+    std::unique_lock lock(mLock);
+    mStream.str("");
+}
+
+}  // namespace android
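
CodecErrorLog accumulates error text so that it can later be surfaced alongside an error notification. A small usage sketch, assuming the class declaration in media/stagefright/CodecErrorLog.h matches the definitions above (the calling code is illustrative):

    // Sketch: collect codec error details and pull them out as one string.
    #include <string>
    #include <media/stagefright/CodecErrorLog.h>

    void errorPathSketch(android::CodecErrorLog &errorLog) {
        // Each log() call both writes to logcat via ALOG(LOG_ERROR, ...) and
        // appends the message to the internal stream.
        errorLog.log("CodecSketch", "no more input buffers available");
        errorLog.log("CodecSketch", std::string("component signalled a fatal error"));

        // extract() returns everything accumulated so far and clears the stream.
        std::string details = errorLog.extract();
        (void)details;  // e.g. attach to an onError notification
    }
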
diff --git a/media/libstagefright/CryptoAsync.cpp b/media/libstagefright/CryptoAsync.cpp
index 32fd3be..8b5c8ed 100644
--- a/media/libstagefright/CryptoAsync.cpp
+++ b/media/libstagefright/CryptoAsync.cpp
@@ -153,7 +153,7 @@
     // attach buffer
     err = channel->attachEncryptedBuffer(
         memory, secure, key, iv, mode, pattern,
-        offset, subSamples, numSubSamples, buffer);
+        offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
 
     // a generic error
     auto handleError = [this, &err, &msg]() {
diff --git a/media/libstagefright/FrameCaptureLayer.cpp b/media/libstagefright/FrameCaptureLayer.cpp
index d2cfd41..4e71943 100644
--- a/media/libstagefright/FrameCaptureLayer.cpp
+++ b/media/libstagefright/FrameCaptureLayer.cpp
@@ -64,14 +64,6 @@
     return updatedDataspace;
 }
 
-bool isHdrY410(const BufferItem &bi) {
-    ui::Dataspace dataspace = translateDataspace(static_cast<ui::Dataspace>(bi.mDataSpace));
-    // pixel format is HDR Y410 masquerading as RGBA_1010102
-    return ((dataspace == ui::Dataspace::BT2020_ITU_PQ ||
-            dataspace == ui::Dataspace::BT2020_ITU_HLG) &&
-            bi.mGraphicBuffer->getPixelFormat() == HAL_PIXEL_FORMAT_RGBA_1010102);
-}
-
 struct FrameCaptureLayer::BufferLayer : public FrameCaptureProcessor::Layer {
     BufferLayer(const BufferItem &bi) : mBufferItem(bi) {}
     void getLayerSettings(
@@ -95,7 +87,6 @@
     layerSettings->source.buffer.fence = mBufferItem.mFence;
     layerSettings->source.buffer.textureName = textureName;
     layerSettings->source.buffer.usePremultipliedAlpha = false;
-    layerSettings->source.buffer.isY410BT2020 = isHdrY410(mBufferItem);
     bool hasSmpte2086 = mBufferItem.mHdrMetadata.validTypes & HdrMetadata::SMPTE2086;
     bool hasCta861_3 = mBufferItem.mHdrMetadata.validTypes & HdrMetadata::CTA861_3;
     layerSettings->source.buffer.maxMasteringLuminance = hasSmpte2086
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 6e97bf7..b5bd975 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -240,6 +240,9 @@
 
     sp<IMemory> metaMem =
             allocMetaFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp, bitDepth);
+    if (metaMem == nullptr) {
+        return NULL;
+    }
 
     // try to fill sequence meta's duration based on average frame rate,
     // default to 33ms if frame rate is unavailable.
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index c93d033..2cd3768 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -31,7 +31,6 @@
 #include <utils/Log.h>
 
 #include <functional>
-#include <fcntl.h>
 
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -564,6 +563,10 @@
     mResetStatus = OK;
     mPreAllocFirstTime = true;
     mPrevAllTracksTotalMetaDataSizeEstimate = 0;
+    mIsFirstChunk = false;
+    mDone = false;
+    mThread = 0;
+    mDriftTimeUs = 0;
 
     // Following variables only need to be set for the first recording session.
     // And they will stay the same for all the recording sessions.
@@ -3581,7 +3584,7 @@
                             (const uint8_t *)buffer->data()
                                 + buffer->range_offset(),
                             buffer->range_length());
-                } else if (mIsMPEG4) {
+                } else if (mIsMPEG4 || mIsAv1) {
                     err = copyCodecSpecificData((const uint8_t *)buffer->data() + buffer->range_offset(),
                             buffer->range_length());
                 }
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e5cc991..d8c356a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -19,20 +19,18 @@
 #define LOG_TAG "MediaCodec"
 #include <utils/Log.h>
 
-#include <set>
-#include <random>
-#include <stdlib.h>
-
-#include <inttypes.h>
-#include <stdlib.h>
 #include <dlfcn.h>
+#include <inttypes.h>
+#include <future>
+#include <random>
+#include <set>
+#include <string>
 
 #include <C2Buffer.h>
 
 #include "include/SoftwareRenderer.h"
-#include "PlaybackDurationAccumulator.h"
 
-#include <android/binder_manager.h>
+#include <android/api-level.h>
 #include <android/content/pm/IPackageManagerNative.h>
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -42,6 +40,7 @@
 #include <android/binder_ibinder.h>
 #include <android/binder_manager.h>
 #include <android/dlext.h>
+#include <android-base/stringprintf.h>
 #include <binder/IMemory.h>
 #include <binder/IServiceManager.h>
 #include <binder/MemoryDealer.h>
@@ -74,7 +73,6 @@
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaCodecList.h>
-#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/OMXClient.h>
@@ -82,6 +80,7 @@
 #include <media/stagefright/SurfaceUtils.h>
 #include <nativeloader/dlext_namespaces.h>
 #include <private/android_filesystem_config.h>
+#include <server_configurable_flags/get_flags.h>
 #include <utils/Singleton.h>
 
 namespace android {
@@ -91,6 +90,8 @@
 using aidl::android::media::IResourceManagerClient;
 using aidl::android::media::IResourceManagerService;
 using aidl::android::media::ClientInfoParcel;
+using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
+using JudderEvent = VideoRenderQualityTracker::JudderEvent;
 
 // key for media statistics
 static const char *kCodecKeyName = "codec";
@@ -108,7 +109,9 @@
 static const char *kCodecModeImage = "image";
 static const char *kCodecModeUnknown = "unknown";
 static const char *kCodecEncoder = "android.media.mediacodec.encoder"; /* 0,1 */
+static const char *kCodecHardware = "android.media.mediacodec.hardware"; /* 0,1 */
 static const char *kCodecSecure = "android.media.mediacodec.secure";   /* 0, 1 */
+static const char *kCodecTunneled = "android.media.mediacodec.tunneled"; /* 0,1 */
 static const char *kCodecWidth = "android.media.mediacodec.width";     /* 0..n */
 static const char *kCodecHeight = "android.media.mediacodec.height";   /* 0..n */
 static const char *kCodecRotation = "android.media.mediacodec.rotation-degrees";  /* 0/90/180/270 */
@@ -117,15 +120,6 @@
 static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
 static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
 static const char *kCodecPriority = "android.media.mediacodec.priority";
-static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
-static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
-static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
-static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
-static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
-static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
-static const char *kCodecHDRStaticInfo = "android.media.mediacodec.hdr-static-info";
-static const char *kCodecHDR10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
-static const char *kCodecHDRFormat = "android.media.mediacodec.hdr-format";
 
 // Min/Max QP before shaping
 static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
@@ -163,6 +157,7 @@
 static const char *kCodecLatencyUnknown = "android.media.mediacodec.latency.unknown";
 static const char *kCodecQueueSecureInputBufferError = "android.media.mediacodec.queueSecureInputBufferError";
 static const char *kCodecQueueInputBufferError = "android.media.mediacodec.queueInputBufferError";
+static const char *kCodecComponentColorFormat = "android.media.mediacodec.component-color-format";
 
 static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on";  /* 0..n */
 static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off";  /* 0..n */
@@ -174,6 +169,29 @@
 static const char *kCodecVideoInputBytes = "android.media.mediacodec.video.input.bytes";
 static const char *kCodecVideoInputFrames = "android.media.mediacodec.video.input.frames";
 static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";
+// HDR metrics
+static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
+static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
+static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
+static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
+static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
+static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
+static const char *kCodecHdrStaticInfo = "android.media.mediacodec.hdr-static-info";
+static const char *kCodecHdr10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
+static const char *kCodecHdrFormat = "android.media.mediacodec.hdr-format";
+// array/sync/async/block modes
+static const char *kCodecArrayMode = "android.media.mediacodec.array-mode";
+static const char *kCodecOperationMode = "android.media.mediacodec.operation-mode";
+static const char *kCodecOutputSurface = "android.media.mediacodec.output-surface";
+// max size configured by the app
+static const char *kCodecAppMaxInputSize = "android.media.mediacodec.app-max-input-size";
+// max size actually used
+static const char *kCodecUsedMaxInputSize = "android.media.mediacodec.used-max-input-size";
+// max size suggested by the codec
+static const char *kCodecCodecMaxInputSize = "android.media.mediacodec.codec-max-input-size";
+static const char *kCodecFlushCount = "android.media.mediacodec.flush-count";
+static const char *kCodecSetSurfaceCount = "android.media.mediacodec.set-surface-count";
+static const char *kCodecResolutionChangeCount = "android.media.mediacodec.resolution-change-count";
 
 // the kCodecRecent* fields appear only in getMetrics() results
 static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max";      /* in us */
@@ -181,13 +199,72 @@
 static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg";      /* in us */
 static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
 static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist";    /* in us */
-static const char *kCodecPlaybackDurationSec =
-        "android.media.mediacodec.playback-duration-sec"; /* in sec */
 
 /* -1: shaper disabled
    >=0: number of fields changed */
 static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";
 
+// Render metrics
+static const char *kCodecPlaybackDurationSec = "android.media.mediacodec.playback-duration-sec";
+static const char *kCodecFirstRenderTimeUs = "android.media.mediacodec.first-render-time-us";
+static const char *kCodecFramesReleased = "android.media.mediacodec.frames-released";
+static const char *kCodecFramesRendered = "android.media.mediacodec.frames-rendered";
+static const char *kCodecFramesDropped = "android.media.mediacodec.frames-dropped";
+static const char *kCodecFramesSkipped = "android.media.mediacodec.frames-skipped";
+static const char *kCodecFramerateContent = "android.media.mediacodec.framerate-content";
+static const char *kCodecFramerateDesired = "android.media.mediacodec.framerate-desired";
+static const char *kCodecFramerateActual = "android.media.mediacodec.framerate-actual";
+// Freeze
+static const char *kCodecFreezeCount = "android.media.mediacodec.freeze-count";
+static const char *kCodecFreezeScore = "android.media.mediacodec.freeze-score";
+static const char *kCodecFreezeRate = "android.media.mediacodec.freeze-rate";
+static const char *kCodecFreezeDurationMsAvg = "android.media.mediacodec.freeze-duration-ms-avg";
+static const char *kCodecFreezeDurationMsMax = "android.media.mediacodec.freeze-duration-ms-max";
+static const char *kCodecFreezeDurationMsHistogram =
+        "android.media.mediacodec.freeze-duration-ms-histogram";
+static const char *kCodecFreezeDurationMsHistogramBuckets =
+        "android.media.mediacodec.freeze-duration-ms-histogram-buckets";
+static const char *kCodecFreezeDistanceMsAvg = "android.media.mediacodec.freeze-distance-ms-avg";
+static const char *kCodecFreezeDistanceMsHistogram =
+        "android.media.mediacodec.freeze-distance-ms-histogram";
+static const char *kCodecFreezeDistanceMsHistogramBuckets =
+        "android.media.mediacodec.freeze-distance-ms-histogram-buckets";
+// Judder
+static const char *kCodecJudderCount = "android.media.mediacodec.judder-count";
+static const char *kCodecJudderScore = "android.media.mediacodec.judder-score";
+static const char *kCodecJudderRate = "android.media.mediacodec.judder-rate";
+static const char *kCodecJudderScoreAvg = "android.media.mediacodec.judder-score-avg";
+static const char *kCodecJudderScoreMax = "android.media.mediacodec.judder-score-max";
+static const char *kCodecJudderScoreHistogram = "android.media.mediacodec.judder-score-histogram";
+static const char *kCodecJudderScoreHistogramBuckets =
+        "android.media.mediacodec.judder-score-histogram-buckets";
+// Freeze event
+static const char *kCodecFreezeEventCount = "android.media.mediacodec.freeze-event-count";
+static const char *kFreezeEventKeyName = "freeze";
+static const char *kFreezeEventInitialTimeUs = "android.media.mediacodec.freeze.initial-time-us";
+static const char *kFreezeEventDurationMs = "android.media.mediacodec.freeze.duration-ms";
+static const char *kFreezeEventCount = "android.media.mediacodec.freeze.count";
+static const char *kFreezeEventAvgDurationMs = "android.media.mediacodec.freeze.avg-duration-ms";
+static const char *kFreezeEventAvgDistanceMs = "android.media.mediacodec.freeze.avg-distance-ms";
+static const char *kFreezeEventDetailsDurationMs =
+        "android.media.mediacodec.freeze.details-duration-ms";
+static const char *kFreezeEventDetailsDistanceMs =
+        "android.media.mediacodec.freeze.details-distance-ms";
+// Judder event
+static const char *kCodecJudderEventCount = "android.media.mediacodec.judder-event-count";
+static const char *kJudderEventKeyName = "judder";
+static const char *kJudderEventInitialTimeUs = "android.media.mediacodec.judder.initial-time-us";
+static const char *kJudderEventDurationMs = "android.media.mediacodec.judder.duration-ms";
+static const char *kJudderEventCount = "android.media.mediacodec.judder.count";
+static const char *kJudderEventAvgScore = "android.media.mediacodec.judder.avg-score";
+static const char *kJudderEventAvgDistanceMs = "android.media.mediacodec.judder.avg-distance-ms";
+static const char *kJudderEventDetailsActualDurationUs =
+        "android.media.mediacodec.judder.details-actual-duration-us";
+static const char *kJudderEventDetailsContentDurationUs =
+        "android.media.mediacodec.judder.details-content-duration-us";
+static const char *kJudderEventDetailsDistanceMs =
+        "android.media.mediacodec.judder.details-distance-ms";
+
 // XXX suppress until we get our representation right
 static bool kEmitHistogram = false;
 
@@ -212,6 +289,10 @@
 
 ////////////////////////////////////////////////////////////////////////////////
 
+/*
+ * Implementation of the IResourceManagerClient interface that allows
+ * the ResourceManagerService to reclaim this MediaCodec.
+ */
 struct ResourceManagerClient : public BnResourceManagerClient {
     explicit ResourceManagerClient(MediaCodec* codec, int32_t pid, int32_t uid) :
             mMediaCodec(codec), mPid(pid), mUid(uid) {}
@@ -224,7 +305,9 @@
             std::shared_ptr<IResourceManagerService> service =
                     IResourceManagerService::fromBinder(binder);
             if (service == nullptr) {
-                ALOGW("MediaCodec::ResourceManagerClient unable to find ResourceManagerService");
+                ALOGE("MediaCodec::ResourceManagerClient unable to find ResourceManagerService");
+                *_aidl_return = false;
+                return Status::fromStatus(STATUS_INVALID_OPERATION);
             }
             ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                 .uid = static_cast<int32_t>(mUid),
@@ -272,21 +355,25 @@
     DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
 };
 
-struct MediaCodec::ResourceManagerServiceProxy : public RefBase {
+/*
+ * Proxy that handles all communication with the ResourceManagerService
+ * on behalf of MediaCodec.
+ */
+struct MediaCodec::ResourceManagerServiceProxy :
+    public std::enable_shared_from_this<ResourceManagerServiceProxy> {
+
+    // BinderDiedContext defines the cookie that is passed to the DeathRecipient.
+    // Since it can carry more context than a raw pointer, we can verify that
+    // the ResourceManagerServiceProxy is still in scope before dereferencing
+    // it upon binder death.
+    struct BinderDiedContext {
+        std::weak_ptr<ResourceManagerServiceProxy> mRMServiceProxy;
+    };
+
     ResourceManagerServiceProxy(pid_t pid, uid_t uid,
             const std::shared_ptr<IResourceManagerClient> &client);
-    virtual ~ResourceManagerServiceProxy();
-
+    ~ResourceManagerServiceProxy();
     status_t init();
-
-    // implements DeathRecipient
-    static void BinderDiedCallback(void* cookie);
-    void binderDied();
-    static Mutex sLockCookies;
-    static std::set<void*> sCookies;
-    static void addCookie(void* cookie);
-    static void removeCookie(void* cookie);
-
     void addResource(const MediaResourceParcel &resource);
     void removeResource(const MediaResourceParcel &resource);
     void removeClient();
@@ -295,56 +382,99 @@
     void notifyClientCreated();
     void notifyClientStarted(ClientConfigParcel& clientConfig);
     void notifyClientStopped(ClientConfigParcel& clientConfig);
+    void notifyClientConfigChanged(ClientConfigParcel& clientConfig);
 
     inline void setCodecName(const char* name) {
         mCodecName = name;
     }
 
 private:
-    Mutex mLock;
+    // To get the binder interface to ResourceManagerService.
+    void getService() {
+        std::scoped_lock lock{mLock};
+        getService_l();
+    }
+
+    std::shared_ptr<IResourceManagerService> getService_l();
+
+    // Re-registers all the resources currently held by this proxy with
+    // the ResourceManagerService.
+    // This function is called right after the Resource Manager dies, to make
+    // sure that the newly started ResourceManagerService knows about the
+    // current resource usage.
+    void reRegisterAllResources_l();
+
+    void deinit() {
+        std::scoped_lock lock{mLock};
+        // Unregistering from DeathRecipient notification.
+        if (mService != nullptr) {
+            AIBinder_unlinkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
+            mService = nullptr;
+        }
+    }
+
+    // For binder death handling
+    static void BinderDiedCallback(void* cookie);
+    static void BinderUnlinkedCallback(void* cookie);
+
+    void binderDied() {
+        std::scoped_lock lock{mLock};
+        ALOGE("ResourceManagerService died.");
+        mService = nullptr;
+        mBinderDied = true;
+        // Start an async operation that reconnects with the ResourceManagerService
+        // and re-registers all the resources.
+        mGetServiceFuture = std::async(std::launch::async, [this] { getService(); });
+    }
+
+
+private:
+    std::mutex mLock;
     pid_t mPid;
     uid_t mUid;
+    bool mBinderDied = false;
     std::string mCodecName;
-    std::shared_ptr<IResourceManagerService> mService;
+    /**
+     * Reconnecting with the ResourceManagerService after its binder interface dies
+     * is done asynchronously. The reconnect also re-registers all the resources
+     * associated with this proxy (MediaCodec) with the new instance of the
+     * ResourceManagerService, so that the resource state persists across restarts.
+     * We must keep the returned future to guarantee a truly asynchronous operation.
+     */
+    std::future<void> mGetServiceFuture;
+    // Tracks all the resources currently added/registered with
+    // the ResourceManagerService.
+    std::set<MediaResourceParcel> mMediaResourceParcel;
     std::shared_ptr<IResourceManagerClient> mClient;
     ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+    std::shared_ptr<IResourceManagerService> mService;
 };
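
// A minimal sketch of the weak_ptr-cookie pattern that BinderDiedContext implements,
// using plain std types in place of the libbinder_ndk DeathRecipient machinery. The
// cookie holds a weak reference so a late death notification can detect that the proxy
// is already gone instead of dereferencing a dangling pointer; the unlink callback is
// the single owner of the cookie and frees it. All names below are hypothetical.
#include <iostream>
#include <memory>

struct Owner : std::enable_shared_from_this<Owner> {
    void onBinderDied() { std::cout << "reconnect and re-register resources\n"; }
};

struct DiedContext {                          // counterpart of BinderDiedContext
    std::weak_ptr<Owner> owner;
};

static void diedCallback(void* cookie) {      // counterpart of BinderDiedCallback
    auto* ctx = static_cast<DiedContext*>(cookie);
    if (std::shared_ptr<Owner> owner = ctx->owner.lock()) {
        owner->onBinderDied();                // owner still in scope: safe to use
    } else {
        std::cout << "owner already destroyed, ignore notification\n";
    }
}

static void unlinkedCallback(void* cookie) {  // counterpart of BinderUnlinkedCallback
    delete static_cast<DiedContext*>(cookie);
}

int main() {
    auto owner = std::make_shared<Owner>();
    void* cookie = new DiedContext{owner->weak_from_this()};
    diedCallback(cookie);       // death notification while the owner is alive
    owner.reset();
    diedCallback(cookie);       // a late notification after destruction is safely ignored
    unlinkedCallback(cookie);   // unlink: free the cookie
    return 0;
}
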
 
 MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(
-        pid_t pid, uid_t uid, const std::shared_ptr<IResourceManagerClient> &client)
-        : mPid(pid), mUid(uid), mClient(client),
-          mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {
+        pid_t pid, uid_t uid, const std::shared_ptr<IResourceManagerClient> &client) :
+    mPid(pid), mUid(uid), mClient(client),
+    mDeathRecipient(::ndk::ScopedAIBinder_DeathRecipient(
+            AIBinder_DeathRecipient_new(BinderDiedCallback))) {
     if (mUid == MediaCodec::kNoUid) {
         mUid = AIBinder_getCallingUid();
     }
     if (mPid == MediaCodec::kNoPid) {
         mPid = AIBinder_getCallingPid();
     }
+    // Set a callback to be notified when the DeathRecipient gets unlinked,
+    // so that the cookie can be safely deleted there.
+    AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(), BinderUnlinkedCallback);
 }
 
 MediaCodec::ResourceManagerServiceProxy::~ResourceManagerServiceProxy() {
-
-    // remove the cookie, so any in-flight death notification will get dropped
-    // by our handler.
-    removeCookie(this);
-
-    Mutex::Autolock _l(mLock);
-    if (mService != nullptr) {
-        AIBinder_unlinkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
-        mService = nullptr;
-    }
+    deinit();
 }
 
 status_t MediaCodec::ResourceManagerServiceProxy::init() {
-    ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
-    mService = IResourceManagerService::fromBinder(binder);
-    if (mService == nullptr) {
-        ALOGE("Failed to get ResourceManagerService");
-        return UNKNOWN_ERROR;
-    }
+    std::scoped_lock lock{mLock};
 
     int callerPid = AIBinder_getCallingPid();
     int callerUid = AIBinder_getCallingUid();
+
     if (mPid != callerPid || mUid != callerUid) {
         // Media processes don't need special permissions to act on behalf of other processes.
         if (callerUid != AID_MEDIA) {
@@ -357,60 +487,57 @@
         }
     }
 
+    mService = getService_l();
+    if (mService == nullptr) {
+        return DEAD_OBJECT;
+    }
+
     // Kill clients pending removal.
     mService->reclaimResourcesFromClientsPendingRemoval(mPid);
-
-    // so our handler will process the death notifications
-    addCookie(this);
-
-    // after this, require mLock whenever using mService
-    AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
     return OK;
 }
 
-//static
-// these are no_destroy to keep them from being destroyed at process exit
-// where some thread calls exit() while other threads are still running.
-// see b/194783918
-[[clang::no_destroy]] Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
-[[clang::no_destroy]] std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;
-
-//static
-void MediaCodec::ResourceManagerServiceProxy::addCookie(void* cookie) {
-    Mutex::Autolock _l(sLockCookies);
-    sCookies.insert(cookie);
-}
-
-//static
-void MediaCodec::ResourceManagerServiceProxy::removeCookie(void* cookie) {
-    Mutex::Autolock _l(sLockCookies);
-    sCookies.erase(cookie);
-}
-
-//static
-void MediaCodec::ResourceManagerServiceProxy::BinderDiedCallback(void* cookie) {
-    Mutex::Autolock _l(sLockCookies);
-    if (sCookies.find(cookie) != sCookies.end()) {
-        auto thiz = static_cast<ResourceManagerServiceProxy*>(cookie);
-        thiz->binderDied();
+std::shared_ptr<IResourceManagerService> MediaCodec::ResourceManagerServiceProxy::getService_l() {
+    if (mService != nullptr) {
+        return mService;
     }
-}
 
-void MediaCodec::ResourceManagerServiceProxy::binderDied() {
-    ALOGW("ResourceManagerService died.");
-    Mutex::Autolock _l(mLock);
-    mService = nullptr;
-}
-
-void MediaCodec::ResourceManagerServiceProxy::addResource(
-        const MediaResourceParcel &resource) {
-    std::vector<MediaResourceParcel> resources;
-    resources.push_back(resource);
-
-    Mutex::Autolock _l(mLock);
+    // Get binder interface to resource manager.
+    ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
+    mService = IResourceManagerService::fromBinder(binder);
     if (mService == nullptr) {
+        ALOGE("Failed to get ResourceManagerService");
+        return mService;
+    }
+
+    // Create the context that is passed as cookie to the binder death notification.
+    // The context gets deleted at BinderUnlinkedCallback.
+    BinderDiedContext* context = new BinderDiedContext{.mRMServiceProxy = weak_from_this()};
+    // Register for the callbacks by linking to death notification.
+    AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), context);
+
+    // If the RM was restarted, re-register all the resources.
+    if (mBinderDied) {
+        reRegisterAllResources_l();
+        mBinderDied = false;
+    }
+    return mService;
+}
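
// A minimal sketch, using only the standard library, of why mGetServiceFuture is kept as
// a member: the future returned by std::async blocks in its destructor until the task
// finishes, so a discarded future would make the reconnect started in binderDied()
// effectively synchronous. The one-second sleeps are placeholder work.
#include <chrono>
#include <future>
#include <iostream>
#include <thread>

int main() {
    using namespace std::chrono_literals;
    {
        auto discarded = std::async(std::launch::async, [] { std::this_thread::sleep_for(1s); });
        // 'discarded' is destroyed here; its destructor waits for the task, blocking ~1 second.
    }
    std::cout << "printed only after the first task completed\n";

    std::future<void> kept = std::async(std::launch::async, [] { std::this_thread::sleep_for(1s); });
    std::cout << "printed immediately; the kept task is still running\n";
    kept.wait();  // join the background task before exiting
    return 0;
}
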
+
+void MediaCodec::ResourceManagerServiceProxy::reRegisterAllResources_l() {
+    if (mMediaResourceParcel.empty()) {
+        ALOGV("No resources to add");
         return;
     }
+
+    if (mService == nullptr) {
+        ALOGW("Service isn't available");
+        return;
+    }
+
+    std::vector<MediaResourceParcel> resources;
+    std::copy(mMediaResourceParcel.begin(), mMediaResourceParcel.end(),
+              std::back_inserter(resources));
     ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                 .uid = static_cast<int32_t>(mUid),
                                 .id = getId(mClient),
@@ -418,50 +545,98 @@
     mService->addResource(clientInfo, mClient, resources);
 }
 
-void MediaCodec::ResourceManagerServiceProxy::removeResource(
-        const MediaResourceParcel &resource) {
-    std::vector<MediaResourceParcel> resources;
-    resources.push_back(resource);
+void MediaCodec::ResourceManagerServiceProxy::BinderDiedCallback(void* cookie) {
+    BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
 
-    Mutex::Autolock _l(mLock);
-    if (mService == nullptr) {
+    // Validate the context and check if the ResourceManagerServiceProxy object is still in scope.
+    if (context != nullptr) {
+        std::shared_ptr<ResourceManagerServiceProxy> thiz = context->mRMServiceProxy.lock();
+        if (thiz != nullptr) {
+            thiz->binderDied();
+        } else {
+            ALOGI("ResourceManagerServiceProxy is out of scope already");
+        }
+    }
+}
+
+void MediaCodec::ResourceManagerServiceProxy::BinderUnlinkedCallback(void* cookie) {
+    BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
+    // Since we don't need the context anymore, we are deleting it now.
+    delete context;
+}
+
+void MediaCodec::ResourceManagerServiceProxy::addResource(
+        const MediaResourceParcel &resource) {
+    std::scoped_lock lock{mLock};
+    std::shared_ptr<IResourceManagerService> service = getService_l();
+    if (service == nullptr) {
+        ALOGW("Service isn't available");
         return;
     }
+    std::vector<MediaResourceParcel> resources;
+    resources.push_back(resource);
     ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                 .uid = static_cast<int32_t>(mUid),
                                 .id = getId(mClient),
                                 .name = mCodecName};
-    mService->removeResource(clientInfo, resources);
+    service->addResource(clientInfo, mClient, resources);
+    mMediaResourceParcel.emplace(resource);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::removeResource(
+        const MediaResourceParcel &resource) {
+    std::scoped_lock lock{mLock};
+    std::shared_ptr<IResourceManagerService> service = getService_l();
+    if (service == nullptr) {
+        ALOGW("Service isn't available");
+        return;
+    }
+    std::vector<MediaResourceParcel> resources;
+    resources.push_back(resource);
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(mClient),
+                                .name = mCodecName};
+    service->removeResource(clientInfo, resources);
+    mMediaResourceParcel.erase(resource);
 }
 
 void MediaCodec::ResourceManagerServiceProxy::removeClient() {
-    Mutex::Autolock _l(mLock);
-    if (mService == nullptr) {
+    std::scoped_lock lock{mLock};
+    std::shared_ptr<IResourceManagerService> service = getService_l();
+    if (service == nullptr) {
+        ALOGW("Service isn't available");
         return;
     }
     ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                 .uid = static_cast<int32_t>(mUid),
                                 .id = getId(mClient),
                                 .name = mCodecName};
-    mService->removeClient(clientInfo);
+    service->removeClient(clientInfo);
+    mMediaResourceParcel.clear();
 }
 
 void MediaCodec::ResourceManagerServiceProxy::markClientForPendingRemoval() {
-    Mutex::Autolock _l(mLock);
-    if (mService == nullptr) {
+    std::scoped_lock lock{mLock};
+    std::shared_ptr<IResourceManagerService> service = getService_l();
+    if (service == nullptr) {
+        ALOGW("Service isn't available");
         return;
     }
     ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                 .uid = static_cast<int32_t>(mUid),
                                 .id = getId(mClient),
                                 .name = mCodecName};
-    mService->markClientForPendingRemoval(clientInfo);
+    service->markClientForPendingRemoval(clientInfo);
+    mMediaResourceParcel.clear();
 }
 
 bool MediaCodec::ResourceManagerServiceProxy::reclaimResource(
         const std::vector<MediaResourceParcel> &resources) {
-    Mutex::Autolock _l(mLock);
-    if (mService == NULL) {
+    std::scoped_lock lock{mLock};
+    std::shared_ptr<IResourceManagerService> service = getService_l();
+    if (service == nullptr) {
+        ALOGW("Service isn't available");
         return false;
     }
     bool success;
@@ -469,34 +644,67 @@
                                 .uid = static_cast<int32_t>(mUid),
                                 .id = getId(mClient),
                                 .name = mCodecName};
-    Status status = mService->reclaimResource(clientInfo, resources, &success);
+    Status status = service->reclaimResource(clientInfo, resources, &success);
     return status.isOk() && success;
 }
 
 void MediaCodec::ResourceManagerServiceProxy::notifyClientCreated() {
+    std::scoped_lock lock{mLock};
+    std::shared_ptr<IResourceManagerService> service = getService_l();
+    if (service == nullptr) {
+        ALOGW("Service isn't available");
+        return;
+    }
     ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                 .uid = static_cast<int32_t>(mUid),
                                 .id = getId(mClient),
                                 .name = mCodecName};
-    mService->notifyClientCreated(clientInfo);
+    service->notifyClientCreated(clientInfo);
 }
 
 void MediaCodec::ResourceManagerServiceProxy::notifyClientStarted(
-    ClientConfigParcel& clientConfig) {
+        ClientConfigParcel& clientConfig) {
+    std::scoped_lock lock{mLock};
+    std::shared_ptr<IResourceManagerService> service = getService_l();
+    if (service == nullptr) {
+        ALOGW("Service isn't available");
+        return;
+    }
     clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
     clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
     clientConfig.clientInfo.id = getId(mClient);
     clientConfig.clientInfo.name = mCodecName;
-    mService->notifyClientStarted(clientConfig);
+    service->notifyClientStarted(clientConfig);
 }
 
 void MediaCodec::ResourceManagerServiceProxy::notifyClientStopped(
-    ClientConfigParcel& clientConfig) {
+        ClientConfigParcel& clientConfig) {
+    std::scoped_lock lock{mLock};
+    std::shared_ptr<IResourceManagerService> service = getService_l();
+    if (service == nullptr) {
+        ALOGW("Service isn't available");
+        return;
+    }
     clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
     clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
     clientConfig.clientInfo.id = getId(mClient);
     clientConfig.clientInfo.name = mCodecName;
-    mService->notifyClientStopped(clientConfig);
+    service->notifyClientStopped(clientConfig);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::notifyClientConfigChanged(
+        ClientConfigParcel& clientConfig) {
+    std::scoped_lock lock{mLock};
+    std::shared_ptr<IResourceManagerService> service = getService_l();
+    if (service == nullptr) {
+        ALOGW("Service isn't available");
+        return;
+    }
+    clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
+    clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
+    clientConfig.clientInfo.id = getId(mClient);
+    clientConfig.clientInfo.name = mCodecName;
+    service->notifyClientConfigChanged(clientConfig);
 }
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -924,7 +1132,6 @@
       mWidth(0),
       mHeight(0),
       mRotationDegrees(0),
-      mHdrInfoFlags(0),
       mDequeueInputTimeoutGeneration(0),
       mDequeueInputReplyID(0),
       mDequeueOutputTimeoutGeneration(0),
@@ -936,8 +1143,10 @@
       mHaveInputSurface(false),
       mHavePendingInputBuffers(false),
       mCpuBoostRequested(false),
-      mPlaybackDurationAccumulator(new PlaybackDurationAccumulator()),
-      mIsSurfaceToScreen(false),
+      mIsSurfaceToDisplay(false),
+      mVideoRenderQualityTracker(
+              VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
+                      server_configurable_flags::GetServerConfigurableFlag)),
       mLatencyUnknown(0),
       mBytesEncoded(0),
       mEarliestEncodedPtsUs(INT64_MAX),
@@ -951,7 +1160,7 @@
       mGetCodecBase(getCodecBase),
       mGetCodecInfo(getCodecInfo) {
     mCodecId = GenerateCodecId();
-    mResourceManagerProxy = new ResourceManagerServiceProxy(pid, uid,
+    mResourceManagerProxy = std::make_shared<ResourceManagerServiceProxy>(pid, uid,
             ::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid, uid));
     if (!mGetCodecBase) {
         mGetCodecBase = [](const AString &name, const char *owner) {
@@ -959,10 +1168,12 @@
         };
     }
     if (!mGetCodecInfo) {
-        mGetCodecInfo = [](const AString &name, sp<MediaCodecInfo> *info) -> status_t {
+        mGetCodecInfo = [&log = mErrorLog](const AString &name,
+                                           sp<MediaCodecInfo> *info) -> status_t {
             *info = nullptr;
             const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
             if (!mcl) {
+                log.log(LOG_TAG, "Fatal error: failed to initialize MediaCodecList");
                 return NO_INIT;  // if called from Java should raise IOException
             }
             AString tmp = name;
@@ -977,6 +1188,8 @@
                 *info = mcl->getCodecInfo(codecIdx);
                 return OK;
             }
+            log.log(LOG_TAG, base::StringPrintf("Codec with name '%s' is not found on the device.",
+                                  name.c_str()));
             return NAME_NOT_FOUND;
         };
     }
@@ -1028,15 +1241,96 @@
     }
 
     mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
+    resetMetricsFields();
+}
+
+void MediaCodec::resetMetricsFields() {
+    mHdrInfoFlags = 0;
+
+    mApiUsageMetrics = ApiUsageMetrics();
+    mReliabilityContextMetrics = ReliabilityContextMetrics();
 }
 
 void MediaCodec::updateMediametrics() {
     if (mMetricsHandle == 0) {
+        ALOGW("no metrics handle found");
         return;
     }
 
     Mutex::Autolock _lock(mMetricsLock);
 
+    mediametrics_setInt32(mMetricsHandle, kCodecArrayMode, mApiUsageMetrics.isArrayMode ? 1 : 0);
+    mApiUsageMetrics.operationMode = (mFlags & kFlagIsAsync) ?
+            ((mFlags & kFlagUseBlockModel) ? ApiUsageMetrics::kBlockMode
+                    : ApiUsageMetrics::kAsynchronousMode)
+            : ApiUsageMetrics::kSynchronousMode;
+    mediametrics_setInt32(mMetricsHandle, kCodecOperationMode, mApiUsageMetrics.operationMode);
+    mediametrics_setInt32(mMetricsHandle, kCodecOutputSurface,
+            mApiUsageMetrics.isUsingOutputSurface ? 1 : 0);
+
+    mediametrics_setInt32(mMetricsHandle, kCodecAppMaxInputSize,
+            mApiUsageMetrics.inputBufferSize.appMax);
+    mediametrics_setInt32(mMetricsHandle, kCodecUsedMaxInputSize,
+            mApiUsageMetrics.inputBufferSize.usedMax);
+    mediametrics_setInt32(mMetricsHandle, kCodecCodecMaxInputSize,
+            mApiUsageMetrics.inputBufferSize.codecMax);
+
+    mediametrics_setInt32(mMetricsHandle, kCodecFlushCount, mReliabilityContextMetrics.flushCount);
+    mediametrics_setInt32(mMetricsHandle, kCodecSetSurfaceCount,
+            mReliabilityContextMetrics.setOutputSurfaceCount);
+    mediametrics_setInt32(mMetricsHandle, kCodecResolutionChangeCount,
+            mReliabilityContextMetrics.resolutionChangeCount);
+
+    // Video rendering quality metrics
+    {
+        const VideoRenderQualityMetrics &m = mVideoRenderQualityTracker.getMetrics();
+        if (m.frameReleasedCount > 0) {
+            mediametrics_setInt64(mMetricsHandle, kCodecFirstRenderTimeUs, m.firstRenderTimeUs);
+            mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
+            mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
+            mediametrics_setInt64(mMetricsHandle, kCodecFramesSkipped, m.frameSkippedCount);
+            mediametrics_setInt64(mMetricsHandle, kCodecFramesDropped, m.frameDroppedCount);
+            mediametrics_setDouble(mMetricsHandle, kCodecFramerateContent, m.contentFrameRate);
+            mediametrics_setDouble(mMetricsHandle, kCodecFramerateDesired, m.desiredFrameRate);
+            mediametrics_setDouble(mMetricsHandle, kCodecFramerateActual, m.actualFrameRate);
+        }
+        if (m.freezeDurationMsHistogram.getCount() >= 1) {
+            const MediaHistogram<int32_t> &h = m.freezeDurationMsHistogram;
+            mediametrics_setInt64(mMetricsHandle, kCodecFreezeScore, m.freezeScore);
+            mediametrics_setDouble(mMetricsHandle, kCodecFreezeRate, m.freezeRate);
+            mediametrics_setInt64(mMetricsHandle, kCodecFreezeCount, h.getCount());
+            mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsAvg, h.getAvg());
+            mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsMax, h.getMax());
+            mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogram, h.emit());
+            mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogramBuckets,
+                                   h.emitBuckets());
+        }
+        if (m.freezeDistanceMsHistogram.getCount() >= 1) {
+            const MediaHistogram<int32_t> &h = m.freezeDistanceMsHistogram;
+            mediametrics_setInt32(mMetricsHandle, kCodecFreezeDistanceMsAvg, h.getAvg());
+            mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogram, h.emit());
+            mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogramBuckets,
+                                   h.emitBuckets());
+        }
+        if (m.judderScoreHistogram.getCount() >= 1) {
+            const MediaHistogram<int32_t> &h = m.judderScoreHistogram;
+            mediametrics_setInt64(mMetricsHandle, kCodecJudderScore, m.judderScore);
+            mediametrics_setDouble(mMetricsHandle, kCodecJudderRate, m.judderRate);
+            mediametrics_setInt64(mMetricsHandle, kCodecJudderCount, h.getCount());
+            mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreAvg, h.getAvg());
+            mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreMax, h.getMax());
+            mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogram, h.emit());
+            mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogramBuckets,
+                                   h.emitBuckets());
+        }
+        if (m.freezeEventCount != 0) {
+            mediametrics_setInt32(mMetricsHandle, kCodecFreezeEventCount, m.freezeEventCount);
+        }
+        if (m.judderEventCount != 0) {
+            mediametrics_setInt32(mMetricsHandle, kCodecJudderEventCount, m.judderEventCount);
+        }
+    }
+
     if (mLatencyHist.getCount() != 0 ) {
         mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
         mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
@@ -1052,7 +1346,7 @@
     if (mLatencyUnknown > 0) {
         mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
     }
-    int64_t playbackDurationSec = mPlaybackDurationAccumulator->getDurationInSeconds();
+    int64_t playbackDurationSec = mPlaybackDurationAccumulator.getDurationInSeconds();
     if (playbackDurationSec > 0) {
         mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
     }
@@ -1115,14 +1409,14 @@
             && ColorUtils::isHDRStaticInfoValid(&info)) {
         mHdrInfoFlags |= kFlagHasHdrStaticInfo;
     }
-    mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo,
+    mediametrics_setInt32(mMetricsHandle, kCodecHdrStaticInfo,
             (mHdrInfoFlags & kFlagHasHdrStaticInfo) ? 1 : 0);
     sp<ABuffer> hdr10PlusInfo;
     if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
             && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
         mHdrInfoFlags |= kFlagHasHdr10PlusInfo;
     }
-    mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo,
+    mediametrics_setInt32(mMetricsHandle, kCodecHdr10PlusInfo,
             (mHdrInfoFlags & kFlagHasHdr10PlusInfo) ? 1 : 0);
 
     // hdr format
@@ -1135,7 +1429,7 @@
             && codedFormat->findInt32(KEY_PROFILE, &profile)
             && colorTransfer != -1) {
         hdr_format hdrFormat = getHdrFormat(mime, profile, colorTransfer);
-        mediametrics_setInt32(mMetricsHandle, kCodecHDRFormat, static_cast<int>(hdrFormat));
+        mediametrics_setInt32(mMetricsHandle, kCodecHdrFormat, static_cast<int>(hdrFormat));
     }
 }
 
@@ -1243,16 +1537,15 @@
         return;
     }
 
-    Histogram recentHist;
-
     // build an empty histogram
+    MediaHistogram<int64_t> recentHist;
     recentHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
 
     // stuff it with the samples in the ring buffer
     {
         Mutex::Autolock al(mRecentLock);
 
-        for (int i=0; i<kRecentLatencyFrames; i++) {
+        for (int i = 0; i < kRecentLatencyFrames; i++) {
             if (mRecentSamples[i] != kRecentSampleInvalid) {
                 recentHist.insert(mRecentSamples[i]);
             }
@@ -1260,7 +1553,7 @@
     }
 
     // spit the data (if any) into the supplied analytics record
-    if (recentHist.getCount()!= 0 ) {
+    if (recentHist.getCount() != 0 ) {
         mediametrics_setInt64(item, kCodecRecentLatencyMax, recentHist.getMax());
         mediametrics_setInt64(item, kCodecRecentLatencyMin, recentHist.getMin());
         mediametrics_setInt64(item, kCodecRecentLatencyAvg, recentHist.getAvg());
@@ -1274,12 +1567,59 @@
     }
 }
 
+static std::string emitVector(std::vector<int32_t> vector) {
+    std::ostringstream sstr;
+    for (size_t i = 0; i < vector.size(); ++i) {
+        if (i != 0) {
+            sstr << ',';
+        }
+        sstr << vector[i];
+    }
+    return sstr.str();
+}
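
// A usage sketch of emitVector() above: it serializes the per-event detail vectors into
// the comma-separated strings recorded with mediametrics below. The sample values are
// made up for illustration.
#include <cstdint>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

static std::string emitVector(std::vector<int32_t> vector) {
    std::ostringstream sstr;
    for (size_t i = 0; i < vector.size(); ++i) {
        if (i != 0) {
            sstr << ',';
        }
        sstr << vector[i];
    }
    return sstr.str();
}

int main() {
    std::cout << emitVector({840, 1230, 910}) << "\n";  // prints "840,1230,910"
    std::cout << emitVector({}) << "\n";                // empty vector yields an empty string
    return 0;
}
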
+
+static void reportToMediaMetricsIfValid(const FreezeEvent &e) {
+    if (e.valid) {
+        mediametrics_handle_t handle = mediametrics_create(kFreezeEventKeyName);
+        mediametrics_setInt64(handle, kFreezeEventInitialTimeUs, e.initialTimeUs);
+        mediametrics_setInt32(handle, kFreezeEventDurationMs, e.durationMs);
+        mediametrics_setInt64(handle, kFreezeEventCount, e.count);
+        mediametrics_setInt32(handle, kFreezeEventAvgDurationMs, e.sumDurationMs / e.count);
+        mediametrics_setInt32(handle, kFreezeEventAvgDistanceMs, e.sumDistanceMs / e.count);
+        mediametrics_setString(handle, kFreezeEventDetailsDurationMs,
+                               emitVector(e.details.durationMs));
+        mediametrics_setString(handle, kFreezeEventDetailsDistanceMs,
+                               emitVector(e.details.distanceMs));
+        mediametrics_selfRecord(handle);
+        mediametrics_delete(handle);
+    }
+}
+
+static void reportToMediaMetricsIfValid(const JudderEvent &e) {
+    if (e.valid) {
+        mediametrics_handle_t handle = mediametrics_create(kJudderEventKeyName);
+        mediametrics_setInt64(handle, kJudderEventInitialTimeUs, e.initialTimeUs);
+        mediametrics_setInt32(handle, kJudderEventDurationMs, e.durationMs);
+        mediametrics_setInt64(handle, kJudderEventCount, e.count);
+        mediametrics_setInt32(handle, kJudderEventAvgScore, e.sumScore / e.count);
+        mediametrics_setInt32(handle, kJudderEventAvgDistanceMs, e.sumDistanceMs / e.count);
+        mediametrics_setString(handle, kJudderEventDetailsActualDurationUs,
+                               emitVector(e.details.actualRenderDurationUs));
+        mediametrics_setString(handle, kJudderEventDetailsContentDurationUs,
+                               emitVector(e.details.contentRenderDurationUs));
+        mediametrics_setString(handle, kJudderEventDetailsDistanceMs,
+                               emitVector(e.details.distanceMs));
+        mediametrics_selfRecord(handle);
+        mediametrics_delete(handle);
+    }
+}
+
 void MediaCodec::flushMediametrics() {
     ALOGD("flushMediametrics");
 
     // update does its own mutex locking
     updateMediametrics();
-    mHdrInfoFlags = 0;
+    resetMetricsFields();
 
     // ensure mutex while we do our own work
     Mutex::Autolock _lock(mMetricsLock);
@@ -1292,6 +1632,10 @@
     }
     // we no longer have anything pending upload
     mMetricsToUpload = false;
+
+    // Freeze and judder events are reported separately
+    reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetFreezeEvent());
+    reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetJudderEvent());
 }
 
 void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
@@ -1377,116 +1721,43 @@
     ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
 }
 
-void MediaCodec::updatePlaybackDuration(const sp<AMessage> &msg) {
+void MediaCodec::processRenderedFrames(const sp<AMessage> &msg) {
     int what = 0;
     msg->findInt32("what", &what);
     if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
         static bool logged = false;
         if (!logged) {
             logged = true;
-            ALOGE("updatePlaybackDuration: expected kWhatOuputFramesRendered (%d)", msg->what());
+            ALOGE("processRenderedFrames: expected kWhatOutputFramesRendered (%d)", msg->what());
         }
         return;
     }
-    // Playback duration only counts if the buffers are going to the screen.
-    if (!mIsSurfaceToScreen) {
-        return;
-    }
-    int64_t renderTimeNs;
-    size_t index = 0;
-    while (msg->findInt64(AStringPrintf("%zu-system-nano", index++).c_str(), &renderTimeNs)) {
-        mPlaybackDurationAccumulator->processRenderTime(renderTimeNs);
-    }
-}
-
-bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
-{
-    if (nbuckets <= 0 || width <= 0) {
-        return false;
-    }
-
-    // get histogram buckets
-    if (nbuckets == mBucketCount && mBuckets != NULL) {
-        // reuse our existing buffer
-        memset(mBuckets, 0, sizeof(*mBuckets) * mBucketCount);
-    } else {
-        // get a new pre-zeroed buffer
-        int64_t *newbuckets = (int64_t *)calloc(nbuckets, sizeof (*mBuckets));
-        if (newbuckets == NULL) {
-            goto bad;
+    // Rendered frames only matter if they're being sent to the display
+    if (mIsSurfaceToDisplay) {
+        int64_t renderTimeNs;
+        for (size_t index = 0;
+            msg->findInt64(AStringPrintf("%zu-system-nano", index).c_str(), &renderTimeNs);
+            index++) {
+            // Capture metrics for playback duration
+            mPlaybackDurationAccumulator.onFrameRendered(renderTimeNs);
+            // Capture metrics for quality
+            int64_t mediaTimeUs = 0;
+            if (!msg->findInt64(AStringPrintf("%zu-media-time-us", index).c_str(), &mediaTimeUs)) {
+                ALOGE("processRenderedFrames: no media time found");
+                continue;
+            }
+            // Tunneled frames use INT64_MAX to indicate end-of-stream, so don't report it as a
+            // rendered frame.
+            if (!mTunneled || mediaTimeUs != INT64_MAX) {
+                FreezeEvent freezeEvent;
+                JudderEvent judderEvent;
+                mVideoRenderQualityTracker.onFrameRendered(mediaTimeUs, renderTimeNs, &freezeEvent,
+                                                           &judderEvent);
+                reportToMediaMetricsIfValid(freezeEvent);
+                reportToMediaMetricsIfValid(judderEvent);
+            }
         }
-        if (mBuckets != NULL)
-            free(mBuckets);
-        mBuckets = newbuckets;
     }
-
-    mWidth = width;
-    mFloor = floor;
-    mCeiling = floor + nbuckets * width;
-    mBucketCount = nbuckets;
-
-    mMin = INT64_MAX;
-    mMax = INT64_MIN;
-    mSum = 0;
-    mCount = 0;
-    mBelow = mAbove = 0;
-
-    return true;
-
-  bad:
-    if (mBuckets != NULL) {
-        free(mBuckets);
-        mBuckets = NULL;
-    }
-
-    return false;
-}
-
-void MediaCodec::Histogram::insert(int64_t sample)
-{
-    // histogram is not set up
-    if (mBuckets == NULL) {
-        return;
-    }
-
-    mCount++;
-    mSum += sample;
-    if (mMin > sample) mMin = sample;
-    if (mMax < sample) mMax = sample;
-
-    if (sample < mFloor) {
-        mBelow++;
-    } else if (sample >= mCeiling) {
-        mAbove++;
-    } else {
-        int64_t slot = (sample - mFloor) / mWidth;
-        CHECK(slot < mBucketCount);
-        mBuckets[slot]++;
-    }
-    return;
-}
-
-std::string MediaCodec::Histogram::emit()
-{
-    std::string value;
-    char buffer[64];
-
-    // emits:  width,Below{bucket0,bucket1,...., bucketN}above
-    // unconfigured will emit: 0,0{}0
-    // XXX: is this best representation?
-    snprintf(buffer, sizeof(buffer), "%" PRId64 ",%" PRId64 ",%" PRId64 "{",
-             mFloor, mWidth, mBelow);
-    value = buffer;
-    for (int i = 0; i < mBucketCount; i++) {
-        if (i != 0) {
-            value = value + ",";
-        }
-        snprintf(buffer, sizeof(buffer), "%" PRId64, mBuckets[i]);
-        value = value + buffer;
-    }
-    snprintf(buffer, sizeof(buffer), "}%" PRId64 , mAbove);
-    value = value + buffer;
-    return value;
 }
 
 // when we send a buffer to the codec;
@@ -1727,6 +1998,8 @@
 status_t MediaCodec::init(const AString &name) {
     status_t err = mResourceManagerProxy->init();
     if (err != OK) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "Fatal error: failed to initialize ResourceManager (err=%d)", err));
         mCodec = NULL; // remove the codec
         return err;
     }
@@ -1746,11 +2019,14 @@
     if (!name.startsWith("android.filter.")) {
         err = mGetCodecInfo(name, &mCodecInfo);
         if (err != OK) {
+            mErrorLog.log(LOG_TAG, base::StringPrintf(
+                    "Getting codec info with name '%s' failed (err=%d)", name.c_str(), err));
             mCodec = NULL;  // remove the codec.
             return err;
         }
         if (mCodecInfo == nullptr) {
-            ALOGE("Getting codec info with name '%s' failed", name.c_str());
+            mErrorLog.log(LOG_TAG, base::StringPrintf(
+                    "Getting codec info with name '%s' failed", name.c_str()));
             return NAME_NOT_FOUND;
         }
         secureCodec = name.endsWith(".secure");
@@ -1773,7 +2049,8 @@
 
     mCodec = mGetCodecBase(name, owner);
     if (mCodec == NULL) {
-        ALOGE("Getting codec base with name '%s' (owner='%s') failed", name.c_str(), owner);
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "Getting codec base with name '%s' (from '%s' HAL) failed", name.c_str(), owner));
         return NAME_NOT_FOUND;
     }
 
@@ -1785,7 +2062,7 @@
             mCodecLooper->setName("CodecLooper");
             err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
             if (OK != err) {
-                ALOGE("Codec Looper failed to start");
+                mErrorLog.log(LOG_TAG, "Fatal error: codec looper failed to start");
                 return err;
             }
         }
@@ -1945,6 +2222,10 @@
             if (format->findInt32("color-format", &colorFormat)) {
                 mediametrics_setInt32(nextMetricsHandle, kCodecColorFormat, colorFormat);
             }
+            int32_t appMaxInputSize = -1;
+            if (format->findInt32(KEY_MAX_INPUT_SIZE, &appMaxInputSize)) {
+                mApiUsageMetrics.inputBufferSize.appMax = appMaxInputSize;
+            }
             if (mDomain == DOMAIN_VIDEO) {
                 float frameRate = -1.0;
                 if (format->findFloat("frame-rate", &frameRate)) {
@@ -1968,7 +2249,8 @@
         // Prevent possible integer overflow in downstream code.
         if (mWidth < 0 || mHeight < 0 ||
                (uint64_t)mWidth * mHeight > (uint64_t)INT32_MAX / 4) {
-            ALOGE("Invalid size(s), width=%d, height=%d", mWidth, mHeight);
+            mErrorLog.log(LOG_TAG, base::StringPrintf(
+                    "Invalid size(s), width=%d, height=%d", mWidth, mHeight));
             mediametrics_delete(nextMetricsHandle);
             return BAD_VALUE;
         }
@@ -2067,9 +2349,11 @@
     std::vector<MediaResourceParcel> resources;
     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
             toMediaResourceSubType(mDomain)));
-    // Don't know the buffer size at this point, but it's fine to use 1 because
-    // the reclaimResource call doesn't consider the requester's buffer size for now.
-    resources.push_back(MediaResource::GraphicMemoryResource(1));
+    if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
+        // Don't know the buffer size at this point, but it's fine to use 1 because
+        // the reclaimResource call doesn't consider the requester's buffer size for now.
+        resources.push_back(MediaResource::GraphicMemoryResource(1));
+    }
     for (int i = 0; i <= kMaxRetry; ++i) {
         sp<AMessage> response;
         err = PostAndAwaitResponse(msg, &response);
@@ -2670,9 +2954,11 @@
     std::vector<MediaResourceParcel> resources;
     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
             toMediaResourceSubType(mDomain)));
-    // Don't know the buffer size at this point, but it's fine to use 1 because
-    // the reclaimResource call doesn't consider the requester's buffer size for now.
-    resources.push_back(MediaResource::GraphicMemoryResource(1));
+    if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
+        // Don't know the buffer size at this point, but it's fine to use 1 because
+        // the reclaimResource call doesn't consider the requester's buffer size for now.
+        resources.push_back(MediaResource::GraphicMemoryResource(1));
+    }
     for (int i = 0; i <= kMaxRetry; ++i) {
         if (i > 0) {
             // Don't try to reclaim resource for the first time.
@@ -3139,17 +3425,17 @@
         sp<MediaCodecBuffer> *buffer, sp<AMessage> *format) {
     // use mutex instead of a context switch
     if (mReleasedByResourceManager) {
-        ALOGE("getBufferAndFormat - resource already released");
+        mErrorLog.log(LOG_TAG, "resource already released");
         return DEAD_OBJECT;
     }
 
     if (buffer == NULL) {
-        ALOGE("getBufferAndFormat - null MediaCodecBuffer");
+        mErrorLog.log(LOG_TAG, "null buffer");
         return INVALID_OPERATION;
     }
 
     if (format == NULL) {
-        ALOGE("getBufferAndFormat - null AMessage");
+        mErrorLog.log(LOG_TAG, "null format");
         return INVALID_OPERATION;
     }
 
@@ -3157,7 +3443,9 @@
     format->clear();
 
     if (!isExecuting()) {
-        ALOGE("getBufferAndFormat - not executing");
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "Invalid to call %s; only valid in Executing states",
+                apiStateString().c_str()));
         return INVALID_OPERATION;
     }
 
@@ -3169,6 +3457,7 @@
     if (index >= buffers.size()) {
         ALOGE("getBufferAndFormat - trying to get buffer with "
               "bad index (index=%zu buffer_size=%zu)", index, buffers.size());
+        mErrorLog.log(LOG_TAG, base::StringPrintf("Bad index (index=%zu)", index));
         return INVALID_OPERATION;
     }
 
@@ -3176,6 +3465,7 @@
     if (!info.mOwnedByClient) {
         ALOGE("getBufferAndFormat - invalid operation "
               "(the index %zu is not owned by client)", index);
+        mErrorLog.log(LOG_TAG, base::StringPrintf("index %zu is not owned by client", index));
         return INVALID_OPERATION;
     }
 
@@ -3303,6 +3593,7 @@
 
 void MediaCodec::cancelPendingDequeueOperations() {
     if (mFlags & kFlagDequeueInputPending) {
+        mErrorLog.log(LOG_TAG, "Pending dequeue input buffer request cancelled");
         PostReplyWithError(mDequeueInputReplyID, INVALID_OPERATION);
 
         ++mDequeueInputTimeoutGeneration;
@@ -3311,6 +3602,7 @@
     }
 
     if (mFlags & kFlagDequeueOutputPending) {
+        mErrorLog.log(LOG_TAG, "Pending dequeue output buffer request cancelled");
         PostReplyWithError(mDequeueOutputReplyID, INVALID_OPERATION);
 
         ++mDequeueOutputTimeoutGeneration;
@@ -3320,8 +3612,16 @@
 }
 
 bool MediaCodec::handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
-    if (!isExecuting() || (mFlags & kFlagIsAsync)
-            || (newRequest && (mFlags & kFlagDequeueInputPending))) {
+    if (!isExecuting()) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "Invalid to call %s; only valid in executing state",
+                apiStateString().c_str()));
+        PostReplyWithError(replyID, INVALID_OPERATION);
+    } else if (mFlags & kFlagIsAsync) {
+        mErrorLog.log(LOG_TAG, "Invalid to call in async mode");
+        PostReplyWithError(replyID, INVALID_OPERATION);
+    } else if (newRequest && (mFlags & kFlagDequeueInputPending)) {
+        mErrorLog.log(LOG_TAG, "Invalid to call while another dequeue input request is pending");
         PostReplyWithError(replyID, INVALID_OPERATION);
         return true;
     } else if (mFlags & kFlagStickyError) {
@@ -3345,8 +3645,16 @@
 
 MediaCodec::DequeueOutputResult MediaCodec::handleDequeueOutputBuffer(
         const sp<AReplyToken> &replyID, bool newRequest) {
-    if (!isExecuting() || (mFlags & kFlagIsAsync)
-            || (newRequest && (mFlags & kFlagDequeueOutputPending))) {
+    if (!isExecuting()) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "Invalid to call %s; only valid in executing state",
+                apiStateString().c_str()));
+        PostReplyWithError(replyID, INVALID_OPERATION);
+    } else if (mFlags & kFlagIsAsync) {
+        mErrorLog.log(LOG_TAG, "Invalid to call in async mode");
+        PostReplyWithError(replyID, INVALID_OPERATION);
+    } else if (newRequest && (mFlags & kFlagDequeueOutputPending)) {
+        mErrorLog.log(LOG_TAG, "Invalid to call while another dequeue output request is pending");
         PostReplyWithError(replyID, INVALID_OPERATION);
     } else if (mFlags & kFlagStickyError) {
         PostReplyWithError(replyID, getStickyError());
@@ -3530,8 +3838,7 @@
 
                                 setState(UNINITIALIZED);
                             } else {
-                                setState(
-                                        (mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
+                                setState((mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
                             }
                             break;
                         }
@@ -3656,6 +3963,9 @@
                         mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
                     }
 
+                    mediametrics_setInt32(mMetricsHandle, kCodecHardware,
+                                          MediaCodecList::isSoftwareCodec(mComponentName) ? 0 : 1);
+
                     mResourceManagerProxy->addResource(MediaResource::CodecResource(
                             mFlags & kFlagIsSecure, toMediaResourceSubType(mDomain)));
 
@@ -3722,7 +4032,19 @@
                         if (interestingFormat->findInt32("level", &level)) {
                             mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
                         }
+                        sp<AMessage> uncompressedFormat =
+                                (mFlags & kFlagIsEncoder) ? mInputFormat : mOutputFormat;
+                        int32_t componentColorFormat  = -1;
+                        if (uncompressedFormat->findInt32("android._color-format",
+                                &componentColorFormat)) {
+                            mediametrics_setInt32(mMetricsHandle,
+                                    kCodecComponentColorFormat, componentColorFormat);
+                        }
                         updateHdrMetrics(true /* isConfig */);
+                        int32_t codecMaxInputSize = -1;
+                        if (mInputFormat->findInt32(KEY_MAX_INPUT_SIZE, &codecMaxInputSize)) {
+                            mApiUsageMetrics.inputBufferSize.codecMax = codecMaxInputSize;
+                        }
                         // bitrate and bitrate mode, encoder only
                         if (mFlags & kFlagIsEncoder) {
                             // encoder specific values
@@ -3868,7 +4190,7 @@
                                 asString(previousState),
                                 asString(TunnelPeekState::kBufferRendered));
                     }
-                    updatePlaybackDuration(msg);
+                    processRenderedFrames(msg);
                     // check that we have a notification set
                     if (mOnFrameRenderedNotification != NULL) {
                         sp<AMessage> notify = mOnFrameRenderedNotification->dup();
@@ -4062,6 +4384,11 @@
                               mState, stateString(mState).c_str());
                         break;
                     }
+
+                    if (mIsSurfaceToDisplay) {
+                        mVideoRenderQualityTracker.resetForDiscontinuity();
+                    }
+
                     // Notify the RM that the codec has been stopped.
                     ClientConfigParcel clientConfig;
                     initClientConfigParcel(clientConfig);
@@ -4117,12 +4444,17 @@
                         break;
                     }
 
+                    if (mIsSurfaceToDisplay) {
+                        mVideoRenderQualityTracker.resetForDiscontinuity();
+                    }
+
                     if (mFlags & kFlagIsAsync) {
                         setState(FLUSHED);
                     } else {
                         setState(STARTED);
                         mCodec->signalResume();
                     }
+                    mReliabilityContextMetrics.flushCount++;
 
                     postPendingRepliesAndDeferredMessages("kWhatFlushCompleted");
                     break;
@@ -4189,6 +4521,9 @@
                 // callback can't be set after codec is executing,
                 // or before it's initialized (as the callback
                 // will be cleared when it goes to INITIALIZED)
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "Invalid to call %s; only valid at Initialized state",
+                        apiStateString().c_str()));
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             }
@@ -4220,6 +4555,9 @@
         case kWhatConfigure:
         {
             if (mState != INITIALIZED) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "configure() is valid only at Initialized state; currently %s",
+                        apiStateString().c_str()));
                 PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             }
@@ -4277,12 +4615,15 @@
                 handleSetSurface(NULL);
             }
 
+            mApiUsageMetrics.isUsingOutputSurface = true;
+
             uint32_t flags;
             CHECK(msg->findInt32("flags", (int32_t *)&flags));
             if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL ||
                 flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
                 if (!(mFlags & kFlagIsAsync)) {
-                    ALOGE("Error: configuration requires async operation");
+                    mErrorLog.log(
+                            LOG_TAG, "Block model is only valid with callback set (async mode)");
                     PostReplyWithError(replyID, INVALID_OPERATION);
                     break;
                 }
@@ -4290,11 +4631,10 @@
                     mFlags |= kFlagUseBlockModel;
                 }
                 if (flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
-                    // silently disable crytoasync with blockmodel
-                    if (!(mFlags & kFlagUseBlockModel)) {
-                        mFlags |= kFlagUseCryptoAsync;
-                    } else {
-                        ALOGW("CrytoAsync not yet enabled for block model, falling back to normal");
+                    mFlags |= kFlagUseCryptoAsync;
+                    if ((mFlags & kFlagUseBlockModel)) {
+                        ALOGW("CryptoAsync not yet enabled for block model, "
+                                "falling back to normal");
                     }
                 }
             }
@@ -4324,17 +4664,23 @@
             mBufferChannel->setDescrambler(mDescrambler);
             if ((mFlags & kFlagUseCryptoAsync) &&
                 mCrypto  && (mDomain == DOMAIN_VIDEO)) {
-                mCryptoAsync = new CryptoAsync(mBufferChannel);
-                mCryptoAsync->setCallback(
-                std::make_unique<CryptoAsyncCallback>(new AMessage(kWhatCodecNotify, this)));
-                mCryptoLooper = new ALooper();
-                mCryptoLooper->setName("CryptoAsyncLooper");
-                mCryptoLooper->registerHandler(mCryptoAsync);
-                status_t err = mCryptoLooper->start();
-                if (err != OK) {
-                    ALOGE("Crypto Looper failed to start");
-                    mCryptoAsync = nullptr;
-                    mCryptoLooper = nullptr;
+                // Set kFlagUseCryptoAsync but do not use CryptoAsync for block model;
+                // this is to propagate the error through onCryptoError().
+                // TODO (b/274628160): Enable use of CONFIGURE_FLAG_USE_CRYPTO_ASYNC
+                //                     with CONFIGURE_FLAG_USE_BLOCK_MODEL.
+                if (!(mFlags & kFlagUseBlockModel)) {
+                    mCryptoAsync = new CryptoAsync(mBufferChannel);
+                    mCryptoAsync->setCallback(
+                    std::make_unique<CryptoAsyncCallback>(new AMessage(kWhatCodecNotify, this)));
+                    mCryptoLooper = new ALooper();
+                    mCryptoLooper->setName("CryptoAsyncLooper");
+                    mCryptoLooper->registerHandler(mCryptoAsync);
+                    status_t err = mCryptoLooper->start();
+                    if (err != OK) {
+                        ALOGE("Crypto Looper failed to start");
+                        mCryptoAsync = nullptr;
+                        mCryptoLooper = nullptr;
+                    }
                 }
             }
 
@@ -4353,6 +4699,7 @@
             } else {
                 mTunneled = false;
             }
+            mediametrics_setInt32(mMetricsHandle, kCodecTunneled, mTunneled ? 1 : 0);
 
             int32_t background = 0;
             if (format->findInt32("android._background-mode", &background) && background) {
@@ -4380,9 +4727,13 @@
                     sp<Surface> surface = static_cast<Surface *>(obj.get());
                     if (mSurface == NULL) {
                         // do not support setting surface if it was not set
+                        mErrorLog.log(LOG_TAG,
+                                      "Cannot set surface if the codec is not configured with "
+                                      "a surface already");
                         err = INVALID_OPERATION;
                     } else if (obj == NULL) {
                         // do not support unsetting surface
+                        mErrorLog.log(LOG_TAG, "Unsetting surface is not supported");
                         err = BAD_VALUE;
                     } else {
                         err = connectToSurface(surface);
@@ -4407,12 +4758,16 @@
                                 (void)disconnectFromSurface();
                                 mSurface = surface;
                             }
+                            mReliabilityContextMetrics.setOutputSurfaceCount++;
                         }
                     }
                     break;
                 }
 
                 default:
+                    mErrorLog.log(LOG_TAG, base::StringPrintf(
+                            "setSurface() is valid only at Executing states; currently %s",
+                            apiStateString().c_str()));
                     err = INVALID_OPERATION;
                     break;
             }
@@ -4426,6 +4781,9 @@
         {
             // Must be configured, but can't have been started yet.
             if (mState != CONFIGURED) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "setInputSurface() is valid only at Configured state; currently %s",
+                        apiStateString().c_str()));
                 PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             }
@@ -4461,6 +4819,9 @@
                 PostReplyWithError(msg, OK);
                 break;
             } else if (mState != CONFIGURED) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "start() is valid only at Configured state; currently %s",
+                        apiStateString().c_str()));
                 PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             }
@@ -4540,6 +4901,7 @@
                     if (mFlags & kFlagIsAsync) {
                         onError(DEAD_OBJECT, ACTION_CODE_FATAL);
                     }
+                    mErrorLog.log(LOG_TAG, "Released by resource manager");
                     mReleasedByResourceManager = true;
                 }
 
@@ -4576,6 +4938,7 @@
                 // the previous stop/release completes and then reply with OK.
                 status_t err = mState == targetState ? OK : INVALID_OPERATION;
                 response->setInt32("err", err);
+                // TODO: mErrorLog
                 if (err == OK && targetState == UNINITIALIZED) {
                     mComponentName.clear();
                 }
@@ -4684,13 +5047,13 @@
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             if (mFlags & kFlagIsAsync) {
-                ALOGE("dequeueInputBuffer can't be used in async mode");
+                mErrorLog.log(LOG_TAG, "dequeueInputBuffer can't be used in async mode");
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             }
 
             if (mHaveInputSurface) {
-                ALOGE("dequeueInputBuffer can't be used with input surface");
+                mErrorLog.log(LOG_TAG, "dequeueInputBuffer can't be used with input surface");
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             }
@@ -4745,6 +5108,9 @@
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             if (!isExecuting()) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "queueInputBuffer() is valid only at Executing states; currently %s",
+                        apiStateString().c_str()));
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
@@ -4772,7 +5138,7 @@
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             if (mFlags & kFlagIsAsync) {
-                ALOGE("dequeueOutputBuffer can't be used in async mode");
+                mErrorLog.log(LOG_TAG, "dequeueOutputBuffer can't be used in async mode");
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             }
@@ -4839,6 +5205,9 @@
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             if (!isExecuting()) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "releaseOutputBuffer() is valid only at Executing states; currently %s",
+                        apiStateString().c_str()));
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
@@ -4862,7 +5231,15 @@
 
         case kWhatSignalEndOfInputStream:
         {
-            if (!isExecuting() || !mHaveInputSurface) {
+            if (!isExecuting()) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "signalEndOfInputStream() is valid only at Executing states; currently %s",
+                        apiStateString().c_str()));
+                PostReplyWithError(msg, INVALID_OPERATION);
+                break;
+            } else if (!mHaveInputSurface) {
+                mErrorLog.log(
+                        LOG_TAG, "signalEndOfInputStream() called without an input surface set");
                 PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
@@ -4886,7 +5263,14 @@
         {
             sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
-            if (!isExecuting() || (mFlags & kFlagIsAsync)) {
+            if (!isExecuting()) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "getInput/OutputBuffers() is valid only at Executing states; currently %s",
+                        apiStateString().c_str()));
+                PostReplyWithError(replyID, INVALID_OPERATION);
+                break;
+            } else if (mFlags & kFlagIsAsync) {
+                mErrorLog.log(LOG_TAG, "getInput/OutputBuffers() is not supported with callbacks");
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
@@ -4912,6 +5296,8 @@
                 }
             }
 
+            mApiUsageMetrics.isArrayMode = true;
+
             (new AMessage)->postReply(replyID);
             break;
         }
@@ -4919,6 +5305,9 @@
         case kWhatFlush:
         {
             if (!isExecuting()) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "flush() is valid only at Executing states; currently %s",
+                        apiStateString().c_str()));
                 PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
@@ -4962,10 +5351,17 @@
             sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
 
-            if ((mState != CONFIGURED && mState != STARTING &&
-                 mState != STARTED && mState != FLUSHING &&
-                 mState != FLUSHED)
-                    || format == NULL) {
+            if (mState != CONFIGURED && mState != STARTING &&
+                    mState != STARTED && mState != FLUSHING &&
+                    mState != FLUSHED) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "getInput/OutputFormat() is valid at Executing states "
+                        "and Configured state; currently %s",
+                        apiStateString().c_str()));
+                PostReplyWithError(replyID, INVALID_OPERATION);
+                break;
+            } else if (format == NULL) {
+                mErrorLog.log(LOG_TAG, "Fatal error: format is not initialized");
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
@@ -5000,6 +5396,7 @@
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             if (mComponentName.empty()) {
+                mErrorLog.log(LOG_TAG, "Fatal error: name is not set");
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             }
@@ -5147,15 +5544,29 @@
         postActivityNotificationIfPossible();
     }
 
-    // Notify mCrypto of video resolution changes
-    if (mCrypto != NULL) {
-        int32_t left, top, right, bottom, width, height;
-        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
-            mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
-        } else if (mOutputFormat->findInt32("width", &width)
-                && mOutputFormat->findInt32("height", &height)) {
-            mCrypto->notifyResolution(width, height);
+    // Update the width and the height.
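+    // e.g. a "crop" rect of (0, 0, 1919, 1079) yields a 1920x1080 resolution (illustrative values).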
+    int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
+    bool resolutionChanged = false;
+    if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
+        mWidth = right - left + 1;
+        mHeight = bottom - top + 1;
+        resolutionChanged = true;
+    } else if (mOutputFormat->findInt32("width", &width) &&
+               mOutputFormat->findInt32("height", &height)) {
+        mWidth = width;
+        mHeight = height;
+        resolutionChanged = true;
+    }
+
+    // Notify mCrypto and the RM of video resolution changes
+    if (resolutionChanged) {
+        if (mCrypto != NULL) {
+            mCrypto->notifyResolution(mWidth, mHeight);
         }
+        ClientConfigParcel clientConfig;
+        initClientConfigParcel(clientConfig);
+        mResourceManagerProxy->notifyClientConfigChanged(clientConfig);
+        mReliabilityContextMetrics.resolutionChangeCount++;
     }
 
     updateHdrMetrics(false /* isConfig */);
@@ -5167,7 +5578,7 @@
     size_t i = 0;
     for (;;) {
         sp<ABuffer> csd;
-        if (!format->findBuffer(AStringPrintf("csd-%u", i).c_str(), &csd)) {
+        if (!format->findBuffer(base::StringPrintf("csd-%zu", i).c_str(), &csd)) {
             break;
         }
         if (csd->size() == 0) {
@@ -5202,7 +5613,7 @@
                 }
                 sDealer = new MemoryDealer(
                         newDealerCapacity,
-                        AStringPrintf("CSD(%dMB)", newDealerCapacity / 1048576).c_str());
+                        base::StringPrintf("CSD(%zuMB)", newDealerCapacity / 1048576).c_str());
                 mem = sDealer->allocate(csd->size());
             }
             memcpy(mem->unsecurePointer(), csd->data(), csd->size());
@@ -5213,9 +5624,14 @@
                 FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
             C2WriteView view{block->map().get()};
             if (view.error() != C2_OK) {
+                mErrorLog.log(LOG_TAG, "Fatal error: failed to allocate and map a block");
                 return -EINVAL;
             }
             if (csd->size() > view.capacity()) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "Fatal error: allocated block is too small "
+                        "(csd size %zu; block cap %u)",
+                        csd->size(), view.capacity()));
                 return -EINVAL;
             }
             memcpy(view.base(), csd->data(), csd->size());
@@ -5226,10 +5642,16 @@
         const sp<MediaCodecBuffer> &codecInputData = info.mData;
 
         if (csd->size() > codecInputData->capacity()) {
+            mErrorLog.log(LOG_TAG, base::StringPrintf(
+                    "CSD is too large to fit in input buffer "
+                    "(csd size %zu; buffer cap %zu)",
+                    csd->size(), codecInputData->capacity()));
             return -EINVAL;
         }
         if (codecInputData->data() == NULL) {
             ALOGV("Input buffer %zu is not properly allocated", bufferIndex);
+            mErrorLog.log(LOG_TAG, base::StringPrintf(
+                    "Fatal error: input buffer %zu is not properly allocated", bufferIndex));
             return -EINVAL;
         }
 
@@ -5282,6 +5704,7 @@
 
         mActivityNotify.clear();
         mCallback.clear();
+        mErrorLog.clear();
     }
 
     if (newState == UNINITIALIZED) {
@@ -5402,6 +5825,7 @@
         if (!hasCryptoOrDescrambler()) {
             ALOGE("[%s] queuing secure buffer without mCrypto or mDescrambler!",
                     mComponentName.c_str());
+            mErrorLog.log(LOG_TAG, "queuing secure buffer without mCrypto or mDescrambler!");
             return -EINVAL;
         }
         CHECK(msg->findPointer("subSamples", (void **)&subSamples));
@@ -5424,12 +5848,21 @@
     }
 
     if (index >= mPortBuffers[kPortIndexInput].size()) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "index out of range (index=%zu)", mPortBuffers[kPortIndexInput].size()));
         return -ERANGE;
     }
 
     BufferInfo *info = &mPortBuffers[kPortIndexInput][index];
     sp<MediaCodecBuffer> buffer = info->mData;
-    if (buffer == nullptr || !info->mOwnedByClient) {
+    if (buffer == nullptr) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "Fatal error: failed to fetch buffer for index %zu", index));
+        return -EACCES;
+    }
+    if (!info->mOwnedByClient) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "client does not own the buffer #%zu", index));
         return -EACCES;
     }
     auto setInputBufferParams = [this, &buffer]
@@ -5510,10 +5943,26 @@
         if (c2Buffer) {
             err = mBufferChannel->attachBuffer(c2Buffer, buffer);
         } else if (memory) {
+            AString errorDetailMsg;
             err = mBufferChannel->attachEncryptedBuffer(
                     memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
-                    offset, subSamples, numSubSamples, buffer);
+                    offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
+            if (err != OK && hasCryptoOrDescrambler()
+                    && (mFlags & kFlagUseCryptoAsync)) {
+                // attach the error detail captured above to the crypto error notification
+                sp<AMessage> cryptoErrorInfo = new AMessage();
+                buildCryptoInfoAMessage(cryptoErrorInfo, CryptoAsync::kActionDecrypt);
+                cryptoErrorInfo->setInt32("err", err);
+                cryptoErrorInfo->setInt32("actionCode", ACTION_CODE_FATAL);
+                cryptoErrorInfo->setString("errorDetail", errorDetailMsg);
+                onCryptoError(cryptoErrorInfo);
+                // deliver the crypto error through the callback, but still have the codec
+                // signal IllegalStateException to the caller.
+                err = INVALID_OPERATION;
+            }
         } else {
+            mErrorLog.log(LOG_TAG, "Fatal error: invalid queue request without a buffer");
             err = UNKNOWN_ERROR;
         }
         if (err == OK && !buffer->asC2Buffer()
@@ -5534,12 +5983,17 @@
         offset = buffer->offset();
         size = buffer->size();
         if (err != OK) {
-            ALOGI("block model buffer attach failed: err = %s (%d)",
-                    StrMediaError(err).c_str(), err);
+            ALOGE("block model buffer attach failed: err = %s (%d)",
+                  StrMediaError(err).c_str(), err);
             return err;
         }
     }
+
     if (offset + size > buffer->capacity()) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "buffer offset and size goes beyond the capacity: "
+                "offset=%zu, size=%zu, cap=%zu",
+                offset, size, buffer->capacity()));
         return -EINVAL;
     }
     buffer->setRange(offset, size);
@@ -5548,6 +6002,10 @@
     if (err != OK) {
         return -EINVAL;
     }
+
+    int32_t usedMaxInputSize = mApiUsageMetrics.inputBufferSize.usedMax;
+    mApiUsageMetrics.inputBufferSize.usedMax = size > usedMaxInputSize ? size : usedMaxInputSize;
+
     if (hasCryptoOrDescrambler() && !c2Buffer && !memory) {
         AString *errorDetailMsg;
         CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
@@ -5593,6 +6051,10 @@
     }
 
     if (err == OK) {
+        if (mTunneled && (flags & (BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_END_OF_STREAM)) == 0) {
+            mVideoRenderQualityTracker.onTunnelFrameQueued(timeUs);
+        }
+
         // synchronization boundary for getBufferAndFormat
         Mutex::Autolock al(mBufferLock);
         info->mOwnedByClient = false;
@@ -5623,8 +6085,8 @@
         if (it->getRenderTimeNs() < 0) {
             continue; // dropped frame from tracking
         }
-        msg->setInt64(AStringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
-        msg->setInt64(AStringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
+        msg->setInt64(base::StringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
+        msg->setInt64(base::StringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
         ++index;
     }
     return index;
@@ -5640,16 +6102,28 @@
     }
 
     if (!isExecuting()) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "releaseOutputBuffer() is valid at Executing states; currently %s",
+                apiStateString().c_str()));
         return -EINVAL;
     }
 
     if (index >= mPortBuffers[kPortIndexOutput].size()) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "index out of range (index=%zu)", mPortBuffers[kPortIndexOutput].size()));
         return -ERANGE;
     }
 
     BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
 
-    if (info->mData == nullptr || !info->mOwnedByClient) {
+    if (!info->mOwnedByClient) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "client does not own the buffer #%zu", index));
+        return -EACCES;
+    }
+    if (info->mData == nullptr) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "Fatal error: null buffer for index %zu", index));
         return -EACCES;
     }
 
@@ -5663,7 +6137,7 @@
     }
 
     if (render && buffer->size() != 0) {
-        int64_t mediaTimeUs = -1;
+        int64_t mediaTimeUs = INT64_MIN;
         buffer->meta()->findInt64("timeUs", &mediaTimeUs);
 
         bool noRenderTime = false;
@@ -5693,7 +6167,12 @@
 
         // If rendering to the screen, then schedule a time in the future to poll to see if this
         // frame was ever rendered to seed onFrameRendered callbacks.
-        if (mIsSurfaceToScreen) {
+        if (mIsSurfaceToDisplay) {
+            if (mediaTimeUs != INT64_MIN) {
+                noRenderTime ? mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs)
+                             : mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs,
+                                                                          renderTimeNs);
+            }
             // can't initialize this in the constructor because the Looper parent class needs to be
             // initialized first
             if (mMsgPollForRenderedBuffers == nullptr) {
@@ -5716,13 +6195,19 @@
         status_t err = mBufferChannel->renderOutputBuffer(buffer, renderTimeNs);
 
         if (err == NO_INIT) {
-            ALOGE("rendering to non-initilized(obsolete) surface");
+            mErrorLog.log(LOG_TAG, "rendering to non-initialized(obsolete) surface");
             return err;
         }
         if (err != OK) {
             ALOGI("rendring output error %d", err);
         }
     } else {
+        if (mIsSurfaceToDisplay && buffer->size() != 0) {
+            int64_t mediaTimeUs = INT64_MIN;
+            if (buffer->meta()->findInt64("timeUs", &mediaTimeUs)) {
+                mVideoRenderQualityTracker.onFrameSkipped(mediaTimeUs);
+            }
+        }
         mBufferChannel->discardBuffer(buffer);
     }
 
@@ -5789,7 +6274,7 @@
 
         // in case we don't connect, ensure that we don't signal the surface is
         // connected to the screen
-        mIsSurfaceToScreen = false;
+        mIsSurfaceToDisplay = false;
 
         err = nativeWindowConnect(surface.get(), "connectToSurface");
         if (err == OK) {
@@ -5819,7 +6304,7 @@
             // keep track whether or not the buffers of the connected surface go to the screen
             int result = 0;
             surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
-            mIsSurfaceToScreen = result != 0;
+            mIsSurfaceToDisplay = result != 0;
         }
     }
     // do not return ALREADY_EXISTS unless surfaces are the same
@@ -5837,7 +6322,7 @@
         }
         // assume disconnected even on error
         mSurface.clear();
-        mIsSurfaceToScreen = false;
+        mIsSurfaceToDisplay = false;
     }
     return err;
 }
@@ -5999,12 +6484,14 @@
             memcpy(csd->data() + 4, nalStart, nalSize);
 
             mOutputFormat->setBuffer(
-                    AStringPrintf("csd-%u", csdIndex).c_str(), csd);
+                    base::StringPrintf("csd-%u", csdIndex).c_str(), csd);
 
             ++csdIndex;
         }
 
         if (csdIndex != 2) {
+            mErrorLog.log(LOG_TAG, base::StringPrintf(
+                    "codec config data contains %u NAL units; expected 2.", csdIndex));
             return ERROR_MALFORMED;
         }
     } else {
@@ -6046,6 +6533,32 @@
     mDeferredMessages.clear();
 }
 
+std::string MediaCodec::apiStateString() {
+    const char *rval = NULL;
+    char rawbuffer[16]; // room for "%d"
+
+    switch (mState) {
+        case UNINITIALIZED:
+            rval = (mFlags & kFlagStickyError) ? "at Error state" : "at Released state";
+            break;
+        case INITIALIZING: rval = "while constructing"; break;
+        case INITIALIZED: rval = "at Uninitialized state"; break;
+        case CONFIGURING: rval = "during configure()"; break;
+        case CONFIGURED: rval = "at Configured state"; break;
+        case STARTING: rval = "during start()"; break;
+        case STARTED: rval = "at Running state"; break;
+        case FLUSHING: rval = "during flush()"; break;
+        case FLUSHED: rval = "at Flushed state"; break;
+        case STOPPING: rval = "during stop()"; break;
+        case RELEASING: rval = "during release()"; break;
+        default:
+            snprintf(rawbuffer, sizeof(rawbuffer), "at %d", mState);
+            rval = rawbuffer;
+            break;
+    }
+    return rval;
+}
+
 std::string MediaCodec::stateString(State state) {
     const char *rval = NULL;
     char rawbuffer[16]; // room for "%d"
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 0536f2a..28ca9ff 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -298,6 +298,9 @@
     size_t psshsize;
     if (meta->findData(kKeyPssh, &type, &pssh, &psshsize)) {
         sp<ABuffer> buf = new ABuffer(psshsize);
+        if (buf->data() == nullptr) {
+            return -ENOMEM;
+        }
         memcpy(buf->data(), pssh, psshsize);
         (*format)->setBuffer("pssh", buf);
     }
@@ -308,6 +311,9 @@
     if (meta->findData(kKeySlowMotionMarkers, &type, &slomoMarkers, &slomoMarkersSize)
             && slomoMarkersSize > 0) {
         sp<ABuffer> buf = new ABuffer(slomoMarkersSize);
+        if (buf->data() == nullptr) {
+            return -ENOMEM;
+        }
         memcpy(buf->data(), slomoMarkers, slomoMarkersSize);
         (*format)->setBuffer("slow-motion-markers", buf);
     }
@@ -639,9 +645,12 @@
         numPageSamples = -1;
     }
 
+    // the caller has verified there is sufficient space; insert numPageSamples
+    // and account for the extra bytes in the buffer's range.
     memcpy((uint8_t *)buffer->data() + mbuf->range_length(),
            &numPageSamples,
            sizeof(numPageSamples));
+    buffer->setRange(buffer->offset(), buffer->size() + sizeof(numPageSamples));
 
     uint32_t type;
     const void *data;
@@ -690,6 +699,8 @@
 
     ssize_t minIndex = fetchAllTrackSamples();
 
+    buffer->setRange(0, 0);     // start with an empty buffer
+
     if (minIndex < 0) {
         return ERROR_END_OF_STREAM;
     }
@@ -705,25 +716,25 @@
         sampleSize += sizeof(int32_t);
     }
 
+    // capacity() is ok since we cleared out the buffer
     if (buffer->capacity() < sampleSize) {
         return -ENOMEM;
     }
 
+    const size_t srclen = it->mBuffer->range_length();
     const uint8_t *src =
         (const uint8_t *)it->mBuffer->data()
             + it->mBuffer->range_offset();
 
-    memcpy((uint8_t *)buffer->data(), src, it->mBuffer->range_length());
+    memcpy((uint8_t *)buffer->data(), src, srclen);
+    buffer->setRange(0, srclen);
 
     status_t err = OK;
     if (info->mTrackFlags & kIsVorbis) {
+        // adjusts range when it inserts the extra bits
         err = appendVorbisNumPageSamples(it->mBuffer, buffer);
     }
 
-    if (err == OK) {
-        buffer->setRange(0, sampleSize);
-    }
-
     return err;
 }
 
diff --git a/media/libstagefright/VideoFrameSchedulerBase.cpp b/media/libstagefright/VideoFrameSchedulerBase.cpp
index 0d1517b..965014c 100644
--- a/media/libstagefright/VideoFrameSchedulerBase.cpp
+++ b/media/libstagefright/VideoFrameSchedulerBase.cpp
@@ -451,7 +451,7 @@
                 return origRenderTime;
             }
 
-            ATRACE_INT("FRAME_VSYNCS", vsyncsForLastFrame);
+            ATRACE_INT64("FRAME_VSYNCS", vsyncsForLastFrame);
         }
         mLastVsyncTime = nextVsyncTime;
     }
@@ -460,7 +460,7 @@
     renderTime -= (renderTime - mVsyncTime) % mVsyncPeriod;
     renderTime += mVsyncPeriod / 2;
     ALOGV("adjusting render: %lld => %lld", (long long)origRenderTime, (long long)renderTime);
-    ATRACE_INT("FRAME_FLIP_IN(ms)", (renderTime - now) / 1000000);
+    ATRACE_INT64("FRAME_FLIP_IN(ms)", (renderTime - now) / 1000000);
     return renderTime;
 }
 
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
new file mode 100644
index 0000000..4f12a37
--- /dev/null
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -0,0 +1,739 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "VideoRenderQualityTracker"
+#include <utils/Log.h>
+
+#include <media/stagefright/VideoRenderQualityTracker.h>
+
+#include <assert.h>
+#include <charconv>
+#include <cmath>
+#include <stdio.h>
+#include <sys/time.h>
+
+#include <android-base/parsebool.h>
+#include <android-base/parseint.h>
+
+namespace android {
+
+using android::base::ParseBoolResult;
+
+static constexpr float FRAME_RATE_UNDETERMINED = VideoRenderQualityMetrics::FRAME_RATE_UNDETERMINED;
+static constexpr float FRAME_RATE_24_3_2_PULLDOWN =
+        VideoRenderQualityMetrics::FRAME_RATE_24_3_2_PULLDOWN;
+
+typedef VideoRenderQualityTracker::Configuration::GetServerConfigurableFlagFn
+        GetServerConfigurableFlagFn;
+
+static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
+                                      char const *flagNameSuffix, bool *value) {
+    std::string flagName("render_metrics_");
+    flagName.append(flagNameSuffix);
+    std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName,
+                                                          *value ? "true" : "false");
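+    // Illustrative example (flag name assumed from the prefix above): a flagNameSuffix of
+    // "enabled" queries "render_metrics_enabled" in the "media_native" namespace, falling back
+    // to the current value as the default when the flag is unset.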
+    switch (android::base::ParseBool(valueStr)) {
+    case ParseBoolResult::kTrue: *value = true; break;
+    case ParseBoolResult::kFalse: *value = false; break;
+    case ParseBoolResult::kError:
+        ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
+              valueStr.c_str());
+        break;
+    }
+}
+
+static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
+                                      char const *flagNameSuffix, int32_t *value) {
+    char defaultStr[12]; // room for a sign, up to 10 digits, and the terminating NUL
+    snprintf(defaultStr, sizeof(defaultStr), "%d", int(*value));
+    std::string flagName("render_metrics_");
+    flagName.append(flagNameSuffix);
+    std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName, defaultStr);
+    if (!android::base::ParseInt(valueStr.c_str(), value) || valueStr.size() == 0) {
+        ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
+              valueStr.c_str());
+        return;
+    }
+}
+
+template<typename T>
+static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
+                                      char const *flagNameSuffix, std::vector<T> *value) {
+    std::stringstream sstr;
+    for (int i = 0; i < value->size(); ++i) {
+        if (i != 0) {
+            sstr << ",";
+        }
+        sstr << (*value)[i];
+    }
+    std::string flagName("render_metrics_");
+    flagName.append(flagNameSuffix);
+    std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName, sstr.str());
+    if (valueStr.size() == 0) {
+        return;
+    }
+    // note: using android::base::Tokenize fails to catch parsing failures for values ending in ','
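+    // Illustrative example (values assumed): a flag value of "1,20,40" parses to {1, 20, 40},
+    // while a trailing comma such as "1,20," fails std::from_chars below and keeps the default.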
+    std::vector<T> newValues;
+    const char *p = valueStr.c_str();
+    const char *last = p + valueStr.size();
+    while (p != last) {
+        if (*p == ',') {
+            p++;
+        }
+        T value = -1;
+        auto [ptr, error] = std::from_chars(p, last, value);
+        if (error == std::errc::invalid_argument || error == std::errc::result_out_of_range) {
+            ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
+                  valueStr.c_str());
+            return;
+        }
+        p = ptr;
+        newValues.push_back(value);
+    }
+    *value = std::move(newValues);
+}
+
+VideoRenderQualityMetrics::VideoRenderQualityMetrics() {
+    clear();
+}
+
+void VideoRenderQualityMetrics::clear() {
+    firstRenderTimeUs = 0;
+    frameReleasedCount = 0;
+    frameRenderedCount = 0;
+    frameDroppedCount = 0;
+    frameSkippedCount = 0;
+    contentFrameRate = FRAME_RATE_UNDETERMINED;
+    desiredFrameRate = FRAME_RATE_UNDETERMINED;
+    actualFrameRate = FRAME_RATE_UNDETERMINED;
+    freezeEventCount = 0;
+    freezeDurationMsHistogram.clear();
+    freezeDistanceMsHistogram.clear();
+    judderEventCount = 0;
+    judderScoreHistogram.clear();
+}
+
+VideoRenderQualityTracker::Configuration
+        VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
+            GetServerConfigurableFlagFn getServerConfigurableFlagFn) {
+    VideoRenderQualityTracker::Configuration c;
+#define getFlag(FIELDNAME, FLAGNAME) \
+    getServerConfigurableFlag(getServerConfigurableFlagFn, FLAGNAME, &c.FIELDNAME)
+    getFlag(enabled, "enabled");
+    getFlag(areSkippedFramesDropped, "are_skipped_frames_dropped");
+    getFlag(maxExpectedContentFrameDurationUs, "max_expected_content_frame_duration_us");
+    getFlag(frameRateDetectionToleranceUs, "frame_rate_detection_tolerance_us");
+    getFlag(liveContentFrameDropToleranceUs, "live_content_frame_drop_tolerance_us");
+    getFlag(freezeDurationMsHistogramBuckets, "freeze_duration_ms_histogram_buckets");
+    getFlag(freezeDurationMsHistogramToScore, "freeze_duration_ms_histogram_to_score");
+    getFlag(freezeDistanceMsHistogramBuckets, "freeze_distance_ms_histogram_buckets");
+    getFlag(freezeEventMax, "freeze_event_max");
+    getFlag(freezeEventDetailsMax, "freeze_event_details_max");
+    getFlag(freezeEventDistanceToleranceMs, "freeze_event_distance_tolerance_ms");
+    getFlag(judderErrorToleranceUs, "judder_error_tolerance_us");
+    getFlag(judderScoreHistogramBuckets, "judder_score_histogram_buckets");
+    getFlag(judderScoreHistogramToScore, "judder_score_histogram_to_score");
+    getFlag(judderEventMax, "judder_event_max");
+    getFlag(judderEventDetailsMax, "judder_event_details_max");
+    getFlag(judderEventDistanceToleranceMs, "judder_event_distance_tolerance_ms");
+#undef getFlag
+    return c;
+}
+
+VideoRenderQualityTracker::Configuration::Configuration() {
+    enabled = true;
+
+    // Assume that the app is skipping frames because it's detected that the frame couldn't be
+    // rendered in time.
+    areSkippedFramesDropped = true;
+
+    // 400ms is 8 frames at 20 frames per second and 24 frames at 60 frames per second
+    maxExpectedContentFrameDurationUs = 400 * 1000;
+
+    // Allow for 2 milliseconds of deviation when detecting frame rates
+    frameRateDetectionToleranceUs = 2 * 1000;
+
+    // Allow for a tolerance of 200 milliseconds for determining if we moved forward in content time
+    // because of frame drops for live content, or because the user is seeking.
+    liveContentFrameDropToleranceUs = 200 * 1000;
+
+    // Freeze configuration
+    freezeDurationMsHistogramBuckets = {1, 20, 40, 60, 80, 100, 120, 150, 175, 225, 300, 400, 500};
+    freezeDurationMsHistogramToScore = {1,  1,  1,  1,  1,   1,   1,   1,   1,   1,   1,   1,   1};
+    freezeDistanceMsHistogramBuckets = {0, 20, 100, 400, 1000, 2000, 3000, 4000, 8000, 15000, 30000,
+                                        60000};
+    freezeEventMax = 0; // enabled only when debugging
+    freezeEventDetailsMax = 20;
+    freezeEventDistanceToleranceMs = 60000; // lump freeze occurrences together when 60s or less
+
+    // Judder configuration
+    judderErrorToleranceUs = 2000;
+    judderScoreHistogramBuckets = {1, 4, 5, 9, 11, 20, 30, 40, 50, 60, 70, 80};
+    judderScoreHistogramToScore = {1, 1, 1, 1,  1,  1,  1,  1,  1,  1,  1,  1};
+    judderEventMax = 0; // enabled only when debugging
+    judderEventDetailsMax = 20;
+    judderEventDistanceToleranceMs = 5000; // lump judder occurrences together when 5s or less
+}
+
+VideoRenderQualityTracker::VideoRenderQualityTracker() : mConfiguration(Configuration()) {
+    configureHistograms(mMetrics, mConfiguration);
+    clear();
+}
+
+VideoRenderQualityTracker::VideoRenderQualityTracker(const Configuration &configuration) :
+        mConfiguration(configuration) {
+    configureHistograms(mMetrics, mConfiguration);
+    clear();
+}
+
+void VideoRenderQualityTracker::onTunnelFrameQueued(int64_t contentTimeUs) {
+    if (!mConfiguration.enabled) {
+        return;
+    }
+
+    // Since P-frames are queued out of order, hold onto the P-frame until we can track it in
+    // render order. This only works because it depends on today's encoding algorithms that only
+    // allow B-frames to refer to ONE P-frame that comes after it. If the cardinality of P-frames
+    // in a single mini-GOP is increased, this algorithm breaks down.
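+    // Illustrative example (content times assumed): frames queued with content times of
+    // 0, 100, 33 and 66 ms (an I-frame, a P-frame, then two B-frames) keep the P-frame held
+    // while the B-frames are released immediately, so tracking follows render order:
+    // 0, 33, 66, and then 100 once the next frame is queued.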
+    if (mTunnelFrameQueuedContentTimeUs == -1) {
+        mTunnelFrameQueuedContentTimeUs = contentTimeUs;
+    } else if (contentTimeUs < mTunnelFrameQueuedContentTimeUs) {
+        onFrameReleased(contentTimeUs, 0);
+    } else {
+        onFrameReleased(mTunnelFrameQueuedContentTimeUs, 0);
+        mTunnelFrameQueuedContentTimeUs = contentTimeUs;
+    }
+}
+
+void VideoRenderQualityTracker::onFrameSkipped(int64_t contentTimeUs) {
+    if (!mConfiguration.enabled) {
+        return;
+    }
+
+    // Frames skipped at the beginning shouldn't really be counted as skipped frames, since the
+    // app might be seeking to a starting point that isn't the first key frame.
+    if (mLastRenderTimeUs == -1) {
+        return;
+    }
+
+    resetIfDiscontinuity(contentTimeUs, -1);
+
+    // Frames skipped at the end of playback shouldn't be counted as skipped frames, since the
+    // app could be terminating the playback. The pending count will be added to the metrics if and
+    // when the next frame is rendered.
+    mPendingSkippedFrameContentTimeUsList.push_back(contentTimeUs);
+}
+
+void VideoRenderQualityTracker::onFrameReleased(int64_t contentTimeUs) {
+    onFrameReleased(contentTimeUs, nowUs() * 1000);
+}
+
+void VideoRenderQualityTracker::onFrameReleased(int64_t contentTimeUs,
+                                                int64_t desiredRenderTimeNs) {
+    if (!mConfiguration.enabled) {
+        return;
+    }
+
+    int64_t desiredRenderTimeUs = desiredRenderTimeNs / 1000;
+    resetIfDiscontinuity(contentTimeUs, desiredRenderTimeUs);
+    mMetrics.frameReleasedCount++;
+    mNextExpectedRenderedFrameQueue.push({contentTimeUs, desiredRenderTimeUs});
+    mLastContentTimeUs = contentTimeUs;
+}
+
+void VideoRenderQualityTracker::onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs,
+                                                FreezeEvent *freezeEventOut,
+                                                JudderEvent *judderEventOut) {
+    if (!mConfiguration.enabled) {
+        return;
+    }
+
+    int64_t actualRenderTimeUs = actualRenderTimeNs / 1000;
+
+    if (mLastRenderTimeUs != -1) {
+        mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
+    }
+    // Now that a frame has been rendered, the previously skipped frames can be processed as skipped
+    // frames since the app is not skipping them to terminate playback.
+    for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
+        processMetricsForSkippedFrame(contentTimeUs);
+    }
+    mPendingSkippedFrameContentTimeUsList = {};
+
+    // We can render a pending queued frame if it's the last frame of the video, so release it
+    // immediately.
+    if (contentTimeUs == mTunnelFrameQueuedContentTimeUs && mTunnelFrameQueuedContentTimeUs != -1) {
+        onFrameReleased(mTunnelFrameQueuedContentTimeUs, 0);
+        mTunnelFrameQueuedContentTimeUs = -1;
+    }
+
+    static const FrameInfo noFrame = {-1, -1};
+    FrameInfo nextExpectedFrame = noFrame;
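+    // Anything released before the rendered frame but never rendered counts as dropped. For
+    // example (content times assumed): if frames at 33333 us and 66666 us were released but the
+    // next rendered frame is 100000 us, the two earlier frames are recorded as drops below.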
+    while (!mNextExpectedRenderedFrameQueue.empty()) {
+        nextExpectedFrame = mNextExpectedRenderedFrameQueue.front();
+        mNextExpectedRenderedFrameQueue.pop();
+        // Happy path - the rendered frame is what we expected it to be
+        if (contentTimeUs == nextExpectedFrame.contentTimeUs) {
+            break;
+        }
+        // This isn't really supposed to happen - the next rendered frame should be the expected
+        // frame, or, if there's frame drops, it will be a frame later in the content stream
+        if (contentTimeUs < nextExpectedFrame.contentTimeUs) {
+            ALOGW("Rendered frame is earlier than the next expected frame (%lld, %lld)",
+                  (long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs);
+            break;
+        }
+        processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs,
+                                      nextExpectedFrame.desiredRenderTimeUs);
+    }
+    processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
+                                   nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs,
+                                   freezeEventOut, judderEventOut);
+    mLastRenderTimeUs = actualRenderTimeUs;
+}
+
+VideoRenderQualityTracker::FreezeEvent VideoRenderQualityTracker::getAndResetFreezeEvent() {
+    FreezeEvent event = std::move(mFreezeEvent);
+    mFreezeEvent.valid = false;
+    return event;
+}
+
+VideoRenderQualityTracker::JudderEvent VideoRenderQualityTracker::getAndResetJudderEvent() {
+    JudderEvent event = std::move(mJudderEvent);
+    mJudderEvent.valid = false;
+    return event;
+}
+
+const VideoRenderQualityMetrics &VideoRenderQualityTracker::getMetrics() {
+    if (!mConfiguration.enabled) {
+        return mMetrics;
+    }
+
+    mMetrics.freezeScore = 0;
+    if (mConfiguration.freezeDurationMsHistogramToScore.size() ==
+        mMetrics.freezeDurationMsHistogram.size()) {
+        for (int i = 0; i < mMetrics.freezeDurationMsHistogram.size(); ++i) {
+            mMetrics.freezeScore += mMetrics.freezeDurationMsHistogram[i] *
+                    mConfiguration.freezeDurationMsHistogramToScore[i];
+        }
+    }
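+    // With the default to_score weights of all 1s, freezeScore effectively reduces to the number
+    // of freezes recorded in the histogram buckets (a rough interpretation of the weighted sum).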
+    mMetrics.freezeRate = float(double(mMetrics.freezeDurationMsHistogram.getSum()) /
+            mRenderDurationMs);
+
+    mMetrics.judderScore = 0;
+    if (mConfiguration.judderScoreHistogramToScore.size() == mMetrics.judderScoreHistogram.size()) {
+        for (int i = 0; i < mMetrics.judderScoreHistogram.size(); ++i) {
+            mMetrics.judderScore += mMetrics.judderScoreHistogram[i] *
+                    mConfiguration.judderScoreHistogramToScore[i];
+        }
+    }
+    mMetrics.judderRate = float(double(mMetrics.judderScoreHistogram.getCount()) /
+            (mMetrics.frameReleasedCount + mMetrics.frameSkippedCount));
+
+    return mMetrics;
+}
+
+void VideoRenderQualityTracker::clear() {
+    mRenderDurationMs = 0;
+    mMetrics.clear();
+    resetForDiscontinuity();
+}
+
+void VideoRenderQualityTracker::resetForDiscontinuity() {
+    mLastContentTimeUs = -1;
+    mLastRenderTimeUs = -1;
+    mLastFreezeEndTimeUs = -1;
+    mLastJudderEndTimeUs = -1;
+    mWasPreviousFrameDropped = false;
+    mFreezeEvent.valid = false;
+    mJudderEvent.valid = false;
+
+    // Don't worry about tracking frame rendering times from now up until playback catches up to the
+    // discontinuity. While stuttering or freezing could be found in the next few frames, the impact
+    // on the user is minimal, so it is better to just keep things simple and not bother.
+    mNextExpectedRenderedFrameQueue = {};
+    mTunnelFrameQueuedContentTimeUs = -1;
+
+    // Ignore any frames that were skipped just prior to the discontinuity.
+    mPendingSkippedFrameContentTimeUsList = {};
+
+    // All frame durations can now be ignored since all bets are off on what the render
+    // durations should be after the discontinuity.
+    for (int i = 0; i < FrameDurationUs::SIZE; ++i) {
+        mActualFrameDurationUs[i] = -1;
+        mDesiredFrameDurationUs[i] = -1;
+        mContentFrameDurationUs[i] = -1;
+    }
+    mActualFrameDurationUs.priorTimestampUs = -1;
+    mDesiredFrameDurationUs.priorTimestampUs = -1;
+    mContentFrameDurationUs.priorTimestampUs = -1;
+}
+
+bool VideoRenderQualityTracker::resetIfDiscontinuity(int64_t contentTimeUs,
+                                                     int64_t desiredRenderTimeUs) {
+    if (mLastContentTimeUs == -1) {
+        resetForDiscontinuity();
+        return true;
+    }
+    if (contentTimeUs < mLastContentTimeUs) {
+        ALOGI("Video playback jumped %d ms backwards in content time (%d -> %d)",
+              int((mLastContentTimeUs - contentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
+              int(contentTimeUs / 1000));
+        resetForDiscontinuity();
+        return true;
+    }
+    if (contentTimeUs - mLastContentTimeUs > mConfiguration.maxExpectedContentFrameDurationUs) {
+        // The content frame duration could be long due to frame drops for live content. This can be
+        // detected by looking at the app's desired rendering duration. If the app's rendered frame
+        // duration is roughly the same as the content's frame duration, then it is assumed that
+        // the forward discontinuity is due to frame drops for live content. A false positive can
+        // occur if the time the user spends seeking is equal to the duration of the seek. This is
+        // very unlikely to occur in practice but CAN occur - the user starts seeking forward, gets
+        // distracted, and then returns to seeking forward.
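+        // Illustrative example (durations assumed): if content time jumps forward by 5 seconds
+        // and the desired render times also advance by roughly 5 seconds, the difference stays
+        // within liveContentFrameDropToleranceUs and the jump is treated as live-content frame
+        // drops rather than a seek, so no reset occurs.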
+        bool skippedForwardDueToLiveContentFrameDrops = false;
+        if (desiredRenderTimeUs != -1) {
+            int64_t contentFrameDurationUs = contentTimeUs - mLastContentTimeUs;
+            int64_t desiredFrameDurationUs = desiredRenderTimeUs - mLastRenderTimeUs;
+            skippedForwardDueToLiveContentFrameDrops =
+                    abs(contentFrameDurationUs - desiredFrameDurationUs) <
+                    mConfiguration.liveContentFrameDropToleranceUs;
+        }
+        if (!skippedForwardDueToLiveContentFrameDrops) {
+            ALOGI("Video playback jumped %d ms forward in content time (%d -> %d) ",
+                int((contentTimeUs - mLastContentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
+                int(contentTimeUs / 1000));
+            resetForDiscontinuity();
+            return true;
+        }
+    }
+    return false;
+}
+
+void VideoRenderQualityTracker::processMetricsForSkippedFrame(int64_t contentTimeUs) {
+    mMetrics.frameSkippedCount++;
+    if (mConfiguration.areSkippedFramesDropped) {
+        processMetricsForDroppedFrame(contentTimeUs, -1);
+        return;
+    }
+    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
+    updateFrameDurations(mDesiredFrameDurationUs, -1);
+    updateFrameDurations(mActualFrameDurationUs, -1);
+    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
+    mWasPreviousFrameDropped = false;
+}
+
+void VideoRenderQualityTracker::processMetricsForDroppedFrame(int64_t contentTimeUs,
+                                                              int64_t desiredRenderTimeUs) {
+    mMetrics.frameDroppedCount++;
+    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
+    updateFrameDurations(mDesiredFrameDurationUs, desiredRenderTimeUs);
+    updateFrameDurations(mActualFrameDurationUs, -1);
+    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
+    updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
+    mWasPreviousFrameDropped = true;
+}
+
+void VideoRenderQualityTracker::processMetricsForRenderedFrame(int64_t contentTimeUs,
+                                                               int64_t desiredRenderTimeUs,
+                                                               int64_t actualRenderTimeUs,
+                                                               FreezeEvent *freezeEventOut,
+                                                               JudderEvent *judderEventOut) {
+    // Capture the timestamp at which the first frame was rendered
+    if (mMetrics.firstRenderTimeUs == 0) {
+        mMetrics.firstRenderTimeUs = actualRenderTimeUs;
+    }
+
+    mMetrics.frameRenderedCount++;
+
+    // The content time is -1 when it was rendered after a discontinuity (e.g. seek) was detected.
+    // So, even though a frame was rendered, its impact on the user is insignificant, so don't do
+    // anything other than count it as a rendered frame.
+    if (contentTimeUs == -1) {
+        return;
+    }
+    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
+    updateFrameDurations(mDesiredFrameDurationUs, desiredRenderTimeUs);
+    updateFrameDurations(mActualFrameDurationUs, actualRenderTimeUs);
+    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
+    updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
+    updateFrameRate(mMetrics.actualFrameRate, mActualFrameDurationUs, mConfiguration);
+
+    // If the previous frame was dropped, there was a freeze if we've already rendered a frame
+    if (mWasPreviousFrameDropped && mLastRenderTimeUs != -1) {
+        processFreeze(actualRenderTimeUs, mLastRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent,
+                      mMetrics, mConfiguration);
+        mLastFreezeEndTimeUs = actualRenderTimeUs;
+    }
+    maybeCaptureFreezeEvent(actualRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent, mMetrics,
+                            mConfiguration, freezeEventOut);
+
+    // Judder is computed on the prior video frame, not the current video frame
+    int64_t judderScore = computePreviousJudderScore(mActualFrameDurationUs,
+                                                     mContentFrameDurationUs,
+                                                     mConfiguration);
+    if (judderScore != 0) {
+        int64_t judderTimeUs = actualRenderTimeUs - mActualFrameDurationUs[0] -
+                mActualFrameDurationUs[1];
+        processJudder(judderScore, judderTimeUs, mLastJudderEndTimeUs, mActualFrameDurationUs,
+                      mContentFrameDurationUs, mJudderEvent, mMetrics, mConfiguration);
+        mLastJudderEndTimeUs = judderTimeUs + mActualFrameDurationUs[1];
+    }
+    maybeCaptureJudderEvent(actualRenderTimeUs, mLastJudderEndTimeUs, mJudderEvent, mMetrics,
+                            mConfiguration, judderEventOut);
+
+    mWasPreviousFrameDropped = false;
+}
+
+void VideoRenderQualityTracker::processFreeze(int64_t actualRenderTimeUs, int64_t lastRenderTimeUs,
+                                              int64_t lastFreezeEndTimeUs, FreezeEvent &e,
+                                              VideoRenderQualityMetrics &m,
+                                              const Configuration &c) {
+    int32_t durationMs = int32_t((actualRenderTimeUs - lastRenderTimeUs) / 1000);
+    m.freezeDurationMsHistogram.insert(durationMs);
+    int32_t distanceMs = -1;
+    if (lastFreezeEndTimeUs != -1) {
+        // The distance to the last freeze is measured from the end of the last freeze to the start
+        // of this freeze.
+        distanceMs = int32_t((lastRenderTimeUs - lastFreezeEndTimeUs) / 1000);
+        m.freezeDistanceMsHistogram.insert(distanceMs);
+    }
+    if (c.freezeEventMax > 0) {
+        if (e.valid == false) {
+            m.freezeEventCount++;
+            e.valid = true;
+            e.initialTimeUs = lastRenderTimeUs;
+            e.durationMs = 0;
+            e.sumDurationMs = 0;
+            e.sumDistanceMs = 0;
+            e.count = 0;
+            e.details.durationMs.clear();
+            e.details.distanceMs.clear();
+        // The first occurrence in the event should not have the distance recorded as part of the
+        // event, because it belongs in a vacuum between two events. However we still want the
+        // distance recorded in the details to calculate times in all details in all events.
+        } else if (distanceMs != -1) {
+            e.durationMs += distanceMs;
+            e.sumDistanceMs += distanceMs;
+        }
+        e.durationMs += durationMs;
+        e.count++;
+        e.sumDurationMs += durationMs;
+        if (e.details.durationMs.size() < c.freezeEventDetailsMax) {
+            e.details.durationMs.push_back(durationMs);
+            e.details.distanceMs.push_back(distanceMs); // -1 for first detail in the first event
+        }
+    }
+}
+
+void VideoRenderQualityTracker::maybeCaptureFreezeEvent(int64_t actualRenderTimeUs,
+                                                        int64_t lastFreezeEndTimeUs, FreezeEvent &e,
+                                                        const VideoRenderQualityMetrics & m,
+                                                        const Configuration &c,
+                                                        FreezeEvent *freezeEventOut) {
+    if (lastFreezeEndTimeUs == -1 || !e.valid) {
+        return;
+    }
+    // Future freeze occurrences are still pulled into the current freeze event if under tolerance
+    int64_t distanceMs = (actualRenderTimeUs - lastFreezeEndTimeUs) / 1000;
+    if (distanceMs < c.freezeEventDistanceToleranceMs) {
+        return;
+    }
+    if (freezeEventOut != nullptr && m.freezeEventCount <= c.freezeEventMax) {
+        *freezeEventOut = std::move(e);
+    }
+    // start recording a new freeze event after pushing the current one back to the caller
+    e.valid = false;
+}
+
+int64_t VideoRenderQualityTracker::computePreviousJudderScore(
+        const FrameDurationUs &actualFrameDurationUs,
+        const FrameDurationUs &contentFrameDurationUs,
+        const Configuration &c) {
+    // If the frame before or after was dropped, then don't generate a judder score, since any
+    // problems with frame drops are scored as a freeze instead.
+    if (actualFrameDurationUs[0] == -1 || actualFrameDurationUs[1] == -1 ||
+        actualFrameDurationUs[2] == -1) {
+        return 0;
+    }
+
+    // Don't score judder for when playback is paused or rebuffering (long frame duration), or if
+    // the player is intentionally playing each frame at a slow rate (e.g. half-rate). If the long
+    // frame duration was unintentional, it is assumed that this will be coupled with a later frame
+    // drop, and be scored as a freeze instead of judder.
+    if (actualFrameDurationUs[1] >= 2 * contentFrameDurationUs[1]) {
+        return 0;
+    }
+
+    // The judder score is based on the error of this frame
+    int64_t errorUs = actualFrameDurationUs[1] - contentFrameDurationUs[1];
+    // Don't score judder if the previous frame has high error, but this frame has low error
+    if (abs(errorUs) < c.judderErrorToleranceUs) {
+        return 0;
+    }
+
+    // Add a penalty if this frame has judder that amplifies the problem introduced by previous
+    // judder, instead of catching up for the previous judder (50, 16, 16, 50) vs (50, 16, 50, 16)
+    int64_t previousErrorUs = actualFrameDurationUs[2] - contentFrameDurationUs[2];
+    // Don't add the penalty for errors from the previous frame if the previous frame has low error
+    if (abs(previousErrorUs) >= c.judderErrorToleranceUs) {
+        errorUs = abs(errorUs) + abs(errorUs + previousErrorUs);
+    }
+
+    // Avoid scoring judder for 3:2 pulldown or other sufficiently small frame duration errors
+    if (abs(errorUs) < contentFrameDurationUs[1] / 4) {
+        return 0;
+    }
+
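+    // Illustrative example (durations assumed): 30 fps content (~33333 us per frame) where one
+    // frame stays on screen for ~50000 us (one extra 60 Hz vsync) gives an error of ~16667 us,
+    // i.e. a score of 16, assuming the neighbouring frames were on time.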
+    return abs(errorUs) / 1000; // error in millis to keep numbers small
+}
+
+void VideoRenderQualityTracker::processJudder(int32_t judderScore, int64_t judderTimeUs,
+                                              int64_t lastJudderEndTime,
+                                              const FrameDurationUs &actualDurationUs,
+                                              const FrameDurationUs &contentDurationUs,
+                                              JudderEvent &e, VideoRenderQualityMetrics &m,
+                                              const Configuration &c) {
+    int32_t distanceMs = -1;
+    if (lastJudderEndTime != -1) {
+        distanceMs = int32_t((judderTimeUs - lastJudderEndTime) / 1000);
+    }
+    m.judderScoreHistogram.insert(judderScore);
+    if (c.judderEventMax > 0) {
+        if (!e.valid) {
+            m.judderEventCount++;
+            e.valid = true;
+            e.initialTimeUs = judderTimeUs;
+            e.durationMs = 0;
+            e.sumScore = 0;
+            e.sumDistanceMs = 0;
+            e.count = 0;
+            e.details.contentRenderDurationUs.clear();
+            e.details.actualRenderDurationUs.clear();
+            e.details.distanceMs.clear();
+        // The first occurrence in the event should not have the distance recorded as part of the
+        // event, because it belongs in a vacuum between two events. However we still want the
+        // distance recorded in the details to calculate the times using all details in all events.
+        } else if (distanceMs != -1) {
+            e.durationMs += distanceMs;
+            e.sumDistanceMs += distanceMs;
+        }
+        e.durationMs += actualDurationUs[1] / 1000;
+        e.count++;
+        e.sumScore += judderScore;
+        if (e.details.contentRenderDurationUs.size() < c.judderEventDetailsMax) {
+            e.details.actualRenderDurationUs.push_back(actualDurationUs[1]);
+            e.details.contentRenderDurationUs.push_back(contentDurationUs[1]);
+            e.details.distanceMs.push_back(distanceMs); // -1 for first detail in the first event
+        }
+    }
+}
+
+void VideoRenderQualityTracker::maybeCaptureJudderEvent(int64_t actualRenderTimeUs,
+                                                        int64_t lastJudderEndTimeUs, JudderEvent &e,
+                                                        const VideoRenderQualityMetrics &m,
+                                                        const Configuration &c,
+                                                        JudderEvent *judderEventOut) {
+    if (lastJudderEndTimeUs == -1 || !e.valid) {
+        return;
+    }
+    // Future judder occurrences are still pulled into the current judder event if under tolerance
+    int64_t distanceMs = (actualRenderTimeUs - lastJudderEndTimeUs) / 1000;
+    if (distanceMs < c.judderEventDistanceToleranceMs) {
+        return;
+    }
+    if (judderEventOut != nullptr && m.judderEventCount <= c.judderEventMax) {
+        *judderEventOut = std::move(e);
+    }
+    // start recording a new judder event after pushing the current one back to the caller
+    e.valid = false;
+}
+
+void VideoRenderQualityTracker::configureHistograms(VideoRenderQualityMetrics &m,
+                                                    const Configuration &c) {
+    m.freezeDurationMsHistogram.setup(c.freezeDurationMsHistogramBuckets);
+    m.freezeDistanceMsHistogram.setup(c.freezeDistanceMsHistogramBuckets);
+    m.judderScoreHistogram.setup(c.judderScoreHistogramBuckets);
+}
+
+int64_t VideoRenderQualityTracker::nowUs() {
+    struct timespec t;
+    t.tv_sec = t.tv_nsec = 0;
+    clock_gettime(CLOCK_MONOTONIC, &t);
+    return (t.tv_sec * 1000000000LL + t.tv_nsec) / 1000LL;
+}
+
+void VideoRenderQualityTracker::updateFrameDurations(FrameDurationUs &durationUs,
+                                                     int64_t newTimestampUs) {
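+    // Maintains a sliding window of recent frame durations. Illustrative example (timestamps
+    // assumed): timestamps of 0, 33333 and 66666 us leave durationUs[0] == durationUs[1] == 33333,
+    // while the slot for the first timestamp stays -1 because it had no prior timestamp.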
+    for (int i = FrameDurationUs::SIZE - 1; i > 0; --i) {
+        durationUs[i] = durationUs[i - 1];
+    }
+    if (newTimestampUs == -1) {
+        durationUs[0] = -1;
+    } else {
+        durationUs[0] = durationUs.priorTimestampUs == -1 ? -1 :
+                newTimestampUs - durationUs.priorTimestampUs;
+        durationUs.priorTimestampUs = newTimestampUs;
+    }
+}
+
+void VideoRenderQualityTracker::updateFrameRate(float &frameRate, const FrameDurationUs &durationUs,
+                                                const Configuration &c) {
+    float newFrameRate = detectFrameRate(durationUs, c);
+    if (newFrameRate != FRAME_RATE_UNDETERMINED) {
+        frameRate = newFrameRate;
+    }
+}
+
+float VideoRenderQualityTracker::detectFrameRate(const FrameDurationUs &durationUs,
+                                                 const Configuration &c) {
+    // At least 3 frames are necessary to detect stable frame rates
+    assert(FrameDurationUs::SIZE >= 3);
+    if (durationUs[0] == -1 || durationUs[1] == -1 || durationUs[2] == -1) {
+        return FRAME_RATE_UNDETERMINED;
+    }
+    // Only determine frame rate if the render durations are stable across 3 frames
+    if (abs(durationUs[0] - durationUs[1]) > c.frameRateDetectionToleranceUs ||
+        abs(durationUs[0] - durationUs[2]) > c.frameRateDetectionToleranceUs) {
+        return is32pulldown(durationUs, c) ? FRAME_RATE_24_3_2_PULLDOWN : FRAME_RATE_UNDETERMINED;
+    }
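+    // Illustrative example (durations assumed): three stable durations of ~33333 us yield a
+    // detected frame rate of ~30 fps.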
+    return 1000.0 * 1000.0 / durationUs[0];
+}
+
+bool VideoRenderQualityTracker::is32pulldown(const FrameDurationUs &durationUs,
+                                             const Configuration &c) {
+    // At least 5 frames are necessary to detect stable 3:2 pulldown
+    assert(FrameDurationUs::SIZE >= 5);
+    if (durationUs[0] == -1 || durationUs[1] == -1 || durationUs[2] == -1 || durationUs[3] == -1 ||
+        durationUs[4] == -1) {
+        return false;
+    }
+    // 3:2 pulldown expects that every other frame has identical duration...
+    if (abs(durationUs[0] - durationUs[2]) > c.frameRateDetectionToleranceUs ||
+        abs(durationUs[1] - durationUs[3]) > c.frameRateDetectionToleranceUs ||
+        abs(durationUs[0] - durationUs[4]) > c.frameRateDetectionToleranceUs) {
+        return false;
+    }
+    // ... for either 2 vsyncs or 3 vsyncs
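+    // Illustrative example (durations assumed): 24 fps film on a 60 Hz display alternates ~33333
+    // and ~50000 us frame durations (2 vsyncs, then 3), e.g. {33333, 50000, 33333, 50000, 33333},
+    // which satisfies both checks in this function.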
+    if ((abs(durationUs[0] - 33333) < c.frameRateDetectionToleranceUs &&
+         abs(durationUs[1] - 50000) < c.frameRateDetectionToleranceUs) ||
+        (abs(durationUs[0] - 50000) < c.frameRateDetectionToleranceUs &&
+         abs(durationUs[1] - 33333) < c.frameRateDetectionToleranceUs)) {
+        return true;
+    }
+    return false;
+}
+
+} // namespace android
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 9d2568e..f91a8b2 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -733,32 +733,36 @@
     const bool is10Bit = (mSrcFormat == COLOR_FormatYUVP010
             || mSrcFormat == OMX_COLOR_FormatYUV420Planar16);
 
-    switch (mSrcColorSpace.mStandard) {
-    case ColorUtils::kColorStandardBT601_525:
-    case ColorUtils::kColorStandardBT601_625:
+    ColorAspects::Primaries primaries;
+    ColorAspects::MatrixCoeffs matrix;
+    if (ColorUtils::unwrapColorAspectsFromColorStandard(
+            mSrcColorSpace.mStandard, &primaries, &matrix) != OK) {
+        matrix = ColorAspects::MatrixUnspecified;
+    }
+
+    switch (matrix) {
+    case ColorAspects::MatrixBT601_6:
+    case ColorAspects::MatrixBT470_6M:   // use 601 matrix as that is the closest for now
+    case ColorAspects::MatrixSMPTE240M:  // use 601 matrix as that is the closest for now
         return (isFullRange ? &BT601_FULL :
                 is10Bit ? &BT601_LTD_10BIT : &BT601_LIMITED);
 
-    case ColorUtils::kColorStandardBT709:
+    case ColorAspects::MatrixBT709_5:
         return (isFullRange ? &BT709_FULL :
                 is10Bit ? &BT709_LTD_10BIT : &BT709_LIMITED);
 
-    case ColorUtils::kColorStandardBT2020:
+    case ColorAspects::MatrixBT2020:
+    case ColorAspects::MatrixBT2020Constant: // use 2020 matrix as that is the closest for now
         return (isFullRange ? &BT2020_FULL :
                 is10Bit ? &BT2020_LTD_10BIT : &BT2020_LIMITED);
 
     default:
-        // for now use the default matrices for unhandled color spaces
-        // TODO: fail?
-        // return nullptr;
-        [[fallthrough]];
-
-    case ColorUtils::kColorStandardUnspecified:
-        if (isFullRange) {
-            return is10Bit ? &BT2020_FULL : &BT601_FULL;
+        // use BT.2020 for 10-bit and 601 for 8-bit by default
+        if (is10Bit) {
+            return isFullRange ? &BT2020_FULL : &BT2020_LTD_10BIT;
+        } else {
+            return isFullRange ? &BT601_FULL : &BT601_LIMITED;
         }
-        return is10Bit ? &BT2020_LTD_10BIT : &BT601_LIMITED;
-
     }
 }
 
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index d3fd790..665ceee 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -340,8 +340,8 @@
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="8x8" />
             <Limit name="block-count" range="1-4096" /> <!-- max 512x512 -->
-            <Limit name="blocks-per-second" range="1-122880" />
-            <Limit name="frame-rate" range="1-120" />
+            <Limit name="blocks-per-second" range="1-259200" />
+            <Limit name="frame-rate" range="1-300" />
             <Limit name="bitrate" range="1-10000000" />
             <Limit name="complexity" range="0-10"  default="0" />
             <Limit name="quality" range="0-100"  default="80" />
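
A quick sanity check on how the raised limits above relate to each other (illustrative arithmetic only, not part of the change): the new blocks-per-second budget permits 300 fps only at small frame sizes, while the full 512x512 block count remains capped around 63 fps (up from 30 fps with the old 122880 budget).

```
// Illustrative arithmetic for the limits above; not part of the patch.
#include <cassert>

int main() {
    const int kBlockDim = 8;              // block-size 8x8
    const int kMaxBlocks = 4096;          // block-count max, i.e. 512x512
    const int kMaxBlocksPerSec = 259200;  // new blocks-per-second cap
    const int kMaxFrameRate = 300;        // new frame-rate cap
    assert((512 / kBlockDim) * (512 / kBlockDim) == kMaxBlocks);
    assert(kMaxBlocksPerSec / kMaxBlocks == 63);           // 512x512 -> ~63 fps
    assert(kMaxBlocksPerSec / kMaxFrameRate == 864);       // 300 fps -> <= 864 blocks/frame
    assert((288 / kBlockDim) * (192 / kBlockDim) == 864);  // e.g. 288x192 at 300 fps
    return 0;
}
```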
diff --git a/media/libstagefright/httplive/fuzzer/Android.bp b/media/libstagefright/httplive/fuzzer/Android.bp
index 85fd8b7..dd49714 100644
--- a/media/libstagefright/httplive/fuzzer/Android.bp
+++ b/media/libstagefright/httplive/fuzzer/Android.bp
@@ -62,5 +62,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libstagefright_httplive",
+        vector: "remote",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/libstagefright/include/ACodecBufferChannel.h b/media/libstagefright/include/ACodecBufferChannel.h
index f3b0600..903280f 100644
--- a/media/libstagefright/include/ACodecBufferChannel.h
+++ b/media/libstagefright/include/ACodecBufferChannel.h
@@ -94,7 +94,8 @@
             size_t offset,
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
-            const sp<MediaCodecBuffer> &buffer) override;
+            const sp<MediaCodecBuffer> &buffer,
+            AString* errorDetailMsg) override;
     virtual status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
     virtual void pollForRenderedBuffers() override;
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 08c7917..e535d5d 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -603,6 +603,7 @@
             status_t internalError = UNKNOWN_ERROR);
 
     status_t requestIDRFrame();
+    status_t setSurfaceParameters(const sp<AMessage> &params);
     status_t setParameters(const sp<AMessage> &params);
 
     // set vendor extension parameters specified in params that are supported by the codec
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index aa02151..916d41e 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -385,7 +385,8 @@
             size_t offset,
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
-            const sp<MediaCodecBuffer> &buffer) {
+            const sp<MediaCodecBuffer> &buffer,
+            AString* errorDetailMsg) {
         (void)memory;
         (void)secure;
         (void)key;
@@ -396,6 +397,7 @@
         (void)subSamples;
         (void)numSubSamples;
         (void)buffer;
+        (void)errorDetailMsg;
         return -ENOSYS;
     }
     /**
diff --git a/media/libstagefright/include/media/stagefright/CodecErrorLog.h b/media/libstagefright/include/media/stagefright/CodecErrorLog.h
new file mode 100644
index 0000000..673117a
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/CodecErrorLog.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC_ERROR_LOG_H_
+
+#define CODEC_ERROR_LOG_H_
+
+#include <sstream>
+#include <string>
+
+#include <android-base/thread_annotations.h>
+
+#include <media/stagefright/foundation/AString.h>
+
+namespace android {
+
+/**
+ * CodecErrorLog gathers information about codec failures and makes it
+ * available to clients for debugging purposes.
+ */
+class CodecErrorLog {
+public:
+    CodecErrorLog() = default;
+
+    /**
+     * Log a line of message.
+     *
+     * \note the message should be readable to developers who may not be
+     *       familiar with MediaCodec internals
+     */
+    void log(const char *tag, const char *message);
+    void log(const char *tag, const std::string &message);
+
+    /**
+     * Extract the accumulated log as string. This operation clears the log.
+     */
+    std::string extract();
+
+    /**
+     * Clears the previous log.
+     */
+    void clear();
+
+private:
+    mutable std::mutex mLock;
+    std::stringstream mStream GUARDED_BY(mLock);
+};
+
+}  // namespace android
+
+#endif  // CODEC_ERROR_LOG_H_
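
A minimal usage sketch of the CodecErrorLog API declared above (illustrative only; the codec name and messages are made up):

```
#include <media/stagefright/CodecErrorLog.h>

void exampleCodecErrorLogUsage() {
    android::CodecErrorLog errorLog;
    // Messages are meant to be readable without knowledge of MediaCodec internals.
    errorLog.log("ExampleCodec", "failed to allocate output buffers");
    errorLog.log("ExampleCodec", std::string("falling back to software decoding"));
    // extract() returns the accumulated text and clears the log.
    std::string report = errorLog.extract();
    (void)report;  // e.g. attach to an error callback or a metrics record
}
```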
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 77394d5..05bc9cc 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -28,7 +28,11 @@
 #include <media/MediaMetrics.h>
 #include <media/MediaProfiles.h>
 #include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/CodecErrorLog.h>
 #include <media/stagefright/FrameRenderTracker.h>
+#include <media/stagefright/MediaHistogram.h>
+#include <media/stagefright/PlaybackDurationAccumulator.h>
+#include <media/stagefright/VideoRenderQualityTracker.h>
 #include <utils/Vector.h>
 
 class C2Buffer;
@@ -62,7 +66,6 @@
 struct PersistentSurface;
 class SoftwareRenderer;
 class Surface;
-class PlaybackDurationAccumulator;
 namespace hardware {
 namespace cas {
 namespace native {
@@ -303,6 +306,8 @@
         T value;
     };
 
+    inline CodecErrorLog &getErrorLog() { return mErrorLog; }
+
 protected:
     virtual ~MediaCodec();
     virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -327,6 +332,7 @@
         RELEASING,
     };
     std::string stateString(State state);
+    std::string apiStateString();
 
     enum {
         kPortIndexInput         = 0,
@@ -449,12 +455,13 @@
     void initMediametrics();
     void updateMediametrics();
     void flushMediametrics();
+    void resetMetricsFields();
     void updateEphemeralMediametrics(mediametrics_handle_t item);
     void updateLowLatency(const sp<AMessage> &msg);
     void onGetMetrics(const sp<AMessage>& msg);
     constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
     void updateTunnelPeek(const sp<AMessage> &msg);
-    void updatePlaybackDuration(const sp<AMessage> &msg);
+    void processRenderedFrames(const sp<AMessage> &msg);
 
     inline void initClientConfigParcel(ClientConfigParcel& clientConfig);
 
@@ -465,7 +472,7 @@
     sp<AMessage> mAsyncReleaseCompleteNotification;
     sp<AMessage> mOnFirstTunnelFrameReadyNotification;
 
-    sp<ResourceManagerServiceProxy> mResourceManagerProxy;
+    std::shared_ptr<ResourceManagerServiceProxy> mResourceManagerProxy;
 
     Domain mDomain;
     AString mLogSessionId;
@@ -488,6 +495,28 @@
             const int32_t colorTransfer);
     bool profileSupport10Bits(const AString &mime, const int32_t profile);
 
+    struct ApiUsageMetrics {
+        bool isArrayMode;
+        enum OperationMode {
+            kUnknownMode = 0,
+            kSynchronousMode = 1,
+            kAsynchronousMode = 2,
+            kBlockMode = 3,
+        };
+        OperationMode operationMode;
+        bool isUsingOutputSurface;
+        struct InputBufferSize {
+            int32_t appMax;  // max size configured by the app
+            int32_t usedMax;  // max size actually used
+            int32_t codecMax;  // max size suggested by the codec
+        } inputBufferSize;
+    } mApiUsageMetrics;
+    struct ReliabilityContextMetrics {
+        int32_t flushCount;
+        int32_t setOutputSurfaceCount;
+        int32_t resolutionChangeCount;
+    } mReliabilityContextMetrics;
+
     // initial create parameters
     AString mInitName;
 
@@ -542,8 +571,9 @@
     sp<CryptoAsync> mCryptoAsync;
     sp<ALooper> mCryptoLooper;
 
-    std::unique_ptr<PlaybackDurationAccumulator> mPlaybackDurationAccumulator;
-    bool mIsSurfaceToScreen;
+    bool mIsSurfaceToDisplay;
+    PlaybackDurationAccumulator mPlaybackDurationAccumulator;
+    VideoRenderQualityTracker mVideoRenderQualityTracker;
 
     MediaCodec(
             const sp<ALooper> &looper, pid_t pid, uid_t uid,
@@ -685,31 +715,8 @@
     int mRecentHead;
     Mutex mRecentLock;
 
-    class Histogram {
-      public:
-        Histogram() : mFloor(0), mWidth(0), mBelow(0), mAbove(0),
-                      mMin(INT64_MAX), mMax(INT64_MIN), mSum(0), mCount(0),
-                      mBucketCount(0), mBuckets(NULL) {};
-        ~Histogram() { clear(); };
-        void clear() { if (mBuckets != NULL) free(mBuckets); mBuckets = NULL; };
-        bool setup(int nbuckets, int64_t width, int64_t floor = 0);
-        void insert(int64_t sample);
-        int64_t getMin() const { return mMin; }
-        int64_t getMax() const { return mMax; }
-        int64_t getCount() const { return mCount; }
-        int64_t getSum() const { return mSum; }
-        int64_t getAvg() const { return mSum / (mCount == 0 ? 1 : mCount); }
-        std::string emit();
-      private:
-        int64_t mFloor, mCeiling, mWidth;
-        int64_t mBelow, mAbove;
-        int64_t mMin, mMax, mSum, mCount;
+    MediaHistogram<int64_t> mLatencyHist;
 
-        int mBucketCount;
-        int64_t *mBuckets;
-    };
-
-    Histogram mLatencyHist;
     // An unique ID for the codec - Used by the metrics.
     uint64_t mCodecId = 0;
 
@@ -717,6 +724,8 @@
     std::function<status_t(const AString &, sp<MediaCodecInfo> *)> mGetCodecInfo;
     friend class MediaTestHelper;
 
+    CodecErrorLog mErrorLog;
+
     DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
 };
 
diff --git a/media/libstagefright/include/media/stagefright/MediaHistogram.h b/media/libstagefright/include/media/stagefright/MediaHistogram.h
new file mode 100644
index 0000000..50fa258
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/MediaHistogram.h
@@ -0,0 +1,211 @@
+/*
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_HISTOGRAM_H_
+#define MEDIA_HISTOGRAM_H_
+
+#include <limits>
+#include <sstream>
+#include <string>
+#include <vector>
+
+namespace android {
+
+template<typename T>
+class MediaHistogram {
+public:
+    MediaHistogram();
+    void clear();
+    bool setup(int bucketCount, T width, T floor = 0);
+    bool setup(const std::vector<T> &bucketLimits);
+    void insert(T sample);
+    size_t size();
+    int64_t operator[](int);
+    T getMin() const { return mMin; }
+    T getMax() const { return mMax; }
+    T getCount() const { return mCount; }
+    T getSum() const { return mSum; }
+    T getAvg() const { return mSum / (mCount == 0 ? 1 : mCount); }
+    T getPercentile(int) const;
+    std::string emit() const;
+    std::string emitBuckets() const;
+private:
+    MediaHistogram(const MediaHistogram &); // disallow
+
+    bool allocate(int bucketCount, bool withBucketLimits);
+
+    T mFloor, mCeiling, mWidth;
+    T mMin, mMax, mSum;
+    int64_t mBelow, mAbove, mCount;
+    std::vector<T> mBuckets;
+    std::vector<T> mBucketLimits;
+};
+
+template<typename T>
+MediaHistogram<T>::MediaHistogram() {
+    mWidth = mCeiling = mFloor = -1;
+    clear();
+}
+
+template<typename T>
+void MediaHistogram<T>::clear() {
+    for (int i = 0; i < mBuckets.size(); ++i) {
+        mBuckets[i] = 0;
+    }
+    mMin = std::numeric_limits<T>::max();
+    mMax = std::numeric_limits<T>::min();
+    mSum = 0;
+    mCount = 0;
+    mBelow = mAbove = 0;
+}
+
+template<typename T>
+bool MediaHistogram<T>::setup(int bucketCount, T width, T floor) {
+    if (bucketCount <= 0 || width <= 0) {
+        return false;
+    }
+    if (!allocate(bucketCount, false)) {
+        return false;
+    }
+    mWidth = width;
+    mFloor = floor;
+    mCeiling = floor + bucketCount * width;
+    clear();
+    return true;
+}
+
+template<typename T>
+bool MediaHistogram<T>::setup(const std::vector<T> &bucketLimits) {
+    if (bucketLimits.size() <= 1) {
+        return false;
+    }
+    int bucketCount = bucketLimits.size() - 1;
+    if (!allocate(bucketCount, true)) {
+        return false;
+    }
+
+    mWidth = -1;
+    mFloor = bucketLimits[0];
+    for (int i = 0; i < bucketCount; ++i) {
+        mBucketLimits[i] = bucketLimits[i + 1];
+    }
+    mCeiling = bucketLimits[bucketCount];
+    clear();
+    return true;
+}
+
+template<typename T>
+bool MediaHistogram<T>::allocate(int bucketCount, bool withBucketLimits) {
+    assert(bucketCount > 0);
+    if (bucketCount != mBuckets.size()) {
+        mBuckets = std::vector<T>(bucketCount, 0);
+    }
+    if (withBucketLimits && mBucketLimits.size() != bucketCount) {
+        mBucketLimits = std::vector<T>(bucketCount, 0);
+    }
+    return true;
+}
+
+template<typename T>
+void MediaHistogram<T>::insert(T sample) {
+    // histogram is not set up
+    if (mBuckets.size() == 0) {
+        return;
+    }
+
+    mCount++;
+    mSum += sample;
+    if (mMin > sample) mMin = sample;
+    if (mMax < sample) mMax = sample;
+
+    if (sample < mFloor) {
+        mBelow++;
+    } else if (sample >= mCeiling) {
+        mAbove++;
+    } else if (mWidth == -1) {
+        // A binary search might be more efficient for large number of buckets, but it is expected
+        // that there will never be a large amount of buckets, so keep the code simple.
+        for (int slot = 0; slot < mBucketLimits.size(); ++slot) {
+            if (sample < mBucketLimits[slot]) {
+                mBuckets[slot]++;
+                break;
+            }
+        }
+    } else {
+        int64_t slot = (sample - mFloor) / mWidth;
+        assert(slot < mBuckets.size());
+        mBuckets[slot]++;
+    }
+    return;
+}
+
+template<typename T>
+size_t MediaHistogram<T>::size() {
+    return mBuckets.size() + 1;
+}
+
+template<typename T>
+int64_t MediaHistogram<T>::operator[](int i) {
+    assert(i >= 0);
+    assert(i <= mBuckets.size());
+    if (i == mBuckets.size()) {
+        return mAbove;
+    }
+    return mBuckets[i];
+}
+
+template<typename T>
+std::string MediaHistogram<T>::emit() const {
+    // emits:  floor,width,below{bucket0,bucket1,....,bucketN}above
+    // or, when explicit bucket limits are used:  below{bucket0,bucket1,....,bucketN}above
+    // an unconfigured histogram emits: 0{}0
+    // XXX: is this the best representation?
+    std::stringstream ss("");
+    if (mWidth == -1) {
+        ss << mBelow << "{";
+    } else {
+        ss << mFloor << "," << mWidth << "," << mBelow << "{";
+    }
+    for (int i = 0; i < mBuckets.size(); i++) {
+        if (i != 0) {
+            ss << ",";
+        }
+        ss << mBuckets[i];
+    }
+    ss << "}" << mAbove;
+    return ss.str();
+}
+
+template<typename T>
+std::string MediaHistogram<T>::emitBuckets() const {
+    std::stringstream ss("");
+    if (mWidth == -1) {
+        ss << mFloor;
+        for (int i = 0; i < mBucketLimits.size(); ++i) {
+            ss << ',' << mBucketLimits[i];
+        }
+    } else {
+        ss << mFloor;
+        for (int i = 1; i <= mBuckets.size(); ++i) {
+            ss << ',' << (mFloor + i * mWidth);
+        }
+    }
+    return ss.str();
+}
+
+} // android
+
+#endif // MEDIA_HISTOGRAM_H_
\ No newline at end of file
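
A small usage sketch of the MediaHistogram template defined above (illustrative only): it can be configured either with uniform buckets or with explicit bucket limits, as VideoRenderQualityMetrics does for its freeze and judder histograms.

```
#include <media/stagefright/MediaHistogram.h>

void exampleHistogramUsage() {
    // Uniform buckets: 10 buckets of width 100 starting at 0.
    android::MediaHistogram<int32_t> latencyMs;
    latencyMs.setup(10 /* bucketCount */, 100 /* width */, 0 /* floor */);
    latencyMs.insert(250);  // lands in the [200, 300) bucket

    // Explicit limits: buckets [0,20), [20,40), [40,60); samples below 0 or >= 60
    // are counted separately as "below"/"above".
    android::MediaHistogram<int32_t> judderScore;
    judderScore.setup({0, 20, 40, 60});
    judderScore.insert(25);   // [20,40) bucket
    judderScore.insert(999);  // above the ceiling
    std::string emitted = judderScore.emit();        // "0{0,1,0}1"
    std::string buckets = judderScore.emitBuckets(); // "0,20,40,60"
    (void)emitted; (void)buckets;
}
```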
diff --git a/media/libstagefright/PlaybackDurationAccumulator.h b/media/libstagefright/include/media/stagefright/PlaybackDurationAccumulator.h
similarity index 95%
rename from media/libstagefright/PlaybackDurationAccumulator.h
rename to media/libstagefright/include/media/stagefright/PlaybackDurationAccumulator.h
index cb5f0c4..bdf1171 100644
--- a/media/libstagefright/PlaybackDurationAccumulator.h
+++ b/media/libstagefright/include/media/stagefright/PlaybackDurationAccumulator.h
@@ -33,7 +33,7 @@
     }
 
     // Process a render time expressed in nanoseconds.
-    void processRenderTime(int64_t newRenderTimeNs) {
+    void onFrameRendered(int64_t newRenderTimeNs) {
         // If we detect wrap-around or out of order frames, just ignore the duration for this
         // and the next frame.
         if (newRenderTimeNs < mPreviousRenderTimeNs) {
@@ -59,7 +59,7 @@
     int64_t mPreviousRenderTimeNs;
 };
 
-}
+} // android
 
-#endif
+#endif // PLAYBACK_DURATION_ACCUMULATOR_H_
 
diff --git a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
new file mode 100644
index 0000000..82ba81c
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
@@ -0,0 +1,449 @@
+/*
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_RENDER_QUALITY_TRACKER_H_
+
+#define VIDEO_RENDER_QUALITY_TRACKER_H_
+
+#include <assert.h>
+#include <list>
+#include <queue>
+
+#include <media/stagefright/MediaHistogram.h>
+
+namespace android {
+
+// A variety of video rendering quality metrics.
+struct VideoRenderQualityMetrics {
+    static constexpr float FRAME_RATE_UNDETERMINED = -1.0f;
+    static constexpr float FRAME_RATE_24_3_2_PULLDOWN = -2.0f;
+
+    VideoRenderQualityMetrics();
+
+    void clear();
+
+    // The render time of the first video frame.
+    int64_t firstRenderTimeUs;
+
+    // The number of frames released to be rendered.
+    int64_t frameReleasedCount;
+
+    // The number of frames actually rendered.
+    int64_t frameRenderedCount;
+
+    // The number of frames dropped - frames that were released but never rendered.
+    int64_t frameDroppedCount;
+
+    // The number of frames that were intentionally dropped/skipped by the app.
+    int64_t frameSkippedCount;
+
+    // The frame rate as detected by looking at the position timestamp from the content stream.
+    float contentFrameRate;
+
+    // The frame rate as detected by looking at the desired render time passed in by the app.
+    float desiredFrameRate;
+
+    // The frame rate as detected by looking at the actual render time, as returned by the system
+    // post-render.
+    float actualFrameRate;
+
+    // A histogram of the durations of freezes due to dropped/skipped frames.
+    MediaHistogram<int32_t> freezeDurationMsHistogram;
+    // The computed overall freeze score using the above histogram and score conversion table. The
+    // score is based on counts in the histogram bucket, multiplied by the value in the score
+    // conversion table for that bucket. For example, the impact of a short freeze may be minimal,
+    // but the impact of a long freeze may be disproportionately worse. Therefore, the score
+    // multipliers for each bucket might increase exponentially instead of linearly. A score
+    // multiplier of zero would reflect that small freeze durations have near-zero impact on the
+    // user experience.
+    int32_t freezeScore;
+    // The computed percentage of total playback duration that was frozen.
+    float freezeRate;
+    // The number of freeze events.
+    int32_t freezeEventCount;
+
+    // A histogram of the durations between each freeze.
+    MediaHistogram<int32_t> freezeDistanceMsHistogram;
+
+    // A histogram of the judder scores - based on the error tolerance between actual render
+    // duration of each frame and the ideal render duration.
+    MediaHistogram<int32_t> judderScoreHistogram;
+    // The computed overall judder score using the above histogram and score conversion table. The
+    // score is based on counts in the histogram bucket, multiplied by the value in the score
+    // conversion table for that bucket. For example, the impact of minimal judder may be small,
+    // but the impact of large judder may be disproportionately worse. Therefore, the score
+    // multipliers for each bucket might increase exponentially instead of linearly. A score
+    // multiplier of zero would reflect that small judder errors have near-zero impact on the user
+    // experience.
+    int32_t judderScore;
+    // The computed percentage of total frames that had judder.
+    float judderRate;
+    // The number of judder events.
+    int32_t judderEventCount;
+};
+
+///////////////////////////////////////////////////////
+// This class analyzes various timestamps related to video rendering to compute a set of metrics
+// that attempt to capture the quality of the user experience during video playback.
+//
+// The following timestamps (in microseconds) are analyzed to compute these metrics:
+//   * The content timestamp found in the content stream, indicating the position of each video
+//     frame.
+//   * The desired timestamp passed in by the app, indicating at what point in time in the future
+//     the app would like the frame to be rendered.
+//   * The actual timestamp passed in by the display subsystem, indicating the point in time at
+//     which the frame was actually rendered.
+//
+// Core to the algorithms are deriving frame durations based on these timestamps and determining
+// the result of each video frame in the content stream:
+//   * skipped: the app didn't want to render the frame
+//   * dropped: the display subsystem could not render the frame in time
+//   * rendered: the display subsystem rendered the frame
+//
+class VideoRenderQualityTracker {
+public:
+    // Configurable elements of the metrics algorithms
+    class Configuration {
+    public:
+        // system/server_configurable_flags/libflags/include/get_flags.h:GetServerConfigurableFlag
+        typedef std::string (*GetServerConfigurableFlagFn)(
+                const std::string& experiment_category_name,
+                const std::string& experiment_flag_name,
+                const std::string& default_value);
+
+        static Configuration getFromServerConfigurableFlags(
+                GetServerConfigurableFlagFn getServerConfigurableFlagFn);
+
+        Configuration();
+
+        // Whether or not frame render quality is tracked.
+        bool enabled;
+
+        // Whether or not frames that are intentionally not rendered by the app should be considered
+        // as dropped.
+        bool areSkippedFramesDropped;
+
+        // How large of a jump forward in content time is allowed before it is considered a
+        // discontinuity (seek/playlist) and various internal states are reset.
+        int32_t maxExpectedContentFrameDurationUs;
+
+        // How much tolerance in frame duration when considering whether or not two frames have the
+        // same frame rate.
+        int32_t frameRateDetectionToleranceUs;
+
+        // A skip forward in content time could occur during frame drops of live content. Therefore
+        // the content frame duration and the app-desired frame duration are compared using this
+        // tolerance to determine whether the app is intentionally seeking forward or whether the
+        // skip forward in content time is due to frame drops. If the app-desired frame duration is
+        // short, but the content frame duration is large, it is assumed the app is intentionally
+        // seeking forward.
+        int32_t liveContentFrameDropToleranceUs;
+
+        // Freeze configuration
+        //
+        // The values used to distribute freeze durations across a histogram.
+        std::vector<int32_t> freezeDurationMsHistogramBuckets;
+        //
+        // The values used to multiply the counts in the histogram buckets above to compute an
+        // overall score. This allows the score to reflect disproportionate impact as freeze
+        // durations increase.
+        std::vector<int64_t> freezeDurationMsHistogramToScore;
+        //
+        // The values used to distribute distances between freezes across a histogram.
+        std::vector<int32_t> freezeDistanceMsHistogramBuckets;
+        //
+        // The maximum number of freeze events to send back to the caller.
+        int32_t freezeEventMax;
+        //
+        // The maximum number of detail entries tracked per freeze event.
+        int32_t freezeEventDetailsMax;
+        //
+        // The maximum distance in time between two freeze occurrences such that both will be
+        // lumped into the same freeze event.
+        int32_t freezeEventDistanceToleranceMs;
+
+        // Judder configuration
+        //
+        // A judder error lower than this value is not scored as judder.
+        int32_t judderErrorToleranceUs;
+        //
+        // The values used to distribute judder scores across a histogram.
+        std::vector<int32_t> judderScoreHistogramBuckets;
+        //
+        // The values used to multiply the counts in the histogram buckets above to compute an
+        // overall score. This allows the score to reflect disproportionate impact as judder scores
+        // increase.
+        std::vector<int64_t> judderScoreHistogramToScore;
+        //
+        // The maximum number of judder events to send back to the caller.
+        int32_t judderEventMax;
+        //
+        // The maximum number of detail entries tracked per judder event.
+        int32_t judderEventDetailsMax;
+        //
+        // The maximum distance in time between two judder occurrences such that both will be
+        // lumped into the same judder event.
+        int32_t judderEventDistanceToleranceMs;
+    };
+
+    struct FreezeEvent {
+        // Details are captured for each freeze up to a limited number. The arrays are guaranteed to
+        // have the same size.
+        struct Details {
+            // The duration of the freeze.
+            std::vector<int32_t> durationMs;
+            // The distance between the beginning of this freeze and the end of the previous freeze.
+            std::vector<int32_t> distanceMs;
+        };
+        // Whether or not the data in this structure is valid.
+        bool valid = false;
+        // The time at which the first freeze for this event was detected.
+        int64_t initialTimeUs;
+        // The total duration from the beginning of the first freeze to the end of the last freeze
+        // in this event.
+        int32_t durationMs;
+        // The number of freezes in this event.
+        int64_t count;
+        // The sum of all durations of all freezes in this event.
+        int64_t sumDurationMs;
+        // The sum of all distances between each freeze in this event.
+        int64_t sumDistanceMs;
+        // Detailed information for the first N freezes in this event.
+        Details details;
+    };
+
+    struct JudderEvent {
+        // Details are captured for each frame judder up to a limited number. The arrays are
+        // guaranteed to have the same size.
+        struct Details {
+            // The actual render duration of the frame for this judder occurrence.
+            std::vector<int32_t> actualRenderDurationUs;
+            // The content render duration of the frame for this judder occurrence.
+            std::vector<int32_t> contentRenderDurationUs;
+            // The distance between this judder occurrence and the previous judder occurrence.
+            std::vector<int32_t> distanceMs;
+        };
+        // Whether or not the data in this structure is valid.
+        bool valid = false;
+        // The time at which the first judder occurrence for this event was detected.
+        int64_t initialTimeUs;
+        // The total duration from the first judder occurrence to the last judder occurrence in this
+        // event.
+        int32_t durationMs;
+        // The number of judder occurrences in this event.
+        int64_t count;
+        // The sum of all judder scores in this event.
+        int64_t sumScore;
+        // The sum of all distances between each judder occurrence in this event.
+        int64_t sumDistanceMs;
+        // Detailed information for the first N judder occurrences in this event.
+        Details details;
+    };
+
+    VideoRenderQualityTracker();
+    VideoRenderQualityTracker(const Configuration &configuration);
+
+    // Called when a tunnel mode frame has been queued.
+    void onTunnelFrameQueued(int64_t contentTimeUs);
+
+    // Called when the app has intentionally decided not to render this frame.
+    void onFrameSkipped(int64_t contentTimeUs);
+
+    // Called when the app has requested the frame to be rendered as soon as possible.
+    void onFrameReleased(int64_t contentTimeUs);
+
+    // Called when the app has requested the frame to be rendered at a specific point in time in the
+    // future.
+    void onFrameReleased(int64_t contentTimeUs, int64_t desiredRenderTimeNs);
+
+    // Called when the system has detected that the frame has actually been rendered to the display.
+    // Returns any freeze events or judder events that were detected.
+    void onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs,
+                         FreezeEvent *freezeEventOut = nullptr,
+                         JudderEvent *judderEventOut = nullptr);
+
+    // Gets and resets data for the current freeze event.
+    FreezeEvent getAndResetFreezeEvent();
+
+    // Gets and resets data for the current judder event.
+    JudderEvent getAndResetJudderEvent();
+
+    // Retrieve the metrics.
+    const VideoRenderQualityMetrics &getMetrics();
+
+    // Called when a change in codec state will result in a content discontinuity - e.g. flush.
+    void resetForDiscontinuity();
+
+    // Clear out all metrics and tracking - e.g. codec reconfigured.
+    void clear();
+
+private:
+    // Tracking of frames that are pending to be rendered to the display.
+    struct FrameInfo {
+        int64_t contentTimeUs;
+        int64_t desiredRenderTimeUs;
+    };
+
+    // Historic tracking of frame durations
+    struct FrameDurationUs {
+        static const int SIZE = 5;
+
+        FrameDurationUs() {
+            for (int i = 0; i < SIZE; ++i) {
+                durationUs[i] = -1;
+            }
+            priorTimestampUs = -1;
+        }
+
+        int32_t &operator[](int index) {
+            assert(index < SIZE);
+            return durationUs[index];
+        }
+
+        const int32_t &operator[](int index) const {
+            assert(index < SIZE);
+            return durationUs[index];
+        }
+
+        // The duration of the past N frames.
+        int32_t durationUs[SIZE];
+
+        // The timestamp of the previous frame.
+        int64_t priorTimestampUs;
+    };
+
+    // Configure histograms for the metrics.
+    static void configureHistograms(VideoRenderQualityMetrics &m, const Configuration &c);
+
+    // The current time in microseconds.
+    static int64_t nowUs();
+
+    // A new frame has been processed, so update the frame durations based on the new frame
+    // timestamp.
+    static void updateFrameDurations(FrameDurationUs &durationUs, int64_t newTimestampUs);
+
+    // Update a frame rate if, and only if, one can be detected.
+    static void updateFrameRate(float &frameRate, const FrameDurationUs &durationUs,
+                                const Configuration &c);
+
+    // Examine the past few frames to detect the frame rate based on each frame's render duration.
+    static float detectFrameRate(const FrameDurationUs &durationUs, const Configuration &c);
+
+    // Determine whether or not 3:2 pulldown for displaying 24fps content on 60Hz displays is
+    // occurring.
+    static bool is32pulldown(const FrameDurationUs &durationUs, const Configuration &c);
+
+    // Process a frame freeze.
+    static void processFreeze(int64_t actualRenderTimeUs, int64_t lastRenderTimeUs,
+                              int64_t lastFreezeEndTimeUs, FreezeEvent &e,
+                              VideoRenderQualityMetrics &m, const Configuration &c);
+
+    // Retrieve a freeze event if an event just finished.
+    static void maybeCaptureFreezeEvent(int64_t actualRenderTimeUs, int64_t lastFreezeEndTimeUs,
+                                        FreezeEvent &e, const VideoRenderQualityMetrics & m,
+                                        const Configuration &c, FreezeEvent *freezeEventOut);
+
+    // Compute a judder score for the previously-rendered frame.
+    static int64_t computePreviousJudderScore(const FrameDurationUs &actualRenderDurationUs,
+                                              const FrameDurationUs &contentRenderDurationUs,
+                                              const Configuration &c);
+
+    // Process a frame judder.
+    static void processJudder(int32_t judderScore, int64_t judderTimeUs,
+                              int64_t lastJudderEndTimeUs,
+                              const FrameDurationUs &contentDurationUs,
+                              const FrameDurationUs &actualDurationUs, JudderEvent &e,
+                              VideoRenderQualityMetrics &m, const Configuration &c);
+
+    // Retrieve a judder event if an event just finished.
+    static void maybeCaptureJudderEvent(int64_t actualRenderTimeUs, int64_t lastJudderEndTimeUs,
+                                        JudderEvent &e, const VideoRenderQualityMetrics & m,
+                                        const Configuration &c, JudderEvent *judderEventOut);
+
+    // Check to see if a discontinuity has occurred by examining the content time and the
+    // app-desired render time. If so, reset some internal state.
+    bool resetIfDiscontinuity(int64_t contentTimeUs, int64_t desiredRenderTimeUs);
+
+    // Update the metrics because a skipped frame was detected.
+    void processMetricsForSkippedFrame(int64_t contentTimeUs);
+
+    // Update the metrics because a dropped frame was detected.
+    void processMetricsForDroppedFrame(int64_t contentTimeUs, int64_t desiredRenderTimeUs);
+
+    // Update the metrics because a rendered frame was detected.
+    void processMetricsForRenderedFrame(int64_t contentTimeUs, int64_t desiredRenderTimeUs,
+                                        int64_t actualRenderTimeUs,
+                                        FreezeEvent *freezeEventOut, JudderEvent *judderEventOut);
+
+    // Configurable elements of the metrics algorithms.
+    const Configuration mConfiguration;
+
+    // Metrics are updated every time a frame event occurs - skipped, dropped, rendered.
+    VideoRenderQualityMetrics mMetrics;
+
+    // The most recently processed timestamp referring to the position in the content stream.
+    int64_t mLastContentTimeUs;
+
+    // The most recently processed timestamp referring to the wall clock time a frame was rendered.
+    int64_t mLastRenderTimeUs;
+
+    // The timestamp of the first frame rendered after the most recent freeze, i.e. when it ended.
+    int64_t mLastFreezeEndTimeUs;
+
+    // The most recent timestamp of frame judder.
+    int64_t mLastJudderEndTimeUs;
+
+    // The render duration of the playback.
+    int64_t mRenderDurationMs;
+
+    // True if the previous frame was dropped.
+    bool mWasPreviousFrameDropped;
+
+    // The freeze event that's currently being tracked.
+    FreezeEvent mFreezeEvent;
+
+    // The judder event that's currently being tracked.
+    JudderEvent mJudderEvent;
+
+    // Frames skipped at the end of playback shouldn't really be considered skipped, therefore keep
+    // a list of the frames, and process them as skipped frames the next time a frame is rendered.
+    std::list<int64_t> mPendingSkippedFrameContentTimeUsList;
+
+    // Since the system only signals when a frame is rendered, dropped frames are detected by
+    // checking to see if the next expected frame is rendered. If not, it is considered dropped.
+    std::queue<FrameInfo> mNextExpectedRenderedFrameQueue;
+
+    // When B-frames are present in the stream, a P-frame will be queued before the B-frame even
+    // though it is rendered after. Therefore, the P-frame is held here and not inserted into
+    // mNextExpectedRenderedFrameQueue until it should be inserted to maintain render order.
+    int64_t mTunnelFrameQueuedContentTimeUs;
+
+    // Frame durations derived from timestamps encoded into the content stream. These are the
+    // durations that each frame is supposed to be rendered for.
+    FrameDurationUs mContentFrameDurationUs;
+
+    // Frame durations derived from timestamps passed in by the app, indicating the wall clock time
+    // at which the app would like to have the frame rendered.
+    FrameDurationUs mDesiredFrameDurationUs;
+
+    // Frame durations derived from timestamps captured by the display subsystem, indicating the
+    // wall clock time at which the frame is actually rendered.
+    FrameDurationUs mActualFrameDurationUs;
+};
+
+}  // namespace android
+
+#endif  // VIDEO_RENDER_QUALITY_TRACKER_H_
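
A hedged sketch of how a client such as MediaCodec might drive the tracker declared above (timestamps and values are made up; the actual MediaCodec wiring is not shown here):

```
#include <media/stagefright/VideoRenderQualityTracker.h>

void exampleRenderQualityTracking() {
    using android::VideoRenderQualityTracker;
    VideoRenderQualityTracker tracker;  // default Configuration

    // A frame is released for rendering with an app-desired render time...
    tracker.onFrameReleased(/* contentTimeUs */ 0, /* desiredRenderTimeNs */ 100'000'000);
    // ...and later reported as actually rendered by the display subsystem.
    VideoRenderQualityTracker::FreezeEvent freeze;
    VideoRenderQualityTracker::JudderEvent judder;
    tracker.onFrameRendered(/* contentTimeUs */ 0, /* actualRenderTimeNs */ 101'000'000,
                            &freeze, &judder);

    // Frames the app chose not to render are reported as skipped.
    tracker.onFrameSkipped(/* contentTimeUs */ 33'333);

    // A flush or seek is reported so that the gap is not scored as a freeze.
    tracker.resetForDiscontinuity();

    const android::VideoRenderQualityMetrics &m = tracker.getMetrics();
    // The freeze/judder scores weight histogram counts by the configured per-bucket
    // multipliers, e.g. counts {3, 1} with multipliers {1, 10} score 3*1 + 1*10 = 13.
    (void)m.freezeScore;
    (void)m.judderScore;
}
```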
diff --git a/media/libstagefright/rtsp/fuzzer/Android.bp b/media/libstagefright/rtsp/fuzzer/Android.bp
new file mode 100644
index 0000000..8e10b0c
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/Android.bp
@@ -0,0 +1,64 @@
+/*
+* Copyright (C) 2023 The Android Open Source Project
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at:
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+cc_defaults {
+    name: "libstagefright_rtsp_fuzzer_defaults",
+    shared_libs: [
+        "liblog",
+        "libmedia",
+        "libutils",
+        "libstagefright_foundation",
+    ],
+    static_libs: [
+        "libdatasource",
+        "libstagefright_rtsp",
+    ],
+    header_libs: [
+        "libstagefright_rtsp_headers",
+    ],
+    fuzz_config:{
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "sdploader_fuzzer",
+    srcs: [
+        "sdploader_fuzzer.cpp",
+    ],
+    defaults: [
+        "libstagefright_rtsp_fuzzer_defaults",
+    ]
+}
+
+cc_fuzz {
+    name: "rtp_writer_fuzzer",
+    srcs: [
+        "rtp_writer_fuzzer.cpp",
+    ],
+    defaults: [
+        "libstagefright_rtsp_fuzzer_defaults",
+    ],
+    shared_libs:[
+        "libandroid_net",
+        "libbase",
+        "libstagefright",
+        "libcutils",
+    ],
+}
diff --git a/media/libstagefright/rtsp/fuzzer/README.md b/media/libstagefright/rtsp/fuzzer/README.md
new file mode 100644
index 0000000..657fb48
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/README.md
@@ -0,0 +1,64 @@
+# Fuzzers for libstagefright_rtsp
+
+## Table of contents
++ [sdploader_fuzzer](#SDPLoader)
++ [rtp_writer_fuzzer](#ARTPWriter)
+
+# <a name="SDPLoader"></a> Fuzzer for SDPLoader
+
+SDPLoader supports the following parameters:
+1. Flag (parameter name: "flags")
+2. URL (parameter name: "url")
+3. Header (parameter name: "headers")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`flags`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`url`| `String` |Value obtained from FuzzedDataProvider|
+|`headers`| `String` |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) sdploader_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/sdploader_fuzzer/sdploader_fuzzer
+```
+
+# <a name="ARTPWriter"></a> Fuzzer for ARTPWriter
+
+ARTPWriter supports the following parameters:
+1. File descriptor (parameter name: "fd")
+2. Local Ip (parameter name: "localIp")
+3. Local Port (parameter name: "localPort")
+4. Remote Ip (parameter name: "remoteIp")
+5. Remote Port (parameter name: "remotePort")
+6. Sequence No (parameter name: "seqNo")
+7. OpponentID (parameter name: "opponentID")
+8. Bit Rate (parameter name: "bitrate")
+9. kKeyMIMETypeArray (parameter name: "mimeType")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`localIp`| `String` |Value obtained from FuzzedDataProvider|
+|`localPort`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`remoteIp`| `String` |Value obtained from FuzzedDataProvider|
+|`remotePort`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`seqNo`| `0`  to  `10000000` |Value obtained from FuzzedDataProvider|
+|`opponentID`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`bitrate`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`mimeType`| 0. `MEDIA_MIMETYPE_VIDEO_AVC`<br> 1. `MEDIA_MIMETYPE_VIDEO_HEVC`<br> 2. `MEDIA_MIMETYPE_VIDEO_H263`<br> 3. `MEDIA_MIMETYPE_AUDIO_AMR_NB`<br> 4. `MEDIA_MIMETYPE_AUDIO_AMR_WB`|Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) rtp_writer_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/rtp_writer_fuzzer/rtp_writer_fuzzer
+```
diff --git a/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp
new file mode 100644
index 0000000..8d9f923
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/rtsp/ARTPWriter.h>
+
+constexpr int32_t kMinSize = 0;
+constexpr int32_t kMaxSize = 65536;
+constexpr int32_t kMaxTime = 1000;
+constexpr int32_t kMaxBytes = 128;
+constexpr int32_t kAMRNBFrameSizes[] = {13, 14, 16, 18, 20, 21, 27, 32};
+constexpr int32_t kAMRWBFrameSizes[] = {18, 24, 33, 37, 41, 47, 51, 59, 61};
+constexpr int32_t kAMRIndexOffset = 8;
+
+using namespace android;
+
+const char* kKeyMimeTypeArray[] = {MEDIA_MIMETYPE_VIDEO_AVC, MEDIA_MIMETYPE_VIDEO_HEVC,
+                                   MEDIA_MIMETYPE_VIDEO_H263, MEDIA_MIMETYPE_AUDIO_AMR_NB,
+                                   MEDIA_MIMETYPE_AUDIO_AMR_WB};
+
+struct TestMediaSource : public MediaSource {
+  public:
+    TestMediaSource(FuzzedDataProvider& mFdp) : mTestMetaData(new MetaData) {
+        int32_t vectorSize = 0;
+        mAllowRead = mFdp.ConsumeBool();
+        mKeySps = mFdp.ConsumeIntegral<int32_t>();
+        mKeyVps = mFdp.ConsumeIntegral<int32_t>();
+        mKeyPps = mFdp.ConsumeIntegral<int32_t>();
+        mKeyTime = mFdp.ConsumeIntegralInRange<int64_t>(kMinSize, kMaxTime);
+
+        mMimeType = mFdp.PickValueInArray(kKeyMimeTypeArray);
+        mTestMetaData->setCString(kKeyMIMEType, mMimeType);
+        if (mMimeType == MEDIA_MIMETYPE_AUDIO_AMR_NB) {
+            int32_t index =
+                    mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, std::size(kAMRNBFrameSizes) - 1);
+            vectorSize = kAMRNBFrameSizes[index];
+            mData.push_back(kAMRIndexOffset * index);
+        } else if (mMimeType == MEDIA_MIMETYPE_AUDIO_AMR_WB) {
+            int32_t index =
+                    mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, std::size(kAMRWBFrameSizes) - 1);
+            vectorSize = kAMRWBFrameSizes[index];
+            mData.push_back(kAMRIndexOffset * index);
+        } else if (mMimeType == MEDIA_MIMETYPE_VIDEO_H263) {
+            // Required format for H263 media data
+            mData.push_back(0);
+            mData.push_back(0);
+            vectorSize = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+        } else {
+            vectorSize = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+        }
+        for (size_t idx = mData.size(); idx < vectorSize; ++idx) {
+            mData.push_back(mFdp.ConsumeIntegral<uint8_t>());
+        }
+    }
+    virtual status_t start(MetaData* /*params*/) { return OK; }
+    virtual status_t stop() { return OK; }
+    virtual sp<MetaData> getFormat() { return mTestMetaData; }
+    virtual status_t read(MediaBufferBase** buffer, const ReadOptions* /*options*/) {
+        if (!mAllowRead) {
+            return -1;
+        }
+        *buffer = new MediaBuffer(mData.data() /*data*/, mData.size() /*size*/);
+        if (mKeySps) {
+            (*buffer)->meta_data().setInt32(kKeySps, mKeySps);
+        }
+        if (mKeyVps) {
+            (*buffer)->meta_data().setInt32(kKeyVps, mKeyVps);
+        }
+        if (mKeyPps) {
+            (*buffer)->meta_data().setInt32(kKeyPps, mKeyPps);
+        }
+        (*buffer)->meta_data().setInt64(kKeyTime, mKeyTime);
+        return OK;
+    }
+
+  private:
+    int32_t mKeySps;
+    int32_t mKeyVps;
+    int32_t mKeyPps;
+    int64_t mKeyTime;
+    bool mAllowRead;
+    const char* mMimeType;
+    sp<MetaData> mTestMetaData;
+    std::vector<uint8_t> mData;
+};
+
+class ARTPWriterFuzzer {
+  public:
+    ARTPWriterFuzzer(const uint8_t* data, size_t size)
+        : mDataSourceFd(memfd_create("InputFile", MFD_ALLOW_SEALING)), mFdp(data, size) {}
+    ~ARTPWriterFuzzer() { close(mDataSourceFd); }
+    void process();
+
+  private:
+    void createARTPWriter();
+    const int32_t mDataSourceFd;
+    FuzzedDataProvider mFdp;
+    sp<ARTPWriter> mArtpWriter;
+};
+
+void ARTPWriterFuzzer::createARTPWriter() {
+    String8 localIp = String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
+    String8 remoteIp = String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
+    mArtpWriter = sp<ARTPWriter>::make(
+            mDataSourceFd, localIp, mFdp.ConsumeIntegral<uint16_t>() /* localPort */, remoteIp,
+            mFdp.ConsumeIntegral<uint16_t>() /* remotePort */,
+            mFdp.ConsumeIntegralInRange<uint32_t>(kMinSize, kMaxSize) /* seqNo */);
+}
+
+void ARTPWriterFuzzer::process() {
+    if (mFdp.ConsumeBool()) {
+        mArtpWriter = sp<ARTPWriter>::make(mDataSourceFd);
+        if (mArtpWriter->getSequenceNum() > kMaxSize) {
+            createARTPWriter();
+        }
+    } else {
+        createARTPWriter();
+    }
+
+    mArtpWriter->addSource(sp<TestMediaSource>::make(mFdp) /* source */);
+
+    while (mFdp.remaining_bytes()) {
+        auto invokeRTPWriterFuzzer = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    sp<MetaData> metaData = sp<MetaData>::make();
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeySelfID, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyPayloadType, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyRtpExtMap, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyRtpCvoDegrees, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyRtpDscp, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt64(kKeySocketNetwork, mFdp.ConsumeIntegral<int64_t>());
+                    }
+                    mArtpWriter->start(metaData.get() /*param*/);
+                },
+                [&]() {
+                    mArtpWriter->setTMMBNInfo(mFdp.ConsumeIntegral<uint32_t>() /* opponentID */,
+                                              mFdp.ConsumeIntegral<uint32_t>() /* bitrate */);
+                },
+                [&]() { mArtpWriter->stop(); },
+                [&]() {
+                    mArtpWriter->updateCVODegrees(mFdp.ConsumeIntegral<int32_t>() /* cvoDegrees */);
+                },
+                [&]() {
+                    mArtpWriter->updatePayloadType(
+                            mFdp.ConsumeIntegral<int32_t>() /* payloadType */);
+                },
+
+        });
+        invokeRTPWriterFuzzer();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    ARTPWriterFuzzer artpWriterFuzzer(data, size);
+    artpWriterFuzzer.process();
+    return 0;
+}
diff --git a/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp
new file mode 100644
index 0000000..748e5b6
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <datasource/HTTPBase.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/MediaHTTPConnection.h>
+#include <media/MediaHTTPService.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/rtsp/SDPLoader.h>
+
+using namespace android;
+
+constexpr int32_t kMinCapacity = 0;
+constexpr int32_t kMaxCapacity = 1000;
+constexpr int32_t kMaxStringLength = 20;
+constexpr int32_t kMaxBytes = 128;
+enum { kWhatLoad = 'load' };
+
+struct FuzzAHandler : public AHandler {
+  public:
+    FuzzAHandler(std::function<void()> signalEosFunction) : mSignalEosFunction(signalEosFunction) {}
+
+  protected:
+    void onMessageReceived(const sp<AMessage>& msg) override {
+        switch (msg->what()) {
+            case kWhatLoad: {
+                mSignalEosFunction();
+                break;
+            }
+        }
+        return;
+    }
+
+  private:
+    std::function<void()> mSignalEosFunction;
+};
+
+struct FuzzMediaHTTPConnection : public MediaHTTPConnection {
+  public:
+    FuzzMediaHTTPConnection(FuzzedDataProvider* fdp) : mFdp(fdp) {
+        mSize = mFdp->ConsumeIntegralInRange(kMinCapacity, kMaxCapacity);
+        mData = mFdp->ConsumeBytes<uint8_t>(mSize);
+        mSize = mData.size();
+    }
+    virtual bool connect(const char* /* uri */,
+                         const KeyedVector<String8, String8>* /* headers */) {
+        return mFdp->ConsumeBool();
+    }
+    virtual void disconnect() { return; }
+    virtual ssize_t readAt(off64_t offset, void* data, size_t size) {
+        if ((size + offset <= mData.size()) && (offset >= 0)) {
+            memcpy(data, mData.data() + offset, size);
+            return size;
+        }
+        return 0;
+    }
+    virtual off64_t getSize() { return mSize; }
+    virtual status_t getMIMEType(String8* /*mimeType*/) {return mFdp->ConsumeIntegral<status_t>();}
+    virtual status_t getUri(String8* /*uri*/) {return mFdp->ConsumeIntegral<status_t>();}
+
+  private:
+    FuzzedDataProvider* mFdp = nullptr;
+    std::vector<uint8_t> mData;
+    size_t mSize = 0;
+};
+
+struct FuzzMediaHTTPService : public MediaHTTPService {
+  public:
+    FuzzMediaHTTPService(FuzzedDataProvider* fdp) : mFdp(fdp) {}
+    virtual sp<MediaHTTPConnection> makeHTTPConnection() {
+        mediaHTTPConnection = sp<FuzzMediaHTTPConnection>::make(mFdp);
+        return mediaHTTPConnection;
+    }
+
+  private:
+    sp<FuzzMediaHTTPConnection> mediaHTTPConnection = nullptr;
+    FuzzedDataProvider* mFdp = nullptr;
+};
+
+class SDPLoaderFuzzer {
+  public:
+    SDPLoaderFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {}
+    void process();
+
+  private:
+    void signalEos();
+
+    bool mEosReached = false;
+    std::mutex mMsgPostCompleteMutex;
+    std::condition_variable mConditionalVariable;
+    FuzzedDataProvider mFdp;
+};
+
+void SDPLoaderFuzzer::signalEos() {
+    mEosReached = true;
+    mConditionalVariable.notify_one();
+    return;
+}
+
+void SDPLoaderFuzzer::process() {
+    sp<FuzzAHandler> handler = sp<FuzzAHandler>::make(std::bind(&SDPLoaderFuzzer::signalEos, this));
+    sp<ALooper> looper = sp<ALooper>::make();
+    looper->start();
+    looper->registerHandler(handler);
+    const sp<AMessage> notify = sp<AMessage>::make(kWhatLoad, handler);
+    sp<SDPLoader> sdpLoader =
+            sp<SDPLoader>::make(notify, mFdp.ConsumeIntegral<uint32_t>() /* flags */,
+                                sp<FuzzMediaHTTPService>::make(&mFdp) /* httpService */);
+
+    KeyedVector<String8, String8> headers;
+    for (size_t idx = 0; idx < mFdp.ConsumeIntegralInRange<size_t>(kMinCapacity, kMaxCapacity);
+         ++idx) {
+        headers.add(String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str()) /* key */,
+                    String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str()) /* value */);
+    }
+
+    sdpLoader->load(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* url */, &headers);
+
+    std::unique_lock waitForMsgPostComplete(mMsgPostCompleteMutex);
+    mConditionalVariable.wait(waitForMsgPostComplete, [this] { return mEosReached; });
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    SDPLoaderFuzzer sdpLoaderFuzzer(data, size);
+    sdpLoaderFuzzer.process();
+    return 0;
+}
diff --git a/media/libstagefright/tests/Android.bp b/media/libstagefright/tests/Android.bp
index e6b67ce..581292e 100644
--- a/media/libstagefright/tests/Android.bp
+++ b/media/libstagefright/tests/Android.bp
@@ -55,3 +55,20 @@
         "-Wall",
     ],
 }
+
+cc_test {
+    name: "VideoRenderQualityTracker_test",
+    srcs: ["VideoRenderQualityTracker_test.cpp"],
+
+    shared_libs: [
+        "libbase",
+        "liblog",
+        "libstagefright",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+}
diff --git a/media/libstagefright/tests/HEVC/Android.bp b/media/libstagefright/tests/HEVC/Android.bp
index 7f2ff12..7a0ba52 100644
--- a/media/libstagefright/tests/HEVC/Android.bp
+++ b/media/libstagefright/tests/HEVC/Android.bp
@@ -56,7 +56,4 @@
             "signed-integer-overflow",
         ],
     },
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/HEVCUtils/HEVCUtilsUnitTest.zip?unzip=true",
-    ],
 }
diff --git a/media/libstagefright/tests/HEVC/AndroidTest.xml b/media/libstagefright/tests/HEVC/AndroidTest.xml
index ff850a2..00bb3e5 100644
--- a/media/libstagefright/tests/HEVC/AndroidTest.xml
+++ b/media/libstagefright/tests/HEVC/AndroidTest.xml
@@ -18,14 +18,22 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="false" />
         <option name="push" value="HEVCUtilsUnitTest->/data/local/tmp/HEVCUtilsUnitTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/HEVCUtils/HEVCUtilsUnitTest.zip?unzip=true"
-            value="/data/local/tmp/HEVCUtilsUnitTest/" />
+    </target_preparer>
+
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="HEVCUtilsUnitTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="HEVCUtilsUnitTest-1.0" />
+        <option name="dynamic-config-module" value="HEVCUtilsUnitTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="HEVCUtilsUnitTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/HEVCUtilsUnitTest/" />
+        <option name="native-test-flag" value="-P /sdcard/tests/HEVCUtilsUnitTest-1.0/" />
     </test>
 </configuration>
diff --git a/media/libstagefright/tests/HEVC/DynamicConfig.xml b/media/libstagefright/tests/HEVC/DynamicConfig.xml
new file mode 100644
index 0000000..517449c
--- /dev/null
+++ b/media/libstagefright/tests/HEVC/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
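+<!-- Consumed by the DynamicConfigPusher/MediaPreparer pair in AndroidTest.xml: MediaPreparer is
+     expected to download the archive named below and push its contents to the device. -->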
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/HEVCUtils/HEVCUtilsUnitTest-1.0.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
new file mode 100644
index 0000000..7823922
--- /dev/null
+++ b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
@@ -0,0 +1,1027 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "VideoRenderQualityTracker_test"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <media/stagefright/VideoRenderQualityTracker.h>
+
+namespace android {
+
+using Metrics = VideoRenderQualityMetrics;
+using Configuration = VideoRenderQualityTracker::Configuration;
+using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
+using JudderEvent = VideoRenderQualityTracker::JudderEvent;
+
+static constexpr float FRAME_RATE_UNDETERMINED = VideoRenderQualityMetrics::FRAME_RATE_UNDETERMINED;
+static constexpr float FRAME_RATE_24_3_2_PULLDOWN =
+        VideoRenderQualityMetrics::FRAME_RATE_24_3_2_PULLDOWN;
+
+class Helper {
+public:
+    Helper(double contentFrameDurationMs, const Configuration &configuration) :
+            mVideoRenderQualityTracker(configuration) {
+        mContentFrameDurationUs = int64_t(contentFrameDurationMs * 1000);
+        mMediaTimeUs = 0;
+        mClockTimeNs = 0;
+    }
+
+    void changeContentFrameDuration(double contentFrameDurationMs) {
+        mContentFrameDurationUs = int64_t(contentFrameDurationMs * 1000);
+    }
+
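+    // Renders one frame per entry in the list: media time always advances by the content frame
+    // duration, while the clock advances by the entry's actual render duration, letting tests
+    // introduce drift, judder, and pulldown patterns.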
+    template<typename T>
+    void render(std::initializer_list<T> renderDurationMsList) {
+        for (auto renderDurationMs : renderDurationMsList) {
+            mVideoRenderQualityTracker.onFrameReleased(mMediaTimeUs);
+            mVideoRenderQualityTracker.onFrameRendered(mMediaTimeUs, mClockTimeNs, &mFreezeEvent,
+                                                       &mJudderEvent);
+            mMediaTimeUs += mContentFrameDurationUs;
+            mClockTimeNs += int64_t(renderDurationMs * 1000 * 1000);
+        }
+    }
+
+    void render(int numFrames, float durationMs = -1) {
+        int64_t durationUs = durationMs < 0 ? mContentFrameDurationUs : durationMs * 1000;
+        for (int i = 0; i < numFrames; ++i) {
+            mVideoRenderQualityTracker.onFrameReleased(mMediaTimeUs);
+            mVideoRenderQualityTracker.onFrameRendered(mMediaTimeUs, mClockTimeNs, &mFreezeEvent,
+                                                       &mJudderEvent);
+            mMediaTimeUs += mContentFrameDurationUs;
+            mClockTimeNs += durationUs * 1000;
+        }
+    }
+
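+    // skip() simulates frames that are never released to the renderer, while drop() releases
+    // frames that are then never reported as rendered; both advance media and clock time by one
+    // content frame.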
+    void skip(int numFrames) {
+        for (int i = 0; i < numFrames; ++i) {
+            mVideoRenderQualityTracker.onFrameSkipped(mMediaTimeUs);
+            mMediaTimeUs += mContentFrameDurationUs;
+            mClockTimeNs += mContentFrameDurationUs * 1000;
+        }
+    }
+
+    void drop(int numFrames) {
+        for (int i = 0; i < numFrames; ++i) {
+            mVideoRenderQualityTracker.onFrameReleased(mMediaTimeUs);
+            mMediaTimeUs += mContentFrameDurationUs;
+            mClockTimeNs += mContentFrameDurationUs * 1000;
+        }
+    }
+
+    const Metrics & getMetrics() {
+        return mVideoRenderQualityTracker.getMetrics();
+    }
+
+    FreezeEvent getAndClearFreezeEvent() {
+        FreezeEvent e = std::move(mFreezeEvent);
+        mFreezeEvent.valid = false;
+        return e;
+    }
+
+    JudderEvent getAndClearJudderEvent() {
+        JudderEvent e = std::move(mJudderEvent);
+        mJudderEvent.valid = false;
+        return e;
+    }
+
+private:
+    VideoRenderQualityTracker mVideoRenderQualityTracker;
+    int64_t mContentFrameDurationUs;
+    int64_t mMediaTimeUs;
+    int64_t mClockTimeNs;
+    VideoRenderQualityTracker::FreezeEvent mFreezeEvent;
+    VideoRenderQualityTracker::JudderEvent mJudderEvent;
+};
+
+class VideoRenderQualityTrackerTest : public ::testing::Test {
+public:
+    VideoRenderQualityTrackerTest() {}
+};
+
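+// The configuration tests below drive Configuration::getFromServerConfigurableFlags() with a
+// stubbed flag provider; missing, empty, or malformed flag values must leave every field at its
+// default, while well-formed values must override them.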
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withDefaults) {
+    Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn =
+        [](const std::string &, const std::string &, const std::string &defaultStr) -> std::string {
+            return defaultStr;
+        };
+
+    Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+    Configuration d; // default configuration
+    EXPECT_EQ(c.enabled, d.enabled);
+    EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+    EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+    EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+    EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+    EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+    EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+    EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+    EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+    EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+    EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+    EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+    EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+    EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+    EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+    EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withEmpty) {
+    Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+        [](const std::string &, const std::string &, const std::string &) -> std::string {
+            return "";
+        }
+    };
+    Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+    Configuration d; // default configuration
+    EXPECT_EQ(c.enabled, d.enabled);
+    EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+    EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+    EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+    EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+    EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+    EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+    EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+    EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+    EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+    EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+    EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+    EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+    EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+    EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+    EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withInvalid) {
+    Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+        [](const std::string &, const std::string &, const std::string &) -> std::string {
+            return "abc";
+        }
+    };
+    Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+    Configuration d; // default configuration
+    EXPECT_EQ(c.enabled, d.enabled);
+    EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+    EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+    EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+    EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+    EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+    EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+    EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+    EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+    EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+    EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+    EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+    EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+    EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+    EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+    EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withAlmostValid) {
+    Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+        [](const std::string &, const std::string &flag, const std::string &) -> std::string {
+            if (flag == "render_metrics_enabled") {
+                return "fals";
+            } else if (flag == "render_metrics_are_skipped_frames_dropped") {
+                return "fals";
+            } else if (flag == "render_metrics_max_expected_content_frame_duration_us") {
+                return "100a";
+            } else if (flag == "render_metrics_frame_rate_detection_tolerance_us") {
+                return "10b0";
+            } else if (flag == "render_metrics_live_content_frame_drop_tolerance_us") {
+                return "c100";
+            } else if (flag == "render_metrics_freeze_duration_ms_histogram_buckets") {
+                return "1,5300,3b400,123";
+            } else if (flag == "render_metrics_freeze_duration_ms_histogram_to_score") {
+                return "2,5300*400,132";
+            } else if (flag == "render_metrics_freeze_distance_ms_histogram_buckets") {
+                return "3,12345678901234,5,7";
+            } else if (flag == "render_metrics_freeze_event_max") {
+                return "12345678901234";
+            } else if (flag == "render_metrics_freeze_event_details_max") {
+                return "12345.11321";
+            } else if (flag == "render_metrics_freeze_event_distance_tolerance_ms") {
+                return "*!-";
+            } else if (flag == "render_metrics_judder_error_tolerance_us") {
+                return "10.5";
+            } else if (flag == "render_metrics_judder_score_histogram_buckets") {
+                return "abc";
+            } else if (flag == "render_metrics_judder_score_histogram_to_score") {
+                return "123,";
+            } else if (flag == "render_metrics_judder_event_max") {
+                return ",1234";
+            } else if (flag == "render_metrics_judder_event_details_max") {
+                return "10*10";
+            } else if (flag == "render_metrics_judder_event_distance_tolerance_ms") {
+                return "140-a";
+            }
+            return "";
+        }
+    };
+    Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+    Configuration d; // default configuration
+    EXPECT_EQ(c.enabled, d.enabled);
+    EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+    EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+    EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+    EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+    EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+    EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+    EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+    EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+    EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+    EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+    EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+    EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+    EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+    EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+    EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withValid) {
+    Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+        [](const std::string &, const std::string &flag, const std::string &) -> std::string {
+            if (flag == "render_metrics_enabled") {
+                return "false";
+            } else if (flag == "render_metrics_are_skipped_frames_dropped") {
+                return "false";
+            } else if (flag == "render_metrics_max_expected_content_frame_duration_us") {
+                return "2000";
+            } else if (flag == "render_metrics_frame_rate_detection_tolerance_us") {
+                return "3000";
+            } else if (flag == "render_metrics_live_content_frame_drop_tolerance_us") {
+                return "4000";
+            } else if (flag == "render_metrics_freeze_duration_ms_histogram_buckets") {
+                return "100,200,300,400";
+            } else if (flag == "render_metrics_freeze_duration_ms_histogram_to_score") {
+                return "1234567890120,1234567890121,1234567890122";
+            } else if (flag == "render_metrics_freeze_distance_ms_histogram_buckets") {
+                return "500,600,700,800,900";
+            } else if (flag == "render_metrics_freeze_event_max") {
+                return "5000";
+            } else if (flag == "render_metrics_freeze_event_details_max") {
+                return "6000";
+            } else if (flag == "render_metrics_freeze_event_distance_tolerance_ms") {
+                return "7000";
+            } else if (flag == "render_metrics_judder_error_tolerance_us") {
+                return "8000";
+            } else if (flag == "render_metrics_judder_score_histogram_buckets") {
+                return "1,2,3,4,5";
+            } else if (flag == "render_metrics_judder_score_histogram_to_score") {
+                return "-1,-2,-3,-4,-5";
+            } else if (flag == "render_metrics_judder_event_max") {
+                return "9000";
+            } else if (flag == "render_metrics_judder_event_details_max") {
+                return "10000";
+            } else if (flag == "render_metrics_judder_event_distance_tolerance_ms") {
+                return "11000";
+            }
+            return "";
+        }
+    };
+
+    Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+    // The default configuration is used here to verify that we're not setting the values to the
+    // defaults - if we were accidentally configuring the defaults, we wouldn't necessarily be
+    // testing the parsing.
+    Configuration d;
+    EXPECT_EQ(c.enabled, false);
+    EXPECT_NE(c.enabled, d.enabled);
+    EXPECT_EQ(c.areSkippedFramesDropped, false);
+    EXPECT_NE(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+    EXPECT_EQ(c.maxExpectedContentFrameDurationUs, 2000);
+    EXPECT_NE(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+    EXPECT_EQ(c.frameRateDetectionToleranceUs, 3000);
+    EXPECT_NE(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+    EXPECT_EQ(c.liveContentFrameDropToleranceUs, 4000);
+    EXPECT_NE(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    {
+        std::vector<int32_t> expected({100,200,300,400});
+        EXPECT_EQ(c.freezeDurationMsHistogramBuckets, expected);
+        EXPECT_NE(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+    }
+    {
+        std::vector<int64_t> expected({1234567890120LL,1234567890121LL,1234567890122LL});
+        EXPECT_EQ(c.freezeDurationMsHistogramToScore, expected);
+        EXPECT_NE(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+    }
+    {
+        std::vector<int32_t> expected({500,600,700,800,900});
+        EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, expected);
+        EXPECT_NE(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+    }
+    EXPECT_EQ(c.freezeEventMax, 5000);
+    EXPECT_NE(c.freezeEventMax, d.freezeEventMax);
+    EXPECT_EQ(c.freezeEventDetailsMax, 6000);
+    EXPECT_NE(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+    EXPECT_EQ(c.freezeEventDistanceToleranceMs, 7000);
+    EXPECT_NE(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+    EXPECT_EQ(c.judderErrorToleranceUs, 8000);
+    EXPECT_NE(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+    {
+        std::vector<int32_t> expected({1,2,3,4,5});
+        EXPECT_EQ(c.judderScoreHistogramBuckets, expected);
+        EXPECT_NE(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+    }
+    {
+        std::vector<int64_t> expected({-1,-2,-3,-4,-5});
+        EXPECT_EQ(c.judderScoreHistogramToScore, expected);
+        EXPECT_NE(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+    }
+    EXPECT_EQ(c.judderEventMax, 9000);
+    EXPECT_NE(c.judderEventMax, d.judderEventMax);
+    EXPECT_EQ(c.judderEventDetailsMax, 10000);
+    EXPECT_NE(c.judderEventDetailsMax, d.judderEventDetailsMax);
+    EXPECT_EQ(c.judderEventDistanceToleranceMs, 11000);
+    EXPECT_NE(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, countsReleasedFrames) {
+    Configuration c;
+    Helper h(16.66, c);
+    h.drop(10);
+    h.render({16.66, 16.66, 16.66});
+    h.skip(10); // skipped frames aren't released so they are not counted
+    h.render({16.66, 16.66, 16.66, 16.66});
+    h.drop(10);
+    EXPECT_EQ(27, h.getMetrics().frameReleasedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, countsSkippedFrames) {
+    Configuration c;
+    Helper h(16.66, c);
+    h.drop(10); // dropped frames are not counted
+    h.skip(10); // frames skipped before rendering a frame are not counted
+    h.render({16.66, 16.66, 16.66}); // rendered frames are not counted
+    h.drop(10); // dropped frames are not counted
+    h.skip(10);
+    h.render({16.66, 16.66, 16.66, 16.66}); // rendered frames are not counted
+    h.skip(10); // frames skipped at the end of playback are not counted
+    h.drop(10); // dropped frames are not counted
+    EXPECT_EQ(10, h.getMetrics().frameSkippedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenSkippedFramesAreDropped_countsDroppedFrames) {
+    Configuration c;
+    c.areSkippedFramesDropped = true;
+    Helper h(16.66, c);
+    h.skip(10); // skipped frames at the beginning of playback are not counted
+    h.drop(10);
+    h.skip(10); // skipped frames at the beginning of playback after dropped frames are not counted
+    h.render({16.66, 16.66, 16.66});  // rendered frames are not counted
+    h.drop(10);
+    h.skip(10);
+    h.render({16.66, 16.66, 16.66, 16.66}); // rendered frames are not counted
+    h.drop(10); // dropped frames at the end of playback are not counted
+    h.skip(10); // skipped frames at the end of playback are not counted
+    EXPECT_EQ(30, h.getMetrics().frameDroppedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenSkippedFramesAreNotDropped_countsDroppedFrames) {
+    Configuration c;
+    c.areSkippedFramesDropped = false;
+    Helper h(16.66, c);
+    h.skip(10); // skipped frames at the beginning of playback are not counted
+    h.drop(10);
+    h.skip(10); // skipped frames at the beginning of playback after dropped frames are not counted
+    h.render({16.66, 16.66, 16.66}); // rendered frames are not counted
+    h.drop(10);
+    h.skip(10); // skipped frames are not counted
+    h.render({16.66, 16.66, 16.66, 16.66}); // rendered frames are not counted
+    h.drop(10); // dropped frames at the end of playback are not counted
+    h.skip(10); // skipped frames at the end of playback are not counted
+    EXPECT_EQ(20, h.getMetrics().frameDroppedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, countsRenderedFrames) {
+    Configuration c;
+    Helper h(16.66, c);
+    h.drop(10); // dropped frames are not counted
+    h.render({16.66, 16.66, 16.66});
+    h.skip(10); // skipped frames are not counted
+    h.render({16.66, 16.66, 16.66, 16.66});
+    h.drop(10); // dropped frames are not counted
+    EXPECT_EQ(7, h.getMetrics().frameRenderedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, detectsFrameRate) {
+    Configuration c;
+    c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+    Helper h(16.66, c);
+    h.render({16.6, 16.7, 16.6, 16.7});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+    EXPECT_NEAR(h.getMetrics().actualFrameRate, 60.0, 0.5);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, handlesSeeking) {
+    Configuration c;
+    c.maxExpectedContentFrameDurationUs = 30;
+    VideoRenderQualityTracker v(c);
+    v.onFrameReleased(0, 0);
+    v.onFrameRendered(0, 0);
+    v.onFrameReleased(20, 20);
+    v.onFrameRendered(20, 20);
+    v.onFrameReleased(40, 40);
+    v.onFrameRendered(40, 40);
+    v.onFrameReleased(60, 60);
+    v.onFrameRendered(60, 60);
+    v.onFrameReleased(80, 80);
+    v.onFrameRendered(80, 80);
+    v.onFrameReleased(7200000000, 100);
+    v.onFrameRendered(7200000000, 100);
+    v.onFrameReleased(7200000020, 120);
+    v.onFrameRendered(7200000020, 120);
+    v.onFrameReleased(7200000040, 140);
+    v.onFrameRendered(7200000040, 140);
+    v.onFrameReleased(7200000060, 160);
+    v.onFrameRendered(7200000060, 160);
+    v.onFrameReleased(7200000080, 180);
+    v.onFrameRendered(7200000080, 180);
+    v.onFrameReleased(0, 200);
+    v.onFrameRendered(0, 200);
+    v.onFrameReleased(20, 220);
+    v.onFrameRendered(20, 220);
+    v.onFrameReleased(40, 240);
+    v.onFrameRendered(40, 240);
+    v.onFrameReleased(60, 260);
+    v.onFrameRendered(60, 260);
+    const VideoRenderQualityMetrics &m = v.getMetrics();
+    EXPECT_EQ(m.judderRate, 0); // frame durations can get messed up during discontinuities so if
+                                // the discontinuity is not detected, judder is expected
+    EXPECT_NE(m.contentFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, withSkipping_handlesSeeking) {
+    Configuration c;
+    c.maxExpectedContentFrameDurationUs = 30;
+    VideoRenderQualityTracker v(c);
+    v.onFrameReleased(0, 0);
+    v.onFrameRendered(0, 0);
+    v.onFrameReleased(20, 20);
+    v.onFrameRendered(20, 20);
+    v.onFrameReleased(40, 40);
+    v.onFrameRendered(40, 40);
+    v.onFrameReleased(60, 60);
+    v.onFrameRendered(60, 60);
+    v.onFrameReleased(80, 80);
+    v.onFrameRendered(80, 80);
+    v.onFrameSkipped(7200000000);
+    v.onFrameSkipped(7200000020);
+    v.onFrameReleased(7200000040, 100);
+    v.onFrameRendered(7200000040, 100);
+    v.onFrameReleased(7200000060, 120);
+    v.onFrameRendered(7200000060, 120);
+    v.onFrameReleased(7200000080, 140);
+    v.onFrameSkipped(0);
+    v.onFrameRendered(7200000080, 140);
+    v.onFrameSkipped(20);
+    v.onFrameReleased(40, 160);
+    v.onFrameRendered(40, 160);
+    v.onFrameReleased(60, 180);
+    v.onFrameRendered(60, 180);
+    v.onFrameReleased(80, 200);
+    v.onFrameRendered(80, 200);
+    const VideoRenderQualityMetrics &m = v.getMetrics();
+    EXPECT_EQ(m.judderRate, 0); // frame durations can get messed up during discontinuities so if
+                                // the discontinuity is not detected, judder is expected
+    EXPECT_NE(m.contentFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenLowTolerance_doesntDetectFrameRate) {
+    Configuration c;
+    c.frameRateDetectionToleranceUs = 0;
+    Helper h(16.66, c);
+    h.render({16.6, 16.7, 16.6, 16.7});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+    EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenFrameRateDestabilizes_detectsFrameRate) {
+    Configuration c;
+    c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+    Helper h(16.66, c);
+    h.render({16.6, 16.7, 16.6, 16.7});
+    h.render({30.0, 16.6, 30.0, 16.6});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+    EXPECT_NEAR(h.getMetrics().actualFrameRate, 60.0, 0.5);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, detects32Pulldown) {
+    Configuration c;
+    c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+    Helper h(41.66, c);
+    h.render({49.9, 33.2, 50.0, 33.4, 50.1, 33.2});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 24.0, 0.5);
+    EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_24_3_2_PULLDOWN);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBad32Pulldown_doesntDetect32Pulldown) {
+    Configuration c;
+    c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+    Helper h(41.66, c);
+    h.render({50.0, 33.33, 33.33, 50.00, 33.33, 50.00});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 24.0, 0.5);
+    EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenFrameRateChanges_detectsMostRecentFrameRate) {
+    Configuration c;
+    c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+    Helper h(16.66, c);
+    h.render({16.6, 16.7, 16.6, 16.7});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+    EXPECT_NEAR(h.getMetrics().actualFrameRate, 60.0, 0.5);
+    h.changeContentFrameDuration(41.66);
+    h.render({50.0, 33.33, 50.0, 33.33, 50.0, 33.33});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 24.0, 0.5);
+    EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_24_3_2_PULLDOWN);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenFrameRateIsUnstable_doesntDetectFrameRate) {
+    Configuration c;
+    c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+    Helper h(16.66, c);
+    h.render({16.66, 30.0, 16.66, 30.0, 16.66});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+    EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesFreezeRate) {
+    Configuration c;
+    Helper h(20, c);
+    h.render(3);
+    EXPECT_EQ(h.getMetrics().freezeRate, 0);
+    h.drop(3);
+    h.render(3);
+    // +1 because the first frame before drops is considered frozen
+    // and then -1 because the last frame has an unknown render duration
+    EXPECT_EQ(h.getMetrics().freezeRate, 4.0 / 8.0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesFreezeDurationHistogram) {
+    Configuration c;
+    // +17 because freeze durations include the render time of the previous frame
+    c.freezeDurationMsHistogramBuckets = {2 * 17 + 17, 3 * 17 + 17, 6 * 17 + 17};
+    Helper h(17, c);
+    h.render(1);
+    h.drop(1); // below
+    h.render(1);
+    h.drop(3); // bucket 1
+    h.render(1);
+    h.drop(2); // bucket 0
+    h.render(1);
+    h.drop(4); // bucket 1
+    h.render(1);
+    h.drop(2); // bucket 0
+    h.render(1);
+    h.drop(5); // bucket 1
+    h.render(1);
+    h.drop(10); // above
+    h.render(1);
+    h.drop(15); // above
+    h.render(1);
+    EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.emit(), "1{2,3}2");
+    EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.getCount(), 8);
+    // the smallest frame drop was 1, +17 because it includes the previous frame render time
+    EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.getMin(), 1 * 17 + 17);
+    // the largest frame drop was 15, +17 because it includes the previous frame render time
+    EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.getMax(), 15 * 17 + 17);
+    // total frame drop count, multiplied by 17, plus 17 for each occurrence, divided by occurrences
+    EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.getAvg(), ((1 + 3 + 2 + 4 + 2 + 5 + 10 + 15)
+                                                                   * 17 + 8 * 17) / 8);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesFreezeDistanceHistogram) {
+    Configuration c;
+    c.freezeDistanceMsHistogramBuckets = {1 * 17, 5 * 17, 6 * 17};
+    Helper h(17, c);
+    h.render(1);
+    h.drop(1);
+    h.render(5); // bucket 0
+    h.drop(3);
+    h.render(3); // bucket 0
+    h.drop(2);
+    h.render(9); // above
+    h.drop(5);
+    h.render(1); // below
+    h.drop(2);
+    h.render(6); // bucket 1
+    h.drop(4);
+    h.render(12); // above
+    h.drop(2);
+    h.render(1);
+    EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.emit(), "1{2,1}2");
+    EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.getCount(), 6);
+    // the smallest render between drops was 1, -17 because the last frame rendered also froze
+    EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.getMin(), 1 * 17 - 17);
+    // the largest render between drops was 12, -17 because the last frame rendered also froze
+    EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.getMax(), 12 * 17 - 17);
+    // total render count between drops, multiplied by 17, minus 17 for each occurrence, divided
+    // by occurrences
+    EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.getAvg(), ((5 + 3 + 9 + 1 + 6 + 12) * 17 -
+                                                                  6 * 17) / 6);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when60hz_hasNoJudder) {
+    Configuration c;
+    Helper h(16.66, c); // ~60Hz
+    h.render({16.66, 16.66, 16.66, 16.66, 16.66, 16.66, 16.66});
+    EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenSmallVariance60hz_hasNoJudder) {
+    Configuration c;
+    Helper h(16.66, c); // ~60Hz
+    h.render({14, 18, 14, 18, 14, 18, 14, 18});
+    EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBadSmallVariance60Hz_hasJudder) {
+    Configuration c;
+    Helper h(16.66, c); // ~60Hz
+    h.render({14, 18, 14, /* no 18 between 14s */ 14, 18, 14, 18});
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when30Hz_hasNoJudder) {
+    Configuration c;
+    Helper h(33.33, c);
+    h.render({33.33, 33.33, 33.33, 33.33, 33.33, 33.33});
+    EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenSmallVariance30Hz_hasNoJudder) {
+    Configuration c;
+    Helper h(33.33, c);
+    h.render({29.0, 35.0, 29.0, 35.0, 29.0, 35.0});
+    EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBadSmallVariance30Hz_hasJudder) {
+    Configuration c;
+    Helper h(33.33, c);
+    h.render({29.0, 35.0, 29.0, /* no 35 between 29s */ 29.0, 35.0, 29.0, 35.0});
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBad30HzTo60Hz_hasJudder) {
+    Configuration c;
+    Helper h(33.33, c);
+    h.render({33.33, 33.33, 50.0, /* frame stayed 1 vsync too long */ 16.66, 33.33, 33.33});
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 2); // note: 2 counts of judder
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when24HzTo60Hz_hasNoJudder) {
+    Configuration c;
+    Helper h(41.66, c);
+    h.render({50.0, 33.33, 50.0, 33.33, 50.0, 33.33});
+    EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when25HzTo60Hz_hasJudder) {
+    Configuration c;
+    Helper h(40, c);
+    h.render({33.33, 33.33, 50.0});
+    h.render({33.33, 33.33, 50.0});
+    h.render({33.33, 33.33, 50.0});
+    h.render({33.33, 33.33, 50.0});
+    h.render({33.33, 33.33, 50.0});
+    h.render({33.33, 33.33, 50.0});
+    EXPECT_GT(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when50HzTo60Hz_hasJudder) {
+    Configuration c;
+    Helper h(20, c);
+    h.render({16.66, 16.66, 16.66, 33.33});
+    h.render({16.66, 16.66, 16.66, 33.33});
+    h.render({16.66, 16.66, 16.66, 33.33});
+    h.render({16.66, 16.66, 16.66, 33.33});
+    h.render({16.66, 16.66, 16.66, 33.33});
+    h.render({16.66, 16.66, 16.66, 33.33});
+    EXPECT_GT(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when30HzTo50Hz_hasJudder) {
+    Configuration c;
+    Helper h(33.33, c);
+    h.render({40.0, 40.0, 40.0, 60.0});
+    h.render({40.0, 40.0, 40.0, 60.0});
+    h.render({40.0, 40.0, 40.0, 60.0});
+    h.render({40.0, 40.0, 40.0, 60.0});
+    h.render({40.0, 40.0, 40.0, 60.0});
+    EXPECT_GT(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenSmallVariancePulldown24HzTo60Hz_hasNoJudder) {
+    Configuration c;
+    Helper h(41.66, c);
+    h.render({52.0, 31.33, 52.0, 31.33, 52.0, 31.33});
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBad24HzTo60Hz_hasJudder) {
+    Configuration c;
+    Helper h(41.66, c);
+    h.render({50.0, 33.33, 50.0, 33.33, /* no 50 between 33s */ 33.33, 50.0, 33.33});
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesJudderScoreHistogram) {
+    Configuration c;
+    c.judderErrorToleranceUs = 2000;
+    c.judderScoreHistogramBuckets = {1, 5, 8};
+    Helper h(16, c);
+    h.render({16, 16, 23, 16, 16, 10, 16, 4, 16, 20, 16, 16});
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.emit(), "0{1,2}1");
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 4);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getMin(), 4);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getMax(), 12);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getAvg(), (7 + 6 + 12 + 4) / 4);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, ranksJudderScoresInOrder) {
+    // Each rendering below is ranked from best to worst in terms of user experience
+    Configuration c;
+    c.judderErrorToleranceUs = 2000;
+    c.judderScoreHistogramBuckets = {0, 1000};
+    int64_t previousScore = 0;
+
+    // 30fps poorly displayed at 60Hz
+    {
+        Helper h(33.33, c);
+        h.render({33.33, 33.33, 16.66, 50.0, 33.33, 33.33});
+        int64_t scoreBad30fpsTo60Hz = h.getMetrics().judderScoreHistogram.getMax();
+        EXPECT_GT(scoreBad30fpsTo60Hz, previousScore);
+        previousScore = scoreBad30fpsTo60Hz;
+    }
+
+    // 25fps displayed at 60hz
+    {
+        Helper h(40, c);
+        h.render({33.33, 33.33, 50.0});
+        h.render({33.33, 33.33, 50.0});
+        h.render({33.33, 33.33, 50.0});
+        h.render({33.33, 33.33, 50.0});
+        h.render({33.33, 33.33, 50.0});
+        h.render({33.33, 33.33, 50.0});
+        int64_t score25fpsTo60hz = h.getMetrics().judderScoreHistogram.getMax();
+        EXPECT_GT(score25fpsTo60hz, previousScore);
+        previousScore = score25fpsTo60hz;
+    }
+
+    // 50fps displayed at 60hz
+    {
+        Helper h(20, c);
+        h.render({16.66, 16.66, 16.66, 33.33});
+        h.render({16.66, 16.66, 16.66, 33.33});
+        h.render({16.66, 16.66, 16.66, 33.33});
+        h.render({16.66, 16.66, 16.66, 33.33});
+        h.render({16.66, 16.66, 16.66, 33.33});
+        h.render({16.66, 16.66, 16.66, 33.33});
+        int64_t score50fpsTo60hz = h.getMetrics().judderScoreHistogram.getMax();
+        EXPECT_GT(score50fpsTo60hz, previousScore);
+        previousScore = score50fpsTo60hz;
+    }
+
+    // 24fps poorly displayed at 60Hz
+    {
+        Helper h(41.66, c);
+        h.render({50.0, 33.33, 50.0, 33.33, 33.33, 50.0, 33.33});
+        int64_t scoreBad24HzTo60Hz = h.getMetrics().judderScoreHistogram.getMax();
+        EXPECT_GT(scoreBad24HzTo60Hz, previousScore);
+        previousScore = scoreBad24HzTo60Hz;
+    }
+
+    // 30fps displayed at 50hz
+    {
+        Helper h(33.33, c);
+        h.render({40.0, 40.0, 40.0, 60.0});
+        h.render({40.0, 40.0, 40.0, 60.0});
+        h.render({40.0, 40.0, 40.0, 60.0});
+        h.render({40.0, 40.0, 40.0, 60.0});
+        h.render({40.0, 40.0, 40.0, 60.0});
+        int64_t score30fpsTo50hz = h.getMetrics().judderScoreHistogram.getMax();
+        EXPECT_GT(score30fpsTo50hz, previousScore);
+        previousScore = score30fpsTo50hz;
+    }
+
+    // 24fps displayed at 50Hz
+    {
+        Helper h(41.66, c);
+        h.render(40.0, 11);
+        h.render(60.0, 1);
+        h.render(40.0, 11);
+        h.render(60.0, 1);
+        h.render(40.0, 11);
+        int64_t score24HzTo50Hz = h.getMetrics().judderScoreHistogram.getMax();
+        EXPECT_GT(score24HzTo50Hz, previousScore);
+        previousScore = score24HzTo50Hz;
+    }
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesFreezeEvents) {
+    Configuration c;
+    c.freezeEventMax = 5;
+    c.freezeEventDetailsMax = 4;
+    c.freezeEventDistanceToleranceMs = 1000;
+    Helper h(20, c);
+    h.render(10);
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+    h.drop(3);
+    h.render(1000 / 20); // +1 because it's unclear if the current frame is frozen
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+    h.drop(1);
+    h.render(10);
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+    h.drop(6);
+    h.render(12);
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+    h.drop(10);
+    h.render(1000 / 20 + 1); // +1 because it's unclear if the current frame is frozen
+    EXPECT_EQ(h.getMetrics().freezeEventCount, 1);
+    FreezeEvent e = h.getAndClearFreezeEvent();
+    EXPECT_EQ(e.valid, true); // freeze event
+    // -1 because the last rendered frame is considered frozen
+    EXPECT_EQ(e.initialTimeUs, 9 * 20 * 1000);
+    // only count the last frame of the first group of rendered frames
+    EXPECT_EQ(e.durationMs, (1 + 3 + 1000 / 20 + 1 + 10 + 6 + 12 + 10) * 20);
+    EXPECT_EQ(e.count, 4);
+    // number of dropped frames
+    // +1 because the last rendered frame is considered frozen
+    EXPECT_EQ(e.sumDurationMs, (4 + 2 + 7 + 11) * 20);
+    // number of rendered frames between dropped frames
+    // -1 because the last rendered frame is considered frozen
+    EXPECT_EQ(e.sumDistanceMs, ((1000 / 20) - 1 + 9 + 11) * 20);
+    // +1 for each since the last rendered frame is considered frozen
+    ASSERT_EQ(e.details.durationMs.size(), 4);
+    EXPECT_EQ(e.details.durationMs[0], 4 * 20);
+    EXPECT_EQ(e.details.durationMs[1], 2 * 20);
+    EXPECT_EQ(e.details.durationMs[2], 7 * 20);
+    EXPECT_EQ(e.details.durationMs[3], 11 * 20);
+    // -1 for each since the last rendered frame is considered frozen
+    ASSERT_EQ(e.details.distanceMs.size(), 4);
+    EXPECT_EQ(e.details.distanceMs[0], -1);
+    EXPECT_EQ(e.details.distanceMs[1], 1000 - 20);
+    EXPECT_EQ(e.details.distanceMs[2], 9 * 20);
+    EXPECT_EQ(e.details.distanceMs[3], 11 * 20);
+    int64_t previousEventEndTimeUs = e.initialTimeUs + e.durationMs * 1000;
+    h.drop(1);
+    h.render(4);
+    h.drop(1);
+    h.render(4);
+    h.drop(1);
+    h.render(4);
+    h.drop(1);
+    h.render(4);
+    h.drop(1);
+    h.render(1000 / 20 + 1);
+    EXPECT_EQ(h.getMetrics().freezeEventCount, 2);
+    e = h.getAndClearFreezeEvent();
+    EXPECT_EQ(e.valid, true);
+    // 1000ms tolerance means 1000ms from the end of the last event to the beginning of this event
+    EXPECT_EQ(e.initialTimeUs, previousEventEndTimeUs + 1000 * 1000);
+    EXPECT_EQ(e.count, 5);
+    // 5 freezes captured in the freeze event, but only 4 details are recorded
+    EXPECT_EQ(e.details.durationMs.size(), 4);
+    EXPECT_EQ(e.details.distanceMs.size(), 4);
+    EXPECT_EQ(e.details.distanceMs[0], 1000); // same as the tolerance
+    // The duration across the entire series of freezes is captured, with only 4 details captured
+    // +1 because the first rendered frame is considered frozen (not the 1st dropped frame)
+    EXPECT_EQ(e.durationMs, (1 + 1 + 4 + 1 + 4 + 1 + 4 + 1 + 4 + 1) * 20);
+    // The durations of all 5 freezes are captured, with only 4 details captured
+    EXPECT_EQ(e.sumDurationMs, (2 + 2 + 2 + 2 + 2) * 20);
+    // The distances of all 5 freezes are captured, with only 4 details captured
+    EXPECT_EQ(e.sumDistanceMs, (3 + 3 + 3 + 3) * 20);
+    h.drop(1);
+    h.render(1000 / 20 + 1);
+    EXPECT_EQ(h.getMetrics().freezeEventCount, 3);
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, true);
+    h.drop(1);
+    h.render(1000 / 20 + 1);
+    EXPECT_EQ(h.getMetrics().freezeEventCount, 4);
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, true);
+    h.drop(1);
+    h.render(1000 / 20 + 1);
+    EXPECT_EQ(h.getMetrics().freezeEventCount, 5);
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, true);
+    h.drop(1);
+    h.render(1000 / 20 + 1);
+    // The 6th event isn't captured because it exceeds the configured limit
+    EXPECT_EQ(h.getMetrics().freezeEventCount, 6);
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesJudderEvents) {
+    Configuration c;
+    c.judderEventMax = 4;
+    c.judderEventDetailsMax = 3;
+    c.judderEventDistanceToleranceMs = 100;
+    Helper h(20, c);
+    h.render({19, 20, 19});
+    EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+    h.render({15, 19, 20, 19});
+    EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+    h.render({28, 20, 19});
+    EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+    h.render({13, 20, 20, 20, 20});
+    EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+    // Start with judder for the next event at the end of the sequence, because judder is scored
+    // one frame behind, and for combining judder occurrences into events, it's not clear yet if
+    // the current frame has judder or not.
+    h.render({15, 20, 20, 20, 20, 20, 15});
+    JudderEvent e = h.getAndClearJudderEvent();
+    EXPECT_EQ(e.valid, true);
+    EXPECT_EQ(e.initialTimeUs, (19 + 20 + 19) * 1000);
+    EXPECT_EQ(e.durationMs, 15 + 19 + 20 + 19 /**/ + 28 + 20 + 19 /**/ + 13 + 20 * 4 /**/ + 15);
+    EXPECT_EQ(e.count, 4);
+    EXPECT_EQ(e.sumScore, (20 - 15) + (28 - 20) + (20 - 13) + (20 - 15));
+    EXPECT_EQ(e.sumDistanceMs, 19 + 20 + 19 /**/ + 20 + 19 /**/ + 20 * 4);
+    ASSERT_EQ(e.details.actualRenderDurationUs.size(), 3); // 3 details per configured maximum
+    EXPECT_EQ(e.details.actualRenderDurationUs[0], 15 * 1000);
+    EXPECT_EQ(e.details.actualRenderDurationUs[1], 28 * 1000);
+    EXPECT_EQ(e.details.actualRenderDurationUs[2], 13 * 1000);
+    ASSERT_EQ(e.details.contentRenderDurationUs.size(), 3);
+    EXPECT_EQ(e.details.contentRenderDurationUs[0], 20 * 1000);
+    EXPECT_EQ(e.details.contentRenderDurationUs[1], 20 * 1000);
+    EXPECT_EQ(e.details.contentRenderDurationUs[2], 20 * 1000);
+    ASSERT_EQ(e.details.distanceMs.size(), 3);
+    EXPECT_EQ(e.details.distanceMs[0], -1);
+    EXPECT_EQ(e.details.distanceMs[1], 19 + 20 + 19);
+    EXPECT_EQ(e.details.distanceMs[2], 20 + 19);
+    h.render({20, 20, 20, 20, 20, 15});
+    e = h.getAndClearJudderEvent();
+    EXPECT_EQ(e.valid, true);
+    ASSERT_EQ(e.details.distanceMs.size(), 1);
+    EXPECT_EQ(e.details.distanceMs[0], 100); // same as the tolerance
+    h.render({20, 20, 20, 20, 20, 15});
+    EXPECT_EQ(h.getAndClearJudderEvent().valid, true);
+    h.render({20, 20, 20, 20, 20, 15});
+    EXPECT_EQ(h.getAndClearJudderEvent().valid, true);
+    h.render({20, 20, 20, 20, 20, 20});
+    EXPECT_EQ(h.getAndClearJudderEvent().valid, false); // max number of judder events exceeded
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesOverallFreezeScore) {
+    Configuration c;
+    // # drops * 20ms + 20ms because current frame is frozen + 1 for bucket threshold
+    c.freezeDurationMsHistogramBuckets = {1 * 20 + 21, 5 * 20 + 21, 10 * 20 + 21};
+    c.freezeDurationMsHistogramToScore = {10, 100, 1000};
+    Helper h(20, c);
+    h.render(5);
+    h.drop(2); // bucket = 0, bucket count = 1, bucket score = 10
+    h.render(5);
+    h.drop(11); // bucket = 2, bucket count = 1, bucket score = 1000
+    h.render(5);
+    h.drop(6); // bucket = 1, bucket count = 1, bucket score = 100
+    h.render(5);
+    h.drop(1); // bucket = null
+    h.render(5);
+    h.drop(3); // bucket = 0, bucket count = 2, bucket score = 20
+    h.render(5);
+    h.drop(10); // bucket = 1, bucket count = 2, bucket score = 200
+    h.render(5);
+    h.drop(7); // bucket = 1, bucket count = 3, bucket score = 300
+    h.render(5);
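+    // The overall freeze score is each bucket's count times its configured score:
+    // 2 * 10 + 3 * 100 + 1 * 1000.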
+    EXPECT_EQ(h.getMetrics().freezeScore, 20 + 300 + 1000);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesOverallJudderScore) {
+    Configuration c;
+    c.judderScoreHistogramBuckets = {0, 6, 10};
+    c.judderScoreHistogramToScore = {10, 100, 1000};
+    Helper h(20, c);
+    h.render({20, 20, 15, 20, 20}); // bucket = 0, bucket count = 1, bucket score = 10
+    h.render({20, 20, 11, 20, 20}); // bucket = 1, bucket count = 1, bucket score = 100
+    h.render({20, 20, 13, 20, 20}); // bucket = 1, bucket count = 2, bucket score = 200
+    h.render({20, 20,  5, 20, 20}); // bucket = 2, bucket count = 1, bucket score = 1000
+    h.render({20, 20, 14, 20, 20}); // bucket = 1, bucket count = 3, bucket score = 300
+    h.render({20, 20, 10, 20, 20}); // bucket = 2, bucket count = 2, bucket score = 2000
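+    // As above, the overall judder score is each bucket's count times its configured score:
+    // 1 * 10 + 3 * 100 + 2 * 1000.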
+    EXPECT_EQ(h.getMetrics().judderScore, 10 + 300 + 2000);
+}
+
+} // android
diff --git a/media/libstagefright/tests/extractorFactory/Android.bp b/media/libstagefright/tests/extractorFactory/Android.bp
index 20ebe44..a067284 100644
--- a/media/libstagefright/tests/extractorFactory/Android.bp
+++ b/media/libstagefright/tests/extractorFactory/Android.bp
@@ -66,7 +66,4 @@
             "signed-integer-overflow",
         ],
     },
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor.zip?unzip=true",
-    ],
 }
diff --git a/media/libstagefright/tests/extractorFactory/AndroidTest.xml b/media/libstagefright/tests/extractorFactory/AndroidTest.xml
index 3aa6392..f1d4201 100644
--- a/media/libstagefright/tests/extractorFactory/AndroidTest.xml
+++ b/media/libstagefright/tests/extractorFactory/AndroidTest.xml
@@ -18,14 +18,21 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="ExtractorFactoryTest->/data/local/tmp/ExtractorFactoryTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor.zip?unzip=true"
-            value="/data/local/tmp/ExtractorFactoryTestRes/" />
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="ExtractorFactoryTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="extractor-1.5" />
+        <option name="dynamic-config-module" value="ExtractorFactoryTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="ExtractorFactoryTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/ExtractorFactoryTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/extractor-1.5/" />
     </test>
 </configuration>
diff --git a/media/libstagefright/tests/extractorFactory/DynamicConfig.xml b/media/libstagefright/tests/extractorFactory/DynamicConfig.xml
new file mode 100644
index 0000000..0258808
--- /dev/null
+++ b/media/libstagefright/tests/extractorFactory/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor-1.5.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/libstagefright/tests/mediacodec/Android.bp b/media/libstagefright/tests/mediacodec/Android.bp
index 9cdc6d4..23882ea 100644
--- a/media/libstagefright/tests/mediacodec/Android.bp
+++ b/media/libstagefright/tests/mediacodec/Android.bp
@@ -70,4 +70,4 @@
     test_suites: [
         "general-tests",
     ],
-}
+}
\ No newline at end of file
diff --git a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
index ecdaac5..71ddbe5 100644
--- a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
+++ b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
@@ -62,7 +62,8 @@
              size_t offset,
              const CryptoPlugin::SubSample *subSamples,
              size_t numSubSamples,
-             const sp<MediaCodecBuffer> &buffer),
+             const sp<MediaCodecBuffer> &buffer,
+             AString* errorDetailMsg),
             (override));
     MOCK_METHOD(status_t, renderOutputBuffer,
             (const sp<MediaCodecBuffer> &buffer, int64_t timestampNs),
diff --git a/media/libstagefright/tests/writer/AndroidTest.xml b/media/libstagefright/tests/writer/AndroidTest.xml
index cc890fe..0b0eb01 100644
--- a/media/libstagefright/tests/writer/AndroidTest.xml
+++ b/media/libstagefright/tests/writer/AndroidTest.xml
@@ -18,14 +18,22 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="writerTest->/data/local/tmp/writerTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/WriterTestRes-1.1.zip?unzip=true"
-            value="/data/local/tmp/WriterTestRes/" />
     </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="writerTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="WriterTestRes-1.2" />
+        <option name="dynamic-config-module" value="writerTest" />
+    </target_preparer>
+
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="writerTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/WriterTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/WriterTestRes-1.2/" />
         <option name="native-test-flag" value="-C true" />
     </test>
 </configuration>
diff --git a/media/libstagefright/tests/writer/DynamicConfig.xml b/media/libstagefright/tests/writer/DynamicConfig.xml
new file mode 100644
index 0000000..e6dc502
--- /dev/null
+++ b/media/libstagefright/tests/writer/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/WriterTestRes-1.2.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/libstagefright/timedtext/test/Android.bp b/media/libstagefright/timedtext/test/Android.bp
index 953da79..ae97c50 100644
--- a/media/libstagefright/timedtext/test/Android.bp
+++ b/media/libstagefright/timedtext/test/Android.bp
@@ -62,7 +62,4 @@
             "signed-integer-overflow",
         ],
     },
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/timedtext/test/TimedTextUnitTest.zip?unzip=true",
-    ],
 }
diff --git a/media/libstagefright/timedtext/test/AndroidTest.xml b/media/libstagefright/timedtext/test/AndroidTest.xml
index 3654e23..0d5d79f 100644
--- a/media/libstagefright/timedtext/test/AndroidTest.xml
+++ b/media/libstagefright/timedtext/test/AndroidTest.xml
@@ -18,14 +18,22 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="TimedTextUnitTest->/data/local/tmp/TimedTextUnitTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/timedtext/test/TimedTextUnitTest.zip?unzip=true"
-            value="/data/local/tmp/TimedTextUnitTestRes/" />
+    </target_preparer>
+
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="TimedTextUnitTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="TimedTextUnitTest-1.0" />
+        <option name="dynamic-config-module" value="TimedTextUnitTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="TimedTextUnitTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/TimedTextUnitTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/TimedTextUnitTest-1.0/" />
     </test>
 </configuration>
diff --git a/media/libstagefright/timedtext/test/DynamicConfig.xml b/media/libstagefright/timedtext/test/DynamicConfig.xml
new file mode 100644
index 0000000..e36277e
--- /dev/null
+++ b/media/libstagefright/timedtext/test/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/timedtext/test/TimedTextUnitTest-1.0.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/libstagefright/timedtext/test/fuzzer/Android.bp b/media/libstagefright/timedtext/test/fuzzer/Android.bp
new file mode 100644
index 0000000..6590ebb
--- /dev/null
+++ b/media/libstagefright/timedtext/test/fuzzer/Android.bp
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_library_static {
+    name: "timedtext_fuzz-protos",
+
+    srcs: ["timedtext_fuzz.proto"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    shared_libs: ["libprotobuf-cpp-full"],
+    proto: {
+        type: "full",
+        canonical_path_from_root: false,
+        local_include_dirs: ["."],
+        export_proto_headers: true,
+    },
+}
+
+cc_fuzz {
+    name: "timedtext_fuzzer",
+    srcs: [
+        "timedtext_fuzzer.cpp",
+    ],
+    static_libs: [
+        "libstagefright_timedtext",
+        "timedtext_fuzz-protos",
+    ],
+    shared_libs: [
+        "libstagefright_foundation",
+        "libprotobuf-cpp-full",
+        "libbinder",
+        "libprotobuf-mutator",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/libstagefright/timedtext/test/fuzzer/README.md b/media/libstagefright/timedtext/test/fuzzer/README.md
new file mode 100644
index 0000000..f391ea7
--- /dev/null
+++ b/media/libstagefright/timedtext/test/fuzzer/README.md
@@ -0,0 +1,23 @@
+# Fuzzer for libstagefright_timedtext
+
+libstagefright_timedtext supports the following parameters:
+1. Flags (parameter name: `flags`)
+2. TimeMs (parameter name: `timeMs`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `flags`   | 1. `TextDescriptions::OUT_OF_BAND_TEXT_SRT` 2.  `TextDescriptions::GLOBAL_DESCRIPTIONS` 3. `TextDescriptions::IN_BAND_TEXT_3GPP` 4. `TextDescriptions::LOCAL_DESCRIPTIONS` | Value chosen from valid values by obtaining index from FuzzedDataProvider|
+| `timeMs`   | `INT_MIN` to `INT_MAX` | Value obtained from FuzzedDataProvider|
+
+
+#### Steps to run
+
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) timedtext_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/timedtext_fuzzer/timedtext_fuzzer
+```
diff --git a/media/libstagefright/timedtext/test/fuzzer/timedtext_fuzz.proto b/media/libstagefright/timedtext/test/fuzzer/timedtext_fuzz.proto
new file mode 100644
index 0000000..4c90278
--- /dev/null
+++ b/media/libstagefright/timedtext/test/fuzzer/timedtext_fuzz.proto
@@ -0,0 +1,142 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Proto files are used for structure-aware fuzzing, which makes fuzzing
+ * more effective.
+ * timedtext_fuzz.proto declares the structures that are used purely
+ * inside timedtext_fuzzer.
+
+syntax = "proto3";
+
+enum Flag {
+    flag3gppglobal = 0;
+    flag3gpplocal = 1;
+    flagsrtlocal = 2;
+}
+
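+// ChunkType values are 3GPP timed-text box FOURCC identifiers interpreted as
+// big-endian 32-bit integers, e.g. 1954034535 == 'tx3g' and 1937013100 == 'styl'.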
+enum ChunkType {
+    default = 0;
+    tx3g = 1954034535;
+    styl = 1937013100;
+    krok = 1802661739;
+    hlit = 1751935348;
+    hclr = 1751346290;
+    dlay = 1684824441;
+    href = 1752327526;
+    tbox = 1952608120;
+    blnk = 1651273323;
+    txrp = 1953985136;
+}
+
+message FontRecord {
+    uint32 fontId = 1;
+    repeated uint32 font = 2;
+}
+
+message SRTLocal {
+    repeated uint32 data = 1;
+}
+
+message GPPGlobal {
+    uint64 reservedBytes = 1;
+    uint32 displayFlags = 2;
+    int32 horizontal_vertical_justification = 3;
+    uint32 rgba = 4;
+    int32 textBox = 5;
+    uint32 styleRecordStart = 6;
+    uint32 fontId = 7;
+    uint32 fontStyle = 8;
+    uint32 entryCount = 9;
+    repeated FontRecord fontEntry = 10;
+    uint32 defaultDisparity = 11;
+}
+
+message StyleRecord {
+    uint32 startchar = 1;
+    uint32 font = 2;
+    uint32 rgba = 3;
+}
+
+message TextStyleBox {
+    uint32 count = 1;
+    repeated StyleRecord record = 2;
+}
+
+message HighlightBox {
+    uint32 start = 1;
+    uint32 end = 2;
+}
+
+message HighlightColor {
+    uint32 rgba = 1;
+}
+
+message TextKaraokeBox {
+    uint32 highlightStartTime = 1;
+    uint32 entryCount = 2;
+    repeated uint64 highlightData = 3;
+}
+
+message BoxRecord {
+    uint32 topleft = 1;
+    uint32 bottomright = 2;
+}
+
+message BlinkBox {
+    uint32 charoffset = 1;
+}
+
+message HyperTextBox {
+    uint32 charoffset = 1;
+    uint32 urlLength = 2;
+    repeated uint32 url = 3;
+    uint32 altLength = 4;
+    repeated uint32 altString = 5;
+}
+
+message GPPLocalText {
+    string text = 1;
+}
+
+message GPPLocalFormat {
+    uint64 reservedBytes = 1;
+    oneof formatStyle {
+        TextStyleBox textbox = 2;
+        HighlightBox hltbox = 3;
+        HighlightColor hltcolor = 4;
+        TextKaraokeBox krokbox = 5;
+        uint32 scrollDelay = 6;
+        HyperTextBox hrefBox = 7;
+        BoxRecord boxrecord = 8;
+        BlinkBox blinkBox = 9;
+        uint32 wrapFlag = 10;
+    }
+}
+
+message GPPLocal {
+    GPPLocalText localtext = 1;
+    GPPLocalFormat format = 2;
+}
+
+message TimedText {
+    Flag handle = 1;
+    int32 timeMs = 2;
+    SRTLocal srt = 3;
+    GPPGlobal global = 4;
+    GPPLocal local = 5;
+}
diff --git a/media/libstagefright/timedtext/test/fuzzer/timedtext_fuzzer.cpp b/media/libstagefright/timedtext/test/fuzzer/timedtext_fuzzer.cpp
new file mode 100644
index 0000000..da1bdf8
--- /dev/null
+++ b/media/libstagefright/timedtext/test/fuzzer/timedtext_fuzzer.cpp
@@ -0,0 +1,181 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <binder/Parcel.h>
+#include <timedtext/TextDescriptions.h>
+#include <timedtext_fuzz.pb.h>
+#include "fuzzer/FuzzedDataProvider.h"
+#include "src/libfuzzer/libfuzzer_macro.h"
+
+using namespace android;
+constexpr int32_t kTextBytes = 2;
+constexpr int32_t kChunkBytes = 8;
+constexpr int32_t kChunkTypeBytes = 4;
+constexpr int32_t kGlobalTextOffset = 0;
+constexpr size_t kByte3Mask = 0xff000000UL;
+constexpr size_t kByte2Mask = 0x00ff0000UL;
+constexpr size_t kByte1Mask = 0x0000ff00UL;
+constexpr size_t kByte0Mask = 0x000000ffUL;
+
+/**
+ * Sets ChunkSize/ChunkType (uint32_t) in timedtext-description vector<uint8_t>
+ * by extracting each byte from ChunkSize and populating the vector.
+ */
+void setChunkParameter(std::vector<uint8_t>& timedtext, size_t param, size_t paramOffset) {
+    timedtext[paramOffset + 0] = (param & kByte3Mask) >> 24;
+    timedtext[paramOffset + 1] = (param & kByte2Mask) >> 16;
+    timedtext[paramOffset + 2] = (param & kByte1Mask) >> 8;
+    timedtext[paramOffset + 3] = (param & kByte0Mask);
+}
+
+/**
+ * Sets TextLength(uint16_t) in 3GPPLocal-description vector<uint8_t>
+ * by extracting each byte from TextLength and populating the vector.
+ */
+void setTextSize(std::vector<uint8_t>& local3GPPDescription, int32_t textLength) {
+    local3GPPDescription[0] = (textLength & kByte1Mask) >> 8;
+    local3GPPDescription[1] = (textLength & kByte0Mask);
+}
+
+DEFINE_PROTO_FUZZER(const TimedText& input) {
+    switch (input.handle()) {
+        case flag3gppglobal: {
+            size_t gppGlobalByteSize = input.global().ByteSizeLong();
+            if (gppGlobalByteSize) {
+                std::vector<uint8_t> global3GPPDescription(gppGlobalByteSize + kChunkBytes);
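+                // Layout: [4-byte payload size][4-byte 'tx3g' type][serialized GPPGlobal payload].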
+                setChunkParameter(global3GPPDescription, gppGlobalByteSize, kGlobalTextOffset);
+                setChunkParameter(global3GPPDescription, tx3g, kGlobalTextOffset + kChunkTypeBytes);
+                input.global().SerializeToArray(global3GPPDescription.data() + kChunkBytes,
+                                                global3GPPDescription.size());
+                Parcel* parcel = new Parcel();
+                TextDescriptions::getParcelOfDescriptions(
+                        global3GPPDescription.data(), global3GPPDescription.size(),
+                        TextDescriptions::IN_BAND_TEXT_3GPP | TextDescriptions::GLOBAL_DESCRIPTIONS,
+                        input.timems(), parcel);
+                delete parcel;
+            }
+            break;
+        }
+        case flag3gpplocal: {
+            size_t gppLocalByteSize = input.local().ByteSizeLong();
+            if (gppLocalByteSize) {
+                std::vector<uint8_t> local3GPPDescription(gppLocalByteSize + kChunkBytes +
+                                                          kTextBytes);
+                std::string text = input.local().localtext().text();
+                int32_t textLength = text.size();
+                setTextSize(local3GPPDescription, textLength);
+                input.local().localtext().SerializeToArray(local3GPPDescription.data() + kTextBytes,
+                                                           textLength);
+                size_t gppLocalFormatSize = input.local().format().ByteSizeLong();
+                size_t textOffset = textLength + kTextBytes;
+                setChunkParameter(local3GPPDescription, gppLocalFormatSize, textOffset);
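+                // The box type written below at textOffset + kChunkTypeBytes depends on the
+                // oneof format style selected by the proto input; the format message itself
+                // is then serialized immediately after that 8-byte header.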
+                switch (input.local().format().formatStyle_case()) {
+                    case GPPLocalFormat::FormatStyleCase::kTextbox: {
+                        setChunkParameter(local3GPPDescription, styl, textOffset + kChunkTypeBytes);
+                        input.local().format().SerializeToArray(
+                                local3GPPDescription.data() + textOffset + kChunkBytes,
+                                gppLocalFormatSize);
+                        break;
+                    }
+                    case GPPLocalFormat::FormatStyleCase::kHltbox: {
+                        setChunkParameter(local3GPPDescription, hlit, textOffset + kChunkTypeBytes);
+                        input.local().format().SerializeToArray(
+                                local3GPPDescription.data() + textOffset + kChunkBytes,
+                                gppLocalFormatSize);
+                        break;
+                    }
+                    case GPPLocalFormat::FormatStyleCase::kHltcolor: {
+                        setChunkParameter(local3GPPDescription, hclr, textOffset + kChunkTypeBytes);
+                        input.local().format().SerializeToArray(
+                                local3GPPDescription.data() + textOffset + kChunkBytes,
+                                gppLocalFormatSize);
+                        break;
+                    }
+                    case GPPLocalFormat::FormatStyleCase::kKrokbox: {
+                        setChunkParameter(local3GPPDescription, krok, textOffset + kChunkTypeBytes);
+                        input.local().format().SerializeToArray(
+                                local3GPPDescription.data() + textOffset + kChunkBytes,
+                                gppLocalFormatSize);
+                        break;
+                    }
+                    case GPPLocalFormat::FormatStyleCase::kScrollDelay: {
+                        setChunkParameter(local3GPPDescription, dlay, textOffset + kChunkTypeBytes);
+                        input.local().format().SerializeToArray(
+                                local3GPPDescription.data() + textOffset + kChunkBytes,
+                                gppLocalFormatSize);
+                        break;
+                    }
+                    case GPPLocalFormat::FormatStyleCase::kHrefBox: {
+                        setChunkParameter(local3GPPDescription, href, textOffset + kChunkTypeBytes);
+                        input.local().format().SerializeToArray(
+                                local3GPPDescription.data() + textOffset + kChunkBytes,
+                                gppLocalFormatSize);
+                        break;
+                    }
+                    case GPPLocalFormat::FormatStyleCase::kBoxrecord: {
+                        setChunkParameter(local3GPPDescription, tbox, textOffset + kChunkTypeBytes);
+                        input.local().format().SerializeToArray(
+                                local3GPPDescription.data() + textOffset + kChunkBytes,
+                                gppLocalFormatSize);
+                        break;
+                    }
+                    case GPPLocalFormat::FormatStyleCase::kBlinkBox: {
+                        setChunkParameter(local3GPPDescription, blnk, textOffset + kChunkTypeBytes);
+                        input.local().format().SerializeToArray(
+                                local3GPPDescription.data() + textOffset + kChunkBytes,
+                                gppLocalFormatSize);
+                        break;
+                    }
+                    case GPPLocalFormat::FormatStyleCase::kWrapFlag: {
+                        setChunkParameter(local3GPPDescription, txrp, textOffset + kChunkTypeBytes);
+                        input.local().format().SerializeToArray(
+                                local3GPPDescription.data() + textOffset + kChunkBytes,
+                                gppLocalFormatSize);
+                        break;
+                    }
+                    default: {
+                        break;
+                    }
+                }
+                Parcel* parcel = new Parcel();
+                TextDescriptions::getParcelOfDescriptions(
+                        local3GPPDescription.data(), local3GPPDescription.size(),
+                        TextDescriptions::IN_BAND_TEXT_3GPP | TextDescriptions::LOCAL_DESCRIPTIONS,
+                        input.timems(), parcel);
+                delete parcel;
+            }
+            break;
+        }
+        case flagsrtlocal: {
+            size_t srtByteSize = input.srt().ByteSizeLong();
+            if (srtByteSize) {
+                std::vector<uint8_t> srtLocalDescription(srtByteSize);
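+                // Out-of-band SRT descriptions carry no chunk header; the serialized message
+                // is handed to getParcelOfDescriptions as-is.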
+                input.srt().SerializeToArray(srtLocalDescription.data(),
+                                             srtLocalDescription.size());
+                Parcel* parcel = new Parcel();
+                TextDescriptions::getParcelOfDescriptions(
+                        srtLocalDescription.data(), srtLocalDescription.size(),
+                        TextDescriptions::OUT_OF_BAND_TEXT_SRT |
+                                TextDescriptions::LOCAL_DESCRIPTIONS,
+                        input.timems(), parcel);
+                delete parcel;
+            }
+            break;
+        }
+        default:
+            break;
+    }
+}
diff --git a/media/libstagefright/writer_fuzzers/Android.bp b/media/libstagefright/writer_fuzzers/Android.bp
index b81f27e..58aa7cd 100644
--- a/media/libstagefright/writer_fuzzers/Android.bp
+++ b/media/libstagefright/writer_fuzzers/Android.bp
@@ -57,6 +57,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzers target the APIs of all the various writers",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index afc873c..2f204f9 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -55,4 +55,5 @@
     name: "media_codecs",
     srcs: ["media_codecs.xsd"],
     package_name: "media.codecs",
+    root_elements: ["MediaCodecs"],
 }
diff --git a/media/libstagefright/xmlparser/api/current.txt b/media/libstagefright/xmlparser/api/current.txt
index 95c347a..7de5955 100644
--- a/media/libstagefright/xmlparser/api/current.txt
+++ b/media/libstagefright/xmlparser/api/current.txt
@@ -171,7 +171,6 @@
 
   public class XmlParser {
     ctor public XmlParser();
-    method public static media.codecs.Included readIncluded(java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException;
     method public static media.codecs.MediaCodecs readMediaCodecs(java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException;
     method public static String readText(org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
     method public static void skip(org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
diff --git a/media/module/bqhelper/GraphicBufferSource.cpp b/media/module/bqhelper/GraphicBufferSource.cpp
index 3202cc5..4bb2215 100644
--- a/media/module/bqhelper/GraphicBufferSource.cpp
+++ b/media/module/bqhelper/GraphicBufferSource.cpp
@@ -589,7 +589,7 @@
 
 void GraphicBufferSource::onDataspaceChanged_l(
         android_dataspace dataspace, android_pixel_format pixelFormat) {
-    ALOGD("got buffer with new dataSpace #%x", dataspace);
+    ALOGD("got buffer with new dataSpace %#x", dataspace);
     mLastDataspace = dataspace;
 
     if (ColorUtils::convertDataSpaceToV0(dataspace)) {
diff --git a/media/module/codecs/amrnb/dec/test/Android.bp b/media/module/codecs/amrnb/dec/test/Android.bp
index de69cfc..74258e0 100644
--- a/media/module/codecs/amrnb/dec/test/Android.bp
+++ b/media/module/codecs/amrnb/dec/test/Android.bp
@@ -58,7 +58,4 @@
             "signed-integer-overflow",
         ],
     },
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecoderTest.zip?unzip=true",
-    ],
 }
diff --git a/media/module/codecs/amrnb/dec/test/AndroidTest.xml b/media/module/codecs/amrnb/dec/test/AndroidTest.xml
index 1a9e678..539fa5c 100644
--- a/media/module/codecs/amrnb/dec/test/AndroidTest.xml
+++ b/media/module/codecs/amrnb/dec/test/AndroidTest.xml
@@ -13,19 +13,27 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<configuration description="Test module config for Amr-nb Decoder unit test">
+<configuration description="Test module config for Amr-nb Decoder unit test">
     <option name="test-suite-tag" value="AmrnbDecoderTest" />
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="AmrnbDecoderTest->/data/local/tmp/AmrnbDecoderTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecoderTest.zip?unzip=true"
-            value="/data/local/tmp/AmrnbDecoderTestRes/" />
+    </target_preparer>
+
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="AmrnbDecoderTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="AmrnbDecoderTest-1.0" />
+        <option name="dynamic-config-module" value="AmrnbDecoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="AmrnbDecoderTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/AmrnbDecoderTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/AmrnbDecoderTest-1.0/" />
     </test>
 </configuration>
diff --git a/media/module/codecs/amrnb/dec/test/DynamicConfig.xml b/media/module/codecs/amrnb/dec/test/DynamicConfig.xml
new file mode 100644
index 0000000..de81c48
--- /dev/null
+++ b/media/module/codecs/amrnb/dec/test/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecoderTest-1.0.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/module/codecs/amrnb/enc/fuzzer/Android.bp b/media/module/codecs/amrnb/enc/fuzzer/Android.bp
index 2c041b7..bcbcee2 100644
--- a/media/module/codecs/amrnb/enc/fuzzer/Android.bp
+++ b/media/module/codecs/amrnb/enc/fuzzer/Android.bp
@@ -48,5 +48,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libstagefright_amrnbenc library",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/module/codecs/amrnb/enc/test/Android.bp b/media/module/codecs/amrnb/enc/test/Android.bp
index 5871486..7e393e3 100644
--- a/media/module/codecs/amrnb/enc/test/Android.bp
+++ b/media/module/codecs/amrnb/enc/test/Android.bp
@@ -58,7 +58,4 @@
             "signed-integer-overflow",
         ],
     },
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrnb/enc/test/AmrnbEncoderTest.zip?unzip=true",
-    ],
 }
diff --git a/media/module/codecs/amrnb/enc/test/AndroidTest.xml b/media/module/codecs/amrnb/enc/test/AndroidTest.xml
index 9fe61b1..1509728 100644
--- a/media/module/codecs/amrnb/enc/test/AndroidTest.xml
+++ b/media/module/codecs/amrnb/enc/test/AndroidTest.xml
@@ -13,19 +13,27 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<configuration description="Test module config for Amr-nb Encoder unit test">
+<configuration description="Test module config for Amr-nb Encoder unit test">
     <option name="test-suite-tag" value="AmrnbEncoderTest" />
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="AmrnbEncoderTest->/data/local/tmp/AmrnbEncoderTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrnb/enc/test/AmrnbEncoderTest.zip?unzip=true"
-            value="/data/local/tmp/AmrnbEncoderTestRes/" />
+    </target_preparer>
+
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="AmrnbEncoderTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="AmrnbEncoderTest-1.0" />
+        <option name="dynamic-config-module" value="AmrnbEncoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="AmrnbEncoderTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/AmrnbEncoderTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/AmrnbEncoderTest-1.0/" />
     </test>
 </configuration>
diff --git a/media/module/codecs/amrnb/enc/test/DynamicConfig.xml b/media/module/codecs/amrnb/enc/test/DynamicConfig.xml
new file mode 100644
index 0000000..b22df38
--- /dev/null
+++ b/media/module/codecs/amrnb/enc/test/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrnb/enc/test/AmrnbEncoderTest-1.0.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/module/codecs/amrnb/fuzzer/Android.bp b/media/module/codecs/amrnb/fuzzer/Android.bp
index 833a7ba..3f29267 100644
--- a/media/module/codecs/amrnb/fuzzer/Android.bp
+++ b/media/module/codecs/amrnb/fuzzer/Android.bp
@@ -48,5 +48,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libstagefright_amrnbdec library",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/module/codecs/amrwb/dec/fuzzer/Android.bp b/media/module/codecs/amrwb/dec/fuzzer/Android.bp
index 16f08fa..31a20ff 100644
--- a/media/module/codecs/amrwb/dec/fuzzer/Android.bp
+++ b/media/module/codecs/amrwb/dec/fuzzer/Android.bp
@@ -48,5 +48,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libstagefright_amrwbdec library",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/module/codecs/amrwb/dec/test/Android.bp b/media/module/codecs/amrwb/dec/test/Android.bp
index 7ea39ef..7d0c964 100644
--- a/media/module/codecs/amrwb/dec/test/Android.bp
+++ b/media/module/codecs/amrwb/dec/test/Android.bp
@@ -57,7 +57,4 @@
             "signed-integer-overflow",
         ],
     },
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrwb/test/AmrwbDecoderTest.zip?unzip=true",
-    ],
 }
diff --git a/media/module/codecs/amrwb/dec/test/AndroidTest.xml b/media/module/codecs/amrwb/dec/test/AndroidTest.xml
index e211a1f..392df03 100644
--- a/media/module/codecs/amrwb/dec/test/AndroidTest.xml
+++ b/media/module/codecs/amrwb/dec/test/AndroidTest.xml
@@ -18,14 +18,22 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="AmrwbDecoderTest->/data/local/tmp/AmrwbDecoderTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrwb/test/AmrwbDecoderTest.zip?unzip=true"
-            value="/data/local/tmp/AmrwbDecoderTestRes/" />
+    </target_preparer>
+
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="AmrwbDecoderTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="AmrwbDecoderTest-1.0" />
+        <option name="dynamic-config-module" value="AmrwbDecoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="AmrwbDecoderTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/AmrwbDecoderTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/AmrwbDecoderTest-1.0/" />
     </test>
 </configuration>
diff --git a/media/module/codecs/amrwb/dec/test/DynamicConfig.xml b/media/module/codecs/amrwb/dec/test/DynamicConfig.xml
new file mode 100644
index 0000000..d41517f
--- /dev/null
+++ b/media/module/codecs/amrwb/dec/test/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrwb/test/AmrwbDecoderTest-1.0.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/module/codecs/amrwb/enc/fuzzer/Android.bp b/media/module/codecs/amrwb/enc/fuzzer/Android.bp
index f74fa4f..c2c13e1 100644
--- a/media/module/codecs/amrwb/enc/fuzzer/Android.bp
+++ b/media/module/codecs/amrwb/enc/fuzzer/Android.bp
@@ -48,5 +48,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libstagefright_amrwbenc library",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/module/codecs/amrwb/enc/test/Android.bp b/media/module/codecs/amrwb/enc/test/Android.bp
index f095d62..942f6c9 100644
--- a/media/module/codecs/amrwb/enc/test/Android.bp
+++ b/media/module/codecs/amrwb/enc/test/Android.bp
@@ -57,7 +57,4 @@
             "signed-integer-overflow",
         ],
     },
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrwbenc/test/AmrwbEncoderTest.zip?unzip=true",
-    ],
 }
diff --git a/media/module/codecs/amrwb/enc/test/AndroidTest.xml b/media/module/codecs/amrwb/enc/test/AndroidTest.xml
index 46f147c..8822cb2 100644
--- a/media/module/codecs/amrwb/enc/test/AndroidTest.xml
+++ b/media/module/codecs/amrwb/enc/test/AndroidTest.xml
@@ -18,14 +18,22 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="AmrwbEncoderTest->/data/local/tmp/AmrwbEncoderTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrwbenc/test/AmrwbEncoderTest.zip?unzip=true"
-            value="/data/local/tmp/AmrwbEncoderTestRes/" />
+    </target_preparer>
+
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="AmrwbEncoderTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="AmrwbEncoderTest-1.0" />
+        <option name="dynamic-config-module" value="AmrwbEncoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="AmrwbEncoderTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/AmrwbEncoderTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/AmrwbEncoderTest-1.0/" />
     </test>
 </configuration>
diff --git a/media/module/codecs/amrwb/enc/test/DynamicConfig.xml b/media/module/codecs/amrwb/enc/test/DynamicConfig.xml
new file mode 100644
index 0000000..1cf5bf5
--- /dev/null
+++ b/media/module/codecs/amrwb/enc/test/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrwbenc/test/AmrwbEncoderTest-1.0.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/module/codecs/flac/dec/test/AndroidTest.xml b/media/module/codecs/flac/dec/test/AndroidTest.xml
index bebba8e..015f728 100644
--- a/media/module/codecs/flac/dec/test/AndroidTest.xml
+++ b/media/module/codecs/flac/dec/test/AndroidTest.xml
@@ -18,14 +18,22 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="FlacDecoderTest->/data/local/tmp/FlacDecoderTest/" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/flac/dec/test/FlacDecoder.zip?unzip=true"
-            value="/data/local/tmp/FlacDecoderTestRes/" />
+    </target_preparer>
+
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="FlacDecoderTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="FlacDecoder-1.0" />
+        <option name="dynamic-config-module" value="FlacDecoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="FlacDecoderTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/FlacDecoderTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/FlacDecoder-1.0/" />
     </test>
-</configuration>
\ No newline at end of file
+</configuration>
diff --git a/media/module/codecs/flac/dec/test/DynamicConfig.xml b/media/module/codecs/flac/dec/test/DynamicConfig.xml
new file mode 100644
index 0000000..0258808
--- /dev/null
+++ b/media/module/codecs/flac/dec/test/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor-1.5.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/module/codecs/g711/fuzzer/Android.bp b/media/module/codecs/g711/fuzzer/Android.bp
index 376cce7..397fb9a 100644
--- a/media/module/codecs/g711/fuzzer/Android.bp
+++ b/media/module/codecs/g711/fuzzer/Android.bp
@@ -44,6 +44,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of codecs_g711dec library with a special focus on Alaw APIs",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -61,5 +69,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of codecs_g711dec library with a special focus on Mlaw APIs",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/module/codecs/m4v_h263/dec/test/Android.bp b/media/module/codecs/m4v_h263/dec/test/Android.bp
index 9dc756c..d8de569 100644
--- a/media/module/codecs/m4v_h263/dec/test/Android.bp
+++ b/media/module/codecs/m4v_h263/dec/test/Android.bp
@@ -76,7 +76,4 @@
         ],
         cfi: true,
     },
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263Decoder-1.1.zip?unzip=true",
-    ],
 }
diff --git a/media/module/codecs/m4v_h263/dec/test/AndroidTest.xml b/media/module/codecs/m4v_h263/dec/test/AndroidTest.xml
index 8bb4d1c..bd620d6 100755
--- a/media/module/codecs/m4v_h263/dec/test/AndroidTest.xml
+++ b/media/module/codecs/m4v_h263/dec/test/AndroidTest.xml
@@ -19,14 +19,22 @@
         <option name="cleanup" value="true" />
         <option name="push" value="Mpeg4H263DecoderTest->/data/local/tmp/Mpeg4H263DecoderTest" />
         <option name="append-bitness" value="true" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263Decoder-1.1.zip?unzip=true"
-            value="/data/local/tmp/Mpeg4H263DecoderTestRes/" />
+    </target_preparer>
+
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="Mpeg4H263DecoderTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="Mpeg4H263DecoderTest-1.2" />
+        <option name="dynamic-config-module" value="Mpeg4H263DecoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="Mpeg4H263DecoderTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/Mpeg4H263DecoderTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/Mpeg4H263DecoderTest-1.2/" />
     </test>
 </configuration>
diff --git a/media/module/codecs/m4v_h263/dec/test/DynamicConfig.xml b/media/module/codecs/m4v_h263/dec/test/DynamicConfig.xml
new file mode 100644
index 0000000..5219361
--- /dev/null
+++ b/media/module/codecs/m4v_h263/dec/test/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263Decoder-1.2.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/module/codecs/m4v_h263/enc/test/Android.bp b/media/module/codecs/m4v_h263/enc/test/Android.bp
index d75e2d1..2b5e49c 100644
--- a/media/module/codecs/m4v_h263/enc/test/Android.bp
+++ b/media/module/codecs/m4v_h263/enc/test/Android.bp
@@ -55,7 +55,4 @@
         ],
         cfi: true,
     },
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/enc/test/Mpeg4H263Encoder.zip?unzip=true",
-    ],
 }
diff --git a/media/module/codecs/m4v_h263/enc/test/AndroidTest.xml b/media/module/codecs/m4v_h263/enc/test/AndroidTest.xml
index 5218932..6b352b0 100644
--- a/media/module/codecs/m4v_h263/enc/test/AndroidTest.xml
+++ b/media/module/codecs/m4v_h263/enc/test/AndroidTest.xml
@@ -18,14 +18,22 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="Mpeg4H263EncoderTest->/data/local/tmp/Mpeg4H263EncoderTest/" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/enc/test/Mpeg4H263Encoder.zip?unzip=true"
-            value="/data/local/tmp/Mpeg4H263EncoderTestRes/" />
+    </target_preparer>
+
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="Mpeg4H263EncoderTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="Mpeg4H263Encoder-1.1" />
+        <option name="dynamic-config-module" value="Mpeg4H263EncoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="Mpeg4H263EncoderTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/Mpeg4H263EncoderTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/Mpeg4H263Encoder-1.1/" />
     </test>
-</configuration>
\ No newline at end of file
+</configuration>
diff --git a/media/module/codecs/m4v_h263/enc/test/DynamicConfig.xml b/media/module/codecs/m4v_h263/enc/test/DynamicConfig.xml
new file mode 100644
index 0000000..ceb33ef
--- /dev/null
+++ b/media/module/codecs/m4v_h263/enc/test/DynamicConfig.xml
@@ -0,0 +1,21 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/enc/test/Mpeg4H263Encoder-1.1.zip
+            </value>
+    </entry>
+</dynamicConfig>
diff --git a/media/module/codecs/m4v_h263/fuzzer/Android.bp b/media/module/codecs/m4v_h263/fuzzer/Android.bp
index a052c11..4d0ed18 100644
--- a/media/module/codecs/m4v_h263/fuzzer/Android.bp
+++ b/media/module/codecs/m4v_h263/fuzzer/Android.bp
@@ -50,6 +50,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzers target the APIs of libstagefright_m4vh263dec library",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -98,6 +106,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzers target the APIs of libstagefright_m4vh263enc library",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
diff --git a/media/module/codecs/mp3dec/fuzzer/Android.bp b/media/module/codecs/mp3dec/fuzzer/Android.bp
index 514a8a8..c5e0b1f 100644
--- a/media/module/codecs/mp3dec/fuzzer/Android.bp
+++ b/media/module/codecs/mp3dec/fuzzer/Android.bp
@@ -44,5 +44,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libstagefright_mp3dec",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/module/codecs/mp3dec/test/Android.bp b/media/module/codecs/mp3dec/test/Android.bp
index dd06bdc..f10b6ae 100644
--- a/media/module/codecs/mp3dec/test/Android.bp
+++ b/media/module/codecs/mp3dec/test/Android.bp
@@ -56,7 +56,4 @@
             "signed-integer-overflow",
         ],
     },
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mp3dec/test/Mp3DecoderTest-1.2.zip?unzip=true",
-    ],
 }
diff --git a/media/module/codecs/mp3dec/test/AndroidTest.xml b/media/module/codecs/mp3dec/test/AndroidTest.xml
index 29952eb..d16f152 100644
--- a/media/module/codecs/mp3dec/test/AndroidTest.xml
+++ b/media/module/codecs/mp3dec/test/AndroidTest.xml
@@ -18,14 +18,22 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="Mp3DecoderTest->/data/local/tmp/Mp3DecoderTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mp3dec/test/Mp3DecoderTest-1.2.zip?unzip=true"
-            value="/data/local/tmp/Mp3DecoderTestRes/" />
+    </target_preparer>
+
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="Mp3DecoderTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="Mp3DecoderTest-1.3" />
+        <option name="dynamic-config-module" value="Mp3DecoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="Mp3DecoderTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/Mp3DecoderTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/Mp3DecoderTest-1.3/" />
     </test>
 </configuration>
diff --git a/media/module/codecs/mp3dec/test/DynamicConfig.xml b/media/module/codecs/mp3dec/test/DynamicConfig.xml
new file mode 100644
index 0000000..048940b
--- /dev/null
+++ b/media/module/codecs/mp3dec/test/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mp3dec/test/Mp3DecoderTest-1.3.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/module/codecserviceregistrant/fuzzer/Android.bp b/media/module/codecserviceregistrant/fuzzer/Android.bp
index 0b9affd..1cb8c2b 100644
--- a/media/module/codecserviceregistrant/fuzzer/Android.bp
+++ b/media/module/codecserviceregistrant/fuzzer/Android.bp
@@ -41,5 +41,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libmedia_codecserviceregistrant",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/module/esds/tests/AndroidTest.xml b/media/module/esds/tests/AndroidTest.xml
index a4fbc7f..87ca58c 100644
--- a/media/module/esds/tests/AndroidTest.xml
+++ b/media/module/esds/tests/AndroidTest.xml
@@ -18,14 +18,22 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="ESDSTest->/data/local/tmp/ESDSTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/ESDS/ESDSTestRes-1.0.zip?unzip=true"
-            value="/data/local/tmp/ESDSTestRes/" />
+    </target_preparer>
+
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="ESDSTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="ESDSTestRes-1.1" />
+        <option name="dynamic-config-module" value="ESDSTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="ESDSTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/ESDSTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/ESDSTestRes-1.1/" />
     </test>
 </configuration>
diff --git a/media/module/esds/tests/DynamicConfig.xml b/media/module/esds/tests/DynamicConfig.xml
new file mode 100644
index 0000000..9718dda
--- /dev/null
+++ b/media/module/esds/tests/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/ESDS/ESDSTestRes-1.1.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/module/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
index b3e34d2..91ca7b1 100644
--- a/media/module/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -72,6 +72,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzers targets the APIs of all the various extractors",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
diff --git a/media/module/extractors/mkv/MatroskaExtractor.cpp b/media/module/extractors/mkv/MatroskaExtractor.cpp
index 2b72387..6900341 100644
--- a/media/module/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/module/extractors/mkv/MatroskaExtractor.cpp
@@ -1800,7 +1800,6 @@
         int32_t isotransfer = 2; // ISO unspecified
         int32_t coeffs = 2; // ISO unspecified
         bool fullRange = false; // default
-        bool rangeSpecified = false;
 
         if (isValidInt32ColourValue(color->primaries)) {
             primaries = color->primaries;
@@ -1816,7 +1815,6 @@
             // We only support MKV broadcast range (== limited) and full range.
             // We treat all other value as the default limited range.
             fullRange = color->range == 2 /* MKV fullRange */;
-            rangeSpecified = true;
         }
 
         int32_t range = 0;
diff --git a/media/module/extractors/mp3/VBRISeeker.cpp b/media/module/extractors/mp3/VBRISeeker.cpp
index ca51b88..a50754b 100644
--- a/media/module/extractors/mp3/VBRISeeker.cpp
+++ b/media/module/extractors/mp3/VBRISeeker.cpp
@@ -84,7 +84,7 @@
          scale,
          entrySize);
 
-    if (entrySize > 4) {
+    if (entrySize < 1 || entrySize > 4) {
         ALOGE("invalid VBRI entry size: %zu", entrySize);
         return NULL;
     }
@@ -122,16 +122,13 @@
 
     off64_t offset = post_id3_pos;
     for (size_t i = 0; i < numEntries; ++i) {
-        uint32_t numBytes;
+        uint32_t numBytes = 0;
+        // entrySize is known to be [1..4]
         switch (entrySize) {
             case 1: numBytes = buffer[i]; break;
             case 2: numBytes = U16_AT(buffer + 2 * i); break;
             case 3: numBytes = U24_AT(buffer + 3 * i); break;
-            default:
-            {
-                CHECK_EQ(entrySize, 4u);
-                numBytes = U32_AT(buffer + 4 * i); break;
-            }
+            case 4: numBytes = U32_AT(buffer + 4 * i); break;
         }
 
         numBytes *= scale;
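
The VBRISeeker change above rejects entry sizes outside [1..4] before the table is read, which lets the switch cover every case without the CHECK_EQ fallback. For reference, a standalone sketch of the same bounds-checked, big-endian entry parse (hypothetical helper, not the extractor's actual code):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Hypothetical helper mirroring the bounds check introduced above:
    // entrySize must be in [1..4]; each table entry is a big-endian integer.
    static bool parseVbriEntries(const uint8_t* buf, size_t numEntries,
                                 size_t entrySize, std::vector<uint32_t>* out) {
        if (entrySize < 1 || entrySize > 4) {
            return false;  // invalid size, reject before touching the table
        }
        for (size_t i = 0; i < numEntries; ++i) {
            uint32_t value = 0;
            for (size_t b = 0; b < entrySize; ++b) {
                value = (value << 8) | buf[i * entrySize + b];  // big-endian accumulate
            }
            out->push_back(value);
        }
        return true;
    }
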
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index 1d88785..184e4f4 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -26,7 +26,6 @@
 #include <stdlib.h>
 #include <string.h>
 
-#include <log/log.h>
 #include <utils/Log.h>
 
 #include "AC4Parser.h"
@@ -2008,7 +2007,7 @@
             uint8_t mhac_header[mhac_header_size];
             off64_t data_offset = *offset;
 
-            if (chunk_size < sizeof(mhac_header)) {
+            if (mLastTrack == NULL || chunk_size < sizeof(mhac_header)) {
                 return ERROR_MALFORMED;
             }
 
@@ -6501,6 +6500,16 @@
             AMediaFormat_setInt32(meta, AMEDIAFORMAT_KEY_IS_SYNC_FRAME, 1);
         }
 
+        void *presentationsData;
+        size_t presentationsSize;
+        if (AMediaFormat_getBuffer(
+                    mFormat, AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO,
+                    &presentationsData, &presentationsSize)) {
+            AMediaFormat_setBuffer(
+                    meta, AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO,
+                    presentationsData, presentationsSize);
+        }
+
         ++mCurrentSampleIndex;
 
         *out = mBuffer;
diff --git a/media/module/extractors/tests/Android.bp b/media/module/extractors/tests/Android.bp
index f5eadbd..d6e79c7 100644
--- a/media/module/extractors/tests/Android.bp
+++ b/media/module/extractors/tests/Android.bp
@@ -100,7 +100,4 @@
             "signed-integer-overflow",
         ],
     },
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor-1.4.zip?unzip=true",
-    ],
 }
diff --git a/media/module/extractors/tests/AndroidTest.xml b/media/module/extractors/tests/AndroidTest.xml
index fc8152c..22669df 100644
--- a/media/module/extractors/tests/AndroidTest.xml
+++ b/media/module/extractors/tests/AndroidTest.xml
@@ -18,14 +18,22 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="ExtractorUnitTest->/data/local/tmp/ExtractorUnitTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor-1.4.zip?unzip=true"
-            value="/data/local/tmp/ExtractorUnitTestRes/" />
+    </target_preparer>
+
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="ExtractorUnitTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="extractor-1.5" />
+        <option name="dynamic-config-module" value="ExtractorUnitTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="ExtractorUnitTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/ExtractorUnitTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/extractor-1.5/" />
     </test>
 </configuration>
diff --git a/media/module/extractors/tests/DynamicConfig.xml b/media/module/extractors/tests/DynamicConfig.xml
new file mode 100644
index 0000000..0258808
--- /dev/null
+++ b/media/module/extractors/tests/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor-1.5.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/module/foundation/AMessage.cpp b/media/module/foundation/AMessage.cpp
index b61dc47..7cc7c41 100644
--- a/media/module/foundation/AMessage.cpp
+++ b/media/module/foundation/AMessage.cpp
@@ -961,6 +961,11 @@
     return mItems.size();
 }
 
+/* static */
+size_t AMessage::maxAllowedEntries() {
+    return kMaxNumItems;
+}
+
 const char *AMessage::getEntryNameAt(size_t index, Type *type) const {
     if (index >= mItems.size()) {
         *type = kTypeInt32;
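
The new static accessor simply exposes the existing kMaxNumItems limit, so code outside AMessage (for example the NdkMediaFormat fuzzer later in this change) can bound how many distinct entries it tries to create. A rough usage sketch, with an illustrative helper name:

    #include <string>

    #include <media/stagefright/foundation/AMessage.h>

    using android::AMessage;
    using android::sp;

    // Illustrative only: stop adding keys once the documented entry limit is hit.
    void fillMessage(const sp<AMessage>& msg, size_t wanted) {
        const size_t limit = AMessage::maxAllowedEntries();  // static, no instance needed
        for (size_t i = 0; i < wanted && i < limit; ++i) {
            const std::string name = "key" + std::to_string(i);
            msg->setInt32(name.c_str(), static_cast<int32_t>(i));
        }
    }
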
diff --git a/media/module/foundation/ColorUtils.cpp b/media/module/foundation/ColorUtils.cpp
index 6dc8157..12bffca 100644
--- a/media/module/foundation/ColorUtils.cpp
+++ b/media/module/foundation/ColorUtils.cpp
@@ -60,6 +60,10 @@
         { CU::kColorStandardBT470M,         { CA::PrimariesBT470_6M, CA::MatrixBT470_6M } },
         // NOTE: there is no close match to the matrix used by standard film, chose closest
         { CU::kColorStandardFilm,           { CA::PrimariesGenericFilm, CA::MatrixBT2020 } },
+        // DCI-P3 (in DataSpace that drives this standard) is actually Display P3
+        // ITU does not specify a matrix suitable for P3. The theoretical KR/KB numbers are
+        // 0.229 and 0.079. Assume BT.601 matrix as P3 is commonly used for JPEG with BT.601.
+        { CU::kColorStandardDisplay_P3,     { CA::PrimariesEG432, CA::MatrixBT601_6 } },
     }
 };
 
@@ -264,6 +268,8 @@
         { 8, ColorAspects::PrimariesGenericFilm },
         { 9, ColorAspects::PrimariesBT2020 },
         { 10, ColorAspects::PrimariesOther /* XYZ */ },
+        { 11, ColorAspects::PrimariesRP431 },
+        { 12, ColorAspects::PrimariesEG432 },
     }
 };
 
@@ -438,6 +444,9 @@
         { CU::kColorStandardBT2020,               CA::PrimariesBT2020 },
         { CU::kColorStandardBT601_525_Unadjusted, CA::PrimariesBT601_6_525 },
         { CU::kColorStandardBT601_625_Unadjusted, CA::PrimariesBT601_6_625 },
+        { CU::kColorStandardDisplay_P3,           CA::PrimariesEG432 },
+        // fall back DCI P3 primaries to Display P3
+        { CU::kColorStandardDisplay_P3,           CA::PrimariesRP431 },
     }
 };
 
@@ -469,7 +478,8 @@
         { CU::kColorStandardBT2020Constant,       GET_HAL_BITFIELD(STANDARD, BT2020_CONSTANT_LUMINANCE) },
         { CU::kColorStandardBT470M,               GET_HAL_BITFIELD(STANDARD, BT470M) },
         { CU::kColorStandardFilm,                 GET_HAL_BITFIELD(STANDARD, FILM) },
-        { CU::kColorStandardDCI_P3,               GET_HAL_BITFIELD(STANDARD, DCI_P3) },
+        // DCI-P3 (in DataSpace that drives this standard) is actually Display P3
+        { CU::kColorStandardDisplay_P3,           GET_HAL_BITFIELD(STANDARD, DCI_P3) },
     }
 };
 
diff --git a/media/module/foundation/include/media/stagefright/foundation/AMessage.h b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
index 6f73597..7594565 100644
--- a/media/module/foundation/include/media/stagefright/foundation/AMessage.h
+++ b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
@@ -199,6 +199,7 @@
     };
 
     size_t countEntries() const;
+    static size_t maxAllowedEntries();
     const char *getEntryNameAt(size_t index, Type *type) const;
 
     /**
diff --git a/media/module/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/module/foundation/include/media/stagefright/foundation/ColorUtils.h
index 72c8074..f4e89bb 100644
--- a/media/module/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/module/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -57,7 +57,8 @@
         kColorStandardBT2020Constant =       7, // not in SDK
         kColorStandardBT470M =               8, // not in SDK
         kColorStandardFilm =                 9, // not in SDK
-        kColorStandardDCI_P3 =               10, // not in SDK, new in Android 8.0
+        kColorStandardDisplay_P3 =           10, // not in SDK, new in Android 8.0
+        kColorStandardDCI_P3 = kColorStandardDisplay_P3, // legacy (incorrect) name for Display P3
 
         /* This marks a section of color-standard values that are not supported by graphics HAL,
            but track defined color primaries-matrix coefficient combinations in media.
@@ -211,7 +212,7 @@
         case ColorUtils::kColorStandardBT2020Constant:       return "BT2020Constant";
         case ColorUtils::kColorStandardBT470M:               return "BT470M";
         case ColorUtils::kColorStandardFilm:                 return "Film";
-        case ColorUtils::kColorStandardDCI_P3:               return "DCI_P3";
+        case ColorUtils::kColorStandardDisplay_P3:           return "Display_P3";
         default:                                             return def;
     }
 }
diff --git a/media/module/foundation/tests/AMessage_test.cpp b/media/module/foundation/tests/AMessage_test.cpp
index e08ed77..08062e5 100644
--- a/media/module/foundation/tests/AMessage_test.cpp
+++ b/media/module/foundation/tests/AMessage_test.cpp
@@ -53,6 +53,28 @@
     MOCK_METHOD(void, onMessageReceived, (const sp<AMessage>&), (override));
 };
 
+TEST(AMessage_tests, countsAndLimits) {
+  sp<AMessage> m1 = new AMessage();
+
+  // clear, countEntries, maxAllowedEntries
+
+  EXPECT_EQ(0, m1->countEntries());
+
+  m1->setInt32("smaller", INT32_MAX - 2);
+  m1->setInt64("big", INT64_MAX);
+  m1->setString("bigBallOfString", "whatever");
+  EXPECT_EQ(3, m1->countEntries());
+
+  m1->clear();
+  EXPECT_EQ(0, m1->countEntries());
+
+  EXPECT_TRUE(m1->maxAllowedEntries() > 0);
+  EXPECT_TRUE(AMessage::maxAllowedEntries() > 0);
+
+  // static function, make sure we get a consistent answer
+  EXPECT_EQ(m1->maxAllowedEntries(), AMessage::maxAllowedEntries());
+}
+
 TEST(AMessage_tests, settersAndGetters) {
   sp<AMessage> m1 = new AMessage();
 
diff --git a/media/module/foundation/tests/AVCUtils/AndroidTest.xml b/media/module/foundation/tests/AVCUtils/AndroidTest.xml
index 6a088a8..e30bfbf 100644
--- a/media/module/foundation/tests/AVCUtils/AndroidTest.xml
+++ b/media/module/foundation/tests/AVCUtils/AndroidTest.xml
@@ -18,14 +18,22 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="false" />
         <option name="push" value="AVCUtilsUnitTest->/data/local/tmp/AVCUtilsUnitTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest.zip?unzip=true"
-            value="/data/local/tmp/AVCUtilsUnitTest/" />
+    </target_preparer>
+
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="AVCUtilsUnitTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="AVCUtilsUnitTest-1.0" />
+        <option name="dynamic-config-module" value="AVCUtilsUnitTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="AVCUtilsUnitTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/AVCUtilsUnitTest/" />
+        <option name="native-test-flag" value="-P /sdcard/test/AVCUtilsUnitTest-1.0/" />
     </test>
 </configuration>
diff --git a/media/module/foundation/tests/AVCUtils/DynamicConfig.xml b/media/module/foundation/tests/AVCUtils/DynamicConfig.xml
new file mode 100644
index 0000000..e5b8bad
--- /dev/null
+++ b/media/module/foundation/tests/AVCUtils/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest-1.0.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/module/foundation/tests/OpusHeader/Android.bp b/media/module/foundation/tests/OpusHeader/Android.bp
index f052650..fa2b40e 100644
--- a/media/module/foundation/tests/OpusHeader/Android.bp
+++ b/media/module/foundation/tests/OpusHeader/Android.bp
@@ -54,7 +54,4 @@
         ],
         cfi: true,
     },
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/OpusHeader/OpusHeader.zip?unzip=true",
-    ],
 }
diff --git a/media/module/foundation/tests/OpusHeader/AndroidTest.xml b/media/module/foundation/tests/OpusHeader/AndroidTest.xml
index afee16a..4aa4cd2 100644
--- a/media/module/foundation/tests/OpusHeader/AndroidTest.xml
+++ b/media/module/foundation/tests/OpusHeader/AndroidTest.xml
@@ -18,14 +18,21 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="OpusHeaderTest->/data/local/tmp/OpusHeaderTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/OpusHeader/OpusHeader.zip?unzip=true"
-            value="/data/local/tmp/OpusHeaderTestRes/" />
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="OpusHeaderTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="OpusHeader-1.0" />
+        <option name="dynamic-config-module" value="OpusHeaderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="OpusHeaderTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/OpusHeaderTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/OpusHeader-1.0/" />
     </test>
-</configuration>
\ No newline at end of file
+</configuration>
diff --git a/media/module/foundation/tests/OpusHeader/DynamicConfig.xml b/media/module/foundation/tests/OpusHeader/DynamicConfig.xml
new file mode 100644
index 0000000..ebac328
--- /dev/null
+++ b/media/module/foundation/tests/OpusHeader/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+        <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/OpusHeader/OpusHeader-1.0.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/module/id3/test/Android.bp b/media/module/id3/test/Android.bp
index 0481e48..52cdfa5 100644
--- a/media/module/id3/test/Android.bp
+++ b/media/module/id3/test/Android.bp
@@ -57,7 +57,4 @@
             "signed-integer-overflow",
         ],
     },
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/id3/test/ID3Test-1.2.zip?unzip=true",
-    ],
 }
diff --git a/media/module/id3/test/AndroidTest.xml b/media/module/id3/test/AndroidTest.xml
index 50f9253..b169994 100644
--- a/media/module/id3/test/AndroidTest.xml
+++ b/media/module/id3/test/AndroidTest.xml
@@ -18,14 +18,21 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="ID3Test->/data/local/tmp/ID3Test" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/id3/test/ID3Test-1.2.zip?unzip=true"
-            value="/data/local/tmp/ID3TestRes/" />
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="ID3Test" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="ID3TestRes-1.3" />
+        <option name="dynamic-config-module" value="ID3Test" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="ID3Test" />
-        <option name="native-test-flag" value="-P /data/local/tmp/ID3TestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/ID3TestRes-1.3/" />
     </test>
 </configuration>
diff --git a/media/module/id3/test/DynamicConfig.xml b/media/module/id3/test/DynamicConfig.xml
new file mode 100644
index 0000000..5ae4fcd
--- /dev/null
+++ b/media/module/id3/test/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/id3/test/ID3Test-1.3.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/module/libmediatranscoding/transcoder/benchmark/Android.bp b/media/module/libmediatranscoding/transcoder/benchmark/Android.bp
index f98b27d..459f0ae 100644
--- a/media/module/libmediatranscoding/transcoder/benchmark/Android.bp
+++ b/media/module/libmediatranscoding/transcoder/benchmark/Android.bp
@@ -26,25 +26,16 @@
     name: "MediaTranscoderBenchmark",
     srcs: ["MediaTranscoderBenchmark.cpp"],
     defaults: ["benchmarkdefaults"],
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libmediatranscoding/transcoder/benchmark/TranscodingBenchmark-1.2.zip?unzip=true",
-    ],
 }
 
 cc_test {
     name: "MediaSampleReaderBenchmark",
     srcs: ["MediaSampleReaderBenchmark.cpp"],
     defaults: ["benchmarkdefaults"],
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libmediatranscoding/transcoder/benchmark/TranscodingBenchmark-1.2.zip?unzip=true",
-    ],
 }
 
 cc_test {
     name: "MediaTrackTranscoderBenchmark",
     srcs: ["MediaTrackTranscoderBenchmark.cpp"],
     defaults: ["benchmarkdefaults"],
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libmediatranscoding/transcoder/benchmark/TranscodingBenchmark-1.2.zip?unzip=true",
-    ],
 }
diff --git a/media/module/metadatautils/test/AndroidTest.xml b/media/module/metadatautils/test/AndroidTest.xml
index d6497f3..ce8c4d6 100644
--- a/media/module/metadatautils/test/AndroidTest.xml
+++ b/media/module/metadatautils/test/AndroidTest.xml
@@ -18,13 +18,21 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="false" />
         <option name="push" value="MetaDataUtilsTest->/data/local/tmp/MetaDataUtilsTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/metadatautils/MetaDataUtilsTestRes-1.0.zip?unzip=true"
-            value="/data/local/tmp/MetaDataUtilsTestRes/" />
     </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="MetaDataUtilsTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="MetaDataUtilsTest-1.1" />
+        <option name="dynamic-config-module" value="MetaDataUtilsTest" />
+    </target_preparer>
+
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="MetaDataUtilsTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/MetaDataUtilsTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/MetaDataUtilsTest-1.1/" />
     </test>
 </configuration>
diff --git a/media/module/metadatautils/test/DynamicConfig.xml b/media/module/metadatautils/test/DynamicConfig.xml
new file mode 100644
index 0000000..9d80bf3
--- /dev/null
+++ b/media/module/metadatautils/test/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/metadatautils/MetaDataUtilsTestRes-1.1.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/module/mpeg2ts/test/Android.bp b/media/module/mpeg2ts/test/Android.bp
index 4b1bacd..34a8d3e 100644
--- a/media/module/mpeg2ts/test/Android.bp
+++ b/media/module/mpeg2ts/test/Android.bp
@@ -74,7 +74,4 @@
             "signed-integer-overflow",
         ],
     },
-    data: [
-        ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.zip?unzip=true",
-    ],
 }
diff --git a/media/module/mpeg2ts/test/AndroidTest.xml b/media/module/mpeg2ts/test/AndroidTest.xml
index ac1294d..836c9f8 100644
--- a/media/module/mpeg2ts/test/AndroidTest.xml
+++ b/media/module/mpeg2ts/test/AndroidTest.xml
@@ -18,14 +18,21 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="push" value="Mpeg2tsUnitTest->/data/local/tmp/Mpeg2tsUnitTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.zip?unzip=true"
-            value="/data/local/tmp/Mpeg2tsUnitTestRes/" />
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="Mpeg2tsUnitTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="Mpeg2tsUnitTest-1.0" />
+        <option name="dynamic-config-module" value="Mpeg2tsUnitTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="Mpeg2tsUnitTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/Mpeg2tsUnitTestRes/" />
+        <option name="native-test-flag" value="-P /sdcard/test/Mpeg2tsUnitTest-1.0/" />
     </test>
 </configuration>
diff --git a/media/module/mpeg2ts/test/DynamicConfig.xml b/media/module/mpeg2ts/test/DynamicConfig.xml
new file mode 100644
index 0000000..017a3c6
--- /dev/null
+++ b/media/module/mpeg2ts/test/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest-1.0.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/mtp/OWNERS b/media/mtp/OWNERS
index 54d3d4a..6b5336e 100644
--- a/media/mtp/OWNERS
+++ b/media/mtp/OWNERS
@@ -1,5 +1,9 @@
 set noparent
 
+aprasath@google.com
+anothermark@google.com
+kumarashishg@google.com
+sarup@google.com
 jsharkey@android.com
 jameswei@google.com
 rmojumder@google.com
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 067c8f4..0f7d98b 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -570,6 +570,9 @@
     }
 }
 
+// The LL-NDK API is now deprecated. New devices will no longer have the token
+// manager service installed, so createHalToken will return false and this
+// will return AMEDIA_ERROR_UNKNOWN on those devices.
 media_status_t AImageReader::getWindowNativeHandle(native_handle **handle) {
     if (mWindowHandle != nullptr) {
         *handle = mWindowHandle;
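
Given the comment above, callers of the legacy native-handle path should treat AMEDIA_ERROR_UNKNOWN as the token service being unavailable and take a fallback path rather than retrying. A hedged caller-side sketch (assumes a vendor build where AImageReader_getWindowNativeHandle is declared; the helper name is illustrative):

    #include <cutils/native_handle.h>
    #include <media/NdkImageReader.h>
    #include <media/NdkMediaError.h>

    // Illustrative only: probe the deprecated path and fall back cleanly when
    // the android.hidl.token service is no longer present on the device.
    bool tryGetLegacyWindowHandle(AImageReader* reader, native_handle_t** outHandle) {
        const media_status_t status = AImageReader_getWindowNativeHandle(reader, outHandle);
        // AMEDIA_ERROR_UNKNOWN now also covers "token manager service not installed",
        // so there is nothing to retry; the caller should use a non-handle path instead.
        return status == AMEDIA_OK;
    }
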
diff --git a/media/ndk/fuzzer/Android.bp b/media/ndk/fuzzer/Android.bp
index 3633e00..ba92b19 100644
--- a/media/ndk/fuzzer/Android.bp
+++ b/media/ndk/fuzzer/Android.bp
@@ -56,6 +56,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libmediandk library",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
diff --git a/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp b/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp
index 60d271f..28a38fe 100644
--- a/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp
@@ -60,11 +60,7 @@
   public:
     CallBackHandle() : mSawError(false), mIsDone(false) {}
 
-    virtual ~CallBackHandle() {
-        if (mIOThread.joinable()) {
-            mIOThread.join();
-        }
-    }
+    virtual ~CallBackHandle() {}
 
     void ioThread();
 
@@ -88,7 +84,6 @@
     // Keep a queue of all function callbacks.
     typedef function<void()> IOTask;
     CallBackQueue<IOTask> mIOQueue;
-    thread mIOThread;
     bool mSawError;
     bool mIsDone;
 };
@@ -144,6 +139,11 @@
         mNumOfFrames = 0;
         mNumInputFrames = 0;
     };
+    ~NdkAsyncCodecFuzzer() {
+        mIOThreadPool->stop();
+        delete (mIOThreadPool);
+    };
+
     void process();
 
     static void codecOnFrameRendered(AMediaCodec* codec, void* userdata, int64_t mediaTimeUs,
@@ -153,6 +153,20 @@
         (void)mediaTimeUs;
         (void)systemNano;
     };
+    class ThreadPool {
+      public:
+        void start();
+        void queueJob(const std::function<void()>& job);
+        void stop();
+
+      private:
+        void ThreadLoop();
+        bool mShouldTerminate = false;
+        std::vector<std::thread> mThreads;
+        std::mutex mQueueMutex;
+        std::condition_variable mQueueMutexCondition;
+        std::queue<std::function<void()>> mJobs;
+    };
 
   private:
     FuzzedDataProvider mFdp;
@@ -172,8 +186,53 @@
     int32_t mNumInputFrames;
     mutable Mutex mMutex;
     bool mIsEncoder;
+    ThreadPool* mIOThreadPool = new ThreadPool();
 };
 
+void NdkAsyncCodecFuzzer::ThreadPool::start() {
+    const uint32_t numThreads = std::thread::hardware_concurrency();
+    mThreads.resize(numThreads);
+    for (uint32_t i = 0; i < numThreads; ++i) {
+        mThreads.at(i) = std::thread(&ThreadPool::ThreadLoop, this);
+    }
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::ThreadLoop() {
+    while (true) {
+        std::function<void()> job;
+        {
+            std::unique_lock<std::mutex> lock(mQueueMutex);
+            mQueueMutexCondition.wait(lock, [this] { return !mJobs.empty() || mShouldTerminate; });
+            if (mShouldTerminate) {
+                return;
+            }
+            job = mJobs.front();
+            mJobs.pop();
+        }
+        job();
+    }
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::queueJob(const std::function<void()>& job) {
+    {
+        std::unique_lock<std::mutex> lock(mQueueMutex);
+        mJobs.push(job);
+    }
+    mQueueMutexCondition.notify_one();
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::stop() {
+    {
+        std::unique_lock<std::mutex> lock(mQueueMutex);
+        mShouldTerminate = true;
+    }
+    mQueueMutexCondition.notify_all();
+    for (std::thread& active_thread : mThreads) {
+        active_thread.join();
+    }
+    mThreads.clear();
+}
+
 void NdkAsyncCodecFuzzer::receiveError(void) {
     mSignalledError = true;
 }
@@ -258,7 +317,7 @@
         AMediaCodecOnAsyncNotifyCallback callBack = {onAsyncInputAvailable, onAsyncOutputAvailable,
                                                      onAsyncFormatChanged, onAsyncError};
         AMediaCodec_setAsyncNotifyCallback(mCodec, callBack, this);
-        mIOThread = thread(&CallBackHandle::ioThread, this);
+        mIOThreadPool->queueJob([this] { CallBackHandle::ioThread(); });
 
         AMediaCodec_start(mCodec);
         sleep(5);
@@ -322,6 +381,8 @@
 }
 
 void NdkAsyncCodecFuzzer::invokeAsyncCodeConfigAPI() {
+    mIOThreadPool->start();
+
     while (mFdp.remaining_bytes() > 0) {
         mIsEncoder = mFdp.ConsumeBool();
         mCodec = createCodec(mIsEncoder, mFdp.ConsumeBool() /* isCodecForClient */);
@@ -330,6 +391,7 @@
             AMediaCodec_delete(mCodec);
         }
     }
+    mIOThreadPool->stop();
 }
 
 void NdkAsyncCodecFuzzer::invokeCodecCryptoInfoAPI() {
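
The fuzzer change above replaces the single joined mIOThread with a small thread pool driven by a mutex, condition variable, and job queue; queued work runs until stop() terminates and joins the workers. A self-contained sketch of the same start/queueJob/stop pattern, detached from the fuzzer (names are illustrative):

    #include <condition_variable>
    #include <cstdio>
    #include <functional>
    #include <mutex>
    #include <queue>
    #include <thread>
    #include <vector>

    // Illustrative sketch of the start/queueJob/stop pattern used above.
    class SimpleThreadPool {
      public:
        void start(unsigned numThreads) {
            for (unsigned i = 0; i < numThreads; ++i) {
                mThreads.emplace_back([this] { threadLoop(); });
            }
        }
        void queueJob(std::function<void()> job) {
            {
                std::lock_guard<std::mutex> lock(mMutex);
                mJobs.push(std::move(job));
            }
            mCondition.notify_one();
        }
        void stop() {
            {
                std::lock_guard<std::mutex> lock(mMutex);
                mShouldTerminate = true;
            }
            mCondition.notify_all();
            for (std::thread& t : mThreads) {
                t.join();
            }
            mThreads.clear();
        }

      private:
        void threadLoop() {
            for (;;) {
                std::function<void()> job;
                {
                    std::unique_lock<std::mutex> lock(mMutex);
                    mCondition.wait(lock, [this] { return mShouldTerminate || !mJobs.empty(); });
                    if (mShouldTerminate) {
                        return;  // like the fuzzer's pool, pending jobs are dropped on stop()
                    }
                    job = std::move(mJobs.front());
                    mJobs.pop();
                }
                job();  // run outside the lock
            }
        }

        std::vector<std::thread> mThreads;
        std::queue<std::function<void()>> mJobs;
        std::mutex mMutex;
        std::condition_variable mCondition;
        bool mShouldTerminate = false;
    };

    int main() {
        SimpleThreadPool pool;
        pool.start(2);
        pool.queueJob([] { std::puts("job ran"); });
        pool.stop();
    }
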
diff --git a/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp b/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
index c19ea13..23e2eaf 100644
--- a/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
@@ -18,6 +18,7 @@
 #include <fcntl.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <media/NdkMediaFormat.h>
+#include <media/stagefright/foundation/AMessage.h>
 #include <sys/mman.h>
 #include <unistd.h>
 #include <utils/Log.h>
@@ -176,11 +177,13 @@
 constexpr size_t kMaxBytes = 1000;
 constexpr size_t kMinChoice = 0;
 constexpr size_t kMaxChoice = 9;
+const size_t kMaxIteration = android::AMessage::maxAllowedEntries();
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     FuzzedDataProvider fdp(data, size);
     AMediaFormat* mediaFormat = AMediaFormat_new();
-    while (fdp.remaining_bytes()) {
+    std::vector<std::string> nameCollection;
+    while (fdp.remaining_bytes() && nameCollection.size() < kMaxIteration) {
         const char* name = nullptr;
         std::string nameString;
         if (fdp.ConsumeBool()) {
@@ -190,6 +193,11 @@
                             : fdp.ConsumeRandomLengthString(
                                       fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
             name = nameString.c_str();
+            std::vector<std::string>::iterator it =
+                    find(nameCollection.begin(), nameCollection.end(), name);
+            if (it == nameCollection.end()) {
+                nameCollection.push_back(name);
+            }
         }
         switch (fdp.ConsumeIntegralInRange<int32_t>(kMinChoice, kMaxChoice)) {
             case 0: {
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index 814a327..76270d3 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -829,7 +829,7 @@
 /**
  * Query the dataspace of the input {@link AImage}.
  *
- * Available since API level 33.
+ * Available since API level 34.
  *
  * @param image the {@link AImage} of interest.
  * @param dataSpace the dataspace of the image will be filled here if the method call succeeds.
@@ -843,7 +843,7 @@
  *                 image has been deleted.</li></ul>
  */
 media_status_t AImage_getDataSpace(const AImage* image,
-                                   /*out*/int32_t* dataSpace) __INTRODUCED_IN(33);
+                                   /*out*/int32_t* dataSpace) __INTRODUCED_IN(34);
 
 __END_DECLS
 
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index 992955b..48a0a82 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -395,7 +395,7 @@
  * the combination of {@code hardwareBufferFormat} and {@code dataSpace} for the
  * format of the Image that the reader will produce.</p>
  *
- * Available since API level 33.
+ * Available since API level 34.
  *
  * @param width The default width in pixels of the Images that this reader will produce.
  * @param height The default height in pixels of the Images that this reader will produce.
@@ -422,7 +422,7 @@
  */
 media_status_t AImageReader_newWithDataSpace(int32_t width, int32_t height, uint64_t usage,
         int32_t maxImages, uint32_t hardwareBufferFormat, int32_t dataSpace,
-        /*out*/ AImageReader** reader) __INTRODUCED_IN(33);
+        /*out*/ AImageReader** reader) __INTRODUCED_IN(34);
 
 /**
  * Acquire the next {@link AImage} from the image reader's queue asynchronously.
@@ -540,9 +540,11 @@
  *
  * @return AMEDIA_OK if the method call succeeds.
  *         AMEDIA_ERROR_INVALID_PARAMETER if reader or handle are NULL.
- *         AMEDIA_ERROR_UNKNOWN if some other error is encountered.
+ *         AMEDIA_ERROR_UNKNOWN if some other error is encountered, or if
+ *         the device no longer has the android.hidl.token service needed
+ *         to satisfy the request because this API is deprecated.
  */
-media_status_t AImageReader_getWindowNativeHandle(
+[[deprecated]] media_status_t AImageReader_getWindowNativeHandle(
     AImageReader *reader, /* out */native_handle_t **handle);
 #endif
 
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 4dd81ab..4f045fd 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -13,13 +13,13 @@
     AImageReader_getWindow; # introduced=24
     AImageReader_new; # introduced=24
     AImageReader_newWithUsage; # introduced=26
-    AImageReader_newWithDataSpace; # introduced=Tiramisu
+    AImageReader_newWithDataSpace; # introduced=UpsideDownCake
     AImageReader_setBufferRemovedListener; # introduced=26
     AImageReader_setImageListener; # introduced=24
     AImage_delete; # introduced=24
     AImage_deleteAsync; # introduced=26
     AImage_getCropRect; # introduced=24
-    AImage_getDataSpace; # introduced=Tiramisu
+    AImage_getDataSpace; # introduced=UpsideDownCake
     AImage_getFormat; # introduced=24
     AImage_getHardwareBuffer; # introduced=26
     AImage_getHeight; # introduced=24
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java
index 011c38c..e2fe177 100644
--- a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java
@@ -90,13 +90,13 @@
         }
 
         String decoderMime = decoderFormat.getString(MediaFormat.KEY_MIME);
-        ArrayList<String> listOfDeocders =
+        ArrayList<String> decoders =
                 MediaCodecBase.selectCodecs(decoderMime, null, null, false, mIsCodecSoftware);
-        if (listOfDeocders.isEmpty()) {
+        if (decoders.isEmpty()) {
             Log.e(TAG, "No suitable decoder found for mime: " + decoderMime);
             return -1;
         }
-        mDecoder = MediaCodec.createByCodecName(listOfDeocders.get(0));
+        mDecoder = MediaCodec.createByCodecName(decoders.get(0));
 
         MediaFormat encoderFormat = setUpEncoderFormat(decoderFormat);
         ArrayList<String> listOfEncoders =
diff --git a/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml b/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml
index 1890661..1b66b01 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml
+++ b/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml
@@ -14,18 +14,26 @@
      limitations under the License.
 -->
 <configuration description="Runs Media Benchmark Tests">
+    <option name="test-tag" value="MediaBenchmarkTest" />
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/tests/benchmark/MediaBenchmark.zip?unzip=true"
-            value="/data/local/tmp/MediaBenchmark/res/" />
     </target_preparer>
-    <target_preparer class="com.android.tradefed.targetprep.TestAppInstallSetup">
-        <option name="cleanup-apks" value="false" />
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
+        <option name="target" value="host" />
+        <option name="config-filename" value="MediaBenchmarkTest" />
+        <option name="version" value="1.0"/>
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
+        <option name="push-all" value="true" />
+        <option name="media-folder-name" value="MediaBenchmarkTest-1.1" />
+        <option name="dynamic-config-module" value="MediaBenchmarkTest" />
+    </target_preparer>
+
+    <target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">
+        <option name="cleanup-apks" value="true" />
         <option name="test-file-name" value="MediaBenchmarkTest.apk" />
     </target_preparer>
 
-    <option name="test-tag" value="MediaBenchmarkTest" />
     <test class="com.android.tradefed.testtype.AndroidJUnitTest" >
         <option name="package" value="com.android.media.benchmark" />
         <option name="runner" value="androidx.test.runner.AndroidJUnitRunner" />
diff --git a/media/tests/benchmark/MediaBenchmarkTest/DynamicConfig.xml b/media/tests/benchmark/MediaBenchmarkTest/DynamicConfig.xml
new file mode 100644
index 0000000..1278f29
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/DynamicConfig.xml
@@ -0,0 +1,20 @@
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<dynamicConfig>
+    <entry key="media_files_url">
+            <value>https://storage.googleapis.com/android_media/frameworks/av/media/tests/benchmark/MediaBenchmark-1.1.zip</value>
+    </entry>
+</dynamicConfig>
diff --git a/media/tests/benchmark/MediaBenchmarkTest/res/values/strings.xml b/media/tests/benchmark/MediaBenchmarkTest/res/values/strings.xml
index 24dbccc..2bef254 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/res/values/strings.xml
+++ b/media/tests/benchmark/MediaBenchmarkTest/res/values/strings.xml
@@ -1,4 +1,4 @@
 <resources>
-    <string name="input_file_path">/data/local/tmp/MediaBenchmark/res/</string>
+    <string name="input_file_path">/sdcard/test/MediaBenchmarkTest-1.1/</string>
     <string name="output_file_path">/data/local/tmp/MediaBenchmark/output/</string>
 </resources>
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 7abb0b6..07dac5e 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -179,3 +179,8 @@
     local_include_dirs: ["include"],
     export_include_dirs: ["include"],
 }
+
+cc_library_headers {
+    name: "mediautils_headers",
+    export_include_dirs: ["include", "."],
+}
diff --git a/media/utils/include/mediautils/MethodStatistics.h b/media/utils/include/mediautils/MethodStatistics.h
index 6d7e990..c8b36d8 100644
--- a/media/utils/include/mediautils/MethodStatistics.h
+++ b/media/utils/include/mediautils/MethodStatistics.h
@@ -59,7 +59,7 @@
     void event(C&& code, FloatType executeMs) {
         std::lock_guard lg(mLock);
         auto it = mStatisticsMap.lower_bound(code);
-        if (it != mStatisticsMap.end() && it->first == code) {
+        if (it != mStatisticsMap.end() && it->first == static_cast<Code>(code)) {
             it->second.add(executeMs);
         } else {
             // StatsType ctor takes an optional array of data for initialization.
diff --git a/media/utils/include/mediautils/SharedMemoryAllocator.h b/media/utils/include/mediautils/SharedMemoryAllocator.h
index 17c1ac9..79621e2 100644
--- a/media/utils/include/mediautils/SharedMemoryAllocator.h
+++ b/media/utils/include/mediautils/SharedMemoryAllocator.h
@@ -22,6 +22,7 @@
 
 #include <iomanip>
 #include <limits>
+#include <mutex>
 #include <sstream>
 #include <string>
 #include <type_traits>
@@ -109,13 +110,14 @@
 // TODO is there some way to avoid paying this cost?
 template <typename Allocator>
 class ScopedAllocator;
-template <typename AllocationT, typename AllocatorHandleType>
+
 class ScopedAllocation : public BnMemory {
   public:
     template <typename T>
     friend class ScopedAllocator;
-    ScopedAllocation(const AllocationT& allocation, const AllocatorHandleType& handle)
-        : mAllocation(allocation), mHandle(handle) {}
+    template <typename Deallocator>
+    ScopedAllocation(const AllocationType& allocation, Deallocator&& deallocator)
+        : mAllocation(allocation), mDeallocator(std::forward<Deallocator>(deallocator)) {}
 
     // Defer the implementation to the underlying mAllocation
 
@@ -125,10 +127,10 @@
     }
 
   private:
-    ~ScopedAllocation() override { mHandle->deallocate(mAllocation); }
+    ~ScopedAllocation() override { mDeallocator(mAllocation); }
 
-    const AllocationT mAllocation;
-    const AllocatorHandleType mHandle;
+    const AllocationType mAllocation;
+    const std::function<void(const AllocationType&)> mDeallocator;
 };
 
 // Allocations are only deallocated when going out of scope.
@@ -136,7 +138,6 @@
 template <typename Allocator>
 class ScopedAllocator {
   public:
-    using HandleT = std::shared_ptr<Allocator>;
     static constexpr size_t alignment() { return Allocator::alignment(); }
 
     explicit ScopedAllocator(const std::shared_ptr<Allocator>& allocator) : mAllocator(allocator) {}
@@ -145,11 +146,16 @@
 
     template <typename T>
     auto allocate(T&& request) {
+        std::lock_guard l{*mLock};
         const auto allocation = mAllocator->allocate(std::forward<T>(request));
         if (!allocation) {
-            return sp<ScopedAllocation<AllocationType, HandleT>>{};
+            return sp<ScopedAllocation>{};
         }
-        return sp<ScopedAllocation<AllocationType, HandleT>>::make(allocation, mAllocator);
+        return sp<ScopedAllocation>::make(allocation,
+                [allocator = mAllocator, lock = mLock] (const AllocationType& allocation) {
+                    std::lock_guard l{*lock};
+                    allocator->deallocate(allocation);
+                });
     }
 
     // Deallocate and deallocate_all are implicitly unsafe due to double
@@ -159,20 +165,23 @@
     //
     // Owns is only safe to pseudo-impl due to static cast reqs
     template <typename Enable = bool>
-    auto owns(const sp<ScopedAllocation<AllocationType, HandleT>>& allocation) const
+    auto owns(const sp<ScopedAllocation>& allocation) const
             -> std::enable_if_t<shared_allocator_impl::has_owns<Allocator>, Enable> {
+        std::lock_guard l{*mLock};
         return mAllocator->owns(allocation->mAllocation);
     }
 
     template <typename Enable = std::string>
     auto dump() const -> std::enable_if_t<shared_allocator_impl::has_dump<Allocator>, Enable> {
+        std::lock_guard l{*mLock};
         return mAllocator->dump();
     }
 
   private:
     // We store a shared pointer in order to ensure that the allocator outlives
     // allocations (which call back to become dereferenced).
-    const HandleT mAllocator;
+    const std::shared_ptr<Allocator> mAllocator;
+    const std::shared_ptr<std::mutex> mLock = std::make_shared<std::mutex>();
 };
 
 // A simple policy for PolicyAllocator which enforces a pool size and an allocation
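
The header refactor above removes the AllocatorHandleType template parameter from ScopedAllocation: the deallocation step is now type-erased into a std::function that captures both the allocator and a shared mutex, so deallocation from the wrapper's destructor stays serialized with allocation. A reduced sketch of the same ownership pattern, outside the IMemory/BnMemory machinery (all names hypothetical):

    #include <cstddef>
    #include <functional>
    #include <memory>
    #include <mutex>
    #include <vector>

    // Reduced sketch: the RAII wrapper releases its resource through a
    // type-erased std::function instead of a templated allocator handle.
    class ScopedBlock {
      public:
        template <typename Deallocator>
        ScopedBlock(std::vector<char> block, Deallocator&& deallocator)
            : mBlock(std::move(block)), mDeallocator(std::forward<Deallocator>(deallocator)) {}
        ~ScopedBlock() { mDeallocator(mBlock); }

      private:
        const std::vector<char> mBlock;
        const std::function<void(const std::vector<char>&)> mDeallocator;
    };

    struct BlockPool {
        std::mutex lock;
        size_t outstandingBytes = 0;
    };

    // The callback keeps the pool alive (shared_ptr capture) and re-takes the
    // pool lock, mirroring how ScopedAllocator captures mAllocator and mLock.
    std::unique_ptr<ScopedBlock> allocateBlock(const std::shared_ptr<BlockPool>& pool,
                                               size_t bytes) {
        std::lock_guard<std::mutex> guard(pool->lock);
        pool->outstandingBytes += bytes;
        return std::make_unique<ScopedBlock>(
                std::vector<char>(bytes),
                [pool](const std::vector<char>& block) {
                    std::lock_guard<std::mutex> guard(pool->lock);
                    pool->outstandingBytes -= block.size();
                });
    }
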
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 43ef311..6329bae 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -19,47 +19,127 @@
     ],
 }
 
-cc_library_shared {
-    name: "libaudioflinger",
+// base tidy_errors for this and all subprojects.
+audioflinger_base_tidy_errors = [
+    // https://clang.llvm.org/extra/clang-tidy/checks/list.html
+    // For many categories, the checks are too many to specify individually.
+    // Feel free to disable as needed - as warnings are generally ignored,
+    // we treat warnings as errors.
+    "android-*",
+    "bugprone-*",
+    "cert-*",
+    "clang-analyzer-security*",
+    "google-*",
+    "misc-*",
+    //"modernize-*",  // explicitly list the modernize as they can be subjective.
+    "modernize-avoid-bind",
+    //"modernize-avoid-c-arrays", // std::array<> can be verbose
+    "modernize-concat-nested-namespaces",
+    //"modernize-deprecated-headers", // C headers still ok even if there is C++ equivalent.
+    "modernize-deprecated-ios-base-aliases",
+    "modernize-loop-convert",
+    "modernize-make-shared",
+    "modernize-make-unique",
+    // "modernize-pass-by-value",
+    "modernize-raw-string-literal",
+    "modernize-redundant-void-arg",
+    "modernize-replace-auto-ptr",
+    "modernize-replace-random-shuffle",
+    "modernize-return-braced-init-list",
+    "modernize-shrink-to-fit",
+    "modernize-unary-static-assert",
+    // "modernize-use-auto",  // found in MediaMetricsService.h, debatable - auto can obscure type
+    "modernize-use-bool-literals",
+    "modernize-use-default-member-init",
+    "modernize-use-emplace",
+    "modernize-use-equals-default",
+    "modernize-use-equals-delete",
+    // "modernize-use-nodiscard",
+    "modernize-use-noexcept",
+    "modernize-use-nullptr",
+    "modernize-use-override",
+    //"modernize-use-trailing-return-type", // not necessarily more readable
+    "modernize-use-transparent-functors",
+    "modernize-use-uncaught-exceptions",
+    "modernize-use-using",
+    "performance-*",
 
-    defaults: [
-        "latest_android_media_audio_common_types_cpp_shared",
-        "latest_android_hardware_audio_core_sounddose_ndk_shared",
-    ],
+    // Remove some pedantic stylistic requirements.
+    "-google-readability-casting", // C++ casts not always necessary and may be verbose
+    "-google-readability-todo",    // do not require TODO(info)
 
-    srcs: [
-        "AudioFlinger.cpp",
-        "AudioHwDevice.cpp",
-        "AudioStreamOut.cpp",
-        "AudioWatchdog.cpp",
-        "BufLog.cpp",
-        "DeviceEffectManager.cpp",
-        "Effects.cpp",
-        "FastCapture.cpp",
-        "FastCaptureDumpState.cpp",
-        "FastCaptureState.cpp",
-        "FastMixer.cpp",
-        "FastMixerDumpState.cpp",
-        "FastMixerState.cpp",
-        "FastThread.cpp",
-        "FastThreadDumpState.cpp",
-        "FastThreadState.cpp",
-        "MelReporter.cpp",
-        "NBAIO_Tee.cpp",
-        "PatchCommandThread.cpp",
-        "PatchPanel.cpp",
-        "PropertyUtils.cpp",
-        "SpdifStreamOut.cpp",
-        "StateQueue.cpp",
-        "Threads.cpp",
-        "Tracks.cpp",
-        "TypedLogger.cpp",
-    ],
+    "-bugprone-unhandled-self-assignment",
+    "-bugprone-suspicious-string-compare",
+    "-cert-oop54-cpp", // found in TransactionLog.h
+    "-bugprone-narrowing-conversions", // b/182410845
+]
 
-    include_dirs: [
-        "frameworks/av/services/audiopolicy",
-        "frameworks/av/services/medialog",
+// TODO(b/275642749) Reenable these warnings
+audioflinger_tidy_errors = audioflinger_base_tidy_errors + [
+    "-bugprone-assignment-in-if-condition",
+    "-bugprone-forward-declaration-namespace",
+    "-bugprone-parent-virtual-call",
+    "-cert-dcl59-cpp",
+    "-cert-err34-c",
+    "-google-build-namespaces",
+    "-google-build-using-namespace",
+    "-google-default-arguments",
+    "-google-runtime-int",
+    "-misc-const-correctness",
+    "-misc-non-private-member-variables-in-classes",
+    "-modernize-concat-nested-namespaces",
+    "-modernize-loop-convert",
+    "-modernize-use-default-member-init",
+    "-modernize-use-equals-default",
+    "-modernize-use-nullptr",
+    "-modernize-use-override",
+    "-modernize-use-using",
+    "-performance-no-int-to-ptr",
+]
+
+audioflinger_base_cflags = [
+    "-Wall",
+    "-Wdeprecated",
+    "-Werror",
+    "-Werror=implicit-fallthrough",
+    "-Werror=sometimes-uninitialized",
+    "-Werror=conditional-uninitialized",
+    "-Wextra",
+
+    // suppress some warning chatter.
+    "-Wno-deprecated-copy-with-dtor",
+    "-Wno-deprecated-copy-with-user-provided-dtor",
+
+    "-Wredundant-decls",
+    "-Wshadow",
+    "-Wstrict-aliasing",
+    "-fstrict-aliasing",
+    "-Wthread-safety",
+    //"-Wthread-safety-negative", // experimental - looks broken in R.
+    "-Wunreachable-code",
+    "-Wunreachable-code-break",
+    "-Wunreachable-code-return",
+    "-Wunused",
+    "-Wused-but-marked-unused",
+]
+
+// Eventually use common tidy defaults
+cc_defaults {
+    name: "audioflinger_flags_defaults",
+    // https://clang.llvm.org/docs/UsersManual.html#command-line-options
+    // https://clang.llvm.org/docs/DiagnosticsReference.html
+    cflags: audioflinger_base_cflags,
+    // https://clang.llvm.org/extra/clang-tidy/
+    tidy: true,
+    tidy_checks: audioflinger_tidy_errors,
+    tidy_checks_as_errors: audioflinger_tidy_errors,
+    tidy_flags: [
+        "-format-style=file",
     ],
+}
+
+cc_defaults {
+    name: "libaudioflinger_dependencies",
 
     shared_libs: [
         "audioflinger-aidl-cpp",
@@ -67,11 +147,14 @@
         "av-types-aidl-cpp",
         "effect-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "libactivitymanager_aidl",
+        "libaudioflinger_datapath",
+        "libaudioflinger_fastpath",
         "libaudioflinger_timing",
+        "libaudioflinger_utils",
         "libaudiofoundation",
         "libaudiohal",
         "libaudioprocessing",
-        "libaudiospdif",
         "libaudioutils",
         "libcutils",
         "libutils",
@@ -80,7 +163,6 @@
         "libbinder_ndk",
         "libaudioclient",
         "libaudiomanager",
-        "libmedialogservice",
         "libmediametrics",
         "libmediautils",
         "libnbaio",
@@ -96,8 +178,40 @@
     ],
 
     static_libs: [
+        "libmedialogservice",
+        "libaudiospdif",
+    ],
+}
+
+
+cc_library {
+    name: "libaudioflinger",
+
+    defaults: [
+        "libaudioflinger_dependencies",
+        "latest_android_media_audio_common_types_cpp_shared",
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+        "audioflinger_flags_defaults",
+    ],
+
+    srcs: [
+        "AudioFlinger.cpp",
+        "DeviceEffectManager.cpp",
+        "Effects.cpp",
+        "MelReporter.cpp",
+        "PatchCommandThread.cpp",
+        "PatchPanel.cpp",
+        "Threads.cpp",
+        "Tracks.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/services/audiopolicy",
+        "frameworks/av/services/medialog",
+    ],
+
+    static_libs: [
         "libcpustats",
-        "libsndfile",
         "libpermission",
     ],
 
@@ -115,7 +229,6 @@
     ],
 
     cflags: [
-        "-DSTATE_QUEUE_INSTANTIATIONS=\"StateQueueInstantiations.cpp\"",
         "-fvisibility=hidden",
         "-Werror",
         "-Wall",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 57392c9..60717b5 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -58,8 +58,8 @@
 #include <audiomanager/IAudioManager.h>
 
 #include "AudioFlinger.h"
-#include "NBAIO_Tee.h"
-#include "PropertyUtils.h"
+#include "EffectConfiguration.h"
+#include <afutils/PropertyUtils.h>
 
 #include <media/AudioResamplerPublic.h>
 
@@ -86,9 +86,8 @@
 #include <private/android_filesystem_config.h>
 
 //#define BUFLOG_NDEBUG 0
-#include <BufLog.h>
-
-#include "TypedLogger.h"
+#include <afutils/BufLog.h>
+#include <afutils/TypedLogger.h>
 
 // ----------------------------------------------------------------------------
 
@@ -111,6 +110,7 @@
 using media::IEffectClient;
 using media::audio::common::AudioMMapPolicyInfo;
 using media::audio::common::AudioMMapPolicyType;
+using media::audio::common::AudioMode;
 using android::content::AttributionSourceState;
 using android::detail::AudioHalVersionInfo;
 
@@ -229,12 +229,14 @@
 BINDER_METHOD_ENTRY(getAAudioMixerBurstCount) \
 BINDER_METHOD_ENTRY(getAAudioHardwareBurstMinUsec) \
 BINDER_METHOD_ENTRY(setDeviceConnectedState) \
+BINDER_METHOD_ENTRY(setSimulateDeviceConnections) \
 BINDER_METHOD_ENTRY(setRequestedLatencyMode) \
 BINDER_METHOD_ENTRY(getSupportedLatencyModes) \
 BINDER_METHOD_ENTRY(setBluetoothVariableLatencyEnabled) \
 BINDER_METHOD_ENTRY(isBluetoothVariableLatencyEnabled) \
 BINDER_METHOD_ENTRY(supportsBluetoothVariableLatency) \
 BINDER_METHOD_ENTRY(getSoundDoseInterface) \
+BINDER_METHOD_ENTRY(getAudioPolicyConfig) \
 
 // singleton for Binder Method Statistics for IAudioFlinger
 static auto& getIAudioFlingerStatistics() {
@@ -369,7 +371,7 @@
     BatteryNotifier::getInstance().noteResetAudio();
 
     mDevicesFactoryHal = DevicesFactoryHalInterface::create();
-    mEffectsFactoryHal = EffectsFactoryHalInterface::create();
+    mEffectsFactoryHal = audioflinger::EffectConfiguration::getEffectsFactoryHal();
 
     mMediaLogNotifier->run("MediaLogNotifier");
     std::vector<pid_t> halPids;
@@ -484,14 +486,17 @@
     return mAAudioHwBurstMinMicros;
 }
 
-status_t AudioFlinger::setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) {
+status_t AudioFlinger::setDeviceConnectedState(const struct audio_port_v7 *port,
+                                               media::DeviceConnectedState state) {
     status_t final_result = NO_INIT;
     Mutex::Autolock _l(mLock);
     AutoMutex lock(mHardwareLock);
     mHardwareStatus = AUDIO_HW_SET_CONNECTED_STATE;
     for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
         sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
-        status_t result = dev->setConnectedState(port, connected);
+        status_t result = state == media::DeviceConnectedState::PREPARE_TO_DISCONNECT
+                ? dev->prepareToDisconnectExternalDevice(port)
+                : dev->setConnectedState(port, state == media::DeviceConnectedState::CONNECTED);
         // Same logic as with setParameter: it's a success if at least one
         // HAL module accepts the update.
         if (final_result != NO_ERROR) {
@@ -502,6 +507,25 @@
     return final_result;
 }
 
+status_t AudioFlinger::setSimulateDeviceConnections(bool enabled) {
+    bool at_least_one_succeeded = false;
+    status_t last_error = INVALID_OPERATION;
+    Mutex::Autolock _l(mLock);
+    AutoMutex lock(mHardwareLock);
+    mHardwareStatus = AUDIO_HW_SET_SIMULATE_CONNECTIONS;
+    for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
+        sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
+        status_t result = dev->setSimulateDeviceConnections(enabled);
+        if (result == OK) {
+            at_least_one_succeeded = true;
+        } else {
+            last_error = result;
+        }
+    }
+    mHardwareStatus = AUDIO_HW_IDLE;
+    return at_least_one_succeeded ? OK : last_error;
+}
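Note: setSimulateDeviceConnections above follows the same policy as setDeviceConnectedState and setParameters: the call succeeds if at least one HAL module accepts it, otherwise the last error seen is reported. A minimal standalone sketch of that policy, assuming Android's <utils/Errors.h> for status_t/OK/INVALID_OPERATION; the helper name and template are hypothetical, not part of the patch:

    // Hypothetical helper: apply an operation to every HW module, succeed if any module does.
    template <typename Collection, typename Op>
    status_t applyToAllHwModules(const Collection& modules, Op&& op) {
        bool anySucceeded = false;
        status_t lastError = INVALID_OPERATION;  // reported only if every module fails
        for (const auto& module : modules) {
            const status_t result = op(module);
            if (result == OK) {
                anySucceeded = true;
            } else {
                lastError = result;
            }
        }
        return anySucceeded ? OK : lastError;
    }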
+
 // getDefaultVibratorInfo_l must be called with AudioFlinger lock held.
 std::optional<media::AudioVibratorInfo> AudioFlinger::getDefaultVibratorInfo_l() {
     if (mAudioVibratorInfos.empty()) {
@@ -705,24 +729,24 @@
     }
 }
 
-status_t AudioFlinger::addEffectToHal(audio_port_handle_t deviceId,
-        audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
+status_t AudioFlinger::addEffectToHal(
+        const struct audio_port_config *device, const sp<EffectHalInterface>& effect) {
     AutoMutex lock(mHardwareLock);
-    AudioHwDevice *audioHwDevice = mAudioHwDevs.valueFor(hwModuleId);
+    AudioHwDevice *audioHwDevice = mAudioHwDevs.valueFor(device->ext.device.hw_module);
     if (audioHwDevice == nullptr) {
         return NO_INIT;
     }
-    return audioHwDevice->hwDevice()->addDeviceEffect(deviceId, effect);
+    return audioHwDevice->hwDevice()->addDeviceEffect(device, effect);
 }
 
-status_t AudioFlinger::removeEffectFromHal(audio_port_handle_t deviceId,
-        audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
+status_t AudioFlinger::removeEffectFromHal(
+        const struct audio_port_config *device, const sp<EffectHalInterface>& effect) {
     AutoMutex lock(mHardwareLock);
-    AudioHwDevice *audioHwDevice = mAudioHwDevs.valueFor(hwModuleId);
+    AudioHwDevice *audioHwDevice = mAudioHwDevs.valueFor(device->ext.device.hw_module);
     if (audioHwDevice == nullptr) {
         return NO_INIT;
     }
-    return audioHwDevice->hwDevice()->removeDeviceEffect(deviceId, effect);
+    return audioHwDevice->hwDevice()->removeDeviceEffect(device, effect);
 }
 
 static const char * const audio_interfaces[] = {
@@ -816,6 +840,8 @@
     for (const auto& vibratorInfo : mAudioVibratorInfos) {
         dprintf(fd, "  - %s\n", vibratorInfo.toString().c_str());
     }
+    dprintf(fd, "Bluetooth latency modes are %senabled\n",
+            mBluetoothLatencyModesEnabled ? "" : "not ");
 }
 
 void AudioFlinger::dumpPermissionDenial(int fd, const Vector<String16>& args __unused)
@@ -838,6 +864,7 @@
 }
 
 status_t AudioFlinger::dump(int fd, const Vector<String16>& args)
+NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
     if (!dumpAllowed()) {
         dumpPermissionDenial(fd, args);
@@ -943,7 +970,6 @@
         // to lookup the service if it's not running, as it will block for a second
         if (sMediaLogServiceAsBinder != 0) {
             dprintf(fd, "\nmedia.log:\n");
-            Vector<String16> args;
             sMediaLogServiceAsBinder->dump(fd, args);
         }
 
@@ -1216,37 +1242,37 @@
         output.portId = portId;
 
         if (lStatus == NO_ERROR) {
+            // no risk of deadlock because AudioFlinger::mLock is held
+            Mutex::Autolock _dl(thread->mLock);
             // Connect secondary outputs. Failure on a secondary output must not impede the primary.
             // Any secondary output setup failure will lead to a desync between the AP and AF until
             // the track is destroyed.
             updateSecondaryOutputsForTrack_l(track.get(), thread, secondaryOutputs);
-        }
-
-        // move effect chain to this output thread if an effect on same session was waiting
-        // for a track to be created
-        if (lStatus == NO_ERROR && effectThread != NULL) {
-            // no risk of deadlock because AudioFlinger::mLock is held
-            Mutex::Autolock _dl(thread->mLock);
-            Mutex::Autolock _sl(effectThread->mLock);
-            if (moveEffectChain_l(sessionId, effectThread, thread) == NO_ERROR) {
-                effectThreadId = thread->id();
-                effectIds = thread->getEffectIds_l(sessionId);
+            // move effect chain to this output thread if an effect on same session was waiting
+            // for a track to be created
+            if (effectThread != nullptr) {
+                Mutex::Autolock _sl(effectThread->mLock);
+                if (moveEffectChain_l(sessionId, effectThread, thread) == NO_ERROR) {
+                    effectThreadId = thread->id();
+                    effectIds = thread->getEffectIds_l(sessionId);
+                }
             }
         }
 
         // Look for sync events awaiting for a session to be used.
-        for (size_t i = 0; i < mPendingSyncEvents.size(); i++) {
-            if (mPendingSyncEvents[i]->triggerSession() == sessionId) {
-                if (thread->isValidSyncEvent(mPendingSyncEvents[i])) {
+        for (auto it = mPendingSyncEvents.begin(); it != mPendingSyncEvents.end();) {
+            if ((*it)->triggerSession() == sessionId) {
+                if (thread->isValidSyncEvent(*it)) {
                     if (lStatus == NO_ERROR) {
-                        (void) track->setSyncEvent(mPendingSyncEvents[i]);
+                        (void) track->setSyncEvent(*it);
                     } else {
-                        mPendingSyncEvents[i]->cancel();
+                        (*it)->cancel();
                     }
-                    mPendingSyncEvents.removeAt(i);
-                    i--;
+                    it = mPendingSyncEvents.erase(it);
+                    continue;
                 }
             }
+            ++it;
         }
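Note: the loop above replaces index bookkeeping on a Vector with the standard erase-while-iterating idiom on std::list, where erase() returns the iterator to the element after the one removed. A minimal sketch of the idiom, with illustrative names only:

    #include <list>

    void removeMatching(std::list<int>& values, int needle) {
        for (auto it = values.begin(); it != values.end();) {
            if (*it == needle) {
                it = values.erase(it);  // erase() returns the next valid iterator
            } else {
                ++it;                   // advance only when nothing was erased
            }
        }
    }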
         if ((output.flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) == AUDIO_OUTPUT_FLAG_HW_AV_SYNC) {
             setAudioHwSyncForSession_l(thread, sessionId);
@@ -1445,8 +1471,9 @@
     if (NO_ERROR == ret) {
         Mutex::Autolock _l(mLock);
         mMode = mode;
-        for (size_t i = 0; i < mPlaybackThreads.size(); i++)
+        for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
             mPlaybackThreads.valueAt(i)->setMode(mode);
+        }
     }
 
     mediametrics::LogItem(mMetricsId)
@@ -1794,7 +1821,7 @@
 // forwardAudioHwSyncToDownstreamPatches_l() must be called with AudioFlinger::mLock held
 void AudioFlinger::forwardParametersToDownstreamPatches_l(
         audio_io_handle_t upStream, const String8& keyValuePairs,
-        std::function<bool(const sp<PlaybackThread>&)> useThread)
+        const std::function<bool(const sp<PlaybackThread>&)>& useThread)
 {
     std::vector<PatchPanel::SoftwarePatch> swPatches;
     if (mPatchPanel.getDownstreamSoftwarePatches(upStream, &swPatches) != OK) return;
@@ -1810,7 +1837,7 @@
 
 // Update downstream patches for all playback threads attached to an MSD module
 void AudioFlinger::updateDownStreamPatches_l(const struct audio_patch *patch,
-                                             const std::set<audio_io_handle_t> streams)
+                                             const std::set<audio_io_handle_t>& streams)
 {
     for (const audio_io_handle_t stream : streams) {
         PlaybackThread *playbackThread = checkPlaybackThread_l(stream);
@@ -1840,6 +1867,8 @@
         String8(AudioParameter::keyStreamSupportedFormats),
         String8(AudioParameter::keyStreamSupportedChannels),
         String8(AudioParameter::keyStreamSupportedSamplingRates),
+        String8(AudioParameter::keyClosing),
+        String8(AudioParameter::keyExiting),
     };
 
     if (isAudioServerUid(callingUid)) {
@@ -2016,24 +2045,29 @@
     mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE;
 
     sp<DeviceHalInterface> dev = mPrimaryHardwareDev->hwDevice();
+
     std::vector<audio_channel_mask_t> channelMasks = {channelMask};
-    if (channelMask != AUDIO_CHANNEL_IN_MONO)
+    if (channelMask != AUDIO_CHANNEL_IN_MONO) {
         channelMasks.push_back(AUDIO_CHANNEL_IN_MONO);
-    if (channelMask != AUDIO_CHANNEL_IN_STEREO)
+    }
+    if (channelMask != AUDIO_CHANNEL_IN_STEREO) {
         channelMasks.push_back(AUDIO_CHANNEL_IN_STEREO);
+    }
 
     std::vector<audio_format_t> formats = {format};
-    if (format != AUDIO_FORMAT_PCM_16_BIT)
+    if (format != AUDIO_FORMAT_PCM_16_BIT) {
         formats.push_back(AUDIO_FORMAT_PCM_16_BIT);
+    }
 
     std::vector<uint32_t> sampleRates = {sampleRate};
     static const uint32_t SR_44100 = 44100;
     static const uint32_t SR_48000 = 48000;
-
-    if (sampleRate != SR_48000)
+    if (sampleRate != SR_48000) {
         sampleRates.push_back(SR_48000);
-    if (sampleRate != SR_44100)
+    }
+    if (sampleRate != SR_44100) {
         sampleRates.push_back(SR_44100);
+    }
 
     mHardwareStatus = AUDIO_HW_IDLE;
 
@@ -2504,7 +2538,7 @@
         // session and move it to this thread.
         sp<EffectChain> chain = getOrphanEffectChain_l(sessionId);
         if (chain != 0) {
-            Mutex::Autolock _l(thread->mLock);
+            Mutex::Autolock _l2(thread->mLock);
             thread->addEffectChain_l(chain);
         }
         break;
@@ -2543,6 +2577,47 @@
 
 // ----------------------------------------------------------------------------
 
+status_t AudioFlinger::getAudioPolicyConfig(media::AudioPolicyConfig *config)
+{
+    if (config == nullptr) {
+        return BAD_VALUE;
+    }
+    Mutex::Autolock _l(mLock);
+    AutoMutex lock(mHardwareLock);
+    RETURN_STATUS_IF_ERROR(
+            mDevicesFactoryHal->getSurroundSoundConfig(&config->surroundSoundConfig));
+    RETURN_STATUS_IF_ERROR(mDevicesFactoryHal->getEngineConfig(&config->engineConfig));
+    std::vector<std::string> hwModuleNames;
+    RETURN_STATUS_IF_ERROR(mDevicesFactoryHal->getDeviceNames(&hwModuleNames));
+    std::set<AudioMode> allSupportedModes;
+    for (const auto& name : hwModuleNames) {
+        AudioHwDevice* module = loadHwModule_l(name.c_str());
+        if (module == nullptr) continue;
+        media::AudioHwModule aidlModule;
+        if (module->hwDevice()->getAudioPorts(&aidlModule.ports) == OK &&
+                module->hwDevice()->getAudioRoutes(&aidlModule.routes) == OK) {
+            aidlModule.handle = module->handle();
+            aidlModule.name = module->moduleName();
+            config->modules.push_back(std::move(aidlModule));
+        }
+        std::vector<AudioMode> supportedModes;
+        if (module->hwDevice()->getSupportedModes(&supportedModes) == OK) {
+            allSupportedModes.insert(supportedModes.begin(), supportedModes.end());
+        }
+    }
+    if (!allSupportedModes.empty()) {
+        config->supportedModes.insert(config->supportedModes.end(),
+                allSupportedModes.begin(), allSupportedModes.end());
+    } else {
+        ALOGW("%s: The HAL does not provide telephony functionality", __func__);
+        config->supportedModes = { media::audio::common::AudioMode::NORMAL,
+            media::audio::common::AudioMode::RINGTONE,
+            media::audio::common::AudioMode::IN_CALL,
+            media::audio::common::AudioMode::IN_COMMUNICATION };
+    }
+    return OK;
+}
+
 audio_module_handle_t AudioFlinger::loadHwModule(const char *name)
 {
     if (name == NULL) {
@@ -2553,16 +2628,17 @@
     }
     Mutex::Autolock _l(mLock);
     AutoMutex lock(mHardwareLock);
-    return loadHwModule_l(name);
+    AudioHwDevice* module = loadHwModule_l(name);
+    return module != nullptr ? module->handle() : AUDIO_MODULE_HANDLE_NONE;
 }
 
 // loadHwModule_l() must be called with AudioFlinger::mLock and AudioFlinger::mHardwareLock held
-audio_module_handle_t AudioFlinger::loadHwModule_l(const char *name)
+AudioHwDevice* AudioFlinger::loadHwModule_l(const char *name)
 {
     for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
         if (strncmp(mAudioHwDevs.valueAt(i)->moduleName(), name, strlen(name)) == 0) {
             ALOGW("loadHwModule() module %s already loaded", name);
-            return mAudioHwDevs.keyAt(i);
+            return mAudioHwDevs.valueAt(i);
         }
     }
 
@@ -2571,7 +2647,7 @@
     int rc = mDevicesFactoryHal->openDevice(name, &dev);
     if (rc) {
         ALOGE("loadHwModule() error %d loading module %s", rc, name);
-        return AUDIO_MODULE_HANDLE_NONE;
+        return nullptr;
     }
     if (!mMelReporter->activateHalSoundDoseComputation(name, dev)) {
         ALOGW("loadHwModule() sound dose reporting is not available");
@@ -2582,7 +2658,7 @@
     mHardwareStatus = AUDIO_HW_IDLE;
     if (rc) {
         ALOGE("loadHwModule() init check error %d for module %s", rc, name);
-        return AUDIO_MODULE_HANDLE_NONE;
+        return nullptr;
     }
 
     // Check and cache this HAL's level of support for master mute and master
@@ -2656,8 +2732,7 @@
 
     ALOGI("loadHwModule() Loaded %s audio interface, handle %d", name, handle);
 
-    return handle;
-
+    return audioDevice;
 }
 
 // ----------------------------------------------------------------------------
@@ -2906,7 +2981,7 @@
 sp<AudioFlinger::ThreadBase> AudioFlinger::openOutput_l(audio_module_handle_t module,
                                                         audio_io_handle_t *output,
                                                         audio_config_t *halConfig,
-                                                        audio_config_base_t *mixerConfig __unused,
+                                                        audio_config_base_t *mixerConfig,
                                                         audio_devices_t deviceType,
                                                         const String8& address,
                                                         audio_output_flags_t flags)
@@ -2925,14 +3000,6 @@
         return nullptr;
     }
 
-#ifndef MULTICHANNEL_EFFECT_CHAIN
-    if (flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) {
-        ALOGE("openOutput_l() cannot create spatializer thread "
-                "without #define MULTICHANNEL_EFFECT_CHAIN");
-        return nullptr;
-    }
-#endif
-
     mHardwareStatus = AUDIO_HW_OUTPUT_OPEN;
 
     // FOR TESTING ONLY:
@@ -3032,7 +3099,6 @@
             aidl2legacy_int32_t_audio_output_flags_t_mask(request.flags));
 
     audio_io_handle_t output;
-    uint32_t latencyMs;
 
     ALOGI("openOutput() this %p, module %d Device %s, SamplingRate %d, Format %#08x, "
               "Channels %#x, flags %#x",
@@ -3055,6 +3121,7 @@
     sp<ThreadBase> thread = openOutput_l(module, &output, &halConfig,
             &mixerConfig, deviceType, address, flags);
     if (thread != 0) {
+        uint32_t latencyMs = 0;
         if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) == 0) {
             PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
             latencyMs = playbackThread->latency();
@@ -3403,7 +3470,7 @@
                         continue;
                     }
                     if (t->hasAudioSession(chain->sessionId()) != 0) {
-                        Mutex::Autolock _l(t->mLock);
+                        Mutex::Autolock _l2(t->mLock);
                         ALOGV("closeInput() found thread %d for effect session %d",
                               t->id(), chain->sessionId());
                         t->addEffectChain_l(chain);
@@ -3614,7 +3681,8 @@
     }
 
     for (size_t i = 0; i < chains.size(); i++) {
-        sp<EffectChain> ec = chains[i];
+         // clang-tidy suggests const ref
+        sp<EffectChain> ec = chains[i];  // NOLINT(performance-unnecessary-copy-initialization)
         int sessionid = ec->sessionId();
         sp<ThreadBase> t = ec->thread().promote();
         if (t == 0) {
@@ -3797,7 +3865,7 @@
     PlaybackThread *thread = primaryPlaybackThread_l();
 
     if (thread == NULL) {
-        return DeviceTypeSet();
+        return {};
     }
 
     return thread->outDeviceTypes();
@@ -3923,18 +3991,19 @@
         patchTrack->setPeerProxy(patchRecord, true /* holdReference */);
         patchRecord->setPeerProxy(patchTrack, false /* holdReference */);
     }
-    track->setTeePatches(std::move(teePatches));
+    track->setTeePatchesToUpdate(std::move(teePatches));
 }
 
-sp<AudioFlinger::SyncEvent> AudioFlinger::createSyncEvent(AudioSystem::sync_event_t type,
+sp<audioflinger::SyncEvent> AudioFlinger::createSyncEvent(AudioSystem::sync_event_t type,
                                     audio_session_t triggerSession,
                                     audio_session_t listenerSession,
-                                    sync_event_callback_t callBack,
+                                    const audioflinger::SyncEventCallback& callBack,
                                     const wp<RefBase>& cookie)
 {
     Mutex::Autolock _l(mLock);
 
-    sp<SyncEvent> event = new SyncEvent(type, triggerSession, listenerSession, callBack, cookie);
+    auto event = sp<audioflinger::SyncEvent>::make(
+            type, triggerSession, listenerSession, callBack, cookie);
     status_t playStatus = NAME_NOT_FOUND;
     status_t recStatus = NAME_NOT_FOUND;
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
@@ -3950,7 +4019,7 @@
         }
     }
     if (playStatus == NAME_NOT_FOUND || recStatus == NAME_NOT_FOUND) {
-        mPendingSyncEvents.add(event);
+        mPendingSyncEvents.emplace_back(event);
     } else {
         ALOGV("createSyncEvent() invalid event %d", event->type());
         event.clear();
@@ -4309,7 +4378,7 @@
             // session and used it instead of creating a new one.
             sp<EffectChain> chain = getOrphanEffectChain_l(sessionId);
             if (chain != 0) {
-                Mutex::Autolock _l(thread->mLock);
+                Mutex::Autolock _l2(thread->mLock);
                 thread->addEffectChain_l(chain);
             }
         }
@@ -4427,6 +4496,7 @@
 status_t AudioFlinger::moveEffectChain_l(audio_session_t sessionId,
                                    AudioFlinger::PlaybackThread *srcThread,
                                    AudioFlinger::PlaybackThread *dstThread)
+NO_THREAD_SAFETY_ANALYSIS // requires srcThread and dstThread locks
 {
     ALOGV("moveEffectChain_l() session %d from thread %p to thread %p",
             sessionId, srcThread, dstThread);
@@ -4456,11 +4526,12 @@
     // transfer all effects one by one so that new effect chain is created on new thread with
     // correct buffer sizes and audio parameters and effect engines reconfigured accordingly
     sp<EffectChain> dstChain;
-    sp<EffectModule> effect = chain->getEffectFromId_l(0);
     Vector< sp<EffectModule> > removed;
     status_t status = NO_ERROR;
     std::string errorString;
-    while (effect != nullptr) {
+    // process effects one by one.
+    for (sp<EffectModule> effect = chain->getEffectFromId_l(0); effect != nullptr;
+            effect = chain->getEffectFromId_l(0)) {
         srcThread->removeEffect_l(effect);
         removed.add(effect);
         status = dstThread->addEffect_l(effect);
@@ -4481,7 +4552,6 @@
                 break;
             }
         }
-        effect = chain->getEffectFromId_l(0);
     }
 
     size_t restored = 0;
@@ -4585,6 +4655,7 @@
 }
 
 bool AudioFlinger::isNonOffloadableGlobalEffectEnabled_l()
+NO_THREAD_SAFETY_ANALYSIS  // thread lock for getEffectChain_l.
 {
     if (mGlobalEffectEnableTime != 0 &&
             ((systemTime() - mGlobalEffectEnableTime) < kMinGlobalEffectEnabletimeNs)) {
@@ -4696,6 +4767,7 @@
         case TransactionCode::SET_REQUESTED_LATENCY_MODE:
         case TransactionCode::GET_SUPPORTED_LATENCY_MODES:
         case TransactionCode::INVALIDATE_TRACKS:
+        case TransactionCode::GET_AUDIO_POLICY_CONFIG:
             ALOGW("%s: transaction %d received from PID %d",
                   __func__, code, IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 2204f8f..8b1d70b 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -79,6 +79,10 @@
 #include <mediautils/Synchronization.h>
 #include <mediautils/ThreadSnapshot.h>
 
+#include <afutils/AllocatorFactory.h>
+#include <afutils/AudioWatchdog.h>
+#include <afutils/NBAIO_Tee.h>
+
 #include <audio_utils/clock.h>
 #include <audio_utils/FdToString.h>
 #include <audio_utils/LinearMap.h>
@@ -89,18 +93,18 @@
 
 #include <sounddose/SoundDoseManager.h>
 #include <timing/MonotonicFrameCounter.h>
+#include <timing/SyncEvent.h>
+#include <timing/SynchronizedRecordState.h>
 
-#include "FastCapture.h"
-#include "FastMixer.h"
+#include <datapath/AudioHwDevice.h>
+#include <datapath/AudioStreamOut.h>
+#include <datapath/SpdifStreamOut.h>
+#include <datapath/ThreadMetrics.h>
+#include <datapath/TrackMetrics.h>
+#include <fastpath/FastCapture.h>
+#include <fastpath/FastMixer.h>
 #include <media/nbaio/NBAIO.h>
-#include "AudioWatchdog.h"
-#include "AudioStreamOut.h"
-#include "SpdifStreamOut.h"
-#include "AudioHwDevice.h"
-#include "NBAIO_Tee.h"
-#include "ThreadMetrics.h"
-#include "TrackMetrics.h"
-#include "AllocatorFactory.h"
+
 #include <android/os/IPowerManager.h>
 
 #include <media/nblog/NBLog.h>
@@ -124,7 +128,6 @@
 class EffectsFactoryHalInterface;
 class FastMixer;
 class IAudioManager;
-class ISoundDoseCallback;
 class PassthruBufferProvider;
 class RecordBufferConverter;
 class ServerProxy;
@@ -298,7 +301,10 @@
 
     virtual int32_t getAAudioHardwareBurstMinUsec();
 
-    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected);
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port,
+                                             media::DeviceConnectedState state);
+
+    virtual status_t setSimulateDeviceConnections(bool enabled);
 
     virtual status_t setRequestedLatencyMode(
             audio_io_handle_t output, audio_latency_mode_t mode);
@@ -317,6 +323,8 @@
 
     status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
 
+    virtual status_t getAudioPolicyConfig(media::AudioPolicyConfig* config);
+
     status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
         const std::function<status_t()>& delegate) override;
 
@@ -340,13 +348,13 @@
         const sp<os::ExternalVibration>& externalVibration);
     static void onExternalVibrationStop(const sp<os::ExternalVibration>& externalVibration);
 
-    status_t addEffectToHal(audio_port_handle_t deviceId,
-            audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect);
-    status_t removeEffectFromHal(audio_port_handle_t deviceId,
-            audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect);
+    status_t addEffectToHal(
+            const struct audio_port_config *device, const sp<EffectHalInterface>& effect);
+    status_t removeEffectFromHal(
+            const struct audio_port_config *device, const sp<EffectHalInterface>& effect);
 
     void updateDownStreamPatches_l(const struct audio_patch *patch,
-                                   const std::set<audio_io_handle_t> streams);
+                                   const std::set<audio_io_handle_t>& streams);
 
     std::optional<media::AudioVibratorInfo> getDefaultVibratorInfo_l();
 
@@ -379,51 +387,16 @@
 
     static inline std::atomic<AudioFlinger *> gAudioFlinger = nullptr;
 
-    class SyncEvent;
-
-    typedef void (*sync_event_callback_t)(const wp<SyncEvent>& event) ;
-
-    class SyncEvent : public RefBase {
-    public:
-        SyncEvent(AudioSystem::sync_event_t type,
-                  audio_session_t triggerSession,
-                  audio_session_t listenerSession,
-                  sync_event_callback_t callBack,
-                  wp<RefBase> cookie)
-        : mType(type), mTriggerSession(triggerSession), mListenerSession(listenerSession),
-          mCallback(callBack), mCookie(cookie)
-        {}
-
-        virtual ~SyncEvent() {}
-
-        void trigger() {
-            Mutex::Autolock _l(mLock);
-            if (mCallback) mCallback(wp<SyncEvent>(this));
-        }
-        bool isCancelled() const { Mutex::Autolock _l(mLock); return (mCallback == NULL); }
-        void cancel() { Mutex::Autolock _l(mLock); mCallback = NULL; }
-        AudioSystem::sync_event_t type() const { return mType; }
-        audio_session_t triggerSession() const { return mTriggerSession; }
-        audio_session_t listenerSession() const { return mListenerSession; }
-        wp<RefBase> cookie() const { return mCookie; }
-
-    private:
-          const AudioSystem::sync_event_t mType;
-          const audio_session_t mTriggerSession;
-          const audio_session_t mListenerSession;
-          sync_event_callback_t mCallback;
-          const wp<RefBase> mCookie;
-          mutable Mutex mLock;
-    };
-
-    sp<SyncEvent> createSyncEvent(AudioSystem::sync_event_t type,
+    sp<audioflinger::SyncEvent> createSyncEvent(AudioSystem::sync_event_t type,
                                         audio_session_t triggerSession,
                                         audio_session_t listenerSession,
-                                        sync_event_callback_t callBack,
+                                        const audioflinger::SyncEventCallback& callBack,
                                         const wp<RefBase>& cookie);
 
     bool        btNrecIsOff() const { return mBtNrecIsOff.load(); }
 
+    void             lock() ACQUIRE(mLock) { mLock.lock(); }
+    void             unlock() RELEASE(mLock) { mLock.unlock(); }
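Note: ACQUIRE()/RELEASE() here, and the NO_THREAD_SAFETY_ANALYSIS added to the dump() methods, moveEffectChain_l() and isNonOffloadableGlobalEffectEnabled_l(), are in AOSP typically macros over Clang's thread-safety (capability) attributes. A minimal sketch written with the raw attributes; the types and members below are hypothetical, not from this patch:

    // A lock type declared as a capability, with acquire/release annotations.
    struct __attribute__((capability("mutex"))) SketchLock {
        void lock() __attribute__((acquire_capability())) {}
        void unlock() __attribute__((release_capability())) {}
    };

    struct Counter {
        SketchLock mLock;
        int mValue __attribute__((guarded_by(mLock))) = 0;

        void increment() {
            mLock.lock();    // the analysis records that mLock is held
            ++mValue;        // OK: mValue is guarded_by(mLock)
            mLock.unlock();
        }

        // Opts this function out of the analysis, as the patch does where locking is conditional.
        int peek() __attribute__((no_thread_safety_analysis)) { return mValue; }
    };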
 
 private:
 
@@ -637,13 +610,6 @@
     };
 
     // --- PlaybackThread ---
-#ifdef FLOAT_EFFECT_CHAIN
-#define EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_FLOAT
-using effect_buffer_t = float;
-#else
-#define EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_16_BIT
-using effect_buffer_t = int16_t;
-#endif
 
 #include "Threads.h"
 
@@ -869,7 +835,7 @@
                 void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices);
                 void forwardParametersToDownstreamPatches_l(
                         audio_io_handle_t upStream, const String8& keyValuePairs,
-                        std::function<bool(const sp<PlaybackThread>&)> useThread = nullptr);
+                        const std::function<bool(const sp<PlaybackThread>&)>& useThread = nullptr);
 
     // AudioStreamIn is immutable, so their fields are const.
     // For emphasis, we could also make all pointers to them be "const *",
@@ -882,7 +848,8 @@
 
         sp<DeviceHalInterface> hwDev() const { return audioHwDev->hwDevice(); }
 
-        AudioStreamIn(AudioHwDevice *dev, sp<StreamInHalInterface> in, audio_input_flags_t flags) :
+        AudioStreamIn(AudioHwDevice *dev, const sp<StreamInHalInterface>& in,
+                audio_input_flags_t flags) :
             audioHwDev(dev), stream(in), flags(flags) {}
         status_t read(void *buffer, size_t bytes, size_t *read) override {
             return stream->read(buffer, bytes, read);
@@ -953,6 +920,7 @@
         AUDIO_HW_GET_MASTER_MUTE,       // get_master_mute
         AUDIO_HW_GET_MICROPHONES,       // getMicrophones
         AUDIO_HW_SET_CONNECTED_STATE,   // setConnectedState
+        AUDIO_HW_SET_SIMULATE_CONNECTIONS, // setSimulateDeviceConnections
     };
 
     mutable     hardware_call_state                 mHardwareStatus;    // for dump only
@@ -984,10 +952,10 @@
                 float       masterVolume_l() const;
                 float       getMasterBalance_l() const;
                 bool        masterMute_l() const;
-                audio_module_handle_t loadHwModule_l(const char *name);
+                AudioHwDevice* loadHwModule_l(const char *name);
 
-                Vector < sp<SyncEvent> > mPendingSyncEvents; // sync events awaiting for a session
-                                                             // to be created
+                // sync events awaiting a session to be created.
+                std::list<sp<audioflinger::SyncEvent>> mPendingSyncEvents;
 
                 // Effect chains without a valid thread
                 DefaultKeyedVector< audio_session_t , sp<EffectChain> > mOrphanEffectChains;
diff --git a/services/audioflinger/AudioWatchdog.h b/services/audioflinger/AudioWatchdog.h
deleted file mode 100644
index 7b69fc6..0000000
--- a/services/audioflinger/AudioWatchdog.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// The watchdog thread runs periodically.  It has two functions:
-//   (a) verify that adequate CPU time is available, and log
-//       as soon as possible when there appears to be a CPU shortage
-//   (b) monitor the other threads [not yet implemented]
-
-#ifndef AUDIO_WATCHDOG_H
-#define AUDIO_WATCHDOG_H
-
-#include <time.h>
-#include <utils/Thread.h>
-
-namespace android {
-
-// Keeps a cache of AudioWatchdog statistics that can be logged by dumpsys.
-// The usual caveats about atomicity of information apply.
-struct AudioWatchdogDump {
-    AudioWatchdogDump() : mUnderruns(0), mLogs(0), mMostRecent(0) { }
-    /*virtual*/ ~AudioWatchdogDump() { }
-    uint32_t mUnderruns;    // total number of underruns
-    uint32_t mLogs;         // total number of log messages
-    time_t   mMostRecent;   // time of most recent log
-    void     dump(int fd);  // should only be called on a stable copy, not the original
-};
-
-class AudioWatchdog : public Thread {
-
-public:
-    explicit AudioWatchdog(unsigned periodMs = 50) : Thread(false /*canCallJava*/), mPaused(false),
-            mPeriodNs(periodMs * 1000000), mMaxCycleNs(mPeriodNs * 2),
-            // mOldTs
-            // mLogTs initialized below
-            mOldTsValid(false), mUnderruns(0), mLogs(0), mDump(&mDummyDump)
-        {
-#define MIN_TIME_BETWEEN_LOGS_SEC 60
-            // force an immediate log on first underrun
-            mLogTs.tv_sec = MIN_TIME_BETWEEN_LOGS_SEC;
-            mLogTs.tv_nsec = 0;
-        }
-    virtual         ~AudioWatchdog() { }
-
-     // Do not call Thread::requestExitAndWait() without first calling requestExit().
-    // Thread::requestExitAndWait() is not virtual, and the implementation doesn't do enough.
-    virtual void        requestExit();
-
-    // FIXME merge API and implementation with AudioTrackThread
-    void            pause();        // suspend thread from execution at next loop boundary
-    void            resume();       // allow thread to execute, if not requested to exit
-
-    // Where to store the dump, or NULL to not update
-    void            setDump(AudioWatchdogDump* dump);
-
-private:
-    virtual bool    threadLoop();
-
-    Mutex           mMyLock;        // Thread::mLock is private
-    Condition       mMyCond;        // Thread::mThreadExitedCondition is private
-    bool            mPaused;        // whether thread is currently paused
-
-    uint32_t        mPeriodNs;      // nominal period
-    uint32_t        mMaxCycleNs;    // maximum allowed time of one cycle before declaring underrun
-    struct timespec mOldTs;         // monotonic time when threadLoop last ran
-    struct timespec mLogTs;         // time since last log
-    bool            mOldTsValid;    // whether mOldTs is valid
-    uint32_t        mUnderruns;     // total number of underruns
-    uint32_t        mLogs;          // total number of logs
-    AudioWatchdogDump*  mDump;      // where to store the dump, always non-NULL
-    AudioWatchdogDump   mDummyDump; // default area for dump in case setDump() is not called
-};
-
-}   // namespace android
-
-#endif  // AUDIO_WATCHDOG_H
diff --git a/services/audioflinger/Configuration.h b/services/audioflinger/Configuration.h
index ede8e3f..845697a 100644
--- a/services/audioflinger/Configuration.h
+++ b/services/audioflinger/Configuration.h
@@ -41,15 +41,4 @@
 // uncomment to log CPU statistics every n wall clock seconds
 //#define DEBUG_CPU_USAGE 10
 
-// define FLOAT_EFFECT_CHAIN to request float effects (falls back to int16_t if unavailable)
-#define FLOAT_EFFECT_CHAIN
-
-#ifdef FLOAT_EFFECT_CHAIN
-// define FLOAT_AUX to process aux effect buffers in float (FLOAT_EFFECT_CHAIN must be defined)
-#define FLOAT_AUX
-
-// define MULTICHANNEL_EFFECT_CHAIN to allow multichannel effects (FLOAT_EFFECT_CHAIN defined)
-#define MULTICHANNEL_EFFECT_CHAIN
-#endif
-
 #endif // ANDROID_AUDIOFLINGER_CONFIGURATION_H
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 9105500..4fb6138 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -23,6 +23,7 @@
 #include <audio_utils/primitives.h>
 
 #include "AudioFlinger.h"
+#include "EffectConfiguration.h"
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 
 // ----------------------------------------------------------------------------
@@ -111,14 +112,16 @@
 
 status_t AudioFlinger::DeviceEffectManager::checkEffectCompatibility(
         const effect_descriptor_t *desc) {
-    sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+    const sp<EffectsFactoryHalInterface> effectsFactory =
+            audioflinger::EffectConfiguration::getEffectsFactoryHal();
     if (effectsFactory == nullptr) {
         return BAD_VALUE;
     }
 
-    static AudioHalVersionInfo sMinDeviceEffectHalVersion =
+    static const AudioHalVersionInfo sMinDeviceEffectHalVersion =
             AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 6, 0);
-    AudioHalVersionInfo halVersion = effectsFactory->getHalVersion();
+    static const AudioHalVersionInfo halVersion =
+            audioflinger::EffectConfiguration::getAudioHalVersionInfo();
 
     // We can trust AIDL generated AudioHalVersionInfo comparison operator (based on std::tie) as
     // long as the type, major and minor sequence doesn't change in the definition.
@@ -137,7 +140,8 @@
         const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
         sp<EffectHalInterface> *effect) {
     status_t status = NO_INIT;
-    sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+    const sp<EffectsFactoryHalInterface> effectsFactory =
+            audioflinger::EffectConfiguration::getEffectsFactoryHal();
     if (effectsFactory != 0) {
         status = effectsFactory->createEffect(
                 pEffectUuid, sessionId, AUDIO_IO_HANDLE_NONE, deviceId, effect);
@@ -145,7 +149,9 @@
     return status;
 }
 
-void AudioFlinger::DeviceEffectManager::dump(int fd) {
+void AudioFlinger::DeviceEffectManager::dump(int fd)
+NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
+{
     const bool locked = dumpTryLock(mLock);
     if (!locked) {
         String8 result("DeviceEffectManager may be deadlocked\n");
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index 7602f12..b87f830 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -44,13 +44,13 @@
     status_t createEffectHal(const effect_uuid_t *pEffectUuid,
            int32_t sessionId, int32_t deviceId,
            sp<EffectHalInterface> *effect);
-    status_t addEffectToHal(audio_port_handle_t deviceId, audio_module_handle_t hwModuleId,
-            sp<EffectHalInterface> effect) {
-        return mAudioFlinger.addEffectToHal(deviceId, hwModuleId, effect);
+    status_t addEffectToHal(const struct audio_port_config *device,
+            const sp<EffectHalInterface>& effect) {
+        return mAudioFlinger.addEffectToHal(device, effect);
     };
-    status_t removeEffectFromHal(audio_port_handle_t deviceId, audio_module_handle_t hwModuleId,
-            sp<EffectHalInterface> effect) {
-        return mAudioFlinger.removeEffectFromHal(deviceId, hwModuleId, effect);
+    status_t removeEffectFromHal(const struct audio_port_config *device,
+            const sp<EffectHalInterface>& effect) {
+        return mAudioFlinger.removeEffectFromHal(device, effect);
     };
 
     AudioFlinger& audioFlinger() const { return mAudioFlinger; }
@@ -74,7 +74,7 @@
 
 class DeviceEffectManagerCallback : public EffectCallbackInterface {
 public:
-    DeviceEffectManagerCallback(DeviceEffectManager& manager)
+    explicit DeviceEffectManagerCallback(DeviceEffectManager& manager)
         : mManager(manager) {}
 
     status_t createEffectHal(const effect_uuid_t *pEffectUuid,
@@ -105,10 +105,10 @@
     size_t    frameCount() const override  { return 0; }
     uint32_t  latency() const override  { return 0; }
 
-    status_t addEffectToHal(sp<EffectHalInterface> effect __unused) override {
+    status_t addEffectToHal(const sp<EffectHalInterface>& /* effect */) override {
         return NO_ERROR;
     }
-    status_t removeEffectFromHal(sp<EffectHalInterface> effect __unused) override {
+    status_t removeEffectFromHal(const sp<EffectHalInterface>& /* effect */) override {
         return NO_ERROR;
     }
 
@@ -132,13 +132,13 @@
 
     int newEffectId() { return mManager.audioFlinger().nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT); }
 
-    status_t addEffectToHal(audio_port_handle_t deviceId,
-            audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
-        return mManager.addEffectToHal(deviceId, hwModuleId, effect);
+    status_t addEffectToHal(const struct audio_port_config *device,
+            const sp<EffectHalInterface>& effect) {
+        return mManager.addEffectToHal(device, effect);
     }
-    status_t removeEffectFromHal(audio_port_handle_t deviceId,
-            audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
-        return mManager.removeEffectFromHal(deviceId, hwModuleId, effect);
+    status_t removeEffectFromHal(const struct audio_port_config *device,
+            const sp<EffectHalInterface>& effect) {
+        return mManager.removeEffectFromHal(device, effect);
     }
 private:
     DeviceEffectManager& mManager;
diff --git a/services/audioflinger/EffectConfiguration.h b/services/audioflinger/EffectConfiguration.h
new file mode 100644
index 0000000..2f07fa2
--- /dev/null
+++ b/services/audioflinger/EffectConfiguration.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <media/audiohal/EffectsFactoryHalInterface.h>
+
+namespace android::audioflinger {
+
+/**
+ * Effect Configuration abstraction and helper class.
+ */
+class EffectConfiguration {
+public:
+    static bool isHidl() {
+        static const bool isHidl = getAudioHalVersionInfo().isHidl();
+        return isHidl;
+    }
+
+    static const sp<EffectsFactoryHalInterface>& getEffectsFactoryHal() {
+        static const auto effectsFactoryHal = EffectsFactoryHalInterface::create();
+        return effectsFactoryHal;
+    }
+
+    static const detail::AudioHalVersionInfo& getAudioHalVersionInfo() {
+        static const auto audioHalVersionInfo = getEffectsFactoryHal() ?
+                getEffectsFactoryHal()->getHalVersion() : detail::AudioHalVersionInfo{
+                        detail::AudioHalVersionInfo::Type::HIDL, 0 /* major */, 0 /* minor */ };
+        return audioHalVersionInfo;
+    }
+};
+
+} // namespace android::audioflinger
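Note: EffectConfiguration caches its results in function-local statics, which C++ initializes exactly once, on first use, in a thread-safe way (guaranteed since C++11), so the effects factory HAL is only created when something actually asks for it. A minimal sketch of the idiom, with hypothetical names:

    #include <string>

    const std::string& getCachedName() {
        // The initializer runs once even under concurrent calls; later calls
        // return the same cached instance.
        static const std::string name = [] {
            return std::string("computed-once");  // stand-in for an expensive lookup
        }();
        return name;
    }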
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 84b9c40..822ea93 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -44,6 +44,7 @@
 #include <mediautils/TimeCheck.h>
 
 #include "AudioFlinger.h"
+#include "EffectConfiguration.h"
 
 // ----------------------------------------------------------------------------
 
@@ -65,6 +66,7 @@
 namespace android {
 
 using aidl_utils::statusTFromBinderStatus;
+using audioflinger::EffectConfiguration;
 using binder::Status;
 
 namespace {
@@ -498,6 +500,7 @@
 }
 
 void AudioFlinger::EffectBase::dump(int fd, const Vector<String16>& args __unused)
+NO_THREAD_SAFETY_ANALYSIS // conditional try lock
 {
     String8 result;
 
@@ -569,11 +572,8 @@
       mMaxDisableWaitCnt(1), // set by configure(), should be >= 1
       mDisableWaitCnt(0),    // set by process() and updateState()
       mOffloaded(false),
-      mAddedToHal(false),
       mIsOutput(false)
-#ifdef FLOAT_EFFECT_CHAIN
       , mSupportsFloat(false)
-#endif
 {
     ALOGV("Constructor %p pinned %d", this, pinned);
     int lStatus;
@@ -692,31 +692,16 @@
                             mConfig.inputCfg.buffer.frameCount,
                             mConfig.outputCfg.buffer.frameCount);
     const auto accumulateInputToOutput = [this, safeInputOutputSampleCount]() {
-#ifdef FLOAT_EFFECT_CHAIN
         accumulate_float(
                 mConfig.outputCfg.buffer.f32,
                 mConfig.inputCfg.buffer.f32,
                 safeInputOutputSampleCount);
-#else
-        accumulate_i16(
-                mConfig.outputCfg.buffer.s16,
-                mConfig.inputCfg.buffer.s16,
-                safeInputOutputSampleCount);
-#endif
     };
     const auto copyInputToOutput = [this, safeInputOutputSampleCount]() {
-#ifdef FLOAT_EFFECT_CHAIN
         memcpy(
                 mConfig.outputCfg.buffer.f32,
                 mConfig.inputCfg.buffer.f32,
                 safeInputOutputSampleCount * sizeof(*mConfig.outputCfg.buffer.f32));
-
-#else
-        memcpy(
-                mConfig.outputCfg.buffer.s16,
-                mConfig.inputCfg.buffer.s16,
-                safeInputOutputSampleCount * sizeof(*mConfig.outputCfg.buffer.s16));
-#endif
     };
 
     if (isProcessEnabled()) {
@@ -725,35 +710,14 @@
             if (auxType) {
                 // We overwrite the aux input buffer here and clear after processing.
                 // aux input is always mono.
-#ifdef FLOAT_EFFECT_CHAIN
-                if (mSupportsFloat) {
-#ifndef FLOAT_AUX
-                    // Do in-place float conversion for auxiliary effect input buffer.
-                    static_assert(sizeof(float) <= sizeof(int32_t),
-                            "in-place conversion requires sizeof(float) <= sizeof(int32_t)");
 
-                    memcpy_to_float_from_q4_27(
-                            mConfig.inputCfg.buffer.f32,
-                            mConfig.inputCfg.buffer.s32,
-                            mConfig.inputCfg.buffer.frameCount);
-#endif // !FLOAT_AUX
-                } else
-#endif // FLOAT_EFFECT_CHAIN
-                {
-#ifdef FLOAT_AUX
+                if (!mSupportsFloat) {
                     memcpy_to_i16_from_float(
                             mConfig.inputCfg.buffer.s16,
                             mConfig.inputCfg.buffer.f32,
                             mConfig.inputCfg.buffer.frameCount);
-#else
-                    memcpy_to_i16_from_q4_27(
-                            mConfig.inputCfg.buffer.s16,
-                            mConfig.inputCfg.buffer.s32,
-                            mConfig.inputCfg.buffer.frameCount);
-#endif
                 }
             }
-#ifdef FLOAT_EFFECT_CHAIN
             sp<EffectBufferHalInterface> inBuffer = mInBuffer;
             sp<EffectBufferHalInterface> outBuffer = mOutBuffer;
 
@@ -800,9 +764,7 @@
                     outBuffer = mOutConversionBuffer;
                 }
             }
-#endif
             ret = mEffectInterface->process();
-#ifdef FLOAT_EFFECT_CHAIN
             if (!mSupportsFloat) { // convert output int16_t back to float.
                 sp<EffectBufferHalInterface> target =
                         mOutChannelCountRequested != outChannelCount
@@ -819,11 +781,8 @@
                         sizeof(float),
                         sizeof(float) * outChannelCount * mConfig.outputCfg.buffer.frameCount);
             }
-#endif
         } else {
-#ifdef FLOAT_EFFECT_CHAIN
             data_bypass:
-#endif
             if (!auxType  /* aux effects do not require data bypass */
                     && mConfig.inputCfg.buffer.raw != mConfig.outputCfg.buffer.raw) {
                 if (mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
@@ -842,13 +801,8 @@
 
         // clear auxiliary effect input buffer for next accumulation
         if (auxType) {
-#ifdef FLOAT_AUX
             const size_t size =
                     mConfig.inputCfg.buffer.frameCount * inChannelCount * sizeof(float);
-#else
-            const size_t size =
-                    mConfig.inputCfg.buffer.frameCount * inChannelCount * sizeof(int32_t);
-#endif
             memset(mConfig.inputCfg.buffer.raw, 0, size);
         }
     } else if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_INSERT &&
@@ -903,23 +857,6 @@
             ALOGV("Overriding auxiliary effect input channels %#x as MONO",
                     mConfig.inputCfg.channels);
         }
-#ifndef MULTICHANNEL_EFFECT_CHAIN
-        if (mConfig.outputCfg.channels != AUDIO_CHANNEL_OUT_STEREO) {
-            mConfig.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
-            ALOGV("Overriding auxiliary effect output channels %#x as STEREO",
-                    mConfig.outputCfg.channels);
-        }
-#endif
-    } else {
-#ifndef MULTICHANNEL_EFFECT_CHAIN
-        // TODO: Update this logic when multichannel effects are implemented.
-        // For offloaded tracks consider mono output as stereo for proper effect initialization
-        if (channelMask == AUDIO_CHANNEL_OUT_MONO) {
-            mConfig.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
-            mConfig.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
-            ALOGV("Overriding effect input and output as STEREO");
-        }
-#endif
     }
     if (isHapticGenerator()) {
         audio_channel_mask_t hapticChannelMask = callback->hapticChannelMask();
@@ -931,8 +868,8 @@
     mOutChannelCountRequested =
             audio_channel_count_from_out_mask(mConfig.outputCfg.channels);
 
-    mConfig.inputCfg.format = EFFECT_BUFFER_FORMAT;
-    mConfig.outputCfg.format = EFFECT_BUFFER_FORMAT;
+    mConfig.inputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    mConfig.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
 
     // Don't use sample rate for thread if effect isn't offloadable.
     if (callback->isOffloadOrDirect() && !isOffloaded()) {
@@ -980,8 +917,8 @@
         status = cmdStatus;
     }
 
-#ifdef MULTICHANNEL_EFFECT_CHAIN
     if (status != NO_ERROR &&
+            EffectConfiguration::isHidl() && // only HIDL effects support channel conversion
             mIsOutput &&
             (mConfig.inputCfg.channels != AUDIO_CHANNEL_OUT_STEREO
                     || mConfig.outputCfg.channels != AUDIO_CHANNEL_OUT_STEREO)) {
@@ -1005,14 +942,13 @@
             status = cmdStatus;
         }
     }
-#endif
 
-#ifdef FLOAT_EFFECT_CHAIN
     if (status == NO_ERROR) {
         mSupportsFloat = true;
     }
 
-    if (status != NO_ERROR) {
+    // only HIDL effects support integer conversion.
+    if (status != NO_ERROR && EffectConfiguration::isHidl()) {
         ALOGV("EFFECT_CMD_SET_CONFIG failed with float format, retry with int16_t.");
         mConfig.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
         mConfig.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
@@ -1032,7 +968,6 @@
             ALOGE("%s failed %d with int16_t (as well as float)", __func__, status);
         }
     }
-#endif
 
     if (status == NO_ERROR) {
         // Establish Buffer strategy
@@ -1103,12 +1038,12 @@
 {
     if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC ||
          (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
-        if (mAddedToHal) {
+        if (mCurrentHalStream == getCallback()->io()) {
             return;
         }
 
         (void)getCallback()->addEffectToHal(mEffectInterface);
-        mAddedToHal = true;
+        mCurrentHalStream = getCallback()->io();
     }
 }
 
@@ -1204,12 +1139,11 @@
 {
     if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC ||
              (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
-        if (!mAddedToHal) {
-            return NO_ERROR;
+        if (mCurrentHalStream != getCallback()->io()) {
+            return (mCurrentHalStream == AUDIO_IO_HANDLE_NONE) ? NO_ERROR : INVALID_OPERATION;
         }
-
         getCallback()->removeEffectFromHal(mEffectInterface);
-        mAddedToHal = false;
+        mCurrentHalStream = AUDIO_IO_HANDLE_NONE;
     }
     return NO_ERROR;
 }
@@ -1249,13 +1183,13 @@
         return -EINVAL;
     }
     if (cmdCode == EFFECT_CMD_GET_PARAM &&
-            (maxReplySize < sizeof(effect_param_t) ||
+            (maxReplySize < static_cast<signed>(sizeof(effect_param_t)) ||
                    param->psize > maxReplySize - sizeof(effect_param_t))) {
         android_errorWriteLog(0x534e4554, "29251553");
         return -EINVAL;
     }
     if (cmdCode == EFFECT_CMD_GET_PARAM &&
-            (sizeof(effect_param_t) > maxReplySize
+            (static_cast<signed>(sizeof(effect_param_t)) > maxReplySize
                     || param->psize > maxReplySize - sizeof(effect_param_t)
                     || param->vsize > maxReplySize - sizeof(effect_param_t)
                             - param->psize
@@ -1346,7 +1280,6 @@
     mInBuffer = buffer;
     mEffectInterface->setInBuffer(buffer);
 
-#ifdef FLOAT_EFFECT_CHAIN
     // aux effects do in place conversion to float - we don't allocate mInConversionBuffer.
     // Theoretically insert effects can also do in-place conversions (destroying
     // the original buffer) when the output buffer is identical to the input buffer,
@@ -1378,7 +1311,6 @@
             ALOGE("%s cannot create mInConversionBuffer", __func__);
         }
     }
-#endif
 }
 
 void AudioFlinger::EffectModule::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
@@ -1394,7 +1326,6 @@
     mOutBuffer = buffer;
     mEffectInterface->setOutBuffer(buffer);
 
-#ifdef FLOAT_EFFECT_CHAIN
     // Note: Any effect that does not accumulate does not need mOutConversionBuffer and
     // can do in-place conversion from int16_t to float.  We don't optimize here.
     const uint32_t outChannelCount =
@@ -1422,7 +1353,6 @@
             ALOGE("%s cannot create mOutConversionBuffer", __func__);
         }
     }
-#endif
 }
 
 status_t AudioFlinger::EffectModule::setVolume(uint32_t *left, uint32_t *right, bool controller)
@@ -1685,6 +1615,7 @@
 }
 
 void AudioFlinger::EffectModule::dump(int fd, const Vector<String16>& args)
+NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
     EffectBase::dump(fd, args);
 
@@ -1717,15 +1648,12 @@
             mConfig.outputCfg.format,
             formatToString((audio_format_t)mConfig.outputCfg.format).c_str());
 
-#ifdef FLOAT_EFFECT_CHAIN
-
     result.appendFormat("\t\t- HAL buffers:\n"
             "\t\t\tIn(%s) InConversion(%s) Out(%s) OutConversion(%s)\n",
             dumpInOutBuffer(true /* isInput */, mInBuffer).c_str(),
             dumpInOutBuffer(true /* isInput */, mInConversionBuffer).c_str(),
             dumpInOutBuffer(false /* isInput */, mOutBuffer).c_str(),
             dumpInOutBuffer(false /* isInput */, mOutConversionBuffer).c_str());
-#endif
 
     write(fd, result.string(), result.length());
 
@@ -1946,7 +1874,7 @@
         }
         mCblkMemory.clear();    // free the shared memory before releasing the heap it belongs to
         // Client destructor must run with AudioFlinger client mutex locked
-        Mutex::Autolock _l(mClient->audioFlinger()->mClientLock);
+        Mutex::Autolock _l2(mClient->audioFlinger()->mClientLock);
         mClient.clear();
     }
 }
@@ -2010,14 +1938,14 @@
     }
 
     if (cmdCode == EFFECT_CMD_ENABLE) {
-        if (maxResponseSize < sizeof(int)) {
+        if (maxResponseSize < static_cast<signed>(sizeof(int))) {
             android_errorWriteLog(0x534e4554, "32095713");
             RETURN(BAD_VALUE);
         }
         writeToBuffer(NO_ERROR, response);
         return enable(_aidl_return);
     } else if (cmdCode == EFFECT_CMD_DISABLE) {
-        if (maxResponseSize < sizeof(int)) {
+        if (maxResponseSize < static_cast<signed>(sizeof(int))) {
             android_errorWriteLog(0x534e4554, "32095713");
             RETURN(BAD_VALUE);
         }
@@ -2041,7 +1969,7 @@
             RETURN(INVALID_OPERATION);
         }
 
-        if (maxResponseSize < sizeof(int)) {
+        if (maxResponseSize < static_cast<signed>(sizeof(int))) {
             android_errorWriteLog(0x534e4554, "32095713");
             RETURN(BAD_VALUE);
         }
@@ -2050,7 +1978,7 @@
         // No need to trylock() here as this function is executed in the binder thread serving a
         // particular client process:  no risk to block the whole media server process or mixer
         // threads if we are stuck here
-        Mutex::Autolock _l(mCblk->lock);
+        Mutex::Autolock _l2(mCblk->lock);
         // keep local copy of index in case of client corruption b/32220769
         const uint32_t clientIndex = mCblk->clientIndex;
         const uint32_t serverIndex = mCblk->serverIndex;
@@ -2153,6 +2081,7 @@
 }
 
 void AudioFlinger::EffectHandle::dumpToBuffer(char* buffer, size_t size)
+NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
     bool locked = mCblk != NULL && AudioFlinger::dumpTryLock(mCblk->lock);
 
@@ -2257,7 +2186,7 @@
     if (mInBuffer == NULL) {
         return;
     }
-    const size_t frameSize = audio_bytes_per_sample(EFFECT_BUFFER_FORMAT)
+    const size_t frameSize = audio_bytes_per_sample(AUDIO_FORMAT_PCM_FLOAT)
             * mEffectCallback->inChannelCount(mEffects[0]->id());
 
     memset(mInBuffer->audioBuffer()->raw, 0, mEffectCallback->frameCount() * frameSize);
@@ -2358,13 +2287,9 @@
         // calling the process in effect engine
         size_t numSamples = mEffectCallback->frameCount();
         sp<EffectBufferHalInterface> halBuffer;
-#ifdef FLOAT_EFFECT_CHAIN
+
         status_t result = mEffectCallback->allocateHalBuffer(
                 numSamples * sizeof(float), &halBuffer);
-#else
-        status_t result = mEffectCallback->allocateHalBuffer(
-                numSamples * sizeof(int32_t), &halBuffer);
-#endif
         if (result != OK) return result;
 
         effect->configure();
@@ -2407,7 +2332,7 @@
             }
         } else {
             effect->setInBuffer(mInBuffer);
-            if (idx_insert == previousSize) {
+            if (idx_insert == static_cast<ssize_t>(previousSize)) {
                 if (idx_insert != 0) {
                     mEffects[idx_insert-1]->configure();
                     mEffects[idx_insert-1]->setOutBuffer(mInBuffer);
@@ -2467,7 +2392,7 @@
             }
             // remember position of first insert effect and by default
             // select this as insert position for new effect
-            if (idx_insert == size) {
+            if (idx_insert == static_cast<ssize_t>(size)) {
                 idx_insert = i;
             }
             // remember position of last insert effect claiming
@@ -2531,7 +2456,8 @@
 
             // make sure the input buffer configuration for the new first effect in the chain
             // is updated if needed (can switch from HAL channel mask to mixer channel mask)
-            if (i == 0 && size > 1) {
+            if (type != EFFECT_FLAG_TYPE_AUXILIARY // TODO(b/284522658) breaks for aux FX, why?
+                    && i == 0 && size > 1) {
                 mEffects[0]->configure();
                 mEffects[0]->setInBuffer(mInBuffer);
                 mEffects[0]->updateAccessMode();      // reconfig if needed.
@@ -2697,6 +2623,7 @@
 }
 
 void AudioFlinger::EffectChain::dump(int fd, const Vector<String16>& args)
+NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
     String8 result;
 
@@ -3030,7 +2957,8 @@
         const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
         sp<EffectHalInterface> *effect) {
     status_t status = NO_INIT;
-    sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+    const sp<EffectsFactoryHalInterface> effectsFactory =
+            EffectConfiguration::getEffectsFactoryHal();
     if (effectsFactory != 0) {
         status = effectsFactory->createEffect(pEffectUuid, sessionId, io(), deviceId, effect);
     }
@@ -3049,7 +2977,7 @@
 }
 
 status_t AudioFlinger::EffectChain::EffectCallback::addEffectToHal(
-        sp<EffectHalInterface> effect) {
+        const sp<EffectHalInterface>& effect) {
     status_t result = NO_INIT;
     sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
@@ -3065,7 +2993,7 @@
 }
 
 status_t AudioFlinger::EffectChain::EffectCallback::removeEffectFromHal(
-        sp<EffectHalInterface> effect) {
+        const sp<EffectHalInterface>& effect) {
     status_t result = NO_INIT;
     sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
@@ -3207,15 +3135,20 @@
     return t->frameCount();
 }
 
-uint32_t AudioFlinger::EffectChain::EffectCallback::latency() const {
+uint32_t AudioFlinger::EffectChain::EffectCallback::latency() const
+NO_THREAD_SAFETY_ANALYSIS  // latency_l() access
+{
     sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return 0;
     }
+    // TODO(b/275956781) - this requires the thread lock.
     return t->latency_l();
 }
 
-void AudioFlinger::EffectChain::EffectCallback::setVolumeForOutput(float left, float right) const {
+void AudioFlinger::EffectChain::EffectCallback::setVolumeForOutput(float left, float right) const
+NO_THREAD_SAFETY_ANALYSIS  // setVolumeForOutput_l() access
+{
     sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return;
@@ -3368,8 +3301,18 @@
     ALOGV("%s type %d device type %d address %s device ID %d patch.isSoftware() %d",
             __func__, port->type, port->ext.device.type,
             port->ext.device.address, port->id, patch.isSoftware());
-    if (port->type != AUDIO_PORT_TYPE_DEVICE || port->ext.device.type != mDevice.mType
-        || port->ext.device.address != mDevice.address()) {
+    if (port->type != AUDIO_PORT_TYPE_DEVICE || port->ext.device.type != mDevice.mType ||
+        port->ext.device.address != mDevice.address()) {
+        return NAME_NOT_FOUND;
+    }
+    if (((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) &&
+        (audio_port_config_has_input_direction(port))) {
+        ALOGI("%s don't create postprocessing effect on record port", __func__);
+        return NAME_NOT_FOUND;
+    }
+    if (((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC) &&
+        (!audio_port_config_has_input_direction(port))) {
+        ALOGI("%s don't create preprocessing effect on playback port", __func__);
         return NAME_NOT_FOUND;
     }
     status_t status = NAME_NOT_FOUND;
@@ -3421,6 +3364,7 @@
     } else {
         status = BAD_VALUE;
     }
+
     if (status == NO_ERROR || status == ALREADY_EXISTS) {
         Status bs;
         if (isEnabled()) {
@@ -3459,21 +3403,19 @@
 }
 
 status_t AudioFlinger::DeviceEffectProxy::addEffectToHal(
-    sp<EffectHalInterface> effect) {
+        const sp<EffectHalInterface>& effect) {
     if (mHalEffect == nullptr) {
         return NO_INIT;
     }
-    return mManagerCallback->addEffectToHal(
-            mDevicePort.id, mDevicePort.ext.device.hw_module, effect);
+    return mManagerCallback->addEffectToHal(&mDevicePort, effect);
 }
 
 status_t AudioFlinger::DeviceEffectProxy::removeEffectFromHal(
-    sp<EffectHalInterface> effect) {
+        const sp<EffectHalInterface>& effect) {
     if (mHalEffect == nullptr) {
         return NO_INIT;
     }
-    return mManagerCallback->removeEffectFromHal(
-            mDevicePort.id, mDevicePort.ext.device.hw_module, effect);
+    return mManagerCallback->removeEffectFromHal(&mDevicePort, effect);
 }
 
 bool AudioFlinger::DeviceEffectProxy::isOutput() const {
@@ -3506,7 +3448,9 @@
     return audio_channel_count_from_in_mask(channelMask());
 }
 
-void AudioFlinger::DeviceEffectProxy::dump(int fd, int spaces) {
+void AudioFlinger::DeviceEffectProxy::dump(int fd, int spaces)
+NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
+{
     const Vector<String16> args;
     EffectBase::dump(fd, args);
 
@@ -3585,7 +3529,7 @@
 }
 
 status_t AudioFlinger::DeviceEffectProxy::ProxyCallback::addEffectToHal(
-        sp<EffectHalInterface> effect) {
+        const sp<EffectHalInterface>& effect) {
     sp<DeviceEffectProxy> proxy = mProxy.promote();
     if (proxy == nullptr) {
         return NO_INIT;
@@ -3594,7 +3538,7 @@
 }
 
 status_t AudioFlinger::DeviceEffectProxy::ProxyCallback::removeEffectFromHal(
-        sp<EffectHalInterface> effect) {
+        const sp<EffectHalInterface>& effect) {
     sp<DeviceEffectProxy> proxy = mProxy.promote();
     if (proxy == nullptr) {
         return NO_INIT;
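
A minimal sketch (not part of this change) of the per-stream bookkeeping introduced in Effects.cpp above: the boolean mAddedToHal flag becomes the I/O handle of the HAL stream the pre/post-processing effect is currently attached to, which makes attach idempotent per stream and rejects a detach from a different stream. The type and function names below are simplified stand-ins, not the AudioFlinger API.

    #include <cerrno>

    using audio_io_handle_t = int;
    constexpr audio_io_handle_t AUDIO_IO_HANDLE_NONE = 0;

    struct PrePostEffect {
        audio_io_handle_t mCurrentHalStream = AUDIO_IO_HANDLE_NONE;

        void addToHal(audio_io_handle_t io) {
            if (mCurrentHalStream == io) return;   // already attached to this stream
            // ... call into the HAL stream to add the effect ...
            mCurrentHalStream = io;
        }

        int removeFromHal(audio_io_handle_t io) {
            if (mCurrentHalStream != io) {
                // never attached: no-op; attached to another stream: refuse
                return mCurrentHalStream == AUDIO_IO_HANDLE_NONE ? 0 : -EINVAL;
            }
            // ... call into the HAL stream to remove the effect ...
            mCurrentHalStream = AUDIO_IO_HANDLE_NONE;
            return 0;
        }
    };
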
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 7b71a85..57acc67 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -45,8 +45,8 @@
     // Non trivial methods usually implemented with help from ThreadBase:
     //   pay attention to mutex locking order
     virtual uint32_t latency() const { return 0; }
-    virtual status_t addEffectToHal(sp<EffectHalInterface> effect) = 0;
-    virtual status_t removeEffectFromHal(sp<EffectHalInterface> effect) = 0;
+    virtual status_t addEffectToHal(const sp<EffectHalInterface>& effect) = 0;
+    virtual status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) = 0;
     virtual void setVolumeForOutput(float left, float right) const = 0;
     virtual bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) = 0;
     virtual void checkSuspendOnEffectEnabled(const sp<EffectBase>& effect,
@@ -159,8 +159,8 @@
     bool             isPinned() const { return mPinned; }
     void             unPin() { mPinned = false; }
 
-    void             lock() { mLock.lock(); }
-    void             unlock() { mLock.unlock(); }
+    void             lock() ACQUIRE(mLock) { mLock.lock(); }
+    void             unlock() RELEASE(mLock) { mLock.unlock(); }
 
     status_t         updatePolicyState();
 
@@ -319,16 +319,15 @@
                                     // sending disable command.
     uint32_t mDisableWaitCnt;       // current process() calls count during disable period.
     bool     mOffloaded;            // effect is currently offloaded to the audio DSP
-    bool     mAddedToHal;           // effect has been added to the audio HAL
+    // effect has been added to this HAL input stream
+    audio_io_handle_t mCurrentHalStream = AUDIO_IO_HANDLE_NONE;
     bool     mIsOutput;             // direction of the AF thread
 
-#ifdef FLOAT_EFFECT_CHAIN
     bool    mSupportsFloat;         // effect supports float processing
     sp<EffectBufferHalInterface> mInConversionBuffer;  // Buffers for HAL conversion if needed.
     sp<EffectBufferHalInterface> mOutConversionBuffer;
     uint32_t mInChannelCountRequested;
     uint32_t mOutChannelCountRequested;
-#endif
 
     class AutoLockReentrant {
     public:
@@ -459,10 +458,10 @@
 
     void process_l();
 
-    void lock() {
+    void lock() ACQUIRE(mLock) {
         mLock.lock();
     }
-    void unlock() {
+    void unlock() RELEASE(mLock) {
         mLock.unlock();
     }
 
@@ -493,14 +492,14 @@
     void setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
         mInBuffer = buffer;
     }
-    effect_buffer_t *inBuffer() const {
-        return mInBuffer != 0 ? reinterpret_cast<effect_buffer_t*>(mInBuffer->ptr()) : NULL;
+    float *inBuffer() const {
+        return mInBuffer != 0 ? reinterpret_cast<float*>(mInBuffer->ptr()) : NULL;
     }
     void setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
         mOutBuffer = buffer;
     }
-    effect_buffer_t *outBuffer() const {
-        return mOutBuffer != 0 ? reinterpret_cast<effect_buffer_t*>(mOutBuffer->ptr()) : NULL;
+    float *outBuffer() const {
+        return mOutBuffer != 0 ? reinterpret_cast<float*>(mOutBuffer->ptr()) : NULL;
     }
 
     void incTrackCnt() { android_atomic_inc(&mTrackCnt); }
@@ -608,8 +607,8 @@
         size_t frameCount() const override;
         uint32_t latency() const override;
 
-        status_t addEffectToHal(sp<EffectHalInterface> effect) override;
-        status_t removeEffectFromHal(sp<EffectHalInterface> effect) override;
+        status_t addEffectToHal(const sp<EffectHalInterface>& effect) override;
+        status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) override;
         bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) override;
         void setVolumeForOutput(float left, float right) const override;
 
@@ -724,8 +723,8 @@
 
     size_t removeEffect(const sp<EffectModule>& effect);
 
-    status_t addEffectToHal(sp<EffectHalInterface> effect);
-    status_t removeEffectFromHal(sp<EffectHalInterface> effect);
+    status_t addEffectToHal(const sp<EffectHalInterface>& effect);
+    status_t removeEffectFromHal(const sp<EffectHalInterface>& effect);
 
     const AudioDeviceTypeAddr& device() { return mDevice; };
     bool isOutput() const;
@@ -769,8 +768,8 @@
         size_t frameCount() const override  { return 0; }
         uint32_t latency() const override  { return 0; }
 
-        status_t addEffectToHal(sp<EffectHalInterface> effect) override;
-        status_t removeEffectFromHal(sp<EffectHalInterface> effect) override;
+        status_t addEffectToHal(const sp<EffectHalInterface>& effect) override;
+        status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) override;
 
         bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) override;
         void setVolumeForOutput(float left __unused, float right __unused) const override {}
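
As context for the ACQUIRE/RELEASE/REQUIRES annotations added to Effects.h and MelReporter.h, here is a minimal, self-contained sketch of clang's thread-safety attributes; in the tree these macros come from a shared header, and the classes below are illustrative only, not AudioFlinger types.

    #include <mutex>

    #define CAPABILITY(x)  __attribute__((capability(x)))
    #define ACQUIRE(...)   __attribute__((acquire_capability(__VA_ARGS__)))
    #define RELEASE(...)   __attribute__((release_capability(__VA_ARGS__)))
    #define REQUIRES(...)  __attribute__((requires_capability(__VA_ARGS__)))
    #define GUARDED_BY(x)  __attribute__((guarded_by(x)))

    class CAPABILITY("mutex") Mutex {
    public:
        void lock() ACQUIRE() { mStd.lock(); }
        void unlock() RELEASE() { mStd.unlock(); }
    private:
        std::mutex mStd;
    };

    class Chain {
    public:
        void lock() ACQUIRE(mLock) { mLock.lock(); }      // analysis records mLock as held
        void unlock() RELEASE(mLock) { mLock.unlock(); }
        void process_l() REQUIRES(mLock) { ++mFrames; }   // caller must hold mLock
    private:
        Mutex mLock;
        int mFrames GUARDED_BY(mLock) = 0;
    };

Building with clang's -Wthread-safety is what makes these annotations report misuse; NO_THREAD_SAFETY_ANALYSIS opts a function out where the locking is too dynamic for the analysis, as several dump() methods above do.
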
diff --git a/services/audioflinger/FastCapture.h b/services/audioflinger/FastCapture.h
deleted file mode 100644
index c3817c0..0000000
--- a/services/audioflinger/FastCapture.h
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_AUDIO_FAST_CAPTURE_H
-#define ANDROID_AUDIO_FAST_CAPTURE_H
-
-#include "FastThread.h"
-#include "StateQueue.h"
-#include "FastCaptureState.h"
-#include "FastCaptureDumpState.h"
-
-namespace android {
-
-typedef StateQueue<FastCaptureState> FastCaptureStateQueue;
-
-class FastCapture : public FastThread {
-
-public:
-            FastCapture();
-    virtual ~FastCapture();
-
-            FastCaptureStateQueue*  sq();
-
-private:
-            FastCaptureStateQueue   mSQ;
-
-    // callouts
-    virtual const FastThreadState *poll();
-    virtual void setNBLogWriter(NBLog::Writer *logWriter);
-    virtual void onIdle();
-    virtual void onExit();
-    virtual bool isSubClassCommand(FastThreadState::Command command);
-    virtual void onStateChange();
-    virtual void onWork();
-
-    static const FastCaptureState sInitial;
-
-    FastCaptureState    mPreIdle;   // copy of state before we went into idle
-    // FIXME by renaming, could pull up many of these to FastThread
-    NBAIO_Source*       mInputSource;
-    int                 mInputSourceGen;
-    NBAIO_Sink*         mPipeSink;
-    int                 mPipeSinkGen;
-    void*               mReadBuffer;
-    ssize_t             mReadBufferState;   // number of initialized frames in readBuffer,
-                                            // or -1 to clear
-    NBAIO_Format        mFormat;
-    unsigned            mSampleRate;
-    FastCaptureDumpState mDummyFastCaptureDumpState;
-    uint32_t            mTotalNativeFramesRead; // copied to dumpState->mFramesRead
-
-};  // class FastCapture
-
-}   // namespace android
-
-#endif  // ANDROID_AUDIO_FAST_CAPTURE_H
diff --git a/services/audioflinger/FastThread.h b/services/audioflinger/FastThread.h
deleted file mode 100644
index 3f6b206..0000000
--- a/services/audioflinger/FastThread.h
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_AUDIO_FAST_THREAD_H
-#define ANDROID_AUDIO_FAST_THREAD_H
-
-#include "Configuration.h"
-#ifdef CPU_FREQUENCY_STATISTICS
-#include <cpustats/ThreadCpuUsage.h>
-#endif
-#include <utils/Thread.h>
-#include "FastThreadState.h"
-
-namespace android {
-
-// FastThread is the common abstract base class of FastMixer and FastCapture
-class FastThread : public Thread {
-
-public:
-            FastThread(const char *cycleMs, const char *loadUs);
-    virtual ~FastThread();
-
-private:
-    // implement Thread::threadLoop()
-    virtual bool threadLoop();
-
-protected:
-    // callouts to subclass in same lexical order as they were in original FastMixer.cpp
-    // FIXME need comments
-    virtual const FastThreadState *poll() = 0;
-    virtual void setNBLogWriter(NBLog::Writer *logWriter __unused) { }
-    virtual void onIdle() = 0;
-    virtual void onExit() = 0;
-    virtual bool isSubClassCommand(FastThreadState::Command command) = 0;
-    virtual void onStateChange() = 0;
-    virtual void onWork() = 0;
-
-    // FIXME these former local variables need comments
-    const FastThreadState*  mPrevious;
-    const FastThreadState*  mCurrent;
-    struct timespec mOldTs;
-    bool            mOldTsValid;
-    long            mSleepNs;       // -1: busy wait, 0: sched_yield, > 0: nanosleep
-    long            mPeriodNs;      // expected period; the time required to render one mix buffer
-    long            mUnderrunNs;    // underrun likely when write cycle is greater than this value
-    long            mOverrunNs;     // overrun likely when write cycle is less than this value
-    long            mForceNs;       // if overrun detected,
-                                    // force the write cycle to take this much time
-    long            mWarmupNsMin;   // warmup complete when write cycle is greater than or equal to
-                                    // this value
-    long            mWarmupNsMax;   // and less than or equal to this value
-    FastThreadDumpState* mDummyDumpState;
-    FastThreadDumpState* mDumpState;
-    bool            mIgnoreNextOverrun;     // used to ignore initial overrun and first after an
-                                            // underrun
-#ifdef FAST_THREAD_STATISTICS
-    struct timespec mOldLoad;       // previous value of clock_gettime(CLOCK_THREAD_CPUTIME_ID)
-    bool            mOldLoadValid;  // whether oldLoad is valid
-    uint32_t        mBounds;
-    bool            mFull;          // whether we have collected at least mSamplingN samples
-#ifdef CPU_FREQUENCY_STATISTICS
-    ThreadCpuUsage  mTcu;           // for reading the current CPU clock frequency in kHz
-#endif
-#endif
-    unsigned        mColdGen;       // last observed mColdGen
-    bool            mIsWarm;        // true means ready to mix,
-                                    // false means wait for warmup before mixing
-    struct timespec   mMeasuredWarmupTs;  // how long did it take for warmup to complete
-    uint32_t          mWarmupCycles;  // counter of number of loop cycles during warmup phase
-    uint32_t          mWarmupConsecutiveInRangeCycles;    // number of consecutive cycles in range
-    const sp<NBLog::Writer> mDummyNBLogWriter{new NBLog::Writer()};
-    status_t          mTimestampStatus;
-
-    FastThreadState::Command mCommand;
-    bool            mAttemptedWrite;
-
-    char            mCycleMs[16];   // cycle_ms + suffix
-    char            mLoadUs[16];    // load_us + suffix
-
-};  // class FastThread
-
-}   // android
-
-#endif  // ANDROID_AUDIO_FAST_THREAD_H
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
index c5c9652..39f772b 100644
--- a/services/audioflinger/MelReporter.cpp
+++ b/services/audioflinger/MelReporter.cpp
@@ -81,7 +81,7 @@
 }
 
 bool AudioFlinger::MelReporter::shouldComputeMelForDeviceType(audio_devices_t device) {
-    if (mSoundDoseManager->isCsdDisabled()) {
+    if (!mSoundDoseManager->isCsdEnabled()) {
         ALOGV("%s csd is disabled", __func__);
         return false;
     }
@@ -92,9 +92,9 @@
     switch (device) {
         case AUDIO_DEVICE_OUT_WIRED_HEADSET:
         case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
-        case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
+        // TODO(b/278265907): enable A2DP when we can distinguish A2DP headsets
+        // case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
         case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
-        case AUDIO_DEVICE_OUT_HEARING_AID:
         case AUDIO_DEVICE_OUT_USB_HEADSET:
         case AUDIO_DEVICE_OUT_BLE_HEADSET:
         case AUDIO_DEVICE_OUT_BLE_BROADCAST:
@@ -106,7 +106,7 @@
 
 void AudioFlinger::MelReporter::updateMetadataForCsd(audio_io_handle_t streamHandle,
         const std::vector<playback_track_metadata_v7_t>& metadataVec) {
-    if (mSoundDoseManager->isCsdDisabled()) {
+    if (!mSoundDoseManager->isCsdEnabled()) {
         ALOGV("%s csd is disabled", __func__);
         return;
     }
@@ -142,14 +142,10 @@
 
 void AudioFlinger::MelReporter::onCreateAudioPatch(audio_patch_handle_t handle,
         const PatchPanel::Patch& patch) {
-    if (mSoundDoseManager->isCsdDisabled()) {
+    if (!mSoundDoseManager->isCsdEnabled()) {
         ALOGV("%s csd is disabled", __func__);
         return;
     }
-    if (useHalSoundDoseInterface()) {
-        ALOGV("%s using HAL sound dose, ignore new patch", __func__);
-        return;
-    }
 
     ALOGV("%s: handle %d mHalHandle %d device sink %08x",
             __func__, handle, patch.mHalHandle,
@@ -163,7 +159,7 @@
     audio_io_handle_t streamHandle = patch.mAudioPatch.sources[0].ext.mix.handle;
     ActiveMelPatch newPatch;
     newPatch.streamHandle = streamHandle;
-    for (int i = 0; i < patch.mAudioPatch.num_sinks; ++ i) {
+    for (size_t i = 0; i < patch.mAudioPatch.num_sinks; ++i) {
         if (patch.mAudioPatch.sinks[i].type == AUDIO_PORT_TYPE_DEVICE
             && shouldComputeMelForDeviceType(patch.mAudioPatch.sinks[i].ext.device.type)) {
             audio_port_handle_t deviceId = patch.mAudioPatch.sinks[i].id;
@@ -174,15 +170,19 @@
         }
     }
 
-    std::lock_guard _afl(mAudioFlinger.mLock);
-    std::lock_guard _l(mLock);
-    ALOGV("%s add patch handle %d to active devices", __func__, handle);
-    startMelComputationForActivePatch_l(newPatch);
-    newPatch.csdActive = true;
-    mActiveMelPatches[handle] = newPatch;
+    if (!newPatch.deviceHandles.empty()) {
+        std::lock_guard _afl(mAudioFlinger.mLock);
+        std::lock_guard _l(mLock);
+        ALOGV("%s add patch handle %d to active devices", __func__, handle);
+        startMelComputationForActivePatch_l(newPatch);
+        newPatch.csdActive = true;
+        mActiveMelPatches[handle] = newPatch;
+    }
 }
 
-void AudioFlinger::MelReporter::startMelComputationForActivePatch_l(const ActiveMelPatch& patch) {
+void AudioFlinger::MelReporter::startMelComputationForActivePatch_l(const ActiveMelPatch& patch)
+NO_THREAD_SAFETY_ANALYSIS  // access of AudioFlinger::checkOutputThread_l
+{
     auto outputThread = mAudioFlinger.checkOutputThread_l(patch.streamHandle);
     if (outputThread == nullptr) {
         ALOGE("%s cannot find thread for stream handle %d", __func__, patch.streamHandle);
@@ -194,7 +194,7 @@
         ALOGI("%s add stream %d that uses device %d for CSD, nr of streams: %d", __func__,
               patch.streamHandle, deviceHandle, mActiveDevices[deviceHandle]);
 
-        if (outputThread != nullptr) {
+        if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
             outputThread->startMelComputation_l(mSoundDoseManager->getOrCreateProcessorForDevice(
                 deviceHandle,
                 patch.streamHandle,
@@ -206,7 +206,7 @@
 }
 
 void AudioFlinger::MelReporter::onReleaseAudioPatch(audio_patch_handle_t handle) {
-    if (mSoundDoseManager->isCsdDisabled()) {
+    if (!mSoundDoseManager->isCsdEnabled()) {
         ALOGV("%s csd is disabled", __func__);
         return;
     }
@@ -244,13 +244,15 @@
     mUseHalSoundDoseInterface = true;
 }
 
-void AudioFlinger::MelReporter::stopMelComputationForPatch_l(const ActiveMelPatch& patch) {
+void AudioFlinger::MelReporter::stopMelComputationForPatch_l(const ActiveMelPatch& patch)
+NO_THREAD_SAFETY_ANALYSIS  // access of AudioFlinger::checkOutputThread_l
+{
     if (!patch.csdActive) {
         // no need to stop CSD inactive patches
         return;
     }
 
-   auto outputThread = mAudioFlinger.checkOutputThread_l(patch.streamHandle);
+    auto outputThread = mAudioFlinger.checkOutputThread_l(patch.streamHandle);
 
     ALOGV("%s: stop MEL for stream id: %d", __func__, patch.streamHandle);
     for (const auto& deviceId : patch.deviceHandles) {
@@ -264,8 +266,7 @@
         }
     }
 
-    mSoundDoseManager->removeStreamProcessor(patch.streamHandle);
-    if (outputThread != nullptr) {
+    if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
         outputThread->stopMelComputation_l();
     }
 }
@@ -281,13 +282,8 @@
     return std::nullopt;
 }
 
-bool AudioFlinger::MelReporter::useHalSoundDoseInterface() {
-    bool useHalSoundDoseInterface = !mSoundDoseManager->forceUseFrameworkMel();
-    {
-        std::lock_guard _l(mLock);
-        useHalSoundDoseInterface &= mUseHalSoundDoseInterface;
-    }
-    return useHalSoundDoseInterface;
+bool AudioFlinger::MelReporter::useHalSoundDoseInterface_l() {
+    return !mSoundDoseManager->forceUseFrameworkMel() && mUseHalSoundDoseInterface;
 }
 
 std::string AudioFlinger::MelReporter::dump() {
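
A hedged sketch of the new onCreateAudioPatch() flow above: the locks are taken and an ActiveMelPatch is registered only when at least one sink device passes the CSD device filter. Names and types here are simplified placeholders rather than the MelReporter API.

    #include <map>
    #include <mutex>
    #include <vector>

    struct ActiveMelPatch {
        int streamHandle = 0;
        std::vector<int> deviceHandles;   // sinks eligible for MEL computation
        bool csdActive = false;
    };

    void onCreateAudioPatch(int handle, int streamHandle,
                            const std::vector<int>& eligibleSinkIds,
                            std::map<int, ActiveMelPatch>& activePatches,
                            std::mutex& lock) {
        ActiveMelPatch newPatch;
        newPatch.streamHandle = streamHandle;
        newPatch.deviceHandles = eligibleSinkIds;   // already filtered by device type
        if (newPatch.deviceHandles.empty()) {
            return;   // nothing to monitor for sound dose, skip all bookkeeping
        }
        std::lock_guard _l(lock);
        // start MEL computation for the patch here, then record it as active
        newPatch.csdActive = true;
        activePatches[handle] = newPatch;
    }
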
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
index c1b291f..2bc33f2 100644
--- a/services/audioflinger/MelReporter.h
+++ b/services/audioflinger/MelReporter.h
@@ -90,14 +90,15 @@
     void stopInternalMelComputation();
 
     /** Should be called with the following order of locks: mAudioFlinger.mLock -> mLock. */
-    void stopMelComputationForPatch_l(const ActiveMelPatch& patch);
+    void stopMelComputationForPatch_l(const ActiveMelPatch& patch) REQUIRES(mLock);
 
     /** Should be called with the following order of locks: mAudioFlinger.mLock -> mLock. */
-    void startMelComputationForActivePatch_l(const ActiveMelPatch& patch);
+    void startMelComputationForActivePatch_l(const ActiveMelPatch& patch) REQUIRES(mLock);
 
-    std::optional<audio_patch_handle_t> activePatchStreamHandle_l(audio_io_handle_t streamHandle);
+    std::optional<audio_patch_handle_t>
+    activePatchStreamHandle_l(audio_io_handle_t streamHandle) REQUIRES(mLock);
 
-    bool useHalSoundDoseInterface();
+    bool useHalSoundDoseInterface_l() REQUIRES(mLock);
 
     AudioFlinger& mAudioFlinger;  // does not own the object
 
diff --git a/services/audioflinger/PatchCommandThread.cpp b/services/audioflinger/PatchCommandThread.cpp
index c3cb7e7..f4aab1f 100644
--- a/services/audioflinger/PatchCommandThread.cpp
+++ b/services/audioflinger/PatchCommandThread.cpp
@@ -56,7 +56,9 @@
     releaseAudioPatchCommand(handle);
 }
 
-bool AudioFlinger::PatchCommandThread::threadLoop() {
+bool AudioFlinger::PatchCommandThread::threadLoop()
+NO_THREAD_SAFETY_ANALYSIS  // bug in clang compiler.
+{
     std::unique_lock _l(mLock);
 
     while (!exitPending()) {
diff --git a/services/audioflinger/PatchCommandThread.h b/services/audioflinger/PatchCommandThread.h
index b7853f0..b52e0a9 100644
--- a/services/audioflinger/PatchCommandThread.h
+++ b/services/audioflinger/PatchCommandThread.h
@@ -84,7 +84,7 @@
 
     class ReleaseAudioPatchData : public CommandData {
     public:
-        ReleaseAudioPatchData(audio_patch_handle_t handle)
+        explicit ReleaseAudioPatchData(audio_patch_handle_t handle)
             :   mHandle(handle) {}
 
         audio_patch_handle_t mHandle;
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 3b428bb..d0feba5 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -135,6 +135,10 @@
 status_t AudioFlinger::PatchPanel::createAudioPatch(const struct audio_patch *patch,
                                    audio_patch_handle_t *handle,
                                    bool endpointPatch)
+ // Unlocks AudioFlinger::mLock when calling ThreadBase::sendCreateAudioPatchConfigEvent
+ // to avoid deadlocks if the thread loop needs to acquire AudioFlinger::mLock
+ // before processing the create patch request.
+ NO_THREAD_SAFETY_ANALYSIS
 {
     if (handle == NULL || patch == NULL) {
         return BAD_VALUE;
@@ -245,7 +249,6 @@
                         status = INVALID_OPERATION;
                         goto exit;
                     }
-
                     sp<ThreadBase> thread =
                             mAudioFlinger.checkPlaybackThread_l(patch->sources[1].ext.mix.handle);
                     if (thread == 0) {
@@ -356,11 +359,12 @@
                             goto exit;
                         }
                     }
+                    mAudioFlinger.unlock();
                     status = thread->sendCreateAudioPatchConfigEvent(patch, &halHandle);
+                    mAudioFlinger.lock();
                     if (status == NO_ERROR) {
                         newPatch.setThread(thread);
                     }
-
                     // remove stale audio patch with same input as sink if any
                     for (auto& iter : mPatches) {
                         if (iter.second.mAudioPatch.sinks[0].ext.mix.handle == thread->id()) {
@@ -422,7 +426,9 @@
                 mAudioFlinger.updateOutDevicesForRecordThreads_l(devices);
             }
 
+            mAudioFlinger.unlock();
             status = thread->sendCreateAudioPatchConfigEvent(patch, &halHandle);
+            mAudioFlinger.lock();
             if (status == NO_ERROR) {
                 newPatch.setThread(thread);
             }
@@ -729,7 +735,11 @@
 
 /* Disconnect a patch */
 status_t AudioFlinger::PatchPanel::releaseAudioPatch(audio_patch_handle_t handle)
-{
+ // Unlocks AudioFlinger::mLock when calling ThreadBase::sendReleaseAudioPatchConfigEvent
+ // to avoid deadlocks if the thread loop needs to acquire AudioFlinger::mLock
+ // before processing the release patch request.
+ NO_THREAD_SAFETY_ANALYSIS
+ {
     ALOGV("%s handle %d", __func__, handle);
     status_t status = NO_ERROR;
 
@@ -766,7 +776,9 @@
                         break;
                     }
                 }
+                mAudioFlinger.unlock();
                 status = thread->sendReleaseAudioPatchConfigEvent(removedPatch.mHalHandle);
+                mAudioFlinger.lock();
             } else {
                 status = hwDevice->releaseAudioPatch(removedPatch.mHalHandle);
             }
@@ -787,7 +799,9 @@
                     break;
                 }
             }
+            mAudioFlinger.unlock();
             status = thread->sendReleaseAudioPatchConfigEvent(removedPatch.mHalHandle);
+            mAudioFlinger.lock();
         } break;
         default:
             status = BAD_VALUE;
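
The create/release patch changes above drop AudioFlinger::mLock around the blocking sendCreateAudioPatchConfigEvent()/sendReleaseAudioPatchConfigEvent() calls, because the thread loop that services those events may itself need the same lock. Below is a standalone sketch of that unlock-around-the-blocking-call pattern, with placeholder names rather than the AudioFlinger types.

    #include <cstdio>
    #include <mutex>

    std::mutex gServiceLock;   // stands in for AudioFlinger::mLock

    // Placeholder for a config-event send: blocks until the thread loop has
    // handled the event, and that loop may itself need gServiceLock.
    int sendConfigEventAndWait() { return 0; }

    // Called with gServiceLock held; temporarily releases it so the thread loop
    // cannot deadlock against this caller, then restores the locking contract.
    int createPatchLocked() {
        gServiceLock.unlock();
        const int status = sendConfigEventAndWait();
        gServiceLock.lock();
        return status;
    }

    int main() {
        gServiceLock.lock();
        std::printf("status=%d\n", createPatchLocked());
        gServiceLock.unlock();
        return 0;
    }
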
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index 68a3800..5555766 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -199,7 +199,7 @@
             return mRecord.handle() != AUDIO_PATCH_HANDLE_NONE ||
                     mPlayback.handle() != AUDIO_PATCH_HANDLE_NONE; }
 
-        void setThread(sp<ThreadBase> thread) { mThread = thread; }
+        void setThread(const sp<ThreadBase>& thread) { mThread = thread; }
         wp<ThreadBase> thread() const { return mThread; }
 
         // returns the latency of the patch (from record to playback).
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 9560609..51bb93b 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -117,8 +117,8 @@
             status_t    attachAuxEffect(int EffectId);
             void        setAuxBuffer(int EffectId, int32_t *buffer);
             int32_t     *auxBuffer() const { return mAuxBuffer; }
-            void        setMainBuffer(effect_buffer_t *buffer) { mMainBuffer = buffer; }
-            effect_buffer_t *mainBuffer() const { return mMainBuffer; }
+            void        setMainBuffer(float *buffer) { mMainBuffer = buffer; }
+            float       *mainBuffer() const { return mMainBuffer; }
             int         auxEffectId() const { return mAuxEffectId; }
     virtual status_t    getTimestamp(AudioTimestamp& timestamp);
             void        signal();
@@ -132,7 +132,7 @@
 // implement FastMixerState::VolumeProvider interface
     virtual gain_minifloat_packed_t getVolumeLR();
 
-    virtual status_t    setSyncEvent(const sp<SyncEvent>& event);
+            status_t    setSyncEvent(const sp<audioflinger::SyncEvent>& event) override;
 
     virtual bool        isFastTrack() const { return (mFlags & AUDIO_OUTPUT_FLAG_FAST) != 0; }
 
@@ -186,7 +186,9 @@
             }
             sp<os::ExternalVibration> getExternalVibration() const { return mExternalVibration; }
 
-            void    setTeePatches(TeePatches teePatches);
+            // This function should be called with holding thread lock.
+            void    updateTeePatches();
+            void    setTeePatchesToUpdate(TeePatches teePatchesToUpdate);
 
     void tallyUnderrunFrames(size_t frames) override {
        if (isOut()) { // we expect this from output tracks only
@@ -295,7 +297,7 @@
 
     bool                mResetDone;
     const audio_stream_type_t mStreamType;
-    effect_buffer_t     *mMainBuffer;
+    float     *mMainBuffer;
 
     int32_t             *mAuxBuffer;
     int                 mAuxEffectId;
@@ -369,6 +371,7 @@
     bool                mPauseHwPending = false; // direct/offload track request for thread pause
     audio_output_flags_t mFlags;
     TeePatches  mTeePatches;
+    std::optional<TeePatches> mTeePatchesToUpdate;
     const float         mSpeed;
     const bool          mIsSpatialized;
     const bool          mIsBitPerfect;
@@ -426,6 +429,7 @@
 private:
     status_t            obtainBuffer(AudioBufferProvider::Buffer* buffer,
                                      uint32_t waitTimeMs);
+    void                queueBuffer(Buffer& inBuffer);
     void                clearBufferQueue();
 
     void                restartIfDisabled();
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index f0a5f76..d91a210 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -58,7 +58,7 @@
             void        appendDumpHeader(String8& result);
             void        appendDump(String8& result, bool active);
 
-            void        handleSyncStartEvent(const sp<SyncEvent>& event);
+            void        handleSyncStartEvent(const sp<audioflinger::SyncEvent>& event);
             void        clearSyncStartEvent();
 
             void        updateTrackFrameInfo(int64_t trackFramesReleased,
@@ -107,12 +107,10 @@
 
             // sync event triggering actual audio capture. Frames read before this event will
             // be dropped and therefore not read by the application.
-            sp<SyncEvent>                       mSyncStartEvent;
+            sp<audioflinger::SyncEvent>        mSyncStartEvent;
 
-            // number of captured frames to drop after the start sync event has been received.
-            // when < 0, maximum frames to drop before starting capture even if sync event is
-            // not received
-            ssize_t                             mFramesToDrop;
+            audioflinger::SynchronizedRecordState
+                    mSynchronizedRecordState{mSampleRate}; // sampleRate defined in base
 
             // used by resampler to find source frames
             ResamplerBufferProvider            *mResamplerBufferProvider;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index e113efb..95883d9 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -76,8 +76,6 @@
 #include <media/audiohal/StreamHalInterface.h>
 
 #include "AudioFlinger.h"
-#include "FastMixer.h"
-#include "FastCapture.h"
 #include <mediautils/SchedulingPolicyService.h>
 #include <mediautils/ServiceUtilities.h>
 
@@ -91,10 +89,10 @@
 #include <cpustats/ThreadCpuUsage.h>
 #endif
 
-#include "AutoPark.h"
+#include <fastpath/AutoPark.h>
 
 #include <pthread.h>
-#include "TypedLogger.h"
+#include <afutils/TypedLogger.h>
 
 // ----------------------------------------------------------------------------
 
@@ -376,7 +374,7 @@
         // try three times to get the clock offset, choose the one
         // with the minimum gap in measurements.
         const int tries = 3;
-        nsecs_t bestGap, measured;
+        nsecs_t bestGap = 0, measured = 0; // not required, initialized for clang-tidy
         for (int i = 0; i < tries; ++i) {
             const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
             const nsecs_t tbase = systemTime(clockbase);
@@ -627,6 +625,7 @@
 // sendConfigEvent_l() must be called with ThreadBase::mLock held
 // Can temporarily release the lock if waiting for a reply from processConfigEvents_l().
 status_t AudioFlinger::ThreadBase::sendConfigEvent_l(sp<ConfigEvent>& event)
+NO_THREAD_SAFETY_ANALYSIS  // condition variable
 {
     status_t status = NO_ERROR;
 
@@ -942,6 +941,7 @@
 }
 
 void AudioFlinger::ThreadBase::dump(int fd, const Vector<String16>& args)
+NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
     dprintf(fd, "\n%s thread %p, name %s, tid %d, type %d (%s):\n", isOutput() ? "Output" : "Input",
             this, mThreadName, getTid(), type(), threadTypeToString(type()));
@@ -1310,7 +1310,9 @@
 
 void AudioFlinger::ThreadBase::checkSuspendOnEffectEnabled(bool enabled,
                                                            audio_session_t sessionId,
-                                                           bool threadLocked) {
+                                                           bool threadLocked)
+NO_THREAD_SAFETY_ANALYSIS  // manual locking
+{
     if (!threadLocked) {
         mLock.lock();
     }
@@ -1406,15 +1408,6 @@
 
     switch (mType) {
     case MIXER: {
-#ifndef MULTICHANNEL_EFFECT_CHAIN
-        // Reject any effect on mixer multichannel sinks.
-        // TODO: fix both format and multichannel issues with effects.
-        if (mChannelCount != FCC_2) {
-            ALOGW("%s: effect %s for multichannel(%d) on MIXER thread %s",
-                    __func__, desc->name, mChannelCount, mThreadName);
-            return BAD_VALUE;
-        }
-#endif
         audio_output_flags_t flags = mOutput->flags;
         if (hasFastMixer() || (flags & AUDIO_OUTPUT_FLAG_FAST)) {
             if (sessionId == AUDIO_SESSION_OUTPUT_MIX) {
@@ -1467,15 +1460,6 @@
                 __func__, desc->name, mThreadName);
         return BAD_VALUE;
     case DUPLICATING:
-#ifndef MULTICHANNEL_EFFECT_CHAIN
-        // Reject any effect on mixer multichannel sinks.
-        // TODO: fix both format and multichannel issues with effects.
-        if (mChannelCount != FCC_2) {
-            ALOGW("%s: effect %s for multichannel(%d) on DUPLICATING thread %s",
-                    __func__, desc->name, mChannelCount, mThreadName);
-            return BAD_VALUE;
-        }
-#endif
         if (audio_is_global_session(sessionId)) {
             ALOGW("%s: global effect %s on DUPLICATING thread %s",
                     __func__, desc->name, mThreadName);
@@ -1794,6 +1778,7 @@
 
 void AudioFlinger::ThreadBase::lockEffectChains_l(
         Vector< sp<AudioFlinger::EffectChain> >& effectChains)
+NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::lock()
 {
     effectChains = mEffectChains;
     for (size_t i = 0; i < mEffectChains.size(); i++) {
@@ -1803,6 +1788,7 @@
 
 void AudioFlinger::ThreadBase::unlockEffectChains(
         const Vector< sp<AudioFlinger::EffectChain> >& effectChains)
+NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::unlock()
 {
     for (size_t i = 0; i < effectChains.size(); i++) {
         effectChains[i]->unlock();
@@ -1910,7 +1896,7 @@
 
 template <typename T>
 void AudioFlinger::ThreadBase::ActiveTracks<T>::updatePowerState(
-        sp<ThreadBase> thread, bool force) {
+        const sp<ThreadBase>& thread, bool force) {
     // Updates ActiveTracks client uids to the thread wakelock.
     if (mActiveTracksGeneration != mLastActiveTracksGeneration || force) {
         thread->updateWakeLockUids_l(getWakeLockUids());
@@ -2788,6 +2774,7 @@
 
 // addTrack_l() must be called with ThreadBase::mLock held
 status_t AudioFlinger::PlaybackThread::addTrack_l(const sp<Track>& track)
+NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mLock
 {
     status_t status = ALREADY_EXISTS;
 
@@ -2901,6 +2888,9 @@
     if (!trackActive) {
         removeTrack_l(track);
     } else if (track->isFastTrack() || track->isOffloaded() || track->isDirect()) {
+        if (track->isPausePending()) {
+            track->pauseAck();
+        }
         track->mState = TrackBase::STOPPING_1;
     }
 
@@ -2941,7 +2931,7 @@
     if (initCheck() == NO_ERROR && mOutput->stream->getParameters(keys, &out_s8) == OK) {
         return out_s8;
     }
-    return String8();
+    return {};
 }
 
 status_t AudioFlinger::DirectOutputThread::selectPresentation(int presentationId, int programId) {
@@ -3190,7 +3180,7 @@
     free(mEffectBuffer);
     mEffectBuffer = NULL;
     if (mEffectBufferEnabled) {
-        mEffectBufferFormat = EFFECT_BUFFER_FORMAT;
+        mEffectBufferFormat = AUDIO_FORMAT_PCM_FLOAT;
         mEffectBufferSize = mNormalFrameCount * mixerChannelCount
                 * audio_bytes_per_sample(mEffectBufferFormat);
         (void)posix_memalign(&mEffectBuffer, 32, mEffectBufferSize);
@@ -3347,7 +3337,7 @@
     return (uint32_t)((uint32_t)((mNormalFrameCount * 1000) / mSampleRate) * 1000);
 }
 
-status_t AudioFlinger::PlaybackThread::setSyncEvent(const sp<SyncEvent>& event)
+status_t AudioFlinger::PlaybackThread::setSyncEvent(const sp<audioflinger::SyncEvent>& event)
 {
     if (!isValidSyncEvent(event)) {
         return BAD_VALUE;
@@ -3366,7 +3356,8 @@
     return NAME_NOT_FOUND;
 }
 
-bool AudioFlinger::PlaybackThread::isValidSyncEvent(const sp<SyncEvent>& event) const
+bool AudioFlinger::PlaybackThread::isValidSyncEvent(
+        const sp<audioflinger::SyncEvent>& event) const
 {
     return event->type() == AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE;
 }
@@ -3561,12 +3552,7 @@
     mIdleSleepTimeUs = idleSleepTimeUs();
 
     mStandbyDelayNs = AudioFlinger::mStandbyTimeInNsecs;
-    // Shorten standby delay on VOIP RX output to avoid delayed routing updates
-    // after a call due to call end tone.
-    if (mOutput != nullptr && (mOutput->flags & AUDIO_OUTPUT_FLAG_VOIP_RX) != 0) {
-        const nsecs_t NS_PER_MS = 1000000;
-        mStandbyDelayNs = std::min(mStandbyDelayNs, latency_l() * NS_PER_MS);
-    }
+
     // make sure standby delay is not too short when connected to an A2DP sink to avoid
     // truncating audio when going to standby.
     if (!Intersection(outDeviceTypes(),  getAudioDeviceOutAllA2dpSet()).empty()) {
@@ -3635,7 +3621,7 @@
 {
     audio_session_t session = chain->sessionId();
     sp<EffectBufferHalInterface> halInBuffer, halOutBuffer;
-    effect_buffer_t *buffer = nullptr; // only used for non global sessions
+    float *buffer = nullptr; // only used for non global sessions
 
     if (mType == SPATIALIZER) {
         if (!audio_is_global_session(session)) {
@@ -3653,7 +3639,7 @@
             size_t numSamples = mNormalFrameCount
                     * (audio_channel_count_from_out_mask(channelMask) + mHapticChannelCount);
             status_t result = mAudioFlinger->mEffectsFactoryHal->allocateBuffer(
-                    numSamples * sizeof(effect_buffer_t),
+                    numSamples * sizeof(float),
                     &halInBuffer);
             if (result != OK) return result;
 
@@ -3663,11 +3649,8 @@
                     &halOutBuffer);
             if (result != OK) return result;
 
-#ifdef FLOAT_EFFECT_CHAIN
             buffer = halInBuffer ? halInBuffer->audioBuffer()->f32 : buffer;
-#else
-            buffer = halInBuffer ? halInBuffer->audioBuffer()->s16 : buffer;
-#endif
+
             ALOGV("addEffectChain_l() creating new input buffer %p session %d",
                     buffer, session);
         } else {
@@ -3695,7 +3678,7 @@
         halOutBuffer = halInBuffer;
         ALOGV("addEffectChain_l() %p on thread %p for session %d", chain.get(), this, session);
         if (!audio_is_global_session(session)) {
-            buffer = halInBuffer ? reinterpret_cast<effect_buffer_t*>(halInBuffer->externalData())
+            buffer = halInBuffer ? reinterpret_cast<float*>(halInBuffer->externalData())
                                  : buffer;
             // Only one effect chain can be present in direct output thread and it uses
             // the sink buffer as input
@@ -3703,15 +3686,12 @@
                 size_t numSamples = mNormalFrameCount
                         * (audio_channel_count_from_out_mask(mMixerChannelMask)
                                                              + mHapticChannelCount);
-                status_t result = mAudioFlinger->mEffectsFactoryHal->allocateBuffer(
-                        numSamples * sizeof(effect_buffer_t),
+                const status_t allocateStatus = mAudioFlinger->mEffectsFactoryHal->allocateBuffer(
+                        numSamples * sizeof(float),
                         &halInBuffer);
-                if (result != OK) return result;
-#ifdef FLOAT_EFFECT_CHAIN
+                if (allocateStatus != OK) return allocateStatus;
+
                 buffer = halInBuffer ? halInBuffer->audioBuffer()->f32 : buffer;
-#else
-                buffer = halInBuffer ? halInBuffer->audioBuffer()->s16 : buffer;
-#endif
                 ALOGV("addEffectChain_l() creating new input buffer %p session %d",
                         buffer, session);
             }
@@ -3792,10 +3772,10 @@
             }
 
             // detach all tracks with same session ID from this chain
-            for (size_t i = 0; i < mTracks.size(); ++i) {
-                sp<Track> track = mTracks[i];
+            for (size_t j = 0; j < mTracks.size(); ++j) {
+                sp<Track> track = mTracks[j];
                 if (session == track->sessionId()) {
-                    track->setMainBuffer(reinterpret_cast<effect_buffer_t*>(mSinkBuffer));
+                    track->setMainBuffer(reinterpret_cast<float*>(mSinkBuffer));
                     chain->decTrackCnt();
                 }
             }
@@ -3846,8 +3826,9 @@
 }
 
 bool AudioFlinger::PlaybackThread::threadLoop()
+NO_THREAD_SAFETY_ANALYSIS  // manual locking of AudioFlinger
 {
-    tlNBLogWriter = mNBLogWriter.get();
+    aflog::setThreadWriter(mNBLogWriter.get());
 
     Vector< sp<Track> > tracksToRemove;
 
@@ -3914,7 +3895,7 @@
             // is more informational.
             if (mAudioFlinger->mLock.tryLock() == NO_ERROR) {
                 std::vector<PatchPanel::SoftwarePatch> swPatches;
-                double latencyMs;
+                double latencyMs = 0.; // not required; initialized for clang-tidy
                 status_t status = INVALID_OPERATION;
                 audio_patch_handle_t downstreamPatchHandle = AUDIO_PATCH_HANDLE_NONE;
                 if (mAudioFlinger->mPatchPanel.getDownstreamSoftwarePatches(id(), &swPatches) == OK
@@ -3934,8 +3915,7 @@
                         ALOGVV("new downstream latency %lf ms", latencyMs);
                     } else {
                         ALOGD("out of range downstream latency %lf ms", latencyMs);
-                        if (latencyMs < minLatency) latencyMs = minLatency;
-                        else if (latencyMs > maxLatency) latencyMs = maxLatency;
+                        latencyMs = std::clamp(latencyMs, minLatency, maxLatency);
                     }
                     mDownstreamLatencyStatMs.add(latencyMs);
                 }
@@ -4013,7 +3993,7 @@
                         LOG_AUDIO_STATE();
                         mThreadMetrics.logEndInterval();
                         mThreadSnapshot.onEnd();
-                        mStandby = true;
+                        setStandby_l();
                     }
                     sendStatistics(false /* force */);
                 }
@@ -4093,6 +4073,19 @@
             activeTracks.insert(activeTracks.end(), mActiveTracks.begin(), mActiveTracks.end());
 
             setHalLatencyMode_l();
+
+            for (const auto &track : mActiveTracks) {
+                track->updateTeePatches();
+            }
+
+            // signal actual start of output stream when the render position reported by the kernel
+            // starts moving.
+            if (!mHalStarted && ((isSuspended() && (mBytesWritten != 0)) || (!mStandby
+                    && (mKernelPositionOnStandby
+                            != mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL])))) {
+                mHalStarted = true;
+                mWaitHalStartCV.broadcast();
+            }
         } // mLock scope ends
 
         if (mBytesRemaining == 0) {
@@ -4202,12 +4195,12 @@
 
                         const size_t audioBufferSize = mNormalFrameCount
                             * audio_bytes_per_frame(hapticSessionChannelCount,
-                                                    EFFECT_BUFFER_FORMAT);
+                                                    AUDIO_FORMAT_PCM_FLOAT);
                         memcpy_by_audio_format(
                                 (uint8_t*)effectChains[i]->outBuffer() + audioBufferSize,
-                                EFFECT_BUFFER_FORMAT,
+                                AUDIO_FORMAT_PCM_FLOAT,
                                 (const uint8_t*)effectChains[i]->inBuffer() + audioBufferSize,
-                                EFFECT_BUFFER_FORMAT, mNormalFrameCount * mHapticChannelCount);
+                                AUDIO_FORMAT_PCM_FLOAT, mNormalFrameCount * mHapticChannelCount);
                     }
                 }
             }
@@ -4478,7 +4471,7 @@
 
     if (!mStandby) {
         threadLoop_standby();
-        mStandby = true;
+        setStandby();
     }
 
     releaseWakeLock();
@@ -4624,6 +4617,7 @@
 
 // removeTracks_l() must be called with ThreadBase::mLock held
 void AudioFlinger::PlaybackThread::removeTracks_l(const Vector< sp<Track> >& tracksToRemove)
+NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mLock
 {
     for (const auto& track : tracksToRemove) {
         mActiveTracks.remove(track);
@@ -4699,7 +4693,8 @@
     if ((mOutput->flags & AUDIO_OUTPUT_FLAG_VOIP_RX) != 0) {
         if (*volume != mLeftVolFloat) {
             result = mOutput->stream->setVolume(*volume, *volume);
-            ALOGE_IF(result != OK,
+            // HAL can return INVALID_OPERATION if operation is not supported.
+            ALOGE_IF(result != OK && result != INVALID_OPERATION,
                      "Error when setting output stream volume: %d", result);
             if (result == NO_ERROR) {
                 mLeftVolFloat = *volume;
@@ -4746,8 +4741,8 @@
                             "as it does not support audio patches",
                             patch->sinks[i].ext.device.type);
         type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
-        deviceTypeAddrs.push_back(AudioDeviceTypeAddr(patch->sinks[i].ext.device.type,
-                patch->sinks[i].ext.device.address));
+        deviceTypeAddrs.emplace_back(patch->sinks[i].ext.device.type,
+                patch->sinks[i].ext.device.address);
     }
 
     audio_port_handle_t sinkPortId = patch->sinks[0].id;
@@ -4962,14 +4957,15 @@
         // When it wakes up after a maximum latency, it runs a few cycles quickly before
         // finally blocking.  Note the pipe implementation rounds up the request to a power of 2.
         MonoPipe *monoPipe = new MonoPipe(mNormalFrameCount * 4, format, true /*writeCanBlock*/);
-        const NBAIO_Format offers[1] = {format};
-        size_t numCounterOffers = 0;
+        const NBAIO_Format offersFast[1] = {format};
+        size_t numCounterOffersFast = 0;
 #if !LOG_NDEBUG
-        ssize_t index =
+        index =
 #else
         (void)
 #endif
-                monoPipe->negotiate(offers, 1, NULL, numCounterOffers);
+                monoPipe->negotiate(offersFast, std::size(offersFast),
+                        nullptr /* counterOffers */, numCounterOffersFast);
         ALOG_ASSERT(index == 0);
         monoPipe->setAvgFrames((mScreenState & 1) ?
                 (monoPipe->maxFrames() * 7) / 8 : mNormalFrameCount * 2);
@@ -5396,7 +5392,7 @@
         // tallyUnderrunFrames() is called to update the track counters
         // with the number of underrun frames for a particular mixer period.
         // We defer tallying until we know the final mixer status.
-        void tallyUnderrunFrames(sp<Track> track, size_t underrunFrames) {
+        void tallyUnderrunFrames(const sp<Track>& track, size_t underrunFrames) {
             mUnderrunFrames.emplace_back(track, underrunFrames);
         }
 
@@ -5656,7 +5652,7 @@
         // during last round
         size_t desiredFrames;
         const uint32_t sampleRate = track->mAudioTrackServerProxy->getSampleRate();
-        AudioPlaybackRate playbackRate = track->mAudioTrackServerProxy->getPlaybackRate();
+        const AudioPlaybackRate playbackRate = track->mAudioTrackServerProxy->getPlaybackRate();
 
         desiredFrames = sourceFramesNeededWithTimestretch(
                 sampleRate, mNormalFrameCount, mSampleRate, playbackRate.mSpeed);
@@ -5849,12 +5845,12 @@
                 AudioMixer::SAMPLE_RATE,
                 (void *)(uintptr_t)reqSampleRate);
 
-            AudioPlaybackRate playbackRate = proxy->getPlaybackRate();
             mAudioMixer->setParameter(
                 trackId,
                 AudioMixer::TIMESTRETCH,
                 AudioMixer::PLAYBACK_RATE,
-                &playbackRate);
+                // cast away constness for this generic API.
+                const_cast<void *>(reinterpret_cast<const void *>(&playbackRate)));
 
             /*
              * Select the appropriate output buffer for the track.
@@ -5896,7 +5892,7 @@
                 mAudioMixer->setParameter(
                         trackId,
                         AudioMixer::TRACK,
-                        AudioMixer::MIXER_FORMAT, (void *)EFFECT_BUFFER_FORMAT);
+                        AudioMixer::MIXER_FORMAT, (void *)AUDIO_FORMAT_PCM_FLOAT);
                 mAudioMixer->setParameter(
                         trackId,
                         AudioMixer::TRACK,
@@ -6223,12 +6219,12 @@
     if (status == NO_ERROR) {
         status = mOutput->stream->setParameters(keyValuePair);
         if (!mStandby && status == INVALID_OPERATION) {
+            ALOGW("%s: setParameters failed with keyValuePair %s, entering standby",
+                    __func__, keyValuePair.c_str());
             mOutput->standby();
-            if (!mStandby) {
-                mThreadMetrics.logEndInterval();
-                mThreadSnapshot.onEnd();
-                mStandby = true;
-            }
+            mThreadMetrics.logEndInterval();
+            mThreadSnapshot.onEnd();
+            setStandby_l();
             mBytesWritten = 0;
             status = mOutput->stream->setParameters(keyValuePair);
         }
@@ -6238,12 +6234,12 @@
             mAudioMixer = new AudioMixer(mNormalFrameCount, mSampleRate);
             for (const auto &track : mTracks) {
                 const int trackId = track->id();
-                status_t status = mAudioMixer->create(
+                const status_t createStatus = mAudioMixer->create(
                         trackId,
                         track->mChannelMask,
                         track->mFormat,
                         track->mSessionId);
-                ALOGW_IF(status != NO_ERROR,
+                ALOGW_IF(createStatus != NO_ERROR,
                         "%s(): AudioMixer cannot create track(%d)"
                         " mask %#x, format %#x, sessionId %d",
                         __func__,
@@ -6296,6 +6292,12 @@
     } else {
         dprintf(fd, "  No FastMixer\n");
     }
+
+     dprintf(fd, "Bluetooth latency modes are %senabled\n",
+            mBluetoothLatencyModesEnabled ? "" : "not ");
+     dprintf(fd, "HAL does %ssupport Bluetooth latency modes\n", mOutput != nullptr &&
+             mOutput->audioHwDev->supportsBluetoothVariableLatency() ? "" : "not ");
+     dprintf(fd, "Supported latency modes: %s\n", toString(mSupportedLatencyModes).c_str());
 }
 
 uint32_t AudioFlinger::MixerThread::idleSleepTimeUs() const
@@ -6483,9 +6485,13 @@
         if (right > GAIN_FLOAT_UNITY) {
             right = GAIN_FLOAT_UNITY;
         }
-
-        left *= v * mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
-        right *= v * mMasterBalanceRight;
+        left *= v;
+        right *= v;
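+        // Skip the master balance for mono streams while the device is in communication mode.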
+        if (mAudioFlinger->getMode() != AUDIO_MODE_IN_COMMUNICATION
+                || audio_channel_count_from_out_mask(mChannelMask) > 1) {
+            left *= mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
+            right *= mMasterBalanceRight;
+        }
     }
 
     track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
@@ -6531,7 +6537,8 @@
                 mFlushPending = true;
             }
         } else /* mType == OFFLOAD */ {
-            if (previousTrack->sessionId() != latestTrack->sessionId()) {
+            if (previousTrack->sessionId() != latestTrack->sessionId() ||
+                previousTrack->isFlushPending()) {
                 mFlushPending = true;
             }
         }
@@ -6873,7 +6880,7 @@
             if (!mStandby) {
                 mThreadMetrics.logEndInterval();
                 mThreadSnapshot.onEnd();
-                mStandby = true;
+                setStandby_l();
             }
             mBytesWritten = 0;
             status = mOutput->stream->setParameters(keyValuePair);
@@ -7414,7 +7421,7 @@
 void AudioFlinger::DuplicatingThread::threadLoop_mix()
 {
     // mix buffers...
-    if (outputsReady(outputTracks)) {
+    if (outputsReady()) {
         mAudioMixer->process();
     } else {
         if (mMixerBufferValid) {
@@ -7485,7 +7492,7 @@
     }
 }
 
-void AudioFlinger::DuplicatingThread::dumpInternals_l(int fd, const Vector<String16>& args __unused)
+void AudioFlinger::DuplicatingThread::dumpInternals_l(int fd, const Vector<String16>& args)
 {
     MixerThread::dumpInternals_l(fd, args);
 
@@ -7589,9 +7596,7 @@
     }
 }
 
-
-bool AudioFlinger::DuplicatingThread::outputsReady(
-        const SortedVector< sp<OutputTrack> > &outputTracks)
+bool AudioFlinger::DuplicatingThread::outputsReady()
 {
     for (size_t i = 0; i < outputTracks.size(); i++) {
         sp<ThreadBase> thread = outputTracks[i]->thread().promote();
@@ -7858,15 +7863,17 @@
         // pipe will be shared directly with fast clients, so clear to avoid leaking old information
         memset(pipeBuffer, 0, pipeSize);
         Pipe *pipe = new Pipe(pipeFramesP2, format, pipeBuffer);
-        const NBAIO_Format offers[1] = {format};
-        size_t numCounterOffers = 0;
-        [[maybe_unused]] ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers);
-        ALOG_ASSERT(index == 0);
+        const NBAIO_Format offersFast[1] = {format};
+        size_t numCounterOffersFast = 0;
+        [[maybe_unused]] ssize_t index2 = pipe->negotiate(offersFast, std::size(offersFast),
+                nullptr /* counterOffers */, numCounterOffersFast);
+        ALOG_ASSERT(index2 == 0);
         mPipeSink = pipe;
         PipeReader *pipeReader = new PipeReader(*pipe);
-        numCounterOffers = 0;
-        index = pipeReader->negotiate(offers, 1, NULL, numCounterOffers);
-        ALOG_ASSERT(index == 0);
+        numCounterOffersFast = 0;
+        index2 = pipeReader->negotiate(offersFast, std::size(offersFast),
+                nullptr /* counterOffers */, numCounterOffersFast);
+        ALOG_ASSERT(index2 == 0);
         mPipeSource = pipeReader;
         mPipeFramesP2 = pipeFramesP2;
         mPipeMemory = pipeMemory;
@@ -8213,7 +8220,7 @@
         // copy to the right place.  Permitted because mRsmpInBuffer was over-allocated.
 
         int32_t rear = mRsmpInRear & (mRsmpInFramesP2 - 1);
-        ssize_t framesRead;
+        ssize_t framesRead = 0; // initialized only to silence a clang-tidy warning.
         const int64_t lastIoBeginNs = systemTime(); // start IO timing
 
         // If an NBAIO source is present, use it to read the normal capture's data
@@ -8406,8 +8413,9 @@
                     // straight from RecordThread buffer to RecordTrack buffer.
                     AudioBufferProvider::Buffer buffer;
                     buffer.frameCount = framesOut;
-                    status_t status = activeTrack->mResamplerBufferProvider->getNextBuffer(&buffer);
-                    if (status == OK && buffer.frameCount != 0) {
+                    const status_t getNextBufferStatus =
+                            activeTrack->mResamplerBufferProvider->getNextBuffer(&buffer);
+                    if (getNextBufferStatus == OK && buffer.frameCount != 0) {
                         ALOGV_IF(buffer.frameCount != framesOut,
                                 "%s() read less than expected (%zu vs %zu)",
                                 __func__, buffer.frameCount, framesOut);
@@ -8417,7 +8425,7 @@
                     } else {
                         framesOut = 0;
                         ALOGE("%s() cannot fill request, status: %d, frameCount: %zu",
-                            __func__, status, buffer.frameCount);
+                            __func__, getNextBufferStatus, buffer.frameCount);
                     }
                 } else {
                     // process frames from the RecordThread buffer provider to the RecordTrack
@@ -8432,7 +8440,11 @@
                     overrun = OVERRUN_FALSE;
                 }
 
-                if (activeTrack->mFramesToDrop == 0) {
+                // MediaSyncEvent handling: Synchronize AudioRecord to AudioTrack completion.
+                const ssize_t framesToDrop =
+                        activeTrack->mSynchronizedRecordState.updateRecordFrames(framesOut);
+                if (framesToDrop == 0) {
+                    // no sync event: process frames normally; otherwise they are dropped.
                     if (framesOut > 0) {
                         activeTrack->mSink.frameCount = framesOut;
                         // Sanitize before releasing if the track has no access to the source data
@@ -8442,28 +8454,7 @@
                         }
                         activeTrack->releaseBuffer(&activeTrack->mSink);
                     }
-                } else {
-                    // FIXME could do a partial drop of framesOut
-                    if (activeTrack->mFramesToDrop > 0) {
-                        activeTrack->mFramesToDrop -= (ssize_t)framesOut;
-                        if (activeTrack->mFramesToDrop <= 0) {
-                            activeTrack->clearSyncStartEvent();
-                        }
-                    } else {
-                        activeTrack->mFramesToDrop += framesOut;
-                        if (activeTrack->mFramesToDrop >= 0 || activeTrack->mSyncStartEvent == 0 ||
-                                activeTrack->mSyncStartEvent->isCancelled()) {
-                            ALOGW("Synced record %s, session %d, trigger session %d",
-                                  (activeTrack->mFramesToDrop >= 0) ? "timed out" : "cancelled",
-                                  activeTrack->sessionId(),
-                                  (activeTrack->mSyncStartEvent != 0) ?
-                                          activeTrack->mSyncStartEvent->triggerSession() :
-                                          AUDIO_SESSION_NONE);
-                            activeTrack->clearSyncStartEvent();
-                        }
-                    }
                 }
-
                 if (framesOut == 0) {
                     break;
                 }
@@ -8796,20 +8787,10 @@
     if (event == AudioSystem::SYNC_EVENT_NONE) {
         recordTrack->clearSyncStartEvent();
     } else if (event != AudioSystem::SYNC_EVENT_SAME) {
-        recordTrack->mSyncStartEvent = mAudioFlinger->createSyncEvent(event,
-                                       triggerSession,
-                                       recordTrack->sessionId(),
-                                       syncStartEventCallback,
-                                       recordTrack);
-        // Sync event can be cancelled by the trigger session if the track is not in a
-        // compatible state in which case we start record immediately
-        if (recordTrack->mSyncStartEvent->isCancelled()) {
-            recordTrack->clearSyncStartEvent();
-        } else {
-            // do not wait for the event for more than AudioSystem::kSyncRecordStartTimeOutMs
-            recordTrack->mFramesToDrop = -(ssize_t)
-                    ((AudioSystem::kSyncRecordStartTimeOutMs * recordTrack->mSampleRate) / 1000);
-        }
+        recordTrack->mSynchronizedRecordState.startRecording(
+                mAudioFlinger->createSyncEvent(
+                        event, triggerSession,
+                        recordTrack->sessionId(), syncStartEventCallback, recordTrack));
     }
 
     {
@@ -8837,7 +8818,6 @@
         //      or using a separate command thread
         recordTrack->mState = TrackBase::STARTING_1;
         mActiveTracks.add(recordTrack);
-        status_t status = NO_ERROR;
         if (recordTrack->isExternalTrack()) {
             mLock.unlock();
             status = AudioSystem::startInput(recordTrack->portId());
@@ -8892,9 +8872,9 @@
     }
 }
 
-void AudioFlinger::RecordThread::syncStartEventCallback(const wp<SyncEvent>& event)
+void AudioFlinger::RecordThread::syncStartEventCallback(const wp<audioflinger::SyncEvent>& event)
 {
-    sp<SyncEvent> strongEvent = event.promote();
+    sp<audioflinger::SyncEvent> strongEvent = event.promote();
 
     if (strongEvent != 0) {
         sp<RefBase> ptr = strongEvent->cookie().promote();
@@ -8933,12 +8913,14 @@
     return false;
 }
 
-bool AudioFlinger::RecordThread::isValidSyncEvent(const sp<SyncEvent>& event __unused) const
+bool AudioFlinger::RecordThread::isValidSyncEvent(
+        const sp<audioflinger::SyncEvent>& /* event */) const
 {
     return false;
 }
 
-status_t AudioFlinger::RecordThread::setSyncEvent(const sp<SyncEvent>& event __unused)
+status_t AudioFlinger::RecordThread::setSyncEvent(
+        const sp<audioflinger::SyncEvent>& event __unused)
 {
 #if 0   // This branch is currently dead code, but is preserved in case it will be needed in future
     if (!isValidSyncEvent(event)) {
@@ -9028,7 +9010,7 @@
     // "best effort" behavior of the API.
     if (sharedOffset < 0) {
         sharedAudioStartFrames = mRsmpInRear;
-    } else if (sharedOffset > mRsmpInFrames) {
+    } else if (sharedOffset > static_cast<signed>(mRsmpInFrames)) {
         sharedAudioStartFrames =
                 audio_utils::safe_sub_overflow(mRsmpInRear, (int32_t)mRsmpInFrames);
     }
@@ -9281,7 +9263,7 @@
     if (stepCount == 0) {
         return;
     }
-    ALOG_ASSERT(stepCount <= mRsmpInUnrel);
+    ALOG_ASSERT(stepCount <= (int32_t)mRsmpInUnrel);
     mRsmpInUnrel -= stepCount;
     mRsmpInFront = audio_utils::safe_add_overflow(mRsmpInFront, stepCount);
     buffer->raw = NULL;
@@ -9406,7 +9388,7 @@
             return out_s8;
         }
     }
-    return String8();
+    return {};
 }
 
 void AudioFlinger::RecordThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
@@ -9647,7 +9629,7 @@
             maxFilled = filled;
         }
     }
-    if (maxFilled > mRsmpInFrames) {
+    if (maxFilled > static_cast<signed>(mRsmpInFrames)) {
         (void)__builtin_sub_overflow(mRsmpInRear, mRsmpInFrames, &oldestFront);
     }
     return oldestFront;
@@ -9843,7 +9825,7 @@
 
 AudioFlinger::MmapThread::MmapThread(
         const sp<AudioFlinger>& audioFlinger, audio_io_handle_t id,
-        AudioHwDevice *hwDev, sp<StreamHalInterface> stream, bool systemReady, bool isOut)
+        AudioHwDevice *hwDev, const sp<StreamHalInterface>& stream, bool systemReady, bool isOut)
     : ThreadBase(audioFlinger, id, (isOut ? MMAP_PLAYBACK : MMAP_CAPTURE), systemReady, isOut),
       mSessionId(AUDIO_SESSION_NONE),
       mPortId(AUDIO_PORT_HANDLE_NONE),
@@ -9956,6 +9938,9 @@
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
 
     audio_io_handle_t io = mId;
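+    // Validate and adjust the client-provided attribution source before using it in the
+    // getOutputForAttr() / getInputForAttr() requests below.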
+    AttributionSourceState adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+            client.attributionSource);
+
     if (isOutput()) {
         audio_config_t config = AUDIO_CONFIG_INITIALIZER;
         config.sample_rate = mSampleRate;
@@ -9971,7 +9956,7 @@
         ret = AudioSystem::getOutputForAttr(&mAttr, &io,
                                             mSessionId,
                                             &stream,
-                                            client.attributionSource,
+                                            adjAttributionSource,
                                             &config,
                                             flags,
                                             &deviceId,
@@ -9990,7 +9975,7 @@
         ret = AudioSystem::getInputForAttr(&mAttr, &io,
                                               RECORD_RIID_INVALID,
                                               mSessionId,
-                                              client.attributionSource,
+                                              adjAttributionSource,
                                               &config,
                                               AUDIO_INPUT_FLAG_MMAP_NOIRQ,
                                               &deviceId,
@@ -10049,8 +10034,10 @@
         mHalVolFloat = -1.0f;
     } else if (!track->isSilenced_l()) {
         for (const sp<MmapTrack> &t : mActiveTracks) {
-            if (t->isSilenced_l() && t->uid() != client.attributionSource.uid)
+            if (t->isSilenced_l()
+                    && t->uid() != static_cast<uid_t>(client.attributionSource.uid)) {
                 t->invalidate();
+            }
         }
     }
 
@@ -10287,7 +10274,7 @@
     if (initCheck() == NO_ERROR && mHalStream->getParameters(keys, &out_s8) == OK) {
         return out_s8;
     }
-    return String8();
+    return {};
 }
 
 void AudioFlinger::MmapThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
@@ -10317,6 +10304,7 @@
 
 status_t AudioFlinger::MmapThread::createAudioPatch_l(const struct audio_patch *patch,
                                                           audio_patch_handle_t *handle)
+NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mLock
 {
     status_t status = NO_ERROR;
 
@@ -10334,8 +10322,8 @@
                                 "as it does not support audio patches",
                                 patch->sinks[i].ext.device.type);
             type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
-            sinkDeviceTypeAddrs.push_back(AudioDeviceTypeAddr(patch->sinks[i].ext.device.type,
-                    patch->sinks[i].ext.device.address));
+            sinkDeviceTypeAddrs.emplace_back(patch->sinks[i].ext.device.type,
+                    patch->sinks[i].ext.device.address);
         }
         deviceId = patch->sinks[0].id;
         numDevices = mPatch.num_sinks;
@@ -10499,12 +10487,13 @@
     // and because it can cause a recursive mutex lock on stop().
 }
 
-status_t AudioFlinger::MmapThread::setSyncEvent(const sp<SyncEvent>& event __unused)
+status_t AudioFlinger::MmapThread::setSyncEvent(const sp<audioflinger::SyncEvent>& /* event */)
 {
     return BAD_VALUE;
 }
 
-bool AudioFlinger::MmapThread::isValidSyncEvent(const sp<SyncEvent>& event __unused) const
+bool AudioFlinger::MmapThread::isValidSyncEvent(
+        const sp<audioflinger::SyncEvent>& /* event */) const
 {
     return false;
 }
@@ -10544,6 +10533,7 @@
 }
 
 void AudioFlinger::MmapThread::checkInvalidTracks_l()
+NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mLock
 {
     sp<MmapStreamCallback> callback;
     for (const sp<MmapTrack> &track : mActiveTracks) {
@@ -10717,6 +10707,7 @@
 }
 
 void AudioFlinger::MmapPlaybackThread::processVolume_l()
+NO_THREAD_SAFETY_ANALYSIS // access of track->processMuteEvent_l
 {
     float volume;
 
@@ -10851,16 +10842,23 @@
         const sp<audio_utils::MelProcessor>& processor)
 {
     ALOGV("%s: starting mel processor for thread %d", __func__, id());
-    if (processor != nullptr) {
-        mMelProcessor = processor;
+    mMelProcessor.store(processor);
+    if (processor) {
+        processor->resume();
     }
+
+    // no need to update the output format for MMapPlaybackThread since it is
+    // constant for each thread
 }
 
 // stopMelComputation_l() must be called with AudioFlinger::mLock held
 void AudioFlinger::MmapPlaybackThread::stopMelComputation_l()
 {
-    ALOGV("%s: stopping mel processor for thread %d", __func__, id());
-    mMelProcessor = nullptr;
+    ALOGV("%s: pausing mel processor for thread %d", __func__, id());
+    auto melProcessor = mMelProcessor.load();
+    if (melProcessor != nullptr) {
+        melProcessor->pause();
+    }
 }
 
 void AudioFlinger::MmapPlaybackThread::dumpInternals_l(int fd, const Vector<String16>& args)
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 03e4567..b2700db 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -164,7 +164,7 @@
 
     class SetParameterConfigEventData : public ConfigEventData {
     public:
-        explicit SetParameterConfigEventData(String8 keyValuePairs) :
+        explicit SetParameterConfigEventData(const String8& keyValuePairs) :
             mKeyValuePairs(keyValuePairs) {}
 
         virtual  void dump(char *buffer, size_t size) {
@@ -176,7 +176,7 @@
 
     class SetParameterConfigEvent : public ConfigEvent {
     public:
-        explicit SetParameterConfigEvent(String8 keyValuePairs) :
+        explicit SetParameterConfigEvent(const String8& keyValuePairs) :
             ConfigEvent(CFG_EVENT_SET_PARAMETER) {
             mData = new SetParameterConfigEventData(keyValuePairs);
             mWaitStatus = true;
@@ -520,8 +520,8 @@
                                                  audio_session_t sessionId,
                                                  bool threadLocked);
 
-                virtual status_t    setSyncEvent(const sp<SyncEvent>& event) = 0;
-                virtual bool        isValidSyncEvent(const sp<SyncEvent>& event) const = 0;
+                virtual status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) = 0;
+                virtual bool isValidSyncEvent(const sp<audioflinger::SyncEvent>& event) const = 0;
 
                 // Return a reference to a per-thread heap which can be used to allocate IMemory
                 // objects that will be read-only to client processes, read/write to mediaserver,
@@ -802,7 +802,7 @@
                     // ThreadBase thread.
                     void            clear();
                     // periodically called in the threadLoop() to update power state uids.
-                    void            updatePowerState(sp<ThreadBase> thread, bool force = false);
+                    void updatePowerState(const sp<ThreadBase>& thread, bool force = false);
 
                     /** @return true if one or more active tracks were added or removed since the
                      *          last time this function was called or the vector was created.
@@ -1021,8 +1021,8 @@
                 status_t    getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames);
                 // Consider also removing and passing an explicit mMainBuffer initialization
                 // parameter to AF::PlaybackThread::Track::Track().
-                effect_buffer_t *sinkBuffer() const {
-                    return reinterpret_cast<effect_buffer_t *>(mSinkBuffer); };
+                float *sinkBuffer() const {
+                    return reinterpret_cast<float *>(mSinkBuffer); };
 
     virtual     void detachAuxEffect_l(int effectId);
                 status_t attachAuxEffect(const sp<AudioFlinger::PlaybackThread::Track>& track,
@@ -1038,8 +1038,8 @@
                 virtual product_strategy_t getStrategyForSession_l(audio_session_t sessionId);
 
 
-                virtual status_t setSyncEvent(const sp<SyncEvent>& event);
-                virtual bool     isValidSyncEvent(const sp<SyncEvent>& event) const;
+                status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) override;
+                bool     isValidSyncEvent(const sp<audioflinger::SyncEvent>& event) const override;
 
                 // called with AudioFlinger lock held
                         bool     invalidateTracks_l(audio_stream_type_t streamType);
@@ -1116,6 +1116,32 @@
                 void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) override;
                 void stopMelComputation_l() override;
 
+                void setStandby() {
+                    Mutex::Autolock _l(mLock);
+                    setStandby_l();
+                }
+
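+                // Clears the HAL start detection state: mHalStarted is reset and the current
+                // kernel render position is saved (see mKernelPositionOnStandby).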
+                void setStandby_l() {
+                    mStandby = true;
+                    mHalStarted = false;
+                    mKernelPositionOnStandby =
+                        mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+                }
+
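+                // Waits up to kWaitHalTimeoutNs for mHalStarted to be signalled, i.e. for the
+                // HAL output stream to start rendering; returns whether it actually started.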
+                bool waitForHalStart() {
+                    Mutex::Autolock _l(mLock);
+                    static const nsecs_t kWaitHalTimeoutNs = seconds(2);
+                    nsecs_t endWaitTimeNs = systemTime() + kWaitHalTimeoutNs;
+                    while (!mHalStarted) {
+                        nsecs_t timeNs = systemTime();
+                        if (timeNs >= endWaitTimeNs) {
+                            break;
+                        }
+                        nsecs_t waitTimeLeftNs = endWaitTimeNs - timeNs;
+                        mWaitHalStartCV.waitRelative(mLock, waitTimeLeftNs);
+                    }
+                    return mHalStarted;
+                }
 protected:
     // updated by readOutputParameters_l()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -1291,7 +1317,7 @@
     template <typename T>
     class Tracks {
     public:
-        Tracks(bool saveDeletedTrackIds) :
+        explicit Tracks(bool saveDeletedTrackIds) :
             mSaveDeletedTrackIds(saveDeletedTrackIds) { }
 
         // SortedVector methods
@@ -1320,7 +1346,7 @@
             return mTracks.end();
         }
 
-        size_t          processDeletedTrackIds(std::function<void(int)> f) {
+        size_t          processDeletedTrackIds(const std::function<void(int)>& f) {
             for (const int trackId : mDeletedTrackIds) {
                 f(trackId);
             }
@@ -1415,6 +1441,14 @@
     // Downstream patch latency, available if mDownstreamLatencyStatMs.getN() > 0.
     audio_utils::Statistics<double> mDownstreamLatencyStatMs{0.999};
 
+    // output stream start detection based on render position returned by the kernel
+    // condition signalled when the output stream has started
+    Condition                mWaitHalStartCV;
+    // true when the output stream render position has moved, reset to false in standby
+    bool                     mHalStarted = false;
+    // last kernel render position saved when entering standby
+    int64_t                  mKernelPositionOnStandby = 0;
+
 public:
     virtual     bool        hasFastMixer() const = 0;
     virtual     FastTrackUnderruns getFastTrackUnderruns(size_t fastIndex __unused) const
@@ -1442,7 +1476,7 @@
                 class IsTimestampAdvancing {
                 public:
                     // The timestamp will not be checked any faster than the specified time.
-                    IsTimestampAdvancing(nsecs_t minimumTimeBetweenChecksNs)
+                    explicit IsTimestampAdvancing(nsecs_t minimumTimeBetweenChecksNs)
                         :   mMinimumTimeBetweenChecksNs(minimumTimeBetweenChecksNs)
                     {
                         clear();
@@ -1759,7 +1793,7 @@
                 void        dumpInternals_l(int fd, const Vector<String16>& args) override;
 
 private:
-                bool        outputsReady(const SortedVector< sp<OutputTrack> > &outputTracks);
+                bool        outputsReady();
 protected:
     // threadLoop snippets
     virtual     void        threadLoop_mix();
@@ -1961,10 +1995,10 @@
             // FIXME replace by Set [and implement Bag/Multiset for other uses].
             KeyedVector<audio_session_t, bool> sessionIds() const;
 
-    virtual status_t setSyncEvent(const sp<SyncEvent>& event);
-    virtual bool     isValidSyncEvent(const sp<SyncEvent>& event) const;
+            status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) override;
+            bool     isValidSyncEvent(const sp<audioflinger::SyncEvent>& event) const override;
 
-    static void syncStartEventCallback(const wp<SyncEvent>& event);
+    static void syncStartEventCallback(const wp<audioflinger::SyncEvent>& event);
 
     virtual size_t      frameCount() const { return mFrameCount; }
             bool        hasFastCapture() const { return mFastCapture != 0; }
@@ -2108,7 +2142,7 @@
 #include "MmapTracks.h"
 
     MmapThread(const sp<AudioFlinger>& audioFlinger, audio_io_handle_t id,
-               AudioHwDevice *hwDev, sp<StreamHalInterface> stream, bool systemReady,
+               AudioHwDevice *hwDev, const sp<StreamHalInterface>& stream, bool systemReady,
                bool isOut);
     virtual     ~MmapThread();
 
@@ -2168,8 +2202,8 @@
                                 // Note: using mActiveTracks as no mTracks here.
                                 return ThreadBase::hasAudioSession_l(sessionId, mActiveTracks);
                             }
-    virtual     status_t    setSyncEvent(const sp<SyncEvent>& event);
-    virtual     bool        isValidSyncEvent(const sp<SyncEvent>& event) const;
+    virtual     status_t    setSyncEvent(const sp<audioflinger::SyncEvent>& event);
+    virtual     bool        isValidSyncEvent(const sp<audioflinger::SyncEvent>& event) const;
 
     virtual     void        checkSilentMode_l() {}
     virtual     void        processVolume_l() {}
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index f305aa8..d5b6a98 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -67,7 +67,7 @@
                                 pid_t creatorPid,
                                 uid_t uid,
                                 bool isOut,
-                                alloc_type alloc = ALLOC_CBLK,
+                                const alloc_type alloc = ALLOC_CBLK,
                                 track_type type = TYPE_DEFAULT,
                                 audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
                                 std::string metricsId = {});
@@ -84,7 +84,7 @@
             pid_t       creatorPid() const { return mCreatorPid; }
 
             audio_port_handle_t portId() const { return mPortId; }
-    virtual status_t    setSyncEvent(const sp<SyncEvent>& event);
+    virtual status_t    setSyncEvent(const sp<audioflinger::SyncEvent>& event);
 
             sp<IMemory> getBuffers() const { return mBufferMemory; }
             void*       buffer() const { return mBuffer; }
@@ -124,7 +124,7 @@
              * This may be called without the thread lock.
              */
     virtual double      bufferLatencyMs() const {
-                            return mServerProxy->framesReadySafe() * 1000 / sampleRate();
+                            return mServerProxy->framesReadySafe() * 1000. / sampleRate();
                         }
 
             /** returns whether the track supports server latency computation.
@@ -352,6 +352,7 @@
                                     // this could be a track type if needed later
 
     const wp<ThreadBase> mThread;
+    const alloc_type     mAllocType;
     /*const*/ sp<Client> mClient;   // see explanation at ~TrackBase() why not const
     sp<IMemory>         mCblkMemory;
     audio_track_cblk_t* mCblk;
@@ -375,7 +376,7 @@
 
     const audio_session_t mSessionId;
     uid_t               mUid;
-    Vector < sp<SyncEvent> >mSyncEvents;
+    std::list<sp<audioflinger::SyncEvent>> mSyncEvents;
     const bool          mIsOut;
     sp<ServerProxy>     mServerProxy;
     const int           mId;
@@ -432,7 +433,7 @@
 {
 public:
     using Timeout = std::optional<std::chrono::nanoseconds>;
-                        PatchTrackBase(sp<ClientProxy> proxy, const ThreadBase& thread,
+                        PatchTrackBase(const sp<ClientProxy>& proxy, const ThreadBase& thread,
                                        const Timeout& timeout);
             void        setPeerTimeout(std::chrono::nanoseconds timeout);
             template <typename T>
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 1fbf720..00c88bc 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -90,12 +90,13 @@
             pid_t creatorPid,
             uid_t clientUid,
             bool isOut,
-            alloc_type alloc,
+            const alloc_type alloc,
             track_type type,
             audio_port_handle_t portId,
             std::string metricsId)
     :   RefBase(),
         mThread(thread),
+        mAllocType(alloc),
         mClient(client),
         mCblk(NULL),
         // mBuffer, mBufferSize
@@ -118,7 +119,7 @@
         mThreadIoHandle(thread ? thread->id() : AUDIO_IO_HANDLE_NONE),
         mPortId(portId),
         mIsInvalid(false),
-        mTrackMetrics(std::move(metricsId), isOut),
+        mTrackMetrics(std::move(metricsId), isOut, clientUid),
         mCreatorPid(creatorPid)
 {
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -277,6 +278,10 @@
         // relying on the automatic clear() at end of scope.
         mClient.clear();
     }
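+    // Buffers allocated locally (ALLOC_LOCAL) are owned by the track and must be freed here.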
+    if (mAllocType == ALLOC_LOCAL) {
+        free(mBuffer);
+        mBuffer = nullptr;
+    }
     // flush the binder command buffer
     IPCThreadState::self()->flushCommands();
 }
@@ -298,13 +303,14 @@
     mServerProxy->releaseBuffer(&buf);
 }
 
-status_t AudioFlinger::ThreadBase::TrackBase::setSyncEvent(const sp<SyncEvent>& event)
+status_t AudioFlinger::ThreadBase::TrackBase::setSyncEvent(
+        const sp<audioflinger::SyncEvent>& event)
 {
-    mSyncEvents.add(event);
+    mSyncEvents.emplace_back(event);
     return NO_ERROR;
 }
 
-AudioFlinger::ThreadBase::PatchTrackBase::PatchTrackBase(sp<ClientProxy> proxy,
+AudioFlinger::ThreadBase::PatchTrackBase::PatchTrackBase(const sp<ClientProxy>& proxy,
                                                          const ThreadBase& thread,
                                                          const Timeout& timeout)
     : mProxy(proxy)
@@ -1319,8 +1325,9 @@
 // must be called with thread lock held
 void AudioFlinger::PlaybackThread::Track::flushAck()
 {
-    if (!isOffloaded() && !isDirect())
+    if (!isOffloaded() && !isDirect()) {
         return;
+    }
 
     // Clear the client ring buffer so that the app can prime the buffer while paused.
     // Otherwise it might not get cleared until playback is resumed and obtainBuffer() is called.
@@ -1490,15 +1497,24 @@
     *backInserter++ = metadata;
 }
 
-void AudioFlinger::PlaybackThread::Track::setTeePatches(TeePatches teePatches) {
-    forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
-    mTeePatches = std::move(teePatches);
-    if (mState == TrackBase::ACTIVE || mState == TrackBase::RESUMING ||
-            mState == TrackBase::STOPPING_1) {
-        forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
+void AudioFlinger::PlaybackThread::Track::updateTeePatches() {
+    if (mTeePatchesToUpdate.has_value()) {
+        forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
+        mTeePatches = mTeePatchesToUpdate.value();
+        if (mState == TrackBase::ACTIVE || mState == TrackBase::RESUMING ||
+                mState == TrackBase::STOPPING_1) {
+            forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
+        }
+        mTeePatchesToUpdate.reset();
     }
 }
 
+void AudioFlinger::PlaybackThread::Track::setTeePatchesToUpdate(TeePatches teePatchesToUpdate) {
+    ALOGW_IF(mTeePatchesToUpdate.has_value(),
+             "%s, existing tee patches to update will be ignored", __func__);
+    mTeePatchesToUpdate = std::move(teePatchesToUpdate);
+}
+
 // must be called with player thread lock held
 void AudioFlinger::PlaybackThread::Track::processMuteEvent_l(const sp<
     IAudioManager>& audioManager, mute_state_t muteState)
@@ -1663,12 +1679,13 @@
 
 void AudioFlinger::PlaybackThread::Track::triggerEvents(AudioSystem::sync_event_t type)
 {
-    for (size_t i = 0; i < mSyncEvents.size();) {
-        if (mSyncEvents[i]->type() == type) {
-            mSyncEvents[i]->trigger();
-            mSyncEvents.removeAt(i);
+    for (auto it = mSyncEvents.begin(); it != mSyncEvents.end();) {
+        if ((*it)->type() == type) {
+            ALOGV("%s: triggering SyncEvent type %d", __func__, type);
+            (*it)->trigger();
+            it = mSyncEvents.erase(it);
         } else {
-            ++i;
+            ++it;
         }
     }
 }
@@ -1700,7 +1717,8 @@
     return vlr;
 }
 
-status_t AudioFlinger::PlaybackThread::Track::setSyncEvent(const sp<SyncEvent>& event)
+status_t AudioFlinger::PlaybackThread::Track::setSyncEvent(
+        const sp<audioflinger::SyncEvent>& event)
 {
     if (isTerminated() || mState == PAUSED ||
             ((framesReady() == 0) && ((mSharedBuffer != 0) ||
@@ -1851,23 +1869,23 @@
 
 //To be called with thread lock held
 bool AudioFlinger::PlaybackThread::Track::isResumePending() {
-
-    if (mState == RESUMING)
+    if (mState == RESUMING) {
         return true;
+    }
     /* Resume is pending if track was stopping before pause was called */
     if (mState == STOPPING_1 &&
-        mResumeToStopping)
+        mResumeToStopping) {
         return true;
+    }
 
     return false;
 }
 
 //To be called with thread lock held
 void AudioFlinger::PlaybackThread::Track::resumeAck() {
-
-
-    if (mState == RESUMING)
+    if (mState == RESUMING) {
         mState = ACTIVE;
+    }
 
     // Other possibility of  pending resume is stopping_1 state
     // Do not update the state from stopping as this prevents
@@ -1914,6 +1932,8 @@
         }
     }
 
+    ALOGV("%s: trackFramesReleased:%lld  sinkFramesWritten:%lld  setDrained: %d",
+        __func__, (long long)trackFramesReleased, (long long)sinkFramesWritten, drained);
     mAudioTrackServerProxy->setDrained(drained);
     // Set correction for flushed frames that are not accounted for in released.
     local.mFlushed = mAudioTrackServerProxy->framesFlushed();
@@ -2055,17 +2075,50 @@
 
 ssize_t AudioFlinger::PlaybackThread::OutputTrack::write(void* data, uint32_t frames)
 {
+    if (!mActive && frames != 0) {
+        sp<ThreadBase> thread = mThread.promote();
+        if (thread != nullptr && thread->standby()) {
+            // preload one silent buffer to trigger mixer on start()
+            ClientProxy::Buffer buf { .mFrameCount = mClientProxy->getStartThresholdInFrames() };
+            status_t status = mClientProxy->obtainBuffer(&buf);
+            if (status != NO_ERROR && status != NOT_ENOUGH_DATA && status != WOULD_BLOCK) {
+                ALOGE("%s(%d): could not obtain buffer on start", __func__, mId);
+                return 0;
+            }
+            memset(buf.mRaw, 0, buf.mFrameCount * mFrameSize);
+            mClientProxy->releaseBuffer(&buf);
+
+            (void) start();
+
+            // Wait for the HAL stream to start before sending actual audio. Doing this on each
+            // OutputTrack ensures that playback start is synchronized across all output
+            // streams. If another OutputTrack has already started, it can underrun but this is
+            // OK as only silence has been played so far and the retry count is very high on
+            // OutputTrack.
+            auto pt = static_cast<PlaybackThread *>(thread.get());
+            if (!pt->waitForHalStart()) {
+                ALOGW("%s(%d): timeout waiting for thread to exit standby", __func__, mId);
+                stop();
+                return 0;
+            }
+
+            // Enqueue the first buffer and exit so that other OutputTracks also start before
+            // write() is called again and this buffer is actually consumed.
+            Buffer firstBuffer;
+            firstBuffer.frameCount = frames;
+            firstBuffer.raw = data;
+            queueBuffer(firstBuffer);
+            return frames;
+        } else {
+            (void) start();
+        }
+    }
+
     Buffer *pInBuffer;
     Buffer inBuffer;
     inBuffer.frameCount = frames;
     inBuffer.raw = data;
-
     uint32_t waitTimeLeftMs = mSourceThread->waitTimeMs();
-
-    if (!mActive && frames != 0) {
-        (void) start();
-    }
-
     while (waitTimeLeftMs) {
         // First write pending buffers, then new data
         if (mBufferQueue.size()) {
@@ -2133,22 +2186,7 @@
     if (inBuffer.frameCount) {
         sp<ThreadBase> thread = mThread.promote();
         if (thread != 0 && !thread->standby()) {
-            if (mBufferQueue.size() < kMaxOverFlowBuffers) {
-                pInBuffer = new Buffer;
-                pInBuffer->mBuffer = malloc(inBuffer.frameCount * mFrameSize);
-                pInBuffer->frameCount = inBuffer.frameCount;
-                pInBuffer->raw = pInBuffer->mBuffer;
-                memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * mFrameSize);
-                mBufferQueue.add(pInBuffer);
-                ALOGV("%s(%d): thread %d adding overflow buffer %zu", __func__, mId,
-                        (int)mThreadIoHandle, mBufferQueue.size());
-                // audio data is consumed (stored locally); set frameCount to 0.
-                inBuffer.frameCount = 0;
-            } else {
-                ALOGW("%s(%d): thread %d no more overflow buffers",
-                        __func__, mId, (int)mThreadIoHandle);
-                // TODO: return error for this.
-            }
+            queueBuffer(inBuffer);
         }
     }
 
@@ -2161,6 +2199,29 @@
     return frames - inBuffer.frameCount;  // number of frames consumed.
 }
 
+void AudioFlinger::PlaybackThread::OutputTrack::queueBuffer(Buffer& inBuffer) {
+
+    if (mBufferQueue.size() < kMaxOverFlowBuffers) {
+        Buffer *pInBuffer = new Buffer;
+        const size_t bufferSize = inBuffer.frameCount * mFrameSize;
+        pInBuffer->mBuffer = malloc(bufferSize);
+        LOG_ALWAYS_FATAL_IF(pInBuffer->mBuffer == nullptr,
+                "%s: Unable to malloc size %zu", __func__, bufferSize);
+        pInBuffer->frameCount = inBuffer.frameCount;
+        pInBuffer->raw = pInBuffer->mBuffer;
+        memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * mFrameSize);
+        mBufferQueue.add(pInBuffer);
+        ALOGV("%s(%d): thread %d adding overflow buffer %zu", __func__, mId,
+                (int)mThreadIoHandle, mBufferQueue.size());
+        // audio data is consumed (stored locally); set frameCount to 0.
+        inBuffer.frameCount = 0;
+    } else {
+        ALOGW("%s(%d): thread %d no more overflow buffers",
+                __func__, mId, (int)mThreadIoHandle);
+        // TODO: return error for this.
+    }
+}
+
 void AudioFlinger::PlaybackThread::OutputTrack::copyMetadataTo(MetadataInserter& backInserter) const
 {
     std::lock_guard<std::mutex> lock(mTrackMetadatasMutex);
@@ -2299,7 +2360,7 @@
     buf.mFrameCount = buffer->frameCount;
     buf.mRaw = buffer->raw;
     mPeerProxy->releaseBuffer(&buf);
-    TrackBase::releaseBuffer(buffer);
+    TrackBase::releaseBuffer(buffer); // Note: this is the base class.
 }
 
 status_t AudioFlinger::PlaybackThread::PatchTrack::obtainBuffer(Proxy::Buffer* buffer,
@@ -2447,7 +2508,6 @@
                   type, portId,
                   std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD) + std::to_string(portId)),
         mOverflow(false),
-        mFramesToDrop(0),
         mResamplerBufferProvider(NULL), // initialize in case of early constructor exit
         mRecordBufferConverter(NULL),
         mFlags(flags),
@@ -2649,27 +2709,24 @@
     result.append("\n");
 }
 
-void AudioFlinger::RecordThread::RecordTrack::handleSyncStartEvent(const sp<SyncEvent>& event)
+// This is invoked by SyncEvent callback.
+void AudioFlinger::RecordThread::RecordTrack::handleSyncStartEvent(
+        const sp<audioflinger::SyncEvent>& event)
 {
-    if (event == mSyncStartEvent) {
-        ssize_t framesToDrop = 0;
-        sp<ThreadBase> threadBase = mThread.promote();
-        if (threadBase != 0) {
-            // TODO: use actual buffer filling status instead of 2 buffers when info is available
-            // from audio HAL
-            framesToDrop = threadBase->mFrameCount * 2;
-        }
-        mFramesToDrop = framesToDrop;
+    size_t framesToDrop = 0;
+    sp<ThreadBase> threadBase = mThread.promote();
+    if (threadBase != 0) {
+        // TODO: use actual buffer filling status instead of 2 buffers when info is available
+        // from audio HAL
+        framesToDrop = threadBase->mFrameCount * 2;
     }
+
+    mSynchronizedRecordState.onPlaybackFinished(event, framesToDrop);
 }
 
 void AudioFlinger::RecordThread::RecordTrack::clearSyncStartEvent()
 {
-    if (mSyncStartEvent != 0) {
-        mSyncStartEvent->cancel();
-        mSyncStartEvent.clear();
-    }
-    mFramesToDrop = 0;
+    mSynchronizedRecordState.clear();
 }
 
 void AudioFlinger::RecordThread::RecordTrack::updateTrackFrameInfo(
@@ -2913,7 +2970,7 @@
 {
     void *ptr = nullptr;
     (void)posix_memalign(&ptr, alignment, size);
-    return std::unique_ptr<void, decltype(free)*>(ptr, free);
+    return {ptr, free};
 }
 
 AudioFlinger::RecordThread::PassthruPatchRecord::PassthruPatchRecord(
diff --git a/services/audioflinger/AllocatorFactory.h b/services/audioflinger/afutils/AllocatorFactory.h
similarity index 100%
rename from services/audioflinger/AllocatorFactory.h
rename to services/audioflinger/afutils/AllocatorFactory.h
diff --git a/services/audioflinger/afutils/Android.bp b/services/audioflinger/afutils/Android.bp
new file mode 100644
index 0000000..1580b8f
--- /dev/null
+++ b/services/audioflinger/afutils/Android.bp
@@ -0,0 +1,67 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_base_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
+}
+
+// TODO(b/275642749) Reenable these warnings
+audioflinger_utils_tidy_errors = audioflinger_base_tidy_errors + [
+    "-misc-non-private-member-variables-in-classes",
+]
+
+// Eventually use common tidy defaults
+cc_defaults {
+    name: "audioflinger_utils_flags_defaults",
+    // https://clang.llvm.org/docs/UsersManual.html#command-line-options
+    // https://clang.llvm.org/docs/DiagnosticsReference.html
+    cflags: audioflinger_base_cflags,
+    // https://clang.llvm.org/extra/clang-tidy/
+    tidy: true,
+    tidy_checks: audioflinger_utils_tidy_errors,
+    tidy_checks_as_errors: audioflinger_utils_tidy_errors,
+    tidy_flags: [
+      "-format-style=file",
+    ],
+}
+
+cc_library {
+    name: "libaudioflinger_utils",
+
+    defaults: [
+        "audioflinger_utils_flags_defaults",
+        "latest_android_media_audio_common_types_cpp_shared", // PropertyUtils.cpp
+    ],
+
+    srcs: [
+        "AudioWatchdog.cpp",
+        "BufLog.cpp",
+        "NBAIO_Tee.cpp",
+        "PropertyUtils.cpp",
+        "TypedLogger.cpp",
+    ],
+
+    shared_libs: [
+        "libaudioutils",
+        "libbase",
+        "libcutils", // property_get_int32
+        "liblog",
+        "libnbaio",
+        "libnblog",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libsndfile",
+    ],
+
+    header_libs: [
+        "libaaudio_headers",  // PropertyUtils.cpp
+    ],
+
+    include_dirs: [
+        "frameworks/av/services/audioflinger",  // for configuration
+    ],
+}
diff --git a/services/audioflinger/AudioWatchdog.cpp b/services/audioflinger/afutils/AudioWatchdog.cpp
similarity index 85%
rename from services/audioflinger/AudioWatchdog.cpp
rename to services/audioflinger/afutils/AudioWatchdog.cpp
index 877e776..48a07a5 100644
--- a/services/audioflinger/AudioWatchdog.cpp
+++ b/services/audioflinger/afutils/AudioWatchdog.cpp
@@ -38,12 +38,12 @@
             mUnderruns, mLogs, buf);
 }
 
-bool AudioWatchdog::threadLoop()
+bool AudioWatchdog::threadLoop() NO_THREAD_SAFETY_ANALYSIS // unique_lock
 {
     {
-        AutoMutex _l(mMyLock);
+        std::unique_lock _l(mLock);
         if (mPaused) {
-            mMyCond.wait(mMyLock);
+            mCond.wait(_l);
             // ignore previous timestamp after resume()
             mOldTsValid = false;
             // force an immediate log on first underrun after resume()
@@ -65,7 +65,7 @@
         return true;
     }
     time_t sec = newTs.tv_sec - mOldTs.tv_sec;
-    long nsec = newTs.tv_nsec - mOldTs.tv_nsec;
+    auto nsec = newTs.tv_nsec - mOldTs.tv_nsec;
     if (nsec < 0) {
         --sec;
         nsec += 1000000000;
@@ -81,7 +81,8 @@
         }
     }
     mLogTs.tv_sec += sec;
-    if ((mLogTs.tv_nsec += nsec) >= 1000000000) {
+    mLogTs.tv_nsec += nsec;
+    if (mLogTs.tv_nsec >= 1000000000) {
         mLogTs.tv_sec++;
         mLogTs.tv_nsec -= 1000000000;
     }
@@ -89,7 +90,7 @@
         mDump->mUnderruns = ++mUnderruns;
         if (mLogTs.tv_sec >= MIN_TIME_BETWEEN_LOGS_SEC) {
             mDump->mLogs = ++mLogs;
-            mDump->mMostRecent = time(NULL);
+            mDump->mMostRecent = time(nullptr /* tloc */);
             ALOGW("Insufficient CPU for load: expected=%.1f actual=%.1f ms; underruns=%u logs=%u",
                 mPeriodNs * 1e-6, cycleNs * 1e-6, mUnderruns, mLogs);
             mLogTs.tv_sec = 0;
@@ -99,7 +100,7 @@
     struct timespec req;
     req.tv_sec = 0;
     req.tv_nsec = mPeriodNs;
-    rc = nanosleep(&req, NULL);
+    rc = nanosleep(&req, nullptr /* remaining */);
     if (!((rc == 0) || (rc == -1 && errno == EINTR))) {
         pause();
         return false;
@@ -116,22 +117,23 @@
 
 void AudioWatchdog::pause()
 {
-    AutoMutex _l(mMyLock);
+    const std::lock_guard _l(mLock);
     mPaused = true;
 }
 
 void AudioWatchdog::resume()
 {
-    AutoMutex _l(mMyLock);
+    const std::lock_guard _l(mLock);
     if (mPaused) {
         mPaused = false;
-        mMyCond.signal();
+        mCond.notify_one();
     }
 }
 
 void AudioWatchdog::setDump(AudioWatchdogDump *dump)
 {
-    mDump = dump != NULL ? dump : &mDummyDump;
+    const std::lock_guard _l(mLock);
+    mDump = dump != nullptr ? dump : &mDummyDump;
 }
 
 }   // namespace android
diff --git a/services/audioflinger/afutils/AudioWatchdog.h b/services/audioflinger/afutils/AudioWatchdog.h
new file mode 100644
index 0000000..1f5dad4
--- /dev/null
+++ b/services/audioflinger/afutils/AudioWatchdog.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// The watchdog thread runs periodically.  It has two functions:
+//   (a) verify that adequate CPU time is available, and log
+//       as soon as possible when there appears to be a CPU shortage
+//   (b) monitor the other threads [not yet implemented]
+
+#pragma once
+
+#include <mutex>
+#include <time.h>
+#include <utils/Thread.h>
+
+namespace android {
+
+// Keeps a cache of AudioWatchdog statistics that can be logged by dumpsys.
+// The usual caveats about atomicity of information apply.
+struct AudioWatchdogDump {
+    uint32_t mUnderruns = 0;    // total number of underruns
+    uint32_t mLogs = 0;         // total number of log messages
+    time_t   mMostRecent = 0;   // time of most recent log
+    void     dump(int fd);  // should only be called on a stable copy, not the original
+};
+
+class AudioWatchdog : public Thread {
+
+public:
+    explicit AudioWatchdog(unsigned periodMs = 50) : Thread(false /*canCallJava*/),
+            mPeriodNs(periodMs * 1000000), mMaxCycleNs(mPeriodNs * 2)
+        {
+            // force an immediate log on first underrun
+            mLogTs.tv_sec = MIN_TIME_BETWEEN_LOGS_SEC;
+            mLogTs.tv_nsec = 0;
+        }
+
+    // Do not call Thread::requestExitAndWait() without first calling requestExit().
+    // Thread::requestExitAndWait() is not virtual, and the implementation doesn't do enough.
+    void            requestExit() override;
+
+    // FIXME merge API and implementation with AudioTrackThread
+    void            pause();   // suspend thread from execution at next loop boundary
+    void            resume();  // allow thread to execute, if not requested to exit
+
+    // Where to store the dump, or NULL to not update
+    void            setDump(AudioWatchdogDump* dump);
+
+private:
+    bool            threadLoop() override;
+
+    static constexpr int32_t MIN_TIME_BETWEEN_LOGS_SEC = 60;
+    const uint32_t  mPeriodNs;       // nominal period
+    const uint32_t  mMaxCycleNs;     // maximum allowed time of one cycle before declaring underrun
+
+    mutable std::mutex mLock;      // Thread::mLock is private
+    std::condition_variable mCond; // Thread::mThreadExitedCondition is private
+    bool            mPaused GUARDED_BY(mLock) = false; // whether thread is currently paused
+    bool            mOldTsValid GUARDED_BY(mLock) = false;  // whether mOldTs is valid
+    struct timespec mOldTs GUARDED_BY(mLock);          // monotonic time when threadLoop last ran
+    struct timespec mLogTs GUARDED_BY(mLock);          // time since last log (ctor init).
+    uint32_t        mUnderruns GUARDED_BY(mLock) = 0;  // total number of underruns
+    uint32_t        mLogs GUARDED_BY(mLock) = 0;       // total number of logs
+
+    // where to store the dump, always non-NULL
+    AudioWatchdogDump*  mDump GUARDED_BY(mLock) = &mDummyDump;
+    AudioWatchdogDump   mDummyDump; // default area for dump in case setDump() is not called
+};
+
+}   // namespace android
+
diff --git a/services/audioflinger/BufLog.cpp b/services/audioflinger/afutils/BufLog.cpp
similarity index 77%
rename from services/audioflinger/BufLog.cpp
rename to services/audioflinger/afutils/BufLog.cpp
index 5f6aca0..508022f 100644
--- a/services/audioflinger/BufLog.cpp
+++ b/services/audioflinger/afutils/BufLog.cpp
@@ -28,12 +28,14 @@
 
 #define MIN(a, b) ((a) < (b) ? (a) : (b))
 
+namespace android {
+
 // ------------------------------
 // BufLogSingleton
 // ------------------------------
 pthread_once_t onceControl = PTHREAD_ONCE_INIT;
 
-BufLog *BufLogSingleton::mInstance = NULL;
+BufLog *BufLogSingleton::mInstance = nullptr;
 
 void BufLogSingleton::initOnce() {
     mInstance = new BufLog();
@@ -49,55 +51,39 @@
 }
 
 bool BufLogSingleton::instanceExists() {
-    return mInstance != NULL;
+    return mInstance != nullptr;
 }
 
 // ------------------------------
 // BufLog
 // ------------------------------
 
-BufLog::BufLog() {
-    memset(mStreams, 0, sizeof(mStreams));
-}
-
 BufLog::~BufLog() {
-    android::Mutex::Autolock autoLock(mLock);
-
-    for (unsigned int id = 0; id < BUFLOG_MAXSTREAMS; id++) {
-        BufLogStream *pBLStream = mStreams[id];
-        if (pBLStream != NULL) {
-            delete pBLStream ;
-            mStreams[id] = NULL;
-        }
-    }
+    reset();
 }
 
 size_t BufLog::write(int streamid, const char *tag, int format, int channels,
         int samplingRate, size_t maxBytes, const void *buf, size_t size) {
-    unsigned int id = streamid % BUFLOG_MAXSTREAMS;
-    android::Mutex::Autolock autoLock(mLock);
+    const unsigned int id = streamid % BUFLOG_MAXSTREAMS;
+    const std::lock_guard autoLock(mLock);
 
     BufLogStream *pBLStream = mStreams[id];
 
-    if (pBLStream == NULL) {
+    if (pBLStream == nullptr) {
         pBLStream = mStreams[id] = new BufLogStream(id, tag, format, channels,
                 samplingRate, maxBytes);
-        ALOG_ASSERT(pBLStream != NULL, "BufLogStream Failed to be created");
     }
 
     return pBLStream->write(buf, size);
 }
 
 void BufLog::reset() {
-    android::Mutex::Autolock autoLock(mLock);
-    ALOGV("Resetting all BufLogs");
+    const std::lock_guard autoLock(mLock);
     int count = 0;
-
-    for (unsigned int id = 0; id < BUFLOG_MAXSTREAMS; id++) {
-        BufLogStream *pBLStream = mStreams[id];
-        if (pBLStream != NULL) {
+    for (auto &pBLStream : mStreams) {
+        if (pBLStream != nullptr) {
             delete pBLStream;
-            mStreams[id] = NULL;
+            pBLStream = nullptr;
             count++;
         }
     }
@@ -115,9 +101,7 @@
         unsigned int samplingRate,
         size_t maxBytes = 0) : mId(id), mFormat(format), mChannels(channels),
                 mSamplingRate(samplingRate), mMaxBytes(maxBytes) {
-    mByteCount = 0;
-    mPaused = false;
-    if (tag != NULL) {
+    if (tag != nullptr) {
         (void)audio_utils_strlcpy(mTag, tag);
     } else {
         mTag[0] = 0;
@@ -129,7 +113,7 @@
     //timestamp
     char timeStr[16];   //size 16: format %Y%m%d%H%M%S 14 chars + string null terminator
     struct timeval tv;
-    gettimeofday(&tv, NULL);
+    gettimeofday(&tv, nullptr);
     struct tm tm;
     localtime_r(&tv.tv_sec, &tm);
     strftime(timeStr, sizeof(timeStr), "%Y%m%d%H%M%S", &tm);
@@ -139,7 +123,7 @@
     ALOGV("data output: %s", logPath);
 
     mFile = fopen(logPath, "wb");
-    if (mFile != NULL) {
+    if (mFile != nullptr) {
         ALOGV("Success creating file at: %p", mFile);
     } else {
         ALOGE("Error: could not create file BufLogStream %s", strerror(errno));
@@ -148,24 +132,24 @@
 
 void BufLogStream::closeStream_l() {
     ALOGV("Closing BufLogStream id:%d tag:%s", mId, mTag);
-    if (mFile != NULL) {
+    if (mFile != nullptr) {
         fclose(mFile);
-        mFile = NULL;
+        mFile = nullptr;
     }
 }
 
 BufLogStream::~BufLogStream() {
     ALOGV("Destroying BufLogStream id:%d tag:%s", mId, mTag);
-    android::Mutex::Autolock autoLock(mLock);
+    const std::lock_guard autoLock(mLock);
     closeStream_l();
 }
 
 size_t BufLogStream::write(const void *buf, size_t size) {
 
     size_t bytes = 0;
-    if (!mPaused && mFile != NULL) {
-        if (size > 0 && buf != NULL) {
-            android::Mutex::Autolock autoLock(mLock);
+    if (!mPaused && mFile != nullptr) {
+        if (size > 0 && buf != nullptr) {
+            const std::lock_guard autoLock(mLock);
             if (mMaxBytes > 0) {
                 size = MIN(size, mMaxBytes - mByteCount);
             }
@@ -185,12 +169,14 @@
 }
 
 bool BufLogStream::setPause(bool pause) {
-    bool old = mPaused;
+    const bool old = mPaused;
     mPaused = pause;
     return old;
 }
 
 void BufLogStream::finalize() {
-    android::Mutex::Autolock autoLock(mLock);
+    const std::lock_guard autoLock(mLock);
     closeStream_l();
 }
+
+} // namespace android
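BufLog::write() above maps any stream id into one of BUFLOG_MAXSTREAMS slots with a modulo and lazily creates the stream under mLock on first use. A minimal sketch of that slot-table pattern, using placeholder types rather than the real BufLogStream:

    // Sketch only: fixed-size slot table with lazy, lock-guarded creation.
    #include <array>
    #include <cstddef>
    #include <memory>
    #include <mutex>
    #include <string>

    class StreamTable {
    public:
        static constexpr std::size_t kMaxStreams = 16;

        // Returns the stream for 'id', creating it the first time it is referenced.
        std::string& streamFor(int id) {
            const std::size_t slot = static_cast<std::size_t>(id) % kMaxStreams;  // wrap like BufLog::write()
            const std::lock_guard<std::mutex> lock(mLock);
            if (mStreams[slot] == nullptr) {
                mStreams[slot] = std::make_unique<std::string>();
            }
            return *mStreams[slot];
        }

    private:
        std::mutex mLock;
        std::array<std::unique_ptr<std::string>, kMaxStreams> mStreams{};  // zero-initialized slots
    };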
diff --git a/services/audioflinger/BufLog.h b/services/audioflinger/afutils/BufLog.h
similarity index 92%
rename from services/audioflinger/BufLog.h
rename to services/audioflinger/afutils/BufLog.h
index 1b402f4..a58d073 100644
--- a/services/audioflinger/BufLog.h
+++ b/services/audioflinger/afutils/BufLog.h
@@ -14,8 +14,7 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_AUDIO_BUFLOG_H
-#define ANDROID_AUDIO_BUFLOG_H
+#pragma once
 
 /*
  * BUFLOG creates up to BUFLOG_MAXSTREAMS simultaneous streams [0:15] of audio buffer data
@@ -99,17 +98,18 @@
     BufLogSingleton::instance()->reset(); } } while (0)
 #endif
 
-
+#include <mutex>
 #include <stdint.h>
 #include <stdio.h>
 #include <sys/types.h>
-#include <utils/Mutex.h>
 
 //BufLog configuration
 #define BUFLOGSTREAM_MAX_TAGSIZE    32
 #define BUFLOG_BASE_PATH            "/data/misc/audioserver"
 #define BUFLOG_MAX_PATH_SIZE        300
 
+namespace android {
+
 class BufLogStream {
 public:
     BufLogStream(unsigned int id,
@@ -135,26 +135,24 @@
     void            finalize();
 
 private:
-    bool                mPaused;
     const unsigned int  mId;
-    char                mTag[BUFLOGSTREAM_MAX_TAGSIZE + 1];
     const unsigned int  mFormat;
     const unsigned int  mChannels;
     const unsigned int  mSamplingRate;
     const size_t        mMaxBytes;
-    size_t              mByteCount;
-    FILE                *mFile;
-    mutable android::Mutex mLock;
+    char                mTag[BUFLOGSTREAM_MAX_TAGSIZE + 1]; // const, set in ctor.
+
+    mutable std::mutex  mLock;
+    bool                mPaused = false;
+    size_t              mByteCount = 0;
+    FILE                *mFile; // set in ctor
 
     void            closeStream_l();
 };
 
-
 class BufLog {
 public:
-    BufLog();
     ~BufLog();
-    BufLog(BufLog const&) {};
 
     //  streamid:      int [0:BUFLOG_MAXSTREAMS-1]   buffer id.
     //                  If a buffer doesn't exist, it is created the first time it is referenced
@@ -181,9 +179,9 @@
     void            reset();
 
 protected:
-    static const unsigned int BUFLOG_MAXSTREAMS = 16;
-    BufLogStream    *mStreams[BUFLOG_MAXSTREAMS];
-    mutable android::Mutex mLock;
+    static constexpr size_t BUFLOG_MAXSTREAMS = 16;
+    mutable std::mutex mLock;
+    BufLogStream *mStreams[BUFLOG_MAXSTREAMS]{};
 };
 
 class BufLogSingleton {
@@ -196,4 +194,4 @@
     static BufLog   *mInstance;
 };
 
-#endif //ANDROID_AUDIO_BUFLOG_H
+} // namespace android
diff --git a/services/audioflinger/NBAIO_Tee.cpp b/services/audioflinger/afutils/NBAIO_Tee.cpp
similarity index 90%
rename from services/audioflinger/NBAIO_Tee.cpp
rename to services/audioflinger/afutils/NBAIO_Tee.cpp
index 53083d5..49057ce 100644
--- a/services/audioflinger/NBAIO_Tee.cpp
+++ b/services/audioflinger/afutils/NBAIO_Tee.cpp
@@ -91,8 +91,8 @@
 
     /** returns filename of created audio file, else empty string on failure. */
     std::string create(
-            std::function<ssize_t /* frames_read */
-                        (void * /* buffer */, size_t /* size_in_frames */)> reader,
+            const std::function<ssize_t /* frames_read */
+                        (void * /* buffer */, size_t /* size_in_frames */)>& reader,
             uint32_t sampleRate,
             uint32_t channelCount,
             audio_format_t format,
@@ -109,8 +109,8 @@
 
     /** creates an audio file from a reader functor passed in. */
     status_t createInternal(
-            std::function<ssize_t /* frames_read */
-                        (void * /* buffer */, size_t /* size_in_frames */)> reader,
+            const std::function<ssize_t /* frames_read */
+                        (void * /* buffer */, size_t /* size_in_frames */)>& reader,
             uint32_t sampleRate,
             uint32_t channelCount,
             audio_format_t format,
@@ -123,7 +123,7 @@
     std::string generateFilename(const std::string &suffix) const {
         char fileTime[sizeof("YYYYmmdd_HHMMSS_\0")];
         struct timeval tv;
-        gettimeofday(&tv, NULL);
+        gettimeofday(&tv, nullptr /* struct timezone */);
         struct tm tm;
         localtime_r(&tv.tv_sec, &tm);
         LOG_ALWAYS_FATAL_IF(strftime(fileTime, sizeof(fileTime), "%Y%m%d_%H%M%S_", &tm) == 0,
@@ -159,30 +159,29 @@
     // yet another ThreadPool implementation.
     class ThreadPool {
     public:
-        ThreadPool(size_t size)
+        explicit ThreadPool(size_t size)
             : mThreadPoolSize(size)
         { }
 
         /** launches task "name" with associated function "func".
             if the thread pool is exhausted, the task runs synchronously on the calling thread */
-        status_t launch(const std::string &name, std::function<status_t()> func);
+        status_t launch(const std::string &name, const std::function<status_t()>& func);
 
     private:
+        const size_t mThreadPoolSize;
         std::mutex mLock;
         std::list<std::pair<
-                std::string, std::future<status_t>>> mFutures; // GUARDED_BY(mLock)
-
-        const size_t mThreadPoolSize;
+                std::string, std::future<status_t>>> mFutures; // GUARDED_BY(mLock);
     } mThreadPool;
 
-    const std::string mPrefix;
-    std::mutex mLock;
-    std::string mDirectory;         // GUARDED_BY(mLock)
-    std::deque<std::string> mFiles; // GUARDED_BY(mLock)  sorted list of files by creation time
-
     static constexpr size_t FRAMES_PER_READ = 1024;
     static constexpr size_t MAX_FILES_READ = 1024;
     static constexpr size_t MAX_FILES_KEEP = 32;
+
+    const std::string mPrefix;
+    std::mutex mLock;
+    std::string mDirectory;         // GUARDED_BY(mLock);
+    std::deque<std::string> mFiles; // GUARDED_BY(mLock); // sorted list of files by creation time
 };
 
 /* static */
@@ -200,7 +199,7 @@
 
     const NBAIO_Format format = source->format();
     bool firstRead = true;
-    std::string filename = audioFileHandler.create(
+    const std::string filename = audioFileHandler.create(
             // this functor must not hold references to stack
             [firstRead, sinkSource] (void *buffer, size_t frames) mutable {
                     auto &source = sinkSource.second;
@@ -230,14 +229,16 @@
         Pipe *pipe = new Pipe(frames, format);
         size_t numCounterOffers = 0;
         const NBAIO_Format offers[1] = {format};
-        ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers);
+        ssize_t index = pipe->negotiate(
+                offers, 1 /* numOffers */, nullptr /* counterOffers */, numCounterOffers);
         if (index != 0) {
             ALOGW("pipe failure to negotiate: %zd", index);
             goto exit;
         }
         PipeReader *pipeReader = new PipeReader(*pipe);
         numCounterOffers = 0;
-        index = pipeReader->negotiate(offers, 1, NULL, numCounterOffers);
+        index = pipeReader->negotiate(
+                offers, 1 /* numOffers */, nullptr /* counterOffers */, numCounterOffers);
         if (index != 0) {
             ALOGW("pipeReader failure to negotiate: %zd", index);
             goto exit;
@@ -251,14 +252,14 @@
 }
 
 std::string AudioFileHandler::create(
-        std::function<ssize_t /* frames_read */
-                    (void * /* buffer */, size_t /* size_in_frames */)> reader,
+        const std::function<ssize_t /* frames_read */
+                    (void * /* buffer */, size_t /* size_in_frames */)>& reader,
         uint32_t sampleRate,
         uint32_t channelCount,
         audio_format_t format,
         const std::string &suffix)
 {
-    const std::string filename = generateFilename(suffix);
+    std::string filename = generateFilename(suffix);
 
     if (mThreadPool.launch(std::string("create ") + filename,
             [=]() { return createInternal(reader, sampleRate, channelCount, format, filename); })
@@ -312,7 +313,7 @@
     std::sort(files.begin() + toRemove, files.end());
 
     {
-        std::lock_guard<std::mutex> _l(mLock);
+        const std::lock_guard<std::mutex> _l(mLock);
 
         mDirectory = directory;
         mFiles = std::move(files);
@@ -330,13 +331,13 @@
     std::vector<std::string> filesToRemove;
     std::string dir;
     {
-        std::lock_guard<std::mutex> _l(mLock);
+        const std::lock_guard<std::mutex> _l(mLock);
 
         if (!isDirectoryValid(mDirectory)) return NO_INIT;
 
         dir = mDirectory;
         if (mFiles.size() > MAX_FILES_KEEP) {
-            size_t toRemove = mFiles.size() - MAX_FILES_KEEP;
+            const size_t toRemove = mFiles.size() - MAX_FILES_KEEP;
 
             // use move and erase to efficiently transfer std::string
             std::move(mFiles.begin(),
@@ -346,7 +347,7 @@
         }
     }
 
-    std::string dirp = dir + "/";
+    const std::string dirp = dir + "/";
     // remove files outside of lock for better concurrency.
     for (const auto &file : filesToRemove) {
         (void)unlink((dirp + file).c_str());
@@ -360,14 +361,14 @@
 }
 
 status_t AudioFileHandler::ThreadPool::launch(
-        const std::string &name, std::function<status_t()> func)
+        const std::string &name, const std::function<status_t()>& func)
 {
     if (mThreadPoolSize > 1) {
-        std::lock_guard<std::mutex> _l(mLock);
+        const std::lock_guard<std::mutex> _l(mLock);
         if (mFutures.size() >= mThreadPoolSize) {
             for (auto it = mFutures.begin(); it != mFutures.end();) {
                 const std::string &filename = it->first;
-                std::future<status_t> &future = it->second;
+                const std::future<status_t> &future = it->second;
                 if (!future.valid() ||
                         future.wait_for(std::chrono::seconds(0)) == std::future_status::ready) {
                     ALOGV("%s: future %s ready", __func__, filename.c_str());
@@ -389,8 +390,8 @@
 }
 
 status_t AudioFileHandler::createInternal(
-        std::function<ssize_t /* frames_read */
-                    (void * /* buffer */, size_t /* size_in_frames */)> reader,
+        const std::function<ssize_t /* frames_read */
+                    (void * /* buffer */, size_t /* size_in_frames */)>& reader,
         uint32_t sampleRate,
         uint32_t channelCount,
         audio_format_t format,
@@ -429,9 +430,9 @@
     }
 
     std::string directory;
-    status_t status = clean(&directory);
+    const status_t status = clean(&directory);
     if (status != NO_ERROR) return status;
-    std::string dirPrefix = directory + "/";
+    const std::string dirPrefix = directory + "/";
 
     const std::string path = dirPrefix + filename;
 
@@ -503,7 +504,7 @@
 
     // Success: add our name to managed files.
     {
-        std::lock_guard<std::mutex> _l(mLock);
+        const std::lock_guard<std::mutex> _l(mLock);
         // weak synchronization - only update mFiles if the directory hasn't changed.
         if (mDirectory == directory) {
             mFiles.emplace_back(filename);  // add to the end to preserve sort.
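ThreadPool::launch() above first reaps futures that have already completed and, when the pool is still full (or its size is one), runs the task synchronously on the caller. A rough, self-contained sketch of that bounded-launch idea; the class and names below are illustrative, not the AudioFileHandler API:

    #include <chrono>
    #include <cstddef>
    #include <functional>
    #include <future>
    #include <list>
    #include <mutex>

    class BoundedLauncher {
    public:
        explicit BoundedLauncher(std::size_t size) : mSize(size) {}

        int launch(const std::function<int()>& func) {
            if (mSize > 1) {
                const std::lock_guard<std::mutex> lock(mLock);
                // Reap tasks that have already finished.
                for (auto it = mFutures.begin(); it != mFutures.end();) {
                    if (!it->valid() ||
                            it->wait_for(std::chrono::seconds(0)) == std::future_status::ready) {
                        it = mFutures.erase(it);
                    } else {
                        ++it;
                    }
                }
                if (mFutures.size() < mSize) {
                    mFutures.emplace_back(std::async(std::launch::async, func));
                    return 0;  // queued asynchronously
                }
            }
            return func();  // pool exhausted (or size <= 1): run on the calling thread
        }

    private:
        const std::size_t mSize;
        std::mutex mLock;
        std::list<std::future<int>> mFutures;
    };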
diff --git a/services/audioflinger/NBAIO_Tee.h b/services/audioflinger/afutils/NBAIO_Tee.h
similarity index 96%
rename from services/audioflinger/NBAIO_Tee.h
rename to services/audioflinger/afutils/NBAIO_Tee.h
index fed8cc8..17b6175 100644
--- a/services/audioflinger/NBAIO_Tee.h
+++ b/services/audioflinger/afutils/NBAIO_Tee.h
@@ -15,8 +15,8 @@
  */
 
 // Enabled with TEE_SINK in Configuration.h
-#ifndef ANDROID_NBAIO_TEE_H
-#define ANDROID_NBAIO_TEE_H
+
+#pragma once
 
 #ifdef TEE_SINK
 
@@ -216,7 +216,7 @@
             // Note: as mentioned in NBAIO_Tee::set(), don't call set() while write() is
             // ongoing.
             if (enabled) {
-                std::lock_guard<std::mutex> _l(mLock);
+                const std::lock_guard<std::mutex> _l(mLock);
                 mFlags = flags;
                 mFormat = format; // could get this from the Sink.
                 mFrames = frames;
@@ -228,7 +228,7 @@
         }
 
         void setId(const std::string &id) {
-            std::lock_guard<std::mutex> _l(mLock);
+            const std::lock_guard<std::mutex> _l(mLock);
             mId = id;
         }
 
@@ -237,7 +237,7 @@
             std::string suffix;
             NBAIO_SinkSource sinkSource;
             {
-                std::lock_guard<std::mutex> _l(mLock);
+                const std::lock_guard<std::mutex> _l(mLock);
                 suffix = mId + reason;
                 sinkSource = mSinkSource;
             }
@@ -281,13 +281,13 @@
     class RunningTees {
     public:
         void add(const std::shared_ptr<NBAIO_TeeImpl> &tee) {
-            std::lock_guard<std::mutex> _l(mLock);
+            const std::lock_guard<std::mutex> _l(mLock);
             ALOGW_IF(!mTees.emplace(tee).second,
                     "%s: %p already exists in mTees", __func__, tee.get());
         }
 
         void remove(const std::shared_ptr<NBAIO_TeeImpl> &tee) {
-            std::lock_guard<std::mutex> _l(mLock);
+            const std::lock_guard<std::mutex> _l(mLock);
             ALOGW_IF(mTees.erase(tee) != 1,
                     "%s: %p doesn't exist in mTees", __func__, tee.get());
         }
@@ -295,7 +295,7 @@
         void dump(int fd, const std::string &reason) {
             std::vector<std::shared_ptr<NBAIO_TeeImpl>> tees; // safe snapshot of tees
             {
-                std::lock_guard<std::mutex> _l(mLock);
+                const std::lock_guard<std::mutex> _l(mLock);
                 tees.insert(tees.end(), mTees.begin(), mTees.end());
             }
             for (const auto &tee : tees) {
@@ -323,4 +323,3 @@
 } // namespace android
 
 #endif // TEE_SINK
-#endif // !ANDROID_NBAIO_TEE_H
diff --git a/services/audioflinger/PropertyUtils.cpp b/services/audioflinger/afutils/PropertyUtils.cpp
similarity index 100%
rename from services/audioflinger/PropertyUtils.cpp
rename to services/audioflinger/afutils/PropertyUtils.cpp
diff --git a/services/audioflinger/PropertyUtils.h b/services/audioflinger/afutils/PropertyUtils.h
similarity index 100%
rename from services/audioflinger/PropertyUtils.h
rename to services/audioflinger/afutils/PropertyUtils.h
diff --git a/services/audioflinger/afutils/TypedLogger.cpp b/services/audioflinger/afutils/TypedLogger.cpp
new file mode 100644
index 0000000..7c546a5
--- /dev/null
+++ b/services/audioflinger/afutils/TypedLogger.cpp
@@ -0,0 +1,46 @@
+/*
+ *
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioFlinger"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <pthread.h>
+#include "TypedLogger.h"
+
+namespace android::aflog {
+
+// External linkage access of thread local storage outside of this shared library
+// causes orphaned memory allocations.  This occurs in the implementation of
+// __emutls_get_address(), see b/284657986.
+//
+// We only expose a thread local storage getter and setter here, not the
+// actual thread local variable.
+
+namespace {
+thread_local NBLog::Writer *tlNBLogWriter;
+} // namespace
+
+NBLog::Writer *getThreadWriter() {
+    return tlNBLogWriter;
+}
+
+void setThreadWriter(NBLog::Writer *writer) {
+    tlNBLogWriter = writer;
+}
+
+} // namespace android::aflog
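The accessor pair above keeps the thread_local writer private to this translation unit so no other shared library touches the TLS variable directly, which is the point of the b/284657986 fix. A self-contained sketch of the same pattern with illustrative names (not the AudioFlinger API):

    #include <cstdio>
    #include <thread>

    namespace demo {
    namespace {
    thread_local const char* tlTag = nullptr;  // internal linkage only; never exported
    }  // namespace

    const char* getThreadTag() { return tlTag; }
    void setThreadTag(const char* tag) { tlTag = tag; }
    }  // namespace demo

    int main() {
        std::thread worker([] {
            demo::setThreadTag("worker");  // installed once per thread
            std::printf("worker sees: %s\n", demo::getThreadTag());
        });
        worker.join();
        // The main thread never called setThreadTag(), so its slot is still unset.
        std::printf("main sees: %s\n", demo::getThreadTag() ? demo::getThreadTag() : "(unset)");
        return 0;
    }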
diff --git a/services/audioflinger/TypedLogger.h b/services/audioflinger/afutils/TypedLogger.h
similarity index 78%
rename from services/audioflinger/TypedLogger.h
rename to services/audioflinger/afutils/TypedLogger.h
index feb71e3..8c2d239 100644
--- a/services/audioflinger/TypedLogger.h
+++ b/services/audioflinger/afutils/TypedLogger.h
@@ -15,8 +15,7 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_TYPED_LOGGER_H
-#define ANDROID_TYPED_LOGGER_H
+#pragma once
 
 // This is the client API for the typed logger.
 
@@ -85,56 +84,56 @@
 //      slower than nullptr check when logging is enabled at compile-time and disabled at runtime.
 
 // Write formatted entry to log
-#define LOGT(fmt, ...) do { NBLog::Writer *x = tlNBLogWriter; if (x != nullptr) \
+#define LOGT(fmt, ...) do { NBLog::Writer *x = aflog::getThreadWriter(); if (x != nullptr) \
                                 x->logFormat((fmt), hash(__FILE__, __LINE__), ##__VA_ARGS__); } \
                                 while (0)
 
 // Write histogram timestamp entry
-#define LOG_HIST_TS() do { NBLog::Writer *x = tlNBLogWriter; if (x != nullptr) \
+#define LOG_HIST_TS() do { NBLog::Writer *x = aflog::getThreadWriter(); if (x != nullptr) \
         x->logEventHistTs(NBLog::EVENT_HISTOGRAM_ENTRY_TS, hash(__FILE__, __LINE__)); } while(0)
 
 // Record that audio was turned on/off
-#define LOG_AUDIO_STATE() do { NBLog::Writer *x = tlNBLogWriter; if (x != nullptr) \
+#define LOG_AUDIO_STATE() do { NBLog::Writer *x = aflog::getThreadWriter(); if (x != nullptr) \
         x->logEventHistTs(NBLog::EVENT_AUDIO_STATE, hash(__FILE__, __LINE__)); } while(0)
 
 // Log the difference between frames presented by HAL and frames written to HAL output sink,
 // divided by the sample rate. Parameter ms is of type double.
-#define LOG_LATENCY(ms) do { NBLog::Writer *x = tlNBLogWriter; if (x != nullptr) \
+#define LOG_LATENCY(ms) do { NBLog::Writer *x = aflog::getThreadWriter(); if (x != nullptr) \
         x->log<NBLog::EVENT_LATENCY>(ms); } while (0)
 
 // Record thread overrun event nanosecond timestamp. Parameter ns is an int64_t.
-#define LOG_OVERRUN(ns) do { NBLog::Writer *x = tlNBLogWriter; if (x != nullptr) \
+#define LOG_OVERRUN(ns) do { NBLog::Writer *x = aflog::getThreadWriter(); if (x != nullptr) \
         x->log<NBLog::EVENT_OVERRUN>(ns); } while (0)
 
 // Record thread info. This currently includes type, frameCount, and sampleRate.
 // Parameter type is thread_info_t as defined in NBLog.h.
-#define LOG_THREAD_INFO(info) do { NBLog::Writer *x = tlNBLogWriter; \
+#define LOG_THREAD_INFO(info) do { NBLog::Writer *x = aflog::getThreadWriter(); \
         if (x != nullptr) x->log<NBLog::EVENT_THREAD_INFO>(info); } while (0)
 
-#define LOG_THREAD_PARAMS(params) do {NBLog::Writer *x = tlNBLogWriter; \
+#define LOG_THREAD_PARAMS(params) do { NBLog::Writer *x = aflog::getThreadWriter(); \
         if (x != nullptr) x->log<NBLog::EVENT_THREAD_PARAMS>(params); } while (0)
 
 // Record thread underrun event nanosecond timestamp. Parameter ns is an int64_t.
-#define LOG_UNDERRUN(ns) do { NBLog::Writer *x = tlNBLogWriter; if (x != nullptr) \
+#define LOG_UNDERRUN(ns) do { NBLog::Writer *x = aflog::getThreadWriter(); if (x != nullptr) \
         x->log<NBLog::EVENT_UNDERRUN>(ns); } while (0)
 
 // Record thread warmup time in milliseconds. Parameter ms is of type double.
-#define LOG_WARMUP_TIME(ms) do { NBLog::Writer *x = tlNBLogWriter; if (x != nullptr) \
+#define LOG_WARMUP_TIME(ms) do { \
+        NBLog::Writer *x = aflog::getThreadWriter(); if (x != nullptr) \
         x->log<NBLog::EVENT_WARMUP_TIME>(ms); } while (0)
 
 // Record a typed entry that represents a thread's work time in nanoseconds.
 // Parameter ns should be of type uint32_t.
-#define LOG_WORK_TIME(ns) do { NBLog::Writer *x = tlNBLogWriter; if (x != nullptr) \
+#define LOG_WORK_TIME(ns) do { NBLog::Writer *x = aflog::getThreadWriter(); if (x != nullptr) \
         x->log<NBLog::EVENT_WORK_TIME>(ns); } while (0)
 
-namespace android {
-extern "C" {
+namespace android::aflog {
 // TODO consider adding a thread_local NBLog::Writer tlStubNBLogWriter and then
-// initialize below tlNBLogWriter to &tlStubNBLogWriter to remove the need to
+// initialize the thread-local behind setThreadWriter() to &tlStubNBLogWriter to remove the need to
 // check for nullptr every time. Also reduces the need to add a new logging macro above
 // each time we want to log a new type.
-extern thread_local NBLog::Writer *tlNBLogWriter;
-}
-} // namespace android
 
-#endif // ANDROID_TYPED_LOGGER_H
+NBLog::Writer *getThreadWriter();
+void setThreadWriter(NBLog::Writer *writer);
+
+} // namespace android::aflog
diff --git a/services/audioflinger/datapath/Android.bp b/services/audioflinger/datapath/Android.bp
new file mode 100644
index 0000000..58f0422
--- /dev/null
+++ b/services/audioflinger/datapath/Android.bp
@@ -0,0 +1,64 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_base_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
+}
+
+audioflinger_datapath_tidy_errors = audioflinger_base_tidy_errors + [
+    "modernize-avoid-c-arrays",
+    "modernize-deprecated-headers",
+    "modernize-pass-by-value",
+    "modernize-use-auto",
+    "modernize-use-nodiscard",
+
+    // TODO(b/275642749) Reenable these warnings
+    "-misc-non-private-member-variables-in-classes",
+]
+
+// Eventually use common tidy defaults
+cc_defaults {
+    name: "audioflinger_datapath_flags_defaults",
+    // https://clang.llvm.org/docs/UsersManual.html#command-line-options
+    // https://clang.llvm.org/docs/DiagnosticsReference.html
+    cflags: audioflinger_base_cflags,
+    // https://clang.llvm.org/extra/clang-tidy/
+    tidy: true,
+    tidy_checks: audioflinger_datapath_tidy_errors,
+    tidy_checks_as_errors: audioflinger_datapath_tidy_errors,
+    tidy_flags: [
+      "-format-style=file",
+    ],
+}
+
+cc_library {
+    name: "libaudioflinger_datapath",
+
+    defaults: [
+        "audioflinger_datapath_flags_defaults",
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+
+    srcs: [
+        "AudioHwDevice.cpp",
+        "AudioStreamOut.cpp",
+        "SpdifStreamOut.cpp",
+    ],
+
+    header_libs: [
+        "libaudiohal_headers",
+        "liberror_headers",
+    ],
+
+    shared_libs: [
+        "audioclient-types-aidl-cpp",
+        "av-types-aidl-cpp",
+        "libaudiospdif",
+        "libaudioutils",
+        "libbase",
+        "liblog",
+        "libutils", // refbase
+    ],
+}
diff --git a/services/audioflinger/AudioHwDevice.cpp b/services/audioflinger/datapath/AudioHwDevice.cpp
similarity index 94%
rename from services/audioflinger/AudioHwDevice.cpp
rename to services/audioflinger/datapath/AudioHwDevice.cpp
index dee6161..9ff316c 100644
--- a/services/audioflinger/AudioHwDevice.cpp
+++ b/services/audioflinger/datapath/AudioHwDevice.cpp
@@ -44,7 +44,7 @@
 {
 
     struct audio_config originalConfig = *config;
-    AudioStreamOut *outputStream = new AudioStreamOut(this, flags);
+    auto outputStream = new AudioStreamOut(this, flags);
 
     // Try to open the HAL first using the current format.
     ALOGV("openOutputStream(), try "
@@ -57,7 +57,7 @@
 
     if (status != NO_ERROR) {
         delete outputStream;
-        outputStream = NULL;
+        outputStream = nullptr;
 
         // FIXME Look at any modification to the config.
         // The HAL might modify the config to suggest a wrapped format.
@@ -71,7 +71,7 @@
             status);
 
         // If the data is encoded then try again using wrapped PCM.
-        bool wrapperNeeded = !audio_has_proportional_frames(originalConfig.format)
+        const bool wrapperNeeded = !audio_has_proportional_frames(originalConfig.format)
                 && ((flags & AUDIO_OUTPUT_FLAG_DIRECT) != 0)
                 && ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0);
 
@@ -83,7 +83,7 @@
                     ALOGE("ERROR - openOutputStream(), SPDIF open returned %d",
                         status);
                     delete outputStream;
-                    outputStream = NULL;
+                    outputStream = nullptr;
                 }
             } else {
                 ALOGE("ERROR - openOutputStream(), SPDIFEncoder does not support format 0x%08x",
diff --git a/services/audioflinger/AudioHwDevice.h b/services/audioflinger/datapath/AudioHwDevice.h
similarity index 79%
rename from services/audioflinger/AudioHwDevice.h
rename to services/audioflinger/datapath/AudioHwDevice.h
index 1749f3f..f9cb80e 100644
--- a/services/audioflinger/AudioHwDevice.h
+++ b/services/audioflinger/datapath/AudioHwDevice.h
@@ -46,7 +46,7 @@
 
     AudioHwDevice(audio_module_handle_t handle,
                   const char *moduleName,
-                  sp<DeviceHalInterface> hwDevice,
+                  const sp<DeviceHalInterface>& hwDevice,
                   Flags flags)
         : mHandle(handle)
         , mModuleName(strdup(moduleName))
@@ -54,25 +54,25 @@
         , mFlags(flags) { }
     virtual ~AudioHwDevice() { free((void *)mModuleName); }
 
-    bool canSetMasterVolume() const {
+    [[nodiscard]] bool canSetMasterVolume() const {
         return (0 != (mFlags & AHWD_CAN_SET_MASTER_VOLUME));
     }
 
-    bool canSetMasterMute() const {
+    [[nodiscard]] bool canSetMasterMute() const {
         return (0 != (mFlags & AHWD_CAN_SET_MASTER_MUTE));
     }
 
-    bool isInsert() const {
+    [[nodiscard]] bool isInsert() const {
         return (0 != (mFlags & AHWD_IS_INSERT));
     }
 
-    bool supportsBluetoothVariableLatency() const {
+    [[nodiscard]] bool supportsBluetoothVariableLatency() const {
         return (0 != (mFlags & AHWD_SUPPORTS_BT_LATENCY_MODES));
     }
 
-    audio_module_handle_t handle() const { return mHandle; }
-    const char *moduleName() const { return mModuleName; }
-    sp<DeviceHalInterface> hwDevice() const { return mHwDevice; }
+    [[nodiscard]] audio_module_handle_t handle() const { return mHandle; }
+    [[nodiscard]] const char *moduleName() const { return mModuleName; }
+    [[nodiscard]] sp<DeviceHalInterface> hwDevice() const { return mHwDevice; }
 
     /** This method creates and opens the audio hardware output stream.
      * The "address" parameter qualifies the "devices" audio device type if needed.
@@ -89,17 +89,17 @@
             struct audio_config *config,
             const char *address);
 
-    bool supportsAudioPatches() const;
+    [[nodiscard]] bool supportsAudioPatches() const;
 
-    status_t getAudioPort(struct audio_port_v7 *port) const;
+    [[nodiscard]] status_t getAudioPort(struct audio_port_v7 *port) const;
 
-    status_t getMmapPolicyInfos(
+    [[nodiscard]] status_t getMmapPolicyInfos(
             media::audio::common::AudioMMapPolicyType policyType,
             std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) const;
 
-    int32_t getAAudioMixerBurstCount() const;
+    [[nodiscard]] int32_t getAAudioMixerBurstCount() const;
 
-    int32_t getAAudioHardwareBurstMinUsec() const;
+    [[nodiscard]] int32_t getAAudioHardwareBurstMinUsec() const;
 
 private:
     const audio_module_handle_t mHandle;
diff --git a/services/audioflinger/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
similarity index 86%
rename from services/audioflinger/AudioStreamOut.cpp
rename to services/audioflinger/datapath/AudioStreamOut.cpp
index 65ec0e8..6fa82e5 100644
--- a/services/audioflinger/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -31,21 +31,13 @@
 // ----------------------------------------------------------------------------
 AudioStreamOut::AudioStreamOut(AudioHwDevice *dev, audio_output_flags_t flags)
         : audioHwDev(dev)
-        , stream(NULL)
         , flags(flags)
-        , mFramesWritten(0)
-        , mFramesWrittenAtStandby(0)
-        , mRenderPosition(0)
-        , mRateMultiplier(1)
-        , mHalFormatHasProportionalFrames(false)
-        , mHalFrameSize(0)
-        , mExpectRetrograde(false)
 {
 }
 
-AudioStreamOut::~AudioStreamOut()
-{
-}
+// This must be defined here together with the HAL includes above and
+// not solely in the header.
+AudioStreamOut::~AudioStreamOut() = default;
 
 sp<DeviceHalInterface> AudioStreamOut::hwDev() const
 {
@@ -54,12 +46,12 @@
 
 status_t AudioStreamOut::getRenderPosition(uint64_t *frames)
 {
-    if (stream == 0) {
+    if (stream == nullptr) {
         return NO_INIT;
     }
 
     uint32_t halPosition = 0;
-    status_t status = stream->getRenderPosition(&halPosition);
+    const status_t status = stream->getRenderPosition(&halPosition);
     if (status != NO_ERROR) {
         return status;
     }
@@ -67,7 +59,7 @@
     // Maintain a 64-bit render position using the 32-bit result from the HAL.
     // This delta calculation relies on the arithmetic overflow behavior
     // of integers. For example (100 - 0xFFFFFFF0) = 116.
-    const uint32_t truncatedPosition = (uint32_t)mRenderPosition;
+    const auto truncatedPosition = (uint32_t)mRenderPosition;
     int32_t deltaHalPosition; // initialization not needed, overwritten by __builtin_sub_overflow()
     (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
 
@@ -87,7 +79,7 @@
 status_t AudioStreamOut::getRenderPosition(uint32_t *frames)
 {
     uint64_t position64 = 0;
-    status_t status = getRenderPosition(&position64);
+    const status_t status = getRenderPosition(&position64);
     if (status == NO_ERROR) {
         *frames = (uint32_t)position64;
     }
@@ -96,12 +88,12 @@
 
 status_t AudioStreamOut::getPresentationPosition(uint64_t *frames, struct timespec *timestamp)
 {
-    if (stream == 0) {
+    if (stream == nullptr) {
         return NO_INIT;
     }
 
     uint64_t halPosition = 0;
-    status_t status = stream->getPresentationPosition(&halPosition, timestamp);
+    const status_t status = stream->getPresentationPosition(&halPosition, timestamp);
     if (status != NO_ERROR) {
         return status;
     }
@@ -109,7 +101,7 @@
     // Adjust for standby using HAL rate frames.
     // Only apply this correction if the HAL is getting PCM frames.
     if (mHalFormatHasProportionalFrames) {
-        uint64_t adjustedPosition = (halPosition <= mFramesWrittenAtStandby) ?
+        const uint64_t adjustedPosition = (halPosition <= mFramesWrittenAtStandby) ?
                 0 : (halPosition - mFramesWrittenAtStandby);
         // Scale from HAL sample rate to application rate.
         *frames = adjustedPosition / mRateMultiplier;
@@ -129,7 +121,7 @@
 {
     sp<StreamOutHalInterface> outStream;
 
-    audio_output_flags_t customFlags = (config->format == AUDIO_FORMAT_IEC61937)
+    const audio_output_flags_t customFlags = (config->format == AUDIO_FORMAT_IEC61937)
                 ? (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO)
                 : flags;
 
@@ -195,7 +187,7 @@
     mExpectRetrograde = false;
     mFramesWritten = 0;
     mFramesWrittenAtStandby = 0;
-    status_t result = stream->flush();
+    const status_t result = stream->flush();
     return result != INVALID_OPERATION ? result : NO_ERROR;
 }
 
@@ -210,7 +202,7 @@
 ssize_t AudioStreamOut::write(const void *buffer, size_t numBytes)
 {
     size_t bytesWritten;
-    status_t result = stream->write(buffer, numBytes, &bytesWritten);
+    const status_t result = stream->write(buffer, numBytes, &bytesWritten);
     if (result == OK && bytesWritten > 0 && mHalFrameSize > 0) {
         mFramesWritten += bytesWritten / mHalFrameSize;
     }
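getRenderPosition() above extends the HAL's 32-bit counter to 64 bits by subtracting the truncated low word with wrap-around semantics, as in the comment's example (100 - 0xFFFFFFF0) = 116. A standalone sketch that reproduces the arithmetic; it is not the real class, and retrograde handling is reduced to "ignore negative deltas":

    #include <cstdint>
    #include <cstdio>

    uint64_t extend32To64(uint64_t position64, uint32_t halPosition) {
        const auto truncated = static_cast<uint32_t>(position64);  // low 32 bits of the running count
        int32_t delta;  // overwritten by __builtin_sub_overflow()
        (void)__builtin_sub_overflow(halPosition, truncated, &delta);
        return delta > 0 ? position64 + delta : position64;
    }

    int main() {
        // (100 - 0xFFFFFFF0) wraps to 116, so the 64-bit count advances by 116 frames.
        std::printf("%llu\n",
                (unsigned long long)extend32To64(0xFFFFFFF0ULL, 100u));  // prints 4294967396
        return 0;
    }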
diff --git a/services/audioflinger/AudioStreamOut.h b/services/audioflinger/datapath/AudioStreamOut.h
similarity index 84%
rename from services/audioflinger/AudioStreamOut.h
rename to services/audioflinger/datapath/AudioStreamOut.h
index 82fe238..ce00f8c 100644
--- a/services/audioflinger/AudioStreamOut.h
+++ b/services/audioflinger/datapath/AudioStreamOut.h
@@ -41,7 +41,7 @@
     sp<StreamOutHalInterface> stream;
     const audio_output_flags_t flags;
 
-    sp<DeviceHalInterface> hwDev() const;
+    [[nodiscard]] sp<DeviceHalInterface> hwDev() const;
 
     AudioStreamOut(AudioHwDevice *dev, audio_output_flags_t flags);
 
@@ -78,7 +78,7 @@
     /**
      * @return frame size from the perspective of the application and the AudioFlinger.
      */
-    virtual size_t getFrameSize() const { return mHalFrameSize; }
+    [[nodiscard]] virtual size_t getFrameSize() const { return mHalFrameSize; }
 
     /**
      * @return audio stream configuration: channel mask, format, sample rate:
@@ -88,7 +88,7 @@
      *   - sample rate from the perspective of the application and the AudioFlinger,
      *     The HAL may be running at a higher sample rate if, for example, playing wrapped EAC3.
      */
-    virtual audio_config_base_t getAudioProperties() const;
+    [[nodiscard]] virtual audio_config_base_t getAudioProperties() const;
 
     virtual status_t flush();
     virtual status_t standby();
@@ -101,13 +101,13 @@
     virtual void presentationComplete() { mExpectRetrograde = true; }
 
 protected:
-    uint64_t             mFramesWritten; // reset by flush
-    uint64_t             mFramesWrittenAtStandby;
-    uint64_t             mRenderPosition; // reset by flush, standby, or presentation complete
-    int                  mRateMultiplier;
-    bool                 mHalFormatHasProportionalFrames;
-    size_t               mHalFrameSize;
-    bool                 mExpectRetrograde; // see presentationComplete
+    uint64_t             mFramesWritten = 0; // reset by flush
+    uint64_t             mFramesWrittenAtStandby = 0;
+    uint64_t             mRenderPosition = 0; // reset by flush, standby, or presentation complete
+    int                  mRateMultiplier = 1;
+    bool                 mHalFormatHasProportionalFrames = false;
+    size_t               mHalFrameSize = 0;
+    bool                 mExpectRetrograde = false; // see presentationComplete
 };
 
 } // namespace android
diff --git a/services/audioflinger/SpdifStreamOut.cpp b/services/audioflinger/datapath/SpdifStreamOut.cpp
similarity index 95%
rename from services/audioflinger/SpdifStreamOut.cpp
rename to services/audioflinger/datapath/SpdifStreamOut.cpp
index 0ce5681..43e9c0c 100644
--- a/services/audioflinger/SpdifStreamOut.cpp
+++ b/services/audioflinger/datapath/SpdifStreamOut.cpp
@@ -37,9 +37,6 @@
         // Tell the HAL that the data will be compressed audio wrapped in a data burst.
         : AudioStreamOut(dev, (audio_output_flags_t) (flags | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO))
         , mSpdifEncoder(this, format)
-        , mApplicationFormat(AUDIO_FORMAT_DEFAULT)
-        , mApplicationSampleRate(0)
-        , mApplicationChannelMask(AUDIO_CHANNEL_NONE)
 {
 }
 
@@ -91,7 +88,7 @@
             customConfig.format,
             customConfig.channel_mask);
 
-    status_t status = AudioStreamOut::open(
+    const status_t status = AudioStreamOut::open(
             handle,
             devices,
             &customConfig,
diff --git a/services/audioflinger/SpdifStreamOut.h b/services/audioflinger/datapath/SpdifStreamOut.h
similarity index 78%
rename from services/audioflinger/SpdifStreamOut.h
rename to services/audioflinger/datapath/SpdifStreamOut.h
index fc9bb6e..c8dc89f 100644
--- a/services/audioflinger/SpdifStreamOut.h
+++ b/services/audioflinger/datapath/SpdifStreamOut.h
@@ -39,13 +39,13 @@
     SpdifStreamOut(AudioHwDevice *dev, audio_output_flags_t flags,
             audio_format_t format);
 
-    virtual ~SpdifStreamOut() { }
+    ~SpdifStreamOut() override = default;
 
-    virtual status_t open(
+    status_t open(
             audio_io_handle_t handle,
             audio_devices_t devices,
             struct audio_config *config,
-            const char *address);
+            const char *address) override;
 
     /**
     * Write audio buffer to driver. Returns number of bytes written, or a
@@ -60,32 +60,34 @@
     * callback function must be called when more space is available in the
     * driver/hardware buffer.
     */
-    virtual ssize_t write(const void* buffer, size_t bytes);
+    ssize_t write(const void* buffer, size_t bytes) override;
 
     /**
      * @return frame size from the perspective of the application and the AudioFlinger.
      */
-    virtual size_t getFrameSize() const { return sizeof(int8_t); }
+    [[nodiscard]] size_t getFrameSize() const override { return sizeof(int8_t); }
 
     /**
      * @return format from the perspective of the application and the AudioFlinger.
      */
-    virtual audio_format_t getFormat() const { return mApplicationFormat; }
+    [[nodiscard]] virtual audio_format_t getFormat() const { return mApplicationFormat; }
 
     /**
      * The HAL may be running at a higher sample rate if, for example, playing wrapped EAC3.
      * @return sample rate from the perspective of the application and the AudioFlinger.
      */
-    virtual uint32_t getSampleRate() const { return mApplicationSampleRate; }
+    [[nodiscard]] virtual uint32_t getSampleRate() const { return mApplicationSampleRate; }
 
     /**
      * The HAL is in stereo mode when playing multi-channel compressed audio over HDMI.
      * @return channel mask from the perspective of the application and the AudioFlinger.
      */
-    virtual audio_channel_mask_t getChannelMask() const { return mApplicationChannelMask; }
+    [[nodiscard]] virtual audio_channel_mask_t getChannelMask() const {
+        return mApplicationChannelMask;
+    }
 
-    virtual status_t flush();
-    virtual status_t standby();
+    status_t flush() override;
+    status_t standby() override;
 
 private:
 
@@ -98,7 +100,7 @@
         {
         }
 
-        virtual ssize_t writeOutput(const void* buffer, size_t bytes)
+        ssize_t writeOutput(const void* buffer, size_t bytes) override
         {
             return mSpdifStreamOut->writeDataBurst(buffer, bytes);
         }
@@ -107,9 +109,9 @@
     };
 
     MySPDIFEncoder       mSpdifEncoder;
-    audio_format_t       mApplicationFormat;
-    uint32_t             mApplicationSampleRate;
-    audio_channel_mask_t mApplicationChannelMask;
+    audio_format_t       mApplicationFormat = AUDIO_FORMAT_DEFAULT;
+    uint32_t             mApplicationSampleRate = 0;
+    audio_channel_mask_t mApplicationChannelMask = AUDIO_CHANNEL_NONE;
 
     ssize_t  writeDataBurst(const void* data, size_t bytes);
     ssize_t  writeInternal(const void* buffer, size_t bytes);
diff --git a/services/audioflinger/ThreadMetrics.h b/services/audioflinger/datapath/ThreadMetrics.h
similarity index 100%
rename from services/audioflinger/ThreadMetrics.h
rename to services/audioflinger/datapath/ThreadMetrics.h
diff --git a/services/audioflinger/TrackMetrics.h b/services/audioflinger/datapath/TrackMetrics.h
similarity index 84%
rename from services/audioflinger/TrackMetrics.h
rename to services/audioflinger/datapath/TrackMetrics.h
index 6fc70d6..f3425df 100644
--- a/services/audioflinger/TrackMetrics.h
+++ b/services/audioflinger/datapath/TrackMetrics.h
@@ -17,6 +17,9 @@
 #ifndef ANDROID_AUDIO_TRACKMETRICS_H
 #define ANDROID_AUDIO_TRACKMETRICS_H
 
+#include <binder/IActivityManager.h>
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
 #include <mutex>
 
 namespace android {
@@ -38,10 +41,13 @@
  * We currently deliver metrics based on an AudioIntervalGroup.
  */
 class TrackMetrics final {
+
+
 public:
-    TrackMetrics(std::string metricsId, bool isOut)
+    TrackMetrics(std::string metricsId, bool isOut, int clientUid)
         : mMetricsId(std::move(metricsId))
         , mIsOut(isOut)
+        , mUid(clientUid)
         {}  // we don't log a constructor item, we wait for more info in logConstructor().
 
     ~TrackMetrics() {
@@ -64,6 +70,18 @@
                     AMEDIAMETRICS_PROP_EVENT_VALUE_BEGINAUDIOINTERVALGROUP, devices.c_str());
         }
         ++mIntervalCount;
+        const auto& mActivityManager = getActivityManager();
+        if (mActivityManager) {
+            if (mIsOut) {
+                mActivityManager->logFgsApiBegin(AUDIO_API,
+                    mUid,
+                    IPCThreadState::self()->getCallingPid());
+            } else {
+                mActivityManager->logFgsApiBegin(MICROPHONE_API,
+                    mUid,
+                    IPCThreadState::self()->getCallingPid());
+            }
+        }
     }
 
     void logConstructor(pid_t creatorPid, uid_t creatorUid, int32_t internalTrackId,
@@ -93,6 +111,18 @@
             logVolume_l(mVolume); // flush out the last volume.
             mLastVolumeChangeTimeNs = 0;
         }
+        const auto& mActivityManager = getActivityManager();
+        if (mActivityManager) {
+            if (mIsOut) {
+                mActivityManager->logFgsApiEnd(AUDIO_API,
+                    mUid,
+                    IPCThreadState::self()->getCallingPid());
+            } else {
+                mActivityManager->logFgsApiEnd(MICROPHONE_API,
+                    mUid,
+                    IPCThreadState::self()->getCallingPid());
+            }
+        }
     }
 
     void logInvalidate() const {
@@ -113,7 +143,8 @@
         mDeviceStartupMs.add(startupMs);
     }
 
-    void updateMinMaxVolume(int64_t durationNs, double deviceVolume) {
+    void updateMinMaxVolume_l(int64_t durationNs, double deviceVolume)
+            REQUIRES(mLock) {
         if (deviceVolume > mMaxVolume) {
             mMaxVolume = deviceVolume;
             mMaxVolumeDurationNs = durationNs;
@@ -165,7 +196,7 @@
             mDeviceTimeNs += durationNs;
             mCumulativeTimeNs += durationNs;
         }
-        updateMinMaxVolume(durationNs, mVolume); // always update.
+        updateMinMaxVolume_l(durationNs, mVolume); // always update.
         mVolume = volume;
         mLastVolumeChangeTimeNs = timeNs;
     }
@@ -221,9 +252,25 @@
         // do not reset mUnderrunCount - it keeps continuously running for tracks.
     }
 
+    // Meyer's singleton is thread-safe.
+    static const sp<IActivityManager>& getActivityManager() {
+        static const auto activityManager = []() -> sp<IActivityManager> {
+            const sp<IServiceManager> sm(defaultServiceManager());
+            if (sm != nullptr) {
+                 return interface_cast<IActivityManager>(sm->checkService(String16("activity")));
+            }
+            return nullptr;
+        }();
+        return activityManager;
+    }
+
     const std::string mMetricsId;
     const bool        mIsOut;  // if true, then a playback track, otherwise used for record.
 
+    static constexpr int AUDIO_API = 5;
+    static constexpr int MICROPHONE_API = 6;
+    const int         mUid;
+
     mutable           std::mutex mLock;
 
     // Devices in the interval group.
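getActivityManager() above is a Meyer's singleton: the function-local static is initialized exactly once, thread-safely, on first use, and a failed lookup stays cached as nullptr for the life of the process. A minimal sketch of the same idiom with placeholder types:

    #include <memory>
    #include <string>

    struct Service { std::string name; };

    // Stand-in for an expensive, possibly failing service lookup.
    std::shared_ptr<Service> lookupService() {
        return std::make_shared<Service>(Service{"activity"});
    }

    const std::shared_ptr<Service>& getService() {
        // The lambda runs once, on first call, under the compiler-generated guard;
        // a nullptr result is cached just like a successful one.
        static const auto service = []() -> std::shared_ptr<Service> {
            return lookupService();
        }();
        return service;
    }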
diff --git a/services/audioflinger/fastpath/Android.bp b/services/audioflinger/fastpath/Android.bp
new file mode 100644
index 0000000..84a580f
--- /dev/null
+++ b/services/audioflinger/fastpath/Android.bp
@@ -0,0 +1,75 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_base_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
+}
+
+// TODO(b/275642749) Reenable these warnings
+fastpath_tidy_errors = audioflinger_base_tidy_errors + [
+    "-misc-non-private-member-variables-in-classes",
+    "-performance-no-int-to-ptr",
+]
+
+// Eventually use common tidy defaults
+cc_defaults {
+    name: "fastpath_flags_defaults",
+    // https://clang.llvm.org/docs/UsersManual.html#command-line-options
+    // https://clang.llvm.org/docs/DiagnosticsReference.html
+    cflags: audioflinger_base_cflags,
+    // https://clang.llvm.org/extra/clang-tidy/
+    tidy: true,
+    tidy_checks: fastpath_tidy_errors,
+    tidy_checks_as_errors: fastpath_tidy_errors,
+    tidy_flags: [
+      "-format-style=file",
+    ],
+}
+
+cc_library_shared {
+    name: "libaudioflinger_fastpath",
+
+    defaults: [
+        "fastpath_flags_defaults",
+    ],
+
+    srcs: [
+        "FastCapture.cpp",
+        "FastCaptureDumpState.cpp",
+        "FastCaptureState.cpp",
+        "FastMixer.cpp",
+        "FastMixerDumpState.cpp",
+        "FastMixerState.cpp",
+        "FastThread.cpp",
+        "FastThreadDumpState.cpp",
+        "FastThreadState.cpp",
+        "StateQueue.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/services/audioflinger", // for Configuration
+    ],
+
+    shared_libs: [
+        "libaudioflinger_utils", // NBAIO_Tee
+        "libaudioprocessing",
+        "libaudioutils",
+        "libcutils",
+        "liblog",
+        "libnbaio",
+        "libnblog", // legacy NBLog that can be removed.
+        "libutils",
+    ],
+
+    header_libs: [
+        "libaudiohal_headers",
+        "libmedia_headers",
+    ],
+
+    sanitize: {
+        integer_overflow: true,
+    },
+
+}
diff --git a/services/audioflinger/AutoPark.h b/services/audioflinger/fastpath/AutoPark.h
similarity index 94%
rename from services/audioflinger/AutoPark.h
rename to services/audioflinger/fastpath/AutoPark.h
index 9ac7b65..d71a305 100644
--- a/services/audioflinger/AutoPark.h
+++ b/services/audioflinger/fastpath/AutoPark.h
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+#pragma once
+
 namespace android {
 
 // T is FastMixer or FastCapture
@@ -23,7 +25,6 @@
     // Park the specific FastThread, which can be nullptr, in hot idle if not currently idling
     explicit AutoPark(const sp<T>& fastThread) : mFastThread(fastThread)
     {
-        mPreviousCommand = FastThreadState::HOT_IDLE;
         if (fastThread != nullptr) {
             auto sq = mFastThread->sq();
             FastThreadState *state = sq->begin();
@@ -55,7 +56,7 @@
 private:
     const sp<T>                 mFastThread;
     // if !&IDLE, holds the FastThread state to restore after new parameters processed
-    FastThreadState::Command    mPreviousCommand;
+    FastThreadState::Command    mPreviousCommand = FastThreadState::HOT_IDLE;
 };  // class AutoPark
 
-}   // namespace
+}   // namespace android
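AutoPark above records the previous FastThread command, forces HOT_IDLE while parameters are changed, and restores the saved command when it goes out of scope; the HOT_IDLE default for mPreviousCommand means "nothing to restore". A generic sketch of that save/park/restore RAII idiom, with illustrative types rather than the state-queue API:

    enum class Command { HotIdle, Run };

    struct Worker {
        Command command = Command::Run;
    };

    class ScopedPark {
    public:
        explicit ScopedPark(Worker* worker) : mWorker(worker) {
            if (mWorker != nullptr && mWorker->command != Command::HotIdle) {
                mPrevious = mWorker->command;         // remember what to restore
                mWorker->command = Command::HotIdle;  // park while parameters change
            }
        }
        ~ScopedPark() {
            if (mWorker != nullptr && mPrevious != Command::HotIdle) {
                mWorker->command = mPrevious;         // restore on scope exit
            }
        }

    private:
        Worker* const mWorker;
        Command mPrevious = Command::HotIdle;  // HotIdle doubles as "nothing to restore"
    };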
diff --git a/services/audioflinger/FastCapture.cpp b/services/audioflinger/fastpath/FastCapture.cpp
similarity index 85%
rename from services/audioflinger/FastCapture.cpp
rename to services/audioflinger/fastpath/FastCapture.cpp
index 2963202..288036d 100644
--- a/services/audioflinger/FastCapture.cpp
+++ b/services/audioflinger/fastpath/FastCapture.cpp
@@ -30,24 +30,16 @@
 
 namespace android {
 
-/*static*/ const FastCaptureState FastCapture::sInitial;
+/*static*/ const FastCaptureState FastCapture::sInitial{};
 
-FastCapture::FastCapture() : FastThread("cycleC_ms", "loadC_us"),
-    mInputSource(NULL), mInputSourceGen(0), mPipeSink(NULL), mPipeSinkGen(0),
-    mReadBuffer(NULL), mReadBufferState(-1), mFormat(Format_Invalid), mSampleRate(0),
-    // mDummyDumpState
-    mTotalNativeFramesRead(0)
+FastCapture::FastCapture() : FastThread("cycleC_ms", "loadC_us")
 {
+    // base class initialization
     mPrevious = &sInitial;
     mCurrent = &sInitial;
-
     mDummyDumpState = &mDummyFastCaptureDumpState;
 }
 
-FastCapture::~FastCapture()
-{
-}
-
 FastCaptureStateQueue* FastCapture::sq()
 {
     return &mSQ;
@@ -95,11 +87,11 @@
     bool eitherChanged = false;
 
     // check for change in input HAL configuration
-    NBAIO_Format previousFormat = mFormat;
+    const NBAIO_Format previousFormat = mFormat;
     if (current->mInputSourceGen != mInputSourceGen) {
         mInputSource = current->mInputSource;
         mInputSourceGen = current->mInputSourceGen;
-        if (mInputSource == NULL) {
+        if (mInputSource == nullptr) {
             mFormat = Format_Invalid;
             mSampleRate = 0;
         } else {
@@ -122,19 +114,19 @@
     }
 
     // input source and pipe sink must be compatible
-    if (eitherChanged && mInputSource != NULL && mPipeSink != NULL) {
+    if (eitherChanged && mInputSource != nullptr && mPipeSink != nullptr) {
         ALOG_ASSERT(Format_isEqual(mFormat, mPipeSink->format()));
     }
 
     if ((!Format_isEqual(mFormat, previousFormat)) || (frameCount != previous->mFrameCount)) {
         // FIXME to avoid priority inversion, don't free here
         free(mReadBuffer);
-        mReadBuffer = NULL;
+        mReadBuffer = nullptr;
         if (frameCount > 0 && mSampleRate > 0) {
             // FIXME new may block for unbounded time at internal mutex of the heap
             //       implementation; it would be better to have normal capture thread allocate for
             //       us to avoid blocking here and to prevent possible priority inversion
-            size_t bufferSize = frameCount * Format_frameSize(mFormat);
+            const size_t bufferSize = frameCount * Format_frameSize(mFormat);
             (void)posix_memalign(&mReadBuffer, 32, bufferSize);
             memset(mReadBuffer, 0, bufferSize); // if posix_memalign fails, will segv here.
             mPeriodNs = (frameCount * 1000000000LL) / mSampleRate;      // 1.00
@@ -166,9 +158,9 @@
     AudioBufferProvider* fastPatchRecordBufferProvider = current->mFastPatchRecordBufferProvider;
     AudioBufferProvider::Buffer patchBuffer;
 
-    if (fastPatchRecordBufferProvider != 0) {
+    if (fastPatchRecordBufferProvider != nullptr) {
         patchBuffer.frameCount = ~0;
-        status_t status = fastPatchRecordBufferProvider->getNextBuffer(&patchBuffer);
+        const status_t status = fastPatchRecordBufferProvider->getNextBuffer(&patchBuffer);
         if (status != NO_ERROR) {
             frameCount = 0;
         } else if (patchBuffer.frameCount < frameCount) {
@@ -179,11 +171,11 @@
     }
 
     if ((command & FastCaptureState::READ) /*&& isWarm*/) {
-        ALOG_ASSERT(mInputSource != NULL);
-        ALOG_ASSERT(mReadBuffer != NULL);
+        ALOG_ASSERT(mInputSource != nullptr);
+        ALOG_ASSERT(mReadBuffer != nullptr);
         dumpState->mReadSequence++;
         ATRACE_BEGIN("read");
-        ssize_t framesRead = mInputSource->read(mReadBuffer, frameCount);
+        const ssize_t framesRead = mInputSource->read(mReadBuffer, frameCount);
         ATRACE_END();
         dumpState->mReadSequence++;
         if (framesRead >= 0) {
@@ -201,8 +193,8 @@
     }
 
     if (command & FastCaptureState::WRITE) {
-        ALOG_ASSERT(mPipeSink != NULL);
-        ALOG_ASSERT(mReadBuffer != NULL);
+        ALOG_ASSERT(mPipeSink != nullptr);
+        ALOG_ASSERT(mReadBuffer != nullptr);
         if (mReadBufferState < 0) {
             memset(mReadBuffer, 0, frameCount * Format_frameSize(mFormat));
             mReadBufferState = frameCount;
@@ -211,23 +203,23 @@
             if (current->mSilenceCapture) {
                 memset(mReadBuffer, 0, mReadBufferState * Format_frameSize(mFormat));
             }
-            ssize_t framesWritten = mPipeSink->write(mReadBuffer, mReadBufferState);
+            const ssize_t framesWritten = mPipeSink->write(mReadBuffer, mReadBufferState);
             audio_track_cblk_t* cblk = current->mCblk;
-            if (fastPatchRecordBufferProvider != 0) {
+            if (fastPatchRecordBufferProvider != nullptr) {
                 // This indicates the fast track is a patch record, update the cblk by
                 // calling releaseBuffer().
                 memcpy_by_audio_format(patchBuffer.raw, current->mFastPatchRecordFormat,
                         mReadBuffer, mFormat.mFormat, framesWritten * mFormat.mChannelCount);
                 patchBuffer.frameCount = framesWritten;
                 fastPatchRecordBufferProvider->releaseBuffer(&patchBuffer);
-            } else if (cblk != NULL && framesWritten > 0) {
+            } else if (cblk != nullptr && framesWritten > 0) {
                 // FIXME This supports at most one fast capture client.
                 //       To handle multiple clients this could be converted to an array,
                 //       or with a lot more work the control block could be shared by all clients.
-                int32_t rear = cblk->u.mStreaming.mRear;
+                const int32_t rear = cblk->u.mStreaming.mRear;
                 android_atomic_release_store(framesWritten + rear, &cblk->u.mStreaming.mRear);
                 cblk->mServer += framesWritten;
-                int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
+                const int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
                 if (!(old & CBLK_FUTEX_WAKE)) {
                     // client is never in server process, so don't use FUTEX_WAKE_PRIVATE
                     (void) syscall(__NR_futex, &cblk->mFutex, FUTEX_WAKE, 1);
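The capture write path above publishes new frames by advancing the control block's rear with a release store and only issues the futex wake when the wake bit was not already set. A simplified, self-contained sketch of that producer-side handoff, using C++20 atomic wait/notify in place of the raw futex; this illustrates the ordering, not the real cblk protocol:

    #include <atomic>
    #include <cstdint>

    struct ControlBlock {
        std::atomic<int32_t> rear{0};   // frames made available to the client
        std::atomic<int32_t> futex{0};  // low bit doubles as the wake flag
    };

    constexpr int32_t kWakeBit = 1;

    void publishFrames(ControlBlock& cblk, int32_t framesWritten) {
        const int32_t rear = cblk.rear.load(std::memory_order_relaxed);
        // Release store: audio data written earlier becomes visible before the new rear.
        cblk.rear.store(rear + framesWritten, std::memory_order_release);
        // Set the wake bit; only notify if it was not already set, mirroring the
        // android_atomic_or() + futex(FUTEX_WAKE) sequence above.
        const int32_t old = cblk.futex.fetch_or(kWakeBit, std::memory_order_acq_rel);
        if (!(old & kWakeBit)) {
            cblk.futex.notify_one();  // C++20 stand-in for the futex wake
        }
    }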
diff --git a/services/audioflinger/fastpath/FastCapture.h b/services/audioflinger/fastpath/FastCapture.h
new file mode 100644
index 0000000..b565216
--- /dev/null
+++ b/services/audioflinger/fastpath/FastCapture.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "FastThread.h"
+#include "StateQueue.h"
+#include "FastCaptureState.h"
+#include "FastCaptureDumpState.h"
+
+namespace android {
+
+using FastCaptureStateQueue = StateQueue<FastCaptureState>;
+
+class FastCapture : public FastThread {
+
+public:
+            FastCapture();
+
+            FastCaptureStateQueue*  sq();
+
+private:
+            FastCaptureStateQueue   mSQ;
+
+    // callouts
+    const FastThreadState *poll() override;
+    void setNBLogWriter(NBLog::Writer *logWriter) override;
+    void onIdle() override;
+    void onExit() override;
+    bool isSubClassCommand(FastThreadState::Command command) override;
+    void onStateChange() override;
+    void onWork() override;
+
+    static const FastCaptureState sInitial;
+
+    FastCaptureState    mPreIdle;   // copy of state before we went into idle
+    // FIXME by renaming, could pull up many of these to FastThread
+    NBAIO_Source*       mInputSource = nullptr;
+    int                 mInputSourceGen = 0;
+    NBAIO_Sink*         mPipeSink = nullptr;
+    int                 mPipeSinkGen = 0;
+    void*               mReadBuffer = nullptr;
+    ssize_t             mReadBufferState = -1;  // number of initialized frames in readBuffer,
+                                                // or -1 to clear
+    NBAIO_Format        mFormat = Format_Invalid;
+    unsigned            mSampleRate = 0;
+    FastCaptureDumpState mDummyFastCaptureDumpState;
+    uint32_t            mTotalNativeFramesRead = 0; // copied to dumpState->mFramesRead
+
+};  // class FastCapture
+
+}   // namespace android
diff --git a/services/audioflinger/FastCaptureDumpState.cpp b/services/audioflinger/fastpath/FastCaptureDumpState.cpp
similarity index 81%
rename from services/audioflinger/FastCaptureDumpState.cpp
rename to services/audioflinger/fastpath/FastCaptureDumpState.cpp
index b8b3866..fe7ea16 100644
--- a/services/audioflinger/FastCaptureDumpState.cpp
+++ b/services/audioflinger/fastpath/FastCaptureDumpState.cpp
@@ -24,24 +24,15 @@
 
 namespace android {
 
-FastCaptureDumpState::FastCaptureDumpState() : FastThreadDumpState(),
-    mReadSequence(0), mFramesRead(0), mReadErrors(0), mSampleRate(0), mFrameCount(0)
-{
-}
-
-FastCaptureDumpState::~FastCaptureDumpState()
-{
-}
-
 void FastCaptureDumpState::dump(int fd) const
 {
     if (mCommand == FastCaptureState::INITIAL) {
         dprintf(fd, "  FastCapture not initialized\n");
         return;
     }
-    double measuredWarmupMs = (mMeasuredWarmupTs.tv_sec * 1000.0) +
+    const double measuredWarmupMs = (mMeasuredWarmupTs.tv_sec * 1000.0) +
             (mMeasuredWarmupTs.tv_nsec / 1000000.0);
-    double periodSec = (double) mFrameCount / mSampleRate;
+    const double periodSec = (double) mFrameCount / mSampleRate;
     dprintf(fd, "  FastCapture command=%s readSequence=%u framesRead=%u\n"
                 "              readErrors=%u sampleRate=%u frameCount=%zu\n"
                 "              measuredWarmup=%.3g ms, warmupCycles=%u period=%.2f ms\n"
@@ -51,4 +42,4 @@
                 periodSec * 1e3, mSilenced ? "true" : "false");
 }
 
-}   // android
+}  // namespace android
diff --git a/services/audioflinger/FastCaptureDumpState.h b/services/audioflinger/fastpath/FastCaptureDumpState.h
similarity index 66%
rename from services/audioflinger/FastCaptureDumpState.h
rename to services/audioflinger/fastpath/FastCaptureDumpState.h
index a1b8706..3dc8a9b 100644
--- a/services/audioflinger/FastCaptureDumpState.h
+++ b/services/audioflinger/fastpath/FastCaptureDumpState.h
@@ -14,30 +14,28 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_AUDIO_FAST_CAPTURE_DUMP_STATE_H
-#define ANDROID_AUDIO_FAST_CAPTURE_DUMP_STATE_H
+#pragma once
 
 #include <stdint.h>
+#include <type_traits>
 #include "Configuration.h"
 #include "FastThreadDumpState.h"
 
 namespace android {
 
 struct FastCaptureDumpState : FastThreadDumpState {
-    FastCaptureDumpState();
-    /*virtual*/ ~FastCaptureDumpState();
-
     void dump(int fd) const;    // should only be called on a stable copy, not the original
 
     // FIXME by renaming, could pull up many of these to FastThreadDumpState
-    uint32_t mReadSequence;     // incremented before and after each read()
-    uint32_t mFramesRead;       // total number of frames read successfully
-    uint32_t mReadErrors;       // total number of read() errors
-    uint32_t mSampleRate;
-    size_t   mFrameCount;
+    uint32_t mReadSequence = 0;  // incremented before and after each read()
+    uint32_t mFramesRead = 0;    // total number of frames read successfully
+    uint32_t mReadErrors = 0;    // total number of read() errors
+    uint32_t mSampleRate = 0;
+    size_t   mFrameCount = 0;
     bool     mSilenced = false; // capture is silenced
 };
 
-}   // android
+// No virtuals.
+static_assert(!std::is_polymorphic_v<FastCaptureDumpState>);
 
-#endif  // ANDROID_AUDIO_FAST_CAPTURE_DUMP_STATE_H
+}  // namespace android
diff --git a/services/audioflinger/FastCaptureState.cpp b/services/audioflinger/fastpath/FastCaptureState.cpp
similarity index 79%
rename from services/audioflinger/FastCaptureState.cpp
rename to services/audioflinger/fastpath/FastCaptureState.cpp
index c4d5e45..77c0c4c 100644
--- a/services/audioflinger/FastCaptureState.cpp
+++ b/services/audioflinger/fastpath/FastCaptureState.cpp
@@ -18,20 +18,11 @@
 
 namespace android {
 
-FastCaptureState::FastCaptureState() : FastThreadState(),
-    mInputSource(NULL), mInputSourceGen(0), mPipeSink(NULL), mPipeSinkGen(0), mFrameCount(0)
-{
-}
-
-FastCaptureState::~FastCaptureState()
-{
-}
-
 // static
 const char *FastCaptureState::commandToString(Command command)
 {
     const char *str = FastThreadState::commandToString(command);
-    if (str != NULL) {
+    if (str != nullptr) {
         return str;
     }
     switch (command) {
@@ -39,7 +30,7 @@
     case FastCaptureState::WRITE:       return "WRITE";
     case FastCaptureState::READ_WRITE:  return "READ_WRITE";
     }
-    LOG_ALWAYS_FATAL("%s", __func__);
+    LOG_ALWAYS_FATAL("%s: command %d invalid", __func__, (int) command);
 }
 
-}   // android
+}  // namespace android
diff --git a/services/audioflinger/FastCaptureState.h b/services/audioflinger/fastpath/FastCaptureState.h
similarity index 76%
rename from services/audioflinger/FastCaptureState.h
rename to services/audioflinger/fastpath/FastCaptureState.h
index f949275..0f4126e 100644
--- a/services/audioflinger/FastCaptureState.h
+++ b/services/audioflinger/fastpath/FastCaptureState.h
@@ -14,9 +14,9 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_AUDIO_FAST_CAPTURE_STATE_H
-#define ANDROID_AUDIO_FAST_CAPTURE_STATE_H
+#pragma once
 
+#include <type_traits>
 #include <media/nbaio/NBAIO.h>
 #include <media/AudioBufferProvider.h>
 #include "FastThreadState.h"
@@ -26,16 +26,14 @@
 
 // Represent a single state of the fast capture
 struct FastCaptureState : FastThreadState {
-                FastCaptureState();
-    /*virtual*/ ~FastCaptureState();
-
     // all pointer fields use raw pointers; objects are owned and ref-counted by RecordThread
-    NBAIO_Source*   mInputSource;       // HAL input device, must already be negotiated
+    NBAIO_Source*   mInputSource = nullptr; // HAL input device, must already be negotiated
     // FIXME by renaming, could pull up these fields to FastThreadState
-    int             mInputSourceGen;    // increment when mInputSource is assigned
-    NBAIO_Sink*     mPipeSink;          // after reading from input source, write to this pipe sink
-    int             mPipeSinkGen;       // increment when mPipeSink is assigned
-    size_t          mFrameCount;        // number of frames per fast capture buffer
+    int             mInputSourceGen = 0;    // increment when mInputSource is assigned
+    NBAIO_Sink*     mPipeSink = nullptr;    // after reading from input source,
+                                            // write to this pipe sink
+    int             mPipeSinkGen = 0;       // increment when mPipeSink is assigned
+    size_t          mFrameCount = 0;        // number of frames per fast capture buffer
     audio_track_cblk_t* mCblk;          // control block for the single fast client, or NULL
 
     audio_format_t  mFastPatchRecordFormat = AUDIO_FORMAT_INVALID;
@@ -55,6 +53,7 @@
     static const char *commandToString(Command command);
 };  // struct FastCaptureState
 
-}   // namespace android
+// No virtuals.
+static_assert(!std::is_polymorphic_v<FastCaptureState>);
 
-#endif  // ANDROID_AUDIO_FAST_CAPTURE_STATE_H
+}   // namespace android
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/fastpath/FastMixer.cpp
similarity index 93%
rename from services/audioflinger/FastMixer.cpp
rename to services/audioflinger/fastpath/FastMixer.cpp
index 61dd3f2..e0a15c1 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/fastpath/FastMixer.cpp
@@ -42,7 +42,7 @@
 #include <cutils/bitops.h>
 #include <media/AudioMixer.h>
 #include "FastMixer.h"
-#include "TypedLogger.h"
+#include <afutils/TypedLogger.h>
 
 namespace android {
 
@@ -61,48 +61,25 @@
     : FastThread("cycle_ms", "load_us"),
     // mFastTrackNames
     // mGenerations
-    mOutputSink(NULL),
-    mOutputSinkGen(0),
-    mMixer(NULL),
-    mSinkBuffer(NULL),
-    mSinkBufferSize(0),
-    mSinkChannelCount(FCC_2),
-    mMixerBuffer(NULL),
-    mMixerBufferSize(0),
-    mMixerBufferState(UNDEFINED),
-    mFormat(Format_Invalid),
-    mSampleRate(0),
-    mFastTracksGen(0),
-    mTotalNativeFramesWritten(0),
     // timestamp
-    mNativeFramesWrittenButNotPresented(0),   // the = 0 is to silence the compiler
-    mMasterMono(false),
     mThreadIoHandle(parentIoHandle)
 {
     // FIXME pass sInitial as parameter to base class constructor, and make it static local
     mPrevious = &sInitial;
     mCurrent = &sInitial;
-
     mDummyDumpState = &mDummyFastMixerDumpState;
+
     // TODO: Add channel mask to NBAIO_Format.
     // We assume that the channel mask must be a valid positional channel mask.
     mSinkChannelMask = getChannelMaskFromCount(mSinkChannelCount);
     mBalance.setChannelMask(mSinkChannelMask);
 
-    unsigned i;
-    for (i = 0; i < FastMixerState::sMaxFastTracks; ++i) {
-        mGenerations[i] = 0;
-    }
 #ifdef FAST_THREAD_STATISTICS
     mOldLoad.tv_sec = 0;
     mOldLoad.tv_nsec = 0;
 #endif
 }
 
-FastMixer::~FastMixer()
-{
-}
-
 FastMixerStateQueue* FastMixer::sq()
 {
     return &mSQ;
@@ -229,13 +206,13 @@
     unsigned previousTrackMask;
 
     // check for change in output HAL configuration
-    NBAIO_Format previousFormat = mFormat;
+    const NBAIO_Format previousFormat = mFormat;
     if (current->mOutputSinkGen != mOutputSinkGen) {
         mOutputSink = current->mOutputSink;
         mOutputSinkGen = current->mOutputSinkGen;
         mSinkChannelMask = current->mSinkChannelMask;
         mBalance.setChannelMask(mSinkChannelMask);
-        if (mOutputSink == NULL) {
+        if (mOutputSink == nullptr) {
             mFormat = Format_Invalid;
             mSampleRate = 0;
             mSinkChannelCount = 0;
@@ -259,11 +236,11 @@
     if ((!Format_isEqual(mFormat, previousFormat)) || (frameCount != previous->mFrameCount)) {
         // FIXME to avoid priority inversion, don't delete here
         delete mMixer;
-        mMixer = NULL;
+        mMixer = nullptr;
         free(mMixerBuffer);
-        mMixerBuffer = NULL;
+        mMixerBuffer = nullptr;
         free(mSinkBuffer);
-        mSinkBuffer = NULL;
+        mSinkBuffer = nullptr;
         if (frameCount > 0 && mSampleRate > 0) {
             // FIXME new may block for unbounded time at internal mutex of the heap
             //       implementation; it would be better to have normal mixer allocate for us
@@ -320,7 +297,7 @@
         // process removed tracks first to avoid running out of track names
         unsigned removedTracks = previousTrackMask & ~currentTrackMask;
         while (removedTracks != 0) {
-            int i = __builtin_ctz(removedTracks);
+            const int i = __builtin_ctz(removedTracks);
             removedTracks &= ~(1 << i);
             updateMixerTrack(i, REASON_REMOVE);
             // don't reset track dump state, since other side is ignoring it
@@ -329,7 +306,7 @@
         // now process added tracks
         unsigned addedTracks = currentTrackMask & ~previousTrackMask;
         while (addedTracks != 0) {
-            int i = __builtin_ctz(addedTracks);
+            const int i = __builtin_ctz(addedTracks);
             addedTracks &= ~(1 << i);
             updateMixerTrack(i, REASON_ADD);
         }
@@ -338,7 +315,7 @@
         // but may have a different buffer provider or volume provider
         unsigned modifiedTracks = currentTrackMask & previousTrackMask;
         while (modifiedTracks != 0) {
-            int i = __builtin_ctz(modifiedTracks);
+            const int i = __builtin_ctz(modifiedTracks);
             modifiedTracks &= ~(1 << i);
             updateMixerTrack(i, REASON_MODIFY);
         }
@@ -373,8 +350,8 @@
     const FastMixerState::Command command = mCommand;
     const size_t frameCount = current->mFrameCount;
 
-    if ((command & FastMixerState::MIX) && (mMixer != NULL) && mIsWarm) {
-        ALOG_ASSERT(mMixerBuffer != NULL);
+    if ((command & FastMixerState::MIX) && (mMixer != nullptr) && mIsWarm) {
+        ALOG_ASSERT(mMixerBuffer != nullptr);
 
         // AudioMixer::mState.enabledTracks is undefined if mState.hook == process__validate,
         // so we keep a side copy of enabledTracks
@@ -383,7 +360,7 @@
         // for each track, update volume and check for underrun
         unsigned currentTrackMask = current->mTrackMask;
         while (currentTrackMask != 0) {
-            int i = __builtin_ctz(currentTrackMask);
+            const int i = __builtin_ctz(currentTrackMask);
             currentTrackMask &= ~(1 << i);
             const FastTrack* fastTrack = &current->mFastTracks[i];
 
@@ -406,8 +383,8 @@
             fastTrack->mBufferProvider->onTimestamp(perTrackTimestamp);
 
             const int name = i;
-            if (fastTrack->mVolumeProvider != NULL) {
-                gain_minifloat_packed_t vlr = fastTrack->mVolumeProvider->getVolumeLR();
+            if (fastTrack->mVolumeProvider != nullptr) {
+                const gain_minifloat_packed_t vlr = fastTrack->mVolumeProvider->getVolumeLR();
                 float vlf = float_from_gain(gain_minifloat_unpack_left(vlr));
                 float vrf = float_from_gain(gain_minifloat_unpack_right(vlr));
 
@@ -418,7 +395,7 @@
             // takes a tryLock, which can block
             // up to 1 ms.  If enough active tracks all blocked in sequence, this would result
             // in the overall fast mix cycle being delayed.  Should use a non-blocking FIFO.
-            size_t framesReady = fastTrack->mBufferProvider->framesReady();
+            const size_t framesReady = fastTrack->mBufferProvider->framesReady();
             if (ATRACE_ENABLED()) {
                 // I wish we had formatted trace names
                 char traceName[16];
@@ -464,7 +441,8 @@
         mMixerBufferState = UNDEFINED;
     }
     //bool didFullWrite = false;    // dumpsys could display a count of partial writes
-    if ((command & FastMixerState::WRITE) && (mOutputSink != NULL) && (mMixerBuffer != NULL)) {
+    if ((command & FastMixerState::WRITE)
+            && (mOutputSink != nullptr) && (mMixerBuffer != nullptr)) {
         if (mMixerBufferState == UNDEFINED) {
             memset(mMixerBuffer, 0, mMixerBufferSize);
             mMixerBufferState = ZEROED;
@@ -481,7 +459,7 @@
         mBalance.process((float *)mMixerBuffer, frameCount);
 
         // prepare the buffer used to write to sink
-        void *buffer = mSinkBuffer != NULL ? mSinkBuffer : mMixerBuffer;
+        void *buffer = mSinkBuffer != nullptr ? mSinkBuffer : mMixerBuffer;
         if (mFormat.mFormat != mMixerBufferFormat) { // sink format not the same as mixer format
             memcpy_by_audio_format(buffer, mFormat.mFormat, mMixerBuffer, mMixerBufferFormat,
                     frameCount * Format_channelCount(mFormat));
@@ -493,7 +471,7 @@
                     audio_bytes_per_sample(mFormat.mFormat),
                     frameCount * audio_bytes_per_frame(mAudioChannelCount, mFormat.mFormat));
         }
-        // if non-NULL, then duplicate write() to this non-blocking sink
+        // if non-nullptr, then duplicate write() to this non-blocking sink
 #ifdef TEE_SINK
         mTee.write(buffer, frameCount);
 #endif
@@ -501,7 +479,7 @@
         //       but this code should be modified to handle both non-blocking and blocking sinks
         dumpState->mWriteSequence++;
         ATRACE_BEGIN("write");
-        ssize_t framesWritten = mOutputSink->write(buffer, frameCount);
+        const ssize_t framesWritten = mOutputSink->write(buffer, frameCount);
         ATRACE_END();
         dumpState->mWriteSequence++;
         if (framesWritten >= 0) {
diff --git a/services/audioflinger/FastMixer.h b/services/audioflinger/fastpath/FastMixer.h
similarity index 66%
rename from services/audioflinger/FastMixer.h
rename to services/audioflinger/fastpath/FastMixer.h
index d71519f..6e48df6 100644
--- a/services/audioflinger/FastMixer.h
+++ b/services/audioflinger/fastpath/FastMixer.h
@@ -14,8 +14,7 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_AUDIO_FAST_MIXER_H
-#define ANDROID_AUDIO_FAST_MIXER_H
+#pragma once
 
 #include <atomic>
 #include <audio_utils/Balance.h>
@@ -23,13 +22,13 @@
 #include "StateQueue.h"
 #include "FastMixerState.h"
 #include "FastMixerDumpState.h"
-#include "NBAIO_Tee.h"
+#include <afutils/NBAIO_Tee.h>
 
 namespace android {
 
 class AudioMixer;
 
-typedef StateQueue<FastMixerState> FastMixerStateQueue;
+using FastMixerStateQueue = StateQueue<FastMixerState>;
 
 class FastMixer : public FastThread {
 
@@ -37,7 +36,6 @@
     /** FastMixer constructor takes as param the parent MixerThread's io handle (id)
         for purposes of identification. */
     explicit FastMixer(audio_io_handle_t threadIoHandle);
-    virtual ~FastMixer();
 
             FastMixerStateQueue* sq();
 
@@ -51,13 +49,13 @@
             FastMixerStateQueue mSQ;
 
     // callouts
-    virtual const FastThreadState *poll();
-    virtual void setNBLogWriter(NBLog::Writer *logWriter);
-    virtual void onIdle();
-    virtual void onExit();
-    virtual bool isSubClassCommand(FastThreadState::Command command);
-    virtual void onStateChange();
-    virtual void onWork();
+    const FastThreadState *poll() override;
+    void setNBLogWriter(NBLog::Writer *logWriter) override;
+    void onIdle() override;
+    void onExit() override;
+    bool isSubClassCommand(FastThreadState::Command command) override;
+    void onStateChange() override;
+    void onWork() override;
 
     enum Reason {
         REASON_REMOVE,
@@ -71,41 +69,42 @@
     static const FastMixerState sInitial;
 
     FastMixerState  mPreIdle;   // copy of state before we went into idle
-    int             mGenerations[FastMixerState::kMaxFastTracks];
+    int             mGenerations[FastMixerState::kMaxFastTracks]{};
                                 // last observed mFastTracks[i].mGeneration
-    NBAIO_Sink*     mOutputSink;
-    int             mOutputSinkGen;
-    AudioMixer*     mMixer;
+    NBAIO_Sink*     mOutputSink = nullptr;
+    int             mOutputSinkGen = 0;
+    AudioMixer*     mMixer = nullptr;
 
     // mSinkBuffer audio format is stored in format.mFormat.
-    void*           mSinkBuffer;        // used for mixer output format translation
+    void*           mSinkBuffer = nullptr; // used for mixer output format translation
                                         // if sink format is different than mixer output.
-    size_t          mSinkBufferSize;
-    uint32_t        mSinkChannelCount;
-    audio_channel_mask_t mSinkChannelMask;
-    void*           mMixerBuffer;       // mixer output buffer.
-    size_t          mMixerBufferSize;
+    size_t          mSinkBufferSize = 0;
+    uint32_t        mSinkChannelCount = FCC_2;
+    audio_channel_mask_t mSinkChannelMask;        // set in ctor
+    void*           mMixerBuffer = nullptr;       // mixer output buffer.
+    size_t          mMixerBufferSize = 0;
     static constexpr audio_format_t mMixerBufferFormat = AUDIO_FORMAT_PCM_FLOAT;
 
-    uint32_t        mAudioChannelCount; // audio channel count, excludes haptic channels.
+    // audio channel count, excludes haptic channels.  Set in onStateChange().
+    uint32_t        mAudioChannelCount = 0;
 
-    enum {UNDEFINED, MIXED, ZEROED} mMixerBufferState;
-    NBAIO_Format    mFormat;
-    unsigned        mSampleRate;
-    int             mFastTracksGen;
+    enum {UNDEFINED, MIXED, ZEROED} mMixerBufferState = UNDEFINED;
+    NBAIO_Format    mFormat{Format_Invalid};
+    unsigned        mSampleRate = 0;
+    int             mFastTracksGen = 0;
     FastMixerDumpState mDummyFastMixerDumpState;
-    int64_t         mTotalNativeFramesWritten;  // copied to dumpState->mFramesWritten
+    int64_t         mTotalNativeFramesWritten = 0;  // copied to dumpState->mFramesWritten
 
     // next 2 fields are valid only when timestampStatus == NO_ERROR
     ExtendedTimestamp mTimestamp;
-    int64_t         mNativeFramesWrittenButNotPresented;
+    int64_t         mNativeFramesWrittenButNotPresented = 0;
 
     audio_utils::Balance mBalance;
 
     // accessed without lock between multiple threads.
-    std::atomic_bool mMasterMono;
+    std::atomic_bool mMasterMono{};
     std::atomic<float> mMasterBalance{};
-    std::atomic_int_fast64_t mBoottimeOffset;
+    std::atomic_int_fast64_t mBoottimeOffset{};
 
     // parent thread id for debugging purposes
     [[maybe_unused]] const audio_io_handle_t mThreadIoHandle;
@@ -115,5 +114,3 @@
 };  // class FastMixer
 
 }   // namespace android
-
-#endif  // ANDROID_AUDIO_FAST_MIXER_H
diff --git a/services/audioflinger/FastMixerDumpState.cpp b/services/audioflinger/fastpath/FastMixerDumpState.cpp
similarity index 90%
rename from services/audioflinger/FastMixerDumpState.cpp
rename to services/audioflinger/fastpath/FastMixerDumpState.cpp
index 3f20282..4f79dd6 100644
--- a/services/audioflinger/FastMixerDumpState.cpp
+++ b/services/audioflinger/fastpath/FastMixerDumpState.cpp
@@ -29,23 +29,11 @@
 
 namespace android {
 
-FastMixerDumpState::FastMixerDumpState() : FastThreadDumpState(),
-    mWriteSequence(0), mFramesWritten(0),
-    mNumTracks(0), mWriteErrors(0),
-    mSampleRate(0), mFrameCount(0),
-    mTrackMask(0)
-{
-}
-
-FastMixerDumpState::~FastMixerDumpState()
-{
-}
-
 // helper function called by qsort()
 static int compare_uint32_t(const void *pa, const void *pb)
 {
-    uint32_t a = *(const uint32_t *)pa;
-    uint32_t b = *(const uint32_t *)pb;
+    const uint32_t a = *(const uint32_t *)pa;
+    const uint32_t b = *(const uint32_t *)pb;
     if (a < b) {
         return -1;
     } else if (a > b) {
@@ -61,9 +49,9 @@
         dprintf(fd, "  FastMixer not initialized\n");
         return;
     }
-    double measuredWarmupMs = (mMeasuredWarmupTs.tv_sec * 1000.0) +
+    const double measuredWarmupMs = (mMeasuredWarmupTs.tv_sec * 1000.0) +
             (mMeasuredWarmupTs.tv_nsec / 1000000.0);
-    double mixPeriodSec = (double) mFrameCount / mSampleRate;
+    const double mixPeriodSec = (double) mFrameCount / mSampleRate;
     dprintf(fd, "  FastMixer command=%s writeSequence=%u framesWritten=%u\n"
                 "            numTracks=%u writeErrors=%u underruns=%u overruns=%u\n"
                 "            sampleRate=%u frameCount=%zu measuredWarmup=%.3g ms, warmupCycles=%u\n"
@@ -99,16 +87,16 @@
     // the mean account for 99.73% of the population.  So if we take each tail to be 1/1000 of the
     // sample set, we get 99.8% combined, or close to three standard deviations.
     static const uint32_t kTailDenominator = 1000;
-    uint32_t *tail = n >= kTailDenominator ? new uint32_t[n] : NULL;
+    uint32_t *tail = n >= kTailDenominator ? new uint32_t[n] : nullptr;
     // loop over all the samples
     for (uint32_t j = 0; j < n; ++j) {
-        size_t i = oldestClosed++ & (mSamplingN - 1);
-        uint32_t wallNs = mMonotonicNs[i];
-        if (tail != NULL) {
+        const size_t i = oldestClosed++ & (mSamplingN - 1);
+        const uint32_t wallNs = mMonotonicNs[i];
+        if (tail != nullptr) {
             tail[j] = wallNs;
         }
         wall.add(wallNs);
-        uint32_t sampleLoadNs = mLoadNs[i];
+        const uint32_t sampleLoadNs = mLoadNs[i];
         loadNs.add(sampleLoadNs);
 #ifdef CPU_FREQUENCY_STATISTICS
         uint32_t sampleCpukHz = mCpukHz[i];
@@ -146,10 +134,10 @@
                 "    mean=%.1f min=%.1f max=%.1f stddev=%.1f\n",
                 loadMHz.getMean(), loadMHz.getMin(), loadMHz.getMax(), loadMHz.getStdDev());
 #endif
-    if (tail != NULL) {
+    if (tail != nullptr) {
         qsort(tail, n, sizeof(uint32_t), compare_uint32_t);
         // assume same number of tail samples on each side, left and right
-        uint32_t count = n / kTailDenominator;
+        const uint32_t count = n / kTailDenominator;
         audio_utils::Statistics<double> left, right;
         for (uint32_t i = 0; i < count; ++i) {
             left.add(tail[i]);
@@ -175,7 +163,7 @@
             FastMixerState::sMaxFastTracks, trackMask);
     dprintf(fd, "  Index Active Full Partial Empty  Recent Ready    Written\n");
     for (uint32_t i = 0; i < FastMixerState::sMaxFastTracks; ++i, trackMask >>= 1) {
-        bool isActive = trackMask & 1;
+        const bool isActive = trackMask & 1;
         const FastTrackDump *ftDump = &mTracks[i];
         const FastTrackUnderruns& underruns = ftDump->mUnderruns;
         const char *mostRecent;
@@ -203,4 +191,4 @@
     }
 }
 
-}   // android
+}  // namespace android
diff --git a/services/audioflinger/FastMixerDumpState.h b/services/audioflinger/fastpath/FastMixerDumpState.h
similarity index 74%
rename from services/audioflinger/FastMixerDumpState.h
rename to services/audioflinger/fastpath/FastMixerDumpState.h
index 9b91cbc..4d2e1a0 100644
--- a/services/audioflinger/FastMixerDumpState.h
+++ b/services/audioflinger/fastpath/FastMixerDumpState.h
@@ -14,10 +14,10 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_AUDIO_FAST_MIXER_DUMP_STATE_H
-#define ANDROID_AUDIO_FAST_MIXER_DUMP_STATE_H
+#pragma once
 
 #include <stdint.h>
+#include <type_traits>
 #include <audio_utils/TimestampVerifier.h>
 #include "Configuration.h"
 #include "FastThreadDumpState.h"
@@ -54,33 +54,32 @@
 
 // Represents the dump state of a fast track
 struct FastTrackDump {
-    FastTrackDump() : mFramesReady(0) { }
-    /*virtual*/ ~FastTrackDump() { }
     FastTrackUnderruns  mUnderruns;
-    size_t              mFramesReady;        // most recent value only; no long-term statistics kept
-    int64_t             mFramesWritten;      // last value from track
+    size_t              mFramesReady = 0;    // most recent value only; no long-term statistics kept
+    int64_t             mFramesWritten = 0;  // last value from track
 };
 
-struct FastMixerDumpState : FastThreadDumpState {
-    FastMixerDumpState();
-    /*virtual*/ ~FastMixerDumpState();
+// No virtuals.
+static_assert(!std::is_polymorphic_v<FastTrackDump>);
 
+struct FastMixerDumpState : FastThreadDumpState {
     void dump(int fd) const;    // should only be called on a stable copy, not the original
 
-    double   mLatencyMs = 0.;   // measured latency, default of 0 if no valid timestamp read.
-    uint32_t mWriteSequence;    // incremented before and after each write()
-    uint32_t mFramesWritten;    // total number of frames written successfully
-    uint32_t mNumTracks;        // total number of active fast tracks
-    uint32_t mWriteErrors;      // total number of write() errors
-    uint32_t mSampleRate;
-    size_t   mFrameCount;
-    uint32_t mTrackMask;        // mask of active tracks
+    double   mLatencyMs = 0.;     // measured latency, default of 0 if no valid timestamp read.
+    uint32_t mWriteSequence = 0;  // incremented before and after each write()
+    uint32_t mFramesWritten = 0;  // total number of frames written successfully
+    uint32_t mNumTracks = 0;      // total number of active fast tracks
+    uint32_t mWriteErrors = 0;    // total number of write() errors
+    uint32_t mSampleRate = 0;
+    size_t   mFrameCount = 0;
+    uint32_t mTrackMask = 0;      // mask of active tracks
     FastTrackDump   mTracks[FastMixerState::kMaxFastTracks];
 
     // For timestamp statistics.
     TimestampVerifier<int64_t /* frame count */, int64_t /* time ns */> mTimestampVerifier;
 };
 
-}   // android
+// No virtuals.
+static_assert(!std::is_polymorphic_v<FastMixerDumpState>);
 
-#endif  // ANDROID_AUDIO_FAST_MIXER_DUMP_STATE_H
+}  // namespace android
diff --git a/services/audioflinger/FastMixerState.cpp b/services/audioflinger/fastpath/FastMixerState.cpp
similarity index 73%
rename from services/audioflinger/FastMixerState.cpp
rename to services/audioflinger/fastpath/FastMixerState.cpp
index b98842d..4fe2d86 100644
--- a/services/audioflinger/FastMixerState.cpp
+++ b/services/audioflinger/fastpath/FastMixerState.cpp
@@ -22,31 +22,14 @@
 
 namespace android {
 
-FastTrack::FastTrack() :
-    mBufferProvider(NULL), mVolumeProvider(NULL),
-    mChannelMask(AUDIO_CHANNEL_OUT_STEREO), mFormat(AUDIO_FORMAT_INVALID), mGeneration(0)
+FastMixerState::FastMixerState() : FastThreadState()
 {
-}
-
-FastTrack::~FastTrack()
-{
-}
-
-FastMixerState::FastMixerState() : FastThreadState(),
-    // mFastTracks
-    mFastTracksGen(0), mTrackMask(0), mOutputSink(NULL), mOutputSinkGen(0),
-    mFrameCount(0)
-{
-    int ok = pthread_once(&sMaxFastTracksOnce, sMaxFastTracksInit);
+    const int ok = pthread_once(&sMaxFastTracksOnce, sMaxFastTracksInit);
     if (ok != 0) {
         ALOGE("%s pthread_once failed: %d", __func__, ok);
     }
 }
 
-FastMixerState::~FastMixerState()
-{
-}
-
 // static
 unsigned FastMixerState::sMaxFastTracks = kDefaultFastTracks;
 
@@ -57,7 +40,7 @@
 const char *FastMixerState::commandToString(Command command)
 {
     const char *str = FastThreadState::commandToString(command);
-    if (str != NULL) {
+    if (str != nullptr) {
         return str;
     }
     switch (command) {
@@ -72,9 +55,9 @@
 void FastMixerState::sMaxFastTracksInit()
 {
     char value[PROPERTY_VALUE_MAX];
-    if (property_get("ro.audio.max_fast_tracks", value, NULL) > 0) {
+    if (property_get("ro.audio.max_fast_tracks", value, nullptr /* default_value */) > 0) {
         char *endptr;
-        unsigned long ul = strtoul(value, &endptr, 0);
+        const auto ul = strtoul(value, &endptr, 0);
         if (*endptr == '\0' && kMinFastTracks <= ul && ul <= kMaxFastTracks) {
             sMaxFastTracks = (unsigned) ul;
         }
diff --git a/services/audioflinger/FastMixerState.h b/services/audioflinger/fastpath/FastMixerState.h
similarity index 66%
rename from services/audioflinger/FastMixerState.h
rename to services/audioflinger/fastpath/FastMixerState.h
index ce3cc14..c70e42a 100644
--- a/services/audioflinger/FastMixerState.h
+++ b/services/audioflinger/fastpath/FastMixerState.h
@@ -14,10 +14,10 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_AUDIO_FAST_MIXER_STATE_H
-#define ANDROID_AUDIO_FAST_MIXER_STATE_H
+#pragma once
 
 #include <math.h>
+#include <type_traits>
 
 #include <audio_utils/minifloat.h>
 #include <system/audio.h>
@@ -37,45 +37,50 @@
     // The provider implementation is responsible for validating that the return value is in range.
     virtual gain_minifloat_packed_t getVolumeLR() = 0;
 protected:
-    VolumeProvider() { }
-    virtual ~VolumeProvider() { }
+    VolumeProvider() = default;
+    virtual ~VolumeProvider() = default;
 };
 
 // Represents the state of a fast track
 struct FastTrack {
-    FastTrack();
-    /*virtual*/ ~FastTrack();
+    // must be nullptr if inactive, or non-nullptr if active
+    ExtendedAudioBufferProvider* mBufferProvider = nullptr;
 
-    ExtendedAudioBufferProvider* mBufferProvider; // must be NULL if inactive, or non-NULL if active
-    VolumeProvider*         mVolumeProvider; // optional; if NULL then full-scale
-    audio_channel_mask_t    mChannelMask;    // AUDIO_CHANNEL_OUT_MONO or AUDIO_CHANNEL_OUT_STEREO
-    audio_format_t          mFormat;         // track format
-    int                     mGeneration;     // increment when any field is assigned
+    // optional: if nullptr then full-scale
+    VolumeProvider*         mVolumeProvider = nullptr;
+
+    // AUDIO_CHANNEL_OUT_MONO or AUDIO_CHANNEL_OUT_STEREO
+    audio_channel_mask_t    mChannelMask = AUDIO_CHANNEL_OUT_STEREO;
+    audio_format_t          mFormat = AUDIO_FORMAT_INVALID;         // track format
+    int                     mGeneration = 0;     // increment when any field is assigned
     bool                    mHapticPlaybackEnabled = false; // haptic playback is enabled or not
     os::HapticScale         mHapticIntensity = os::HapticScale::MUTE; // intensity of haptic data
     float                   mHapticMaxAmplitude = NAN; // max amplitude allowed for haptic data
 };
 
+// No virtuals.
+static_assert(!std::is_polymorphic_v<FastTrack>);
+
 // Represents a single state of the fast mixer
 struct FastMixerState : FastThreadState {
-                FastMixerState();
-    /*virtual*/ ~FastMixerState();
+    FastMixerState();
 
     // These are the minimum, maximum, and default values for maximum number of fast tracks
-    static const unsigned kMinFastTracks = 2;
-    static const unsigned kMaxFastTracks = 32;
-    static const unsigned kDefaultFastTracks = 8;
+    static constexpr unsigned kMinFastTracks = 2;
+    static constexpr unsigned kMaxFastTracks = 32;
+    static constexpr unsigned kDefaultFastTracks = 8;
 
     static unsigned sMaxFastTracks;             // Configured maximum number of fast tracks
     static pthread_once_t sMaxFastTracksOnce;   // Protects initializer for sMaxFastTracks
 
     // all pointer fields use raw pointers; objects are owned and ref-counted by the normal mixer
     FastTrack   mFastTracks[kMaxFastTracks];
-    int         mFastTracksGen; // increment when any mFastTracks[i].mGeneration is incremented
-    unsigned    mTrackMask;     // bit i is set if and only if mFastTracks[i] is active
-    NBAIO_Sink* mOutputSink;    // HAL output device, must already be negotiated
-    int         mOutputSinkGen; // increment when mOutputSink is assigned
-    size_t      mFrameCount;    // number of frames per fast mix buffer
+    int         mFastTracksGen = 0; // increment when any
+                                    // mFastTracks[i].mGeneration is incremented
+    unsigned    mTrackMask = 0;     // bit i is set if and only if mFastTracks[i] is active
+    NBAIO_Sink* mOutputSink = nullptr; // HAL output device, must already be negotiated
+    int         mOutputSinkGen = 0; // increment when mOutputSink is assigned
+    size_t      mFrameCount = 0;    // number of frames per fast mix buffer
     audio_channel_mask_t mSinkChannelMask; // If not AUDIO_CHANNEL_NONE, specifies sink channel
                                            // mask when it cannot be directly calculated from
                                            // channel count
@@ -95,6 +100,7 @@
 
 };  // struct FastMixerState
 
-}   // namespace android
+// No virtuals.
+static_assert(!std::is_polymorphic_v<FastMixerState>);
 
-#endif  // ANDROID_AUDIO_FAST_MIXER_STATE_H
+}   // namespace android
diff --git a/services/audioflinger/FastThread.cpp b/services/audioflinger/fastpath/FastThread.cpp
similarity index 87%
rename from services/audioflinger/FastThread.cpp
rename to services/audioflinger/fastpath/FastThread.cpp
index 47fe0b3..d054d71 100644
--- a/services/audioflinger/FastThread.cpp
+++ b/services/audioflinger/fastpath/FastThread.cpp
@@ -28,7 +28,7 @@
 #include <utils/Trace.h>
 #include "FastThread.h"
 #include "FastThreadDumpState.h"
-#include "TypedLogger.h"
+#include <afutils/TypedLogger.h>
 
 #define FAST_DEFAULT_NS    999999999L   // ~1 sec: default time to sleep
 #define FAST_HOT_IDLE_NS     1000000L   // 1 ms: time to sleep while hot idling
@@ -38,69 +38,28 @@
 
 namespace android {
 
-FastThread::FastThread(const char *cycleMs, const char *loadUs) : Thread(false /*canCallJava*/),
-    // re-initialized to &sInitial by subclass constructor
-    mPrevious(NULL), mCurrent(NULL),
-    /* mOldTs({0, 0}), */
-    mOldTsValid(false),
-    mSleepNs(-1),
-    mPeriodNs(0),
-    mUnderrunNs(0),
-    mOverrunNs(0),
-    mForceNs(0),
-    mWarmupNsMin(0),
-    mWarmupNsMax(LONG_MAX),
-    // re-initialized to &mDummySubclassDumpState by subclass constructor
-    mDummyDumpState(NULL),
-    mDumpState(NULL),
-    mIgnoreNextOverrun(true),
-#ifdef FAST_THREAD_STATISTICS
-    // mOldLoad
-    mOldLoadValid(false),
-    mBounds(0),
-    mFull(false),
-    // mTcu
-#endif
-    mColdGen(0),
-    mIsWarm(false),
-    /* mMeasuredWarmupTs({0, 0}), */
-    mWarmupCycles(0),
-    mWarmupConsecutiveInRangeCycles(0),
-    mTimestampStatus(INVALID_OPERATION),
-
-    mCommand(FastThreadState::INITIAL),
-#if 0
-    frameCount(0),
-#endif
-    mAttemptedWrite(false)
-    // mCycleMs(cycleMs)
-    // mLoadUs(loadUs)
+FastThread::FastThread(const char *cycleMs, const char *loadUs) : Thread(false /*canCallJava*/)
 {
-    mOldTs.tv_sec = 0;
-    mOldTs.tv_nsec = 0;
-    mMeasuredWarmupTs.tv_sec = 0;
-    mMeasuredWarmupTs.tv_nsec = 0;
     strlcpy(mCycleMs, cycleMs, sizeof(mCycleMs));
     strlcpy(mLoadUs, loadUs, sizeof(mLoadUs));
 }
 
-FastThread::~FastThread()
-{
-}
-
 bool FastThread::threadLoop()
 {
     // LOGT now works even if tlNBLogWriter is nullptr, but we're considering changing that,
     // so this initialization permits a future change to remove the check for nullptr.
-    tlNBLogWriter = mDummyNBLogWriter.get();
+    aflog::setThreadWriter(mDummyNBLogWriter.get());
     for (;;) {
 
         // either nanosleep, sched_yield, or busy wait
         if (mSleepNs >= 0) {
             if (mSleepNs > 0) {
                 ALOG_ASSERT(mSleepNs < 1000000000);
-                const struct timespec req = {0, mSleepNs};
-                nanosleep(&req, NULL);
+                const struct timespec req = {
+                    0, // tv_sec
+                    static_cast<long>(mSleepNs) // NOLINT(google-runtime-int)
+                };
+                nanosleep(&req, nullptr);
             } else {
                 sched_yield();
             }
@@ -110,7 +69,7 @@
 
         // poll for state change
         const FastThreadState *next = poll();
-        if (next == NULL) {
+        if (next == nullptr) {
             // continue to use the default initial state until a real state is available
             // FIXME &sInitial not available, should save address earlier
             //ALOG_ASSERT(mCurrent == &sInitial && previous == &sInitial);
@@ -121,10 +80,11 @@
         if (next != mCurrent) {
 
             // As soon as possible of learning of a new dump area, start using it
-            mDumpState = next->mDumpState != NULL ? next->mDumpState : mDummyDumpState;
-            tlNBLogWriter = next->mNBLogWriter != NULL ?
+            mDumpState = next->mDumpState != nullptr ? next->mDumpState : mDummyDumpState;
+            NBLog::Writer * const writer = next->mNBLogWriter != nullptr ?
                     next->mNBLogWriter : mDummyNBLogWriter.get();
-            setNBLogWriter(tlNBLogWriter); // This is used for debugging only
+            aflog::setThreadWriter(writer);
+            setNBLogWriter(writer); // This is used for debugging only
 
             // We want to always have a valid reference to the previous (non-idle) state.
             // However, the state queue only guarantees access to current and previous states.
@@ -149,7 +109,7 @@
             mCurrent = next;
         }
 #if !LOG_NDEBUG
-        next = NULL;    // not referenced again
+        next = nullptr;    // not referenced again
 #endif
 
         mDumpState->mCommand = mCommand;
@@ -167,12 +127,12 @@
             // FIXME consider checking previous state and only perform if previous != COLD_IDLE
             if (mCurrent->mColdGen != mColdGen) {
                 int32_t *coldFutexAddr = mCurrent->mColdFutexAddr;
-                ALOG_ASSERT(coldFutexAddr != NULL);
-                int32_t old = android_atomic_dec(coldFutexAddr);
+                ALOG_ASSERT(coldFutexAddr != nullptr);
+                const int32_t old = android_atomic_dec(coldFutexAddr);
                 if (old <= 0) {
-                    syscall(__NR_futex, coldFutexAddr, FUTEX_WAIT_PRIVATE, old - 1, NULL);
+                    syscall(__NR_futex, coldFutexAddr, FUTEX_WAIT_PRIVATE, old - 1, nullptr);
                 }
-                int policy = sched_getscheduler(0) & ~SCHED_RESET_ON_FORK;
+                const int policy = sched_getscheduler(0) & ~SCHED_RESET_ON_FORK;
                 if (!(policy == SCHED_FIFO || policy == SCHED_RR)) {
                     ALOGE("did not receive expected priority boost on time");
                 }
@@ -224,7 +184,7 @@
         if (rc == 0) {
             if (mOldTsValid) {
                 time_t sec = newTs.tv_sec - mOldTs.tv_sec;
-                long nsec = newTs.tv_nsec - mOldTs.tv_nsec;
+                auto nsec = newTs.tv_nsec - mOldTs.tv_nsec;
                 ALOGE_IF(sec < 0 || (sec == 0 && nsec < 0),
                         "clock_gettime(CLOCK_MONOTONIC) failed: was %ld.%09ld but now %ld.%09ld",
                         mOldTs.tv_sec, mOldTs.tv_nsec, newTs.tv_sec, newTs.tv_nsec);
@@ -267,7 +227,7 @@
                 mSleepNs = -1;
                 if (mIsWarm) {
                     if (sec > 0 || nsec > mUnderrunNs) {
-                        ATRACE_NAME("underrun");
+                        ATRACE_NAME("underrun");   // NOLINT(misc-const-correctness)
                         // FIXME only log occasionally
                         ALOGV("underrun: time since last cycle %d.%03ld sec",
                                 (int) sec, nsec / 1000000L);
@@ -298,7 +258,7 @@
 #ifdef FAST_THREAD_STATISTICS
                 if (mIsWarm) {
                     // advance the FIFO queue bounds
-                    size_t i = mBounds & (mDumpState->mSamplingN - 1);
+                    const size_t i = mBounds & (mDumpState->mSamplingN - 1);
                     mBounds = (mBounds & 0xFFFF0000) | ((mBounds + 1) & 0xFFFF);
                     if (mFull) {
                         //mBounds += 0x10000;
diff --git a/services/audioflinger/fastpath/FastThread.h b/services/audioflinger/fastpath/FastThread.h
new file mode 100644
index 0000000..1f46b29
--- /dev/null
+++ b/services/audioflinger/fastpath/FastThread.h
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "Configuration.h"
+#ifdef CPU_FREQUENCY_STATISTICS
+#include <cpustats/ThreadCpuUsage.h>
+#endif
+#include <utils/Thread.h>
+#include "FastThreadState.h"
+
+namespace android {
+
+// FastThread is the common abstract base class of FastMixer and FastCapture
+class FastThread : public Thread {
+
+public:
+            FastThread(const char *cycleMs, const char *loadUs);
+
+private:
+    // implement Thread::threadLoop()
+    bool threadLoop() override;
+
+protected:
+    // callouts to subclass in same lexical order as they were in original FastMixer.cpp
+    // FIXME need comments
+    virtual const FastThreadState *poll() = 0;
+    virtual void setNBLogWriter(NBLog::Writer *logWriter __unused) { }
+    virtual void onIdle() = 0;
+    virtual void onExit() = 0;
+    virtual bool isSubClassCommand(FastThreadState::Command command) = 0;
+    virtual void onStateChange() = 0;
+    virtual void onWork() = 0;
+
+    // FIXME these former local variables need comments
+    const FastThreadState*  mPrevious = nullptr;
+    const FastThreadState*  mCurrent = nullptr;
+    struct timespec mOldTs{};
+    bool            mOldTsValid = false;
+    int64_t         mSleepNs = -1;     // -1: busy wait, 0: sched_yield, > 0: nanosleep
+    int64_t         mPeriodNs = 0;     // expected period; the time required to
+                                       // render one mix buffer
+    int64_t         mUnderrunNs = 0;   // underrun likely when write cycle
+                                       // is greater than this value
+    int64_t         mOverrunNs = 0;    // overrun likely when write cycle is less than this value
+    int64_t         mForceNs = 0;      // if overrun detected,
+                                       // force the write cycle to take this much time
+    int64_t         mWarmupNsMin = 0;  // warmup complete when write cycle is greater
+                                       //  than or equal to this value
+    int64_t         mWarmupNsMax = INT64_MAX;  // and less than or equal to this value
+    FastThreadDumpState* mDummyDumpState = nullptr;
+    FastThreadDumpState* mDumpState = nullptr;
+    bool            mIgnoreNextOverrun = true; // used to ignore initial overrun
+                                               //  and first after an underrun
+#ifdef FAST_THREAD_STATISTICS
+    struct timespec mOldLoad;       // previous value of clock_gettime(CLOCK_THREAD_CPUTIME_ID)
+    bool            mOldLoadValid = false;  // whether oldLoad is valid
+    uint32_t        mBounds = 0;
+    bool            mFull = false;        // whether we have collected at least mSamplingN samples
+#ifdef CPU_FREQUENCY_STATISTICS
+    ThreadCpuUsage  mTcu;           // for reading the current CPU clock frequency in kHz
+#endif
+#endif
+    unsigned        mColdGen = 0;       // last observed mColdGen
+    bool            mIsWarm = false;        // true means ready to mix,
+                                    // false means wait for warmup before mixing
+    struct timespec   mMeasuredWarmupTs{};  // how long did it take for warmup to complete
+    uint32_t          mWarmupCycles = 0;  // counter of number of loop cycles during warmup phase
+    uint32_t          mWarmupConsecutiveInRangeCycles = 0; // number of consecutive cycles in range
+    const sp<NBLog::Writer> mDummyNBLogWriter{new NBLog::Writer()};
+    status_t          mTimestampStatus = INVALID_OPERATION;
+
+    FastThreadState::Command mCommand = FastThreadState::INITIAL;
+    bool            mAttemptedWrite = false;
+
+    // init in constructor
+    char            mCycleMs[16];   // cycle_ms + suffix
+    char            mLoadUs[16];    // load_us + suffix
+
+};  // class FastThread
+
+}  // namespace android
diff --git a/services/audioflinger/FastThreadDumpState.cpp b/services/audioflinger/fastpath/FastThreadDumpState.cpp
similarity index 77%
rename from services/audioflinger/FastThreadDumpState.cpp
rename to services/audioflinger/fastpath/FastThreadDumpState.cpp
index 964a725..747cb9e 100644
--- a/services/audioflinger/FastThreadDumpState.cpp
+++ b/services/audioflinger/fastpath/FastThreadDumpState.cpp
@@ -19,32 +19,20 @@
 
 namespace android {
 
-FastThreadDumpState::FastThreadDumpState() :
-    mCommand(FastThreadState::INITIAL), mUnderruns(0), mOverruns(0),
-    /* mMeasuredWarmupTs({0, 0}), */
-    mWarmupCycles(0)
-#ifdef FAST_THREAD_STATISTICS
-    , mSamplingN(0), mBounds(0)
-#endif
+FastThreadDumpState::FastThreadDumpState()
 {
-    mMeasuredWarmupTs.tv_sec = 0;
-    mMeasuredWarmupTs.tv_nsec = 0;
 #ifdef FAST_THREAD_STATISTICS
     increaseSamplingN(1);
 #endif
 }
 
-FastThreadDumpState::~FastThreadDumpState()
-{
-}
-
 #ifdef FAST_THREAD_STATISTICS
 void FastThreadDumpState::increaseSamplingN(uint32_t samplingN)
 {
     if (samplingN <= mSamplingN || samplingN > kSamplingN || roundup(samplingN) != samplingN) {
         return;
     }
-    uint32_t additional = samplingN - mSamplingN;
+    const uint32_t additional = samplingN - mSamplingN;
     // sample arrays aren't accessed atomically with respect to the bounds,
     // so clearing reduces chance for dumpsys to read random uninitialized samples
     memset(&mMonotonicNs[mSamplingN], 0, sizeof(mMonotonicNs[0]) * additional);
@@ -56,4 +44,4 @@
 }
 #endif
 
-}   // android
+}  // namespace android
diff --git a/services/audioflinger/FastThreadDumpState.h b/services/audioflinger/fastpath/FastThreadDumpState.h
similarity index 82%
rename from services/audioflinger/FastThreadDumpState.h
rename to services/audioflinger/fastpath/FastThreadDumpState.h
index 1ce0914..b7bc404 100644
--- a/services/audioflinger/FastThreadDumpState.h
+++ b/services/audioflinger/fastpath/FastThreadDumpState.h
@@ -14,8 +14,9 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_AUDIO_FAST_THREAD_DUMP_STATE_H
-#define ANDROID_AUDIO_FAST_THREAD_DUMP_STATE_H
+#pragma once
+
+#include <type_traits>
 
 #include "Configuration.h"
 #include "FastThreadState.h"
@@ -30,13 +31,12 @@
 // It has a different lifetime than the FastThread, and so it can't be a member of FastThread.
 struct FastThreadDumpState {
     FastThreadDumpState();
-    /*virtual*/ ~FastThreadDumpState();
 
-    FastThreadState::Command mCommand;   // current command
-    uint32_t mUnderruns;        // total number of underruns
-    uint32_t mOverruns;         // total number of overruns
-    struct timespec mMeasuredWarmupTs;  // measured warmup time
-    uint32_t mWarmupCycles;     // number of loop cycles required to warmup
+    FastThreadState::Command mCommand = FastThreadState::INITIAL;  // current command
+    uint32_t mUnderruns = 0;        // total number of underruns
+    uint32_t mOverruns = 0;         // total number of overruns
+    struct timespec mMeasuredWarmupTs{};  // measured warmup time
+    uint32_t mWarmupCycles = 0;     // number of loop cycles required to warmup
 
 #ifdef FAST_THREAD_STATISTICS
     // Recently collected samples of per-cycle monotonic time, thread CPU time, and CPU frequency.
@@ -48,12 +48,12 @@
     // This value was chosen such that each array uses 1 small page (4 Kbytes).
     static const uint32_t kSamplingNforLowRamDevice = 0x400;
     // Corresponding runtime maximum size of sample arrays, must be a power of 2 <= kSamplingN.
-    uint32_t mSamplingN;
+    uint32_t mSamplingN = 0;
     // The bounds define the interval of valid samples, and are represented as follows:
     //      newest open (excluded) endpoint   = lower 16 bits of bounds, modulo N
     //      oldest closed (included) endpoint = upper 16 bits of bounds, modulo N
     // Number of valid samples is newest - oldest.
-    uint32_t mBounds;                   // bounds for mMonotonicNs, mThreadCpuNs, and mCpukHz
+    uint32_t mBounds = 0;               // bounds for mMonotonicNs, mThreadCpuNs, and mCpukHz
     // The elements in the *Ns arrays are in units of nanoseconds <= 3999999999.
     uint32_t mMonotonicNs[kSamplingN];  // delta monotonic (wall clock) time
     uint32_t mLoadNs[kSamplingN];       // delta CPU load in time
@@ -67,6 +67,7 @@
 
 };  // struct FastThreadDumpState
 
-}   // android
+// No virtuals.
+static_assert(!std::is_polymorphic_v<FastThreadDumpState>);
 
-#endif  // ANDROID_AUDIO_FAST_THREAD_DUMP_STATE_H
+}  // namespace android
diff --git a/services/audioflinger/FastThreadState.cpp b/services/audioflinger/fastpath/FastThreadState.cpp
similarity index 84%
rename from services/audioflinger/FastThreadState.cpp
rename to services/audioflinger/fastpath/FastThreadState.cpp
index ad5f31f..dfe8e65 100644
--- a/services/audioflinger/FastThreadState.cpp
+++ b/services/audioflinger/fastpath/FastThreadState.cpp
@@ -19,16 +19,6 @@
 
 namespace android {
 
-FastThreadState::FastThreadState() :
-    mCommand(INITIAL), mColdFutexAddr(NULL), mColdGen(0), mDumpState(NULL), mNBLogWriter(NULL)
-
-{
-}
-
-FastThreadState::~FastThreadState()
-{
-}
-
 // static
 const char *FastThreadState::commandToString(FastThreadState::Command command)
 {
@@ -38,7 +28,7 @@
     case FastThreadState::COLD_IDLE:    return "COLD_IDLE";
     case FastThreadState::EXIT:         return "EXIT";
     }
-    return NULL;
+    return nullptr;
 }
 
 }   // namespace android
diff --git a/services/audioflinger/FastThreadState.h b/services/audioflinger/fastpath/FastThreadState.h
similarity index 68%
rename from services/audioflinger/FastThreadState.h
rename to services/audioflinger/fastpath/FastThreadState.h
index 54c0dc6..8e5bedd 100644
--- a/services/audioflinger/FastThreadState.h
+++ b/services/audioflinger/fastpath/FastThreadState.h
@@ -14,9 +14,9 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_AUDIO_FAST_THREAD_STATE_H
-#define ANDROID_AUDIO_FAST_THREAD_STATE_H
+#pragma once
 
+#include <type_traits>
 #include "Configuration.h"
 #include <stdint.h>
 #include <media/nblog/NBLog.h>
@@ -27,10 +27,7 @@
 
 // Represents a single state of a FastThread
 struct FastThreadState {
-                FastThreadState();
-    /*virtual*/ ~FastThreadState();
-
-    typedef uint32_t Command;
+    using Command = uint32_t;
     static const Command
         INITIAL = 0,            // used only for the initial state
         HOT_IDLE = 1,           // do nothing
@@ -38,18 +35,20 @@
         IDLE = 3,               // either HOT_IDLE or COLD_IDLE
         EXIT = 4;               // exit from thread
         // additional values defined per subclass
-    Command     mCommand;       // current command
-    int32_t*    mColdFutexAddr; // for COLD_IDLE only, pointer to the associated futex
-    unsigned    mColdGen;       // increment when COLD_IDLE is requested so it's only performed once
+    Command     mCommand = INITIAL;       // current command
+    int32_t*    mColdFutexAddr = nullptr; // for COLD_IDLE only, pointer to the associated futex
+    unsigned    mColdGen = 0;             // increment when COLD_IDLE is requested
+                                          // so it's only performed once
 
     // This might be a one-time configuration rather than per-state
-    FastThreadDumpState* mDumpState; // if non-NULL, then update dump state periodically
-    NBLog::Writer* mNBLogWriter; // non-blocking logger
+    FastThreadDumpState* mDumpState = nullptr; // if non-NULL, then update dump state periodically
+    NBLog::Writer* mNBLogWriter = nullptr; // non-blocking logger
 
     // returns NULL if command belongs to a subclass
     static const char *commandToString(Command command);
 };  // struct FastThreadState
 
-}   // android
+// No virtuals.
+static_assert(!std::is_polymorphic_v<FastThreadState>);
 
-#endif  // ANDROID_AUDIO_FAST_THREAD_STATE_H
+}  // namespace android
diff --git a/services/audioflinger/StateQueue.cpp b/services/audioflinger/fastpath/StateQueue.cpp
similarity index 85%
rename from services/audioflinger/StateQueue.cpp
rename to services/audioflinger/fastpath/StateQueue.cpp
index 9d4188f..e896d29 100644
--- a/services/audioflinger/StateQueue.cpp
+++ b/services/audioflinger/fastpath/StateQueue.cpp
@@ -38,23 +38,6 @@
 }
 #endif
 
-// Constructor and destructor
-
-template<typename T> StateQueue<T>::StateQueue() :
-    mAck(NULL), mCurrent(NULL),
-    mMutating(&mStates[0]), mExpecting(NULL),
-    mInMutation(false), mIsDirty(false), mIsInitialized(false)
-#ifdef STATE_QUEUE_DUMP
-    , mObserverDump(&mObserverDummyDump), mMutatorDump(&mMutatorDummyDump)
-#endif
-{
-    atomic_init(&mNext, static_cast<uintptr_t>(0));
-}
-
-template<typename T> StateQueue<T>::~StateQueue()
-{
-}
-
 // Observer APIs
 
 template<typename T> const T* StateQueue<T>::poll()
@@ -112,7 +95,7 @@
 #endif
 
         // wait for prior push to be acknowledged
-        if (mExpecting != NULL) {
+        if (mExpecting != nullptr) {
 #ifdef STATE_QUEUE_DUMP
             unsigned count = 0;
 #endif
@@ -120,7 +103,7 @@
                 const T *ack = (const T *) mAck;    // no additional barrier needed
                 if (ack == mExpecting) {
                     // unnecessary as we're about to rewrite
-                    //mExpecting = NULL;
+                    //mExpecting = nullptr;
                     break;
                 }
                 if (block == BLOCK_NEVER) {
@@ -132,7 +115,7 @@
                 }
                 ++count;
 #endif
-                nanosleep(&req, NULL);
+                nanosleep(&req, nullptr);
             }
 #ifdef STATE_QUEUE_DUMP
             if (count > 1) {
@@ -156,14 +139,14 @@
 
     // optionally wait for this push or a prior push to be acknowledged
     if (block == BLOCK_UNTIL_ACKED) {
-        if (mExpecting != NULL) {
+        if (mExpecting != nullptr) {
 #ifdef STATE_QUEUE_DUMP
             unsigned count = 0;
 #endif
             for (;;) {
                 const T *ack = (const T *) mAck;    // no additional barrier needed
                 if (ack == mExpecting) {
-                    mExpecting = NULL;
+                    mExpecting = nullptr;
                     break;
                 }
 #ifdef STATE_QUEUE_DUMP
@@ -172,7 +155,7 @@
                 }
                 ++count;
 #endif
-                nanosleep(&req, NULL);
+                nanosleep(&req, nullptr);
             }
 #ifdef STATE_QUEUE_DUMP
             if (count > 1) {
@@ -187,7 +170,14 @@
 
 }   // namespace android
 
-// hack for gcc
-#ifdef STATE_QUEUE_INSTANTIATIONS
-#include STATE_QUEUE_INSTANTIATIONS
-#endif
+// Instantiate StateQueue template for the types we need.
+// This needs to be done in the same translation unit as the template
+// method definitions above.
+
+#include "FastCaptureState.h"
+#include "FastMixerState.h"
+
+namespace android {
+template class StateQueue<FastCaptureState>;
+template class StateQueue<FastMixerState>;
+}   // namespace android
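Note: the explicit instantiations added above must live in the same translation unit as the template member definitions; otherwise no object code is emitted for those members and clients fail to link. A minimal, hypothetical sketch of the same pattern (names are illustrative, not from this change):

    // queue.h -- declaration only; member definitions are intentionally not here.
    template <typename T>
    class Queue {
    public:
        void push(const T& value);  // defined in queue.cpp
    };

    // queue.cpp -- definitions plus explicit instantiations in one translation unit.
    #include "queue.h"

    template <typename T>
    void Queue<T>::push(const T& value) { /* ... */ }

    // Emit object code for exactly the instantiations clients are allowed to use.
    template class Queue<int>;
    template class Queue<float>;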
diff --git a/services/audioflinger/StateQueue.h b/services/audioflinger/fastpath/StateQueue.h
similarity index 91%
rename from services/audioflinger/StateQueue.h
rename to services/audioflinger/fastpath/StateQueue.h
index 27f6a28..36d986b 100644
--- a/services/audioflinger/StateQueue.h
+++ b/services/audioflinger/fastpath/StateQueue.h
@@ -14,8 +14,7 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_AUDIO_STATE_QUEUE_H
-#define ANDROID_AUDIO_STATE_QUEUE_H
+#pragma once
 
 #include <stdatomic.h>
 
@@ -124,12 +123,10 @@
 #endif
 
 // manages a FIFO queue of states
-template<typename T> class StateQueue {
+// Marked final to prevent derived classes, as there are no virtuals.
+template<typename T> class StateQueue final {
 
 public:
-            StateQueue();
-    virtual ~StateQueue();
-
     // Observer APIs
 
     // Poll for a state change.  Returns a pointer to a read-only state,
@@ -188,28 +185,29 @@
     T                 mStates[kN];      // written by mutator, read by observer
 
     // "volatile" is meaningless with SMP, but here it indicates that we're using atomic ops
-    atomic_uintptr_t  mNext; // written by mutator to advance next, read by observer
-    volatile const T* mAck;  // written by observer to acknowledge advance of next, read by mutator
+    atomic_uintptr_t  mNext{}; // written by mutator to advance next, read by observer
+    volatile const T* mAck = nullptr;  // written by observer to acknowledge advance of next,
+                                       // read by mutator
 
     // only used by observer
-    const T*          mCurrent;         // most recent value returned by poll()
+    const T*    mCurrent = nullptr;     // most recent value returned by poll()
 
     // only used by mutator
-    T*                mMutating;        // where updates by mutator are done in place
-    const T*          mExpecting;       // what the mutator expects mAck to be set to
-    bool              mInMutation;      // whether we're currently in the middle of a mutation
-    bool              mIsDirty;         // whether mutating state has been modified since last push
-    bool              mIsInitialized;   // whether mutating state has been initialized yet
+    T*          mMutating{&mStates[0]}; // where updates by mutator are done in place
+    const T*    mExpecting = nullptr;   // what the mutator expects mAck to be set to
+    bool        mInMutation = false;    // whether we're currently in the middle of a mutation
+    bool        mIsDirty = false;       // whether mutating state has been modified since last push
+    bool        mIsInitialized = false; // whether mutating state has been initialized yet
 
 #ifdef STATE_QUEUE_DUMP
     StateQueueObserverDump  mObserverDummyDump; // default area for observer dump if not set
-    StateQueueObserverDump* mObserverDump;      // pointer to active observer dump, always non-NULL
+    // pointer to active observer dump, always non-nullptr
+    StateQueueObserverDump* mObserverDump{&mObserverDummyDump};
     StateQueueMutatorDump   mMutatorDummyDump;  // default area for mutator dump if not set
-    StateQueueMutatorDump*  mMutatorDump;       // pointer to active mutator dump, always non-NULL
+    // pointer to active mutator dump, always non-nullptr
+    StateQueueMutatorDump*  mMutatorDump{&mMutatorDummyDump};
 #endif
 
 };  // class StateQueue
 
 }   // namespace android
-
-#endif  // ANDROID_AUDIO_STATE_QUEUE_H
diff --git a/services/audioflinger/sounddose/Android.bp b/services/audioflinger/sounddose/Android.bp
index 0a8c8be..2cab5d1 100644
--- a/services/audioflinger/sounddose/Android.bp
+++ b/services/audioflinger/sounddose/Android.bp
@@ -7,12 +7,39 @@
     default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
 }
 
+audioflinger_sounddose_tidy_errors = audioflinger_base_tidy_errors + [
+    "modernize-avoid-c-arrays",
+    "modernize-deprecated-headers",
+    "modernize-pass-by-value",
+    "modernize-use-auto",
+    "modernize-use-nodiscard",
+
+    // TODO(b/275642749) Reenable these warnings
+    "-misc-non-private-member-variables-in-classes",
+]
+
+// Eventually use common tidy defaults
+cc_defaults {
+    name: "audioflinger_sounddose_flags_defaults",
+    // https://clang.llvm.org/docs/UsersManual.html#command-line-options
+    // https://clang.llvm.org/docs/DiagnosticsReference.html
+    cflags: audioflinger_base_cflags,
+    // https://clang.llvm.org/extra/clang-tidy/
+    tidy: true,
+    tidy_checks: audioflinger_sounddose_tidy_errors,
+    tidy_checks_as_errors: audioflinger_sounddose_tidy_errors,
+    tidy_flags: [
+      "-format-style=file",
+    ],
+}
+
 cc_library {
     name: "libsounddose",
 
     double_loadable: true,
 
     defaults: [
+        "audioflinger_sounddose_flags_defaults",
         "latest_android_media_audio_common_types_ndk_shared",
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_sounddose_ndk_shared",
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 21252d6..21f346e 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -30,7 +30,6 @@
 namespace android {
 
 using aidl::android::media::audio::common::AudioDevice;
-using aidl::android::media::audio::common::AudioDeviceAddress;
 
 namespace {
 
@@ -48,43 +47,47 @@
 sp<audio_utils::MelProcessor> SoundDoseManager::getOrCreateProcessorForDevice(
         audio_port_handle_t deviceId, audio_io_handle_t streamHandle, uint32_t sampleRate,
         size_t channelCount, audio_format_t format) {
-    std::lock_guard _l(mLock);
+    const std::lock_guard _l(mLock);
 
-    if (mHalSoundDose != nullptr && !mDisableCsd) {
-        ALOGW("%s: using HAL MEL computation, no MelProcessor needed.", __func__);
+    if (mHalSoundDose != nullptr && mEnabledCsd) {
+        ALOGD("%s: using HAL MEL computation, no MelProcessor needed.", __func__);
         return nullptr;
     }
 
     auto streamProcessor = mActiveProcessors.find(streamHandle);
-    sp<audio_utils::MelProcessor> processor;
-    if (streamProcessor != mActiveProcessors.end() &&
-            (processor = streamProcessor->second.promote())) {
-        ALOGV("%s: found callback for stream id %d", __func__, streamHandle);
-        const auto activeTypeIt = mActiveDeviceTypes.find(deviceId);
-        if (activeTypeIt != mActiveDeviceTypes.end()) {
-            processor->setAttenuation(mMelAttenuationDB[activeTypeIt->second]);
+    if (streamProcessor != mActiveProcessors.end()) {
+        auto processor = streamProcessor->second.promote();
+        // If the processor is nullptr, it was removed by the playback thread
+        // and can be replaced in the mActiveProcessors map.
+        if (processor != nullptr) {
+            ALOGV("%s: found callback for stream id %d", __func__, streamHandle);
+            const auto activeTypeIt = mActiveDeviceTypes.find(deviceId);
+            if (activeTypeIt != mActiveDeviceTypes.end()) {
+                processor->setAttenuation(mMelAttenuationDB[activeTypeIt->second]);
+            }
+            processor->setDeviceId(deviceId);
+            processor->setOutputRs2UpperBound(mRs2UpperBound);
+            return processor;
         }
-        processor->setDeviceId(deviceId);
-        processor->setOutputRs2UpperBound(mRs2UpperBound);
-        return processor;
-    } else {
-        ALOGV("%s: creating new callback for stream id %d", __func__, streamHandle);
-        sp<audio_utils::MelProcessor> melProcessor = sp<audio_utils::MelProcessor>::make(
-                sampleRate, channelCount, format, this, deviceId, mRs2UpperBound);
-        const auto activeTypeIt = mActiveDeviceTypes.find(deviceId);
-        if (activeTypeIt != mActiveDeviceTypes.end()) {
-            melProcessor->setAttenuation(mMelAttenuationDB[activeTypeIt->second]);
-        }
-        mActiveProcessors[streamHandle] = melProcessor;
-        return melProcessor;
     }
+
+    ALOGV("%s: creating new callback for stream id %d", __func__, streamHandle);
+    sp<audio_utils::MelProcessor> melProcessor = sp<audio_utils::MelProcessor>::make(
+            sampleRate, channelCount, format, this, deviceId, mRs2UpperBound);
+    const auto activeTypeIt = mActiveDeviceTypes.find(deviceId);
+    if (activeTypeIt != mActiveDeviceTypes.end()) {
+        melProcessor->setAttenuation(mMelAttenuationDB[activeTypeIt->second]);
+    }
+    mActiveProcessors[streamHandle] = melProcessor;
+    return melProcessor;
 }
 
 bool SoundDoseManager::setHalSoundDoseInterface(const std::shared_ptr<ISoundDose>& halSoundDose) {
     ALOGV("%s", __func__);
 
+    std::shared_ptr<HalSoundDoseCallback> halSoundDoseCallback;
     {
-        std::lock_guard _l(mLock);
+        const std::lock_guard _l(mLock);
 
         mHalSoundDose = halSoundDose;
         if (halSoundDose == nullptr) {
@@ -103,9 +106,11 @@
             mHalSoundDoseCallback =
                 ndk::SharedRefBase::make<HalSoundDoseCallback>(this);
         }
+        halSoundDoseCallback = mHalSoundDoseCallback;
     }
 
-    auto status = halSoundDose->registerSoundDoseCallback(mHalSoundDoseCallback);
+    auto status = halSoundDose->registerSoundDoseCallback(halSoundDoseCallback);
+
     if (!status.isOk()) {
         // Not a warning since this can happen if the callback was registered before
         ALOGI("%s: Cannot register HAL sound dose callback with status message: %s",
@@ -118,7 +123,7 @@
 
 void SoundDoseManager::setOutputRs2UpperBound(float rs2Value) {
     ALOGV("%s", __func__);
-    std::lock_guard _l(mLock);
+    const std::lock_guard _l(mLock);
 
     if (mHalSoundDose != nullptr) {
         // using the HAL sound dose interface
@@ -131,9 +136,9 @@
     }
 
     for (auto& streamProcessor : mActiveProcessors) {
-        sp<audio_utils::MelProcessor> processor = streamProcessor.second.promote();
+        const sp<audio_utils::MelProcessor> processor = streamProcessor.second.promote();
         if (processor != nullptr) {
-            status_t result = processor->setOutputRs2UpperBound(rs2Value);
+            const status_t result = processor->setOutputRs2UpperBound(rs2Value);
             if (result != NO_ERROR) {
                 ALOGW("%s: could not set RS2 upper bound %f for stream %d", __func__, rs2Value,
                       streamProcessor.first);
@@ -145,7 +150,7 @@
 }
 
 void SoundDoseManager::removeStreamProcessor(audio_io_handle_t streamHandle) {
-    std::lock_guard _l(mLock);
+    const std::lock_guard _l(mLock);
     auto callbackToRemove = mActiveProcessors.find(streamHandle);
     if (callbackToRemove != mActiveProcessors.end()) {
         mActiveProcessors.erase(callbackToRemove);
@@ -153,7 +158,7 @@
 }
 
 audio_port_handle_t SoundDoseManager::getIdForAudioDevice(const AudioDevice& audioDevice) const {
-    std::lock_guard _l(mLock);
+    const std::lock_guard _l(mLock);
 
     audio_devices_t type;
     std::string address;
@@ -175,14 +180,14 @@
 
 void SoundDoseManager::mapAddressToDeviceId(const AudioDeviceTypeAddr& adt,
                                             const audio_port_handle_t deviceId) {
-    std::lock_guard _l(mLock);
+    const std::lock_guard _l(mLock);
     ALOGI("%s: map address: %d to device id: %d", __func__, adt.mType, deviceId);
     mActiveDevices[adt] = deviceId;
     mActiveDeviceTypes[deviceId] = adt.mType;
 }
 
 void SoundDoseManager::clearMapDeviceIdEntries(audio_port_handle_t deviceId) {
-    std::lock_guard _l(mLock);
+    const std::lock_guard _l(mLock);
     for (auto activeDevice = mActiveDevices.begin(); activeDevice != mActiveDevices.end();) {
         if (activeDevice->second == deviceId) {
             ALOGI("%s: clear mapping type: %d to deviceId: %d",
@@ -213,7 +218,7 @@
     if (id == AUDIO_PORT_HANDLE_NONE) {
         ALOGI("%s: no mapped id for audio device with type %d and address %s",
                 __func__, in_audioDevice.type.type,
-                in_audioDevice.address.get<AudioDeviceAddress::id>().c_str());
+                in_audioDevice.address.toString().c_str());
         return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
     }
     soundDoseManager->onMomentaryExposure(in_currentDbA, id);
@@ -240,7 +245,7 @@
     if (id == AUDIO_PORT_HANDLE_NONE) {
         ALOGI("%s: no mapped id for audio device with type %d and address %s",
                 __func__, in_audioDevice.type.type,
-                in_audioDevice.address.get<AudioDeviceAddress::id>().c_str());
+                in_audioDevice.address.toString().c_str());
         return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
     }
     // TODO: introduce timestamp in onNewMelValues callback
@@ -287,11 +292,11 @@
     return binder::Status::ok();
 }
 
-binder::Status SoundDoseManager::SoundDose::disableCsd() {
+binder::Status SoundDoseManager::SoundDose::setCsdEnabled(bool enabled) {
     ALOGV("%s", __func__);
     auto soundDoseManager = mSoundDoseManager.promote();
     if (soundDoseManager != nullptr) {
-        soundDoseManager->disableCsd();
+        soundDoseManager->setCsdEnabled(enabled);
     }
     return binder::Status::ok();
 }
@@ -300,7 +305,7 @@
     ALOGV("%s", __func__);
     auto soundDoseManager = mSoundDoseManager.promote();
     if (soundDoseManager != nullptr) {
-        std::lock_guard _l(soundDoseManager->mLock);
+        const std::lock_guard _l(soundDoseManager->mLock);
         *value = soundDoseManager->mRs2UpperBound;
     }
     return binder::Status::ok();
@@ -345,7 +350,7 @@
 }
 
 void SoundDoseManager::updateAttenuation(float attenuationDB, audio_devices_t deviceType) {
-    std::lock_guard _l(mLock);
+    const std::lock_guard _l(mLock);
     ALOGV("%s: updating MEL processor attenuation for device type %d to %f",
             __func__, deviceType, attenuationDB);
     mMelAttenuationDB[deviceType] = attenuationDB;
@@ -362,78 +367,78 @@
     }
 }
 
-void SoundDoseManager::disableCsd() {
+void SoundDoseManager::setCsdEnabled(bool enabled) {
     ALOGV("%s",  __func__);
 
-    std::lock_guard _l(mLock);
-    mDisableCsd = true;
+    const std::lock_guard _l(mLock);
+    mEnabledCsd = enabled;
 
-    // Normally, there should be no active MelProcessors when this method is called
-    // We pause however every cached MelProcessor as a defensive mechanism to not
-    // have unnecessary processing
     for (auto& activeEntry : mActiveProcessors) {
         auto melProcessor = activeEntry.second.promote();
         if (melProcessor != nullptr) {
-            melProcessor->pause();
+            if (enabled) {
+                melProcessor->resume();
+            } else {
+                melProcessor->pause();
+            }
         }
     }
 }
 
-bool SoundDoseManager::isCsdDisabled() {
-    std::lock_guard _l(mLock);
-    return mDisableCsd;
+bool SoundDoseManager::isCsdEnabled() {
+    const std::lock_guard _l(mLock);
+    return mEnabledCsd;
 }
 
 void SoundDoseManager::setUseFrameworkMel(bool useFrameworkMel) {
     // invalidate any HAL sound dose interface used
     setHalSoundDoseInterface(nullptr);
 
-    std::lock_guard _l(mLock);
+    const std::lock_guard _l(mLock);
     mUseFrameworkMel = useFrameworkMel;
 }
 
 bool SoundDoseManager::forceUseFrameworkMel() const {
-    std::lock_guard _l(mLock);
+    const std::lock_guard _l(mLock);
     return mUseFrameworkMel;
 }
 
 void SoundDoseManager::setComputeCsdOnAllDevices(bool computeCsdOnAllDevices) {
-    std::lock_guard _l(mLock);
+    const std::lock_guard _l(mLock);
     mComputeCsdOnAllDevices = computeCsdOnAllDevices;
 }
 
 bool SoundDoseManager::forceComputeCsdOnAllDevices() const {
-    std::lock_guard _l(mLock);
+    const std::lock_guard _l(mLock);
     return mComputeCsdOnAllDevices;
 }
 
 bool SoundDoseManager::isSoundDoseHalSupported() const {
-    if (mDisableCsd) {
-        return false;
+    {
+        const std::lock_guard _l(mLock);
+        if (!mEnabledCsd) return false;
     }
 
     std::shared_ptr<ISoundDose> halSoundDose;
     getHalSoundDose(&halSoundDose);
-    if (mHalSoundDose == nullptr) {
-        return false;
-    }
-    return true;
+    return halSoundDose != nullptr;
 }
 
 void SoundDoseManager::getHalSoundDose(std::shared_ptr<ISoundDose>* halSoundDose) const {
-    std::lock_guard _l(mLock);
+    const std::lock_guard _l(mLock);
     *halSoundDose = mHalSoundDose;
 }
 
 void SoundDoseManager::resetSoundDose() {
-    std::lock_guard lock(mLock);
+    const std::lock_guard lock(mLock);
     mSoundDose = nullptr;
 }
 
 void SoundDoseManager::resetCsd(float currentCsd,
                                 const std::vector<media::SoundDoseRecord>& records) {
-    std::lock_guard lock(mLock);
+    const std::lock_guard lock(mLock);
     std::vector<audio_utils::CsdRecord> resetRecords;
+    resetRecords.reserve(records.size());
     for (const auto& record : records) {
         resetRecords.emplace_back(record.timestamp, record.duration, record.value,
                                   record.averageMel);
@@ -451,13 +456,13 @@
     std::vector<audio_utils::CsdRecord> records;
     float currentCsd;
     {
-        std::lock_guard _l(mLock);
-        if (mDisableCsd) {
+        const std::lock_guard _l(mLock);
+        if (!mEnabledCsd) {
             return;
         }
 
 
-        int64_t timestampSec = getMonotonicSecond();
+        const int64_t timestampSec = getMonotonicSecond();
 
         // only for internal callbacks
         records = mMelAggregator->aggregateAndAddNewMelRecord(audio_utils::MelRecord(
@@ -471,6 +476,7 @@
 
     if (records.size() > 0 && soundDoseCallback != nullptr) {
         std::vector<media::SoundDoseRecord> newRecordsToReport;
+        newRecordsToReport.reserve(records.size());
         for (const auto& record : records) {
             newRecordsToReport.emplace_back(csdRecordToSoundDoseRecord(record));
         }
@@ -480,7 +486,7 @@
 }
 
 sp<media::ISoundDoseCallback> SoundDoseManager::getSoundDoseCallback() const {
-    std::lock_guard _l(mLock);
+    const std::lock_guard _l(mLock);
     if (mSoundDose == nullptr) {
         return nullptr;
     }
@@ -492,8 +498,8 @@
     ALOGV("%s: Momentary exposure for device %d triggered: %f MEL", __func__, deviceId, currentMel);
 
     {
-        std::lock_guard _l(mLock);
-        if (mDisableCsd) {
+        const std::lock_guard _l(mLock);
+        if (!mEnabledCsd) {
             return;
         }
     }
@@ -508,7 +514,7 @@
         const sp<media::ISoundDoseCallback>& callback) {
     ALOGV("%s: Register ISoundDoseCallback", __func__);
 
-    std::lock_guard _l(mLock);
+    const std::lock_guard _l(mLock);
     if (mSoundDose == nullptr) {
         mSoundDose = sp<SoundDose>::make(this, callback);
     }
@@ -518,8 +524,8 @@
 std::string SoundDoseManager::dump() const {
     std::string output;
     {
-        std::lock_guard _l(mLock);
-        if (mDisableCsd) {
+        const std::lock_guard _l(mLock);
+        if (!mEnabledCsd) {
             base::StringAppendF(&output, "CSD is disabled");
             return output;
         }
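For context, getOrCreateProcessorForDevice() above caches MelProcessor instances as weak pointers so that entries expire once the playback thread drops its reference. A minimal sketch of that wp<>/promote() caching pattern, using hypothetical names (Processor, getOrCreate, gCache):

    #include <map>
    #include <utils/RefBase.h>

    using android::sp;
    using android::wp;

    struct Processor : public android::RefBase {};

    std::map<int, wp<Processor>> gCache;  // weak references only

    sp<Processor> getOrCreate(int streamHandle) {
        if (auto it = gCache.find(streamHandle); it != gCache.end()) {
            if (sp<Processor> alive = it->second.promote(); alive != nullptr) {
                return alive;  // the cached processor is still referenced elsewhere
            }
            // promote() failed: the processor was destroyed, so the stale
            // entry is simply overwritten below.
        }
        sp<Processor> fresh = sp<Processor>::make();
        gCache[streamHandle] = fresh;  // store only a weak reference
        return fresh;
    }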
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
index 5081ce4..9ed0661 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.h
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -101,8 +101,8 @@
     /** Clear all map entries with passed audio_port_handle_t. */
     void clearMapDeviceIdEntries(audio_port_handle_t deviceId);
 
-    /** Returns true if CSD is disabled. */
-    bool isCsdDisabled();
+    /** Returns true if CSD is enabled. */
+    bool isCsdEnabled();
 
     std::string dump() const;
 
@@ -129,7 +129,7 @@
               mSoundDoseCallback(callback) {}
 
         /** IBinder::DeathRecipient. Listen to the death of ISoundDoseCallback. */
-        virtual void binderDied(const wp<IBinder>& who);
+        void binderDied(const wp<IBinder>& who) override;
 
         /** BnSoundDose override */
         binder::Status setOutputRs2UpperBound(float value) override;
@@ -137,7 +137,7 @@
                                 const std::vector<media::SoundDoseRecord>& records) override;
         binder::Status updateAttenuation(float attenuationDB, int device) override;
         binder::Status getOutputRs2UpperBound(float* value) override;
-        binder::Status disableCsd() override;
+        binder::Status setCsdEnabled(bool enabled) override;
 
         binder::Status getCsd(float* value) override;
         binder::Status forceUseFrameworkMel(bool useFrameworkMel) override;
@@ -170,7 +170,7 @@
     sp<media::ISoundDoseCallback> getSoundDoseCallback() const;
 
     void updateAttenuation(float attenuationDB, audio_devices_t deviceType);
-    void disableCsd();
+    void setCsdEnabled(bool enabled);
     void setUseFrameworkMel(bool useFrameworkMel);
     void setComputeCsdOnAllDevices(bool computeCsdOnAllDevices);
     bool isSoundDoseHalSupported() const;
@@ -202,7 +202,7 @@
     bool mUseFrameworkMel GUARDED_BY(mLock) = true;
     bool mComputeCsdOnAllDevices GUARDED_BY(mLock) = false;
 
-    bool mDisableCsd GUARDED_BY(mLock) = false;
+    bool mEnabledCsd GUARDED_BY(mLock) = true;
 };
 
 }  // namespace android
diff --git a/services/audioflinger/timing/Android.bp b/services/audioflinger/timing/Android.bp
index 17ce8bd..30ebca0 100644
--- a/services/audioflinger/timing/Android.bp
+++ b/services/audioflinger/timing/Android.bp
@@ -7,9 +7,39 @@
     default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
 }
 
+audioflinger_timing_tidy_errors = audioflinger_base_tidy_errors + [
+    "modernize-avoid-c-arrays",
+    "modernize-deprecated-headers",
+    "modernize-pass-by-value",
+    "modernize-use-auto",
+    "modernize-use-nodiscard",
+
+    // TODO(b/275642749) Reenable these warnings
+    "-misc-non-private-member-variables-in-classes",
+]
+
+// Eventually use common tidy defaults
+cc_defaults {
+    name: "audioflinger_timing_flags_defaults",
+    // https://clang.llvm.org/docs/UsersManual.html#command-line-options
+    // https://clang.llvm.org/docs/DiagnosticsReference.html
+    cflags: audioflinger_base_cflags,
+    // https://clang.llvm.org/extra/clang-tidy/
+    tidy: true,
+    tidy_checks: audioflinger_timing_tidy_errors,
+    tidy_checks_as_errors: audioflinger_timing_tidy_errors,
+    tidy_flags: [
+      "-format-style=file",
+    ],
+}
+
 cc_library {
     name: "libaudioflinger_timing",
 
+    defaults: [
+        "audioflinger_timing_flags_defaults",
+    ],
+
     host_supported: true,
 
     srcs: [
diff --git a/services/audioflinger/timing/MonotonicFrameCounter.h b/services/audioflinger/timing/MonotonicFrameCounter.h
index 0ea9510..8468130 100644
--- a/services/audioflinger/timing/MonotonicFrameCounter.h
+++ b/services/audioflinger/timing/MonotonicFrameCounter.h
@@ -58,19 +58,19 @@
      * This offset is sufficient to ensure monotonicity after flush is called,
      * suitability for any other purpose is *not* guaranteed.
      */
-    int64_t getOffsetFrameCount() const { return mOffsetFrameCount; }
+    [[nodiscard]] int64_t getOffsetFrameCount() const { return mOffsetFrameCount; }
 
     /**
      * Returns the last received frameCount.
      */
-    int64_t getLastReceivedFrameCount() const {
+    [[nodiscard]] int64_t getLastReceivedFrameCount() const {
         return mLastReceivedFrameCount;
     }
 
     /**
      * Returns the last reported frameCount from updateAndGetMonotonicFrameCount().
      */
-    int64_t getLastReportedFrameCount() const {
+    [[nodiscard]] int64_t getLastReportedFrameCount() const {
         // This is consistent after onFlush().
         return mOffsetFrameCount + mLastReceivedFrameCount;
     }
diff --git a/services/audioflinger/timing/SyncEvent.h b/services/audioflinger/timing/SyncEvent.h
new file mode 100644
index 0000000..b5a3b40
--- /dev/null
+++ b/services/audioflinger/timing/SyncEvent.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <functional>
+#include <mutex>
+
+#include <media/AudioSystem.h>
+#include <utils/RefBase.h>
+
+namespace android::audioflinger {
+
+class SyncEvent;
+using SyncEventCallback = std::function<void(const wp<SyncEvent>& event)>;
+
+class SyncEvent : public RefBase {
+public:
+    SyncEvent(AudioSystem::sync_event_t type,
+              audio_session_t triggerSession,
+              audio_session_t listenerSession,
+              const SyncEventCallback& callBack,
+              const wp<RefBase>& cookie)
+    : mType(type), mTriggerSession(triggerSession), mListenerSession(listenerSession),
+      mCookie(cookie), mCallback(callBack)
+    {}
+
+    void trigger() {
+        std::lock_guard l(mLock);
+        if (mCallback) mCallback(wp<SyncEvent>::fromExisting(this));
+    }
+
+    bool isCancelled() const {
+        std::lock_guard l(mLock);
+        return mCallback == nullptr;
+    }
+
+    void cancel() {
+        std::lock_guard l(mLock);
+        mCallback = nullptr;
+    }
+
+    AudioSystem::sync_event_t type() const { return mType; }
+    audio_session_t triggerSession() const { return mTriggerSession; }
+    audio_session_t listenerSession() const { return mListenerSession; }
+    const wp<RefBase>& cookie() const { return mCookie; }
+
+private:
+    const AudioSystem::sync_event_t mType;
+    const audio_session_t mTriggerSession;
+    const audio_session_t mListenerSession;
+    const wp<RefBase> mCookie;
+    mutable std::mutex mLock;
+    SyncEventCallback mCallback GUARDED_BY(mLock);
+};
+
+} // namespace android::audioflinger
diff --git a/services/audioflinger/timing/SynchronizedRecordState.h b/services/audioflinger/timing/SynchronizedRecordState.h
new file mode 100644
index 0000000..f40d41b
--- /dev/null
+++ b/services/audioflinger/timing/SynchronizedRecordState.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "SyncEvent.h"
+
+#pragma push_macro("LOG_TAG")
+#undef LOG_TAG
+#define LOG_TAG "SynchronizedRecordState"
+
+namespace android::audioflinger {
+
+class SynchronizedRecordState {
+public:
+    explicit SynchronizedRecordState(uint32_t sampleRate)
+        : mSampleRate(sampleRate)
+        {}
+
+    void clear() {
+        std::lock_guard lg(mLock);
+        clear_l();
+    }
+
+    // Called by the RecordThread when recording is starting.
+    void startRecording(const sp<SyncEvent>& event) {
+        std::lock_guard lg(mLock);
+        mSyncStartEvent = event;
+        // The sync event can be cancelled by the trigger session if the track is not in a
+        // compatible state, in which case we start recording immediately.
+        if (mSyncStartEvent->isCancelled()) {
+            clear_l();
+        } else {
+            mFramesToDrop = -(ssize_t)
+                ((AudioSystem::kSyncRecordStartTimeOutMs * mSampleRate) / 1000);
+        }
+    }
+
+    // Invoked by SyncEvent callback.
+    void onPlaybackFinished(const sp<SyncEvent>& event, size_t framesToDrop = 1) {
+        std::lock_guard lg(mLock);
+        if (event == mSyncStartEvent) {
+            mFramesToDrop = framesToDrop;  // compute this
+            ALOGV("%s: framesToDrop:%zd", __func__, mFramesToDrop);
+        }
+    }
+
+    // Returns the current FramesToDrop counter:
+    //
+    //   if < 0, waiting (drop the frames)
+    //   if > 0, draining (drop the frames)
+    //   if == 0, proceed to record.
+    ssize_t updateRecordFrames(size_t frames) {
+        std::lock_guard lg(mLock);
+        if (mFramesToDrop > 0) {
+            // we've been triggered, we count down for start delay
+            ALOGV("%s: trigger countdown %zd by %zu frames", __func__, mFramesToDrop, frames);
+            mFramesToDrop -= (ssize_t)frames;
+            if (mFramesToDrop <= 0) clear_l();
+        } else if (mFramesToDrop < 0) {
+            // we're waiting to be triggered.
+            // ALOGD("%s: timeout countup %zd with %zu frames", __func__, mFramesToDrop, frames);
+            mFramesToDrop += (ssize_t)frames;
+            if (mFramesToDrop >= 0 || !mSyncStartEvent || mSyncStartEvent->isCancelled()) {
+                ALOGW("Synced record %s, trigger session %d",
+                        (mFramesToDrop >= 0) ? "timed out" : "cancelled",
+                        (mSyncStartEvent) ? mSyncStartEvent->triggerSession()
+                                          : AUDIO_SESSION_NONE);
+                clear_l();
+            }
+        }
+        return mFramesToDrop;
+    }
+
+private:
+    const uint32_t mSampleRate;
+
+    std::mutex mLock;
+    // Number of captured frames to drop after the start sync event has been received.
+    // When < 0, this is the maximum number of frames to drop before starting capture
+    // even if the sync event is not received.
+    ssize_t mFramesToDrop GUARDED_BY(mLock) = 0;
+
+    // sync event triggering actual audio capture. Frames read before this event will
+    // be dropped and therefore not read by the application.
+    sp<SyncEvent> mSyncStartEvent GUARDED_BY(mLock);
+
+    void clear_l() REQUIRES(mLock) {
+        if (mSyncStartEvent) {
+            mSyncStartEvent->cancel();
+            mSyncStartEvent.clear();
+        }
+        mFramesToDrop = 0;
+    }
+};
+
+} // namespace android::audioflinger
+
+#pragma pop_macro("LOG_TAG")
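As a worked example of the counter above (the value of AudioSystem::kSyncRecordStartTimeOutMs is assumed to be 30 ms here, purely for illustration): at a 48 kHz capture rate, startRecording() sets mFramesToDrop to -(30 * 48000 / 1000) = -1440. Each call to updateRecordFrames(frames) adds `frames` while the counter is negative, so either the sync event fires first (onPlaybackFinished() sets a positive count that is then drained), or 1440 frames elapse without a trigger and clear_l() starts the capture on timeout.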
diff --git a/services/audioflinger/timing/tests/Android.bp b/services/audioflinger/timing/tests/Android.bp
index 29267a6..d1e5563 100644
--- a/services/audioflinger/timing/tests/Android.bp
+++ b/services/audioflinger/timing/tests/Android.bp
@@ -8,6 +8,31 @@
 }
 
 cc_test {
+    name: "mediasyncevent_tests",
+
+    host_supported: true,
+
+    srcs: [
+        "mediasyncevent_tests.cpp"
+    ],
+
+    header_libs: [
+        "libaudioclient_headers",
+    ],
+
+    static_libs: [
+        "liblog",
+        "libutils", // RefBase
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
+
+cc_test {
     name: "monotonicframecounter_tests",
 
     host_supported: true,
@@ -26,4 +51,29 @@
         "-Werror",
         "-Wextra",
     ],
-}
\ No newline at end of file
+}
+
+cc_test {
+    name: "synchronizedrecordstate_tests",
+
+    host_supported: true,
+
+    srcs: [
+        "synchronizedrecordstate_tests.cpp"
+    ],
+
+    header_libs: [
+        "libaudioclient_headers",
+    ],
+
+    static_libs: [
+        "liblog",
+        "libutils", // RefBase
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
\ No newline at end of file
diff --git a/services/audioflinger/timing/tests/mediasyncevent_tests.cpp b/services/audioflinger/timing/tests/mediasyncevent_tests.cpp
new file mode 100644
index 0000000..2922d90
--- /dev/null
+++ b/services/audioflinger/timing/tests/mediasyncevent_tests.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "mediasyncevent_tests"
+
+#include "../SyncEvent.h"
+
+#include <gtest/gtest.h>
+
+using namespace android;
+using namespace android::audioflinger;
+
+namespace {
+
+TEST(MediaSyncEventTests, Basic) {
+    struct Cookie : public RefBase {};
+
+    // These variables are set by trigger().
+    bool triggered = false;
+    wp<SyncEvent> param;
+
+    constexpr auto type = AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE;
+    constexpr auto triggerSession = audio_session_t(10);
+    constexpr auto listenerSession = audio_session_t(11);
+    const SyncEventCallback callback =
+            [&](const wp<SyncEvent>& event) {
+                triggered = true;
+                param = event;
+            };
+    const auto cookie = sp<Cookie>::make();
+
+    // Since the callback uses a weak pointer to this,
+    // don't allocate on the stack.
+    auto syncEvent = sp<SyncEvent>::make(
+            type,
+            triggerSession,
+            listenerSession,
+            callback,
+            cookie);
+
+    ASSERT_EQ(type, syncEvent->type());
+    ASSERT_EQ(triggerSession, syncEvent->triggerSession());
+    ASSERT_EQ(listenerSession, syncEvent->listenerSession());
+    ASSERT_EQ(cookie, syncEvent->cookie());
+    ASSERT_FALSE(triggered);
+
+    syncEvent->trigger();
+    ASSERT_TRUE(triggered);
+    ASSERT_EQ(param, syncEvent);
+
+    ASSERT_FALSE(syncEvent->isCancelled());
+    syncEvent->cancel();
+    ASSERT_TRUE(syncEvent->isCancelled());
+}
+
+} // namespace
diff --git a/services/audioflinger/timing/tests/synchronizedrecordstate_tests.cpp b/services/audioflinger/timing/tests/synchronizedrecordstate_tests.cpp
new file mode 100644
index 0000000..ee5d269
--- /dev/null
+++ b/services/audioflinger/timing/tests/synchronizedrecordstate_tests.cpp
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "synchronizedrecordstate_tests"
+
+#include "../SynchronizedRecordState.h"
+
+#include <gtest/gtest.h>
+
+using namespace android;
+using namespace android::audioflinger;
+
+namespace {
+
+TEST(SynchronizedRecordStateTests, Basic) {
+    struct Cookie : public RefBase {};
+
+    // These variables are set by trigger().
+    bool triggered = false;
+    wp<SyncEvent> param;
+
+    constexpr auto type = AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE;
+    constexpr auto triggerSession = audio_session_t(10);
+    constexpr auto listenerSession = audio_session_t(11);
+    const SyncEventCallback callback =
+            [&](const wp<SyncEvent>& event) {
+                triggered = true;
+                param = event;
+            };
+    const auto cookie = sp<Cookie>::make();
+
+    // Check timeout.
+    SynchronizedRecordState recordState(48000 /* sampleRate */);
+    auto syncEvent = sp<SyncEvent>::make(
+            type,
+            triggerSession,
+            listenerSession,
+            callback,
+            cookie);
+    recordState.startRecording(syncEvent);
+    recordState.updateRecordFrames(2);
+    ASSERT_FALSE(triggered);
+    ASSERT_EQ(0, recordState.updateRecordFrames(1'000'000'000));
+    ASSERT_FALSE(triggered);
+    ASSERT_TRUE(syncEvent->isCancelled());
+
+    // Check count down after track is complete.
+    syncEvent = sp<SyncEvent>::make(
+                type,
+                triggerSession,
+                listenerSession,
+                callback,
+                cookie);
+    recordState.startRecording(syncEvent);
+    recordState.onPlaybackFinished(syncEvent, 10);
+    ASSERT_EQ(1, recordState.updateRecordFrames(9));
+    ASSERT_FALSE(triggered);
+    ASSERT_EQ(0, recordState.updateRecordFrames(2));
+    ASSERT_FALSE(triggered);
+    ASSERT_TRUE(syncEvent->isCancelled());
+}
+
+}  // namespace
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 8f9c60b..da0df5f 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_AUDIOPOLICY_INTERFACE_H
 #define ANDROID_AUDIOPOLICY_INTERFACE_H
 
+#include <android/media/DeviceConnectedState.h>
 #include <media/AudioCommonTypes.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
@@ -246,6 +247,8 @@
                                     unsigned int *num_ports,
                                     struct audio_port_v7 *ports,
                                     unsigned int *generation) = 0;
+    virtual status_t listDeclaredDevicePorts(media::AudioPortRole role,
+                                             std::vector<media::AudioPortFw>* result) = 0;
     virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
     virtual status_t createAudioPatch(const struct audio_patch *patch,
                                        audio_patch_handle_t *handle,
@@ -435,6 +438,8 @@
 public:
     virtual ~AudioPolicyClientInterface() {}
 
+    virtual status_t getAudioPolicyConfig(media::AudioPolicyConfig *config) = 0;
+
     //
     // Audio HW module functions
     //
@@ -565,7 +570,8 @@
     virtual status_t updateSecondaryOutputs(
             const TrackSecondaryOutputsMap& trackSecondaryOutputs) = 0;
 
-    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port,
+                                             media::DeviceConnectedState state) = 0;
 
     virtual status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) = 0;
 };
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index de8e77f..d266e63 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -218,12 +218,15 @@
         return *(deviceTypes.begin());
     } else {
         // Multiple device selection is either:
+        //  - dock + one other device: give priority to dock in this case.
         //  - speaker + one other device: give priority to speaker in this case.
         //  - one A2DP device + another device: happens with duplicated output. In this case
         // retain the device on the A2DP output as the other must not correspond to an active
         // selection if not the speaker.
         //  - HDMI-CEC system audio mode only output: give priority to available item in order.
-        if (deviceTypes.count(AUDIO_DEVICE_OUT_SPEAKER) != 0) {
+        if (deviceTypes.count(AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET) != 0) {
+            return AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET;
+        } else if (deviceTypes.count(AUDIO_DEVICE_OUT_SPEAKER) != 0) {
             return AUDIO_DEVICE_OUT_SPEAKER;
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_SPEAKER_SAFE) != 0) {
             return AUDIO_DEVICE_OUT_SPEAKER_SAFE;
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 92a5628..8b76842 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -7,14 +7,19 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_library_static {
+cc_library {
     name: "libaudiopolicycomponents",
 
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+
     srcs: [
         "src/AudioCollections.cpp",
         "src/AudioInputDescriptor.cpp",
         "src/AudioOutputDescriptor.cpp",
         "src/AudioPatch.cpp",
+        "src/AudioPolicyConfig.cpp",
         "src/AudioPolicyMix.cpp",
         "src/AudioProfileVectorHelper.cpp",
         "src/AudioRoute.cpp",
@@ -30,7 +35,11 @@
         "src/TypeConverter.cpp",
     ],
     shared_libs: [
+        "audioclient-types-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "libaudioclient_aidl_conversion",
         "libaudiofoundation",
+        "libaudiopolicy",
         "libbase",
         "libcutils",
         "libhidlbase",
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 52a000f..febccac 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -301,6 +301,10 @@
         return mActiveClients;
     }
 
+    // Returns the number of active clients that share the same exclusive preferred device,
+    // or 0 if not all active clients have the same exclusive preferred device.
+    size_t sameExclusivePreferredDevicesCount() const;
+
     bool useHwGain() const
     {
         return !devices().isEmpty() ? devices().itemAt(0)->hasGainController() : false;
@@ -420,6 +424,15 @@
     bool supportsAllDevices(const DeviceVector &devices) const;
 
     /**
+     * @brief supportsAtLeastOne checks if any device in devices is currently supported
+     * @param devices to be checked against
+     * @return true if a device is weakly supported by type (e.g. for non bus / remote submix
+     *         devices), true if a device is supported (both type and address) for bus / remote
+     *         submix devices, false otherwise
+     */
+    bool supportsAtLeastOne(const DeviceVector &devices) const;
+
+    /**
      * @brief supportsDevicesForPlayback
      * @param devices to be checked against
      * @return true if the devices is a supported combo for playback
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
index a62d3f0..1f6002f 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
@@ -16,62 +16,64 @@
 
 #pragma once
 
+#include <string>
 #include <unordered_map>
 #include <unordered_set>
+#include <vector>
 
-#include <AudioPatch.h>
 #include <DeviceDescriptor.h>
-#include <IOProfile.h>
 #include <HwModule.h>
-#include <PolicyAudioPort.h>
-#include <AudioInputDescriptor.h>
-#include <AudioOutputDescriptor.h>
-#include <AudioPolicyMix.h>
-#include <EffectDescriptor.h>
-#include <SoundTriggerSession.h>
-#include <media/AudioProfile.h>
+#include <android/media/AudioPolicyConfig.h>
+#include <error/Result.h>
+#include <utils/StrongPointer.h>
+#include <utils/RefBase.h>
 
 namespace android {
 
-// This class gathers together various bits of AudioPolicyManager
-// configuration, which are usually filled out as a result of parsing
-// the audio_policy_configuration.xml file.
+// This class gathers together various bits of AudioPolicyManager configuration. It can be filled
+// out either as a result of parsing the audio_policy_configuration.xml file, from the HAL data, or
+// with default fallback data.
 //
-// Note that AudioPolicyConfig doesn't own some of the data,
-// it simply proxies access to the fields of AudioPolicyManager
-// class. Be careful about the fields that are references,
-// e.g. 'mOutputDevices'. This also means that it's impossible
-// to implement "deep copying" of this class without re-designing it.
-class AudioPolicyConfig
+// The data in this class is immutable once loaded, which is why a pointer to a const is returned
+// from the factory methods. However, this does not prevent modifications of data bits that
+// are held inside collections, for example, individual modules, devices, etc.
+class AudioPolicyConfig : public RefBase
 {
 public:
-    AudioPolicyConfig(HwModuleCollection &hwModules,
-                      DeviceVector &outputDevices,
-                      DeviceVector &inputDevices,
-                      sp<DeviceDescriptor> &defaultOutputDevice)
-        : mHwModules(hwModules),
-          mOutputDevices(outputDevices),
-          mInputDevices(inputDevices),
-          mDefaultOutputDevice(defaultOutputDevice) {
-        clear();
-    }
+    // Surround formats, with an optional list of subformats that are equivalent from users' POV.
+    using SurroundFormats = std::unordered_map<audio_format_t, std::unordered_set<audio_format_t>>;
 
-    void clear() {
-        mSource = {};
-        mEngineLibraryNameSuffix = kDefaultEngineLibraryNameSuffix;
-        mHwModules.clear();
-        mOutputDevices.clear();
-        mInputDevices.clear();
-        mDefaultOutputDevice.clear();
-        mIsSpeakerDrcEnabled = false;
-        mIsCallScreenModeSupported = false;
-        mSurroundFormats.clear();
-    }
+    // The source used to indicate the configuration from the AIDL HAL.
+    static const constexpr char* const kAidlConfigSource = "AIDL HAL";
+    // The source used to indicate the default fallback configuration.
+    static const constexpr char* const kDefaultConfigSource = "AudioPolicyConfig::setDefault";
+    // The suffix of the "engine default" implementation shared library name.
+    static const constexpr char* const kDefaultEngineLibraryNameSuffix = "default";
+
+    // Creates the default (fallback) configuration.
+    static sp<const AudioPolicyConfig> createDefault();
+    // Attempts to load the configuration from the AIDL config, falls back to default on failure.
+    static sp<const AudioPolicyConfig> loadFromApmAidlConfigWithFallback(
+            const media::AudioPolicyConfig& aidl);
+    // Attempts to load the configuration from the XML file, falls back to default on failure.
+    // If the XML file path is not provided, uses `audio_get_audio_policy_config_file` function.
+    static sp<const AudioPolicyConfig> loadFromApmXmlConfigWithFallback(
+            const std::string& xmlFilePath = "");
+    // The factory method to use in APM tests which craft the configuration manually.
+    static sp<AudioPolicyConfig> createWritableForTests();
+    // The factory method to use in APM tests which use a custom XML file.
+    static error::Result<sp<AudioPolicyConfig>> loadFromCustomXmlConfigForTests(
+            const std::string& xmlFilePath);
+    // The factory method to use in VTS tests. If the 'configPath' is empty,
+    // it is determined automatically from the list of known config paths.
+    static error::Result<sp<AudioPolicyConfig>> loadFromCustomXmlConfigForVtsTests(
+            const std::string& configPath, const std::string& xmlFileName);
+
+    ~AudioPolicyConfig() = default;
 
     const std::string& getSource() const {
         return mSource;
     }
-
     void setSource(const std::string& file) {
         mSource = file;
     }
@@ -79,16 +81,24 @@
     const std::string& getEngineLibraryNameSuffix() const {
         return mEngineLibraryNameSuffix;
     }
-
     void setEngineLibraryNameSuffix(const std::string& suffix) {
         mEngineLibraryNameSuffix = suffix;
     }
 
+    const HwModuleCollection& getHwModules() const { return mHwModules; }
     void setHwModules(const HwModuleCollection &hwModules)
     {
         mHwModules = hwModules;
     }
 
+    const DeviceVector& getInputDevices() const
+    {
+        return mInputDevices;
+    }
+    const DeviceVector& getOutputDevices() const
+    {
+        return mOutputDevices;
+    }
     void addDevice(const sp<DeviceDescriptor> &device)
     {
         if (audio_is_output_device(device->type())) {
@@ -97,128 +107,55 @@
             mInputDevices.add(device);
         }
     }
-
     void addInputDevices(const DeviceVector &inputDevices)
     {
         mInputDevices.add(inputDevices);
     }
-
     void addOutputDevices(const DeviceVector &outputDevices)
     {
         mOutputDevices.add(outputDevices);
     }
 
-    bool isSpeakerDrcEnabled() const { return mIsSpeakerDrcEnabled; }
-
-    void setSpeakerDrcEnabled(bool isSpeakerDrcEnabled)
-    {
-        mIsSpeakerDrcEnabled = isSpeakerDrcEnabled;
-    }
-
-    bool isCallScreenModeSupported() const { return mIsCallScreenModeSupported; }
-
-    void setCallScreenModeSupported(bool isCallScreenModeSupported)
-    {
-        mIsCallScreenModeSupported = isCallScreenModeSupported;
-    }
-
-
-    const HwModuleCollection getHwModules() const { return mHwModules; }
-
-    const DeviceVector &getInputDevices() const
-    {
-        return mInputDevices;
-    }
-
-    const DeviceVector &getOutputDevices() const
-    {
-        return mOutputDevices;
-    }
-
+    const sp<DeviceDescriptor>& getDefaultOutputDevice() const { return mDefaultOutputDevice; }
     void setDefaultOutputDevice(const sp<DeviceDescriptor> &defaultDevice)
     {
         mDefaultOutputDevice = defaultDevice;
     }
 
-    const sp<DeviceDescriptor> &getDefaultOutputDevice() const { return mDefaultOutputDevice; }
-
-    void setDefault(void)
+    bool isCallScreenModeSupported() const { return mIsCallScreenModeSupported; }
+    void setCallScreenModeSupported(bool isCallScreenModeSupported)
     {
-        mSource = "AudioPolicyConfig::setDefault";
-        mEngineLibraryNameSuffix = kDefaultEngineLibraryNameSuffix;
-        mDefaultOutputDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_SPEAKER);
-        mDefaultOutputDevice->addAudioProfile(AudioProfile::createFullDynamic(gDynamicFormat));
-        sp<DeviceDescriptor> defaultInputDevice = new DeviceDescriptor(AUDIO_DEVICE_IN_BUILTIN_MIC);
-        defaultInputDevice->addAudioProfile(AudioProfile::createFullDynamic(gDynamicFormat));
-        sp<AudioProfile> micProfile = new AudioProfile(
-                AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_MONO, 8000);
-        defaultInputDevice->addAudioProfile(micProfile);
-        mOutputDevices.add(mDefaultOutputDevice);
-        mInputDevices.add(defaultInputDevice);
-
-        sp<HwModule> module = new HwModule(AUDIO_HARDWARE_MODULE_ID_PRIMARY, 2 /*halVersionMajor*/);
-        mHwModules.add(module);
-
-        sp<OutputProfile> outProfile = new OutputProfile("primary");
-        outProfile->addAudioProfile(
-                new AudioProfile(AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 44100));
-        outProfile->addSupportedDevice(mDefaultOutputDevice);
-        outProfile->setFlags(AUDIO_OUTPUT_FLAG_PRIMARY);
-        module->addOutputProfile(outProfile);
-
-        sp<InputProfile> inProfile = new InputProfile("primary");
-        inProfile->addAudioProfile(micProfile);
-        inProfile->addSupportedDevice(defaultInputDevice);
-        module->addInputProfile(inProfile);
-
-        setDefaultSurroundFormats();
+        mIsCallScreenModeSupported = isCallScreenModeSupported;
     }
 
-    // Surround formats, with an optional list of subformats that are equivalent from users' POV.
-    using SurroundFormats = std::unordered_map<audio_format_t, std::unordered_set<audio_format_t>>;
-
     const SurroundFormats &getSurroundFormats() const
     {
         return mSurroundFormats;
     }
-
+    void setDefaultSurroundFormats();
     void setSurroundFormats(const SurroundFormats &surroundFormats)
     {
         mSurroundFormats = surroundFormats;
     }
 
-    void setDefaultSurroundFormats()
-    {
-        mSurroundFormats = {
-            {AUDIO_FORMAT_AC3, {}},
-            {AUDIO_FORMAT_E_AC3, {}},
-            {AUDIO_FORMAT_DTS, {}},
-            {AUDIO_FORMAT_DTS_HD, {}},
-            {AUDIO_FORMAT_DTS_HD_MA, {}},
-            {AUDIO_FORMAT_DTS_UHD, {}},
-            {AUDIO_FORMAT_DTS_UHD_P2, {}},
-            {AUDIO_FORMAT_AAC_LC, {
-                    AUDIO_FORMAT_AAC_HE_V1, AUDIO_FORMAT_AAC_HE_V2, AUDIO_FORMAT_AAC_ELD,
-                    AUDIO_FORMAT_AAC_XHE}},
-            {AUDIO_FORMAT_DOLBY_TRUEHD, {}},
-            {AUDIO_FORMAT_E_AC3_JOC, {}},
-            {AUDIO_FORMAT_AC4, {}}};
-    }
+    void setDefault();
 
 private:
-    static const constexpr char* const kDefaultEngineLibraryNameSuffix = "default";
+    friend class sp<AudioPolicyConfig>;
 
-    std::string mSource;
-    std::string mEngineLibraryNameSuffix;
-    HwModuleCollection &mHwModules; /**< Collection of Module, with Profiles, i.e. Mix Ports. */
-    DeviceVector &mOutputDevices;
-    DeviceVector &mInputDevices;
-    sp<DeviceDescriptor> &mDefaultOutputDevice;
-    // TODO: remove when legacy conf file is removed. true on devices that use DRC on the
-    // DEVICE_CATEGORY_SPEAKER path to boost soft sounds, used to adjust volume curves accordingly.
-    // Note: remove also speaker_drc_enabled from global configuration of XML config file.
-    bool mIsSpeakerDrcEnabled;
-    bool mIsCallScreenModeSupported;
+    AudioPolicyConfig() = default;
+
+    void augmentData();
+    status_t loadFromAidl(const media::AudioPolicyConfig& aidl);
+    status_t loadFromXml(const std::string& xmlFilePath, bool forVts);
+
+    std::string mSource;  // Not kDefaultConfigSource. Empty source means an empty config.
+    std::string mEngineLibraryNameSuffix = kDefaultEngineLibraryNameSuffix;
+    HwModuleCollection mHwModules; /**< Collection of Module, with Profiles, i.e. Mix Ports. */
+    DeviceVector mOutputDevices;  // Attached output devices.
+    DeviceVector mInputDevices;   // Attached input devices.
+    sp<DeviceDescriptor> mDefaultOutputDevice;
+    bool mIsCallScreenModeSupported = false;
     SurroundFormats mSurroundFormats;
 };
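A rough usage sketch of the new factory-based API declared above (call sites are illustrative; the real wiring into AudioPolicyManager is done elsewhere in this change):

    // Load the configuration from the APM XML file, falling back to the
    // built-in default configuration on failure. The result is immutable.
    sp<const AudioPolicyConfig> config =
            AudioPolicyConfig::loadFromApmXmlConfigWithFallback();

    ALOGI("Audio policy config source: %s", config->getSource().c_str());
    const DeviceVector& attachedOutputs = config->getOutputDevices();
    const sp<DeviceDescriptor>& defaultOut = config->getDefaultOutputDevice();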
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index 92292e1..7e29e10 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -138,7 +138,7 @@
      */
     status_t setUserIdDeviceAffinities(int userId, const AudioDeviceTypeAddrVector& devices);
     status_t removeUserIdDeviceAffinities(int userId);
-    status_t getDevicesForUserId(int userId, Vector<AudioDeviceTypeAddr>& devices) const;
+    status_t getDevicesForUserId(int userId, AudioDeviceTypeAddrVector& devices) const;
 
     void dump(String8 *dst) const;
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/HwModule.h b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
index 436fcc1..e994758 100644
--- a/services/audiopolicy/common/managerdefinitions/include/HwModule.h
+++ b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
@@ -39,7 +39,8 @@
 class HwModule : public RefBase
 {
 public:
-    explicit HwModule(const char *name, uint32_t halVersionMajor = 0, uint32_t halVersionMinor = 0);
+    explicit HwModule(const char *name, uint32_t halVersionMajor, uint32_t halVersionMinor);
+    HwModule(const char *name, uint32_t halVersion = 0);
     ~HwModule();
 
     const char *getName() const { return mName.string(); }
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index a46186b..8b23311 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -238,6 +238,27 @@
     return clients;
 }
 
+size_t AudioOutputDescriptor::sameExclusivePreferredDevicesCount() const
+{
+    audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
+    size_t count = 0;
+    for (const auto &client : getClientIterable()) {
+        if (client->active()) {
+            if (!(client->hasPreferredDevice() &&
+                    client->isPreferredDeviceForExclusiveUse())) {
+                return 0;
+            }
+            if (deviceId == AUDIO_PORT_HANDLE_NONE) {
+                deviceId = client->preferredDeviceId();
+            } else if (deviceId != client->preferredDeviceId()) {
+                return 0;
+            }
+            count++;
+        }
+    }
+    return count;
+}
+
 bool AudioOutputDescriptor::isAnyActive(VolumeSource volumeSourceToIgnore) const
 {
     return std::find_if(begin(mActiveClients), end(mActiveClients),
@@ -368,6 +389,11 @@
     return supportedDevices().containsAllDevices(devices);
 }
 
+bool SwAudioOutputDescriptor::supportsAtLeastOne(const DeviceVector &devices) const
+{
+    return filterSupportedDevices(devices).size() > 0;
+}
+
 bool SwAudioOutputDescriptor::supportsDevicesForPlayback(const DeviceVector &devices) const
 {
     // No considering duplicated output
@@ -680,10 +706,6 @@
             }
         }
 
-        AudioParameter param;
-        param.add(String8("closing"), String8("true"));
-        mClientInterface->setParameters(mIoHandle, param.toString());
-
         mClientInterface->closeOutput(mIoHandle);
 
         LOG_ALWAYS_FATAL_IF(mProfile->curOpenCount < 1, "%s profile open count %u",
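
The helper added above returns a non-zero count only when every active client on the output has
requested the same device for exclusive use; any active client without an exclusive preferred
device, or with a different one, makes it return 0. A minimal standalone sketch of that invariant,
using a hypothetical Client struct rather than the real client descriptor:

    #include <cstddef>
    #include <vector>

    // Hypothetical, simplified stand-in for the real client descriptor.
    struct Client {
        bool active;
        bool hasPreferredDevice;
        bool preferredForExclusiveUse;
        int preferredDeviceId;  // stand-in for audio_port_handle_t
    };

    // Mirrors the logic of sameExclusivePreferredDevicesCount(): the number of
    // active clients if they all share one exclusive preferred device, else 0.
    std::size_t sameExclusivePreferredDevicesCount(const std::vector<Client>& clients) {
        int deviceId = -1;  // stand-in for AUDIO_PORT_HANDLE_NONE
        std::size_t count = 0;
        for (const auto& c : clients) {
            if (!c.active) continue;
            if (!(c.hasPreferredDevice && c.preferredForExclusiveUse)) return 0;
            if (deviceId == -1) {
                deviceId = c.preferredDeviceId;
            } else if (deviceId != c.preferredDeviceId) {
                return 0;
            }
            ++count;
        }
        return count;
    }
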
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
new file mode 100644
index 0000000..e214ae9
--- /dev/null
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
@@ -0,0 +1,332 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "APM_Config"
+
+#include <AudioPolicyConfig.h>
+#include <IOProfile.h>
+#include <Serializer.h>
+#include <hardware/audio.h>
+#include <media/AidlConversion.h>
+#include <media/AidlConversionUtil.h>
+#include <media/AudioProfile.h>
+#include <system/audio.h>
+#include <system/audio_config.h>
+#include <utils/Log.h>
+
+namespace android {
+
+using media::audio::common::AudioIoFlags;
+using media::audio::common::AudioPortDeviceExt;
+using media::audio::common::AudioPortExt;
+
+namespace {
+
+ConversionResult<sp<PolicyAudioPort>>
+aidl2legacy_portId_PolicyAudioPort(int32_t portId,
+        const std::unordered_map<int32_t, sp<PolicyAudioPort>>& ports) {
+    if (auto it = ports.find(portId); it != ports.end()) {
+        return it->second;
+    }
+    return base::unexpected(BAD_VALUE);
+}
+
+ConversionResult<sp<AudioRoute>>
+aidl2legacy_AudioRoute(const media::AudioRoute& aidl,
+        const std::unordered_map<int32_t, sp<PolicyAudioPort>>& ports) {
+    auto legacy = sp<AudioRoute>::make(aidl.isExclusive ? AUDIO_ROUTE_MUX : AUDIO_ROUTE_MIX);
+    auto legacySink = VALUE_OR_RETURN(aidl2legacy_portId_PolicyAudioPort(aidl.sinkPortId, ports));
+    legacy->setSink(legacySink);
+    PolicyAudioPortVector legacySources;
+    for (int32_t portId : aidl.sourcePortIds) {
+        sp<PolicyAudioPort> legacyPort = VALUE_OR_RETURN(
+                aidl2legacy_portId_PolicyAudioPort(portId, ports));
+        legacySources.add(legacyPort);
+    }
+    legacy->setSources(legacySources);
+    legacySink->addRoute(legacy);
+    for (const auto& legacySource : legacySources) {
+        legacySource->addRoute(legacy);
+    }
+    return legacy;
+}
+
+status_t aidl2legacy_AudioHwModule_HwModule(const media::AudioHwModule& aidl,
+        sp<HwModule>* legacy,
+        DeviceVector* attachedInputDevices, DeviceVector* attachedOutputDevices,
+        sp<DeviceDescriptor>* defaultOutputDevice) {
+    *legacy = sp<HwModule>::make(aidl.name.c_str(), AUDIO_DEVICE_API_VERSION_CURRENT);
+    audio_module_handle_t legacyHandle = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_audio_module_handle_t(aidl.handle));
+    (*legacy)->setHandle(legacyHandle);
+    IOProfileCollection mixPorts;
+    DeviceVector devicePorts;
+    const int defaultDeviceFlag = 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE;
+    std::unordered_map<int32_t, sp<PolicyAudioPort>> ports;
+    for (const auto& aidlPort : aidl.ports) {
+        const bool isInput = aidlPort.flags.getTag() == AudioIoFlags::input;
+        audio_port_v7 legacyPort = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioPort_audio_port_v7(aidlPort, isInput));
+        // This conversion fills out both 'hal' and 'sys' parts.
+        media::AudioPortFw fwPort = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_port_v7_AudioPortFw(legacyPort));
+        // Since audio_port_v7 lacks some fields, for example, 'maxOpen/ActiveCount',
+        // replace the converted data with the actual data from the HAL.
+        fwPort.hal = aidlPort;
+        if (aidlPort.ext.getTag() == AudioPortExt::mix) {
+            auto mixPort = sp<IOProfile>::make("", AUDIO_PORT_ROLE_NONE);
+            RETURN_STATUS_IF_ERROR(mixPort->readFromParcelable(fwPort));
+            sortAudioProfiles(mixPort->getAudioProfiles());
+            mixPorts.add(mixPort);
+            ports.emplace(aidlPort.id, mixPort);
+        } else if (aidlPort.ext.getTag() == AudioPortExt::device) {
+            // In the legacy XML, device ports use 'tagName' instead of 'AudioPort.name'.
+            auto devicePort =
+                    sp<DeviceDescriptor>::make(AUDIO_DEVICE_NONE, aidlPort.name);
+            RETURN_STATUS_IF_ERROR(devicePort->readFromParcelable(fwPort));
+            devicePort->setName("");
+            auto& profiles = devicePort->getAudioProfiles();
+            if (profiles.empty()) {
+                profiles.add(AudioProfile::createFullDynamic(gDynamicFormat));
+            } else {
+                sortAudioProfiles(profiles);
+            }
+            devicePorts.add(devicePort);
+            ports.emplace(aidlPort.id, devicePort);
+
+            if (const auto& deviceExt = aidlPort.ext.get<AudioPortExt::device>();
+                    deviceExt.device.type.connection.empty()) {  // Attached device
+                if (isInput) {
+                    attachedInputDevices->add(devicePort);
+                } else {
+                    attachedOutputDevices->add(devicePort);
+                    if ((deviceExt.flags & defaultDeviceFlag) != 0) {
+                        *defaultOutputDevice = devicePort;
+                    }
+                }
+            }
+        } else {
+            return BAD_VALUE;
+        }
+    }
+    (*legacy)->setProfiles(mixPorts);
+    (*legacy)->setDeclaredDevices(devicePorts);
+    AudioRouteVector routes;
+    for (const auto& aidlRoute : aidl.routes) {
+        sp<AudioRoute> legacy = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioRoute(aidlRoute, ports));
+        routes.add(legacy);
+    }
+    (*legacy)->setRoutes(routes);
+    return OK;
+}
+
+status_t aidl2legacy_AudioHwModules_HwModuleCollection(
+        const std::vector<media::AudioHwModule>& aidl,
+        HwModuleCollection* legacyModules, DeviceVector* attachedInputDevices,
+        DeviceVector* attachedOutputDevices, sp<DeviceDescriptor>* defaultOutputDevice) {
+    for (const auto& aidlModule : aidl) {
+        sp<HwModule> legacy;
+        RETURN_STATUS_IF_ERROR(aidl2legacy_AudioHwModule_HwModule(aidlModule, &legacy,
+                        attachedInputDevices, attachedOutputDevices, defaultOutputDevice));
+        legacyModules->add(legacy);
+    }
+    return OK;
+}
+
+using SurroundFormatFamily = AudioPolicyConfig::SurroundFormats::value_type;
+ConversionResult<SurroundFormatFamily>
+aidl2legacy_SurroundFormatFamily(const media::SurroundSoundConfig::SurroundFormatFamily& aidl) {
+    audio_format_t legacyPrimary = VALUE_OR_RETURN(
+            aidl2legacy_AudioFormatDescription_audio_format_t(aidl.primaryFormat));
+    std::unordered_set<audio_format_t> legacySubs = VALUE_OR_RETURN(
+            convertContainer<std::unordered_set<audio_format_t>>(
+                    aidl.subFormats, aidl2legacy_AudioFormatDescription_audio_format_t));
+    return std::make_pair(legacyPrimary, legacySubs);
+}
+
+ConversionResult<AudioPolicyConfig::SurroundFormats>
+aidl2legacy_SurroundSoundConfig_SurroundFormats(const media::SurroundSoundConfig& aidl) {
+    return convertContainer<AudioPolicyConfig::SurroundFormats>(aidl.formatFamilies,
+            aidl2legacy_SurroundFormatFamily);
+}
+
+}  // namespace
+
+// static
+sp<const AudioPolicyConfig> AudioPolicyConfig::createDefault() {
+    auto config = sp<AudioPolicyConfig>::make();
+    config->setDefault();
+    return config;
+}
+
+// static
+sp<const AudioPolicyConfig> AudioPolicyConfig::loadFromApmAidlConfigWithFallback(
+        const media::AudioPolicyConfig& aidl) {
+    auto config = sp<AudioPolicyConfig>::make();
+    if (status_t status = config->loadFromAidl(aidl); status == NO_ERROR) {
+        return config;
+    }
+    return createDefault();
+}
+
+// static
+sp<const AudioPolicyConfig> AudioPolicyConfig::loadFromApmXmlConfigWithFallback(
+        const std::string& xmlFilePath) {
+    const std::string filePath =
+            xmlFilePath.empty() ? audio_get_audio_policy_config_file() : xmlFilePath;
+    auto config = sp<AudioPolicyConfig>::make();
+    if (status_t status = config->loadFromXml(filePath, false /*forVts*/); status == NO_ERROR) {
+        return config;
+    }
+    return createDefault();
+}
+
+// static
+sp<AudioPolicyConfig> AudioPolicyConfig::createWritableForTests() {
+    return sp<AudioPolicyConfig>::make();
+}
+
+// static
+error::Result<sp<AudioPolicyConfig>> AudioPolicyConfig::loadFromCustomXmlConfigForTests(
+        const std::string& xmlFilePath) {
+    auto config = sp<AudioPolicyConfig>::make();
+    if (status_t status = config->loadFromXml(xmlFilePath, false /*forVts*/); status == NO_ERROR) {
+        return config;
+    } else {
+        return base::unexpected(status);
+    }
+}
+
+// static
+error::Result<sp<AudioPolicyConfig>> AudioPolicyConfig::loadFromCustomXmlConfigForVtsTests(
+        const std::string& configPath, const std::string& xmlFileName) {
+    auto filePath = configPath;
+    if (filePath.empty()) {
+        for (const auto& location : audio_get_configuration_paths()) {
+            std::string path = location + '/' + xmlFileName;
+            if (access(path.c_str(), F_OK) == 0) {
+                filePath = location;
+                break;
+            }
+        }
+    }
+    if (filePath.empty()) {
+        ALOGE("Did not find a config file \"%s\" among known config paths", xmlFileName.c_str());
+        return base::unexpected(BAD_VALUE);
+    }
+    auto config = sp<AudioPolicyConfig>::make();
+    if (status_t status = config->loadFromXml(filePath + "/" + xmlFileName, true /*forVts*/);
+            status == NO_ERROR) {
+        return config;
+    } else {
+        return base::unexpected(status);
+    }
+}
+
+void AudioPolicyConfig::augmentData() {
+    // If microphones address is empty, set it according to device type
+    for (size_t i = 0; i < mInputDevices.size(); i++) {
+        if (mInputDevices[i]->address().empty()) {
+            if (mInputDevices[i]->type() == AUDIO_DEVICE_IN_BUILTIN_MIC) {
+                mInputDevices[i]->setAddress(AUDIO_BOTTOM_MICROPHONE_ADDRESS);
+            } else if (mInputDevices[i]->type() == AUDIO_DEVICE_IN_BACK_MIC) {
+                mInputDevices[i]->setAddress(AUDIO_BACK_MICROPHONE_ADDRESS);
+            }
+        }
+    }
+}
+
+status_t AudioPolicyConfig::loadFromAidl(const media::AudioPolicyConfig& aidl) {
+    RETURN_STATUS_IF_ERROR(aidl2legacy_AudioHwModules_HwModuleCollection(aidl.modules,
+                    &mHwModules, &mInputDevices, &mOutputDevices, &mDefaultOutputDevice));
+    mIsCallScreenModeSupported = std::find(aidl.supportedModes.begin(), aidl.supportedModes.end(),
+            media::audio::common::AudioMode::CALL_SCREEN) != aidl.supportedModes.end();
+    mSurroundFormats = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_SurroundSoundConfig_SurroundFormats(aidl.surroundSoundConfig));
+    mSource = kAidlConfigSource;
+    // No need to call augmentData() as the AIDL HAL must provide correct mic addresses.
+    return NO_ERROR;
+}
+
+status_t AudioPolicyConfig::loadFromXml(const std::string& xmlFilePath, bool forVts) {
+    if (xmlFilePath.empty()) {
+        ALOGE("Audio policy configuration file name is empty");
+        return BAD_VALUE;
+    }
+    status_t status = forVts ? deserializeAudioPolicyFileForVts(xmlFilePath.c_str(), this)
+            : deserializeAudioPolicyFile(xmlFilePath.c_str(), this);
+    if (status == NO_ERROR) {
+        mSource = xmlFilePath;
+        augmentData();
+    } else {
+        ALOGE("Could not load audio policy from the configuration file \"%s\": %d",
+                xmlFilePath.c_str(), status);
+    }
+    return status;
+}
+
+void AudioPolicyConfig::setDefault() {
+    mSource = kDefaultConfigSource;
+    mEngineLibraryNameSuffix = kDefaultEngineLibraryNameSuffix;
+
+    mDefaultOutputDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_SPEAKER);
+    mDefaultOutputDevice->addAudioProfile(AudioProfile::createFullDynamic(gDynamicFormat));
+    sp<DeviceDescriptor> defaultInputDevice = new DeviceDescriptor(AUDIO_DEVICE_IN_BUILTIN_MIC);
+    defaultInputDevice->addAudioProfile(AudioProfile::createFullDynamic(gDynamicFormat));
+    sp<AudioProfile> micProfile = new AudioProfile(
+            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_MONO, 8000);
+    defaultInputDevice->addAudioProfile(micProfile);
+    mOutputDevices.add(mDefaultOutputDevice);
+    mInputDevices.add(defaultInputDevice);
+
+    sp<HwModule> module = new HwModule(
+            AUDIO_HARDWARE_MODULE_ID_PRIMARY, AUDIO_DEVICE_API_VERSION_2_0);
+    mHwModules.add(module);
+
+    sp<OutputProfile> outProfile = new OutputProfile("primary");
+    outProfile->addAudioProfile(
+            new AudioProfile(AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 44100));
+    outProfile->addSupportedDevice(mDefaultOutputDevice);
+    outProfile->setFlags(AUDIO_OUTPUT_FLAG_PRIMARY);
+    module->addOutputProfile(outProfile);
+
+    sp<InputProfile> inProfile = new InputProfile("primary");
+    inProfile->addAudioProfile(micProfile);
+    inProfile->addSupportedDevice(defaultInputDevice);
+    module->addInputProfile(inProfile);
+
+    setDefaultSurroundFormats();
+    augmentData();
+}
+
+void AudioPolicyConfig::setDefaultSurroundFormats() {
+    mSurroundFormats = {
+        {AUDIO_FORMAT_AC3, {}},
+        {AUDIO_FORMAT_E_AC3, {}},
+        {AUDIO_FORMAT_DTS, {}},
+        {AUDIO_FORMAT_DTS_HD, {}},
+        {AUDIO_FORMAT_DTS_HD_MA, {}},
+        {AUDIO_FORMAT_DTS_UHD, {}},
+        {AUDIO_FORMAT_DTS_UHD_P2, {}},
+        {AUDIO_FORMAT_AAC_LC, {
+                AUDIO_FORMAT_AAC_HE_V1, AUDIO_FORMAT_AAC_HE_V2, AUDIO_FORMAT_AAC_ELD,
+                AUDIO_FORMAT_AAC_XHE}},
+        {AUDIO_FORMAT_DOLBY_TRUEHD, {}},
+        {AUDIO_FORMAT_E_AC3_JOC, {}},
+        {AUDIO_FORMAT_AC4, {}}};
+}
+
+} // namespace android
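
The factory methods above are the intended entry points for obtaining a config now that direct
construction is restricted. A hedged sketch of a caller (the helper name is hypothetical, and this
only builds against the managerdefinitions headers in the AOSP tree):

    #include <string>

    #include <AudioPolicyConfig.h>

    using android::AudioPolicyConfig;
    using android::sp;

    // Hypothetical helper: load the policy configuration, falling back to the
    // built-in default if the XML file is missing or malformed.
    sp<const AudioPolicyConfig> getPolicyConfig(const std::string& xmlPath) {
        // An empty path makes loadFromApmXmlConfigWithFallback() use the file
        // returned by audio_get_audio_policy_config_file().
        return AudioPolicyConfig::loadFromApmXmlConfigWithFallback(xmlPath);
    }
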
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index ba5a6a7..fc1372b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -183,7 +183,8 @@
     for (size_t i = 0; i < size(); i++) {
         const sp<AudioPolicyMix>& registeredMix = itemAt(i);
         if (mix.mDeviceType == registeredMix->mDeviceType
-                && mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0) {
+                && mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0
+                && is_mix_loopback(mix.mRouteFlags)) {
             ALOGE("registerMix(): mix already registered for dev=0x%x addr=%s",
                     mix.mDeviceType, mix.mDeviceAddress.string());
             return BAD_VALUE;
@@ -278,17 +279,6 @@
             mixesDisallowsRequestedDevice = true;
         }
 
-        if (!primaryOutputMix && (flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)) {
-            // AAudio does not support MMAP_NO_IRQ loopback render, and there is no way with
-            // the current MmapStreamInterface::start to reject a specific client added to a shared
-            // mmap stream.
-            // As a result all MMAP_NOIRQ requests have to be rejected when an loopback render
-            // policy is present. That ensures no shared mmap stream is used when an loopback
-            // render policy is registered.
-            ALOGD("%s: Rejecting MMAP_NOIRQ request due to LOOPBACK|RENDER mix present.", __func__);
-            return INVALID_OPERATION;
-        }
-
         if (primaryOutputMix && primaryMix != nullptr) {
             ALOGV("%s: Skiping %zu: Primary output already found", __func__, i);
             continue; // Primary output already found
@@ -299,6 +289,13 @@
             continue; // skip the mix
         }
 
+        if (flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) {
+            // AAudio MMAP_NOIRQ streams cannot be routed using dynamic audio policy.
+            ALOGD("%s: Rejecting MMAP_NOIRQ request matched to dynamic audio policy mix.",
+                __func__);
+            return INVALID_OPERATION;
+        }
+
         if (mixDevice != nullptr && mixDevice->equals(requestedDevice)) {
             ALOGV("%s: Mix %zu: requested device mathches", __func__, i);
             mixesDisallowsRequestedDevice = false;
@@ -644,7 +641,7 @@
 }
 
 status_t AudioPolicyMixCollection::getDevicesForUserId(int userId,
-        Vector<AudioDeviceTypeAddr>& devices) const {
+        AudioDeviceTypeAddrVector& devices) const {
     // for each player mix:
     // find rules that don't exclude this userId, and add the device to the list
     for (size_t i = 0; i < size(); i++) {
@@ -662,7 +659,7 @@
             }
         }
         if (ruleAllowsUserId) {
-            devices.add(AudioDeviceTypeAddr(mix->mDeviceType, mix->mDeviceAddress.string()));
+            devices.push_back(AudioDeviceTypeAddr(mix->mDeviceType, mix->mDeviceAddress.string()));
         }
     }
     return NO_ERROR;
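
With the hunk above, MMAP_NOIRQ requests are rejected only once they have actually matched a
dynamic policy mix, rather than whenever any loopback/render mix is registered. A simplified
standalone sketch of that ordering (the Mix type and the uid-based matching rule are hypothetical):

    #include <vector>

    // Hypothetical, greatly simplified mix description.
    struct Mix {
        int matchUid;
        bool matches(int uid) const { return uid == matchUid; }
    };

    enum Result { NO_MIX, MATCHED, REJECT_MMAP_NOIRQ };

    // Reject MMAP_NOIRQ only when the request would actually be routed by a mix.
    Result pickMixForOutput(const std::vector<Mix>& mixes, int uid, bool isMmapNoIrq) {
        for (const auto& mix : mixes) {
            if (!mix.matches(uid)) {
                continue;  // this mix does not apply to the request
            }
            if (isMmapNoIrq) {
                return REJECT_MMAP_NOIRQ;  // matched a dynamic policy mix
            }
            return MATCHED;
        }
        return NO_MIX;  // no mix matched; normal routing applies
    }
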
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
index 8ccb8b9..82f51ad 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
@@ -115,12 +115,22 @@
         profile->setDynamicFormat(true);
         profile->setDynamicChannels(dynamicFormatProfile->isDynamicChannels());
         profile->setDynamicRate(dynamicFormatProfile->isDynamicRate());
-        addAudioProfileAndSort(audioProfileVector, profile);
+        size_t profileIndex = 0;
+        for (; profileIndex < audioProfileVector.size(); profileIndex++) {
+            if (profile->equals(audioProfileVector.at(profileIndex))) {
+                // The dynamic profile is already there
+                break;
+            }
+        }
+        if (profileIndex >= audioProfileVector.size()) {
+            // Only add when the dynamic profile is not there
+            addAudioProfileAndSort(audioProfileVector, profile);
+        }
     }
 }
 
 void addDynamicAudioProfileAndSort(AudioProfileVector &audioProfileVector,
-                                      const sp<AudioProfile> &profileToAdd)
+                                   const sp<AudioProfile> &profileToAdd)
 {
     // Check valid profile to add:
     if (!profileToAdd->hasValidFormat()) {
@@ -143,11 +153,15 @@
                 audioProfileVector, profileToAdd->getChannels(), profileToAdd->getFormat());
         return;
     }
+    const bool originalIsDynamicFormat = profileToAdd->isDynamicFormat();
+    profileToAdd->setDynamicFormat(true); // set the format as dynamic to allow removal
     // Go through the list of profile to avoid duplicates
     for (size_t profileIndex = 0; profileIndex < audioProfileVector.size(); profileIndex++) {
         const sp<AudioProfile> &profile = audioProfileVector.at(profileIndex);
-        if (profile->isValid() && profile == profileToAdd) {
-            // Nothing to do
+        if (profile->isValid() && profile->equals(profileToAdd)) {
+            // The same profile is already there, no need to add.
+            // Reset `isDynamicProfile` as original value.
+            profileToAdd->setDynamicFormat(originalIsDynamicFormat);
             return;
         }
     }
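
Both hunks above deduplicate profiles by value (AudioProfile::equals()) instead of by pointer
identity, so a logically identical profile coming from a different object is no longer added twice.
A standalone sketch of that check with a hypothetical Profile type:

    #include <memory>
    #include <vector>

    // Hypothetical, simplified profile: only the format participates in equality.
    struct Profile {
        int format;
        bool equals(const std::shared_ptr<Profile>& other) const {
            return other != nullptr && format == other->format;
        }
    };

    // Add 'candidate' only if no existing entry compares equal to it, even when
    // the equal entry is a different object.
    void addIfAbsent(std::vector<std::shared_ptr<Profile>>& profiles,
                     const std::shared_ptr<Profile>& candidate) {
        for (const auto& p : profiles) {
            if (p->equals(candidate)) {
                return;
            }
        }
        profiles.push_back(candidate);
    }
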
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index 418b7eb..5f14ee4 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -33,6 +33,13 @@
     setHalVersion(halVersionMajor, halVersionMinor);
 }
 
+HwModule::HwModule(const char *name, uint32_t halVersion)
+    : mName(String8(name)),
+      mHandle(AUDIO_MODULE_HANDLE_NONE),
+      mHalVersion(halVersion)
+{
+}
+
 HwModule::~HwModule()
 {
     for (size_t i = 0; i < mOutputProfiles.size(); i++) {
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index 98d7d59..03ab3f8 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -75,7 +75,7 @@
             if (checkExactAudioProfile(&config) != NO_ERROR) {
                 return false;
             }
-        } else if (checkCompatibleAudioProfile(
+        } else if (checkExactAudioProfile(&config) != NO_ERROR && checkCompatibleAudioProfile(
                 myUpdatedSamplingRate, myUpdatedChannelMask, myUpdatedFormat) != NO_ERROR) {
             return false;
         }
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index d446e96..3d5c1d2 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -29,6 +29,7 @@
 #include <utils/StrongPointer.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
+#include "IOProfile.h"
 #include "Serializer.h"
 #include "TypeConverter.h"
 
@@ -196,7 +197,6 @@
 
     struct Attributes
     {
-        static constexpr const char *speakerDrcEnabled = "speaker_drc_enabled";
         static constexpr const char *callScreenModeSupported= "call_screen_mode_supported";
         static constexpr const char *engineLibrarySuffix = "engine_library";
     };
@@ -769,12 +769,7 @@
     for (const xmlNode *cur = root->xmlChildrenNode; cur != NULL; cur = cur->next) {
         if (!xmlStrcmp(cur->name, reinterpret_cast<const xmlChar*>(GlobalConfigTraits::tag))) {
             bool value;
-            std::string attr = getXmlAttribute(cur, Attributes::speakerDrcEnabled);
-            if (!attr.empty() &&
-                    convertTo<std::string, bool>(attr, value)) {
-                config->setSpeakerDrcEnabled(value);
-            }
-            attr = getXmlAttribute(cur, Attributes::callScreenModeSupported);
+            std::string attr = getXmlAttribute(cur, Attributes::callScreenModeSupported);
             if (!attr.empty() &&
                     convertTo<std::string, bool>(attr, value)) {
                 config->setCallScreenModeSupported(value);
@@ -907,7 +902,6 @@
 {
     PolicySerializer serializer;
     status_t status = serializer.deserialize(fileName, config);
-    if (status != OK) config->clear();
     return status;
 }
 
@@ -915,7 +909,6 @@
 {
     PolicySerializer serializer;
     status_t status = serializer.deserialize(fileName, config, true /*ignoreVendorExtensions*/);
-    if (status != OK) config->clear();
     return status;
 }
 
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
index 50c5eab..6c46c54 100644
--- a/services/audiopolicy/engine/common/Android.bp
+++ b/services/audiopolicy/engine/common/Android.bp
@@ -51,10 +51,10 @@
         "libaudiopolicyengine_common_headers",
     ],
     static_libs: [
-        "libaudiopolicycomponents",
         "libaudiopolicyengine_config",
     ],
     shared_libs: [
         "libaudiofoundation",
+        "libaudiopolicycomponents",
     ],
 }
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
index 8cfa592..b9c94a4 100644
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -16,6 +16,9 @@
 
 #pragma once
 
+#include <functional>
+
+#include <android/media/audio/common/AudioHalEngineConfig.h>
 #include <EngineConfig.h>
 #include <EngineInterface.h>
 #include <ProductStrategy.h>
@@ -113,7 +116,10 @@
     status_t getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
             AudioDeviceTypeAddrVector &devices) const override;
 
-    engineConfig::ParsingResult loadAudioPolicyEngineConfig();
+    engineConfig::ParsingResult loadAudioPolicyEngineConfig(
+            const media::audio::common::AudioHalEngineConfig& aidlConfig);
+
+    engineConfig::ParsingResult loadAudioPolicyEngineConfig(const std::string& xmlFilePath = "");
 
     const ProductStrategyMap &getProductStrategies() const { return mProductStrategies; }
 
@@ -165,7 +171,19 @@
 
     DeviceVector getActiveMediaDevices(const DeviceVector& availableDevices) const override;
 
+    void initializeDeviceSelectionCache() override;
+
+    void updateDeviceSelectionCache() override;
+
+protected:
+    DeviceVector getPreferredAvailableDevicesForProductStrategy(
+        const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
+    DeviceVector getDisabledDevicesForProductStrategy(
+        const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
+
 private:
+    engineConfig::ParsingResult processParsingResult(engineConfig::ParsingResult&& rawResult);
+
     /**
      * Get media devices as the given role
      *
@@ -193,6 +211,28 @@
 
     /** current forced use configuration. */
     audio_policy_forced_cfg_t mForceUse[AUDIO_POLICY_FORCE_USE_CNT] = {};
+
+protected:
+    /**
+     * Set the device information for a given strategy.
+     *
+     * @param strategy the strategy to set devices information
+     * @param devices the devices selected for the strategy
+     */
+    virtual void setStrategyDevices(const sp<ProductStrategy>& /*strategy*/,
+                                    const DeviceVector& /*devices*/) {
+        // In EngineBase, do nothing. It is up to the actual engine to decide if it is needed to
+        // set devices information for the given strategy.
+    }
+
+    /**
+     * Get devices that will be used for the given product strategy.
+     *
+     * @param strategy the strategy to query
+     */
+    virtual DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const = 0;
+
+    DeviceStrategyMap mDevicesForStrategies;
 };
 
 } // namespace audio_policy
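
A sketch of how a concrete engine might hook into the cache machinery declared above; the MyEngine
class and its trivial device choice are hypothetical, and the remaining EngineInterface methods are
omitted, so the class stays abstract:

    #include <EngineBase.h>

    namespace android {
    namespace audio_policy {

    class MyEngine : public EngineBase {
    protected:
        DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const override {
            // A real engine applies per-strategy routing rules; this sketch just
            // returns whatever output devices are currently available.
            (void)strategy;
            return getApmObserver()->getAvailableOutputDevices();
        }

        void setStrategyDevices(const sp<ProductStrategy>& strategy,
                                const DeviceVector& devices) override {
            // Optionally mirror the cached selection into engine-specific state.
            (void)strategy;
            (void)devices;
        }
    };

    }  // namespace audio_policy
    }  // namespace android
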
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 8015ae0..218aff8 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -115,10 +115,53 @@
     return PRODUCT_STRATEGY_NONE;
 }
 
-engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig()
+engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig(
+        const media::audio::common::AudioHalEngineConfig& aidlConfig)
+{
+    engineConfig::ParsingResult result = engineConfig::convert(aidlConfig);
+    if (result.parsedConfig == nullptr) {
+        ALOGE("%s: There was an error parsing AIDL data", __func__);
+        result = {std::make_unique<engineConfig::Config>(gDefaultEngineConfig), 1};
+    } else {
+        // It is allowed for the HAL to return an empty list of strategies.
+        if (result.parsedConfig->productStrategies.empty()) {
+            result.parsedConfig->productStrategies = gDefaultEngineConfig.productStrategies;
+        }
+    }
+    return processParsingResult(std::move(result));
+}
+
+engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig(const std::string& xmlFilePath)
+{
+    auto fileExists = [](const char* path) {
+        struct stat fileStat;
+        return stat(path, &fileStat) == 0 && S_ISREG(fileStat.st_mode);
+    };
+    const std::string filePath = xmlFilePath.empty() ? engineConfig::DEFAULT_PATH : xmlFilePath;
+    engineConfig::ParsingResult result =
+            fileExists(filePath.c_str()) ?
+            engineConfig::parse(filePath.c_str()) : engineConfig::ParsingResult{};
+    if (result.parsedConfig == nullptr) {
+        ALOGD("%s: No configuration found, using default matching phone experience.", __FUNCTION__);
+        engineConfig::Config config = gDefaultEngineConfig;
+        android::status_t ret = engineConfig::parseLegacyVolumes(config.volumeGroups);
+        result = {std::make_unique<engineConfig::Config>(config),
+                  static_cast<size_t>(ret == NO_ERROR ? 0 : 1)};
+    } else {
+        // Append for internal use only volume groups (e.g. rerouting/patch)
+        result.parsedConfig->volumeGroups.insert(
+                    std::end(result.parsedConfig->volumeGroups),
+                    std::begin(gSystemVolumeGroups), std::end(gSystemVolumeGroups));
+    }
+    ALOGE_IF(result.nbSkippedElement != 0, "skipped %zu elements", result.nbSkippedElement);
+    return processParsingResult(std::move(result));
+}
+
+engineConfig::ParsingResult EngineBase::processParsingResult(
+        engineConfig::ParsingResult&& rawResult)
 {
     auto loadVolumeConfig = [](auto &volumeGroups, auto &volumeConfig) {
-        // Ensure name unicity to prevent duplicate
+        // Ensure volume group name uniqueness.
         LOG_ALWAYS_FATAL_IF(std::any_of(std::begin(volumeGroups), std::end(volumeGroups),
                                      [&volumeConfig](const auto &volumeGroup) {
                 return volumeConfig.name == volumeGroup.second->getName(); }),
@@ -158,41 +201,21 @@
         });
         return iter != end(volumeGroups);
     };
-    auto fileExists = [](const char* path) {
-        struct stat fileStat;
-        return stat(path, &fileStat) == 0 && S_ISREG(fileStat.st_mode);
-    };
 
-    auto result = fileExists(engineConfig::DEFAULT_PATH) ?
-            engineConfig::parse(engineConfig::DEFAULT_PATH) : engineConfig::ParsingResult{};
-    if (result.parsedConfig == nullptr) {
-        ALOGD("%s: No configuration found, using default matching phone experience.", __FUNCTION__);
-        engineConfig::Config config = gDefaultEngineConfig;
-        android::status_t ret = engineConfig::parseLegacyVolumes(config.volumeGroups);
-        result = {std::make_unique<engineConfig::Config>(config),
-                  static_cast<size_t>(ret == NO_ERROR ? 0 : 1)};
-    } else {
-        // Append for internal use only volume groups (e.g. rerouting/patch)
-        result.parsedConfig->volumeGroups.insert(
-                    std::end(result.parsedConfig->volumeGroups),
-                    std::begin(gSystemVolumeGroups), std::end(gSystemVolumeGroups));
-    }
+    auto result = std::move(rawResult);
     // Append for internal use only strategies (e.g. rerouting/patch)
     result.parsedConfig->productStrategies.insert(
                 std::end(result.parsedConfig->productStrategies),
                 std::begin(gOrderedSystemStrategies), std::end(gOrderedSystemStrategies));
 
-
-    ALOGE_IF(result.nbSkippedElement != 0, "skipped %zu elements", result.nbSkippedElement);
-
     engineConfig::VolumeGroup defaultVolumeConfig;
     engineConfig::VolumeGroup defaultSystemVolumeConfig;
     for (auto &volumeConfig : result.parsedConfig->volumeGroups) {
         // save default volume config for streams not defined in configuration
-        if (volumeConfig.name.compare("AUDIO_STREAM_MUSIC") == 0) {
+        if (volumeConfig.name.compare(audio_stream_type_to_string(AUDIO_STREAM_MUSIC)) == 0) {
             defaultVolumeConfig = volumeConfig;
         }
-        if (volumeConfig.name.compare("AUDIO_STREAM_PATCH") == 0) {
+        if (volumeConfig.name.compare(audio_stream_type_to_string(AUDIO_STREAM_PATCH)) == 0) {
             defaultSystemVolumeConfig = volumeConfig;
         }
         loadVolumeConfig(mVolumeGroups, volumeConfig);
@@ -685,6 +708,58 @@
     return activeDevices;
 }
 
+void EngineBase::initializeDeviceSelectionCache() {
+    // Initializing the device selection cache with the default device is harmless; the cache
+    // is updated after the audio modules are initialized.
+    auto defaultDevices = DeviceVector(getApmObserver()->getDefaultOutputDevice());
+    for (const auto &iter : getProductStrategies()) {
+        const auto &strategy = iter.second;
+        mDevicesForStrategies[strategy->getId()] = defaultDevices;
+        setStrategyDevices(strategy, defaultDevices);
+    }
+}
+
+void EngineBase::updateDeviceSelectionCache() {
+    for (const auto &iter : getProductStrategies()) {
+        const auto& strategy = iter.second;
+        auto devices = getDevicesForProductStrategy(strategy->getId());
+        mDevicesForStrategies[strategy->getId()] = devices;
+        setStrategyDevices(strategy, devices);
+    }
+}
+
+DeviceVector EngineBase::getPreferredAvailableDevicesForProductStrategy(
+        const DeviceVector& availableOutputDevices, product_strategy_t strategy) const {
+    DeviceVector preferredAvailableDevVec = {};
+    AudioDeviceTypeAddrVector preferredStrategyDevices;
+    const status_t status = getDevicesForRoleAndStrategy(
+            strategy, DEVICE_ROLE_PREFERRED, preferredStrategyDevices);
+    if (status == NO_ERROR) {
+        // there is a preferred device, is it available?
+        preferredAvailableDevVec =
+                availableOutputDevices.getDevicesFromDeviceTypeAddrVec(preferredStrategyDevices);
+        if (preferredAvailableDevVec.size() == preferredStrategyDevices.size()) {
+            ALOGV("%s using pref device %s for strategy %u",
+                   __func__, preferredAvailableDevVec.toString().c_str(), strategy);
+            return preferredAvailableDevVec;
+        }
+    }
+    return preferredAvailableDevVec;
+}
+
+DeviceVector EngineBase::getDisabledDevicesForProductStrategy(
+        const DeviceVector &availableOutputDevices, product_strategy_t strategy) const {
+    DeviceVector disabledDevices = {};
+    AudioDeviceTypeAddrVector disabledDevicesTypeAddr;
+    const status_t status = getDevicesForRoleAndStrategy(
+            strategy, DEVICE_ROLE_DISABLED, disabledDevicesTypeAddr);
+    if (status == NO_ERROR) {
+        disabledDevices =
+                availableOutputDevices.getDevicesFromDeviceTypeAddrVec(disabledDevicesTypeAddr);
+    }
+    return disabledDevices;
+}
+
 void EngineBase::dumpCapturePresetDevicesRoleMap(String8 *dst, int spaces) const
 {
     dst->appendFormat("\n%*sDevice role per capture preset dump:", spaces, "");
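
The intended lifecycle for the new per-strategy cache is: seed it with the default device while
the audio policy manager is still initializing, then recompute it whenever routing conditions
change. A hedged sketch of callers (the trigger points in the real AudioPolicyManager may differ):

    #include <EngineInterface.h>

    namespace android {

    void seedEngineCache(EngineInterface* engine) {
        // Allowed only while the audio policy manager is initializing.
        engine->initializeDeviceSelectionCache();
    }

    void refreshEngineCache(EngineInterface* engine) {
        // Call after device availability or device/strategy roles change.
        engine->updateDeviceSelectionCache();
    }

    }  // namespace android
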
diff --git a/services/audiopolicy/engine/common/src/ProductStrategy.cpp b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
index 1d3ad1c..0d25955 100644
--- a/services/audiopolicy/engine/common/src/ProductStrategy.cpp
+++ b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
@@ -155,7 +155,7 @@
             return iter.second->getId();
         }
         if (score > matchScore) {
-            bestStrategyOrdefault = iter.second->getId();;
+            bestStrategyOrdefault = iter.second->getId();
             matchScore = score;
         }
     }
diff --git a/services/audiopolicy/engine/config/Android.bp b/services/audiopolicy/engine/config/Android.bp
index 459cc78..12597de 100644
--- a/services/audiopolicy/engine/config/Android.bp
+++ b/services/audiopolicy/engine/config/Android.bp
@@ -22,11 +22,13 @@
         "-Wextra",
     ],
     shared_libs: [
-        "libmedia_helper",
-        "libxml2",
-        "libutils",
-        "liblog",
+        "libaudio_aidl_conversion_common_cpp",
+        "libaudiopolicycomponents",
         "libcutils",
+        "liblog",
+        "libmedia_helper",
+        "libutils",
+        "libxml2",
     ],
     header_libs: [
         "libaudio_system_headers",
diff --git a/services/audiopolicy/engine/config/include/EngineConfig.h b/services/audiopolicy/engine/config/include/EngineConfig.h
index 4de16c5..119dbd6 100644
--- a/services/audiopolicy/engine/config/include/EngineConfig.h
+++ b/services/audiopolicy/engine/config/include/EngineConfig.h
@@ -16,10 +16,11 @@
 
 #pragma once
 
-#include <system/audio.h>
-
 #include <string>
 #include <vector>
+
+#include <android/media/audio/common/AudioHalEngineConfig.h>
+#include <system/audio.h>
 #include <utils/Errors.h>
 
 struct _xmlNode;
@@ -116,6 +117,7 @@
  */
 ParsingResult parse(const char* path = DEFAULT_PATH);
 android::status_t parseLegacyVolumes(VolumeGroups &volumeGroups);
+ParsingResult convert(const ::android::media::audio::common::AudioHalEngineConfig& aidlConfig);
 // Exposed for testing.
 android::status_t parseLegacyVolumeFile(const char* path, VolumeGroups &volumeGroups);
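
The new convert() entry point mirrors parse(): both report failure through a null parsedConfig, so
callers can share one fallback path. A hedged sketch of a caller, assuming a valid
AudioHalEngineConfig was already obtained from the HAL:

    #include <EngineConfig.h>

    using ::android::media::audio::common::AudioHalEngineConfig;

    // Hypothetical caller; real code lives in the policy engines.
    bool loadEngineConfigFromHal(const AudioHalEngineConfig& halConfig) {
        auto result = android::engineConfig::convert(halConfig);
        if (result.parsedConfig == nullptr) {
            return false;  // conversion failed; fall back to defaults
        }
        // result.parsedConfig->productStrategies / volumeGroups are ready for use.
        return result.nbSkippedElement == 0;
    }
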
 
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index ac117f0..ca78ce7 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -14,26 +14,30 @@
  * limitations under the License.
  */
 
+#include <cstdint>
+#include <istream>
+#include <map>
+#include <sstream>
+#include <stdarg.h>
+#include <string>
+#include <string>
+#include <vector>
+
 #define LOG_TAG "APM::AudioPolicyEngine/Config"
 //#define LOG_NDEBUG 0
 
 #include "EngineConfig.h"
+#include <TypeConverter.h>
+#include <Volume.h>
 #include <cutils/properties.h>
+#include <libxml/parser.h>
+#include <libxml/xinclude.h>
+#include <media/AidlConversion.h>
+#include <media/AidlConversionUtil.h>
 #include <media/TypeConverter.h>
 #include <media/convert.h>
 #include <system/audio_config.h>
 #include <utils/Log.h>
-#include <libxml/parser.h>
-#include <libxml/xinclude.h>
-#include <string>
-#include <vector>
-#include <map>
-#include <sstream>
-#include <istream>
-
-#include <cstdint>
-#include <stdarg.h>
-#include <string>
 
 namespace android {
 
@@ -45,6 +49,85 @@
 static const char *const gReferenceElementName = "reference";
 static const char *const gReferenceAttributeName = "name";
 
+namespace {
+
+ConversionResult<AttributesGroup> aidl2legacy_AudioHalAttributeGroup_AttributesGroup(
+        const media::audio::common::AudioHalAttributesGroup& aidl) {
+    AttributesGroup legacy;
+    legacy.stream = VALUE_OR_RETURN(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(aidl.streamType));
+    legacy.volumeGroup = aidl.volumeGroupName;
+    legacy.attributesVect = VALUE_OR_RETURN(convertContainer<AttributesVector>(
+                    aidl.attributes, aidl2legacy_AudioAttributes_audio_attributes_t));
+    return legacy;
+}
+
+ConversionResult<ProductStrategy> aidl2legacy_AudioHalProductStrategy_ProductStrategy(
+        const media::audio::common::AudioHalProductStrategy& aidl) {
+    ProductStrategy legacy;
+    legacy.name = "strategy_" + std::to_string(aidl.id);
+    legacy.attributesGroups = VALUE_OR_RETURN(convertContainer<AttributesGroups>(
+                    aidl.attributesGroups,
+                    aidl2legacy_AudioHalAttributeGroup_AttributesGroup));
+    return legacy;
+}
+
+ConversionResult<std::string> legacy_device_category_to_string(device_category legacy) {
+    std::string s;
+    if (DeviceCategoryConverter::toString(legacy, s)) {
+        return s;
+    }
+    return base::unexpected(BAD_VALUE);
+}
+
+ConversionResult<std::string> aidl2legacy_DeviceCategory(
+        const media::audio::common::AudioHalVolumeCurve::DeviceCategory aidl) {
+    using DeviceCategory = media::audio::common::AudioHalVolumeCurve::DeviceCategory;
+    switch (aidl) {
+        case DeviceCategory::HEADSET:
+            return legacy_device_category_to_string(DEVICE_CATEGORY_HEADSET);
+        case DeviceCategory::SPEAKER:
+            return legacy_device_category_to_string(DEVICE_CATEGORY_SPEAKER);
+        case DeviceCategory::EARPIECE:
+            return legacy_device_category_to_string(DEVICE_CATEGORY_EARPIECE);
+        case DeviceCategory::EXT_MEDIA:
+            return legacy_device_category_to_string(DEVICE_CATEGORY_EXT_MEDIA);
+        case DeviceCategory::HEARING_AID:
+            return legacy_device_category_to_string(DEVICE_CATEGORY_HEARING_AID);
+    }
+    return base::unexpected(BAD_VALUE);
+}
+
+ConversionResult<CurvePoint> aidl2legacy_AudioHalCurvePoint_CurvePoint(
+        const media::audio::common::AudioHalVolumeCurve::CurvePoint& aidl) {
+    CurvePoint legacy;
+    legacy.index = VALUE_OR_RETURN(convertIntegral<int>(aidl.index));
+    legacy.attenuationInMb = aidl.attenuationMb;
+    return legacy;
+}
+
+ConversionResult<VolumeCurve> aidl2legacy_AudioHalVolumeCurve_VolumeCurve(
+        const media::audio::common::AudioHalVolumeCurve& aidl) {
+    VolumeCurve legacy;
+    legacy.deviceCategory = VALUE_OR_RETURN(aidl2legacy_DeviceCategory(aidl.deviceCategory));
+    legacy.curvePoints = VALUE_OR_RETURN(convertContainer<CurvePoints>(
+                    aidl.curvePoints, aidl2legacy_AudioHalCurvePoint_CurvePoint));
+    return legacy;
+}
+
+ConversionResult<VolumeGroup> aidl2legacy_AudioHalVolumeGroup_VolumeGroup(
+        const media::audio::common::AudioHalVolumeGroup& aidl) {
+    VolumeGroup legacy;
+    legacy.name = aidl.name;
+    legacy.indexMin = aidl.minIndex;
+    legacy.indexMax = aidl.maxIndex;
+    legacy.volumeCurves = VALUE_OR_RETURN(convertContainer<VolumeCurves>(
+                    aidl.volumeCurves, aidl2legacy_AudioHalVolumeCurve_VolumeCurve));
+    return legacy;
+}
+
+}  // namespace
+
 template<typename E, typename C>
 struct BaseSerializerTraits {
     typedef E Element;
@@ -724,5 +807,25 @@
     }
 }
 
+ParsingResult convert(const ::android::media::audio::common::AudioHalEngineConfig& aidlConfig) {
+    auto config = std::make_unique<engineConfig::Config>();
+    config->version = 1.0f;
+    if (auto conv = convertContainer<engineConfig::ProductStrategies>(
+                    aidlConfig.productStrategies,
+                    aidl2legacy_AudioHalProductStrategy_ProductStrategy); conv.ok()) {
+        config->productStrategies = std::move(conv.value());
+    } else {
+        return ParsingResult{};
+    }
+    if (auto conv = convertContainer<engineConfig::VolumeGroups>(
+                    aidlConfig.volumeGroups,
+                    aidl2legacy_AudioHalVolumeGroup_VolumeGroup); conv.ok()) {
+        config->volumeGroups = std::move(conv.value());
+    } else {
+        return ParsingResult{};
+    }
+    return {.parsedConfig=std::move(config), .nbSkippedElement=0};
+}
+
 } // namespace engineConfig
 } // namespace android
diff --git a/services/audiopolicy/engine/config/tests/Android.bp b/services/audiopolicy/engine/config/tests/Android.bp
index 5791f17..5d1aa16 100644
--- a/services/audiopolicy/engine/config/tests/Android.bp
+++ b/services/audiopolicy/engine/config/tests/Android.bp
@@ -11,6 +11,7 @@
     name: "audiopolicy_engineconfig_tests",
 
     shared_libs: [
+        "libaudiopolicycomponents",
         "libbase",
         "liblog",
         "libmedia_helper",
diff --git a/services/audiopolicy/engine/interface/EngineInterface.h b/services/audiopolicy/engine/interface/EngineInterface.h
index b8e35ed..70461ad 100644
--- a/services/audiopolicy/engine/interface/EngineInterface.h
+++ b/services/audiopolicy/engine/interface/EngineInterface.h
@@ -16,9 +16,11 @@
 
 #pragma once
 
+#include <string>
 #include <utility>
 
 #include <AudioPolicyManagerObserver.h>
+#include <android/media/audio/common/AudioHalEngineConfig.h>
 #include <media/AudioProductStrategy.h>
 #include <media/AudioVolumeGroup.h>
 #include <IVolumeCurves.h>
@@ -46,6 +48,21 @@
 {
 public:
     /**
+     * Loads the engine configuration from AIDL configuration data.
+     * If loading fails, tries to fall back to a default configuration. If fallback
+     * is impossible, returns an error.
+     */
+    virtual status_t loadFromHalConfigWithFallback(
+            const media::audio::common::AudioHalEngineConfig& config) = 0;
+
+    /**
+     * Loads the engine configuration from the specified or the default config file.
+     * If loading fails, tries to fall back to a default configuration. If fallback
+     * is impossible, returns an error.
+     */
+    virtual status_t loadFromXmlConfigWithFallback(const std::string& xmlFilePath = "") = 0;
+
+    /**
      * Checks if the engine was correctly initialized.
      *
      * @return NO_ERROR if initialization has been done correctly, error code otherwise..
@@ -434,6 +451,16 @@
      */
     virtual DeviceVector getActiveMediaDevices(const DeviceVector& availableDevices) const = 0;
 
+    /**
+     * @brief initializeDeviceSelectionCache. Device selection for audio attributes / streams is
+     * cached in the engine to speed up the selection process once the audio system is stable.
+     * While the audio system is initializing, not all audio device information is available yet.
+     * In that case, calling this function lets the engine initialize the device selection cache
+     * with default values.
+     * This must only be called while the audio policy manager is initializing.
+     */
+    virtual void initializeDeviceSelectionCache() = 0;
+
     virtual void dump(String8 *dst) const = 0;
 
 protected:
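
Together, the two loaders let the policy manager select the configuration source at startup: AIDL
HAL data when the HAL provides an engine config, otherwise the XML file. A hedged sketch of that
decision (the useAidlHal flag and the surrounding error handling are assumptions):

    #include <EngineInterface.h>

    namespace android {

    status_t configureEngine(EngineInterface* engine,
                             bool useAidlHal,
                             const media::audio::common::AudioHalEngineConfig& halConfig) {
        if (useAidlHal) {
            // The AIDL HAL supplies the engine configuration directly.
            return engine->loadFromHalConfigWithFallback(halConfig);
        }
        // Otherwise parse the default XML configuration file.
        return engine->loadFromXmlConfigWithFallback();
    }

    }  // namespace android
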
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index dc8d9cf..eb2e2f4 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -35,14 +35,15 @@
         "libaudiopolicyengineconfigurable_interface_headers",
     ],
     static_libs: [
-        "libaudiopolicycomponents",
         "libaudiopolicyengine_common",
         "libaudiopolicyengine_config",
         "libaudiopolicyengineconfigurable_pfwwrapper",
 
     ],
   shared_libs: [
+        "libaudio_aidl_conversion_common_cpp",
         "libaudiofoundation",
+        "libaudiopolicycomponents",
         "libbase",
         "liblog",
         "libcutils",
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
index 0398fc7..f7159c5 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
@@ -31,11 +31,11 @@
         "libaudiopolicyengineconfigurable_interface_headers",
     ],
     static_libs: [
-        "libaudiopolicycomponents",
         "libaudiopolicyengine_common",
         "libpfw_utility",
     ],
     shared_libs: [
+        "libaudiopolicycomponents",
         "libaudiopolicyengineconfigurable",
         "liblog",
         "libutils",
diff --git a/services/audiopolicy/engineconfigurable/src/Collection.h b/services/audiopolicy/engineconfigurable/src/Collection.h
index 02b41cb..4640515 100644
--- a/services/audiopolicy/engineconfigurable/src/Collection.h
+++ b/services/audiopolicy/engineconfigurable/src/Collection.h
@@ -53,6 +53,10 @@
     {
         collectionSupported();
     }
+    ~Collection()
+    {
+        clear();
+    }
 
     /**
      * Add a policy element to the collection. Policy elements are streams, strategies, input
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index 9d53017..ccd4316 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -68,16 +68,21 @@
 
 Engine::Engine() : mPolicyParameterMgr(new ParameterManagerWrapper())
 {
-    status_t loadResult = loadAudioPolicyEngineConfig();
+}
+
+status_t Engine::loadFromHalConfigWithFallback(
+        const media::audio::common::AudioHalEngineConfig& config __unused) {
+    // b/242678729. Need to implement for the configurable engine.
+    return INVALID_OPERATION;
+}
+
+status_t Engine::loadFromXmlConfigWithFallback(const std::string& xmlFilePath)
+{
+    status_t loadResult = loadAudioPolicyEngineConfig(xmlFilePath);
     if (loadResult < 0) {
         ALOGE("Policy Engine configuration is invalid.");
     }
-}
-
-Engine::~Engine()
-{
-    mStreamCollection.clear();
-    mInputSourceCollection.clear();
+    return loadResult;
 }
 
 status_t Engine::initCheck()
@@ -93,7 +98,7 @@
 template <typename Key>
 Element<Key> *Engine::getFromCollection(const Key &key) const
 {
-    const Collection<Key> collection = getCollection<Key>();
+    const Collection<Key> &collection = getCollection<Key>();
     return collection.get(key);
 }
 
@@ -165,6 +170,21 @@
     return mPolicyParameterMgr->getForceUse(usage);
 }
 
+status_t Engine::setOutputDevicesConnectionState(const DeviceVector &devices,
+                                                 audio_policy_dev_state_t state)
+{
+    for (const auto &device : devices) {
+        mPolicyParameterMgr->setDeviceConnectionState(device->type(), device->address(), state);
+    }
+    DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+    if (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) {
+        availableOutputDevices.remove(devices);
+    } else {
+        availableOutputDevices.add(devices);
+    }
+    return mPolicyParameterMgr->setAvailableOutputDevices(availableOutputDevices.types());
+}
+
 status_t Engine::setDeviceConnectionState(const sp<DeviceDescriptor> device,
                                           audio_policy_dev_state_t state)
 {
@@ -179,9 +199,9 @@
     return EngineBase::setDeviceConnectionState(device, state);
 }
 
-status_t Engine::loadAudioPolicyEngineConfig()
+status_t Engine::loadAudioPolicyEngineConfig(const std::string& xmlFilePath)
 {
-    auto result = EngineBase::loadAudioPolicyEngineConfig();
+    auto result = EngineBase::loadAudioPolicyEngineConfig(xmlFilePath);
 
     // Custom XML Parsing
     auto loadCriteria= [this](const auto& configCriteria, const auto& configCriterionTypes) {
@@ -205,17 +225,126 @@
     return result.nbSkippedElement == 0? NO_ERROR : BAD_VALUE;
 }
 
+status_t Engine::setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+                                           const AudioDeviceTypeAddrVector &devices)
+{
+    DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+    DeviceVector prevDisabledDevices =
+            getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+    status_t status = EngineBase::setDevicesRoleForStrategy(strategy, role, devices);
+    if (status != NO_ERROR) {
+        return status;
+    }
+    DeviceVector newDisabledDevices =
+            getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+    if (role == DEVICE_ROLE_PREFERRED) {
+        DeviceVector reenabledDevices = prevDisabledDevices;
+        reenabledDevices.remove(newDisabledDevices);
+        if (reenabledDevices.empty()) {
+            ALOGD("%s DEVICE_ROLE_PREFERRED empty renabled devices", __func__);
+            return status;
+        }
+        // some devices were moved from disabled to preferred, need to force a resync for these
+        enableDevicesForStrategy(strategy, prevDisabledDevices);
+    }
+    if (newDisabledDevices.empty()) {
+        return status;
+    }
+    return disableDevicesForStrategy(strategy, newDisabledDevices);
+}
+
+status_t Engine::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+        const AudioDeviceTypeAddrVector &devices)
+{
+    const auto productStrategies = getProductStrategies();
+    if (productStrategies.find(strategy) == end(productStrategies)) {
+        ALOGE("%s invalid %d", __func__, strategy);
+        return BAD_VALUE;
+    }
+    DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+    DeviceVector prevDisabledDevices =
+            getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+    status_t status = EngineBase::removeDevicesRoleForStrategy(strategy, role, devices);
+    if (status != NO_ERROR || role == DEVICE_ROLE_PREFERRED) {
+        return status;
+    }
+    // Removing ROLE_DISABLED for given devices, need to force a resync for these
+    enableDevicesForStrategy(strategy, prevDisabledDevices);
+
+    DeviceVector remainingDisabledDevices = getDisabledDevicesForProductStrategy(
+            availableOutputDevices, strategy);
+    if (remainingDisabledDevices.empty()) {
+        return status;
+    }
+    return disableDevicesForStrategy(strategy, remainingDisabledDevices);
+}
+
+status_t Engine::clearDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
+{
+    const auto productStrategies = getProductStrategies();
+    if (productStrategies.find(strategy) == end(productStrategies)) {
+        ALOGE("%s invalid %d", __func__, strategy);
+        return BAD_VALUE;
+    }
+    DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+    DeviceVector prevDisabledDevices =
+            getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+    status_t status = EngineBase::clearDevicesRoleForStrategy(strategy, role);
+    if (status != NO_ERROR || role == DEVICE_ROLE_PREFERRED || prevDisabledDevices.empty()) {
+        return status;
+    }
+    // Disabled devices were removed, need to force a resync for these
+    enableDevicesForStrategy(strategy, prevDisabledDevices);
+    return NO_ERROR;
+}
+
+void Engine::enableDevicesForStrategy(product_strategy_t strategy __unused,
+        const DeviceVector &devicesToEnable) {
+    // devices were (re)enabled, need to force a resync for these
+    setOutputDevicesConnectionState(devicesToEnable, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE);
+    setOutputDevicesConnectionState(devicesToEnable, AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
+}
+
+status_t Engine::disableDevicesForStrategy(product_strategy_t strategy,
+        const DeviceVector &devicesToDisable) {
+    // Filter out devices that are disabled for this strategy.
+    // However, updating the output device decision requires updating the availability
+    // criterion, which may impact other strategies. As a workaround, mark these devices
+    // unavailable now and restore them afterwards so other strategies' decisions are not altered.
+    setOutputDevicesConnectionState(devicesToDisable, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE);
+
+    DeviceTypeSet deviceTypes = getProductStrategies().getDeviceTypesForProductStrategy(strategy);
+    const std::string address(getProductStrategies().getDeviceAddressForProductStrategy(strategy));
+
+    setOutputDevicesConnectionState(devicesToDisable, AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
+
+    // Force reapply devices for given strategy
+    getProductStrategies().at(strategy)->setDeviceTypes(deviceTypes);
+    setDeviceAddressForProductStrategy(strategy, address);
+    return NO_ERROR;
+}
+
 DeviceVector Engine::getDevicesForProductStrategy(product_strategy_t ps) const
 {
+    DeviceVector selectedDevices = {};
+    DeviceVector disabledDevices = {};
     const auto productStrategies = getProductStrategies();
     if (productStrategies.find(ps) == productStrategies.end()) {
         ALOGE("%s: Trying to get device on invalid strategy %d", __FUNCTION__, ps);
-        return {};
+        return selectedDevices;
     }
-    const DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+    DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
     const SwAudioOutputCollection &outputs = getApmObserver()->getOutputs();
     DeviceTypeSet availableOutputDevicesTypes = availableOutputDevices.types();
 
+    // check if this strategy has a preferred device that is available,
+    // if yes, give priority to it.
+    DeviceVector preferredAvailableDevVec =
+            getPreferredAvailableDevicesForProductStrategy(availableOutputDevices, ps);
+    if (!preferredAvailableDevVec.isEmpty()) {
+        return preferredAvailableDevVec;
+    }
+
     /** This is the only case handled programmatically because the PFW is unable to know the
      * activity of streams.
      *
@@ -227,33 +356,34 @@
      * -When media is not playing anymore, fall back on the sonification behavior
      */
     DeviceTypeSet deviceTypes;
+    product_strategy_t psOrFallback = ps;
     if (ps == getProductStrategyForStream(AUDIO_STREAM_NOTIFICATION) &&
             !is_state_in_call(getPhoneState()) &&
             !outputs.isActiveRemotely(toVolumeSource(AUDIO_STREAM_MUSIC),
                                       SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY) &&
             outputs.isActive(toVolumeSource(AUDIO_STREAM_MUSIC),
                              SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)) {
-        product_strategy_t strategyForMedia =
-                getProductStrategyForStream(AUDIO_STREAM_MUSIC);
-        deviceTypes = productStrategies.getDeviceTypesForProductStrategy(strategyForMedia);
+        psOrFallback = getProductStrategyForStream(AUDIO_STREAM_MUSIC);
     } else if (ps == getProductStrategyForStream(AUDIO_STREAM_ACCESSIBILITY) &&
         (outputs.isActive(toVolumeSource(AUDIO_STREAM_RING)) ||
          outputs.isActive(toVolumeSource(AUDIO_STREAM_ALARM)))) {
             // do not route accessibility prompts to a digital output currently configured with a
             // compressed format as they would likely not be mixed and dropped.
            // Device For Sonification conf file has HDMI, SPDIF and HDMI ARC unreachable.
-        product_strategy_t strategyNotification = getProductStrategyForStream(AUDIO_STREAM_RING);
-        deviceTypes = productStrategies.getDeviceTypesForProductStrategy(strategyNotification);
-    } else {
-        deviceTypes = productStrategies.getDeviceTypesForProductStrategy(ps);
+        psOrFallback = getProductStrategyForStream(AUDIO_STREAM_RING);
     }
+    disabledDevices = getDisabledDevicesForProductStrategy(availableOutputDevices, psOrFallback);
+    deviceTypes = productStrategies.getDeviceTypesForProductStrategy(psOrFallback);
+    // If a fallback on another strategy was decided, prevent selecting a device that is
+    // disabled for the current strategy.
+    availableOutputDevices.remove(disabledDevices);
+
     if (deviceTypes.empty() ||
             Intersection(deviceTypes, availableOutputDevicesTypes).empty()) {
         auto defaultDevice = getApmObserver()->getDefaultOutputDevice();
         ALOG_ASSERT(defaultDevice != nullptr, "no valid default device defined");
-        return DeviceVector(defaultDevice);
-    }
-    if (/*device_distinguishes_on_address(*deviceTypes.begin())*/ isSingleDeviceType(
+        selectedDevices = DeviceVector(defaultDevice);
+    } else if (/*device_distinguishes_on_address(*deviceTypes.begin())*/ isSingleDeviceType(
             deviceTypes, AUDIO_DEVICE_OUT_BUS)) {
         // We do expect only one device for these types of devices
         // Criterion device address guarantees this one is available
@@ -268,12 +398,15 @@
                   dumpDeviceTypes(deviceTypes).c_str(), address.c_str());
             auto defaultDevice = getApmObserver()->getDefaultOutputDevice();
             ALOG_ASSERT(defaultDevice != nullptr, "Default Output Device NOT available");
-            return DeviceVector(defaultDevice);
+            selectedDevices = DeviceVector(defaultDevice);
+        } else {
+            selectedDevices = DeviceVector(busDevice);
         }
-        return DeviceVector(busDevice);
+    } else {
+        ALOGV("%s:device %s %d", __FUNCTION__, dumpDeviceTypes(deviceTypes).c_str(), ps);
+        selectedDevices = availableOutputDevices.getDevicesFromTypes(deviceTypes);
     }
-    ALOGV("%s:device %s %d", __FUNCTION__, dumpDeviceTypes(deviceTypes).c_str(), ps);
-    return availableOutputDevices.getDevicesFromTypes(deviceTypes);
+    return selectedDevices;
 }
 
 DeviceVector Engine::getOutputDevicesForAttributes(const audio_attributes_t &attributes,
@@ -356,14 +489,6 @@
     return availableInputDevices.getDevice(deviceType, String8(address.c_str()), AUDIO_FORMAT_DEFAULT);
 }
 
-void Engine::updateDeviceSelectionCache()
-{
-    for (const auto &iter : getProductStrategies()) {
-        const auto &strategy = iter.second;
-        mDevicesForStrategies[strategy->getId()] = getDevicesForProductStrategy(strategy->getId());
-    }
-}
-
 void Engine::setDeviceAddressForProductStrategy(product_strategy_t strategy,
                                                 const std::string &address)
 {
@@ -411,5 +536,3 @@
 
 } // namespace audio_policy
 } // namespace android
-
-
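
For readers unfamiliar with the parameter-framework engine: the enable/disable helpers added above have no direct "recompute routing for these devices" hook, so they force a re-evaluation by briefly toggling the reported connection state of the affected devices. The following is a minimal, self-contained sketch of that resync pattern; the types and member names are simplified stand-ins, not the AOSP classes.

// Illustrative sketch only -- simplified stand-ins for the engine and its device collections.
#include <set>
#include <string>
#include <utility>

struct MiniEngine {
    std::set<std::string> available;    // devices currently reported as available
    std::set<std::string> selection;    // last selection applied for one strategy

    // Stand-in for the criterion-driven selection: pick every available device.
    std::set<std::string> computeSelection() const { return available; }
    void applySelection(std::set<std::string> sel) { selection = std::move(sel); }

    // Mirrors enableDevicesForStrategy(): toggle the devices unavailable/available so the
    // rules are re-evaluated, then apply the refreshed selection.
    void resync(const std::set<std::string>& devices) {
        for (const auto& d : devices) available.erase(d);
        for (const auto& d : devices) available.insert(d);
        applySelection(computeSelection());
    }

    // Mirrors disableDevicesForStrategy(): hide the devices, capture the selection computed
    // without them, restore availability, then force-apply the captured selection so other
    // strategies keep seeing the devices as available.
    void disable(const std::set<std::string>& devices) {
        for (const auto& d : devices) available.erase(d);
        const auto withoutDisabled = computeSelection();
        for (const auto& d : devices) available.insert(d);
        applySelection(withoutDisabled);
    }
};

int main() {
    MiniEngine engine;
    engine.available = {"speaker", "usb_headset"};
    engine.disable({"usb_headset"});   // selection now excludes usb_headset, availability intact
}

In the real code the toggle goes through setOutputDevicesConnectionState() and the captured selection is force-applied with setDeviceTypes() / setDeviceAddressForProductStrategy(), as shown in the hunks above.
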
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index 6ac20cd..4f3e620 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -33,15 +33,23 @@
 {
 public:
     Engine();
-    virtual ~Engine();
+    virtual ~Engine() = default;
 
     template <class RequestedInterface>
     RequestedInterface *queryInterface();
 
     ///
+    /// from EngineInterface
+    ///
+    status_t loadFromHalConfigWithFallback(
+            const media::audio::common::AudioHalEngineConfig& config) override;
+
+    status_t loadFromXmlConfigWithFallback(const std::string& xmlFilePath = "") override;
+
+    ///
     /// from EngineBase
     ///
-    android::status_t initCheck() override;
+    status_t initCheck() override;
 
     status_t setPhoneState(audio_mode_t mode) override;
 
@@ -51,8 +59,8 @@
 
     audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage) const override;
 
-    android::status_t setDeviceConnectionState(const sp<DeviceDescriptor> devDesc,
-                                               audio_policy_dev_state_t state) override;
+    status_t setDeviceConnectionState(const sp<DeviceDescriptor> devDesc,
+                                      audio_policy_dev_state_t state) override;
 
     DeviceVector getOutputDevicesForAttributes(const audio_attributes_t &attr,
                                                const sp<DeviceDescriptor> &preferedDevice = nullptr,
@@ -67,7 +75,12 @@
                                                      sp<AudioPolicyMix> *mix = nullptr)
                                                      const override;
 
-    void updateDeviceSelectionCache() override;
+    status_t setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+                                       const AudioDeviceTypeAddrVector &devices) override;
+
+    status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+                const AudioDeviceTypeAddrVector &devices) override;
+    status_t clearDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) override;
 
     ///
     /// from AudioPolicyPluginInterface
@@ -96,6 +109,12 @@
     }
 
 private:
+    android::status_t disableDevicesForStrategy(product_strategy_t strategy,
+            const DeviceVector &devicesToDisable);
+    void enableDevicesForStrategy(product_strategy_t strategy, const DeviceVector &devicesToEnable);
+    android::status_t setOutputDevicesConnectionState(const DeviceVector &devices,
+                                                      audio_policy_dev_state_t state);
+
     /* Copy facilities are put private to disable copy. */
     Engine(const Engine &object);
     Engine &operator=(const Engine &object);
@@ -121,20 +140,21 @@
     template <typename Property, typename Key>
     bool setPropertyForKey(const Property &property, const Key &key);
 
-    status_t loadAudioPolicyEngineConfig();
+    status_t loadAudioPolicyEngineConfig(const std::string& xmlFilePath);
 
-    DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const;
     DeviceVector getCachedDevices(product_strategy_t ps) const;
 
+    ///
+    /// from EngineBase
+    ///
+    DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const override;
+
     /**
      * Policy Parameter Manager hidden through a wrapper.
      */
     ParameterManagerWrapper *mPolicyParameterMgr;
-
-    DeviceStrategyMap mDevicesForStrategies;
 };
 
 } // namespace audio_policy
 
 } // namespace android
-
diff --git a/services/audiopolicy/enginedefault/Android.bp b/services/audiopolicy/enginedefault/Android.bp
index 4671fe9..7d4ccab 100644
--- a/services/audiopolicy/enginedefault/Android.bp
+++ b/services/audiopolicy/enginedefault/Android.bp
@@ -25,12 +25,13 @@
         "libaudiopolicyengine_interface_headers",
     ],
     static_libs: [
-        "libaudiopolicycomponents",
         "libaudiopolicyengine_common",
         "libaudiopolicyengine_config",
     ],
     shared_libs: [
+        "libaudio_aidl_conversion_common_cpp",
         "libaudiofoundation",
+        "libaudiopolicycomponents",
         "libbase",
         "liblog",
         "libcutils",
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index c7e2103..15f7842 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -35,10 +35,7 @@
 #include <utils/String8.h>
 #include <utils/Log.h>
 
-namespace android
-{
-namespace audio_policy
-{
+namespace android::audio_policy {
 
 struct legacy_strategy_map { const char *name; legacy_strategy id; };
 static const std::vector<legacy_strategy_map>& getLegacyStrategy() {
@@ -59,9 +56,18 @@
     return legacyStrategy;
 }
 
-Engine::Engine()
-{
-    auto result = EngineBase::loadAudioPolicyEngineConfig();
+status_t Engine::loadFromHalConfigWithFallback(
+        const media::audio::common::AudioHalEngineConfig& aidlConfig) {
+    return loadWithFallback(aidlConfig);
+}
+
+status_t Engine::loadFromXmlConfigWithFallback(const std::string& xmlFilePath) {
+    return loadWithFallback(xmlFilePath);
+}
+
+template<typename T>
+status_t Engine::loadWithFallback(const T& configSource) {
+    auto result = EngineBase::loadAudioPolicyEngineConfig(configSource);
     ALOGE_IF(result.nbSkippedElement != 0,
              "Policy Engine configuration is partially invalid, skipped %zu elements",
              result.nbSkippedElement);
@@ -70,8 +76,11 @@
     for (const auto &strategy : legacyStrategy) {
         mLegacyStrategyMap[getProductStrategyByName(strategy.name)] = strategy.id;
     }
+
+    return OK;
 }
 
+
 status_t Engine::setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config)
 {
     switch(usage) {
@@ -291,7 +300,8 @@
                             }));
         if (!devices.isEmpty()) break;
         devices = availableOutputDevices.getFirstDevicesFromTypes({
-                AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_EARPIECE});
+                AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_EARPIECE,
+                AUDIO_DEVICE_OUT_SPEAKER});
     } break;
 
     case STRATEGY_SONIFICATION:
@@ -704,15 +714,9 @@
     return device;
 }
 
-void Engine::updateDeviceSelectionCache()
-{
-    for (const auto &iter : getProductStrategies()) {
-        const auto& strategy = iter.second;
-        auto devices = getDevicesForProductStrategy(strategy->getId());
-        mDevicesForStrategies[strategy->getId()] = devices;
-        strategy->setDeviceTypes(devices.types());
-        strategy->setDeviceAddress(devices.getFirstValidAddress().c_str());
-    }
+void Engine::setStrategyDevices(const sp<ProductStrategy>& strategy, const DeviceVector &devices) {
+    strategy->setDeviceTypes(devices.types());
+    strategy->setDeviceAddress(devices.getFirstValidAddress().c_str());
 }
 
 product_strategy_t Engine::getProductStrategyFromLegacy(legacy_strategy legacyStrategy) const {
@@ -735,38 +739,6 @@
     return AUDIO_DEVICE_NONE;
 }
 
-DeviceVector Engine::getPreferredAvailableDevicesForProductStrategy(
-        const DeviceVector& availableOutputDevices, product_strategy_t strategy) const {
-    DeviceVector preferredAvailableDevVec = {};
-    AudioDeviceTypeAddrVector preferredStrategyDevices;
-    const status_t status = getDevicesForRoleAndStrategy(
-            strategy, DEVICE_ROLE_PREFERRED, preferredStrategyDevices);
-    if (status == NO_ERROR) {
-        // there is a preferred device, is it available?
-        preferredAvailableDevVec =
-                availableOutputDevices.getDevicesFromDeviceTypeAddrVec(preferredStrategyDevices);
-        if (preferredAvailableDevVec.size() == preferredStrategyDevices.size()) {
-            ALOGVV("%s using pref device %s for strategy %u",
-                   __func__, preferredAvailableDevVec.toString().c_str(), strategy);
-            return preferredAvailableDevVec;
-        }
-    }
-    return preferredAvailableDevVec;
-}
-
-DeviceVector Engine::getDisabledDevicesForProductStrategy(
-        const DeviceVector &availableOutputDevices, product_strategy_t strategy) const {
-    DeviceVector disabledDevices = {};
-    AudioDeviceTypeAddrVector disabledDevicesTypeAddr;
-    const status_t status = getDevicesForRoleAndStrategy(
-            strategy, DEVICE_ROLE_DISABLED, disabledDevicesTypeAddr);
-    if (status == NO_ERROR) {
-        disabledDevices =
-                availableOutputDevices.getDevicesFromDeviceTypeAddrVec(disabledDevicesTypeAddr);
-    }
-    return disabledDevices;
-}
-
 DeviceVector Engine::getDevicesForProductStrategy(product_strategy_t strategy) const {
     const SwAudioOutputCollection& outputs = getApmObserver()->getOutputs();
 
@@ -880,7 +852,4 @@
                                            AUDIO_FORMAT_DEFAULT);
 }
 
-} // namespace audio_policy
-} // namespace android
-
-
+} // namespace android::audio_policy
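
The two new public entry points above differ only in the type of the configuration source, so they funnel into a single private loadWithFallback() template that forwards to the matching EngineBase overload. A minimal sketch of that dispatch pattern, using simplified stand-in types rather than the AOSP ones:

// Illustrative sketch of the "two entry points, one template" pattern; names are stand-ins.
#include <iostream>
#include <string>

struct AidlEngineConfig { /* parsed AIDL engine config */ };

struct ConfigLoader {
    // These overloads play the role of EngineBase::loadAudioPolicyEngineConfig().
    int load(const AidlEngineConfig&) { std::cout << "loading from HAL config\n"; return 0; }
    int load(const std::string& xmlPath) {
        std::cout << "loading from XML: " << (xmlPath.empty() ? "<default>" : xmlPath) << "\n";
        return 0;
    }

    // Public entry points mirror loadFromHalConfigWithFallback / loadFromXmlConfigWithFallback.
    int fromHal(const AidlEngineConfig& cfg) { return loadWithFallback(cfg); }
    int fromXml(const std::string& path = "") { return loadWithFallback(path); }

private:
    // The template forwards to whichever overload matches the config source, so the shared
    // post-processing (error logging, legacy strategy map setup) is written only once.
    template <typename T>
    int loadWithFallback(const T& source) {
        int result = load(source);
        // ... shared post-processing goes here ...
        return result;
    }
};

int main() {
    ConfigLoader loader;
    loader.fromHal(AidlEngineConfig{});
    loader.fromXml("/vendor/etc/audio_policy_engine_configuration.xml");
}
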
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index e27a03b..878bca9 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -45,8 +45,17 @@
 class Engine : public EngineBase
 {
 public:
-    Engine();
+    Engine() = default;
     virtual ~Engine() = default;
+    Engine(const Engine &object) = delete;
+    Engine &operator=(const Engine &object) = delete;
+
+    ///
+    /// from EngineInterface
+    ///
+    status_t loadFromHalConfigWithFallback(
+            const media::audio::common::AudioHalEngineConfig& config) override;
+    status_t loadFromXmlConfigWithFallback(const std::string& xmlFilePath = "") override;
 
 private:
     ///
@@ -68,12 +77,14 @@
                                                      sp<AudioPolicyMix> *mix = nullptr)
                                                      const override;
 
-    void updateDeviceSelectionCache() override;
+    void setStrategyDevices(const sp<ProductStrategy>& strategy,
+                            const DeviceVector& devices) override;
+
+    DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const override;
 
 private:
-    /* Copy facilities are put private to disable copy. */
-    Engine(const Engine &object);
-    Engine &operator=(const Engine &object);
+    template<typename T>
+    status_t loadWithFallback(const T& configSource);
 
     status_t setDefaultDevice(audio_devices_t device);
 
@@ -88,26 +99,17 @@
                                           DeviceVector availableOutputDevices,
                                           const SwAudioOutputCollection &outputs) const;
 
-    DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const;
-
     sp<DeviceDescriptor> getDeviceForInputSource(audio_source_t inputSource) const;
 
     product_strategy_t getProductStrategyFromLegacy(legacy_strategy legacyStrategy) const;
     audio_devices_t getPreferredDeviceTypeForLegacyStrategy(
         const DeviceVector& availableOutputDevices, legacy_strategy legacyStrategy) const;
-    DeviceVector getPreferredAvailableDevicesForProductStrategy(
-        const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
-    DeviceVector getDisabledDevicesForProductStrategy(
-        const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
     DeviceVector getPreferredAvailableDevicesForInputSource(
             const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
     DeviceVector getDisabledDevicesForInputSource(
             const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
 
-    DeviceStrategyMap mDevicesForStrategies;
-
     std::map<product_strategy_t, legacy_strategy> mLegacyStrategyMap;
 };
 } // namespace audio_policy
 } // namespace android
-
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index 9f6b703..c4b3751 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -38,6 +38,7 @@
         "capture_state_listener-aidl-cpp",
         "libaudioclient",
         "libaudiofoundation",
+        "libaudiopolicycomponents",
         "libbase",
         "libcutils",
         "libhidlbase",
@@ -54,7 +55,6 @@
     ],
     static_libs: [
         "android.hardware.audio.common@7.0-enums",
-        "libaudiopolicycomponents",
     ],
     header_libs: [
         "libaudiopolicycommon",
@@ -63,6 +63,15 @@
     ],
     data: [":audiopolicyfuzzer_configuration_files"],
     fuzz_config: {
-       cc: ["mnaganov@google.com"],
+        cc: ["mnaganov@google.com"],
+        componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libaudiopolicy",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 14f565b..8793085 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -216,8 +216,9 @@
     virtual void process();
 
    protected:
+    sp<AudioPolicyConfig> mConfig{AudioPolicyConfig::createWritableForTests()};
     std::unique_ptr<AudioPolicyManagerTestClient> mClient{new AudioPolicyManagerTestClient};
-    std::unique_ptr<AudioPolicyTestManager> mManager{new AudioPolicyTestManager(mClient.get())};
+    std::unique_ptr<AudioPolicyTestManager> mManager;
     FuzzedDataProvider *mFdp;
 };
 
@@ -230,7 +231,10 @@
     }
     // init code
     SetUpManagerConfig();
-
+    if (mConfig == nullptr) {
+        return false;
+    }
+    mManager.reset(new AudioPolicyTestManager(mConfig, mClient.get()));
     if (mManager->initialize() != NO_ERROR) {
         return false;
     }
@@ -240,7 +244,7 @@
     return true;
 }
 
-void AudioPolicyManagerFuzzer::SetUpManagerConfig() { mManager->getConfig().setDefault(); }
+void AudioPolicyManagerFuzzer::SetUpManagerConfig() { mConfig->setDefault(); }
 
 bool AudioPolicyManagerFuzzer::getOutputForAttr(
     audio_port_handle_t *selectedDeviceId, audio_format_t format, audio_channel_mask_t channelMask,
@@ -408,7 +412,11 @@
 }
 
 void AudioPolicyManagerFuzzerWithConfigurationFile::SetUpManagerConfig() {
-    deserializeAudioPolicyFile(getConfigFile().c_str(), &mManager->getConfig());
+    const std::string configFilePath = getConfigFile();
+    auto result = AudioPolicyConfig::loadFromCustomXmlConfigForTests(configFilePath);
+    mConfig = result.ok() ? result.value() : nullptr;
+    ALOGE_IF(!result.ok(), "%s: Failed to deserialize \"%s\": %d",
+            __func__, configFilePath.c_str(), result.error());
 }
 
 void AudioPolicyManagerFuzzerWithConfigurationFile::traverseAndFuzzXML(xmlDocPtr pDoc,
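
The fuzzer now mirrors the manager's new construction model: the AudioPolicyConfig is built (or deserialized) first and injected into the manager, instead of mutating the manager's internal config after construction. A condensed sketch of the setup order, assuming only the names and call shapes that appear in this patch (it is not a standalone program):

// Condensed setup sketch; relies only on calls visible in this patch.
bool setUpManager(const std::string& configFilePath,
                  sp<AudioPolicyConfig>& config,
                  std::unique_ptr<AudioPolicyTestManager>& manager,
                  AudioPolicyManagerTestClient* client) {
    if (configFilePath.empty()) {
        config = AudioPolicyConfig::createWritableForTests();
        config->setDefault();                         // default config, as in the base fuzzer
    } else {
        auto result = AudioPolicyConfig::loadFromCustomXmlConfigForTests(configFilePath);
        config = result.ok() ? result.value() : nullptr;
    }
    if (config == nullptr) {
        return false;                                 // bail out before creating the manager
    }
    manager.reset(new AudioPolicyTestManager(config, client));  // config injected, not mutated
    return manager->initialize() == NO_ERROR;
}
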
diff --git a/services/audiopolicy/managerdefault/Android.bp b/services/audiopolicy/managerdefault/Android.bp
index 6e34eb0..a1785da 100644
--- a/services/audiopolicy/managerdefault/Android.bp
+++ b/services/audiopolicy/managerdefault/Android.bp
@@ -23,6 +23,7 @@
 
     shared_libs: [
         "libaudiofoundation",
+        "libaudiopolicycomponents",
         "libcutils",
         "libdl",
         "libutils",
@@ -49,8 +50,6 @@
         "libaudiopolicymanager_interface_headers",
     ],
 
-    static_libs: ["libaudiopolicycomponents"],
-
     cflags: [
         "-Wall",
         "-Werror",
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index f5c7a71..0e971b0 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -116,14 +116,13 @@
 }
 
 void AudioPolicyManager::broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
-                                                        audio_policy_dev_state_t state)
+                                                        media::DeviceConnectedState state)
 {
     audio_port_v7 devicePort;
     device->toAudioPort(&devicePort);
-    if (status_t status = mpClientInterface->setDeviceConnectedState(
-                    &devicePort, state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
+    if (status_t status = mpClientInterface->setDeviceConnectedState(&devicePort, state);
             status != OK) {
-        ALOGE("Error %d while setting connected state for device %s", status,
+        ALOGE("Error %d while setting connected state for device %s", state,
                 device->getDeviceTypeAddr().toString(false).c_str());
     }
 }
@@ -206,14 +205,14 @@
 
             // Before checking outputs, broadcast connect event to allow HAL to retrieve dynamic
             // parameters on newly connected devices (instead of opening the outputs...)
-            broadcastDeviceConnectionState(device, state);
+            broadcastDeviceConnectionState(device, media::DeviceConnectedState::CONNECTED);
 
             if (checkOutputsForDevice(device, state, outputs) != NO_ERROR) {
                 mAvailableOutputDevices.remove(device);
 
                 mHwModules.cleanUpForDevice(device);
 
-                broadcastDeviceConnectionState(device, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE);
+                broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);
                 return INVALID_OPERATION;
             }
 
@@ -235,8 +234,9 @@
 
             ALOGV("%s() disconnecting output device %s", __func__, device->toString().c_str());
 
-            // Send Disconnect to HALs
-            broadcastDeviceConnectionState(device, state);
+            // Notify the HAL to prepare to disconnect device
+            broadcastDeviceConnectionState(
+                    device, media::DeviceConnectedState::PREPARE_TO_DISCONNECT);
 
             // remove device from available output devices
             mAvailableOutputDevices.remove(device);
@@ -245,6 +245,9 @@
 
             checkOutputsForDevice(device, state, outputs);
 
+            // Send Disconnect to HALs
+            broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);
+
             // Reset active device codec
             device->setEncodedFormat(AUDIO_FORMAT_DEFAULT);
 
@@ -384,12 +387,12 @@
 
             // Before checking inputs, broadcast connect event to allow HAL to retrieve dynamic
             // parameters on newly connected devices (instead of opening the inputs...)
-            broadcastDeviceConnectionState(device, state);
+            broadcastDeviceConnectionState(device, media::DeviceConnectedState::CONNECTED);
 
             if (checkInputsForDevice(device, state) != NO_ERROR) {
                 mAvailableInputDevices.remove(device);
 
-                broadcastDeviceConnectionState(device, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE);
+                broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);
 
                 mHwModules.cleanUpForDevice(device);
 
@@ -407,13 +410,17 @@
 
             ALOGV("%s() disconnecting input device %s", __func__, device->toString().c_str());
 
-            // Set Disconnect to HALs
-            broadcastDeviceConnectionState(device, state);
+            // Notify the HAL to prepare to disconnect device
+            broadcastDeviceConnectionState(
+                    device, media::DeviceConnectedState::PREPARE_TO_DISCONNECT);
 
             mAvailableInputDevices.remove(device);
 
             checkInputsForDevice(device, state);
 
+            // Set Disconnect to HALs
+            broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);
+
             // remove device from mReportedFormatsMap cache
             mReportedFormatsMap.erase(device);
         } break;
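
Both the output and input disconnect paths above now use the same three-step handshake with the HAL instead of a single disconnect broadcast. Condensed to its ordering (bookkeeping and error handling elided), the sequence is:

// 1. Tell the HAL the device is about to go away so it can drain and stop cleanly.
broadcastDeviceConnectionState(device, media::DeviceConnectedState::PREPARE_TO_DISCONNECT);

// 2. Remove the device from the available set and tear down / reroute the affected streams.
mAvailableOutputDevices.remove(device);   // mAvailableInputDevices on the capture path
checkOutputsForDevice(device, state, outputs);

// 3. Only after teardown, report the final DISCONNECTED state to the HAL.
broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);

On connection the order is the opposite: CONNECTED is broadcast first so the HAL can expose dynamic parameters before the outputs or inputs are opened.
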
@@ -1099,7 +1106,12 @@
     // and AudioSystem::getOutputSamplingRate().
 
     SortedVector<audio_io_handle_t> outputs = getOutputsForDevices(devices, mOutputs);
-    const audio_io_handle_t output = selectOutput(outputs);
+    audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
+    if (stream == AUDIO_STREAM_MUSIC &&
+        property_get_bool("audio.deep_buffer.media", false /* default_value */)) {
+        flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+    }
+    const audio_io_handle_t output = selectOutput(outputs, flags);
 
     ALOGV("getOutput() stream %d selected devices %s, output %d", stream,
           devices.toString().c_str(), output);
@@ -1286,7 +1298,8 @@
         if (outputDevices.size() == 1) {
             info = getPreferredMixerAttributesInfo(
                     outputDevices.itemAt(0)->getId(),
-                    mEngine->getProductStrategyForAttributes(*resultAttr));
+                    mEngine->getProductStrategyForAttributes(*resultAttr),
+                    true /*activeBitPerfectPreferred*/);
             // Only use preferred mixer if the uid matches or the preferred mixer is bit-perfect
             // and it is currently active.
             if (info != nullptr && info->getUid() != uid &&
@@ -1312,10 +1325,15 @@
         AudioProfileVector profiles;
         status_t ret = getProfilesForDevices(outputDevices, profiles, *flags, false /*isInput*/);
         if (ret == NO_ERROR && !profiles.empty()) {
-            config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
-                    : *profiles[0]->getChannels().begin();
-            config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
-                    : *profiles[0]->getSampleRates().begin();
+            const auto channels = profiles[0]->getChannels();
+            if (!channels.empty() && (channels.find(config->channel_mask) == channels.end())) {
+                config->channel_mask = *channels.begin();
+            }
+            const auto sampleRates = profiles[0]->getSampleRates();
+            if (!sampleRates.empty() &&
+                    (sampleRates.find(config->sample_rate) == sampleRates.end())) {
+                config->sample_rate = *sampleRates.begin();
+            }
             config->format = profiles[0]->getFormat();
         }
         return INVALID_OPERATION;
@@ -1323,7 +1341,7 @@
 
     *selectedDeviceId = getFirstDeviceId(outputDevices);
     for (auto &outputDevice : outputDevices) {
-        if (outputDevice->getId() == getConfig().getDefaultOutputDevice()->getId()) {
+        if (outputDevice->getId() == mConfig->getDefaultOutputDevice()->getId()) {
             *selectedDeviceId = outputDevice->getId();
             break;
         }
@@ -1908,7 +1926,8 @@
 }
 
 bool AudioPolicyManager::msdHasPatchesToAllDevices(const AudioDeviceTypeAddrVector& devices) {
-    DeviceVector devicesToCheck = mOutputDevicesAll.getDevicesFromDeviceTypeAddrVec(devices);
+    DeviceVector devicesToCheck =
+            mConfig->getOutputDevices().getDevicesFromDeviceTypeAddrVec(devices);
     AudioPatchCollection msdPatches = getMsdOutputPatches();
     for (size_t i = 0; i < msdPatches.size(); i++) {
         const auto& patch = msdPatches[i];
@@ -2139,6 +2158,26 @@
                 return DEAD_OBJECT;
             }
             info->increaseActiveClient();
+            if (info->getActiveClientCount() == 1 &&
+                (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE) {
+                // If this is the first bit-perfect client, reroute all clients that will be routed
+                // to the bit-perfect sink to guarantee that only the bit-perfect stream is active.
+                PortHandleVector clientsToInvalidate;
+                for (size_t i = 0; i < mOutputs.size(); i++) {
+                    if (mOutputs[i] == outputDesc ||
+                        !mOutputs[i]->devices().filter(outputDesc->devices()).isEmpty()) {
+                        continue;
+                    }
+                    for (const auto& c : mOutputs[i]->getClientIterable()) {
+                        clientsToInvalidate.push_back(c->portId());
+                    }
+                }
+                if (!clientsToInvalidate.empty()) {
+                    ALOGD("%s Invalidate clients due to first bit-perfect client started",
+                          __func__);
+                    mpClientInterface->invalidateTracks(clientsToInvalidate);
+                }
+            }
         }
     }
 
@@ -2228,8 +2267,7 @@
     outputDesc->setClientActive(client, true);
 
     if (client->hasPreferredDevice(true)) {
-        if (outputDesc->clientsList(true /*activeOnly*/).size() == 1 &&
-                client->isPreferredDeviceForExclusiveUse()) {
+        if (outputDesc->sameExclusivePreferredDevicesCount() > 0) {
             // Preferred device may be exclusive, use only if no other active clients on this output
             devices = DeviceVector(
                         mAvailableOutputDevices.getDeviceFromId(client->preferredDeviceId()));
@@ -2461,7 +2499,8 @@
             }
         }
         bool forceDeviceUpdate = false;
-        if (client->hasPreferredDevice(true)) {
+        if (client->hasPreferredDevice(true) &&
+                outputDesc->sameExclusivePreferredDevicesCount() < 2) {
             checkStrategyRoute(client->strategy(), AUDIO_IO_HANDLE_NONE);
             forceDeviceUpdate = true;
         }
@@ -2740,10 +2779,15 @@
         status_t ret = getProfilesForDevices(
                 DeviceVector(device), profiles, flags, true /*isInput*/);
         if (ret == NO_ERROR && !profiles.empty()) {
-            config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
-                    : *profiles[0]->getChannels().begin();
-            config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
-                    : *profiles[0]->getSampleRates().begin();
+            const auto channels = profiles[0]->getChannels();
+            if (!channels.empty() && (channels.find(config->channel_mask) == channels.end())) {
+                config->channel_mask = *channels.begin();
+            }
+            const auto sampleRates = profiles[0]->getSampleRates();
+            if (!sampleRates.empty() &&
+                    (sampleRates.find(config->sample_rate) == sampleRates.end())) {
+                config->sample_rate = *sampleRates.begin();
+            }
             config->format = profiles[0]->getFormat();
         }
         goto error;
@@ -3774,6 +3818,44 @@
     return true;
 }
 
+void AudioPolicyManager::changeOutputDevicesMuteState(
+        const AudioDeviceTypeAddrVector& devices) {
+    ALOGVV("%s() num devices %zu", __func__, devices.size());
+
+    std::vector<sp<SwAudioOutputDescriptor>> outputs =
+            getSoftwareOutputsForDevices(devices);
+
+    for (size_t i = 0; i < outputs.size(); i++) {
+        sp<SwAudioOutputDescriptor> outputDesc = outputs[i];
+        DeviceVector prevDevices = outputDesc->devices();
+        checkDeviceMuteStrategies(outputDesc, prevDevices, 0 /* delayMs */);
+    }
+}
+
+std::vector<sp<SwAudioOutputDescriptor>> AudioPolicyManager::getSoftwareOutputsForDevices(
+        const AudioDeviceTypeAddrVector& devices) const
+{
+    std::vector<sp<SwAudioOutputDescriptor>> outputs;
+    DeviceVector deviceDescriptors;
+    for (size_t j = 0; j < devices.size(); j++) {
+        sp<DeviceDescriptor> desc = mHwModules.getDeviceDescriptor(
+                devices[j].mType, devices[j].getAddress(), String8(), AUDIO_FORMAT_DEFAULT);
+        if (desc == nullptr || !audio_is_output_device(devices[j].mType)) {
+            ALOGE("%s: device type %#x address %s not supported or not an output device",
+                __func__, devices[j].mType, devices[j].getAddress());
+            continue;
+        }
+        deviceDescriptors.add(desc);
+    }
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+        if (!mOutputs.valueAt(i)->supportsAtLeastOne(deviceDescriptors)) {
+            continue;
+        }
+        outputs.push_back(mOutputs.valueAt(i));
+    }
+    return outputs;
+}
+
 status_t AudioPolicyManager::setUidDeviceAffinities(uid_t uid,
         const AudioDeviceTypeAddrVector& devices) {
     ALOGV("%s() uid=%d num devices %zu", __FUNCTION__, uid, devices.size());
@@ -3840,7 +3922,8 @@
     return NO_ERROR;
 }
 
-void AudioPolicyManager::updateCallAndOutputRouting(bool forceVolumeReeval, uint32_t delayMs)
+void AudioPolicyManager::updateCallAndOutputRouting(bool forceVolumeReeval, uint32_t delayMs,
+    bool skipDelays)
 {
     uint32_t waitMs = 0;
     bool wasLeUnicastActive = isLeUnicastActive();
@@ -3866,8 +3949,8 @@
                 continue;
             }
             waitMs = setOutputDevices(outputDesc, newDevices, forceRouting, delayMs, nullptr,
-                                      true /*requiresMuteCheck*/,
-                                      !forceRouting /*requiresVolumeCheck*/);
+                                      !skipDelays /*requiresMuteCheck*/,
+                                      !forceRouting /*requiresVolumeCheck*/, skipDelays);
             // Only apply special touch sound delay once
             delayMs = 0;
         }
@@ -4052,13 +4135,18 @@
 
     // reevaluate outputs for all devices
     checkForDeviceAndOutputChanges();
-    updateCallAndOutputRouting();
+    changeOutputDevicesMuteState(devices);
+    updateCallAndOutputRouting(false /* forceVolumeReeval */, 0 /* delayMs */,
+        true /* skipDelays */);
+    changeOutputDevicesMuteState(devices);
 
     return NO_ERROR;
 }
 
 status_t AudioPolicyManager::removeUserIdDeviceAffinities(int userId) {
     ALOGV("%s() userId=%d", __FUNCTION__, userId);
+    AudioDeviceTypeAddrVector devices;
+    mPolicyMixes.getDevicesForUserId(userId, devices);
     status_t status = mPolicyMixes.removeUserIdDeviceAffinities(userId);
     if (status != NO_ERROR) {
         ALOGE("%s() Could not remove all device affinities fo userId = %d",
@@ -4068,7 +4156,10 @@
 
     // reevaluate outputs for all devices
     checkForDeviceAndOutputChanges();
-    updateCallAndOutputRouting();
+    changeOutputDevicesMuteState(devices);
+    updateCallAndOutputRouting(false /* forceVolumeReeval */, 0 /* delayMs */,
+        true /* skipDelays */);
+    changeOutputDevicesMuteState(devices);
 
     return NO_ERROR;
 }
@@ -4099,13 +4190,13 @@
     dst->appendFormat(" TTS output %savailable\n", mTtsOutputAvailable ? "" : "not ");
     dst->appendFormat(" Master mono: %s\n", mMasterMono ? "on" : "off");
     dst->appendFormat(" Communication Strategy id: %d\n", mCommunnicationStrategy);
-    dst->appendFormat(" Config source: %s\n", mConfig.getSource().c_str()); // getConfig not const
+    dst->appendFormat(" Config source: %s\n", mConfig->getSource().c_str());
 
     dst->append("\n");
     mAvailableOutputDevices.dump(dst, String8("Available output"), 1);
     dst->append("\n");
     mAvailableInputDevices.dump(dst, String8("Available input"), 1);
-    mHwModulesAll.dump(dst);
+    mHwModules.dump(dst);
     mOutputs.dump(dst);
     mInputs.dump(dst);
     mEffects.dump(dst, 1);
@@ -4365,6 +4456,11 @@
         ALOGE("%s the requested device is currently unavailable", __func__);
         return BAD_VALUE;
     }
+    if (!audio_is_usb_out_device(deviceDescriptor->type())) {
+        ALOGE("%s the requested device(type=%#x) is not usb device", __func__,
+              deviceDescriptor->type());
+        return BAD_VALUE;
+    }
     for (const auto& hwModule : mHwModules) {
         for (const auto& curProfile : hwModule->getOutputProfiles()) {
             if (curProfile->supportsDevice(deviceDescriptor)) {
@@ -4472,16 +4568,24 @@
 }
 
 sp<PreferredMixerAttributesInfo> AudioPolicyManager::getPreferredMixerAttributesInfo(
-        audio_port_handle_t devicePortId, product_strategy_t strategy) {
+        audio_port_handle_t devicePortId,
+        product_strategy_t strategy,
+        bool activeBitPerfectPreferred) {
     auto it = mPreferredMixerAttrInfos.find(devicePortId);
     if (it == mPreferredMixerAttrInfos.end()) {
         return nullptr;
     }
-    auto mixerAttrInfoIt = it->second.find(strategy);
-    if (mixerAttrInfoIt == it->second.end()) {
-        return nullptr;
+    if (activeBitPerfectPreferred) {
+        for (auto [strategy, info] : it->second) {
+            if ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
+                && info->getActiveClientCount() != 0) {
+                return info;
+            }
+        }
     }
-    return mixerAttrInfoIt->second;
+    auto strategyMatchedMixerAttrInfoIt = it->second.find(strategy);
+    return strategyMatchedMixerAttrInfoIt == it->second.end()
+            ? nullptr : strategyMatchedMixerAttrInfoIt->second;
 }
 
 status_t AudioPolicyManager::getPreferredMixerAttributes(
@@ -4602,6 +4706,28 @@
     return NO_ERROR;
 }
 
+status_t AudioPolicyManager::listDeclaredDevicePorts(media::AudioPortRole role,
+        std::vector<media::AudioPortFw>* _aidl_return) {
+    auto pushPort = [&](const sp<DeviceDescriptor>& dev) -> status_t {
+        audio_port_v7 port;
+        dev->toAudioPort(&port);
+        auto aidlPort = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_v7_AudioPortFw(port));
+        _aidl_return->push_back(std::move(aidlPort));
+        return OK;
+    };
+
+    for (const auto& module : mHwModules) {
+        for (const auto& dev : module->getDeclaredDevices()) {
+            if (role == media::AudioPortRole::NONE ||
+                    ((role == media::AudioPortRole::SOURCE)
+                            == audio_is_input_device(dev->type()))) {
+                RETURN_STATUS_IF_ERROR(pushPort(dev));
+            }
+        }
+    }
+    return OK;
+}
+
 status_t AudioPolicyManager::getAudioPort(struct audio_port_v7 *port)
 {
     if (port == nullptr || port->id == AUDIO_PORT_HANDLE_NONE) {
@@ -5497,10 +5623,10 @@
     size_t formatsWritten = 0;
     size_t formatsMax = *numSurroundFormats;
 
-    *numSurroundFormats = mConfig.getSurroundFormats().size();
+    *numSurroundFormats = mConfig->getSurroundFormats().size();
     audio_policy_forced_cfg_t forceUse = mEngine->getForceUse(
             AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND);
-    for (const auto& format: mConfig.getSurroundFormats()) {
+    for (const auto& format: mConfig->getSurroundFormats()) {
         if (formatsWritten < formatsMax) {
             surroundFormats[formatsWritten] = format.first;
             bool formatEnabled = true;
@@ -5553,10 +5679,10 @@
         formatset.insert(encodedFormats.begin(), encodedFormats.end());
         // Filter the formats which are supported by the vendor hardware.
         for (auto it = formatset.begin(); it != formatset.end(); ++it) {
-            if (mConfig.getSurroundFormats().count(*it) != 0) {
+            if (mConfig->getSurroundFormats().count(*it) != 0) {
                 formats.insert(*it);
             } else {
-                for (const auto& pair : mConfig.getSurroundFormats()) {
+                for (const auto& pair : mConfig->getSurroundFormats()) {
                     if (pair.second.count(*it) != 0) {
                         formats.insert(pair.first);
                         break;
@@ -5577,8 +5703,8 @@
 status_t AudioPolicyManager::setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled)
 {
     ALOGV("%s() format 0x%X enabled %d", __func__, audioFormat, enabled);
-    const auto& formatIter = mConfig.getSurroundFormats().find(audioFormat);
-    if (formatIter == mConfig.getSurroundFormats().end()) {
+    const auto& formatIter = mConfig->getSurroundFormats().find(audioFormat);
+    if (formatIter == mConfig->getSurroundFormats().end()) {
         ALOGW("%s() format 0x%X is not a known surround format", __func__, audioFormat);
         return BAD_VALUE;
     }
@@ -5733,7 +5859,7 @@
 
 bool AudioPolicyManager::isCallScreenModeSupported()
 {
-    return getConfig().isCallScreenModeSupported();
+    return mConfig->isCallScreenModeSupported();
 }
 
 
@@ -5966,26 +6092,16 @@
     return mAudioPortGeneration++;
 }
 
-static status_t deserializeAudioPolicyXmlConfig(AudioPolicyConfig &config) {
-    if (std::string audioPolicyXmlConfigFile = audio_get_audio_policy_config_file();
-            !audioPolicyXmlConfigFile.empty()) {
-        status_t ret = deserializeAudioPolicyFile(audioPolicyXmlConfigFile.c_str(), &config);
-        if (ret == NO_ERROR) {
-            config.setSource(audioPolicyXmlConfigFile);
-        }
-        return ret;
-    }
-    return BAD_VALUE;
-}
-
-AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterface,
-                                       bool /*forTesting*/)
+AudioPolicyManager::AudioPolicyManager(const sp<const AudioPolicyConfig>& config,
+                                       EngineInstance&& engine,
+                                       AudioPolicyClientInterface *clientInterface)
     :
     mUidCached(AID_AUDIOSERVER), // no need to call getuid(), there's only one of us running.
+    mConfig(config),
+    mEngine(std::move(engine)),
     mpClientInterface(clientInterface),
     mLimitRingtoneVolume(false), mLastVoiceVolume(-1.0f),
     mA2dpSuspended(false),
-    mConfig(mHwModulesAll, mOutputDevicesAll, mInputDevicesAll, mDefaultOutputDevice),
     mAudioPortGeneration(1),
     mBeaconMuteRefCount(0),
     mBeaconPlayingRefCount(0),
@@ -5996,32 +6112,9 @@
 {
 }
 
-AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterface)
-        : AudioPolicyManager(clientInterface, false /*forTesting*/)
-{
-    loadConfig();
-}
-
-void AudioPolicyManager::loadConfig() {
-    if (deserializeAudioPolicyXmlConfig(getConfig()) != NO_ERROR) {
-        ALOGE("could not load audio policy configuration file, setting defaults");
-        getConfig().setDefault();
-    }
-}
-
 status_t AudioPolicyManager::initialize() {
-    {
-        auto engLib = EngineLibrary::load(
-                        "libaudiopolicyengine" + getConfig().getEngineLibraryNameSuffix() + ".so");
-        if (!engLib) {
-            ALOGE("%s: Failed to load the engine library", __FUNCTION__);
-            return NO_INIT;
-        }
-        mEngine = engLib->createEngine();
-        if (mEngine == nullptr) {
-            ALOGE("%s: Failed to instantiate the APM engine", __FUNCTION__);
-            return NO_INIT;
-        }
+    if (mEngine == nullptr) {
+        return NO_INIT;
     }
     mEngine->setObserver(this);
     status_t status = mEngine->initCheck();
@@ -6030,29 +6123,22 @@
         return status;
     }
 
-    // If microphones address is empty, set it according to device type
-    for (size_t i = 0; i < mInputDevicesAll.size(); i++) {
-        if (mInputDevicesAll[i]->address().empty()) {
-            if (mInputDevicesAll[i]->type() == AUDIO_DEVICE_IN_BUILTIN_MIC) {
-                mInputDevicesAll[i]->setAddress(AUDIO_BOTTOM_MICROPHONE_ADDRESS);
-            } else if (mInputDevicesAll[i]->type() == AUDIO_DEVICE_IN_BACK_MIC) {
-                mInputDevicesAll[i]->setAddress(AUDIO_BACK_MICROPHONE_ADDRESS);
-            }
-        }
-    }
-
-    mEngine->updateDeviceSelectionCache();
+    // The actual device selection cache will be updated when calling `updateDevicesAndOutputs`
+    // at the end of this function.
+    mEngine->initializeDeviceSelectionCache();
     mCommunnicationStrategy = mEngine->getProductStrategyForAttributes(
         mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL));
 
-    // after parsing the config, mOutputDevicesAll and mInputDevicesAll contain all known devices;
+    // after parsing the config, mConfig contains all known devices;
     // open all output streams needed to access attached devices
     onNewAudioModulesAvailableInt(nullptr /*newDevices*/);
 
     // make sure default device is reachable
-    if (mDefaultOutputDevice == 0 || !mAvailableOutputDevices.contains(mDefaultOutputDevice)) {
-        ALOGE_IF(mDefaultOutputDevice != 0, "Default device %s is unreachable",
-                 mDefaultOutputDevice->toString().c_str());
+    if (const auto defaultOutputDevice = mConfig->getDefaultOutputDevice();
+            defaultOutputDevice == nullptr ||
+            !mAvailableOutputDevices.contains(defaultOutputDevice)) {
+        ALOGE_IF(defaultOutputDevice != nullptr, "Default device %s is unreachable",
+                 defaultOutputDevice->toString().c_str());
         status = NO_INIT;
     }
     ALOGW_IF(mPrimaryOutput == nullptr, "The policy configuration does not declare a primary output");
@@ -6077,8 +6163,8 @@
    mOutputs.clear();
    mInputs.clear();
    mHwModules.clear();
-   mHwModulesAll.clear();
    mManualSurroundFormats.clear();
+   mConfig.clear();
 }
 
 status_t AudioPolicyManager::initCheck()
@@ -6100,14 +6186,18 @@
 
 void AudioPolicyManager::onNewAudioModulesAvailableInt(DeviceVector *newDevices)
 {
-    for (const auto& hwModule : mHwModulesAll) {
+    for (const auto& hwModule : mConfig->getHwModules()) {
         if (std::find(mHwModules.begin(), mHwModules.end(), hwModule) != mHwModules.end()) {
             continue;
         }
-        hwModule->setHandle(mpClientInterface->loadHwModule(hwModule->getName()));
         if (hwModule->getHandle() == AUDIO_MODULE_HANDLE_NONE) {
-            ALOGW("could not open HW module %s", hwModule->getName());
-            continue;
+            if (audio_module_handle_t handle = mpClientInterface->loadHwModule(hwModule->getName());
+                    handle != AUDIO_MODULE_HANDLE_NONE) {
+                hwModule->setHandle(handle);
+            } else {
+                ALOGW("could not load HW module %s", hwModule->getName());
+                continue;
+            }
         }
         mHwModules.push_back(hwModule);
         // open all output streams needed to access attached devices.
@@ -6129,10 +6219,10 @@
             }
 
             const DeviceVector &supportedDevices = outProfile->getSupportedDevices();
-            DeviceVector availProfileDevices = supportedDevices.filter(mOutputDevicesAll);
+            DeviceVector availProfileDevices = supportedDevices.filter(mConfig->getOutputDevices());
             sp<DeviceDescriptor> supportedDevice = 0;
-            if (supportedDevices.contains(mDefaultOutputDevice)) {
-                supportedDevice = mDefaultOutputDevice;
+            if (supportedDevices.contains(mConfig->getDefaultOutputDevice())) {
+                supportedDevice = mConfig->getDefaultOutputDevice();
             } else {
                 // choose first device present in profile's SupportedDevices also part of
                 // mAvailableOutputDevices.
@@ -6141,7 +6231,7 @@
                 }
                 supportedDevice = availProfileDevices.itemAt(0);
             }
-            if (!mOutputDevicesAll.contains(supportedDevice)) {
+            if (!mConfig->getOutputDevices().contains(supportedDevice)) {
                 continue;
             }
             sp<SwAudioOutputDescriptor> outputDesc = new SwAudioOutputDescriptor(outProfile,
@@ -6196,7 +6286,7 @@
             // choose first device present in profile's SupportedDevices also part of
             // available input devices
             const DeviceVector &supportedDevices = inProfile->getSupportedDevices();
-            DeviceVector availProfileDevices = supportedDevices.filter(mInputDevicesAll);
+            DeviceVector availProfileDevices = supportedDevices.filter(mConfig->getInputDevices());
             if (availProfileDevices.isEmpty()) {
                 ALOGV("%s: Input device list is empty! for profile %s",
                     __func__, inProfile->getTagName().c_str());
@@ -6447,6 +6537,14 @@
     }
 
     if (state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
+        // first call getAudioPort to get the supported attributes from the HAL
+        struct audio_port_v7 port = {};
+        device->toAudioPort(&port);
+        status_t status = mpClientInterface->getAudioPort(&port);
+        if (status == NO_ERROR) {
+            device->importAudioPort(port);
+        }
+
         // look for input profiles that can be routed to this device
         SortedVector< sp<IOProfile> > profiles;
         for (const auto& hwModule : mHwModules) {
@@ -6498,11 +6596,7 @@
 
             desc = new AudioInputDescriptor(profile, mpClientInterface);
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
-            status_t status = desc->open(nullptr,
-                                         device,
-                                         AUDIO_SOURCE_MIC,
-                                         AUDIO_INPUT_FLAG_NONE,
-                                         &input);
+            status = desc->open(nullptr, device, AUDIO_SOURCE_MIC, AUDIO_INPUT_FLAG_NONE, &input);
 
             if (status == NO_ERROR) {
                 const String8& address = String8(device->address().c_str());
@@ -7312,7 +7406,8 @@
                                               bool force,
                                               int delayMs,
                                               audio_patch_handle_t *patchHandle,
-                                              bool requiresMuteCheck, bool requiresVolumeCheck)
+                                              bool requiresMuteCheck, bool requiresVolumeCheck,
+                                              bool skipMuteDelay)
 {
     // TODO(b/262404095): Consider if the output need to be reopened.
     ALOGV("%s device %s delayMs %d", __func__, devices.toString().c_str(), delayMs);
@@ -7320,9 +7415,9 @@
 
     if (outputDesc->isDuplicated()) {
         muteWaitMs = setOutputDevices(outputDesc->subOutput1(), devices, force, delayMs,
-                nullptr /* patchHandle */, requiresMuteCheck);
+                nullptr /* patchHandle */, requiresMuteCheck, skipMuteDelay);
         muteWaitMs += setOutputDevices(outputDesc->subOutput2(), devices, force, delayMs,
-                nullptr /* patchHandle */, requiresMuteCheck);
+                nullptr /* patchHandle */, requiresMuteCheck, skipMuteDelay);
         return muteWaitMs;
     }
 
@@ -7388,12 +7483,16 @@
 
         // Add half reported latency to delayMs when muteWaitMs is null in order
         // to avoid disordered sequence of muting volume and changing devices.
-        installPatch(__func__, patchHandle, outputDesc.get(), patchBuilder.patch(),
-                muteWaitMs == 0 ? (delayMs + (outputDesc->latency() / 2)) : delayMs);
+        int actualDelayMs = !skipMuteDelay && muteWaitMs == 0
+                ? (delayMs + (outputDesc->latency() / 2)) : delayMs;
+        installPatch(__func__, patchHandle, outputDesc.get(), patchBuilder.patch(), actualDelayMs);
     }
 
-    // update stream volumes according to new device
-    applyStreamVolumes(outputDesc, filteredDevices.types(), delayMs);
+    // Since muting is skipped, also skip applying stream volumes as they will be applied externally.
+    if (!skipMuteDelay) {
+        // update stream volumes according to new device
+        applyStreamVolumes(outputDesc, filteredDevices.types(), delayMs);
+    }
 
     return muteWaitMs;
 }
@@ -7952,7 +8051,7 @@
     std::unordered_set<audio_format_t> enforcedSurround(
             devDesc->encodedFormats().begin(), devDesc->encodedFormats().end());
     std::unordered_set<audio_format_t> allSurround;  // A flat set of all known surround formats
-    for (const auto& pair : mConfig.getSurroundFormats()) {
+    for (const auto& pair : mConfig->getSurroundFormats()) {
         allSurround.insert(pair.first);
         for (const auto& subformat : pair.second) allSurround.insert(subformat);
     }
@@ -8049,12 +8148,17 @@
                 ioHandle, String8(AudioParameter::keyStreamSupportedFormats));
         ALOGV("%s: supported formats %d, %s", __FUNCTION__, ioHandle, reply.string());
         AudioParameter repliedParameters(reply);
+        FormatVector formats;
         if (repliedParameters.get(
-                String8(AudioParameter::keyStreamSupportedFormats), reply) != NO_ERROR) {
-            ALOGE("%s: failed to retrieve format, bailing out", __FUNCTION__);
+                String8(AudioParameter::keyStreamSupportedFormats), reply) == NO_ERROR) {
+            formats = formatsFromString(reply.string());
+        } else if (devDesc->hasValidAudioProfile()) {
+            ALOGD("%s: using the device profiles", __func__);
+            formats = devDesc->getAudioProfiles().getSupportedFormats();
+        } else {
+            ALOGE("%s: failed to retrieve format, bailing out", __func__);
             return;
         }
-        FormatVector formats = formatsFromString(reply.string());
         mReportedFormatsMap[devDesc] = formats;
         if (device == AUDIO_DEVICE_OUT_HDMI
                 || isDeviceOfModule(devDesc, AUDIO_HARDWARE_MODULE_ID_MSD)) {
@@ -8064,7 +8168,7 @@
     }
 
     for (audio_format_t format : profiles.getSupportedFormats()) {
-        ChannelMaskSet channelMasks;
+        std::optional<ChannelMaskSet> channelMasks;
         SampleRateSet samplingRates;
         AudioParameter requestedParameters;
         requestedParameters.addInt(String8(AudioParameter::keyFormat), format);
@@ -8079,6 +8183,8 @@
             if (repliedParameters.get(
                     String8(AudioParameter::keyStreamSupportedSamplingRates), reply) == NO_ERROR) {
                 samplingRates = samplingRatesFromString(reply.string());
+            } else {
+                samplingRates = devDesc->getAudioProfiles().getSampleRatesFor(format);
             }
         }
         if (profiles.hasDynamicChannelsFor(format)) {
@@ -8090,14 +8196,17 @@
             if (repliedParameters.get(
                     String8(AudioParameter::keyStreamSupportedChannels), reply) == NO_ERROR) {
                 channelMasks = channelMasksFromString(reply.string());
-                if (device == AUDIO_DEVICE_OUT_HDMI
-                        || isDeviceOfModule(devDesc, AUDIO_HARDWARE_MODULE_ID_MSD)) {
-                    modifySurroundChannelMasks(&channelMasks);
-                }
+            } else {
+                channelMasks = devDesc->getAudioProfiles().getChannelMasksFor(format);
+            }
+            if (channelMasks.has_value() && (device == AUDIO_DEVICE_OUT_HDMI
+                    || isDeviceOfModule(devDesc, AUDIO_HARDWARE_MODULE_ID_MSD))) {
+                modifySurroundChannelMasks(&channelMasks.value());
             }
         }
         addDynamicAudioProfileAndSort(
-                profiles, new AudioProfile(format, channelMasks, samplingRates));
+                profiles, new AudioProfile(
+                        format, channelMasks.value_or(ChannelMaskSet()), samplingRates));
     }
 }
 
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index ed212c2..863c785 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -35,6 +35,7 @@
 #include <media/PatchBuilder.h>
 #include "AudioPolicyInterface.h"
 
+#include <android/media/DeviceConnectedState.h>
 #include <android/media/audio/common/AudioPort.h>
 #include <AudioPolicyManagerObserver.h>
 #include <AudioPolicyConfig.h>
@@ -93,7 +94,9 @@
 {
 
 public:
-        explicit AudioPolicyManager(AudioPolicyClientInterface *clientInterface);
+        AudioPolicyManager(const sp<const AudioPolicyConfig>& config,
+                           EngineInstance&& engine,
+                           AudioPolicyClientInterface *clientInterface);
         virtual ~AudioPolicyManager();
 
         // AudioPolicyInterface
@@ -263,6 +266,8 @@
                                         unsigned int *num_ports,
                                         struct audio_port_v7 *ports,
                                         unsigned int *generation);
+                status_t listDeclaredDevicePorts(media::AudioPortRole role,
+                                                 std::vector<media::AudioPortFw>* result) override;
         virtual status_t getAudioPort(struct audio_port_v7 *port);
         virtual status_t createAudioPatch(const struct audio_patch *patch,
                                            audio_patch_handle_t *handle,
@@ -424,19 +429,7 @@
         status_t initialize();
 
 protected:
-        // A constructor that allows more fine-grained control over initialization process,
-        // used in automatic tests.
-        AudioPolicyManager(AudioPolicyClientInterface *clientInterface, bool forTesting);
-
-        // These methods should be used when finer control over APM initialization
-        // is needed, e.g. in tests. Must be used in conjunction with the constructor
-        // that only performs fields initialization. The public constructor comprises
-        // these steps in the following sequence:
-        //   - field initializing constructor;
-        //   - loadConfig;
-        //   - initialize.
-        AudioPolicyConfig& getConfig() { return mConfig; }
-        void loadConfig();
+        const AudioPolicyConfig& getConfig() const { return *(mConfig.get()); }
 
         // From AudioPolicyManagerObserver
         virtual const AudioPatchCollection &getAudioPatches() const
@@ -470,7 +463,7 @@
         }
         virtual const sp<DeviceDescriptor> &getDefaultOutputDevice() const
         {
-            return mDefaultOutputDevice;
+            return mConfig->getDefaultOutputDevice();
         }
 
         std::vector<volume_group_t> getVolumeGroups() const
@@ -542,8 +535,9 @@
          *        and currently active, allow to have proper drain and avoid pops
          * @param requiresVolumeCheck true if called requires to reapply volume if the routing did
          * not change (but the output is still routed).
+         * @param skipMuteDelay if true, skip the mute delay when installing the audio patch
          * @return the number of ms we have slept to allow new routing to take effect in certain
-         * cases.
+         *        cases.
          */
         uint32_t setOutputDevices(const sp<SwAudioOutputDescriptor>& outputDesc,
                                   const DeviceVector &device,
@@ -551,7 +545,8 @@
                                   int delayMs = 0,
                                   audio_patch_handle_t *patchHandle = NULL,
                                   bool requiresMuteCheck = true,
-                                  bool requiresVolumeCheck = false);
+                                  bool requiresVolumeCheck = false,
+                                  bool skipMuteDelay = false);
         status_t resetOutputDevice(const sp<AudioOutputDescriptor>& outputDesc,
                                    int delayMs = 0,
                                    audio_patch_handle_t *patchHandle = NULL);
@@ -654,8 +649,10 @@
         /**
          * @brief updates routing for all outputs (including call if call in progress).
          * @param delayMs delay for unmuting if required
+         * @param skipDelays if true, all delays will be skipped while updating routing
          */
-        void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0);
+        void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0,
+                bool skipDelays = false);
 
         bool isCallRxAudioSource(const sp<SourceClientDescriptor> &source) {
             return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
@@ -929,6 +926,8 @@
                 sp<SwAudioOutputDescriptor> ignoredOutput, uint32_t delayMs);
 
         const uid_t mUidCached;                         // AID_AUDIOSERVER
+        sp<const AudioPolicyConfig> mConfig;
+        EngineInstance mEngine;                         // Audio Policy Engine instance
         AudioPolicyClientInterface *mpClientInterface;  // audio policy client interface
         sp<SwAudioOutputDescriptor> mPrimaryOutput;     // primary output descriptor
         // list of descriptors for outputs currently opened
@@ -941,8 +940,6 @@
         SwAudioOutputCollection mPreviousOutputs;
         AudioInputCollection mInputs;     // list of input descriptors
 
-        DeviceVector  mOutputDevicesAll; // all output devices from the config
-        DeviceVector  mInputDevicesAll;  // all input devices from the config
         DeviceVector  mAvailableOutputDevices; // all available output devices
         DeviceVector  mAvailableInputDevices;  // all available input devices
 
@@ -952,11 +949,7 @@
         bool    mA2dpSuspended;  // true if A2DP output is suspended
 
         EffectDescriptorCollection mEffects;  // list of registered audio effects
-        sp<DeviceDescriptor> mDefaultOutputDevice; // output device selected by default at boot time
         HwModuleCollection mHwModules; // contains modules that have been loaded successfully
-        HwModuleCollection mHwModulesAll; // contains all modules declared in the config
-
-        AudioPolicyConfig mConfig;
 
         std::atomic<uint32_t> mAudioPortGeneration;
 
@@ -987,9 +980,6 @@
 
         uint32_t nextAudioPortGeneration();
 
-        // Audio Policy Engine Interface.
-        EngineInstance mEngine;
-
         // Surround formats that are enabled manually. Taken into account when
         // "encoded surround" is forced into "manual" mode.
         std::unordered_set<audio_format_t> mManualSurroundFormats;
@@ -1058,13 +1048,16 @@
         void updateAudioProfiles(const sp<DeviceDescriptor>& devDesc, audio_io_handle_t ioHandle,
                 AudioProfileVector &profiles);
 
+        // Notify the policy client to prepare for disconnecting an external device.
+        void prepareToDisconnectExternalDevice(const sp<DeviceDescriptor> &device);
+
         // Notify the policy client of any change of device state with AUDIO_IO_HANDLE_NONE,
         // so that the client interprets it as global to audio hardware interfaces.
         // It can give a chance to HAL implementer to retrieve dynamic capabilities associated
         // to this device for example.
         // TODO avoid opening stream to retrieve capabilities of a profile.
         void broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
-                                            audio_policy_dev_state_t state);
+                                            media::DeviceConnectedState state);
 
         // updates device caching and output for streams that can influence the
         //    routing of notifications
@@ -1252,6 +1245,21 @@
                 const char* context,
                 bool matchAddress = true);
 
+        /**
+         * @brief changeOutputDevicesMuteState mute/unmute devices using checkDeviceMuteStrategies
+         * @param devices devices to mute/unmute
+         */
+        void changeOutputDevicesMuteState(const AudioDeviceTypeAddrVector& devices);
+
+        /**
+         * @brief Returns a vector of software output descriptors that support the queried devices
+         * @param devices devices to query
+         * @param openOutputs open outputs where the devices are supported as determined by
+         *      SwAudioOutputDescriptor::supportsAtLeastOne
+         */
+        std::vector<sp<SwAudioOutputDescriptor>> getSoftwareOutputsForDevices(
+                const AudioDeviceTypeAddrVector& devices) const;
+
         bool isScoRequestedForComm() const;
 
         bool isHearingAidUsedForComm() const;
@@ -1309,8 +1317,15 @@
                                        uint32_t flags,
                                        bool isInput);
 
+        /**
+         * Returns the preferred mixer attributes info for the given device port id and strategy.
+         * Bit-perfect mixer attributes will be returned if they are active and
+         * `activeBitPerfectPreferred` is true.
+         */
         sp<PreferredMixerAttributesInfo> getPreferredMixerAttributesInfo(
-                audio_port_handle_t devicePortId, product_strategy_t strategy);
+                audio_port_handle_t devicePortId,
+                product_strategy_t strategy,
+                bool activeBitPerfectPreferred = false);
 
         sp<SwAudioOutputDescriptor> reopenOutput(
                 sp<SwAudioOutputDescriptor> outputDesc,
diff --git a/services/audiopolicy/managerdefault/EngineLibrary.cpp b/services/audiopolicy/managerdefault/EngineLibrary.cpp
index ef699aa..ab77941 100644
--- a/services/audiopolicy/managerdefault/EngineLibrary.cpp
+++ b/services/audiopolicy/managerdefault/EngineLibrary.cpp
@@ -23,9 +23,44 @@
 
 namespace android {
 
-// static
-std::shared_ptr<EngineLibrary> EngineLibrary::load(std::string libraryPath)
+EngineInstance loadApmEngineLibraryAndCreateEngine(const std::string& librarySuffix,
+        const std::string& configXmlFilePath)
 {
+    auto engLib = EngineLibrary::load(librarySuffix);
+    if (!engLib) {
+        ALOGE("%s: Failed to load the engine library, suffix \"%s\"",
+                __func__, librarySuffix.c_str());
+        return nullptr;
+    }
+    auto engine = engLib->createEngineUsingXmlConfig(configXmlFilePath);
+    if (engine == nullptr) {
+        ALOGE("%s: Failed to instantiate the APM engine", __func__);
+        return nullptr;
+    }
+    return engine;
+}
+
+EngineInstance loadApmEngineLibraryAndCreateEngine(const std::string& librarySuffix,
+        const media::audio::common::AudioHalEngineConfig& config)
+{
+    auto engLib = EngineLibrary::load(librarySuffix);
+    if (!engLib) {
+        ALOGE("%s: Failed to load the engine library, suffix \"%s\"",
+                __func__, librarySuffix.c_str());
+        return nullptr;
+    }
+    auto engine = engLib->createEngineUsingHalConfig(config);
+    if (engine == nullptr) {
+        ALOGE("%s: Failed to instantiate the APM engine", __func__);
+        return nullptr;
+    }
+    return engine;
+}
+
+// static
+std::shared_ptr<EngineLibrary> EngineLibrary::load(const std::string& librarySuffix)
+{
+    std::string libraryPath = "libaudiopolicyengine" + librarySuffix + ".so";
     std::shared_ptr<EngineLibrary> engLib(new EngineLibrary());
     return engLib->init(std::move(libraryPath)) ? engLib : nullptr;
 }
@@ -35,6 +70,36 @@
     close();
 }
 
+EngineInstance EngineLibrary::createEngineUsingXmlConfig(const std::string& xmlFilePath)
+{
+    auto instance = createEngine();
+    if (instance != nullptr) {
+        if (status_t status = instance->loadFromXmlConfigWithFallback(xmlFilePath);
+                status == OK) {
+            return instance;
+        } else {
+            ALOGE("%s: loading of the engine config with XML configuration file \"%s\" failed: %d",
+                    __func__, xmlFilePath.empty() ? "default" : xmlFilePath.c_str(), status);
+        }
+    }
+    return nullptr;
+}
+
+EngineInstance EngineLibrary::createEngineUsingHalConfig(
+        const media::audio::common::AudioHalEngineConfig& config)
+{
+    auto instance = createEngine();
+    if (instance != nullptr) {
+        if (status_t status = instance->loadFromHalConfigWithFallback(config); status == OK) {
+            return instance;
+        } else {
+            ALOGE("%s: loading of the engine config with HAL configuration \"%s\" failed: %d",
+                    __func__, config.toString().c_str(), status);
+        }
+    }
+    return nullptr;
+}
+
 bool EngineLibrary::init(std::string libraryPath)
 {
     mLibraryHandle = dlopen(libraryPath.c_str(), 0);
diff --git a/services/audiopolicy/managerdefault/EngineLibrary.h b/services/audiopolicy/managerdefault/EngineLibrary.h
index f143916..4710e34 100644
--- a/services/audiopolicy/managerdefault/EngineLibrary.h
+++ b/services/audiopolicy/managerdefault/EngineLibrary.h
@@ -21,14 +21,20 @@
 #include <string>
 
 #include <EngineInterface.h>
+#include <android/media/audio/common/AudioHalEngineConfig.h>
 
 namespace android {
 
 using EngineInstance = std::unique_ptr<EngineInterface, std::function<void (EngineInterface*)>>;
 
+EngineInstance loadApmEngineLibraryAndCreateEngine(const std::string& librarySuffix,
+        const std::string& configXmlFilePath = "");
+EngineInstance loadApmEngineLibraryAndCreateEngine(const std::string& librarySuffix,
+        const media::audio::common::AudioHalEngineConfig& config);
+
 class EngineLibrary : public std::enable_shared_from_this<EngineLibrary> {
 public:
-    static std::shared_ptr<EngineLibrary> load(std::string libraryPath);
+    static std::shared_ptr<EngineLibrary> load(const std::string& librarySuffix);
     ~EngineLibrary();
 
     EngineLibrary(const EngineLibrary&) = delete;
@@ -36,11 +42,14 @@
     EngineLibrary& operator=(const EngineLibrary&) = delete;
     EngineLibrary& operator=(EngineLibrary&&) = delete;
 
-    EngineInstance createEngine();
+    EngineInstance createEngineUsingXmlConfig(const std::string& xmlFilePath);
+    EngineInstance createEngineUsingHalConfig(
+            const media::audio::common::AudioHalEngineConfig& config);
 
 private:
     EngineLibrary() = default;
     bool init(std::string libraryPath);
+    EngineInstance createEngine();
     void close();
 
     void *mLibraryHandle = nullptr;
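
With EngineLibrary::createEngine() made private, callers go through loadApmEngineLibraryAndCreateEngine(), which resolves "libaudiopolicyengine<suffix>.so" and loads either an XML or a HAL-provided engine configuration. The sketch below shows how this pairs with the new AudioPolicyManager constructor, for example in a test harness; it assumes the in-tree headers, a caller-supplied AudioPolicyClientInterface, and a hypothetical makeManagerForTest() helper, and mirrors what createAudioPolicyManager() in AudioPolicyService.cpp does later in this change.

    // Sketch under assumptions: in-tree headers are on the include path; `client` is whatever
    // AudioPolicyClientInterface implementation the test provides.
    #include <AudioPolicyConfig.h>
    #include <AudioPolicyManager.h>

    namespace android {

    AudioPolicyManager* makeManagerForTest(AudioPolicyClientInterface* client) {
        // Loads the APM XML configuration, falling back to the default config on error.
        auto config = AudioPolicyConfig::loadFromApmXmlConfigWithFallback();
        // dlopen()s "libaudiopolicyengine<suffix>.so" and loads the engine's own configuration.
        EngineInstance engine =
                loadApmEngineLibraryAndCreateEngine(config->getEngineLibraryNameSuffix());
        if (engine == nullptr) return nullptr;
        AudioPolicyManager* apm = new AudioPolicyManager(config, std::move(engine), client);
        if (apm->initialize() != NO_ERROR) {
            delete apm;
            return nullptr;
        }
        return apm;
    }

    }  // namespace android
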
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index 10403fa..3a530aa 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -7,10 +7,57 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_library_shared {
+cc_defaults {
+    name: "libaudiopolicyservice_dependencies",
+
+    shared_libs: [
+        "libactivitymanager_aidl",
+        "libaudioclient",
+        "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
+        "libaudiohal",
+        "libaudiopolicy",
+        "libaudiopolicycomponents",
+        "libaudiopolicymanagerdefault",
+        "libaudiousecasevalidation",
+        "libaudioutils",
+        "libbinder",
+        "libcutils",
+        "libhardware_legacy",
+        "libheadtracking",
+        "libheadtracking-binding",
+        "liblog",
+        "libmedia_helper",
+        "libmediametrics",
+        "libmediautils",
+        "libpermission",
+        "libsensor",
+        "libsensorprivacy",
+        "libshmemcompat",
+        "libstagefright_foundation",
+        "libutils",
+        "libxml2",
+        "audioclient-types-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "capture_state_listener-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "packagemanager_aidl-cpp",
+        "spatializer-aidl-cpp",
+    ],
+
+    static_libs: [
+        "libeffectsconfig",
+        "libaudiopolicycomponents",
+    ],
+}
+
+cc_library {
     name: "libaudiopolicyservice",
 
     defaults: [
+        "libaudiopolicyservice_dependencies",
         "latest_android_media_audio_common_types_cpp_shared",
     ],
 
@@ -28,44 +75,8 @@
         "frameworks/av/services/audioflinger"
     ],
 
-    shared_libs: [
-        "libactivitymanager_aidl",
-        "libaudioclient",
-        "libaudioclient_aidl_conversion",
-        "libaudiofoundation",
-        "libaudiohal",
-        "libaudiopolicy",
-        "libaudiopolicymanagerdefault",
-        "libaudiousecasevalidation",
-        "libaudioutils",
-        "libbinder",
-        "libcutils",
-        "libeffectsconfig",
-        "libhardware_legacy",
-        "libheadtracking",
-        "libheadtracking-binding",
-        "liblog",
-        "libmedia_helper",
-        "libmediametrics",
-        "libmediautils",
-        "libpermission",
-        "libsensor",
-        "libsensorprivacy",
-        "libshmemcompat",
-        "libutils",
-        "libstagefright_foundation",
-        "audioclient-types-aidl-cpp",
-        "audioflinger-aidl-cpp",
-        "audiopolicy-aidl-cpp",
-        "audiopolicy-types-aidl-cpp",
-        "capture_state_listener-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "packagemanager_aidl-cpp",
-        "spatializer-aidl-cpp",
-    ],
 
     static_libs: [
-        "libaudiopolicycomponents",
         "framework-permission-aidl-cpp",
     ],
 
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 1bb89df..13e682b 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -27,6 +27,18 @@
 
 /* implementation of the client interface from the policy manager */
 
+status_t AudioPolicyService::AudioPolicyClient::getAudioPolicyConfig(
+        media::AudioPolicyConfig *config)
+{
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == 0) {
+        ALOGW("%s: could not get AudioFlinger", __func__);
+        return PERMISSION_DENIED;
+    }
+
+    return af->getAudioPolicyConfig(config);
+}
+
 audio_module_handle_t AudioPolicyService::AudioPolicyClient::loadHwModule(const char *name)
 {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
@@ -309,13 +321,13 @@
 }
 
 status_t AudioPolicyService::AudioPolicyClient::setDeviceConnectedState(
-        const struct audio_port_v7 *port, bool connected) {
+        const struct audio_port_v7 *port, media::DeviceConnectedState state) {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
     if (af == nullptr) {
         ALOGW("%s: could not get AudioFlinger", __func__);
         return PERMISSION_DENIED;
     }
-    return af->setDeviceConnectedState(port, connected);
+    return af->setDeviceConnectedState(port, state);
 }
 
 status_t AudioPolicyService::AudioPolicyClient::invalidateTracks(
@@ -328,5 +340,4 @@
     return af->invalidateTracks(portIds);
 }
 
-
 } // namespace android
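
setDeviceConnectedState() now forwards a media::DeviceConnectedState instead of a bool, which is what allows a separate "prepare to disconnect" notification (see prepareToDisconnectExternalDevice() in AudioPolicyManager.h above) before a device is actually torn down. The sketch below maps the old boolean onto the new enum; the enumerator names (CONNECTED, PREPARE_TO_DISCONNECT, DISCONNECTED) and the helper are assumptions inferred from that flow, not spelled out in this diff.

    // Assumption: media::DeviceConnectedState declares CONNECTED, PREPARE_TO_DISCONNECT and
    // DISCONNECTED; only the first and last have an equivalent in the old bool-based API.
    #include <android/media/DeviceConnectedState.h>

    namespace android {

    media::DeviceConnectedState toDeviceConnectedState(bool connected) {
        return connected ? media::DeviceConnectedState::CONNECTED
                         : media::DeviceConnectedState::DISCONNECTED;
    }

    }  // namespace android
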
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index c7a60c2..70a1785 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -41,26 +41,25 @@
 // AudioPolicyEffects Implementation
 // ----------------------------------------------------------------------------
 
-AudioPolicyEffects::AudioPolicyEffects()
-{
-    status_t loadResult = loadAudioEffectXmlConfig();
+AudioPolicyEffects::AudioPolicyEffects(const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
+    // Load the effect configuration through the effects factory HAL (XML or AIDL query).
+    status_t loadResult = loadAudioEffectConfig(effectsFactoryHal);
     if (loadResult == NO_ERROR) {
-        mDefaultDeviceEffectFuture = std::async(
-                    std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
+        mDefaultDeviceEffectFuture =
+                std::async(std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
     } else if (loadResult < 0) {
-        ALOGW("Failed to load XML effect configuration, fallback to .conf");
+        ALOGW("Failed to query effect configuration, fallback to load .conf");
         // load automatic audio effect modules
         if (access(AUDIO_EFFECT_VENDOR_CONFIG_FILE, R_OK) == 0) {
-            loadAudioEffectConfig(AUDIO_EFFECT_VENDOR_CONFIG_FILE);
+            loadAudioEffectConfigLegacy(AUDIO_EFFECT_VENDOR_CONFIG_FILE);
         } else if (access(AUDIO_EFFECT_DEFAULT_CONFIG_FILE, R_OK) == 0) {
-            loadAudioEffectConfig(AUDIO_EFFECT_DEFAULT_CONFIG_FILE);
+            loadAudioEffectConfigLegacy(AUDIO_EFFECT_DEFAULT_CONFIG_FILE);
         }
     } else if (loadResult > 0) {
         ALOGE("Effect config is partially invalid, skipped %d elements", loadResult);
     }
 }
 
-
 AudioPolicyEffects::~AudioPolicyEffects()
 {
     size_t i = 0;
@@ -907,30 +906,35 @@
     return NO_ERROR;
 }
 
-status_t AudioPolicyEffects::loadAudioEffectXmlConfig() {
-    auto result = effectsConfig::parse();
-    if (result.parsedConfig == nullptr) {
-        return -ENOENT;
+status_t AudioPolicyEffects::loadAudioEffectConfig(
+        const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
+    if (!effectsFactoryHal) {
+        ALOGE("%s Null EffectsFactoryHalInterface", __func__);
+        return UNEXPECTED_NULL;
+    }
+
+    const auto skippedElements = VALUE_OR_RETURN_STATUS(effectsFactoryHal->getSkippedElements());
+    const auto processings = effectsFactoryHal->getProcessings();
+    if (!processings) {
+        ALOGE("%s Null processings with %zu skipped elements", __func__, skippedElements);
+        return UNEXPECTED_NULL;
     }
 
     auto loadProcessingChain = [](auto& processingChain, auto& streams) {
         for (auto& stream : processingChain) {
             auto effectDescs = std::make_unique<EffectDescVector>();
             for (auto& effect : stream.effects) {
-                effectDescs->mEffects.add(
-                        new EffectDesc{effect.get().name.c_str(), effect.get().uuid});
+                effectDescs->mEffects.add(new EffectDesc{effect->name.c_str(), effect->uuid});
             }
             streams.add(stream.type, effectDescs.release());
         }
     };
 
-    auto loadDeviceProcessingChain = [](auto &processingChain, auto& devicesEffects) {
+    auto loadDeviceProcessingChain = [](auto& processingChain, auto& devicesEffects) {
         for (auto& deviceProcess : processingChain) {
-
             auto effectDescs = std::make_unique<EffectDescVector>();
             for (auto& effect : deviceProcess.effects) {
-                effectDescs->mEffects.add(
-                        new EffectDesc{effect.get().name.c_str(), effect.get().uuid});
+                effectDescs->mEffects.add(new EffectDesc{effect->name.c_str(), effect->uuid});
             }
             auto deviceEffects = std::make_unique<DeviceEffects>(
                         std::move(effectDescs), deviceProcess.type, deviceProcess.address);
@@ -938,17 +942,18 @@
         }
     };
 
-    loadProcessingChain(result.parsedConfig->preprocess, mInputSources);
-    loadProcessingChain(result.parsedConfig->postprocess, mOutputStreams);
+    loadProcessingChain(processings->preprocess, mInputSources);
+    loadProcessingChain(processings->postprocess, mOutputStreams);
+
     {
         Mutex::Autolock _l(mLock);
-        loadDeviceProcessingChain(result.parsedConfig->deviceprocess, mDeviceEffects);
+        loadDeviceProcessingChain(processings->deviceprocess, mDeviceEffects);
     }
-    // Casting from ssize_t to status_t is probably safe, there should not be more than 2^31 errors
-    return result.nbSkippedElement;
+
+    return skippedElements;
 }
 
-status_t AudioPolicyEffects::loadAudioEffectConfig(const char *path)
+status_t AudioPolicyEffects::loadAudioEffectConfigLegacy(const char *path)
 {
     cnode *root;
     char *data;
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index 13d5d0c..9f65a96 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -20,22 +20,33 @@
 #include <stdlib.h>
 #include <stdio.h>
 #include <string.h>
+#include <future>
+
+#include <android-base/thread_annotations.h>
 #include <cutils/misc.h>
 #include <media/AudioEffect.h>
+#include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <system/audio.h>
 #include <utils/Vector.h>
 #include <utils/SortedVector.h>
-#include <android-base/thread_annotations.h>
-
-#include <future>
 
 namespace android {
 
 // ----------------------------------------------------------------------------
 
-// AudioPolicyEffects class
-// This class will manage all effects attached to input and output streams in
-// AudioPolicyService as configured in audio_effects.conf.
+/**
+ * AudioPolicyEffects class.
+ *
+ * This class manages all effects attached to input and output streams in AudioPolicyService.
+ * The effect configurations can be queried in several ways:
+ *
+ * With the HIDL HAL, the configuration file `audio_effects.xml` is loaded by libAudioHal. If
+ * this file does not exist, the AudioPolicyEffects class falls back to loading the configuration
+ * from `/vendor/etc/audio_effects.conf` (AUDIO_EFFECT_VENDOR_CONFIG_FILE). If that file does not
+ * exist either, the configuration is loaded from `/system/etc/audio_effects.conf`.
+ *
+ * With the AIDL HAL, the configuration is queried via `IFactory::queryProcessing()`.
+ */
 class AudioPolicyEffects : public RefBase
 {
 
@@ -44,7 +55,7 @@
     // The constructor will parse audio_effects.conf
     // First it will look whether vendor specific file exists,
     // otherwise it will parse the system default file.
-	         AudioPolicyEffects();
+    explicit AudioPolicyEffects(const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
     virtual ~AudioPolicyEffects();
 
     // NOTE: methods on AudioPolicyEffects should never be called with the AudioPolicyService
@@ -218,7 +229,6 @@
 
     };
 
-
     static const char * const kInputSourceNames[AUDIO_SOURCE_CNT -1];
     static audio_source_t inputSourceNameToEnum(const char *name);
 
@@ -226,8 +236,8 @@
     audio_stream_type_t streamNameToEnum(const char *name);
 
     // Parse audio_effects.conf
-    status_t loadAudioEffectConfig(const char *path); // TODO: add legacy in the name
-    status_t loadAudioEffectXmlConfig(); // TODO: remove "Xml" in the name
+    status_t loadAudioEffectConfigLegacy(const char *path);
+    status_t loadAudioEffectConfig(const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
 
     // Load all effects descriptors in configuration file
     status_t loadEffects(cnode *root, Vector <EffectDesc *>& effects);
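
The class comment above spells out the lookup order for the HIDL case: the XML file is handled inside the audio HAL library, and only if that query fails does the class reach for the vendor and then the system `.conf` file. Below is a compact, self-contained sketch of that ordering; loadEffectConfig(), loadFromHal and loadLegacyConf are hypothetical stand-ins for the constructor logic and for loadAudioEffectConfig() / loadAudioEffectConfigLegacy().

    // Self-contained sketch of the fallback order described above; the two loader callbacks are
    // stand-ins for loadAudioEffectConfig() / loadAudioEffectConfigLegacy().
    #include <unistd.h>
    #include <functional>

    // AUDIO_EFFECT_VENDOR_CONFIG_FILE / AUDIO_EFFECT_DEFAULT_CONFIG_FILE in the real code.
    static constexpr char kVendorConf[] = "/vendor/etc/audio_effects.conf";
    static constexpr char kSystemConf[] = "/system/etc/audio_effects.conf";

    int loadEffectConfig(const std::function<int()>& loadFromHal,
                         const std::function<int(const char*)>& loadLegacyConf) {
        int result = loadFromHal();      // XML via libaudiohal (HIDL) or queryProcessing() (AIDL)
        if (result >= 0) return result;  // 0: success; > 0: number of skipped elements
        if (access(kVendorConf, R_OK) == 0) return loadLegacyConf(kVendorConf);
        if (access(kSystemConf, R_OK) == 0) return loadLegacyConf(kSystemConf);
        return result;
    }
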
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 35411f9..5d86e7c 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -301,7 +301,8 @@
     audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
 
-    if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
+    if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT
+          && stream != AUDIO_STREAM_ASSISTANT && stream != AUDIO_STREAM_CALL_ASSISTANT) {
         *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
             legacy2aidl_audio_io_handle_t_int32_t(AUDIO_IO_HANDLE_NONE));
         return Status::ok();
@@ -317,7 +318,7 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::getOutputForAttr(const media::AudioAttributesInternal& attrAidl,
+Status AudioPolicyService::getOutputForAttr(const media::audio::common::AudioAttributes& attrAidl,
                                             int32_t sessionAidl,
                                             const AttributionSourceState& attributionSource,
                                             const AudioConfig& configAidl,
@@ -326,7 +327,7 @@
                                             media::GetOutputForAttrResponse* _aidl_return)
 {
     audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     audio_session_t session = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_session_t(sessionAidl));
     audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
@@ -439,7 +440,7 @@
         _aidl_return->isSpatialized = isSpatialized;
         _aidl_return->isBitPerfect = isBitPerfect;
         _aidl_return->attr = VALUE_OR_RETURN_BINDER_STATUS(
-                legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+                legacy2aidl_audio_attributes_t_AudioAttributes(attr));
     } else {
         _aidl_return->configBase.format = VALUE_OR_RETURN_BINDER_STATUS(
                 legacy2aidl_audio_format_t_AudioFormatDescription(config.format));
@@ -475,14 +476,14 @@
     }
     ALOGV("startOutput()");
     sp<AudioPlaybackClient> client;
-    sp<AudioPolicyEffects>audioPolicyEffects;
+    sp<AudioPolicyEffects> audioPolicyEffects;
 
     getPlaybackClientAndEffects(portId, client, audioPolicyEffects, __func__);
 
     if (audioPolicyEffects != 0) {
         // create audio processors according to stream
-        status_t status = audioPolicyEffects->addOutputSessionEffects(
-            client->io, client->stream, client->session);
+        status_t status = audioPolicyEffects->addOutputSessionEffects(client->io, client->stream,
+                                                                      client->session);
         if (status != NO_ERROR && status != ALREADY_EXISTS) {
             ALOGW("Failed to add effects on session %d", client->session);
         }
@@ -574,7 +575,7 @@
     mAudioPolicyManager->releaseOutput(portId);
 }
 
-Status AudioPolicyService::getInputForAttr(const media::AudioAttributesInternal& attrAidl,
+Status AudioPolicyService::getInputForAttr(const media::audio::common::AudioAttributes& attrAidl,
                                            int32_t inputAidl,
                                            int32_t riidAidl,
                                            int32_t sessionAidl,
@@ -584,7 +585,7 @@
                                            int32_t selectedDeviceIdAidl,
                                            media::GetInputForAttrResponse* _aidl_return) {
     audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     audio_io_handle_t input = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_io_handle_t(inputAidl));
     audio_unique_id_t riid = VALUE_OR_RETURN_BINDER_STATUS(
@@ -817,8 +818,29 @@
 
     Mutex::Autolock _l(mLock);
 
+    ALOGW_IF(client->silenced, "startInput on silenced input for port %d, uid %d. Unsilencing.",
+            portIdAidl,
+            client->attributionSource.uid);
+
+    if (client->active) {
+        ALOGE("Client should never be active before startInput. Uid %d port %d",
+                client->attributionSource.uid, portId);
+        finishRecording(client->attributionSource, client->attributes.source);
+        return binderStatusFromStatusT(INVALID_OPERATION);
+    }
+
+    // Force the possibly silenced client to be unsilenced since we just called
+    // startRecording (i.e. we have assumed it is unsilenced).
+    // At this point in time, the client is inactive, so no calls to appops are sent in
+    // setAppState_l.
+    // This ensures existing clients have the same behavior as new clients (starting unsilenced).
+    // TODO(b/282076713)
+    setAppState_l(client, APP_STATE_TOP);
+
     client->active = true;
     client->startTimeNs = systemTime();
+    // This call updates the silenced state, and since we are active, appropriately notifies appops
+    // if we silence the track.
     updateUidStates_l();
 
     status_t status;
@@ -1051,10 +1073,10 @@
 }
 
 Status AudioPolicyService::setVolumeIndexForAttributes(
-        const media::AudioAttributesInternal& attrAidl,
+        const media::audio::common::AudioAttributes& attrAidl,
         const AudioDeviceDescription& deviceAidl, int32_t indexAidl) {
     audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     int index = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int>(indexAidl));
     audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioDeviceDescription_audio_devices_t(deviceAidl));
@@ -1074,10 +1096,10 @@
 }
 
 Status AudioPolicyService::getVolumeIndexForAttributes(
-        const media::AudioAttributesInternal& attrAidl,
+        const media::audio::common::AudioAttributes& attrAidl,
         const AudioDeviceDescription& deviceAidl, int32_t* _aidl_return) {
     audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioDeviceDescription_audio_devices_t(deviceAidl));
     int index;
@@ -1096,9 +1118,9 @@
 }
 
 Status AudioPolicyService::getMinVolumeIndexForAttributes(
-        const media::AudioAttributesInternal& attrAidl, int32_t* _aidl_return) {
+        const media::audio::common::AudioAttributes& attrAidl, int32_t* _aidl_return) {
     audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     int index;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             AudioValidator::validateAudioAttributes(attributes, "169572641")));
@@ -1115,9 +1137,9 @@
 }
 
 Status AudioPolicyService::getMaxVolumeIndexForAttributes(
-        const media::AudioAttributesInternal& attrAidl, int32_t* _aidl_return) {
+        const media::audio::common::AudioAttributes& attrAidl, int32_t* _aidl_return) {
     audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     int index;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             AudioValidator::validateAudioAttributes(attributes, "169572641")));
@@ -1155,12 +1177,13 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::getDevicesForAttributes(const media::AudioAttributesInternal& attrAidl,
-                                                   bool forVolume,
-                                                   std::vector<AudioDevice>* _aidl_return)
+Status AudioPolicyService::getDevicesForAttributes(
+        const media::audio::common::AudioAttributes& attrAidl,
+        bool forVolume,
+        std::vector<AudioDevice>* _aidl_return)
 {
     audio_attributes_t aa = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     AudioDeviceTypeAddrVector devices;
 
     if (mAudioPolicyManager == NULL) {
@@ -1485,12 +1508,12 @@
 
 Status AudioPolicyService::isDirectOutputSupported(
         const AudioConfigBase& configAidl,
-        const media::AudioAttributesInternal& attributesAidl,
+        const media::audio::common::AudioAttributes& attributesAidl,
         bool* _aidl_return) {
     audio_config_base_t config = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioConfigBase_audio_config_base_t(configAidl, false /*isInput*/));
     audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attributesAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attributesAidl));
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             AudioValidator::validateAudioAttributes(attributes, "169572641")));
 
@@ -1540,6 +1563,17 @@
     return Status::ok();
 }
 
+Status AudioPolicyService::listDeclaredDevicePorts(media::AudioPortRole role,
+                                                    std::vector<media::AudioPortFw>* _aidl_return) {
+    Mutex::Autolock _l(mLock);
+    if (mAudioPolicyManager == NULL) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    AutoCallerClear acc;
+    return binderStatusFromStatusT(mAudioPolicyManager->listDeclaredDevicePorts(
+                    role, _aidl_return));
+}
+
 Status AudioPolicyService::getAudioPort(int portId,
                                         media::AudioPortFw* _aidl_return) {
     audio_port_v7 port{ .id = portId };
@@ -1796,12 +1830,12 @@
 }
 
 Status AudioPolicyService::startAudioSource(const media::AudioPortConfigFw& sourceAidl,
-                                            const media::AudioAttributesInternal& attributesAidl,
-                                            int32_t* _aidl_return) {
+        const media::audio::common::AudioAttributes& attributesAidl,
+        int32_t* _aidl_return) {
     audio_port_config source = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioPortConfigFw_audio_port_config(sourceAidl));
     audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attributesAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attributesAidl));
     audio_port_handle_t portId;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             AudioValidator::validateAudioPortConfig(source)));
@@ -2074,10 +2108,10 @@
 }
 
 Status AudioPolicyService::getProductStrategyFromAudioAttributes(
-        const media::AudioAttributesInternal& aaAidl,
+        const media::audio::common::AudioAttributes& aaAidl,
         bool fallbackOnDefault, int32_t* _aidl_return) {
     audio_attributes_t aa = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(aaAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(aaAidl));
     product_strategy_t productStrategy;
 
     if (mAudioPolicyManager == NULL) {
@@ -2108,10 +2142,10 @@
 }
 
 Status AudioPolicyService::getVolumeGroupFromAudioAttributes(
-        const media::AudioAttributesInternal& aaAidl,
+        const media::audio::common::AudioAttributes& aaAidl,
         bool fallbackOnDefault, int32_t* _aidl_return) {
     audio_attributes_t aa = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(aaAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(aaAidl));
     volume_group_t volumeGroup;
 
     if (mAudioPolicyManager == NULL) {
@@ -2348,7 +2382,7 @@
 }
 
 Status AudioPolicyService::canBeSpatialized(
-        const std::optional<media::AudioAttributesInternal>& attrAidl,
+        const std::optional<media::audio::common::AudioAttributes>& attrAidl,
         const std::optional<AudioConfig>& configAidl,
         const std::vector<AudioDevice>& devicesAidl,
         bool* _aidl_return) {
@@ -2358,7 +2392,7 @@
     audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
     if (attrAidl.has_value()) {
         attr = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl.value()));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl.value()));
     }
     audio_config_t config = AUDIO_CONFIG_INITIALIZER;
     if (configAidl.has_value()) {
@@ -2375,9 +2409,10 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::getDirectPlaybackSupport(const media::AudioAttributesInternal &attrAidl,
-                                                    const AudioConfig &configAidl,
-                                                    media::AudioDirectMode *_aidl_return) {
+Status AudioPolicyService::getDirectPlaybackSupport(
+        const media::audio::common::AudioAttributes &attrAidl,
+        const AudioConfig &configAidl,
+        media::AudioDirectMode *_aidl_return) {
     if (mAudioPolicyManager == nullptr) {
         return binderStatusFromStatusT(NO_INIT);
     }
@@ -2385,7 +2420,7 @@
         return binderStatusFromStatusT(BAD_VALUE);
     }
     audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     audio_config_t config = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioConfig_audio_config_t(configAidl, false /*isInput*/));
     Mutex::Autolock _l(mLock);
@@ -2396,13 +2431,13 @@
 }
 
 Status AudioPolicyService::getDirectProfilesForAttributes(
-                                const media::AudioAttributesInternal& attrAidl,
+                                const media::audio::common::AudioAttributes& attrAidl,
                                 std::vector<media::audio::common::AudioProfile>* _aidl_return) {
    if (mAudioPolicyManager == nullptr) {
         return binderStatusFromStatusT(NO_INIT);
     }
     audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     AudioProfileVector audioProfiles;
 
     Mutex::Autolock _l(mLock);
@@ -2437,7 +2472,7 @@
 }
 
 Status AudioPolicyService::setPreferredMixerAttributes(
-        const media::AudioAttributesInternal& attrAidl,
+        const media::audio::common::AudioAttributes& attrAidl,
         int32_t portIdAidl,
         int32_t uidAidl,
         const media::AudioMixerAttributesInternal& mixerAttrAidl) {
@@ -2446,7 +2481,7 @@
     }
 
     audio_attributes_t  attr = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     audio_mixer_attributes_t mixerAttr = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioMixerAttributesInternal_audio_mixer_attributes_t(mixerAttrAidl));
     uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
@@ -2459,7 +2494,7 @@
 }
 
 Status AudioPolicyService::getPreferredMixerAttributes(
-        const media::AudioAttributesInternal& attrAidl,
+        const media::audio::common::AudioAttributes& attrAidl,
         int32_t portIdAidl,
         std::optional<media::AudioMixerAttributesInternal>* _aidl_return) {
     if (mAudioPolicyManager == nullptr) {
@@ -2467,7 +2502,7 @@
     }
 
     audio_attributes_t  attr = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
 
@@ -2482,7 +2517,7 @@
 }
 
 Status AudioPolicyService::clearPreferredMixerAttributes(
-        const media::AudioAttributesInternal& attrAidl,
+        const media::audio::common::AudioAttributes& attrAidl,
         int32_t portIdAidl,
         int32_t uidAidl) {
     if (mAudioPolicyManager == nullptr) {
@@ -2490,7 +2525,7 @@
     }
 
     audio_attributes_t  attr = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+            aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
     audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 974ae38..234104d 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -46,6 +46,7 @@
 
 #include <system/audio.h>
 #include <system/audio_policy.h>
+#include <AudioPolicyConfig.h>
 #include <AudioPolicyManager.h>
 
 namespace android {
@@ -185,7 +186,23 @@
 
 static AudioPolicyInterface* createAudioPolicyManager(AudioPolicyClientInterface *clientInterface)
 {
-    AudioPolicyManager *apm = new AudioPolicyManager(clientInterface);
+    AudioPolicyManager *apm = nullptr;
+    media::AudioPolicyConfig apmConfig;
+    if (status_t status = clientInterface->getAudioPolicyConfig(&apmConfig); status == OK) {
+        auto config = AudioPolicyConfig::loadFromApmAidlConfigWithFallback(apmConfig);
+        LOG_ALWAYS_FATAL_IF(config->getEngineLibraryNameSuffix() !=
+                AudioPolicyConfig::kDefaultEngineLibraryNameSuffix,
+                "Only default engine is currently supported with the AIDL HAL");
+        apm = new AudioPolicyManager(config,
+                loadApmEngineLibraryAndCreateEngine(
+                        config->getEngineLibraryNameSuffix(), apmConfig.engineConfig),
+                clientInterface);
+    } else {
+        auto config = AudioPolicyConfig::loadFromApmXmlConfigWithFallback();  // This can't fail.
+        apm = new AudioPolicyManager(config,
+                loadApmEngineLibraryAndCreateEngine(config->getEngineLibraryNameSuffix()),
+                clientInterface);
+    }
     status_t status = apm->initialize();
     if (status != NO_ERROR) {
         delete apm;
@@ -259,7 +276,8 @@
     }
 
     // load audio processing modules
-    sp<AudioPolicyEffects> audioPolicyEffects = new AudioPolicyEffects();
+    const sp<EffectsFactoryHalInterface> effectsFactoryHal = EffectsFactoryHalInterface::create();
+    sp<AudioPolicyEffects> audioPolicyEffects = new AudioPolicyEffects(effectsFactoryHal);
     sp<UidPolicy> uidPolicy = new UidPolicy(this);
     sp<SensorPrivacyPolicy> sensorPrivacyPolicy = new SensorPrivacyPolicy(this);
     {
@@ -278,7 +296,7 @@
         AudioDeviceTypeAddrVector devices;
         bool hasSpatializer = mAudioPolicyManager->canBeSpatialized(&attr, nullptr, devices);
         if (hasSpatializer) {
-            mSpatializer = Spatializer::create(this);
+            mSpatializer = Spatializer::create(this, effectsFactoryHal);
         }
         if (mSpatializer == nullptr) {
             // No spatializer created, signal the reason: NO_INIT a failure, OK means intended.
@@ -1694,7 +1712,7 @@
     }
 }
 
-void AudioPolicyService::UidPolicy::onUidProcAdjChanged(uid_t uid __unused) {
+void AudioPolicyService::UidPolicy::onUidProcAdjChanged(uid_t uid __unused, int32_t adj __unused) {
 }
 
 void AudioPolicyService::UidPolicy::updateOverrideUid(uid_t uid, bool active, bool insert) {
@@ -1838,14 +1856,12 @@
 void AudioPolicyService::SensorPrivacyPolicy::registerSelf() {
     SensorPrivacyManager spm;
     mSensorPrivacyEnabled = spm.isSensorPrivacyEnabled();
-    (void)spm.addToggleSensorPrivacyListener(this);
     spm.addSensorPrivacyListener(this);
 }
 
 void AudioPolicyService::SensorPrivacyPolicy::unregisterSelf() {
     SensorPrivacyManager spm;
     spm.removeSensorPrivacyListener(this);
-    spm.removeToggleSensorPrivacyListener(this);
 }
 
 bool AudioPolicyService::SensorPrivacyPolicy::isSensorPrivacyEnabled() {
@@ -1914,6 +1930,7 @@
     // since it controls the mic permission for legacy apps.
     mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
         mAttributionSource.packageName.value_or(""))),
+        AppOpsManager::WATCH_FOREGROUND_CHANGES,
         mOpCallback);
 }
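
The effects factory HAL is now created once by the service and the same handle is passed to both AudioPolicyEffects and Spatializer::create(), instead of the spatializer opening its own instance. A minimal sketch of that wiring follows; initEffectsAndSpatializer() is a hypothetical free function (the real code does this inline in the service) and the in-tree headers are assumed.

    // Hypothetical helper mirroring the wiring above: one EffectsFactoryHalInterface instance,
    // shared by both consumers.
    #include <media/audiohal/EffectsFactoryHalInterface.h>
    #include "AudioPolicyEffects.h"
    #include "Spatializer.h"

    namespace android {

    void initEffectsAndSpatializer(SpatializerPolicyCallback* callback,
                                   sp<AudioPolicyEffects>* outEffects,
                                   sp<Spatializer>* outSpatializer) {
        const sp<EffectsFactoryHalInterface> factory = EffectsFactoryHalInterface::create();
        *outEffects = new AudioPolicyEffects(factory);
        // May return null if no spatializer effect is available, exactly as in create() above.
        *outSpatializer = Spatializer::create(callback, factory);
    }

    }  // namespace android
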
 
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 31d5249..d0cde64 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -98,7 +98,8 @@
     binder::Status getForceUse(media::AudioPolicyForceUse usage,
                                media::AudioPolicyForcedConfig* _aidl_return) override;
     binder::Status getOutput(AudioStreamType stream, int32_t* _aidl_return) override;
-    binder::Status getOutputForAttr(const media::AudioAttributesInternal& attr, int32_t session,
+    binder::Status getOutputForAttr(const media::audio::common::AudioAttributes& attr,
+                                    int32_t session,
                                     const AttributionSourceState &attributionSource,
                                     const AudioConfig& config,
                                     int32_t flags, int32_t selectedDeviceId,
@@ -106,7 +107,7 @@
     binder::Status startOutput(int32_t portId) override;
     binder::Status stopOutput(int32_t portId) override;
     binder::Status releaseOutput(int32_t portId) override;
-    binder::Status getInputForAttr(const media::AudioAttributesInternal& attr, int32_t input,
+    binder::Status getInputForAttr(const media::audio::common::AudioAttributes& attr, int32_t input,
                                    int32_t riid, int32_t session,
                                    const AttributionSourceState &attributionSource,
                                    const AudioConfigBase& config, int32_t flags,
@@ -123,19 +124,19 @@
     binder::Status getStreamVolumeIndex(AudioStreamType stream,
                                         const AudioDeviceDescription& device,
                                         int32_t* _aidl_return) override;
-    binder::Status setVolumeIndexForAttributes(const media::AudioAttributesInternal& attr,
+    binder::Status setVolumeIndexForAttributes(const media::audio::common::AudioAttributes& attr,
                                                const AudioDeviceDescription& device,
                                                int32_t index) override;
-    binder::Status getVolumeIndexForAttributes(const media::AudioAttributesInternal& attr,
+    binder::Status getVolumeIndexForAttributes(const media::audio::common::AudioAttributes& attr,
                                                const AudioDeviceDescription& device,
                                                int32_t* _aidl_return) override;
-    binder::Status getMaxVolumeIndexForAttributes(const media::AudioAttributesInternal& attr,
+    binder::Status getMaxVolumeIndexForAttributes(const media::audio::common::AudioAttributes& attr,
                                                   int32_t* _aidl_return) override;
-    binder::Status getMinVolumeIndexForAttributes(const media::AudioAttributesInternal& attr,
+    binder::Status getMinVolumeIndexForAttributes(const media::audio::common::AudioAttributes& attr,
                                                   int32_t* _aidl_return) override;
     binder::Status getStrategyForStream(AudioStreamType stream,
                                         int32_t* _aidl_return) override;
-    binder::Status getDevicesForAttributes(const media::AudioAttributesInternal& attr,
+    binder::Status getDevicesForAttributes(const media::audio::common::AudioAttributes& attr,
                                            bool forVolume,
                                            std::vector<AudioDevice>* _aidl_return) override;
     binder::Status getOutputForEffect(const media::EffectDescriptor& desc,
@@ -170,11 +171,13 @@
     binder::Status getOffloadSupport(const media::audio::common::AudioOffloadInfo& info,
                                      media::AudioOffloadMode* _aidl_return) override;
     binder::Status isDirectOutputSupported(const AudioConfigBase& config,
-                                           const media::AudioAttributesInternal& attributes,
+                                           const media::audio::common::AudioAttributes& attributes,
                                            bool* _aidl_return) override;
     binder::Status listAudioPorts(media::AudioPortRole role, media::AudioPortType type,
                                   Int* count, std::vector<media::AudioPortFw>* ports,
                                   int32_t* _aidl_return) override;
+    binder::Status listDeclaredDevicePorts(media::AudioPortRole role,
+                                           std::vector<media::AudioPortFw>* _aidl_return) override;
     binder::Status getAudioPort(int portId,
                                 media::AudioPortFw* _aidl_return) override;
     binder::Status createAudioPatch(const media::AudioPatchFw& patch, int32_t handle,
@@ -199,7 +202,7 @@
             const std::vector<AudioDevice>& devices) override;
     binder::Status removeUserIdDeviceAffinities(int32_t userId) override;
     binder::Status startAudioSource(const media::AudioPortConfigFw& source,
-                                    const media::AudioAttributesInternal& attributes,
+                                    const media::audio::common::AudioAttributes& attributes,
                                     int32_t* _aidl_return) override;
     binder::Status stopAudioSource(int32_t portId) override;
     binder::Status setMasterMono(bool mono) override;
@@ -226,14 +229,16 @@
     binder::Status isHotwordStreamSupported(bool lookbackAudio, bool* _aidl_return) override;
     binder::Status listAudioProductStrategies(
             std::vector<media::AudioProductStrategy>* _aidl_return) override;
-    binder::Status getProductStrategyFromAudioAttributes(const media::AudioAttributesInternal& aa,
-                                                         bool fallbackOnDefault,
-                                                         int32_t* _aidl_return) override;
+    binder::Status getProductStrategyFromAudioAttributes(
+            const media::audio::common::AudioAttributes& aa,
+            bool fallbackOnDefault,
+            int32_t* _aidl_return) override;
     binder::Status listAudioVolumeGroups(
             std::vector<media::AudioVolumeGroup>* _aidl_return) override;
-    binder::Status getVolumeGroupFromAudioAttributes(const media::AudioAttributesInternal& aa,
-                                                     bool fallbackOnDefault,
-                                                     int32_t* _aidl_return) override;
+    binder::Status getVolumeGroupFromAudioAttributes(
+            const media::audio::common::AudioAttributes& aa,
+            bool fallbackOnDefault,
+            int32_t* _aidl_return) override;
     binder::Status setRttEnabled(bool enabled) override;
     binder::Status isCallScreenModeSupported(bool* _aidl_return) override;
     binder::Status setDevicesRoleForStrategy(
@@ -272,31 +277,31 @@
     binder::Status getSpatializer(const sp<media::INativeSpatializerCallback>& callback,
             media::GetSpatializerResponse* _aidl_return) override;
     binder::Status canBeSpatialized(
-            const std::optional<media::AudioAttributesInternal>& attr,
+            const std::optional<media::audio::common::AudioAttributes>& attr,
             const std::optional<AudioConfig>& config,
             const std::vector<AudioDevice>& devices,
             bool* _aidl_return) override;
 
-    binder::Status getDirectPlaybackSupport(const media::AudioAttributesInternal& attr,
+    binder::Status getDirectPlaybackSupport(const media::audio::common::AudioAttributes& attr,
                                             const AudioConfig& config,
                                             media::AudioDirectMode* _aidl_return) override;
 
-    binder::Status getDirectProfilesForAttributes(const media::AudioAttributesInternal& attr,
+    binder::Status getDirectProfilesForAttributes(const media::audio::common::AudioAttributes& attr,
                         std::vector<media::audio::common::AudioProfile>* _aidl_return) override;
 
     binder::Status getSupportedMixerAttributes(
             int32_t portId,
             std::vector<media::AudioMixerAttributesInternal>* _aidl_return) override;
     binder::Status setPreferredMixerAttributes(
-            const media::AudioAttributesInternal& attr,
+            const media::audio::common::AudioAttributes& attr,
             int32_t portId,
             int32_t uid,
             const media::AudioMixerAttributesInternal& mixerAttr) override;
     binder::Status getPreferredMixerAttributes(
-            const media::AudioAttributesInternal& attr,
+            const media::audio::common::AudioAttributes& attr,
             int32_t portId,
             std::optional<media::AudioMixerAttributesInternal>* _aidl_return) override;
-    binder::Status clearPreferredMixerAttributes(const media::AudioAttributesInternal& attr,
+    binder::Status clearPreferredMixerAttributes(const media::audio::common::AudioAttributes& attr,
                                                  int32_t portId,
                                                  int32_t uid) override;
 
@@ -482,7 +487,7 @@
         void onUidIdle(uid_t uid, bool disabled) override;
         void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
                 int32_t capability) override;
-        void onUidProcAdjChanged(uid_t uid) override;
+        void onUidProcAdjChanged(uid_t uid, int32_t adj) override;
 
         void addOverrideUid(uid_t uid, bool active) { updateOverrideUid(uid, active, true); }
         void removeOverrideUid(uid_t uid) { updateOverrideUid(uid, false, false); }
@@ -733,6 +738,8 @@
         explicit AudioPolicyClient(AudioPolicyService *service) : mAudioPolicyService(service) {}
         virtual ~AudioPolicyClient() {}
 
+        virtual status_t getAudioPolicyConfig(media::AudioPolicyConfig *config);
+
         //
         // Audio HW module functions
         //
@@ -845,7 +852,7 @@
                 const TrackSecondaryOutputsMap& trackSecondaryOutputs) override;
 
         status_t setDeviceConnectedState(
-                const struct audio_port_v7 *port, bool connected) override;
+                const struct audio_port_v7 *port, media::DeviceConnectedState state) override;
 
         status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
 
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 95b8e7c..1245b1e 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -30,7 +30,6 @@
 #include <audio_utils/fixedfft.h>
 #include <cutils/bitops.h>
 #include <hardware/sensors.h>
-#include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/MediaMetricsItem.h>
@@ -215,18 +214,17 @@
 };
 
 // ---------------------------------------------------------------------------
-sp<Spatializer> Spatializer::create(SpatializerPolicyCallback *callback) {
+sp<Spatializer> Spatializer::create(SpatializerPolicyCallback* callback,
+                                    const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
     sp<Spatializer> spatializer;
 
-    sp<EffectsFactoryHalInterface> effectsFactoryHal = EffectsFactoryHalInterface::create();
     if (effectsFactoryHal == nullptr) {
         ALOGW("%s failed to create effect factory interface", __func__);
         return spatializer;
     }
 
     std::vector<effect_descriptor_t> descriptors;
-    status_t status =
-            effectsFactoryHal->getDescriptors(FX_IID_SPATIALIZER, &descriptors);
+    status_t status = effectsFactoryHal->getDescriptors(FX_IID_SPATIALIZER, &descriptors);
     if (status != NO_ERROR) {
         ALOGW("%s failed to get spatializer descriptor, error %d", __func__, status);
         return spatializer;
@@ -238,18 +236,15 @@
     sp<EffectHalInterface> effect;
     status = effectsFactoryHal->createEffect(&descriptors[0].uuid, AUDIO_SESSION_OUTPUT_STAGE,
             AUDIO_IO_HANDLE_NONE, AUDIO_PORT_HANDLE_NONE, &effect);
-    ALOGI("%s FX create status %d effect ID %" PRId64, __func__, status,
-          effect ? effect->effectId() : 0);
+    ALOGI("%s FX create status %d effect %p", __func__, status, effect.get());
 
     if (status == NO_ERROR && effect != nullptr) {
         spatializer = new Spatializer(descriptors[0], callback);
         if (spatializer->loadEngineConfiguration(effect) != NO_ERROR) {
             spatializer.clear();
-            ALOGW("%s loadEngine error: %d  effect Id %" PRId64,
-                    __func__, status, effect ? effect->effectId() : 0);
+            ALOGW("%s loadEngine error: %d  effect %p", __func__, status, effect.get());
         } else {
-            spatializer->mLocalLog.log("%s with effect Id %" PRId64, __func__,
-                                       effect ? effect->effectId() : 0);
+            spatializer->mLocalLog.log("%s with effect %p", __func__, effect.get());
         }
     }
 
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 23de0c0..0d4d3f6 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -27,6 +27,7 @@
 #include <audio_utils/SimpleLog.h>
 #include <math.h>
 #include <media/AudioEffect.h>
+#include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <media/VectorRecorder.h>
 #include <media/audiohal/EffectHalInterface.h>
 #include <media/stagefright/foundation/ALooper.h>
@@ -95,7 +96,8 @@
                     private SpatializerPoseController::Listener,
                     public virtual AudioSystem::SupportedLatencyModesCallback {
   public:
-    static sp<Spatializer> create(SpatializerPolicyCallback *callback);
+    static sp<Spatializer> create(SpatializerPolicyCallback* callback,
+                                  const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
 
            ~Spatializer() override;
 
diff --git a/services/audiopolicy/service/SpatializerPoseController.h b/services/audiopolicy/service/SpatializerPoseController.h
index 9d78188..7fa4f86 100644
--- a/services/audiopolicy/service/SpatializerPoseController.h
+++ b/services/audiopolicy/service/SpatializerPoseController.h
@@ -121,9 +121,7 @@
     mutable std::timed_mutex mMutex;
     Listener* const mListener;
     const std::chrono::microseconds mSensorPeriod;
-    // Order matters for the following two members to ensure correct destruction.
     std::unique_ptr<media::HeadTrackingProcessor> mProcessor;
-    std::unique_ptr<media::SensorPoseProvider> mPoseProvider;
     int32_t mHeadSensor = media::SensorPoseProvider::INVALID_HANDLE;
     int32_t mScreenSensor = media::SensorPoseProvider::INVALID_HANDLE;
     std::optional<media::HeadTrackingMode> mActualMode;
@@ -146,6 +144,9 @@
         4 /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
         { 3 } /* delimiterIdx */};
 
+    // Next-to-last variable, as releasing this stops the callbacks.
+    std::unique_ptr<media::SensorPoseProvider> mPoseProvider;
+
     // It's important that mThread is the last variable in this class
     // since we starts mThread in initializer list
     std::thread mThread;
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index bf82680..a4a0cd4 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -19,19 +19,20 @@
     ],
 
     shared_libs: [
+        "framework-permission-aidl-cpp",
         "libaudioclient",
         "libaudiofoundation",
         "libaudiopolicy",
         "libaudiopolicymanagerdefault",
         "libbase",
+        "libbinder",
+        "libcutils",
         "libhidlbase",
         "liblog",
         "libmedia_helper",
         "libutils",
         "libcutils",
         "libxml2",
-        "framework-permission-aidl-cpp",
-        "libbinder",
     ],
 
     static_libs: [
@@ -73,21 +74,22 @@
     require_root: true,
 
     shared_libs: [
-        "libaudiofoundation",
+        "audioclient-types-aidl-cpp",
         "libaudioclient",
+        "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
+        "libaudiopolicycomponents",
         "libaudiopolicymanagerdefault",
+        "libcutils",
         "liblog",
         "libmedia_helper",
-        "libutils",
-        "libaudioclient_aidl_conversion",
-        "libstagefright_foundation",
         "libshmemcompat",
         "libshmemutil",
-        "audioclient-types-aidl-cpp",
+        "libstagefright_foundation",
+        "libutils",
+        "libxml2",
     ],
 
-    static_libs: ["libaudiopolicycomponents"],
-
     header_libs: [
         "libaudiopolicyengine_interface_headers",
         "libaudiopolicymanager_interface_headers",
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 6eca7cc..3629c16 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -103,10 +103,11 @@
         ++mAudioPortListUpdateCount;
     }
 
-    status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) override {
-        if (connected) {
+    status_t setDeviceConnectedState(const struct audio_port_v7 *port,
+                                     media::DeviceConnectedState state) override {
+        if (state == media::DeviceConnectedState::CONNECTED) {
             mConnectedDevicePorts.push_back(*port);
-        } else {
+        } else if (state == media::DeviceConnectedState::DISCONNECTED) {
             mDisconnectedDevicePorts.push_back(*port);
         }
         return NO_ERROR;
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index 0c04e35..2ae0e97 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -25,6 +25,9 @@
     virtual ~AudioPolicyTestClient() = default;
 
     // AudioPolicyClientInterface Implementation
+    status_t getAudioPolicyConfig(media::AudioPolicyConfig* /*config*/) override {
+        return INVALID_OPERATION;
+    }
     audio_module_handle_t loadHwModule(const char* /*name*/) override {
         return AUDIO_MODULE_HANDLE_NONE;
     }
@@ -96,8 +99,8 @@
             const TrackSecondaryOutputsMap& trackSecondaryOutputs __unused) override {
         return NO_INIT;
     }
-    status_t setDeviceConnectedState(
-            const struct audio_port_v7 *port __unused, bool connected __unused) override {
+    status_t setDeviceConnectedState(const struct audio_port_v7 *port __unused,
+                                     media::DeviceConnectedState state __unused) override {
         return NO_INIT;
     }
     status_t invalidateTracks(const std::vector<audio_port_handle_t>& /*portIds*/) override {
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index 2a7a060..31ee252 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -22,9 +22,13 @@
 class AudioPolicyTestManager : public AudioPolicyManager {
   public:
     explicit AudioPolicyTestManager(AudioPolicyClientInterface *clientInterface)
-            : AudioPolicyManager(clientInterface, true /*forTesting*/) { }
+            : AudioPolicyTestManager(AudioPolicyConfig::createDefault(), clientInterface) {}
+    AudioPolicyTestManager(const sp<const AudioPolicyConfig>& config,
+            AudioPolicyClientInterface *clientInterface)
+            : AudioPolicyManager(config,
+                    loadApmEngineLibraryAndCreateEngine(config->getEngineLibraryNameSuffix()),
+                    clientInterface) {}
     using AudioPolicyManager::getConfig;
-    using AudioPolicyManager::loadConfig;
     using AudioPolicyManager::initialize;
     using AudioPolicyManager::getOutputs;
     using AudioPolicyManager::getAvailableOutputDevices;
diff --git a/services/audiopolicy/tests/audio_health_tests.cpp b/services/audiopolicy/tests/audio_health_tests.cpp
index 798332c..70a3022 100644
--- a/services/audiopolicy/tests/audio_health_tests.cpp
+++ b/services/audiopolicy/tests/audio_health_tests.cpp
@@ -21,6 +21,7 @@
 
 #include <gtest/gtest.h>
 
+#include <AudioPolicyConfig.h>
 #include <media/AudioSystem.h>
 #include <media/TypeConverter.h>
 #include <system/audio.h>
@@ -65,19 +66,17 @@
     }
     free(audioPorts);
 
-    AudioPolicyManagerTestClient client;
-    AudioPolicyTestManager manager(&client);
-    manager.loadConfig();
-    ASSERT_NE("AudioPolicyConfig::setDefault", manager.getConfig().getSource());
+    auto config = AudioPolicyConfig::loadFromApmXmlConfigWithFallback();
+    ASSERT_NE(AudioPolicyConfig::kDefaultConfigSource, config->getSource());
 
-    for (auto desc : manager.getConfig().getInputDevices()) {
+    for (const auto& desc : config->getInputDevices()) {
         if (attachedDevices.find(desc->type()) == attachedDevices.end()) {
             std::string deviceType;
             (void)DeviceConverter::toString(desc->type(), deviceType);
             ADD_FAILURE() << "Input device \"" << deviceType << "\" not found";
         }
     }
-    for (auto desc : manager.getConfig().getOutputDevices()) {
+    for (const auto& desc : config->getOutputDevices()) {
         if (attachedDevices.find(desc->type()) == attachedDevices.end()) {
             std::string deviceType;
             (void)DeviceConverter::toString(desc->type(), deviceType);
@@ -87,13 +86,13 @@
 }
 
 TEST(AudioHealthTest, ConnectSupportedDevice) {
+    auto config = AudioPolicyConfig::loadFromApmXmlConfigWithFallback();
+    ASSERT_NE(AudioPolicyConfig::kDefaultConfigSource, config->getSource());
     AudioPolicyManagerTestClient client;
-    AudioPolicyTestManager manager(&client);
-    manager.loadConfig();
-    ASSERT_NE("AudioPolicyConfig::setDefault", manager.getConfig().getSource());
+    AudioPolicyTestManager manager(config, &client);
 
     DeviceVector devices;
-    for (const auto& hwModule : manager.getConfig().getHwModules()) {
+    for (const auto& hwModule : config->getHwModules()) {
         for (const auto& profile : hwModule->getOutputProfiles()) {
             devices.merge(profile->getSupportedDevices());
         }
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index ed77936..8dfc081 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -82,13 +82,54 @@
     return criterion;
 }
 
+// TODO b/182392769: use attribution source util
+AttributionSourceState createAttributionSourceState(uid_t uid) {
+    AttributionSourceState attributionSourceState;
+    attributionSourceState.uid = uid;
+    attributionSourceState.token = sp<BBinder>::make();
+    return attributionSourceState;
+}
+
 } // namespace
 
+TEST(AudioPolicyConfigTest, DefaultConfigForTestsIsEmpty) {
+    auto config = AudioPolicyConfig::createWritableForTests();
+    EXPECT_TRUE(config->getSource().empty());
+    EXPECT_TRUE(config->getHwModules().isEmpty());
+    EXPECT_TRUE(config->getInputDevices().isEmpty());
+    EXPECT_TRUE(config->getOutputDevices().isEmpty());
+}
+
+TEST(AudioPolicyConfigTest, FallbackToDefault) {
+    auto config = AudioPolicyConfig::loadFromApmXmlConfigWithFallback(
+            base::GetExecutableDirectory() + "/test_invalid_audio_policy_configuration.xml");
+    EXPECT_EQ(AudioPolicyConfig::kDefaultConfigSource, config->getSource());
+}
+
+TEST(AudioPolicyConfigTest, LoadForTests) {
+    {
+        auto result = AudioPolicyConfig::loadFromCustomXmlConfigForTests(
+                base::GetExecutableDirectory() + "/test_invalid_audio_policy_configuration.xml");
+        EXPECT_FALSE(result.ok());
+    }
+    {
+        const std::string source =
+                base::GetExecutableDirectory() + "/test_audio_policy_configuration.xml";
+        auto result = AudioPolicyConfig::loadFromCustomXmlConfigForTests(source);
+        ASSERT_TRUE(result.ok());
+        EXPECT_EQ(source, result.value()->getSource());
+        EXPECT_FALSE(result.value()->getHwModules().isEmpty());
+        EXPECT_FALSE(result.value()->getInputDevices().isEmpty());
+        EXPECT_FALSE(result.value()->getOutputDevices().isEmpty());
+    }
+}
+
 TEST(AudioPolicyManagerTestInit, EngineFailure) {
     AudioPolicyTestClient client;
-    AudioPolicyTestManager manager(&client);
-    manager.getConfig().setDefault();
-    manager.getConfig().setEngineLibraryNameSuffix("non-existent");
+    auto config = AudioPolicyConfig::createWritableForTests();
+    config->setDefault();
+    config->setEngineLibraryNameSuffix("non-existent");
+    AudioPolicyTestManager manager(config, &client);
     ASSERT_EQ(NO_INIT, manager.initialize());
     ASSERT_EQ(NO_INIT, manager.initCheck());
 }
@@ -96,41 +137,12 @@
 TEST(AudioPolicyManagerTestInit, ClientFailure) {
     AudioPolicyTestClient client;
     AudioPolicyTestManager manager(&client);
-    manager.getConfig().setDefault();
     // Since the default client fails to open anything,
     // APM should indicate that the initialization didn't succeed.
     ASSERT_EQ(NO_INIT, manager.initialize());
     ASSERT_EQ(NO_INIT, manager.initCheck());
 }
 
-// Verifies that a failure while loading a config doesn't leave
-// APM config in a "dirty" state. Since AudioPolicyConfig object
-// is a proxy for the data hosted by APM, it isn't possible
-// to "deep copy" it, and thus we have to test its elements
-// individually.
-TEST(AudioPolicyManagerTestInit, ConfigLoadingIsTransactional) {
-    AudioPolicyTestClient client;
-    AudioPolicyTestManager manager(&client);
-    ASSERT_TRUE(manager.getConfig().getHwModules().isEmpty());
-    ASSERT_TRUE(manager.getConfig().getInputDevices().isEmpty());
-    ASSERT_TRUE(manager.getConfig().getOutputDevices().isEmpty());
-    status_t status = deserializeAudioPolicyFile(
-            (base::GetExecutableDirectory() +
-                    "/test_invalid_audio_policy_configuration.xml").c_str(),
-            &manager.getConfig());
-    ASSERT_NE(NO_ERROR, status);
-    EXPECT_TRUE(manager.getConfig().getHwModules().isEmpty());
-    EXPECT_TRUE(manager.getConfig().getInputDevices().isEmpty());
-    EXPECT_TRUE(manager.getConfig().getOutputDevices().isEmpty());
-    status = deserializeAudioPolicyFile(
-            (base::GetExecutableDirectory() + "/test_audio_policy_configuration.xml").c_str(),
-            &manager.getConfig());
-    ASSERT_EQ(NO_ERROR, status);
-    EXPECT_FALSE(manager.getConfig().getHwModules().isEmpty());
-    EXPECT_FALSE(manager.getConfig().getInputDevices().isEmpty());
-    EXPECT_FALSE(manager.getConfig().getOutputDevices().isEmpty());
-}
-
 
 class PatchCountCheck {
   public:
@@ -192,6 +204,7 @@
     static audio_port_handle_t getDeviceIdFromPatch(const struct audio_patch* patch);
     virtual AudioPolicyManagerTestClient* getClient() { return new AudioPolicyManagerTestClient; }
 
+    sp<AudioPolicyConfig> mConfig;
     std::unique_ptr<AudioPolicyManagerTestClient> mClient;
     std::unique_ptr<AudioPolicyTestManager> mManager;
 
@@ -200,8 +213,8 @@
 
 void AudioPolicyManagerTest::SetUp() {
     mClient.reset(getClient());
-    mManager.reset(new AudioPolicyTestManager(mClient.get()));
     ASSERT_NO_FATAL_FAILURE(SetUpManagerConfig());  // Subclasses may want to customize the config.
+    mManager.reset(new AudioPolicyTestManager(mConfig, mClient.get()));
     ASSERT_EQ(NO_ERROR, mManager->initialize());
     ASSERT_EQ(NO_ERROR, mManager->initCheck());
 }
@@ -212,7 +225,8 @@
 }
 
 void AudioPolicyManagerTest::SetUpManagerConfig() {
-    mManager->getConfig().setDefault();
+    mConfig = AudioPolicyConfig::createWritableForTests();
+    mConfig->setDefault();
 }
 
 void AudioPolicyManagerTest::dumpToLog() {
@@ -271,10 +285,7 @@
     AudioPolicyInterface::output_type_t outputType;
     bool isSpatialized;
     bool isBitPerfectInternal;
-    // TODO b/182392769: use attribution source util
-    AttributionSourceState attributionSource = AttributionSourceState();
-    attributionSource.uid = uid;
-    attributionSource.token = sp<BBinder>::make();
+    AttributionSourceState attributionSource = createAttributionSourceState(uid);
     ASSERT_EQ(OK, mManager->getOutputForAttr(
                     &attr, output, session, &stream, attributionSource, &config, &flags,
                     selectedDeviceId, portId, {}, &outputType, &isSpatialized,
@@ -302,10 +313,7 @@
     if (!portId) portId = &localPortId;
     *portId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::input_type_t inputType;
-    // TODO b/182392769: use attribution source util
-    AttributionSourceState attributionSource = AttributionSourceState();
-    attributionSource.uid = 0;
-    attributionSource.token = sp<BBinder>::make();
+    AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
     ASSERT_EQ(OK, mManager->getInputForAttr(
             &attr, &input, riid, session, attributionSource, &config, flags,
             selectedDeviceId, &inputType, portId));
@@ -465,7 +473,6 @@
 void AudioPolicyManagerTestMsd::SetUpManagerConfig() {
     // TODO: Consider using Serializer to load part of the config from a string.
     ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTest::SetUpManagerConfig());
-    AudioPolicyConfig& config = mManager->getConfig();
     mMsdOutputDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_BUS);
     sp<AudioProfile> pcmOutputProfile = new AudioProfile(
             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
@@ -481,26 +488,26 @@
     sp<AudioProfile> pcmInputProfile = new AudioProfile(
             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, 44100);
     mMsdInputDevice->addAudioProfile(pcmInputProfile);
-    config.addDevice(mMsdOutputDevice);
-    config.addDevice(mMsdInputDevice);
+    mConfig->addDevice(mMsdOutputDevice);
+    mConfig->addDevice(mMsdInputDevice);
 
     if (mExpectedAudioPatchCount == 2) {
         // Add SPDIF device with PCM output profile as a second device for dual MSD audio patching.
         mSpdifDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_SPDIF);
         mSpdifDevice->addAudioProfile(pcmOutputProfile);
-        config.addDevice(mSpdifDevice);
+        mConfig->addDevice(mSpdifDevice);
 
         sp<OutputProfile> spdifOutputProfile = new OutputProfile("spdif output");
         spdifOutputProfile->addAudioProfile(pcmOutputProfile);
         spdifOutputProfile->addSupportedDevice(mSpdifDevice);
-        config.getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
+        mConfig->getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
                 addOutputProfile(spdifOutputProfile);
     }
 
     sp<HwModule> msdModule = new HwModule(AUDIO_HARDWARE_MODULE_ID_MSD, 2 /*halVersionMajor*/);
-    HwModuleCollection modules = config.getHwModules();
+    HwModuleCollection modules = mConfig->getHwModules();
     modules.add(msdModule);
-    config.setHwModules(modules);
+    mConfig->setHwModules(modules);
 
     sp<OutputProfile> msdOutputProfile = new OutputProfile("msd input");
     msdOutputProfile->addAudioProfile(pcmOutputProfile);
@@ -528,15 +535,15 @@
     // of streams that are not supported by MSD.
     sp<AudioProfile> dtsOutputProfile = new AudioProfile(
             AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1, k48000SamplingRate);
-    config.getDefaultOutputDevice()->addAudioProfile(dtsOutputProfile);
+    mConfig->getDefaultOutputDevice()->addAudioProfile(dtsOutputProfile);
     sp<OutputProfile> primaryEncodedOutputProfile = new OutputProfile("encoded");
     primaryEncodedOutputProfile->addAudioProfile(dtsOutputProfile);
     primaryEncodedOutputProfile->setFlags(AUDIO_OUTPUT_FLAG_DIRECT);
-    primaryEncodedOutputProfile->addSupportedDevice(config.getDefaultOutputDevice());
-    config.getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
+    primaryEncodedOutputProfile->addSupportedDevice(mConfig->getDefaultOutputDevice());
+    mConfig->getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
             addOutputProfile(primaryEncodedOutputProfile);
 
-    mDefaultOutputDevice = config.getDefaultOutputDevice();
+    mDefaultOutputDevice = mConfig->getDefaultOutputDevice();
     if (mExpectedAudioPatchCount == 2) {
         mSpdifDevice->addAudioProfile(dtsOutputProfile);
         primaryEncodedOutputProfile->addSupportedDevice(mSpdifDevice);
@@ -547,12 +554,12 @@
     sp<AudioProfile> iec958InputProfile = new AudioProfile(
             AUDIO_FORMAT_IEC60958, AUDIO_CHANNEL_INDEX_MASK_24, k48000SamplingRate);
     mHdmiInputDevice->addAudioProfile(iec958InputProfile);
-    config.addDevice(mHdmiInputDevice);
+    mConfig->addDevice(mHdmiInputDevice);
     sp<InputProfile> hdmiInputProfile = new InputProfile("hdmi input");
     hdmiInputProfile->addAudioProfile(iec958InputProfile);
     hdmiInputProfile->setFlags(AUDIO_INPUT_FLAG_DIRECT);
     hdmiInputProfile->addSupportedDevice(mHdmiInputDevice);
-    config.getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
+    mConfig->getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
             addInputProfile(hdmiInputProfile);
 }
 
@@ -719,7 +726,7 @@
     int countDirectProfilesPrimary = 0;
     const auto& primary = mManager->getConfig().getHwModules()
             .getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY);
-    for (const auto outputProfile : primary->getOutputProfiles()) {
+    for (const auto& outputProfile : primary->getOutputProfiles()) {
         if (outputProfile->asAudioPort()->isDirectOutput()) {
             countDirectProfilesPrimary += outputProfile->asAudioPort()->getAudioProfiles().size();
         }
@@ -729,7 +736,7 @@
     int countDirectProfilesMsd = 0;
     const auto& msd = mManager->getConfig().getHwModules()
             .getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
-    for (const auto outputProfile : msd->getOutputProfiles()) {
+    for (const auto& outputProfile : msd->getOutputProfiles()) {
         if (outputProfile->asAudioPort()->isDirectOutput()) {
             countDirectProfilesMsd += outputProfile->asAudioPort()->getAudioProfiles().size();
         }
@@ -920,9 +927,9 @@
         sExecutableDir + "test_audio_policy_configuration.xml";
 
 void AudioPolicyManagerTestWithConfigurationFile::SetUpManagerConfig() {
-    status_t status = deserializeAudioPolicyFile(getConfigFile().c_str(), &mManager->getConfig());
-    ASSERT_EQ(NO_ERROR, status);
-    mManager->getConfig().setSource(getConfigFile());
+    auto result = AudioPolicyConfig::loadFromCustomXmlConfigForTests(getConfigFile());
+    ASSERT_TRUE(result.ok());
+    mConfig = result.value();
 }
 
 TEST_F(AudioPolicyManagerTestWithConfigurationFile, InitSuccess) {
@@ -1225,6 +1232,19 @@
     EXPECT_FALSE(isBitPerfect);
     EXPECT_EQ(bitPerfectOutput, output);
 
+    const audio_attributes_t dtmfAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
+            .usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+    };
+    audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    portId = AUDIO_PORT_HANDLE_NONE;
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
+            AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(bitPerfectOutput, dtmfOutput);
+
     // When configuration matches preferred mixer attributes, which is bit-perfect, but the client
     // is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
     getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
@@ -1330,13 +1350,14 @@
             AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig);
     ASSERT_EQ(INVALID_OPERATION, ret);
 
-    // The first time to register valid policy mixes should succeed.
+    // Registering a valid loopback policy mix for the first time should succeed.
     clearPolicyMix();
-    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
-            AUDIO_DEVICE_OUT_SPEAKER, "", audioConfig);
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
+            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "addr", audioConfig);
     ASSERT_EQ(NO_ERROR, ret);
-    // Registering the same policy mixes should fail.
-    ret = mManager->registerPolicyMixes(mAudioMixes);
+    // Registering a render policy mix for the same loopback address should fail.
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "addr", audioConfig);
     ASSERT_EQ(INVALID_OPERATION, ret);
 }
 
@@ -1859,6 +1880,82 @@
                     /*expected_match=*/ false)
                     .withSessionId(TEST_SESSION_ID).withUsage(AUDIO_USAGE_MEDIA)));
 
+struct DPMmapTestParam {
+    DPMmapTestParam(int mixRouteFlags, audio_devices_t deviceType, const std::string& deviceAddress)
+        : mixRouteFlags(mixRouteFlags), deviceType(deviceType), deviceAddress(deviceAddress) {}
+
+    int mixRouteFlags;
+    audio_devices_t deviceType;
+    std::string deviceAddress;
+};
+
+class AudioPolicyManagerTestMMapPlaybackRerouting
+    : public AudioPolicyManagerTestDynamicPolicy,
+      public ::testing::WithParamInterface<DPMmapTestParam> {
+  protected:
+    void SetUp() override {
+        AudioPolicyManagerTestDynamicPolicy::SetUp();
+        audioConfig = AUDIO_CONFIG_INITIALIZER;
+        audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+        audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+        audioConfig.sample_rate = k48000SamplingRate;
+    }
+
+    audio_config_t audioConfig;
+    audio_io_handle_t mOutput;
+    audio_stream_type_t mStream = AUDIO_STREAM_DEFAULT;
+    audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t mPortId;
+    AudioPolicyInterface::output_type_t mOutputType;
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    bool mIsSpatialized;
+    bool mIsBitPerfect;
+};
+
+TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting, MmapPlaybackStreamMatchingDapMixFails) {
+    // Add mix matching the test uid.
+    const int testUid = 12345;
+    const auto param = GetParam();
+    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, param.mixRouteFlags, param.deviceType,
+                                param.deviceAddress, audioConfig, {createUidCriterion(testUid)});
+    ASSERT_EQ(NO_ERROR, ret);
+
+    // Getting output for the matching uid and an mmap-ed stream should fail.
+    audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+    ASSERT_EQ(INVALID_OPERATION,
+              mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
+                                         createAttributionSourceState(testUid), &audioConfig,
+                                         &outputFlags, &mSelectedDeviceId, &mPortId, {},
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+}
+
+TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting, NonMmapPlaybackStreamMatchingDapMixSucceeds) {
+    // Add mix matching the test uid.
+    const int testUid = 12345;
+    const auto param = GetParam();
+    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, param.mixRouteFlags, param.deviceType,
+                                param.deviceAddress, audioConfig, {createUidCriterion(testUid)});
+    ASSERT_EQ(NO_ERROR, ret);
+
+    // Getting output for the matching uid should succeed for a non-mmap stream.
+    audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_NONE;
+    ASSERT_EQ(NO_ERROR,
+              mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
+                                         createAttributionSourceState(testUid), &audioConfig,
+                                         &outputFlags, &mSelectedDeviceId, &mPortId, {},
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        MmapPlaybackRerouting, AudioPolicyManagerTestMMapPlaybackRerouting,
+        testing::Values(DPMmapTestParam(MIX_ROUTE_FLAG_LOOP_BACK, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+                                        /*deviceAddress=*/"remote_submix_media"),
+                        DPMmapTestParam(MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER,
+                                        AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+                                        /*deviceAddress=*/"remote_submix_media"),
+                        DPMmapTestParam(MIX_ROUTE_FLAG_RENDER, AUDIO_DEVICE_OUT_SPEAKER,
+                                        /*deviceAddress=*/"")));
+
 class AudioPolicyManagerTestDPMixRecordInjection : public AudioPolicyManagerTestDynamicPolicy,
         public testing::WithParamInterface<DPTestParam> {
 protected:
@@ -2018,19 +2115,19 @@
     // Connecting a valid output device with valid parameters should trigger a routing update
     ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
             AUDIO_DEVICE_OUT_BLUETOOTH_SCO, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
-            "a", "b", AUDIO_FORMAT_DEFAULT));
+            "00:11:22:33:44:55", "b", AUDIO_FORMAT_DEFAULT));
     ASSERT_EQ(1, mClient->getRoutingUpdatedCounter());
 
     // Disconnecting a connected device should succeed and trigger a routing update
     ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
             AUDIO_DEVICE_OUT_BLUETOOTH_SCO, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-            "a", "b", AUDIO_FORMAT_DEFAULT));
+            "00:11:22:33:44:55", "b", AUDIO_FORMAT_DEFAULT));
     ASSERT_EQ(2, mClient->getRoutingUpdatedCounter());
 
     // Disconnecting a disconnected device should fail and not trigger a routing update
     ASSERT_EQ(INVALID_OPERATION, mManager->setDeviceConnectionState(
             AUDIO_DEVICE_OUT_BLUETOOTH_SCO, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-            "a", "b",  AUDIO_FORMAT_DEFAULT));
+            "00:11:22:33:44:55", "b",  AUDIO_FORMAT_DEFAULT));
     ASSERT_EQ(2, mClient->getRoutingUpdatedCounter());
 
     // Changing force use should trigger an update
@@ -2158,9 +2255,9 @@
                 DeviceConnectionTestParams({AUDIO_DEVICE_OUT_HDMI, "test_out_hdmi",
                                             "audio_policy_test_out_hdmi"}),
                 DeviceConnectionTestParams({AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, "bt_hfp_in",
-                                            "hfp_client_in"}),
+                                            "00:11:22:33:44:55"}),
                 DeviceConnectionTestParams({AUDIO_DEVICE_OUT_BLUETOOTH_SCO, "bt_hfp_out",
-                                            "hfp_client_out"})
+                                            "00:11:22:33:44:55"})
                 )
         );
 
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 1c922ce..84dcf26 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -35,9 +35,91 @@
     ],
 }
 
-cc_library_shared {
+cc_defaults {
+    name: "libcameraservice_deps",
+
+    shared_libs: [
+        "libactivitymanager_aidl",
+        "libbase",
+        "libdl",
+        "libui",
+        "liblog",
+        "libutilscallstack",
+        "libutils",
+        "libbinder",
+        "libbinder_ndk",
+        "libactivitymanager_aidl",
+        "libpermission",
+        "libcutils",
+        "libexif",
+        "libmedia",
+        "libmediautils",
+        "libcamera_client",
+        "libcamera_metadata",
+        "libfmq",
+        "libgui",
+        "libhardware",
+        "libhidlbase",
+        "libimage_io",
+        "libjpeg",
+        "libultrahdr",
+        "libmedia_codeclist",
+        "libmedia_omx",
+        "libmemunreachable",
+        "libprocessgroup",
+        "libprocinfo",
+        "libsensorprivacy",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libxml2",
+        "libyuv",
+        "android.hardware.camera.common@1.0",
+        "android.hardware.camera.device@1.0",
+        "android.hardware.camera.device@3.2",
+        "android.hardware.camera.device@3.3",
+        "android.hardware.camera.device@3.4",
+        "android.hardware.camera.device@3.5",
+        "android.hardware.camera.device@3.6",
+        "android.hardware.camera.device@3.7",
+        "android.hardware.common-V2-ndk",
+        "android.hardware.common.fmq-V1-ndk",
+        "android.hardware.graphics.common-V4-ndk",
+        "media_permission-aidl-cpp",
+    ],
+
+    static_libs: [
+        "android.frameworks.cameraservice.common@2.0",
+        "android.frameworks.cameraservice.service@2.0",
+        "android.frameworks.cameraservice.service@2.1",
+        "android.frameworks.cameraservice.service@2.2",
+        "android.frameworks.cameraservice.device@2.0",
+        "android.frameworks.cameraservice.device@2.1",
+        "android.frameworks.cameraservice.common-V1-ndk",
+        "android.frameworks.cameraservice.service-V1-ndk",
+        "android.frameworks.cameraservice.device-V1-ndk",
+        "android.hardware.camera.common-V1-ndk",
+        "android.hardware.camera.device-V2-ndk",
+        "android.hardware.camera.metadata-V2-ndk",
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V2-ndk",
+        "libaidlcommonsupport",
+        "libdynamic_depth",
+        "libprocessinfoservice_aidl",
+        "libbinderthreadstateutils",
+        "media_permission-aidl-cpp",
+        "libcameraservice_device_independent",
+    ],
+}
+
+cc_library {
     name: "libcameraservice",
 
+    defaults: [
+        "libcameraservice_deps",
+    ],
     // Camera service source
 
     srcs: [
@@ -105,6 +187,7 @@
         "utils/CameraThreadState.cpp",
         "utils/CameraTraces.cpp",
         "utils/AutoConditionLock.cpp",
+        "utils/SchedulingPolicyUtils.cpp",
         "utils/SessionConfigurationUtils.cpp",
         "utils/SessionConfigurationUtilsHidl.cpp",
         "utils/SessionStatsBuilder.cpp",
@@ -119,72 +202,6 @@
         "libmediametrics_headers",
     ],
 
-    shared_libs: [
-        "libbase",
-        "libdl",
-        "libexif",
-        "libui",
-        "liblog",
-        "libutilscallstack",
-        "libutils",
-        "libbinder",
-        "libbinder_ndk",
-        "libactivitymanager_aidl",
-        "libpermission",
-        "libcutils",
-        "libmedia",
-        "libmediautils",
-        "libcamera_client",
-        "libcamera_metadata",
-        "libdynamic_depth",
-        "libfmq",
-        "libgui",
-        "libhardware",
-        "libhidlbase",
-        "libimage_io",
-        "libjpeg",
-        "libjpegrecoverymap",
-        "libmedia_codeclist",
-        "libmedia_omx",
-        "libmemunreachable",
-        "libsensorprivacy",
-        "libstagefright",
-        "libstagefright_foundation",
-        "libxml2",
-        "libyuv",
-        "android.frameworks.cameraservice.common@2.0",
-        "android.frameworks.cameraservice.service@2.0",
-        "android.frameworks.cameraservice.service@2.1",
-        "android.frameworks.cameraservice.service@2.2",
-        "android.frameworks.cameraservice.device@2.0",
-        "android.frameworks.cameraservice.device@2.1",
-        "android.frameworks.cameraservice.common-V1-ndk",
-        "android.frameworks.cameraservice.service-V1-ndk",
-        "android.frameworks.cameraservice.device-V1-ndk",
-        "android.hardware.camera.common@1.0",
-        "android.hardware.camera.provider@2.4",
-        "android.hardware.camera.provider@2.5",
-        "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V2-ndk",
-        "android.hardware.camera.device@3.2",
-        "android.hardware.camera.device@3.3",
-        "android.hardware.camera.device@3.4",
-        "android.hardware.camera.device@3.5",
-        "android.hardware.camera.device@3.6",
-        "android.hardware.camera.device@3.7",
-        "android.hardware.camera.device-V2-ndk",
-        "media_permission-aidl-cpp",
-    ],
-
-    static_libs: [
-        "libaidlcommonsupport",
-        "libprocessinfoservice_aidl",
-        "libbinderthreadstateutils",
-        "media_permission-aidl-cpp",
-        "libcameraservice_device_independent",
-    ],
-
     export_shared_lib_headers: [
         "libbinder",
         "libactivitymanager_aidl",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 0115023..ca894fe 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -69,6 +69,8 @@
 #include <private/android_filesystem_config.h>
 #include <system/camera_vendor_tags.h>
 #include <system/camera_metadata.h>
+#include <binder/IServiceManager.h>
+#include <binder/IActivityManager.h>
 
 #include <system/camera.h>
 
@@ -85,6 +87,7 @@
     const char* kActivityServiceName = "activity";
     const char* kSensorPrivacyServiceName = "sensor_privacy";
     const char* kAppopsServiceName = "appops";
+    const char* kProcessInfoServiceName = "processinfo";
 }; // namespace anonymous
 
 namespace android {
@@ -137,10 +140,13 @@
         "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
 static const String16
         sCameraInjectExternalCameraPermission("android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+// Constant integer for FGS logging, used to denote the API type for the logger.
+static const int LOG_FGS_CAMERA_API = 1;
 const char *sFileName = "lastOpenSessionDumpFile";
 static constexpr int32_t kSystemNativeClientScore = resource_policy::PERCEPTIBLE_APP_ADJ;
 static constexpr int32_t kSystemNativeClientState =
         ActivityManager::PROCESS_STATE_PERSISTENT_UI;
+static const String16 kServiceName("cameraserver");
 
 const String8 CameraService::kOfflineDevice("offline-");
 const String16 CameraService::kWatchAllClientsFlag("all");
@@ -697,17 +703,100 @@
     broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
 }
 
-static bool hasPermissionsForSystemCamera(int callingPid, int callingUid) {
+static bool isAutomotiveDevice() {
+    // Checks the property ro.hardware.type and returns true if it is
+    // automotive.
+    char value[PROPERTY_VALUE_MAX] = {0};
+    property_get("ro.hardware.type", value, "");
+    return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
+}
+
+static bool isAutomotivePrivilegedClient(int32_t uid) {
+    // Returns false if this is not an automotive device type.
+    if (!isAutomotiveDevice())
+        return false;
+
+    // Returns true if the uid is AID_AUTOMOTIVE_EVS which is a
+    // privileged client uid used for safety critical use cases such as
+    // rear view and surround view.
+    return uid == AID_AUTOMOTIVE_EVS;
+}
+
+bool CameraService::isAutomotiveExteriorSystemCamera(const String8& cam_id) const {
+    // Returns false if this is not an automotive device type.
+    if (!isAutomotiveDevice())
+        return false;
+
+    // Returns false if no camera id is provided.
+    if (cam_id.isEmpty())
+        return false;
+
+    SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
+    if (getSystemCameraKind(cam_id, &systemCameraKind) != OK) {
+        // This isn't a known camera ID, so it's not a system camera.
+        ALOGE("%s: Unknown camera id %s, ", __FUNCTION__, cam_id.c_str());
+        return false;
+    }
+
+    if (systemCameraKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) {
+        ALOGE("%s: camera id %s is not a system camera", __FUNCTION__, cam_id.c_str());
+        return false;
+    }
+
+    CameraMetadata cameraInfo;
+    status_t res = mCameraProviderManager->getCameraCharacteristics(
+            cam_id.string(), false, &cameraInfo, false);
+    if (res != OK) {
+        ALOGE("%s: Not able to get camera characteristics for camera id %s",__FUNCTION__,
+                cam_id.c_str());
+        return false;
+    }
+
+    camera_metadata_entry auto_location  = cameraInfo.find(ANDROID_AUTOMOTIVE_LOCATION);
+    if (auto_location.count != 1)
+        return false;
+
+    uint8_t location = auto_location.data.u8[0];
+    if ((location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_FRONT) &&
+            (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_REAR) &&
+            (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_LEFT) &&
+            (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT)) {
+        return false;
+    }
+
+    return true;
+}
+
+bool CameraService::checkPermission(const String8& cam_id, const String16& permission,
+        const AttributionSourceState& attributionSource, const String16& message,
+        int32_t attributedOpCode) const {
+    if (isAutomotivePrivilegedClient(attributionSource.uid)) {
+        // If cam_id is empty, then it means that this check is not used for the
+        // purpose of accessing a specific camera, hence grant permission just
+        // based on uid to the automotive privileged client.
+        if (cam_id.isEmpty())
+            return true;
+        // If this call is used for accessing a specific camera then cam_id must be provided.
+        // In that case, the permission is pre-granted only for accessing the exterior
+        // system-only camera.
+        return isAutomotiveExteriorSystemCamera(cam_id);
+
+    }
+
     permission::PermissionChecker permissionChecker;
+    return permissionChecker.checkPermissionForPreflight(permission, attributionSource,
+            message, attributedOpCode) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+}
+
+bool CameraService::hasPermissionsForSystemCamera(const String8& cam_id, int callingPid,
+        int callingUid) const {
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
-            sSystemCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-            sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForSystemCamera = checkPermission(cam_id,
+            sSystemCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE);
+    bool checkPermissionForCamera = checkPermission(cam_id,
+            sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE);
     return checkPermissionForSystemCamera && checkPermissionForCamera;
 }
 
@@ -715,7 +804,7 @@
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
     bool hasSystemCameraPermissions =
-            hasPermissionsForSystemCamera(CameraThreadState::getCallingPid(),
+            hasPermissionsForSystemCamera(String8(), CameraThreadState::getCallingPid(),
                     CameraThreadState::getCallingUid());
     switch (type) {
         case CAMERA_TYPE_BACKWARD_COMPATIBLE:
@@ -756,9 +845,8 @@
         return STATUS_ERROR(ERROR_DISCONNECTED,
                 "Camera subsystem is not available");
     }
-    bool hasSystemCameraPermissions =
-            hasPermissionsForSystemCamera(CameraThreadState::getCallingPid(),
-                    CameraThreadState::getCallingUid());
+    bool hasSystemCameraPermissions = hasPermissionsForSystemCamera(String8::format("%d", cameraId),
+            CameraThreadState::getCallingPid(), CameraThreadState::getCallingUid());
     int cameraIdBound = mNumberOfCamerasWithoutSystemCamera;
     if (hasSystemCameraPermissions) {
         cameraIdBound = mNumberOfCameras;
@@ -787,13 +875,11 @@
     const std::vector<std::string> *deviceIds = &mNormalDeviceIdsWithoutSystemCamera;
     auto callingPid = CameraThreadState::getCallingPid();
     auto callingUid = CameraThreadState::getCallingUid();
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
-                sSystemCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-                != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForSystemCamera = checkPermission(String8::format("%d", cameraIdInt),
+                sSystemCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE);
     if (checkPermissionForSystemCamera || getpid() == callingPid) {
         deviceIds = &mNormalDeviceIds;
     }
@@ -865,13 +951,11 @@
     // If it's not calling from cameraserver, check the permission only if
     // android.permission.CAMERA is required. If android.permission.SYSTEM_CAMERA was needed,
     // it would've already been checked in shouldRejectSystemCameraConnection.
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-                sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-                != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForCamera = checkPermission(String8(cameraId), sCameraPermission,
+            attributionSource, String16(), AppOpsManager::OP_NONE);
     if ((callingPid != getpid()) &&
             (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
             !checkPermissionForCamera) {
@@ -1024,7 +1108,7 @@
         int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
         apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
-        /*out*/sp<BasicClient>* client) {
+        bool forceSlowJpegMode, /*out*/sp<BasicClient>* client) {
     // For HIDL devices
     if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
         // Create CameraClient based on device version reported by the HAL.
@@ -1057,9 +1141,10 @@
         sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
         *client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
                 packageName, featureId, cameraId, api1CameraId, facing, sensorOrientation,
-                clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait);
-        ALOGI("%s: Camera1 API (legacy), override to portrait %d", __FUNCTION__,
-                overrideToPortrait);
+                clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+                forceSlowJpegMode);
+        ALOGI("%s: Camera1 API (legacy), override to portrait %d, forceSlowJpegMode %d",
+                __FUNCTION__, overrideToPortrait, forceSlowJpegMode);
     } else { // Camera2 API route
         sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
                 static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
@@ -1149,15 +1234,15 @@
 Status CameraService::initializeShimMetadata(int cameraId) {
     int uid = CameraThreadState::getCallingUid();
 
-    String16 internalPackageName("cameraserver");
     String8 id = String8::format("%d", cameraId);
     Status ret = Status::ok();
     sp<Client> tmp = nullptr;
     if (!(ret = connectHelper<ICameraClient,Client>(
             sp<ICameraClient>{nullptr}, id, cameraId,
-            internalPackageName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
+            kServiceName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
-            /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true, /*out*/ tmp)
+            /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true,
+            /*forceSlowJpegMode*/false, /*out*/ tmp)
             ).isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().string());
     }
@@ -1308,7 +1393,6 @@
 Status CameraService::validateClientPermissionsLocked(const String8& cameraId,
         const String8& clientName8, int& clientUid, int& clientPid,
         /*out*/int& originalClientPid) const {
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
 
     int callingPid = CameraThreadState::getCallingPid();
@@ -1360,9 +1444,8 @@
     attributionSource.pid = clientPid;
     attributionSource.uid = clientUid;
     attributionSource.packageName = clientName8;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-            sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission, attributionSource,
+            String16(), AppOpsManager::OP_NONE);
     if (callingPid != getpid() &&
                 (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
         ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
@@ -1383,8 +1466,13 @@
                 callingUid, procState);
     }
 
-    // If sensor privacy is enabled then prevent access to the camera
-    if (mSensorPrivacyPolicy->isSensorPrivacyEnabled()) {
+
+    // An automotive privileged client (AID_AUTOMOTIVE_EVS) using an exterior system camera for
+    // use cases such as rear view and surround view cannot be disabled and is exempt from the
+    // sensor privacy policy. Otherwise, if sensor privacy is enabled, prevent camera access.
+    if ((!isAutomotivePrivilegedClient(callingUid) ||
+            !isAutomotiveExteriorSystemCamera(cameraId)) &&
+            mSensorPrivacyPolicy->isSensorPrivacyEnabled()) {
         ALOGE("Access Denial: cannot use the camera when sensor privacy is enabled");
         return STATUS_ERROR_FMT(ERROR_DISABLED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" when sensor privacy "
@@ -1502,33 +1590,6 @@
             }
         }
 
-        // Get current active client PIDs
-        std::vector<int> ownerPids(mActiveClientManager.getAllOwners());
-        ownerPids.push_back(clientPid);
-
-        std::vector<int> priorityScores(ownerPids.size());
-        std::vector<int> states(ownerPids.size());
-
-        // Get priority scores of all active PIDs
-        status_t err = ProcessInfoService::getProcessStatesScoresFromPids(
-                ownerPids.size(), &ownerPids[0], /*out*/&states[0],
-                /*out*/&priorityScores[0]);
-        if (err != OK) {
-            ALOGE("%s: Priority score query failed: %d",
-                  __FUNCTION__, err);
-            return err;
-        }
-
-        // Update all active clients' priorities
-        std::map<int,resource_policy::ClientPriority> pidToPriorityMap;
-        for (size_t i = 0; i < ownerPids.size() - 1; i++) {
-            pidToPriorityMap.emplace(ownerPids[i],
-                    resource_policy::ClientPriority(priorityScores[i], states[i],
-                            /* isVendorClient won't get copied over*/ false,
-                            /* oomScoreOffset won't get copied over*/ 0));
-        }
-        mActiveClientManager.updatePriorities(pidToPriorityMap);
-
         // Get state for the given cameraId
         auto state = getCameraState(cameraId);
         if (state == nullptr) {
@@ -1538,16 +1599,57 @@
             return BAD_VALUE;
         }
 
-        int32_t actualScore = priorityScores[priorityScores.size() - 1];
-        int32_t actualState = states[states.size() - 1];
+        sp<IServiceManager> sm = defaultServiceManager();
+        sp<IBinder> binder = sm->checkService(String16(kProcessInfoServiceName));
+        if (!binder && isAutomotivePrivilegedClient(CameraThreadState::getCallingUid())) {
+            // If the processinfo service is not available and the client is an automotive
+            // privileged client used for safety-critical use cases such as rear view and
+            // surround view, which need to be available before Android boot completes, use
+            // hardcoded values for the process state and priority score. As this scenario
+            // occurs before the Android system services are up and the client is a native
+            // client, use NATIVE_ADJ as the priority score and PROCESS_STATE_BOUND_TOP as
+            // the state, since such automotive apps need to be visible on top.
+            clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
+                    sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
+                    state->getConflicting(), resource_policy::NATIVE_ADJ, clientPid,
+                    ActivityManager::PROCESS_STATE_BOUND_TOP, oomScoreOffset, systemNativeClient);
+        } else {
+            // Get current active client PIDs
+            std::vector<int> ownerPids(mActiveClientManager.getAllOwners());
+            ownerPids.push_back(clientPid);
 
-        // Make descriptor for incoming client. We store the oomScoreOffset
-        // since we might need it later on new handleEvictionsLocked and
-        // ProcessInfoService would not take that into account.
-        clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
-                sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
-                state->getConflicting(), actualScore, clientPid, actualState,
-                oomScoreOffset, systemNativeClient);
+            std::vector<int> priorityScores(ownerPids.size());
+            std::vector<int> states(ownerPids.size());
+
+            // Get priority scores of all active PIDs
+            status_t err = ProcessInfoService::getProcessStatesScoresFromPids(ownerPids.size(),
+                    &ownerPids[0], /*out*/&states[0], /*out*/&priorityScores[0]);
+            if (err != OK) {
+                ALOGE("%s: Priority score query failed: %d", __FUNCTION__, err);
+                return err;
+            }
+
+            // Update all active clients' priorities
+            std::map<int,resource_policy::ClientPriority> pidToPriorityMap;
+            for (size_t i = 0; i < ownerPids.size() - 1; i++) {
+                pidToPriorityMap.emplace(ownerPids[i],
+                        resource_policy::ClientPriority(priorityScores[i], states[i],
+                        /* isVendorClient won't get copied over*/ false,
+                        /* oomScoreOffset won't get copied over*/ 0));
+            }
+            mActiveClientManager.updatePriorities(pidToPriorityMap);
+
+            int32_t actualScore = priorityScores[priorityScores.size() - 1];
+            int32_t actualState = states[states.size() - 1];
+
+            // Make descriptor for incoming client. We store the oomScoreOffset
+            // since we might need it later in handleEvictionsLocked, and
+            // ProcessInfoService would not take it into account.
+            clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
+                    sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
+                    state->getConflicting(), actualScore, clientPid, actualState,
+                    oomScoreOffset, systemNativeClient);
+        }
 
         resource_policy::ClientPriority clientPriority = clientDescriptor->getPriority();
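For context on the fallback in the hunk above: in OOM adj terms a lower score denotes a more important process, so pinning automotive privileged clients to NATIVE_ADJ and PROCESS_STATE_BOUND_TOP effectively shields these safety-critical clients from eviction while the process info service is still unavailable. A minimal standalone sketch of that decision, using illustrative constants rather than the actual AOSP values:

#include <cstdint>
#include <utility>

// Illustrative stand-ins; the real constants live in ActivityManager and the
// camera service's resource_policy headers and may differ.
constexpr int32_t kNativeAdj = -1000;      // assumed stand-in for resource_policy::NATIVE_ADJ
constexpr int32_t kProcStateBoundTop = 3;  // assumed stand-in for PROCESS_STATE_BOUND_TOP

// Returns the (priority score, process state) pair used to build the client
// descriptor when the process info service cannot be queried.
std::pair<int32_t, int32_t> pickClientPriority(bool processInfoAvailable,
                                               bool isAutomotivePrivileged,
                                               int32_t queriedScore,
                                               int32_t queriedState) {
    if (!processInfoAvailable && isAutomotivePrivileged) {
        // Safety-critical automotive clients get the highest native priority so
        // they are not evicted before system services finish booting.
        return {kNativeAdj, kProcStateBoundTop};
    }
    return {queriedScore, queriedState};
}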
 
@@ -1682,6 +1784,7 @@
         int clientPid,
         int targetSdkVersion,
         bool overrideToPortrait,
+        bool forceSlowJpegMode,
         /*out*/
         sp<ICamera>* device) {
 
@@ -1693,7 +1796,7 @@
     ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId,
             clientPackageName,/*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
             /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
-            overrideToPortrait, /*out*/client);
+            overrideToPortrait, forceSlowJpegMode, /*out*/client);
 
     if(!ret.isOk()) {
         logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageName),
@@ -1702,6 +1805,15 @@
     }
 
     *device = client;
+
+    const sp<IServiceManager> sm(defaultServiceManager());
+    const auto& mActivityManager = getActivityManager();
+    if (mActivityManager) {
+        mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
+            CameraThreadState::getCallingUid(),
+            CameraThreadState::getCallingPid());
+    }
+
     return ret;
 }
 
@@ -1713,7 +1825,7 @@
     //      have android.permission.SYSTEM_CAMERA permissions.
     if (!isVendorListener && (systemCameraKind == SystemCameraKind::HIDDEN_SECURE_CAMERA ||
             (systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
-            !hasPermissionsForSystemCamera(clientPid, clientUid)))) {
+            !hasPermissionsForSystemCamera(String8(), clientPid, clientUid)))) {
         return true;
     }
     return false;
@@ -1753,7 +1865,7 @@
     //     characteristics) even if clients don't have android.permission.CAMERA. We do not want the
     //     same behavior for system camera devices.
     if (!systemClient && systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
-            !hasPermissionsForSystemCamera(cPid, cUid)) {
+            !hasPermissionsForSystemCamera(cameraId, cPid, cUid)) {
         ALOGW("Rejecting access to system only camera %s, inadequete permissions",
                 cameraId.c_str());
         return true;
@@ -1801,7 +1913,10 @@
         clientUserId = multiuser_get_user_id(callingUid);
     }
 
-    if (mCameraServiceProxyWrapper->isCameraDisabled(clientUserId)) {
+    // An automotive privileged client (AID_AUTOMOTIVE_EVS) using an exterior system camera for
+    // use cases such as rear view and surround view cannot be disabled.
+    if ((!isAutomotivePrivilegedClient(callingUid) || !isAutomotiveExteriorSystemCamera(id)) &&
+            mCameraServiceProxyWrapper->isCameraDisabled(clientUserId)) {
         String8 msg =
                 String8::format("Camera disabled by device policy");
         ALOGE("%s: %s", __FUNCTION__, msg.string());
@@ -1810,7 +1925,7 @@
 
     // enforce system camera permissions
     if (oomScoreOffset > 0 &&
-            !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid()) &&
+            !hasPermissionsForSystemCamera(id, callingPid, CameraThreadState::getCallingUid()) &&
             !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
         String8 msg =
                 String8::format("Cannot change the priority of a client %s pid %d for "
@@ -1823,7 +1938,8 @@
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
             /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient,clientFeatureId,
             clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
-            targetSdkVersion, overrideToPortrait, /*out*/client);
+            targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false,
+            /*out*/client);
 
     if(!ret.isOk()) {
         logRejected(id, callingPid, String8(clientPackageNameAdj), ret.toString8());
@@ -1847,6 +1963,13 @@
             ALOGE("%s: Error while creating the file: %s", __FUNCTION__, sFileName);
         }
     }
+    const sp<IServiceManager> sm(defaultServiceManager());
+    const auto& mActivityManager = getActivityManager();
+    if (mActivityManager) {
+        mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
+            CameraThreadState::getCallingUid(),
+            CameraThreadState::getCallingPid());
+    }
     return ret;
 }
 
@@ -1885,11 +2008,14 @@
         int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
         const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
         apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
-        bool overrideToPortrait, /*out*/sp<CLIENT>& device) {
+        bool overrideToPortrait, bool forceSlowJpegMode,
+        /*out*/sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
 
     bool isNonSystemNdk = false;
     String16 clientPackageName;
+    int packageUid = (clientUid == USE_CALLING_UID) ?
+            CameraThreadState::getCallingUid() : clientUid;
     if (clientPackageNameMaybe.size() <= 0) {
         // NDK calls don't come with package names, but we need one for various cases.
         // Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
@@ -1897,8 +2023,6 @@
         // same permissions, so picking any associated package name is sufficient. For some
         // other cases, this may give inaccurate names for clients in logs.
         isNonSystemNdk = true;
-        int packageUid = (clientUid == USE_CALLING_UID) ?
-            CameraThreadState::getCallingUid() : clientUid;
         clientPackageName = getPackageNameFromUid(packageUid);
     } else {
         clientPackageName = clientPackageNameMaybe;
@@ -2001,7 +2125,8 @@
                 clientFeatureId, cameraId, api1CameraId, facing, orientation,
                 clientPid, clientUid, getpid(),
                 deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
-                overrideToPortrait, /*out*/&tmp)).isOk()) {
+                overrideToPortrait, forceSlowJpegMode,
+                /*out*/&tmp)).isOk()) {
             return ret;
         }
         client = static_cast<CLIENT*>(tmp.get());
@@ -2093,32 +2218,38 @@
                     clientPackageName));
         }
 
-        // Set camera muting behavior
-        bool isCameraPrivacyEnabled =
-                mSensorPrivacyPolicy->isCameraPrivacyEnabled();
-        if (client->supportsCameraMute()) {
-            client->setCameraMute(
-                    mOverrideCameraMuteMode || isCameraPrivacyEnabled);
-        } else if (isCameraPrivacyEnabled) {
-            // no camera mute supported, but privacy is on! => disconnect
-            ALOGI("Camera mute not supported for package: %s, camera id: %s",
-                    String8(client->getPackageName()).string(), cameraId.string());
-            // Do not hold mServiceLock while disconnecting clients, but
-            // retain the condition blocking other clients from connecting
-            // in mServiceLockWrapper if held.
-            mServiceLock.unlock();
-            // Clear caller identity temporarily so client disconnect PID
-            // checks work correctly
-            int64_t token = CameraThreadState::clearCallingIdentity();
-            // Note AppOp to trigger the "Unblock" dialog
-            client->noteAppOp();
-            client->disconnect();
-            CameraThreadState::restoreCallingIdentity(token);
-            // Reacquire mServiceLock
-            mServiceLock.lock();
+        // An automotive privileged client (AID_AUTOMOTIVE_EVS) using an exterior system camera
+        // for use cases such as rear view and surround view cannot be disabled and is exempt
+        // from the camera privacy policy.
+        if ((!isAutomotivePrivilegedClient(packageUid) ||
+                !isAutomotiveExteriorSystemCamera(cameraId))) {
+            // Set camera muting behavior.
+            bool isCameraPrivacyEnabled =
+                    mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+            if (client->supportsCameraMute()) {
+                client->setCameraMute(
+                        mOverrideCameraMuteMode || isCameraPrivacyEnabled);
+            } else if (isCameraPrivacyEnabled) {
+                // no camera mute supported, but privacy is on! => disconnect
+                ALOGI("Camera mute not supported for package: %s, camera id: %s",
+                        String8(client->getPackageName()).string(), cameraId.string());
+                // Do not hold mServiceLock while disconnecting clients, but
+                // retain the condition blocking other clients from connecting
+                // in mServiceLockWrapper if held.
+                mServiceLock.unlock();
+                // Clear caller identity temporarily so client disconnect PID
+                // checks work correctly
+                int64_t token = CameraThreadState::clearCallingIdentity();
+                // Note AppOp to trigger the "Unblock" dialog
+                client->noteAppOp();
+                client->disconnect();
+                CameraThreadState::restoreCallingIdentity(token);
+                // Reacquire mServiceLock
+                mServiceLock.lock();
 
-            return STATUS_ERROR_FMT(ERROR_DISABLED,
-                    "Camera \"%s\" disabled due to camera mute", cameraId.string());
+                return STATUS_ERROR_FMT(ERROR_DISABLED,
+                        "Camera \"%s\" disabled due to camera mute", cameraId.string());
+            }
         }
 
         if (shimUpdateOnly) {
@@ -2133,6 +2264,7 @@
 
         client->setImageDumpMask(mImageDumpMask);
         client->setStreamUseCaseOverrides(mStreamUseCaseOverrides);
+        client->setZoomOverride(mZoomOverrideValue);
     } // lock is destroyed, allow further connect calls
 
     // Important: release the mutex here so the client can call back into the service from its
@@ -2575,6 +2707,19 @@
     }
 }
 
+void CameraService::notifyMonitoredUids(const std::unordered_set<uid_t> &notifyUidSet) {
+    Mutex::Autolock lock(mStatusListenerLock);
+
+    for (const auto& it : mListenerList) {
+        if (notifyUidSet.find(it->getListenerUid()) != notifyUidSet.end()) {
+            ALOGV("%s: notifying uid %d", __FUNCTION__, it->getListenerUid());
+            auto ret = it->getListener()->onCameraAccessPrioritiesChanged();
+            it->handleBinderStatus(ret, "%s: Failed to trigger permission callback for %d:%d: %d",
+                    __FUNCTION__, it->getListenerUid(), it->getListenerPid(), ret.exceptionCode());
+        }
+    }
+}
+
 Status CameraService::notifyDeviceStateChange(int64_t newState) {
     const int pid = CameraThreadState::getCallingPid();
     const int selfPid = getpid();
@@ -2707,13 +2852,11 @@
     // Check for camera permissions
     int callingPid = CameraThreadState::getCallingPid();
     int callingUid = CameraThreadState::getCallingUid();
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-                sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-                != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForCamera = checkPermission(String8(),
+                sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE);
     if ((callingPid != getpid()) && !checkPermissionForCamera) {
         ALOGE("%s: pid %d doesn't have camera permissions", __FUNCTION__, callingPid);
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
@@ -2761,13 +2904,13 @@
 
     auto clientUid = CameraThreadState::getCallingUid();
     auto clientPid = CameraThreadState::getCallingPid();
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.uid = clientUid;
     attributionSource.pid = clientPid;
-    bool openCloseCallbackAllowed = permissionChecker.checkPermissionForPreflight(
+
+    bool openCloseCallbackAllowed = checkPermission(String8(),
             sCameraOpenCloseListenerPermission, attributionSource, String16(),
-            AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+            AppOpsManager::OP_NONE);
 
     Mutex::Autolock lock(mServiceLock);
 
@@ -2796,7 +2939,7 @@
         // permissions the listener process has / whether it is a vendor listener. Since it might be
         // eligible to listen to other camera ids.
         mListenerList.emplace_back(serviceListener);
-        mUidPolicy->registerMonitorUid(clientUid);
+        mUidPolicy->registerMonitorUid(clientUid, /*openCamera*/false);
     }
 
     /* Collect current devices and status */
@@ -2864,7 +3007,7 @@
         Mutex::Autolock lock(mStatusListenerLock);
         for (auto it = mListenerList.begin(); it != mListenerList.end(); it++) {
             if (IInterface::asBinder((*it)->getListener()) == IInterface::asBinder(listener)) {
-                mUidPolicy->unregisterMonitorUid((*it)->getListenerUid());
+                mUidPolicy->unregisterMonitorUid((*it)->getListenerUid(), /*closeCamera*/false);
                 IInterface::asBinder(listener)->unlinkToDeath(*it);
                 mListenerList.erase(it);
                 return Status::ok();
@@ -3034,6 +3177,13 @@
     return binder::Status::ok();
 }
 
+Status CameraService::reportExtensionSessionStats(
+        const hardware::CameraExtensionSessionStats& stats, String16* sessionKey /*out*/) {
+    ALOGV("%s: reported %s", __FUNCTION__, stats.toString().c_str());
+    *sessionKey = mCameraServiceProxyWrapper->updateExtensionStats(stats);
+    return Status::ok();
+}
+
 void CameraService::removeByClient(const BasicClient* client) {
     Mutex::Autolock lock(mServiceLock);
     for (auto& i : mActiveClientManager.getAll()) {
@@ -3511,6 +3661,13 @@
     // client shouldn't be able to call into us anymore
     mClientPid = 0;
 
+    const auto& mActivityManager = getActivityManager();
+    if (mActivityManager) {
+        mActivityManager->logFgsApiEnd(LOG_FGS_CAMERA_API,
+            CameraThreadState::getCallingUid(),
+            CameraThreadState::getCallingPid());
+    }
+
     return res;
 }
 
@@ -3646,7 +3803,7 @@
     // Transition device availability listeners from PRESENT -> NOT_AVAILABLE
     sCameraService->updateStatus(StatusInternal::NOT_AVAILABLE, mCameraIdStr);
 
-    sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
+    sCameraService->mUidPolicy->registerMonitorUid(mClientUid, /*openCamera*/true);
 
     // Notify listeners of camera open/close status
     sCameraService->updateOpenCloseStatus(mCameraIdStr, true/*open*/, mClientPackageName);
@@ -3754,7 +3911,7 @@
     }
     mOpsCallback.clear();
 
-    sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid);
+    sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid, /*closeCamera*/true);
 
     // Notify listeners of camera open/close status
     sCameraService->updateOpenCloseStatus(mCameraIdStr, false/*open*/, mClientPackageName);
@@ -3863,18 +4020,21 @@
 
 void CameraService::UidPolicy::registerWithActivityManager() {
     Mutex::Autolock _l(mUidLock);
+    int32_t emptyUidArray[] = { };
 
     if (mRegistered) return;
     status_t res = mAm.linkToDeath(this);
-    mAm.registerUidObserver(this, ActivityManager::UID_OBSERVER_GONE
+    mAm.registerUidObserverForUids(this, ActivityManager::UID_OBSERVER_GONE
             | ActivityManager::UID_OBSERVER_IDLE
             | ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE
             | ActivityManager::UID_OBSERVER_PROC_OOM_ADJ,
             ActivityManager::PROCESS_STATE_UNKNOWN,
-            String16("cameraserver"));
+            kServiceName, emptyUidArray, 0, mObserverToken);
     if (res == OK) {
         mRegistered = true;
         ALOGV("UidPolicy: Registered with ActivityManager");
+    } else {
+        ALOGE("UidPolicy: Failed to register with ActivityManager: 0x%08x", res);
     }
 }
 
@@ -3954,24 +4114,54 @@
     }
 }
 
-void CameraService::UidPolicy::onUidProcAdjChanged(uid_t uid) {
-    bool procAdjChange = false;
+/**
+ * When the OOM adj of the uid owning the camera changes, a different uid waiting on camera
+ * privileges may take precedence if the owner's new OOM adj is greater than that of the waiting
+ * package.
+ * Here, we track which monitoredUid has the camera, and track its adj relative to other
+ * monitoredUids. If it is revised above some other monitoredUid, signal
+ * onCameraAccessPrioritiesChanged. This only needs to capture the case where there are two
+ * foreground apps in split screen - state changes will capture all other cases.
+ */
+void CameraService::UidPolicy::onUidProcAdjChanged(uid_t uid, int32_t adj) {
+    std::unordered_set<uid_t> notifyUidSet;
     {
         Mutex::Autolock _l(mUidLock);
-        if (mMonitoredUids.find(uid) != mMonitoredUids.end()) {
-            procAdjChange = true;
+        auto it = mMonitoredUids.find(uid);
+
+        if (it != mMonitoredUids.end()) {
+            if (it->second.hasCamera) {
+                for (auto &monitoredUid : mMonitoredUids) {
+                    if (monitoredUid.first != uid && adj > monitoredUid.second.procAdj) {
+                        ALOGV("%s: notify uid %d", __FUNCTION__, monitoredUid.first);
+                        notifyUidSet.emplace(monitoredUid.first);
+                    }
+                }
+                ALOGV("%s: notify uid %d", __FUNCTION__, uid);
+                notifyUidSet.emplace(uid);
+            } else {
+                for (auto &monitoredUid : mMonitoredUids) {
+                    if (monitoredUid.second.hasCamera && adj < monitoredUid.second.procAdj) {
+                        ALOGV("%s: notify uid %d", __FUNCTION__, uid);
+                        notifyUidSet.emplace(uid);
+                    }
+                }
+            }
+            it->second.procAdj = adj;
         }
     }
 
-    if (procAdjChange) {
+    if (notifyUidSet.size() > 0) {
         sp<CameraService> service = mService.promote();
         if (service != nullptr) {
-            service->notifyMonitoredUids();
+            service->notifyMonitoredUids(notifyUidSet);
         }
     }
 }
 
-void CameraService::UidPolicy::registerMonitorUid(uid_t uid) {
+/**
+ * Register a uid for monitoring, and note whether it owns a camera.
+ */
+void CameraService::UidPolicy::registerMonitorUid(uid_t uid, bool openCamera) {
     Mutex::Autolock _l(mUidLock);
     auto it = mMonitoredUids.find(uid);
     if (it != mMonitoredUids.end()) {
@@ -3979,18 +4169,36 @@
     } else {
         MonitoredUid monitoredUid;
         monitoredUid.procState = ActivityManager::PROCESS_STATE_NONEXISTENT;
+        monitoredUid.procAdj = resource_policy::UNKNOWN_ADJ;
         monitoredUid.refCount = 1;
-        mMonitoredUids.emplace(std::pair<uid_t, MonitoredUid>(uid, monitoredUid));
+        it = mMonitoredUids.emplace(std::pair<uid_t, MonitoredUid>(uid, monitoredUid)).first;
+        status_t res = mAm.addUidToObserver(mObserverToken, kServiceName, uid);
+        if (res != OK) {
+            ALOGE("UidPolicy: Failed to add uid to observer: 0x%08x", res);
+        }
+    }
+
+    if (openCamera) {
+        it->second.hasCamera = true;
     }
 }
 
-void CameraService::UidPolicy::unregisterMonitorUid(uid_t uid) {
+/**
+ * Unregister a uid for monitoring, and note whether it lost ownership of a camera.
+ */
+void CameraService::UidPolicy::unregisterMonitorUid(uid_t uid, bool closeCamera) {
     Mutex::Autolock _l(mUidLock);
     auto it = mMonitoredUids.find(uid);
     if (it != mMonitoredUids.end()) {
         it->second.refCount--;
         if (it->second.refCount == 0) {
             mMonitoredUids.erase(it);
+            status_t res = mAm.removeUidFromObserver(mObserverToken, kServiceName, uid);
+            if (res != OK) {
+                ALOGE("UidPolicy: Failed to remove uid from observer: 0x%08x", res);
+            }
+        } else if (closeCamera) {
+            it->second.hasCamera = false;
         }
     } else {
         ALOGE("%s: Trying to unregister uid: %d which is not monitored!", __FUNCTION__, uid);
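The onUidProcAdjChanged rework above boils down to a simple rule: notify when the relative ordering between the camera owner and a waiter may have flipped. A condensed, self-contained model of that rule (lower OOM adj means more important; names here are illustrative, not the service's actual helpers):

#include <cstdint>

// For a pair of monitored uids, decide whether the waiter in the pair should be
// told that camera access priorities may have changed, after 'changed' had its
// OOM adj updated to 'newAdj'.
bool shouldNotifyWaiter(bool changedHasCamera, int32_t newAdj,
                        bool otherHasCamera, int32_t otherAdj) {
    if (changedHasCamera) {
        // The owner was demoted below the waiting uid: notify the waiter (and,
        // in the real code, the owner itself as well).
        return newAdj > otherAdj;
    }
    // The changed uid is the waiter: notify it if it was promoted above a uid
    // that currently holds the camera.
    return otherHasCamera && newAdj < otherAdj;
}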
@@ -5090,6 +5298,8 @@
     } else if (args.size() >= 1 && args[0] == String16("clear-stream-use-case-override")) {
         handleClearStreamUseCaseOverrides();
         return OK;
+    } else if (args.size() >= 1 && args[0] == String16("set-zoom-override")) {
+        return handleSetZoomOverride(args);
     } else if (args.size() >= 2 && args[0] == String16("watch")) {
         return handleWatchCommand(args, in, out);
     } else if (args.size() >= 2 && args[0] == String16("set-watchdog")) {
@@ -5333,6 +5543,34 @@
     mStreamUseCaseOverrides.clear();
 }
 
+status_t CameraService::handleSetZoomOverride(const Vector<String16>& args) {
+    char* end;
+    int zoomOverrideValue = strtol(String8(args[1]), &end, /*base=*/10);
+    if ((*end != '\0') ||
+            (zoomOverrideValue != -1 &&
+             zoomOverrideValue != ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF &&
+             zoomOverrideValue != ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM)) {
+        return BAD_VALUE;
+    }
+
+    Mutex::Autolock lock(mServiceLock);
+    mZoomOverrideValue = zoomOverrideValue;
+
+    const auto clients = mActiveClientManager.getAll();
+    for (auto& current : clients) {
+        if (current != nullptr) {
+            const auto basicClient = current->getValue();
+            if (basicClient.get() != nullptr) {
+                if (basicClient->supportsZoomOverride()) {
+                    basicClient->setZoomOverride(mZoomOverrideValue);
+                }
+            }
+        }
+    }
+
+    return OK;
+}
+
 status_t CameraService::handleWatchCommand(const Vector<String16>& args, int inFd, int outFd) {
     if (args.size() >= 3 && args[1] == String16("start")) {
         return startWatchingTags(args, outFd);
@@ -5699,6 +5937,8 @@
         "      Valid values are (case sensitive): DEFAULT, PREVIEW, STILL_CAPTURE, VIDEO_RECORD,\n"
         "      PREVIEW_VIDEO_STILL, VIDEO_CALL, CROPPED_RAW\n"
         "  clear-stream-use-case-override clear the stream use case override\n"
+        "  set-zoom-override <-1/0/1> enable or disable zoom override\n"
+        "      Valid values -1: do not override, 0: override to OFF, 1: override to ZOOM\n"
         "  watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
         "  help print this message\n");
 }
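For reference, the new override goes through the same shell command path as the other overrides handled here, so on a device it can be exercised through the camera service's cmd interface; assuming the usual service name, something along the lines of:

    adb shell cmd media.camera set-zoom-override 1    # override to ZOOM for clients that support it
    adb shell cmd media.camera set-zoom-override -1   # stop overriding

The exact invocation may vary by build; this is a usage sketch, not output from this patch.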
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 59c5534..38336ee 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA_CAMERASERVICE_H
 #define ANDROID_SERVERS_CAMERA_CAMERASERVICE_H
 
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/BnCameraService.h>
 #include <android/hardware/BnSensorPrivacyListener.h>
 #include <android/hardware/ICameraServiceListener.h>
@@ -29,6 +30,8 @@
 #include <binder/ActivityManager.h>
 #include <binder/AppOpsManager.h>
 #include <binder/BinderService.h>
+#include <binder/IServiceManager.h>
+#include <binder/IActivityManager.h>
 #include <binder/IAppOpsCallback.h>
 #include <binder/IUidObserver.h>
 #include <hardware/camera.h>
@@ -152,7 +155,7 @@
     virtual binder::Status     connect(const sp<hardware::ICameraClient>& cameraClient,
             int32_t cameraId, const String16& clientPackageName,
             int32_t clientUid, int clientPid, int targetSdkVersion,
-            bool overrideToPortrait,
+            bool overrideToPortrait, bool forceSlowJpegMode,
             /*out*/
             sp<hardware::ICamera>* device) override;
 
@@ -216,6 +219,9 @@
             /*out*/
             sp<hardware::camera2::ICameraInjectionSession>* cameraInjectionSession);
 
+    virtual binder::Status reportExtensionSessionStats(
+            const hardware::CameraExtensionSessionStats& stats, String16* sessionKey /*out*/);
+
     // Extra permissions checks
     virtual status_t    onTransact(uint32_t code, const Parcel& data,
                                    Parcel* reply, uint32_t flags);
@@ -231,6 +237,7 @@
 
     // Monitored UIDs availability notification
     void                notifyMonitoredUids();
+    void                notifyMonitoredUids(const std::unordered_set<uid_t> &notifyUidSet);
 
     // Stores current open session device info in temp file.
     void cacheDump();
@@ -369,6 +376,12 @@
         // Clear stream use case overrides
         virtual void clearStreamUseCaseOverrides() = 0;
 
+        // Whether the client supports camera zoom override
+        virtual bool supportsZoomOverride() = 0;
+
+        // Set/reset zoom override
+        virtual status_t setZoomOverride(int32_t zoomOverride) = 0;
+
         // The injection camera session to replace the internal camera
         // session.
         virtual status_t injectCamera(const String8& injectedCamId,
@@ -595,8 +608,45 @@
     int32_t updateAudioRestrictionLocked();
 
 private:
+    /**
+     * Returns true if the device is an automotive device and cameraId is a
+     * system-only camera whose AUTOMOTIVE_LOCATION characteristic is one of
+     * AUTOMOTIVE_LOCATION_EXTERIOR_LEFT, AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT,
+     * AUTOMOTIVE_LOCATION_EXTERIOR_FRONT or AUTOMOTIVE_LOCATION_EXTERIOR_REAR.
+     */
+    bool isAutomotiveExteriorSystemCamera(const String8& cameraId) const;
+
+    // TODO: b/263304156 update this to make use of a death callback for more
+    // robust/fault tolerant logging
+    static const sp<IActivityManager>& getActivityManager() {
+        static const char* kActivityService = "activity";
+        static const auto activityManager = []() -> sp<IActivityManager> {
+            const sp<IServiceManager> sm(defaultServiceManager());
+            if (sm != nullptr) {
+                 return interface_cast<IActivityManager>(sm->checkService(String16(kActivityService)));
+            }
+            return nullptr;
+        }();
+        return activityManager;
+    }
 
     /**
+     * Pre-grants the permission if the attribution source uid belongs to an automotive
+     * privileged client. Otherwise uses the system service permission checker to check
+     * for the appropriate permission. If this function is called to access a specific
+     * camera, then the cameraId must not be empty. The cameraId is used only for automotive
+     * privileged clients so that the permission is pre-granted only for access to a system
+     * camera device located outside of the vehicle body frame, because a camera located
+     * inside the vehicle cabin would need user permission.
+     */
+    bool checkPermission(const String8& cameraId, const String16& permission,
+            const content::AttributionSourceState& attributionSource, const String16& message,
+            int32_t attributedOpCode) const;
+
+    bool hasPermissionsForSystemCamera(const String8& cameraId, int callingPid, int callingUid)
+            const;
+
+    /**
      * Typesafe version of device status, containing both the HAL-layer and the service interface-
      * layer values.
      */
@@ -741,13 +791,13 @@
         void onUidIdle(uid_t uid, bool disabled) override;
         void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
                 int32_t capability) override;
-        void onUidProcAdjChanged(uid_t uid) override;
+        void onUidProcAdjChanged(uid_t uid, int adj) override;
 
         void addOverrideUid(uid_t uid, String16 callingPackage, bool active);
         void removeOverrideUid(uid_t uid, String16 callingPackage);
 
-        void registerMonitorUid(uid_t uid);
-        void unregisterMonitorUid(uid_t uid);
+        void registerMonitorUid(uid_t uid, bool openCamera);
+        void unregisterMonitorUid(uid_t uid, bool closeCamera);
 
         // Implementation of IServiceManager::LocalRegistrationCallback
         virtual void onServiceRegistration(const String16& name,
@@ -762,6 +812,8 @@
 
         struct MonitoredUid {
             int32_t procState;
+            int32_t procAdj;
+            bool hasCamera;
             size_t refCount;
         };
 
@@ -773,6 +825,7 @@
         // Monitored uid map
         std::unordered_map<uid_t, MonitoredUid> mMonitoredUids;
         std::unordered_map<uid_t, bool> mOverrideUids;
+        sp<IBinder> mObserverToken;
     }; // class UidPolicy
 
     // If sensor privacy is enabled then all apps, including those that are active, should be
@@ -856,7 +909,7 @@
     // Should a device status update be skipped for a particular camera device ? (this can happen
     // under various conditions. For example if a camera device is advertised as
     // system only or hidden secure camera, amongst possible others.
-    static bool shouldSkipStatusUpdates(SystemCameraKind systemCameraKind, bool isVendorListener,
+    bool shouldSkipStatusUpdates(SystemCameraKind systemCameraKind, bool isVendorListener,
             int clientPid, int clientUid);
 
     // Gets the kind of camera device (i.e public, hidden secure or system only)
@@ -887,7 +940,8 @@
             int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
             const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
-            bool overrideToPortrait, /*out*/sp<CLIENT>& device);
+            bool overrideToPortrait, bool forceSlowJpegMode,
+            /*out*/sp<CLIENT>& device);
 
     // Lock guarding camera service state
     Mutex               mServiceLock;
@@ -1289,6 +1343,9 @@
     // Clear the stream use case overrides
     void handleClearStreamUseCaseOverrides();
 
+    // Set or clear the zoom override flag
+    status_t handleSetZoomOverride(const Vector<String16>& args);
+
     // Handle 'watch' command as passed through 'cmd'
     status_t handleWatchCommand(const Vector<String16> &args, int inFd, int outFd);
 
@@ -1340,7 +1397,8 @@
             const String8& cameraId, int api1CameraId, int facing, int sensorOrientation,
             int clientPid, uid_t clientUid, int servicePid,
             std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
-            bool overrideForPerfClass, bool overrideToPortrait, /*out*/sp<BasicClient>* client);
+            bool overrideForPerfClass, bool overrideToPortrait, bool forceSlowJpegMode,
+            /*out*/sp<BasicClient>* client);
 
     status_t checkCameraAccess(const String16& opPackageName);
 
@@ -1390,6 +1448,9 @@
     // Current stream use case overrides
     std::vector<int64_t> mStreamUseCaseOverrides;
 
+    // Current zoom override value
+    int32_t mZoomOverrideValue = -1;
+
     /**
      * A listener class that implements the IBinder::DeathRecipient interface
      * for use to call back the error state injected by the external camera, and
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.cpp b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
index e80064a..1c1bd24 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.cpp
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
@@ -17,6 +17,7 @@
 #define LOG_TAG "CameraServiceWatchdog"
 
 #include "CameraServiceWatchdog.h"
+#include "android/set_abort_message.h"
 #include "utils/CameraServiceProxyWrapper.h"
 
 namespace android {
@@ -36,12 +37,14 @@
     {
         AutoMutex _l(mWatchdogLock);
 
-        for (auto it = tidToCycleCounterMap.begin(); it != tidToCycleCounterMap.end(); it++) {
+        for (auto it = mTidMap.begin(); it != mTidMap.end(); it++) {
             uint32_t currentThreadId = it->first;
 
-            tidToCycleCounterMap[currentThreadId]++;
+            mTidMap[currentThreadId].cycles++;
 
-            if (tidToCycleCounterMap[currentThreadId] >= mMaxCycles) {
+            if (mTidMap[currentThreadId].cycles >= mMaxCycles) {
+                std::string abortMessage = getAbortMessage(mTidMap[currentThreadId].functionName);
+                android_set_abort_message(abortMessage.c_str());
                 ALOGW("CameraServiceWatchdog triggering abort for pid: %d tid: %d", getpid(),
                         currentThreadId);
                 mCameraServiceProxyWrapper->logClose(mCameraId, 0 /*latencyMs*/,
@@ -56,13 +59,19 @@
     return true;
 }
 
+std::string CameraServiceWatchdog::getAbortMessage(const std::string& functionName) {
+    std::string res = "CameraServiceWatchdog triggering abort during "
+            + functionName;
+    return res;
+}
+
 void CameraServiceWatchdog::requestExit()
 {
     Thread::requestExit();
 
     AutoMutex _l(mWatchdogLock);
 
-    tidToCycleCounterMap.clear();
+    mTidMap.clear();
 
     if (mPause) {
         mPause = false;
@@ -85,18 +94,21 @@
 {
     AutoMutex _l(mWatchdogLock);
 
-    tidToCycleCounterMap.erase(tid);
+    mTidMap.erase(tid);
 
-    if (tidToCycleCounterMap.empty()) {
+    if (mTidMap.empty()) {
         mPause = true;
     }
 }
 
-void CameraServiceWatchdog::start(uint32_t tid)
+void CameraServiceWatchdog::start(uint32_t tid, const char* functionName)
 {
     AutoMutex _l(mWatchdogLock);
 
-    tidToCycleCounterMap[tid] = 0;
+    MonitoredFunction monitoredFunction = {};
+    monitoredFunction.cycles = 0;
+    monitoredFunction.functionName = functionName;
+    mTidMap[tid] = monitoredFunction;
 
     if (mPause) {
         mPause = false;
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
index 6617873..de6ac9e 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.h
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -40,9 +40,9 @@
 #include "utils/CameraServiceProxyWrapper.h"
 
 // Used to wrap the call of interest in start and stop calls
-#define WATCH(toMonitor) watchThread([&]() { return toMonitor;}, gettid())
+#define WATCH(toMonitor) watchThread([&]() { return toMonitor;}, gettid(), __FUNCTION__)
 #define WATCH_CUSTOM_TIMER(toMonitor, cycles, cycleLength) \
-        watchThread([&]() { return toMonitor;}, gettid(), cycles, cycleLength);
+        watchThread([&]() { return toMonitor;}, gettid(), __FUNCTION__, cycles, cycleLength);
 
 // Default cycles and cycle length values used to calculate permitted elapsed time
 const static size_t   kMaxCycles     = 100;
@@ -52,6 +52,11 @@
 
 class CameraServiceWatchdog : public Thread {
 
+struct MonitoredFunction {
+    uint32_t cycles;
+    std::string functionName;
+};
+
 public:
     explicit CameraServiceWatchdog(const String8 &cameraId,
             std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
@@ -75,7 +80,8 @@
 
     /** Used to wrap monitored calls in start and stop functions using custom timer values */
     template<typename T>
-    auto watchThread(T func, uint32_t tid, uint32_t cycles, uint32_t cycleLength) {
+    auto watchThread(T func, uint32_t tid, const char* functionName, uint32_t cycles,
+            uint32_t cycleLength) {
         decltype(func()) res;
 
         if (cycles != mMaxCycles || cycleLength != mCycleLengthMs) {
@@ -91,17 +97,17 @@
             status_t status = tempWatchdog->run("CameraServiceWatchdog");
             if (status != OK) {
                 ALOGE("Unable to watch thread: %s (%d)", strerror(-status), status);
-                res = watchThread(func, tid);
+                res = watchThread(func, tid, functionName);
                 return res;
             }
 
-            res = tempWatchdog->watchThread(func, tid);
+            res = tempWatchdog->watchThread(func, tid, functionName);
             tempWatchdog->requestExit();
             tempWatchdog.clear();
         } else {
             // If custom timer values are equivalent to set class timer values, use
             // current thread
-            res = watchThread(func, tid);
+            res = watchThread(func, tid, functionName);
         }
 
         return res;
@@ -109,12 +115,12 @@
 
     /** Used to wrap monitored calls in start and stop functions using class timer values */
     template<typename T>
-    auto watchThread(T func, uint32_t tid) {
+    auto watchThread(T func, uint32_t tid, const char* functionName) {
         decltype(func()) res;
         AutoMutex _l(mEnabledLock);
 
         if (mEnabled) {
-            start(tid);
+            start(tid, functionName);
             res = func();
             stop(tid);
         } else {
@@ -130,7 +136,7 @@
      * Start adds a cycle counter for the calling thread. When threadloop is blocked/paused,
      * start() unblocks and starts the watchdog
      */
-    void start(uint32_t tid);
+    void start(uint32_t tid, const char* functionName);
 
     /**
      * If there are no calls left to be monitored, stop blocks/pauses threadloop
@@ -138,6 +144,8 @@
      */
     void stop(uint32_t tid);
 
+    std::string getAbortMessage(const std::string& functionName);
+
     virtual bool    threadLoop();
 
     Mutex           mWatchdogLock;      // Lock for condition variable
@@ -151,7 +159,9 @@
 
     std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
 
-    std::unordered_map<uint32_t, uint32_t> tidToCycleCounterMap; // Thread Id to cycle counter map
+    std::unordered_map<uint32_t, MonitoredFunction> mTidMap; // Thread Id to MonitoredFunction map
+                                                             // which tracks the number of cycles
+                                                             // and the name of the function
 };
 
 }   // namespace android
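The WATCH macro change above threads the caller's __FUNCTION__ down to start(), so the watchdog can name the stalled call when it aborts. A minimal, self-contained imitation of that pattern (not the real watchdog, just the macro-plus-lambda shape):

#include <cstdio>

// Minimal stand-in for the pattern above: the macro captures __FUNCTION__ at the
// call site so the monitoring code can name the stalled call in its abort message.
#define WATCH_DEMO(toMonitor) watchDemo([&]() { return toMonitor; }, __FUNCTION__)

template <typename T>
auto watchDemo(T func, const char* functionName) {
    std::printf("watchdog: monitoring %s\n", functionName);  // analogous to start(tid, functionName)
    auto res = func();                                        // the monitored call
    std::printf("watchdog: %s finished\n", functionName);     // analogous to stop(tid)
    return res;
}

int doDisconnect() { return 0; }  // hypothetical monitored operation

int main() {
    return WATCH_DEMO(doDisconnect());  // prints "watchdog: monitoring main" and "watchdog: main finished"
}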
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
index b9f1224..402f8a2 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
@@ -19,6 +19,7 @@
 #include "AidlCameraDeviceUser.h"
 #include <aidl/AidlUtils.h>
 #include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
+#include <android-base/properties.h>
 
 namespace android::frameworks::cameraservice::device::implementation {
 
@@ -35,7 +36,7 @@
 using ::android::hardware::cameraservice::utils::conversion::aidl::cloneToAidl;
 using ::android::hardware::cameraservice::utils::conversion::aidl::convertFromAidl;
 using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
-using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
 using ::ndk::ScopedAStatus;
 
 namespace {
@@ -55,6 +56,7 @@
 AidlCameraDeviceUser::AidlCameraDeviceUser(const sp<UICameraDeviceUser>& deviceRemote):
       mDeviceRemote(deviceRemote) {
     mInitSuccess = initDevice();
+    mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
 }
 
 bool AidlCameraDeviceUser::initDevice() {
@@ -171,6 +173,13 @@
         ALOGE("%s: Failed to create default request: %s", __FUNCTION__, ret.toString8().string());
         return fromUStatus(ret);
     }
+
+    if (filterVndkKeys(mVndkVersion, metadata, /*isStatic*/false) != OK) {
+        ALOGE("%s: Unable to filter vndk metadata keys for version %d",
+              __FUNCTION__, mVndkVersion);
+        return fromSStatus(SStatus::UNKNOWN_ERROR);
+    }
+
     const camera_metadata_t* rawMetadata = metadata.getAndLock();
     cloneToAidl(rawMetadata, _aidl_return);
     metadata.unlock(rawMetadata);
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
index afff197..8014951 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
@@ -109,6 +109,7 @@
     std::shared_ptr<CaptureResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
     bool mInitSuccess = false;
     int32_t mRequestId = REQUEST_ID_NONE;
+    int mVndkVersion = -1;
 };
 
 } // namespace android::frameworks::cameraservice::device::implementation
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index b33303e..06ef88a 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -63,7 +63,8 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
-        bool overrideToPortrait):
+        bool overrideToPortrait,
+        bool forceSlowJpegMode):
         Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper, clientPackageName,
                 false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
                 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
@@ -79,6 +80,9 @@
 
     SharedParameters::Lock l(mParameters);
     l.mParameters.state = Parameters::DISCONNECTED;
+    if (forceSlowJpegMode) {
+        l.mParameters.isSlowJpegModeForced = true;
+    }
 }
 
 status_t Camera2Client::initialize(sp<CameraProviderManager> manager, const String8& monitorTags) {
@@ -2387,6 +2391,14 @@
     mDevice->clearStreamUseCaseOverrides();
 }
 
+bool Camera2Client::supportsZoomOverride() {
+    return mDevice->supportsZoomOverride();
+}
+
+status_t  Camera2Client::setZoomOverride(int zoomOverride) {
+    return mDevice->setZoomOverride(zoomOverride);
+}
+
 status_t Camera2Client::waitUntilCurrentRequestIdLocked() {
     int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
     if (activeRequestId != 0) {
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index f035fea..6bdb644 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -97,6 +97,9 @@
                                     const std::vector<int64_t>& useCaseOverrides);
     virtual void            clearStreamUseCaseOverrides();
 
+    virtual bool            supportsZoomOverride();
+    virtual status_t        setZoomOverride(int32_t zoomOverride);
+
     /**
      * Interface used by CameraService
      */
@@ -114,7 +117,8 @@
             uid_t clientUid,
             int servicePid,
             bool overrideForPerfClass,
-            bool overrideToPortrait);
+            bool overrideToPortrait,
+            bool forceSlowJpegMode);
 
     virtual ~Camera2Client();
 
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
index 9475a39..f39599f 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
@@ -113,7 +113,7 @@
     static const nsecs_t kWaitDuration = 100000000; // 100 ms
     static const nsecs_t kIdleWaitDuration = 10000000; // 10 ms
     static const int kMaxTimeoutsForPrecaptureStart = 10; // 1 sec
-    static const int kMaxTimeoutsForPrecaptureEnd = 20;  // 2 sec
+    static const int kMaxTimeoutsForPrecaptureEnd = 40;  // 4 sec
     static const int kMaxTimeoutsForCaptureEnd    = 40;  // 4 sec
     static const int kMaxRetryCount = 3; // 3 retries in case of buffer drop
 
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 8b2af90..23570c2 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -990,9 +990,8 @@
     Size maxJpegSize = getMaxSize(getAvailableJpegSizes());
     int64_t minFrameDurationNs = getJpegStreamMinFrameDurationNs(maxJpegSize);
 
-    slowJpegMode = false;
-    if (minFrameDurationNs > kSlowJpegModeThreshold) {
-        slowJpegMode = true;
+    slowJpegMode = isSlowJpegModeForced || minFrameDurationNs > kSlowJpegModeThreshold;
+    if (slowJpegMode) {
         // Slow jpeg devices does not support video snapshot without
         // slowing down preview.
         // TODO: support video size video snapshot only?
@@ -2089,7 +2088,7 @@
     paramsFlattened = newParams.flatten();
     params = newParams;
 
-    slowJpegMode = false;
+    slowJpegMode = isSlowJpegModeForced;
     Size pictureSize = { pictureWidth, pictureHeight };
     bool zslFrameRateSupported = false;
     int64_t jpegMinFrameDurationNs = getJpegStreamMinFrameDurationNs(pictureSize);
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index fd18a5d..2bd3d43 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -183,6 +183,8 @@
     bool isDeviceZslSupported;
     // Whether the device supports geometric distortion correction
     bool isDistortionCorrectionSupported;
+    // Whether slowJpegMode is forced regardless of jpeg stream FPS
+    bool isSlowJpegModeForced;
 
     // Overall camera state
     enum State {
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index d4ac88d..38c615d 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -956,7 +956,8 @@
             camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
     bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
     bool isJpegRCompositeStream =
-        camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0]);
+        camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0]) &&
+        !mDevice->isCompositeJpegRDisabled();
     if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
         sp<CompositeStream> compositeStream;
         if (isDepthCompositeStream) {
@@ -1071,7 +1072,7 @@
             outputConfiguration.getSensorPixelModesUsed();
     if (SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
             sensorPixelModesUsed, format, width, height, getStaticInfo(cameraIdUsed),
-            /*allowRounding*/ false, &overriddenSensorPixelModesUsed) != OK) {
+            &overriddenSensorPixelModesUsed) != OK) {
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "sensor pixel modes used not valid for deferred stream");
     }
@@ -1788,6 +1789,14 @@
     mDevice->clearStreamUseCaseOverrides();
 }
 
+bool CameraDeviceClient::supportsZoomOverride() {
+    return mDevice->supportsZoomOverride();
+}
+
+status_t CameraDeviceClient::setZoomOverride(int32_t zoomOverride) {
+    return mDevice->setZoomOverride(zoomOverride);
+}
+
 binder::Status CameraDeviceClient::switchToOffline(
         const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
         const std::vector<int>& offlineOutputIds,
@@ -1841,7 +1850,8 @@
             sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
             isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
                 camera3::HeicCompositeStream::isHeicCompositeStream(s) ||
-                camera3::JpegRCompositeStream::isJpegRCompositeStream(s);
+                (camera3::JpegRCompositeStream::isJpegRCompositeStream(s) &&
+                 !mDevice->isCompositeJpegRDisabled());
             if (isCompositeStream) {
                 auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
                 if (compositeIdx == NAME_NOT_FOUND) {
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index c6688a5..1533cf5 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -204,6 +204,9 @@
     virtual bool          supportsCameraMute();
     virtual status_t      setCameraMute(bool enabled);
 
+    virtual bool          supportsZoomOverride() override;
+    virtual status_t      setZoomOverride(int32_t zoomOverride) override;
+
     virtual status_t      dump(int fd, const Vector<String16>& args);
 
     virtual status_t      dumpClient(int fd, const Vector<String16>& args);
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index d798632..86a0ebc 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -106,6 +106,13 @@
 void CameraOfflineSessionClient::clearStreamUseCaseOverrides() {
 }
 
+bool CameraOfflineSessionClient::supportsZoomOverride() {
+    return false;
+}
+
+status_t CameraOfflineSessionClient::setZoomOverride(int32_t /*zoomOverride*/) {
+    return INVALID_OPERATION;
+}
 
 status_t CameraOfflineSessionClient::dump(int fd, const Vector<String16>& args) {
     return BasicClient::dump(fd, args);
@@ -257,7 +264,7 @@
     mOpsActive = true;
 
     // Transition device state to OPEN
-    sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
+    sCameraService->mUidPolicy->registerMonitorUid(mClientUid, /*openCamera*/true);
 
     return OK;
 }
@@ -281,7 +288,7 @@
     }
     mOpsCallback.clear();
 
-    sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid);
+    sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid, /*closeCamera*/true);
 
     return OK;
 }
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 906d454..ad763f9 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -94,6 +94,10 @@
 
     void clearStreamUseCaseOverrides() override;
 
+    bool supportsZoomOverride() override;
+
+    status_t setZoomOverride(int32_t zoomOverride) override;
+
     // permissions management
     status_t startCameraOps() override;
     status_t finishCameraOps() override;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index e652546..694aff3 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -120,7 +120,7 @@
         camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds,
-        int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
+        int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
         int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
     sp<CameraDeviceBase> device = mDevice.promote();
     if (!device.get()) {
@@ -153,7 +153,7 @@
             ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
             OutputConfiguration::MIRROR_MODE_AUTO,
-            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+            colorSpace,
             useReadoutTimestamp);
     if (res == OK) {
         mAppSegmentSurfaceId = (*surfaceIds)[0];
@@ -196,7 +196,7 @@
             ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
             OutputConfiguration::MIRROR_MODE_AUTO,
-            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+            colorSpace,
             useReadoutTimestamp);
     if (res == OK) {
         mMainImageSurfaceId = sourceSurfaceId[0];
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index 5794747..6588470 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -26,7 +26,7 @@
 
 #include "common/CameraProviderManager.h"
 #include <gui/Surface.h>
-#include <jpegrecoverymap/jpegr.h>
+#include <ultrahdr/jpegr.h>
 #include <utils/ExifUtils.h>
 #include <utils/Log.h>
 #include "utils/SessionConfigurationUtils.h"
@@ -292,37 +292,34 @@
     }
 
     size_t actualJpegRSize = 0;
-    jpegrecoverymap::jpegr_uncompressed_struct p010;
-    jpegrecoverymap::jpegr_compressed_struct jpegR;
-    jpegrecoverymap::JpegR jpegREncoder;
+    ultrahdr::jpegr_uncompressed_struct p010;
+    ultrahdr::jpegr_compressed_struct jpegR;
+    ultrahdr::JpegR jpegREncoder;
 
     p010.height = inputFrame.p010Buffer.height;
     p010.width = inputFrame.p010Buffer.width;
-    p010.colorGamut = jpegrecoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_BT2100;
-    size_t yChannelSizeInByte = p010.width * p010.height * 2;
-    size_t uvChannelSizeInByte = p010.width * p010.height;
-    p010.data = new uint8_t[yChannelSizeInByte + uvChannelSizeInByte];
-    std::unique_ptr<uint8_t[]> p010_data;
-    p010_data.reset(reinterpret_cast<uint8_t*>(p010.data));
-    memcpy((uint8_t*)p010.data, inputFrame.p010Buffer.data, yChannelSizeInByte);
-    memcpy((uint8_t*)p010.data + yChannelSizeInByte, inputFrame.p010Buffer.dataCb,
-           uvChannelSizeInByte);
+    p010.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
+    p010.data = inputFrame.p010Buffer.data;
+    p010.chroma_data = inputFrame.p010Buffer.dataCb;
+    // Strides are expected to be in pixels not bytes
+    p010.luma_stride = inputFrame.p010Buffer.stride / 2;
+    p010.chroma_stride = inputFrame.p010Buffer.chromaStride / 2;
 
     jpegR.data = dstBuffer;
     jpegR.maxLength = maxJpegRBufferSize;
 
-    jpegrecoverymap::jpegr_transfer_function transferFunction;
+    ultrahdr::ultrahdr_transfer_function transferFunction;
     switch (mP010DynamicRange) {
         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
-            transferFunction = jpegrecoverymap::jpegr_transfer_function::JPEGR_TF_PQ;
+            transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_PQ;
             break;
         default:
-            transferFunction = jpegrecoverymap::jpegr_transfer_function::JPEGR_TF_HLG;
+            transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_HLG;
     }
 
     if (mSupportInternalJpeg) {
-        jpegrecoverymap::jpegr_compressed_struct jpeg;
+        ultrahdr::jpegr_compressed_struct jpeg;
 
         jpeg.data = inputFrame.jpegBuffer.data;
         jpeg.length = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
@@ -334,9 +331,9 @@
         }
 
         if (mOutputColorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
-            jpeg.colorGamut = jpegrecoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_P3;
+            jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_P3;
         } else {
-            jpeg.colorGamut = jpegrecoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_BT709;
+            jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT709;
         }
 
         res = jpegREncoder.encodeJPEGR(&p010, &jpeg, transferFunction, &jpegR);
@@ -354,7 +351,7 @@
             ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
         }
 
-        jpegrecoverymap::jpegr_exif_struct exif;
+        ultrahdr::jpegr_exif_struct exif;
         exif.data = reinterpret_cast<void*>(const_cast<uint8_t*>(exifBuffer));
         exif.length = exifBufferSize;
 
@@ -367,7 +364,6 @@
     }
 
     actualJpegRSize = jpegR.length;
-    p010_data.release();
 
     size_t finalJpegRSize = actualJpegRSize + sizeof(CameraBlob);
     if (finalJpegRSize > maxJpegRBufferSize) {
@@ -662,6 +658,13 @@
         return res;
     }
 
+    if ((res = native_window_set_usage(mOutputSurface.get(),
+            GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN)) != OK) {
+        ALOGE("%s: Unable to configure stream buffer usage for stream %d", __FUNCTION__,
+                mP010StreamId);
+        return res;
+    }
+
     int maxProducerBuffers;
     ANativeWindow *anw = mP010Surface.get();
     if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) {
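
A note on the stride handling in the hunk above: P010 stores each luma sample in 16 bits, so a stride expressed in bytes is twice the stride in pixels that the ultrahdr encoder expects. A minimal sketch of that conversion, using a hypothetical byte-stride descriptor (the helper name and its fields are illustrative, not part of this change):

    #include <cstddef>
    #include <cstdint>
    #include <ultrahdr/jpegr.h>

    // Hypothetical input descriptor carrying byte strides; only the divide-by-2
    // conversion mirrors what JpegRCompositeStream does above.
    struct P010ByteStrides {
        uint8_t* luma;
        uint8_t* chroma;
        size_t lumaStrideBytes;
        size_t chromaStrideBytes;
        size_t width;
        size_t height;
    };

    static ultrahdr::jpegr_uncompressed_struct toUltraHdrP010(const P010ByteStrides& in) {
        ultrahdr::jpegr_uncompressed_struct out{};
        out.width = in.width;
        out.height = in.height;
        out.data = in.luma;
        out.chroma_data = in.chroma;
        out.luma_stride = in.lumaStrideBytes / 2;     // bytes -> pixels (2 bytes per sample)
        out.chroma_stride = in.chromaStrideBytes / 2; // bytes -> pixels
        out.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
        return out;
    }
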
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index f1fc815..6e10f30 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -145,7 +145,6 @@
     res = TClientBase::startCameraOps();
     if (res != OK) {
         TClientBase::finishCameraOps();
-        mDevice.clear();
         return res;
     }
 
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 6f15653..919108d 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -113,6 +113,8 @@
      */
     virtual const CameraMetadata& infoPhysical(const String8& physicalId) const = 0;
 
+    virtual bool isCompositeJpegRDisabled() const { return false; };
+
     struct PhysicalCameraSettings {
         std::string cameraId;
         CameraMetadata metadata;
@@ -126,6 +128,9 @@
         int32_t mOriginalTestPatternMode = 0;
         int32_t mOriginalTestPatternData[4] = {};
 
+        // Original value of SETTINGS_OVERRIDE so that it can be restored if
+        // the camera service isn't overriding the app value.
+        int32_t mOriginalSettingsOverride = ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
     };
     typedef List<PhysicalCameraSettings> PhysicalCameraSettingsList;
 
@@ -465,6 +470,14 @@
     virtual status_t setCameraMute(bool enabled) = 0;
 
     /**
+     * Whether the camera device supports zoom override.
+     */
+    virtual bool supportsZoomOverride() = 0;
+
+    // Set/reset zoom override
+    virtual status_t setZoomOverride(int32_t zoomOverride) = 0;
+
+    /**
      * Enable/disable camera service watchdog
      */
     virtual status_t setCameraServiceWatchdog(bool enabled) = 0;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index d2d1e38..1a6e341 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -316,6 +316,18 @@
     return deviceInfo->supportNativeZoomRatio();
 }
 
+bool CameraProviderManager::isCompositeJpegRDisabled(const std::string &id) const {
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+    return isCompositeJpegRDisabledLocked(id);
+}
+
+bool CameraProviderManager::isCompositeJpegRDisabledLocked(const std::string &id) const {
+    auto deviceInfo = findDeviceInfoLocked(id);
+    if (deviceInfo == nullptr) return false;
+
+    return deviceInfo->isCompositeJpegRDisabled();
+}
+
 status_t CameraProviderManager::getResourceCost(const std::string &id,
         CameraResourceCost* cost) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
@@ -1098,7 +1110,7 @@
 
     for (size_t i = 0; i < entry.count; i += 3) {
         if (entry.data.i64[i] == profile) {
-            if (entry.data.i64[i+1] & concurrentProfile) {
+            if ((entry.data.i64[i+1] == 0) || (entry.data.i64[i+1] & concurrentProfile)) {
                 return true;
             }
         }
@@ -1108,15 +1120,15 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveJpegRTags(bool maxResolution) {
-    if (kFrameworkJpegRDisabled) {
+    if (kFrameworkJpegRDisabled || mCompositeJpegRDisabled) {
         return OK;
     }
 
     const int32_t scalerSizesTag =
               SessionConfigurationUtils::getAppropriateModeTag(
                       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
-    const int32_t scalerMinFrameDurationsTag =
-            ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
+    const int32_t scalerMinFrameDurationsTag = SessionConfigurationUtils::getAppropriateModeTag(
+            ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
     const int32_t scalerStallDurationsTag =
                  SessionConfigurationUtils::getAppropriateModeTag(
                         ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);
@@ -1154,16 +1166,26 @@
         return OK;
     }
 
+    if (!isConcurrentDynamicRangeCaptureSupported(c,
+                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
+                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) &&
+            !property_get_bool("ro.camera.enableCompositeAPI0JpegR", false)) {
+        // API0 (P010-only) Jpeg/R support is meant to be used only as a reference due to its
+        // possible impact on quality and performance.
+        // This data path is turned off by default; individual device builds must enable
+        // 'ro.camera.enableCompositeAPI0JpegR' in order to experiment with it.
+        mCompositeJpegRDisabled = true;
+        return OK;
+    }
+
     getSupportedSizes(c, scalerSizesTag,
             static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_BLOB), &supportedBlobSizes);
     getSupportedSizes(c, scalerSizesTag,
             static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010), &supportedP010Sizes);
     auto it = supportedP010Sizes.begin();
     while (it != supportedP010Sizes.end()) {
-        // Resolutions that don't align on 32 pixels are not supported by Jpeg/R.
-        // This can be removed as soon as the encoder restriction is lifted.
-        if ((std::find(supportedBlobSizes.begin(), supportedBlobSizes.end(), *it) ==
-                supportedBlobSizes.end()) || ((std::get<0>(*it) % 32) != 0)) {
+        if (std::find(supportedBlobSizes.begin(), supportedBlobSizes.end(), *it) ==
+                supportedBlobSizes.end()) {
             it = supportedP010Sizes.erase(it);
         } else {
             it++;
@@ -2508,7 +2530,11 @@
         const CameraResourceCost& resourceCost,
         sp<ProviderInfo> parentProvider,
         const std::vector<std::string>& publicCameraIds) :
-        DeviceInfo(name, tagId, id, hardware::hidl_version{3, minorVersion},
+        DeviceInfo(name, tagId, id,
+                   hardware::hidl_version{
+                        static_cast<uint16_t >(
+                                parentProvider->getIPCTransport() == IPCTransport::HIDL ? 3 : 1),
+                                minorVersion},
                    publicCameraIds, resourceCost, parentProvider) { }
 
 void CameraProviderManager::ProviderInfo::DeviceInfo3::notifyDeviceStateChange(int64_t newState) {
@@ -2516,6 +2542,10 @@
             (mDeviceStateOrientationMap.find(newState) != mDeviceStateOrientationMap.end())) {
         mCameraCharacteristics.update(ANDROID_SENSOR_ORIENTATION,
                 &mDeviceStateOrientationMap[newState], 1);
+        if (mCameraCharNoPCOverride.get() != nullptr) {
+            mCameraCharNoPCOverride->update(ANDROID_SENSOR_ORIENTATION,
+                &mDeviceStateOrientationMap[newState], 1);
+        }
     }
 }
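
For context on the profile-map lookup relaxed above (entry.data.i64[i+1] == 0 now counts as supported): the dynamic range profiles map is stored as triplets of int64 values, where the first element is a profile, the second is a bitmask of profiles it can be captured concurrently with (0 meaning the HAL advertised no constraint), and the third is not consulted by this check. A standalone sketch of the same walk, assuming the entry has already been read from the static metadata:

    #include <cstddef>
    #include <cstdint>

    // Sketch only: mirrors the triplet walk in CameraProviderManager above.
    static bool supportsConcurrentProfile(const int64_t* map, size_t count,
                                          int64_t profile, int64_t concurrentProfile) {
        for (size_t i = 0; i + 2 < count; i += 3) {
            if (map[i] != profile) continue;
            // A zero mask means the HAL did not list any concurrency restriction.
            if (map[i + 1] == 0 || (map[i + 1] & concurrentProfile)) {
                return true;
            }
        }
        return false;
    }
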
 
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index aab6fd5..e6e4619 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -248,6 +248,11 @@
     bool supportNativeZoomRatio(const std::string &id) const;
 
     /**
+     * Return true if the camera device has no composite Jpeg/R support.
+     */
+    bool isCompositeJpegRDisabled(const std::string &id) const;
+
+    /**
      * Return the resource cost of this camera device
      */
     status_t getResourceCost(const std::string &id,
@@ -568,6 +573,7 @@
 
             bool hasFlashUnit() const { return mHasFlashUnit; }
             bool supportNativeZoomRatio() const { return mSupportNativeZoomRatio; }
+            bool isCompositeJpegRDisabled() const { return mCompositeJpegRDisabled; }
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
             virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
@@ -609,13 +615,14 @@
                     mParentProvider(parentProvider), mTorchStrengthLevel(0),
                     mTorchMaximumStrengthLevel(0), mTorchDefaultStrengthLevel(0),
                     mHasFlashUnit(false), mSupportNativeZoomRatio(false),
-                    mPublicCameraIds(publicCameraIds) {}
+                    mPublicCameraIds(publicCameraIds), mCompositeJpegRDisabled(false) {}
             virtual ~DeviceInfo() {}
         protected:
 
             bool mHasFlashUnit; // const after constructor
             bool mSupportNativeZoomRatio; // const after constructor
             const std::vector<std::string>& mPublicCameraIds;
+            bool mCompositeJpegRDisabled;
         };
         std::vector<std::unique_ptr<DeviceInfo>> mDevices;
         std::unordered_set<std::string> mUniqueCameraIds;
@@ -804,6 +811,8 @@
     // No guarantees on the order of traversal
     ProviderInfo::DeviceInfo* findDeviceInfoLocked(const std::string& id) const;
 
+    bool isCompositeJpegRDisabledLocked(const std::string &id) const;
+
     // Map external providers to USB devices in order to handle USB hotplug
     // events for lazy HALs
     std::pair<std::vector<std::string>, sp<ProviderInfo>>
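
The new isCompositeJpegRDisabled()/isCompositeJpegRDisabledLocked() pair follows the locked-wrapper idiom used throughout this header: the public method takes mInterfaceMutex and forwards to a *Locked variant that assumes the lock is already held, so other *Locked helpers (such as the AIDL provider path below) can reuse the query without re-locking. A generic sketch of the idiom with illustrative names:

    #include <mutex>
    #include <set>
    #include <string>

    class Manager {
      public:
        bool isFeatureDisabled(const std::string& id) const {
            std::lock_guard<std::mutex> lock(mMutex);  // public entry point takes the lock
            return isFeatureDisabledLocked(id);
        }

      private:
        bool isFeatureDisabledLocked(const std::string& id) const {
            // Caller must already hold mMutex; callable from other *Locked helpers.
            return mDisabledIds.count(id) != 0;
        }

        mutable std::mutex mMutex;
        std::set<std::string> mDisabledIds;
    };
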
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 1351703..b18cbd4 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -483,6 +483,9 @@
         }
     }
 
+    mCompositeJpegRDisabled = mCameraCharacteristics.exists(
+            ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS);
+
     mSystemCameraKind = getSystemCameraKind();
 
     status_t res = fixupMonochromeTags();
@@ -732,8 +735,8 @@
     camera::device::StreamConfiguration streamConfiguration;
     bool earlyExit = false;
     auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
-            String8(mId.c_str()), mCameraCharacteristics, getMetadata, mPhysicalIds,
-            streamConfiguration, overrideForPerfClass, &earlyExit);
+            String8(mId.c_str()), mCameraCharacteristics, mCompositeJpegRDisabled, getMetadata,
+            mPhysicalIds, streamConfiguration, overrideForPerfClass, &earlyExit);
 
     if (!bRes.isOk()) {
         return UNKNOWN_ERROR;
@@ -798,7 +801,8 @@
         bStatus =
             SessionConfigurationUtils::convertToHALStreamCombination(
                     cameraIdAndSessionConfig.mSessionConfiguration,
-                    String8(cameraId.c_str()), deviceInfo, getMetadata,
+                    String8(cameraId.c_str()), deviceInfo,
+                    mManager->isCompositeJpegRDisabledLocked(cameraId), getMetadata,
                     physicalCameraIds, streamConfiguration,
                     overrideForPerfClass, &shouldExit);
         if (!bStatus.isOk()) {
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index a13b937..8ff5c3f 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -697,6 +697,11 @@
 
     mTorchStrengthLevel = 0;
 
+    if (!kEnableLazyHal) {
+        // Save HAL reference indefinitely
+        mSavedInterface = interface;
+    }
+
     queryPhysicalCameraIds();
 
     // Get physical camera characteristics if applicable
@@ -757,13 +762,6 @@
             }
         }
     }
-
-    if (!kEnableLazyHal) {
-        // Save HAL reference indefinitely
-        mSavedInterface = interface;
-    }
-
-
 }
 
 status_t HidlProviderInfo::HidlDeviceInfo3::setTorchMode(bool enabled) {
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index f2c74d0..4487970 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -53,15 +53,16 @@
 #include <android/hardware/camera2/ICameraDeviceUser.h>
 
 #include "CameraService.h"
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
 #include "aidl/AidlUtils.h"
 #include "device3/Camera3Device.h"
 #include "device3/Camera3FakeStream.h"
 #include "device3/Camera3InputStream.h"
 #include "device3/Camera3OutputStream.h"
 #include "device3/Camera3SharedOutputStream.h"
-#include "mediautils/SchedulingPolicyService.h"
 #include "utils/CameraThreadState.h"
 #include "utils/CameraTraces.h"
+#include "utils/SchedulingPolicyUtils.h"
 #include "utils/SessionConfigurationUtils.h"
 #include "utils/TraceHFR.h"
 
@@ -80,6 +81,7 @@
         mLegacyClient(legacyClient),
         mOperatingMode(NO_MODE),
         mIsConstrainedHighSpeedConfiguration(false),
+        mIsCompositeJpegRDisabled(false),
         mStatus(STATUS_UNINITIALIZED),
         mStatusWaiters(0),
         mUsePartialResult(false),
@@ -98,6 +100,10 @@
         mNeedFixupMonochromeTags(false),
         mOverrideForPerfClass(overrideForPerfClass),
         mOverrideToPortrait(overrideToPortrait),
+        mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
+        mComposerOutput(false),
+        mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
+        mSettingsOverride(-1),
         mActivePhysicalId("")
 {
     ATRACE_CALL();
@@ -167,10 +173,21 @@
         }
     }
 
+    camera_metadata_entry_t availableSettingsOverrides = mDeviceInfo.find(
+            ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES);
+    for (size_t i = 0; i < availableSettingsOverrides.count; i++) {
+        if (availableSettingsOverrides.data.i32[i] ==
+                ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM) {
+            mSupportZoomOverride = true;
+            break;
+        }
+    }
+
     /** Start up request queue thread */
     mRequestThread = createNewRequestThread(
             this, mStatusTracker, mInterface, sessionParamKeys,
-            mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait);
+            mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait,
+            mSupportZoomOverride);
     res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
     if (res != OK) {
         SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -248,113 +265,110 @@
 
 status_t Camera3Device::disconnectImpl() {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
+
     ALOGI("%s: E", __FUNCTION__);
 
     status_t res = OK;
     std::vector<wp<Camera3StreamInterface>> streams;
+    nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
     {
-        Mutex::Autolock il(mInterfaceLock);
-        nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
-        {
-            Mutex::Autolock l(mLock);
-            if (mStatus == STATUS_UNINITIALIZED) return res;
+        Mutex::Autolock l(mLock);
+        if (mStatus == STATUS_UNINITIALIZED) return res;
 
-            if (mRequestThread != NULL) {
-                if (mStatus == STATUS_ACTIVE || mStatus == STATUS_ERROR) {
-                    res = mRequestThread->clear();
+        if (mRequestThread != NULL) {
+            if (mStatus == STATUS_ACTIVE || mStatus == STATUS_ERROR) {
+                res = mRequestThread->clear();
+                if (res != OK) {
+                    SET_ERR_L("Can't stop streaming");
+                    // Continue to close device even in case of error
+                } else {
+                    res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+                                  /*requestThreadInvocation*/ false);
                     if (res != OK) {
-                        SET_ERR_L("Can't stop streaming");
+                        SET_ERR_L("Timeout waiting for HAL to drain (% " PRIi64 " ns)",
+                                maxExpectedDuration);
                         // Continue to close device even in case of error
-                    } else {
-                        res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
-                        if (res != OK) {
-                            SET_ERR_L("Timeout waiting for HAL to drain (% " PRIi64 " ns)",
-                                    maxExpectedDuration);
-                            // Continue to close device even in case of error
-                        }
                     }
                 }
-                // Signal to request thread that we're not expecting any
-                // more requests. This will be true since once we're in
-                // disconnect and we've cleared off the request queue, the
-                // request thread can't receive any new requests through
-                // binder calls - since disconnect holds
-                // mBinderSerialization lock.
-                mRequestThread->setRequestClearing();
             }
+            // Signal to request thread that we're not expecting any
+            // more requests. This will be true since once we're in
+            // disconnect and we've cleared off the request queue, the
+            // request thread can't receive any new requests through
+            // binder calls - since disconnect holds
+            // mBinderSerialization lock.
+            mRequestThread->setRequestClearing();
+        }
 
-            if (mStatus == STATUS_ERROR) {
-                CLOGE("Shutting down in an error state");
-            }
+        if (mStatus == STATUS_ERROR) {
+            CLOGE("Shutting down in an error state");
+        }
 
-            if (mStatusTracker != NULL) {
-                mStatusTracker->requestExit();
-            }
+        if (mStatusTracker != NULL) {
+            mStatusTracker->requestExit();
+        }
 
-            if (mRequestThread != NULL) {
-                mRequestThread->requestExit();
-            }
+        if (mRequestThread != NULL) {
+            mRequestThread->requestExit();
+        }
 
-            streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0));
-            for (size_t i = 0; i < mOutputStreams.size(); i++) {
-                streams.push_back(mOutputStreams[i]);
-            }
-            if (mInputStream != nullptr) {
-                streams.push_back(mInputStream);
-            }
+        streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0));
+        for (size_t i = 0; i < mOutputStreams.size(); i++) {
+            streams.push_back(mOutputStreams[i]);
+        }
+        if (mInputStream != nullptr) {
+            streams.push_back(mInputStream);
         }
     }
-    // Joining done without holding mLock and mInterfaceLock, otherwise deadlocks may ensue
-    // as the threads try to access parent state (b/143513518)
+
+    // Joining done without holding mLock, otherwise deadlocks may ensue
+    // as the threads try to access parent state
     if (mRequestThread != NULL && mStatus != STATUS_ERROR) {
         // HAL may be in a bad state, so waiting for request thread
         // (which may be stuck in the HAL processCaptureRequest call)
         // could be dangerous.
-        // give up mInterfaceLock here and then lock it again. Could this lead
-        // to other deadlocks
         mRequestThread->join();
     }
+
+    if (mStatusTracker != NULL) {
+        mStatusTracker->join();
+    }
+
+    if (mInjectionMethods->isInjecting()) {
+        mInjectionMethods->stopInjection();
+    }
+
+    HalInterface* interface;
     {
-        Mutex::Autolock il(mInterfaceLock);
-        if (mStatusTracker != NULL) {
-            mStatusTracker->join();
-        }
+        Mutex::Autolock l(mLock);
+        mRequestThread.clear();
+        Mutex::Autolock stLock(mTrackerLock);
+        mStatusTracker.clear();
+        interface = mInterface.get();
+    }
 
-        if (mInjectionMethods->isInjecting()) {
-            mInjectionMethods->stopInjection();
-        }
+    // Call close without internal mutex held, as the HAL close may need to
+    // wait on assorted callbacks, etc., to complete before it can return.
+    mCameraServiceWatchdog->WATCH(interface->close());
 
-        HalInterface* interface;
-        {
-            Mutex::Autolock l(mLock);
-            mRequestThread.clear();
-            Mutex::Autolock stLock(mTrackerLock);
-            mStatusTracker.clear();
-            interface = mInterface.get();
-        }
+    flushInflightRequests();
 
-        // Call close without internal mutex held, as the HAL close may need to
-        // wait on assorted callbacks,etc, to complete before it can return.
-        mCameraServiceWatchdog->WATCH(interface->close());
+    {
+        Mutex::Autolock l(mLock);
+        mInterface->clear();
+        mOutputStreams.clear();
+        mInputStream.clear();
+        mDeletedStreams.clear();
+        mBufferManager.clear();
+        internalUpdateStatusLocked(STATUS_UNINITIALIZED);
+    }
 
-        flushInflightRequests();
-
-        {
-            Mutex::Autolock l(mLock);
-            mInterface->clear();
-            mOutputStreams.clear();
-            mInputStream.clear();
-            mDeletedStreams.clear();
-            mBufferManager.clear();
-            internalUpdateStatusLocked(STATUS_UNINITIALIZED);
-        }
-
-        for (auto& weakStream : streams) {
-              sp<Camera3StreamInterface> stream = weakStream.promote();
-            if (stream != nullptr) {
-                ALOGE("%s: Stream %d leaked! strong reference (%d)!",
-                        __FUNCTION__, stream->getId(), stream->getStrongCount() - 1);
-            }
+    for (auto& weakStream : streams) {
+        sp<Camera3StreamInterface> stream = weakStream.promote();
+        if (stream != nullptr) {
+            ALOGE("%s: Stream %d leaked! strong reference (%d)!",
+                    __FUNCTION__, stream->getId(), stream->getStrongCount() - 1);
         }
     }
     ALOGI("%s: X", __FUNCTION__);
@@ -826,7 +840,7 @@
     }
 
     if (res == OK) {
-        waitUntilStateThenRelock(/*active*/true, kActiveTimeout);
+        waitUntilStateThenRelock(/*active*/true, kActiveTimeout, /*requestThreadInvocation*/false);
         if (res != OK) {
             SET_ERR_L("Can't transition to active in %f seconds!",
                     kActiveTimeout/1e9);
@@ -951,7 +965,8 @@
             break;
         case STATUS_ACTIVE:
             ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
-            res = internalPauseAndWaitLocked(maxExpectedDuration);
+            res = internalPauseAndWaitLocked(maxExpectedDuration,
+                          /*requestThreadInvocation*/ false);
             if (res != OK) {
                 SET_ERR_L("Can't pause captures to reconfigure streams!");
                 return res;
@@ -1068,7 +1083,8 @@
             break;
         case STATUS_ACTIVE:
             ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
-            res = internalPauseAndWaitLocked(maxExpectedDuration);
+            res = internalPauseAndWaitLocked(maxExpectedDuration,
+                          /*requestThreadInvocation*/ false);
             if (res != OK) {
                 SET_ERR_L("Can't pause captures to reconfigure streams!");
                 return res;
@@ -1363,12 +1379,49 @@
     set_camera_metadata_vendor_id(meta, mVendorTagId);
     filteredParams.unlock(meta);
     if (availableSessionKeys.count > 0) {
+        bool rotateAndCropSessionKey = false;
+        bool autoframingSessionKey = false;
         for (size_t i = 0; i < availableSessionKeys.count; i++) {
             camera_metadata_ro_entry entry = params.find(
                     availableSessionKeys.data.i32[i]);
             if (entry.count > 0) {
                 filteredParams.update(entry);
             }
+            if (ANDROID_SCALER_ROTATE_AND_CROP == availableSessionKeys.data.i32[i]) {
+                rotateAndCropSessionKey = true;
+            }
+            if (ANDROID_CONTROL_AUTOFRAMING == availableSessionKeys.data.i32[i]) {
+                autoframingSessionKey = true;
+            }
+        }
+
+        if (rotateAndCropSessionKey || autoframingSessionKey) {
+            sp<CaptureRequest> request = new CaptureRequest();
+            PhysicalCameraSettings settingsList;
+            settingsList.metadata = filteredParams;
+            request->mSettingsList.push_back(settingsList);
+
+            if (rotateAndCropSessionKey) {
+                auto rotateAndCropEntry = filteredParams.find(ANDROID_SCALER_ROTATE_AND_CROP);
+                if (rotateAndCropEntry.count > 0 &&
+                        rotateAndCropEntry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
+                    request->mRotateAndCropAuto = true;
+                } else {
+                    request->mRotateAndCropAuto = false;
+                }
+
+                overrideAutoRotateAndCrop(request, mOverrideToPortrait, mRotateAndCropOverride);
+            }
+
+            if (autoframingSessionKey) {
+                auto autoframingEntry = filteredParams.find(ANDROID_CONTROL_AUTOFRAMING);
+                if (autoframingEntry.count > 0 &&
+                        autoframingEntry.data.u8[0] == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+                    overrideAutoframing(request, mAutoframingOverride);
+                }
+            }
+
+            filteredParams = request->mSettingsList.begin()->metadata;
         }
     }
 
@@ -1504,7 +1557,8 @@
     }
     ALOGV("%s: Camera %s: Waiting until idle (%" PRIi64 "ns)", __FUNCTION__, mId.string(),
             maxExpectedDuration);
-    status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
+    status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+                           /*requestThreadInvocation*/ false);
     if (res != OK) {
         mStatusTracker->dumpActiveComponents();
         SET_ERR_L("Error waiting for HAL to drain: %s (%d)", strerror(-res),
@@ -1515,12 +1569,14 @@
 
 void Camera3Device::internalUpdateStatusLocked(Status status) {
     mStatus = status;
-    mRecentStatusUpdates.add(mStatus);
+    mStatusIsInternal = mPauseStateNotify ? true : false;
+    mRecentStatusUpdates.add({mStatus, mStatusIsInternal});
     mStatusChanged.broadcast();
 }
 
 // Pause to reconfigure
-status_t Camera3Device::internalPauseAndWaitLocked(nsecs_t maxExpectedDuration) {
+status_t Camera3Device::internalPauseAndWaitLocked(nsecs_t maxExpectedDuration,
+        bool requestThreadInvocation) {
     if (mRequestThread.get() != nullptr) {
         mRequestThread->setPaused(true);
     } else {
@@ -1529,7 +1585,8 @@
 
     ALOGV("%s: Camera %s: Internal wait until idle (% " PRIi64 " ns)", __FUNCTION__, mId.string(),
           maxExpectedDuration);
-    status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
+    status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+                           requestThreadInvocation);
     if (res != OK) {
         mStatusTracker->dumpActiveComponents();
         SET_ERR_L("Can't idle device in %f seconds!",
@@ -1547,7 +1604,9 @@
 
     ALOGV("%s: Camera %s: Internal wait until active (% " PRIi64 " ns)", __FUNCTION__, mId.string(),
             kActiveTimeout);
-    res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout);
+    // internalResumeLocked is always called from a binder thread.
+    res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout,
+                  /*requestThreadInvocation*/ false);
     if (res != OK) {
         SET_ERR_L("Can't transition to active in %f seconds!",
                 kActiveTimeout/1e9);
@@ -1556,7 +1615,8 @@
     return OK;
 }
 
-status_t Camera3Device::waitUntilStateThenRelock(bool active, nsecs_t timeout) {
+status_t Camera3Device::waitUntilStateThenRelock(bool active, nsecs_t timeout,
+        bool requestThreadInvocation) {
     status_t res = OK;
 
     size_t startIndex = 0;
@@ -1585,7 +1645,12 @@
     bool stateSeen = false;
     nsecs_t startTime = systemTime();
     do {
-        if (active == (mStatus == STATUS_ACTIVE)) {
+        if (mStatus == STATUS_ERROR) {
+            // Device in error state. Return right away.
+            break;
+        }
+        if (active == (mStatus == STATUS_ACTIVE) &&
+            (requestThreadInvocation || !mStatusIsInternal)) {
             // Desired state is current
             break;
         }
@@ -1607,9 +1672,19 @@
                 "%s: Skipping status updates in Camera3Device, may result in deadlock.",
                 __FUNCTION__);
 
-        // Encountered desired state since we began waiting
+        // Encountered desired state since we began waiting. Internal invocations coming from
+        // request threads (such as reconfigureCamera) should be woken up immediately, whereas
+        // invocations from binder threads (such as createInputStream) should only be woken up
+        // by non-internal status updates. This avoids waking binder threads on the intermediate
+        // pause signal from reconfigureCamera, which switches the status back to active right
+        // after.
         for (size_t i = startIndex; i < mRecentStatusUpdates.size(); i++) {
-            if (active == (mRecentStatusUpdates[i] == STATUS_ACTIVE) ) {
+            if (mRecentStatusUpdates[i].status == STATUS_ERROR) {
+                // Device in error state. Return right away.
+                stateSeen = true;
+                break;
+            }
+            if (active == (mRecentStatusUpdates[i].status == STATUS_ACTIVE) &&
+                (requestThreadInvocation || !mRecentStatusUpdates[i].isInternal)) {
                 stateSeen = true;
                 break;
             }
@@ -1840,7 +1915,7 @@
     camera_metadata_entry minDurations =
             mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
     for (size_t i = 0; i < minDurations.count; i += 4) {
-        if (minDurations.data.i64[i] == stream->getFormat()
+        if (minDurations.data.i64[i] == stream->getOriginalFormat()
                 && minDurations.data.i64[i+1] == stream->getWidth()
                 && minDurations.data.i64[i+2] == stream->getHeight()) {
             int64_t minFrameDuration = minDurations.data.i64[i+3];
@@ -2190,6 +2265,16 @@
         }
     }
 
+    if (mSupportZoomOverride) {
+        for (auto& settings : newRequest->mSettingsList) {
+            auto settingsOverrideEntry =
+                    settings.metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
+            settings.mOriginalSettingsOverride = settingsOverrideEntry.count > 0 ?
+                    settingsOverrideEntry.data.i32[0] :
+                    ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
+        }
+    }
+
     return newRequest;
 }
 
@@ -2246,7 +2331,9 @@
 
     nsecs_t startTime = systemTime();
 
-    Mutex::Autolock il(mInterfaceLock);
+    // We must not hold mInterfaceLock here since this function is called from
+    // RequestThread::threadLoop and holding mInterfaceLock could lead to
+    // deadlocks (http://b/143513518)
     nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
 
     Mutex::Autolock l(mLock);
@@ -2261,8 +2348,18 @@
     if (mStatus == STATUS_ACTIVE) {
         markClientActive = true;
         mPauseStateNotify = true;
+        mStatusTracker->markComponentIdle(clientStatusId, Fence::NO_FENCE);
 
-        rc = internalPauseAndWaitLocked(maxExpectedDuration);
+        // This is essentially the same as calling rc = internalPauseAndWaitLocked(..), except that
+        // we skip setPaused(true) so it does not interfere with setPaused() calls from
+        // createInputStream/createStream.
+        rc = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+                /*requestThreadInvocation*/ true);
+        if (rc != OK) {
+            mStatusTracker->dumpActiveComponents();
+            SET_ERR_L("Can't idle device in %f seconds!",
+                maxExpectedDuration/1e9);
+        }
     }
 
     if (rc == NO_ERROR) {
@@ -2277,6 +2374,9 @@
             //present streams end up with outstanding buffers that will
             //not get drained.
             internalUpdateStatusLocked(STATUS_ACTIVE);
+
+            mCameraServiceProxyWrapper->logStreamConfigured(mId, mOperatingMode,
+                    true /*internalReconfig*/, ns2ms(systemTime() - startTime));
         } else if (rc == DEAD_OBJECT) {
             // DEAD_OBJECT can be returned if either the consumer surface is
             // abandoned, or the HAL has died.
@@ -2292,9 +2392,6 @@
         ALOGE("%s: Failed to pause streaming: %d", __FUNCTION__, rc);
     }
 
-    mCameraServiceProxyWrapper->logStreamConfigured(mId, mOperatingMode, true /*internalReconfig*/,
-        ns2ms(systemTime() - startTime));
-
     if (markClientActive) {
         mStatusTracker->markComponentActive(clientStatusId);
     }
@@ -2397,7 +2494,7 @@
     }
 
     mGroupIdPhysicalCameraMap.clear();
-    bool composerSurfacePresent = false;
+    mComposerOutput = false;
     for (size_t i = 0; i < mOutputStreams.size(); i++) {
 
         // Don't configure bidi streams twice, nor add them twice to the list
@@ -2420,7 +2517,10 @@
         if (outputStream->format == HAL_PIXEL_FORMAT_BLOB) {
             size_t k = i + ((mInputStream != nullptr) ? 1 : 0); // Input stream if present should
                                                                 // always occupy the initial entry.
-            if (outputStream->data_space == HAL_DATASPACE_V0_JFIF) {
+            if ((outputStream->data_space == HAL_DATASPACE_V0_JFIF) ||
+                    (outputStream->data_space ==
+                     static_cast<android_dataspace_t>(
+                         aidl::android::hardware::graphics::common::Dataspace::JPEG_R))) {
                 bufferSizes[k] = static_cast<uint32_t>(
                         getJpegBufferSize(infoPhysical(String8(outputStream->physical_camera_id)),
                                 outputStream->width, outputStream->height));
@@ -2440,7 +2540,7 @@
         }
 
         if (outputStream->usage & GraphicBuffer::USAGE_HW_COMPOSER) {
-            composerSurfacePresent = true;
+            mComposerOutput = true;
         }
     }
 
@@ -2510,7 +2610,7 @@
         }
     }
 
-    mRequestThread->setComposerSurface(composerSurfacePresent);
+    mRequestThread->setComposerSurface(mComposerOutput);
 
     // Request thread needs to know to avoid using repeat-last-settings protocol
     // across configure_streams() calls
@@ -2525,8 +2625,8 @@
     if (disableFifo != 1) {
         // Boost priority of request thread to SCHED_FIFO.
         pid_t requestThreadTid = mRequestThread->getTid();
-        res = requestPriority(getpid(), requestThreadTid,
-                kRequestThreadPriority, /*isForApp*/ false, /*asynchronous*/ false);
+        res = SchedulingPolicyUtils::requestPriorityDirect(getpid(), requestThreadTid,
+                kRequestThreadPriority);
         if (res != OK) {
             ALOGW("Can't set realtime priority for request processing thread: %s (%d)",
                     strerror(-res), res);
@@ -2914,7 +3014,8 @@
         sp<HalInterface> interface, const Vector<int32_t>& sessionParamKeys,
         bool useHalBufManager,
         bool supportCameraMute,
-        bool overrideToPortrait) :
+        bool overrideToPortrait,
+        bool supportSettingsOverride) :
         Thread(/*canCallJava*/false),
         mParent(parent),
         mStatusTracker(statusTracker),
@@ -2936,6 +3037,7 @@
         mComposerOutput(false),
         mCameraMute(ANDROID_SENSOR_TEST_PATTERN_MODE_OFF),
         mCameraMuteChanged(false),
+        mSettingsOverride(ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF),
         mRepeatingLastFrameNumber(
             hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES),
         mPrepareVideoStream(false),
@@ -2945,7 +3047,8 @@
         mLatestSessionParams(sessionParamKeys.size()),
         mUseHalBufManager(useHalBufManager),
         mSupportCameraMute(supportCameraMute),
-        mOverrideToPortrait(overrideToPortrait) {
+        mOverrideToPortrait(overrideToPortrait),
+        mSupportSettingsOverride(supportSettingsOverride) {
     mStatusId = statusTracker->addComponent("RequestThread");
     mVndkVersion = property_get_int32("ro.vndk.version", __ANDROID_API_FUTURE__);
 }
@@ -3465,6 +3568,17 @@
         latestRequestId = NAME_NOT_FOUND;
     }
 
+    for (size_t i = 0; i < mNextRequests.size(); i++) {
+        auto& nextRequest = mNextRequests.editItemAt(i);
+        sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
+        // Do not override rotate&crop for stream configurations that include
+        // SurfaceView (HW_COMPOSER) output, unless mOverrideToPortrait is set.
+        // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
+        captureRequest->mRotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
+            overrideAutoRotateAndCrop(captureRequest);
+        captureRequest->mAutoframingChanged = overrideAutoframing(captureRequest);
+    }
+
     // 'mNextRequests' will at this point contain either a set of HFR batched requests
     //  or a single request from streaming or burst. In either case the first element
     //  should contain the latest camera settings that we need to check for any session
@@ -3486,13 +3600,8 @@
         if (res == OK) {
             sp<Camera3Device> parent = mParent.promote();
             if (parent != nullptr) {
-                sp<StatusTracker> statusTracker = mStatusTracker.promote();
-                if (statusTracker != nullptr) {
-                    statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
-                }
                 mReconfigured |= parent->reconfigureCamera(mLatestSessionParams, mStatusId);
             }
-            setPaused(false);
 
             if (mNextRequests[0].captureRequest->mInputStream != nullptr) {
                 mNextRequests[0].captureRequest->mInputStream->restoreConfiguredState();
@@ -3614,19 +3723,16 @@
         bool triggersMixedIn = (triggerCount > 0 || mPrevTriggers > 0);
         mPrevTriggers = triggerCount;
 
-        // Do not override rotate&crop for stream configurations that include
-        // SurfaceViews(HW_COMPOSER) output, unless mOverrideToPortrait is set.
-        // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
-        bool rotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
-            overrideAutoRotateAndCrop(captureRequest);
-        bool autoframingChanged = overrideAutoframing(captureRequest);
         bool testPatternChanged = overrideTestPattern(captureRequest);
+        bool settingsOverrideChanged = overrideSettingsOverride(captureRequest);
 
         // If the request is the same as last, or we had triggers now or last time or
         // changing overrides this time
         bool newRequest =
-                (mPrevRequest != captureRequest || triggersMixedIn || rotateAndCropChanged ||
-                         autoframingChanged || testPatternChanged) &&
+                (mPrevRequest != captureRequest || triggersMixedIn ||
+                         captureRequest->mRotateAndCropChanged ||
+                         captureRequest->mAutoframingChanged ||
+                         testPatternChanged || settingsOverrideChanged) &&
                 // Request settings are all the same within one batch, so only treat the first
                 // request in a batch as new
                 !(batchedRequest && i > 0);
@@ -4101,9 +4207,6 @@
         camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) {
     ATRACE_CALL();
     Mutex::Autolock l(mTriggerMutex);
-    if (rotateAndCropValue == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
-        return BAD_VALUE;
-    }
     mRotateAndCropOverride = rotateAndCropValue;
     return OK;
 }
@@ -4112,9 +4215,6 @@
         camera_metadata_enum_android_control_autoframing_t autoframingValue) {
     ATRACE_CALL();
     Mutex::Autolock l(mTriggerMutex);
-    if (autoframingValue == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
-        return BAD_VALUE;
-    }
     mAutoframingOverride = autoframingValue;
     return OK;
 }
@@ -4136,6 +4236,13 @@
     return OK;
 }
 
+status_t Camera3Device::RequestThread::setZoomOverride(int32_t zoomOverride) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mTriggerMutex);
+    mSettingsOverride = zoomOverride;
+    return OK;
+}
+
 nsecs_t Camera3Device::getExpectedInFlightDuration() {
     ATRACE_CALL();
     std::lock_guard<std::mutex> l(mInFlightLock);
@@ -4702,13 +4809,20 @@
     return OK;
 }
 
-bool Camera3Device::RequestThread::overrideAutoRotateAndCrop(
-        const sp<CaptureRequest> &request) {
+bool Camera3Device::RequestThread::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mTriggerMutex);
+    return Camera3Device::overrideAutoRotateAndCrop(request, this->mOverrideToPortrait,
+            this->mRotateAndCropOverride);
+}
+
+bool Camera3Device::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request,
+        bool overrideToPortrait,
+        camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride) {
     ATRACE_CALL();
 
-    if (mOverrideToPortrait) {
-        Mutex::Autolock l(mTriggerMutex);
-        uint8_t rotateAndCrop_u8 = mRotateAndCropOverride;
+    if (overrideToPortrait) {
+        uint8_t rotateAndCrop_u8 = rotateAndCropOverride;
         CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
         metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
                 &rotateAndCrop_u8, 1);
@@ -4716,24 +4830,44 @@
     }
 
     if (request->mRotateAndCropAuto) {
-        Mutex::Autolock l(mTriggerMutex);
         CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
 
         auto rotateAndCropEntry = metadata.find(ANDROID_SCALER_ROTATE_AND_CROP);
         if (rotateAndCropEntry.count > 0) {
-            if (rotateAndCropEntry.data.u8[0] == mRotateAndCropOverride) {
+            if (rotateAndCropEntry.data.u8[0] == rotateAndCropOverride) {
                 return false;
             } else {
-                rotateAndCropEntry.data.u8[0] = mRotateAndCropOverride;
+                rotateAndCropEntry.data.u8[0] = rotateAndCropOverride;
                 return true;
             }
         } else {
-            uint8_t rotateAndCrop_u8 = mRotateAndCropOverride;
-            metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
-                    &rotateAndCrop_u8, 1);
+            uint8_t rotateAndCrop_u8 = rotateAndCropOverride;
+            metadata.update(ANDROID_SCALER_ROTATE_AND_CROP, &rotateAndCrop_u8, 1);
             return true;
         }
     }
+
+    return false;
+}
+
+bool Camera3Device::overrideAutoframing(const sp<CaptureRequest> &request /*out*/,
+        camera_metadata_enum_android_control_autoframing_t autoframingOverride) {
+    CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+    auto autoframingEntry = metadata.find(ANDROID_CONTROL_AUTOFRAMING);
+    if (autoframingEntry.count > 0) {
+        if (autoframingEntry.data.u8[0] == autoframingOverride) {
+            return false;
+        } else {
+            autoframingEntry.data.u8[0] = autoframingOverride;
+            return true;
+        }
+    } else {
+        uint8_t autoframing_u8 = autoframingOverride;
+        metadata.update(ANDROID_CONTROL_AUTOFRAMING,
+                &autoframing_u8, 1);
+        return true;
+    }
+
     return false;
 }
 
@@ -4742,23 +4876,9 @@
 
     if (request->mAutoframingAuto) {
         Mutex::Autolock l(mTriggerMutex);
-        CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
-
-        auto autoframingEntry = metadata.find(ANDROID_CONTROL_AUTOFRAMING);
-        if (autoframingEntry.count > 0) {
-            if (autoframingEntry.data.u8[0] == mAutoframingOverride) {
-                return false;
-            } else {
-                autoframingEntry.data.u8[0] = mAutoframingOverride;
-                return true;
-            }
-        } else {
-            uint8_t autoframing_u8 = mAutoframingOverride;
-            metadata.update(ANDROID_CONTROL_AUTOFRAMING,
-                    &autoframing_u8, 1);
-            return true;
-        }
+        return Camera3Device::overrideAutoframing(request, mAutoframingOverride);
     }
+
     return false;
 }
 
@@ -4824,6 +4944,33 @@
     return changed;
 }
 
+bool Camera3Device::RequestThread::overrideSettingsOverride(
+        const sp<CaptureRequest> &request) {
+    ATRACE_CALL();
+
+    if (!mSupportSettingsOverride) return false;
+
+    Mutex::Autolock l(mTriggerMutex);
+
+    // For a multi-camera, only override the logical camera's metadata.
+    CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+    camera_metadata_entry entry = metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
+    int32_t originalValue = request->mSettingsList.begin()->mOriginalSettingsOverride;
+    if (mSettingsOverride != -1 &&
+            (entry.count == 0 || entry.data.i32[0] != mSettingsOverride)) {
+        metadata.update(ANDROID_CONTROL_SETTINGS_OVERRIDE,
+                &mSettingsOverride, 1);
+        return true;
+    } else if (mSettingsOverride == -1 &&
+            (entry.count == 0 || entry.data.i32[0] != originalValue)) {
+        metadata.update(ANDROID_CONTROL_SETTINGS_OVERRIDE,
+                &originalValue, 1);
+        return true;
+    }
+
+    return false;
+}
+
 status_t Camera3Device::RequestThread::setHalInterface(
         sp<HalInterface> newHalInterface) {
     if (newHalInterface.get() == nullptr) {
@@ -5246,6 +5393,10 @@
     if (mRequestThread == nullptr) {
         return INVALID_OPERATION;
     }
+    if (rotateAndCropValue == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
+        return BAD_VALUE;
+    }
+    mRotateAndCropOverride = rotateAndCropValue;
     return mRequestThread->setRotateAndCropAutoBehavior(rotateAndCropValue);
 }
 
@@ -5257,6 +5408,10 @@
     if (mRequestThread == nullptr) {
         return INVALID_OPERATION;
     }
+    if (autoframingValue == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+        return BAD_VALUE;
+    }
+    mAutoframingOverride = autoframingValue;
     return mRequestThread->setAutoframingAutoBehaviour(autoframingValue);
 }
 
@@ -5282,6 +5437,25 @@
     return mRequestThread->setCameraMute(muteMode);
 }
 
+bool Camera3Device::supportsZoomOverride() {
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+
+    return mSupportZoomOverride;
+}
+
+status_t Camera3Device::setZoomOverride(int32_t zoomOverride) {
+    ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+
+    if (mRequestThread == nullptr || !mSupportZoomOverride) {
+        return INVALID_OPERATION;
+    }
+
+    return mRequestThread->setZoomOverride(zoomOverride);
+}
+
 status_t Camera3Device::injectCamera(const String8& injectedCamId,
                                      sp<CameraProviderManager> manager) {
     ALOGI("%s Injection camera: injectedCamId = %s", __FUNCTION__, injectedCamId.string());
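
The disconnectImpl() and reconfigureCamera() restructuring above applies one pattern consistently: grab strong references to the worker objects while holding the device lock, then join threads and close the HAL with the lock released, because those calls can block on work that re-enters the device (the deadlock tracked in b/143513518). A generic sketch of that shutdown idiom, with illustrative names that are not part of the patch:

    #include <memory>
    #include <mutex>
    #include <thread>

    class Device {
      public:
        void shutdown() {
            std::unique_ptr<std::thread> worker;
            {
                std::lock_guard<std::mutex> l(mLock);
                worker = std::move(mWorker);  // take ownership under the lock; mWorker is now empty
            }
            // Join with mLock released: the worker may be blocked in a call that needs mLock.
            if (worker && worker->joinable()) {
                worker->join();
            }
        }

      private:
        std::mutex mLock;
        std::unique_ptr<std::thread> mWorker;
    };
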
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 6b98d9f..b1dd135 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -119,6 +119,7 @@
     status_t dumpWatchedEventsToVector(std::vector<std::string> &out) override;
     const CameraMetadata& info() const override;
     const CameraMetadata& infoPhysical(const String8& physicalId) const override;
+    bool isCompositeJpegRDisabled() const override { return mIsCompositeJpegRDisabled; };
 
     // Capture and setStreamingRequest will configure streams if currently in
     // idle state
@@ -311,6 +312,14 @@
     // Clear stream use case overrides
     void clearStreamUseCaseOverrides();
 
+    /**
+     * Whether the camera device supports zoom override.
+     */
+    bool supportsZoomOverride();
+
+    // Set/reset zoom override
+    status_t setZoomOverride(int32_t zoomOverride);
+
     // Get the status tracker for the camera device
     wp<camera3::StatusTracker> getStatusTracker() { return mStatusTracker; }
 
@@ -357,6 +366,7 @@
 
     // A lock to enforce serialization on the input/configure side
     // of the public interface.
+    // Only locked by public methods inherited from CameraDeviceBase.
     // Not locked by methods guarded by mOutputLock, since they may act
     // concurrently to the input/configure side of the interface.
     // Must be locked before mLock if both will be locked by a method
@@ -543,6 +553,7 @@
 
     CameraMetadata             mDeviceInfo;
     bool                       mSupportNativeZoomRatio;
+    bool                       mIsCompositeJpegRDisabled;
     std::unordered_map<std::string, CameraMetadata> mPhysicalDeviceInfoMap;
 
     CameraMetadata             mRequestTemplateCache[CAMERA_TEMPLATE_COUNT];
@@ -561,8 +572,15 @@
         STATUS_ACTIVE
     }                          mStatus;
 
+    struct StatusInfo {
+        Status status;
+        bool isInternal; // status triggered by internal reconfigureCamera.
+    };
+
+    bool                       mStatusIsInternal;
+
     // Only clear mRecentStatusUpdates, mStatusWaiters from waitUntilStateThenRelock
-    Vector<Status>             mRecentStatusUpdates;
+    Vector<StatusInfo>         mRecentStatusUpdates;
     int                        mStatusWaiters;
 
     Condition                  mStatusChanged;
@@ -625,9 +643,14 @@
         // overriding of ROTATE_AND_CROP value and adjustment of coordinates
         // in several other controls in both the request and the result
         bool                                mRotateAndCropAuto;
+        // Indicates that the ROTATE_AND_CROP value within 'mSettingsList' was modified
+        // irrespective of the original value.
+        bool                                mRotateAndCropChanged = false;
         // Whether this request has AUTOFRAMING_AUTO set, so need to override the AUTOFRAMING value
         // in the capture request.
         bool                                mAutoframingAuto;
+        // Indicates that the auto framing value within 'mSettingsList' was modified
+        bool                                mAutoframingChanged = false;
 
         // Whether this capture request has its zoom ratio set to 1.0x before
         // the framework overrides it for camera HAL consumption.
@@ -702,7 +725,8 @@
      * CameraDeviceBase interface we shouldn't need to.
      * Must be called with mLock and mInterfaceLock both held.
      */
-    status_t internalPauseAndWaitLocked(nsecs_t maxExpectedDuration);
+    status_t internalPauseAndWaitLocked(nsecs_t maxExpectedDuration,
+                     bool requestThreadInvocation);
 
     /**
      * Resume work after internalPauseAndWaitLocked()
@@ -721,7 +745,8 @@
      * During the wait mLock is released.
      *
      */
-    status_t waitUntilStateThenRelock(bool active, nsecs_t timeout);
+    status_t waitUntilStateThenRelock(bool active, nsecs_t timeout,
+                     bool requestThreadInvocation);
 
     /**
      * Implementation of waitUntilDrained. On success, will transition to IDLE state.
@@ -816,6 +841,15 @@
      */
     static nsecs_t getMonoToBoottimeOffset();
 
+    // Override rotate_and_crop control if needed
+    static bool    overrideAutoRotateAndCrop(const sp<CaptureRequest> &request /*out*/,
+            bool overrideToPortrait,
+            camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride);
+
+    // Override auto framing control if needed
+    static bool    overrideAutoframing(const sp<CaptureRequest> &request /*out*/,
+            camera_metadata_enum_android_control_autoframing_t autoframingOverride);
+
     struct RequestTrigger {
         // Metadata tag number, e.g. android.control.aePrecaptureTrigger
         uint32_t metadataTag;
@@ -846,7 +880,8 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait);
+                bool overrideToPortrait,
+                bool supportSettingsOverride);
         ~RequestThread();
 
         void     setNotificationListener(wp<NotificationListener> listener);
@@ -953,6 +988,8 @@
 
         status_t setCameraMute(int32_t muteMode);
 
+        status_t setZoomOverride(int32_t zoomOverride);
+
         status_t setHalInterface(sp<HalInterface> newHalInterface);
 
       protected:
@@ -973,7 +1010,7 @@
         status_t           addFakeTriggerIds(const sp<CaptureRequest> &request);
 
         // Override rotate_and_crop control if needed; returns true if the current value was changed
-        bool               overrideAutoRotateAndCrop(const sp<CaptureRequest> &request);
+        bool               overrideAutoRotateAndCrop(const sp<CaptureRequest> &request /*out*/);
 
         // Override autoframing control if needed; returns true if the current value was changed
         bool               overrideAutoframing(const sp<CaptureRequest> &request);
@@ -982,6 +1019,10 @@
         // if the current value was changed
         bool               overrideTestPattern(const sp<CaptureRequest> &request);
 
+        // Override settings override if needed for lower zoom latency; return
+        // true if the current value was changed
+        bool               overrideSettingsOverride(const sp<CaptureRequest> &request);
+
         static const nsecs_t kRequestTimeout = 50e6; // 50 ms
 
         // TODO: does this need to be adjusted for long exposure requests?
@@ -1116,6 +1157,8 @@
         bool               mComposerOutput;
         int32_t            mCameraMute; // 0 = no mute, otherwise the TEST_PATTERN_MODE to use
         bool               mCameraMuteChanged;
+        int32_t            mSettingsOverride; // -1 = use original, otherwise
+                                              // the settings override to use.
 
         int64_t            mRepeatingLastFrameNumber;
 
@@ -1135,6 +1178,7 @@
         const bool         mUseHalBufManager;
         const bool         mSupportCameraMute;
         const bool         mOverrideToPortrait;
+        const bool         mSupportSettingsOverride;
         int32_t            mVndkVersion = -1;
     };
 
@@ -1144,7 +1188,8 @@
                 const Vector<int32_t>& /*sessionParamKeys*/,
                 bool /*useHalBufManager*/,
                 bool /*supportCameraMute*/,
-                bool /*overrideToPortrait*/) = 0;
+                bool /*overrideToPortrait*/,
+                bool /*supportSettingsOverride*/) = 0;
 
     sp<RequestThread> mRequestThread;
 
@@ -1409,6 +1454,8 @@
     bool mSupportCameraMute = false;
     // Whether the HAL supports SOLID_COLOR or BLACK if mSupportCameraMute is true
     bool mSupportTestPatternSolidColor = false;
+    // Whether the HAL supports zoom settings override
+    bool mSupportZoomOverride = false;
 
     // Whether the camera framework overrides the device characteristics for
     // performance class.
@@ -1417,6 +1464,15 @@
     // Whether the camera framework overrides the device characteristics for
     // app compatibility reasons.
     bool mOverrideToPortrait;
+    camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
+    bool mComposerOutput;
+
+    // Auto framing override value
+    camera_metadata_enum_android_control_autoframing mAutoframingOverride;
+
+    // Settings override value
+    int32_t mSettingsOverride; // -1 = use original, otherwise
+                               // the settings override to use.
 
     // Current active physical id of the logical multi-camera, if any
     std::string mActivePhysicalId;
diff --git a/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp b/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
index 031c255..ab772d4 100644
--- a/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
@@ -58,7 +58,8 @@
     if (parent->mStatus == STATUS_ACTIVE) {
         ALOGV("%s: Let the device be IDLE and the request thread is paused",
                 __FUNCTION__);
-        res = parent->internalPauseAndWaitLocked(maxExpectedDuration);
+        res = parent->internalPauseAndWaitLocked(maxExpectedDuration,
+                                                 /*requestThreadInvocation*/false);
         if (res != OK) {
             ALOGE("%s: Can't pause captures to inject camera!", __FUNCTION__);
             return res;
@@ -117,7 +118,8 @@
     if (parent->mStatus == STATUS_ACTIVE) {
         ALOGV("%s: Let the device be IDLE and the request thread is paused",
                 __FUNCTION__);
-        res = parent->internalPauseAndWaitLocked(maxExpectedDuration);
+        res = parent->internalPauseAndWaitLocked(maxExpectedDuration,
+                                                 /*requestThreadInvocation*/false);
         if (res != OK) {
             ALOGE("%s: Can't pause captures to stop injection!", __FUNCTION__);
             return res;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 58db57a..a387064 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -24,6 +24,7 @@
 
 #include <aidl/android/hardware/camera/device/CameraBlob.h>
 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
 
 #include <android-base/unique_fd.h>
 #include <cutils/properties.h>
@@ -456,7 +457,10 @@
             mTraceFirstBuffer = false;
         }
         // Fix CameraBlob id type discrepancy between HIDL and AIDL, details : http://b/229688810
-        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF) {
+        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && (getDataSpace() == HAL_DATASPACE_V0_JFIF ||
+                    (getDataSpace() ==
+                     static_cast<android_dataspace_t>(
+                         aidl::android::hardware::graphics::common::Dataspace::JPEG_R)))) {
             if (mIPCTransport == IPCTransport::HIDL) {
                 fixUpHidlJpegBlobHeader(anwBuffer, anwReleaseFence);
             }
@@ -482,7 +486,7 @@
             bufferDeferred = true;
         } else {
             nsecs_t presentTime = mSyncToDisplay ?
-                    syncTimestampToDisplayLocked(captureTime) : captureTime;
+                    syncTimestampToDisplayLocked(captureTime, releaseFence->dup()) : captureTime;
 
             setTransform(transform, true/*mayChangeMirror*/);
             res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
@@ -1406,7 +1410,7 @@
     }
 }
 
-nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t) {
+nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t, int releaseFence) {
     nsecs_t currentTime = systemTime();
     if (!mFixedFps) {
         mLastCaptureTime = t;
@@ -1440,7 +1444,7 @@
     //
     nsecs_t captureInterval = t - mLastCaptureTime;
     if (captureInterval > kSpacingResetIntervalNs) {
-        for (size_t i = 0; i < VsyncEventData::kFrameTimelinesLength; i++) {
+        for (size_t i = 0; i < vsyncEventData.frameTimelinesLength; i++) {
             const auto& timeline = vsyncEventData.frameTimelines[i];
             if (timeline.deadlineTimestamp >= currentTime &&
                     timeline.expectedPresentationTime > minPresentT) {
@@ -1449,6 +1453,17 @@
                 mLastCaptureTime = t;
                 mLastPresentTime = presentT;
 
+                // If releaseFence is available, store the fence to check signal
+                // time later.
+                mRefVsyncData = vsyncEventData;
+                mReferenceCaptureTime = t;
+                mReferenceArrivalTime = currentTime;
+                if (releaseFence != -1) {
+                    mReferenceFrameFence = new Fence(releaseFence);
+                } else {
+                    mFenceSignalOffset = 0;
+                }
+
                 // Move the expected presentation time back by 1/3 of frame interval to
                 // mitigate the time drift. Due to time drift, if we directly use the
                 // expected presentation time, often times 2 expected presentation time
@@ -1458,6 +1473,36 @@
         }
     }
 
+    // If there is a reference frame release fence, get the signal time and
+    // update the captureToPresentOffset.
+    if (mReferenceFrameFence != nullptr) {
+        mFenceSignalOffset = 0;
+        nsecs_t signalTime = mReferenceFrameFence->getSignalTime();
+        // Now that the fence has signaled, recalculate the offsets based on
+        // the timeline which was actually latched
+        if (signalTime != INT64_MAX) {
+            for (size_t i = 0; i < mRefVsyncData.frameTimelinesLength; i++) {
+                const auto& timeline = mRefVsyncData.frameTimelines[i];
+                if (timeline.deadlineTimestamp >= signalTime) {
+                    nsecs_t originalOffset = mCaptureToPresentOffset;
+                    mCaptureToPresentOffset = timeline.expectedPresentationTime
+                            - mReferenceCaptureTime;
+                    mLastPresentTime = timeline.expectedPresentationTime;
+                    mFenceSignalOffset = signalTime > mReferenceArrivalTime ?
+                            signalTime - mReferenceArrivalTime : 0;
+
+                    ALOGV("%s: Last deadline %" PRId64 " signalTime %" PRId64
+                            " original offset %" PRId64 " new offset %" PRId64
+                            " fence signal offset %" PRId64, __FUNCTION__,
+                            timeline.deadlineTimestamp, signalTime, originalOffset,
+                            mCaptureToPresentOffset, mFenceSignalOffset);
+                    break;
+                }
+            }
+            mReferenceFrameFence.clear();
+        }
+    }
+
     nsecs_t idealPresentT = t + mCaptureToPresentOffset;
     nsecs_t expectedPresentT = mLastPresentTime;
     nsecs_t minDiff = INT64_MAX;
@@ -1501,6 +1546,7 @@
 
     // Find best timestamp in the vsync timelines:
     // - Only use at most kMaxTimelines timelines to avoid long latency
+    // - Add an extra timeline if a display fence is used
     // - closest to the ideal presentation time,
     // - deadline timestamp is greater than the current time, and
     // - For fixed FPS, if the capture interval doesn't deviate too much from refresh interval,
@@ -1509,7 +1555,9 @@
     // - For variable FPS, or if the capture interval deviates from refresh
     //   interval for more than 5%, find a presentation time closest to the
     //   (lastPresentationTime + captureToPresentOffset) instead.
-    int maxTimelines = std::min(kMaxTimelines, (int)VsyncEventData::kFrameTimelinesLength);
+    int fenceAdjustment = (mFenceSignalOffset > 0) ? 1 : 0;
+    int maxTimelines = std::min(kMaxTimelines + fenceAdjustment,
+            (int)vsyncEventData.frameTimelinesLength);
     float biasForShortDelay = 1.0f;
     for (int i = 0; i < maxTimelines; i ++) {
         const auto& vsyncTime = vsyncEventData.frameTimelines[i];
@@ -1520,7 +1568,7 @@
             biasForShortDelay = 1.0 - 2.0 * i / (maxTimelines - 1);
         }
         if (std::abs(vsyncTime.expectedPresentationTime - idealPresentT) < minDiff &&
-                vsyncTime.deadlineTimestamp >= currentTime &&
+                vsyncTime.deadlineTimestamp >= currentTime + mFenceSignalOffset &&
                 ((!cameraDisplayInSync && vsyncTime.expectedPresentationTime > minPresentT) ||
                  (cameraDisplayInSync && vsyncTime.expectedPresentationTime >
                 mLastPresentTime + minInterval +
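For context, a minimal sketch of the fence-driven recalculation above (illustrative only, with simplified stand-in types rather than the actual VsyncEventData/Fence classes): once the reference frame's release fence signals, the first vsync timeline whose deadline is not earlier than the signal time is the one the buffer could actually have been latched for, and the capture-to-present offset is re-derived from it.

    #include <cstdint>
    #include <vector>

    // Simplified stand-in for a display frame timeline entry.
    struct FrameTimeline {
        int64_t deadlineTimestamp;         // latest time a buffer can be latched
        int64_t expectedPresentationTime;  // when the frame would be shown
    };

    // Re-derive the capture-to-present offset from the earliest timeline whose
    // deadline the signaled fence could still have met; keep the old offset if
    // the fence signaled after every known deadline.
    int64_t offsetFromFenceSignal(int64_t signalTime, int64_t captureTime,
                                  const std::vector<FrameTimeline>& timelines,
                                  int64_t fallbackOffset) {
        for (const auto& timeline : timelines) {
            if (timeline.deadlineTimestamp >= signalTime) {
                return timeline.expectedPresentationTime - captureTime;
            }
        }
        return fallbackOffset;
    }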
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 9a08485..1435081 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -446,7 +446,14 @@
     static constexpr nsecs_t kTimelineThresholdNs = 1000000LL; // 1 millisecond
     static constexpr float kMaxIntervalRatioDeviation = 0.05f;
     static constexpr int kMaxTimelines = 2;
-    nsecs_t syncTimestampToDisplayLocked(nsecs_t t);
+    nsecs_t syncTimestampToDisplayLocked(nsecs_t t, int releaseFence);
+
+    // Reference frame state, used when a release fence is available for display sync
+    sp<Fence> mReferenceFrameFence;
+    nsecs_t mReferenceCaptureTime = 0;
+    nsecs_t mReferenceArrivalTime = 0;
+    nsecs_t mFenceSignalOffset = 0;
+    VsyncEventData  mRefVsyncData;
 
     // Re-space frames by delaying queueBuffer so that frame delivery has
     // the same cadence as capture. Default is on for SurfaceTexture bound
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 738c314..a2a9d04 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -260,7 +260,7 @@
 
     auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
     if (mapper != states.rotateAndCropMappers.end()) {
-        const auto& remappedKeys = iter->second.getRemappedKeys();
+        const auto& remappedKeys = mapper->second.getRemappedKeys();
         keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
     }
 
@@ -639,9 +639,24 @@
                     if (deviceInfo != states.physicalDeviceInfoMap.end()) {
                         auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
                         if (orientation.count > 0) {
+                            int32_t transform;
                             ret = CameraUtils::getRotationTransform(deviceInfo->second,
-                                    OutputConfiguration::MIRROR_MODE_AUTO, &request.transform);
-                            if (ret != OK) {
+                                    OutputConfiguration::MIRROR_MODE_AUTO, &transform);
+                            if (ret == OK) {
+                                // It is possible for camera providers to return the capture
+                                // results after the processed frames. In such a scenario, we will
+                                // not be able to set the output transformation before the frames
+                                // return to the consumer for the current capture request, but we
+                                // could still try to configure it for any future requests that are
+                                // still in flight. The assumption is that the physical device id
+                                // remains the same for the duration of the pending queue.
+                                for (size_t i = 0; i < states.inflightMap.size(); i++) {
+                                    auto &r = states.inflightMap.editValueAt(i);
+                                    if (r.requestTimeNs >= request.requestTimeNs) {
+                                        r.transform = transform;
+                                    }
+                                }
+                            } else {
                                 ALOGE("%s: Failed to calculate current stream transformation: %s "
                                         "(%d)", __FUNCTION__, strerror(-ret), ret);
                             }
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 7924eb5..aa941b0 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -205,6 +205,7 @@
         return res;
     }
     mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId.string());
+    mIsCompositeJpegRDisabled = manager->isCompositeJpegRDisabled(mId.string());
 
     std::vector<std::string> physicalCameraIds;
     bool isLogical = manager->isLogicalCamera(mId.string(), &physicalCameraIds);
@@ -1418,9 +1419,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait) :
+                bool overrideToPortrait,
+                bool supportSettingsOverride) :
           RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
-                  supportCameraMute, overrideToPortrait) {}
+                  supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
 
 status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
@@ -1590,9 +1592,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait) {
+                bool overrideToPortrait,
+                bool supportSettingsOverride) {
     return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-            useHalBufManager, supportCameraMute, overrideToPortrait);
+            useHalBufManager, supportCameraMute, overrideToPortrait, supportSettingsOverride);
 };
 
 sp<Camera3Device::Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index 8ee5c63..99a308b 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -179,7 +179,8 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait);
+                bool overrideToPortrait,
+                bool supportSettingsOverride);
 
         status_t switchToOffline(
                 const std::vector<int32_t>& streamsToKeep,
@@ -265,7 +266,8 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait) override;
+                bool overrideToPortrait,
+                bool supportSettingsOverride) override;
 
     virtual sp<Camera3DeviceInjectionMethods>
             createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index b367019..0d44dd5 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -704,9 +704,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait) {
+                bool overrideToPortrait,
+                bool supportSettingsOverride) {
         return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-                useHalBufManager, supportCameraMute, overrideToPortrait);
+                useHalBufManager, supportCameraMute, overrideToPortrait, supportSettingsOverride);
 };
 
 sp<Camera3Device::Camera3DeviceInjectionMethods>
@@ -1701,9 +1702,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait) :
+                bool overrideToPortrait,
+                bool supportSettingsOverride) :
           RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
-                  supportCameraMute, overrideToPortrait) {}
+                  supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
 
 status_t HidlCamera3Device::HidlRequestThread::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index 7b216b2..1e50844 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -177,7 +177,8 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait);
+                bool overrideToPortrait,
+                bool supportSettingsOverride);
 
         status_t switchToOffline(
                 const std::vector<int32_t>& streamsToKeep,
@@ -225,7 +226,8 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait) override;
+                bool overrideToPortrait,
+                bool supportSettingsOverride) override;
 
     virtual sp<Camera3DeviceInjectionMethods>
             createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index 26e813a..0f7f127 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -19,10 +19,12 @@
 #include <gui/Surface.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 
+#include <aidl/AidlUtils.h>
 #include <hidl/AidlCameraDeviceCallbacks.h>
 #include <hidl/HidlCameraDeviceUser.h>
 #include <hidl/Utils.h>
 #include <android/hardware/camera/device/3.2/types.h>
+#include <android-base/properties.h>
 
 namespace android {
 namespace frameworks {
@@ -31,6 +33,7 @@
 namespace V2_1 {
 namespace implementation {
 
+using hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
 using hardware::cameraservice::utils::conversion::convertToHidl;
 using hardware::cameraservice::utils::conversion::convertFromHidl;
 using hardware::cameraservice::utils::conversion::B2HStatus;
@@ -55,6 +58,7 @@
     const sp<hardware::camera2::ICameraDeviceUser> &deviceRemote)
   : mDeviceRemote(deviceRemote) {
     mInitSuccess = initDevice();
+    mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
 }
 
 bool HidlCameraDeviceUser::initDevice() {
@@ -235,8 +239,16 @@
     android::CameraMetadata cameraMetadata;
     binder::Status ret = mDeviceRemote->createDefaultRequest(convertFromHidl(templateId),
                                                              &cameraMetadata);
-    HStatus hStatus = B2HStatus(ret);
+
     HCameraMetadata hidlMetadata;
+    if (filterVndkKeys(mVndkVersion, cameraMetadata, /*isStatic*/false) != OK) {
+        ALOGE("%s: Unable to filter vndk metadata keys for version %d",
+              __FUNCTION__, mVndkVersion);
+        _hidl_cb(HStatus::UNKNOWN_ERROR, hidlMetadata);
+        return Void();
+    }
+
+    HStatus hStatus = B2HStatus(ret);
     const camera_metadata_t *rawMetadata = cameraMetadata.getAndLock();
     convertToHidl(rawMetadata, &hidlMetadata);
     _hidl_cb(hStatus, hidlMetadata);
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
index 0e2ab3d..a653ca2 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
@@ -127,6 +127,7 @@
     std::shared_ptr<CaptureResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
     bool mInitSuccess = false;
     int32_t mRequestId = REQUEST_ID_NONE;
+    int mVndkVersion = -1;
 };
 
 } // implementation
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index b1bf41e..921ad7d 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -29,11 +29,8 @@
     ],
 }
 
-cc_fuzz {
-    name: "camera_service_fuzzer",
-    srcs: [
-        "camera_service_fuzzer.cpp",
-    ],
+cc_defaults {
+    name: "camera_service_fuzzer_defaults",
     header_libs: [
         "libmedia_headers",
     ],
@@ -74,3 +71,28 @@
 
     },
 }
+
+cc_fuzz {
+    name: "camera_service_fuzzer",
+    srcs: [
+        "camera_service_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_service_fuzzer_defaults"
+    ],
+}
+
+cc_fuzz {
+    name: "camera_service_aidl_fuzzer",
+    srcs: [
+        "camera_service_aidl_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_service_fuzzer_defaults",
+        "service_fuzzer_defaults",
+        "fuzzer_disable_leaks",
+    ],
+    fuzz_config: {
+        triage_assignee: "waghpawan@google.com",
+    },
+}
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_aidl_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_aidl_fuzzer.cpp
new file mode 100644
index 0000000..a0fb93c
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_aidl_fuzzer.cpp
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fuzzbinder/libbinder_driver.h>
+#include <CameraService.h>
+
+using android::fuzzService;
+using android::sp;
+using android::CameraService;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    auto service = sp<CameraService>::make();
+    fuzzService(service, FuzzedDataProvider(data, size));
+    return 0;
+}
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 072fcfb..b397573 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -346,7 +346,8 @@
                                  android::CameraService::USE_CALLING_UID,
                                  android::CameraService::USE_CALLING_PID,
                                  /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
-                                 /*overrideToPortrait*/true, &cameraDevice);
+                                 /*overrideToPortrait*/true, /*forceSlowJpegMode*/false,
+                                 &cameraDevice);
     if (!rc.isOk()) {
         // camera not connected
         return;
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 5e2a3fb..b035edd 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -31,11 +31,14 @@
         "libmedia_headers",
     ],
 
+    defaults: [
+        "libcameraservice_deps",
+    ],
+
     shared_libs: [
         "libbase",
         "libbinder",
         "libcutils",
-        "libcameraservice",
         "libhidlbase",
         "liblog",
         "libcamera_client",
@@ -45,11 +48,6 @@
         "libjpeg",
         "libexif",
         "android.hardware.camera.common@1.0",
-        "android.hardware.camera.provider@2.4",
-        "android.hardware.camera.provider@2.5",
-        "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
@@ -58,6 +56,12 @@
     ],
 
     static_libs: [
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V2-ndk",
+        "libcameraservice",
         "libgmock",
     ],
 
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 4225366..b58975f 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -26,8 +26,14 @@
 
 namespace android {
 
-using hardware::ICameraServiceProxy;
+using hardware::CameraExtensionSessionStats;
 using hardware::CameraSessionStats;
+using hardware::ICameraServiceProxy;
+
+namespace {
+// Sentinel value returned when extension session stats are reported with a stale or invalid key.
+const String16 POISON_EXT_STATS_KEY("poisoned_stats");
+} // anonymous namespace
 
 /**
  * CameraSessionStatsWrapper functions
@@ -53,6 +59,7 @@
     mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_CLOSED;
     mSessionStats.mLatencyMs = latencyMs;
     mSessionStats.mDeviceError = deviceError;
+    mSessionStats.mSessionIndex = 0;
     updateProxyDeviceState(proxyBinder);
 }
 
@@ -74,6 +81,7 @@
 
     mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_ACTIVE;
     mSessionStats.mMaxPreviewFps = maxPreviewFps;
+    mSessionStats.mSessionIndex++;
     updateProxyDeviceState(proxyBinder);
 
     // Reset mCreationDuration to -1 to distinguish between 1st session
@@ -95,10 +103,12 @@
     mSessionStats.mUserTag = String16(userTag.c_str());
     mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
     mSessionStats.mStreamStats = streamStats;
+
     updateProxyDeviceState(proxyBinder);
 
     mSessionStats.mInternalReconfigure = 0;
     mSessionStats.mStreamStats.clear();
+    mSessionStats.mCameraExtensionSessionStats = {};
 }
 
 int64_t CameraServiceProxyWrapper::CameraSessionStatsWrapper::getLogId() {
@@ -106,6 +116,65 @@
     return mSessionStats.mLogId;
 }
 
+String16 CameraServiceProxyWrapper::CameraSessionStatsWrapper::updateExtensionSessionStats(
+        const hardware::CameraExtensionSessionStats& extStats) {
+    Mutex::Autolock l(mLock);
+    CameraExtensionSessionStats& currStats = mSessionStats.mCameraExtensionSessionStats;
+    if (currStats.key != extStats.key) {
+        // Mismatched keys. Extension stats were likely reported for a closed session.
+        ALOGW("%s: mismatched extension stats key: current='%s' reported='%s'. Dropping stats.",
+              __FUNCTION__, String8(currStats.key).c_str(), String8(extStats.key).c_str());
+        return POISON_EXT_STATS_KEY; // return poisoned key so future calls are
+                                     // definitely dropped.
+    }
+
+    // Matching keys...
+    if (currStats.key.size()) {
+        // Non-empty matching keys. Overwrite.
+        ALOGV("%s: Overwriting extension session stats: %s", __FUNCTION__,
+              extStats.toString().c_str());
+        currStats = extStats;
+        return currStats.key;
+    }
+
+    // Matching empty keys...
+    if (mSessionStats.mClientName != extStats.clientName) {
+        ALOGW("%s: extension stats reported for unexpected package: current='%s' reported='%s'. "
+              "Dropping stats.", __FUNCTION__,
+              String8(mSessionStats.mClientName).c_str(),
+              String8(extStats.clientName).c_str());
+        return POISON_EXT_STATS_KEY;
+    }
+
+    // Matching empty keys for the current client...
+    if (mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_OPEN ||
+        mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_IDLE) {
+        // Camera is open, but not active. It is possible that the active callback hasn't
+        // occurred yet. Keep the stats, but don't associate it with any session.
+        ALOGV("%s: extension stat reported for an open, but not active camera. "
+              "Saving stats, but not generating key.", __FUNCTION__);
+        currStats = extStats;
+        return {}; // Subsequent calls will handle setting the correct key.
+    }
+
+    if (mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_ACTIVE) {
+        // Camera is active. First call for the session.
+        currStats = extStats;
+
+        // Generate a new key from logId and sessionIndex.
+        std::ostringstream key;
+        key << mSessionStats.mSessionIndex << '/' << mSessionStats.mLogId;
+        currStats.key = String16(key.str().c_str());
+        ALOGV("%s: New extension session stats: %s", __FUNCTION__, currStats.toString().c_str());
+        return currStats.key;
+    }
+
+    // Camera is closed. Probably a stale call.
+    ALOGW("%s: extension stats reported for closed camera id '%s'. Dropping stats.",
+          __FUNCTION__, String8(mSessionStats.mCameraId).c_str());
+    return {};
+}
+
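The key generated above is simply "<sessionIndex>/<logId>", so each session within a single camera open reports extension stats under a distinct key. A small illustrative helper (hypothetical, not part of this patch) showing the same format:

    #include <cstdint>
    #include <sstream>
    #include <string>

    // Hypothetical illustration of the key format: the 3rd session of a camera
    // open whose log id is 1234567890 would report under "3/1234567890".
    std::string makeExtensionStatsKey(int32_t sessionIndex, int64_t logId) {
        std::ostringstream key;
        key << sessionIndex << '/' << logId;
        return key.str();
    }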
 /**
  * CameraServiceProxyWrapper functions
  */
@@ -174,12 +243,12 @@
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
         Mutex::Autolock l(mLock);
-        sessionStats = mSessionStatsMap[id];
-        if (sessionStats == nullptr) {
+        if (mSessionStatsMap.count(id) == 0) {
             ALOGE("%s: SessionStatsMap should contain camera %s",
                     __FUNCTION__, id.c_str());
             return;
         }
+        sessionStats = mSessionStatsMap[id];
     }
 
     ALOGV("%s: id %s, operatingMode %d, internalConfig %d, latencyMs %d",
@@ -191,12 +260,12 @@
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
         Mutex::Autolock l(mLock);
-        sessionStats = mSessionStatsMap[id];
-        if (sessionStats == nullptr) {
+        if (mSessionStatsMap.count(id) == 0) {
             ALOGE("%s: SessionStatsMap should contain camera %s when logActive is called",
                     __FUNCTION__, id.c_str());
             return;
         }
+        sessionStats = mSessionStatsMap[id];
     }
 
     ALOGV("%s: id %s", __FUNCTION__, id.c_str());
@@ -211,13 +280,12 @@
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
         Mutex::Autolock l(mLock);
-        sessionStats = mSessionStatsMap[id];
-    }
-
-    if (sessionStats == nullptr) {
-        ALOGE("%s: SessionStatsMap should contain camera %s when logIdle is called",
+        if (mSessionStatsMap.count(id) == 0) {
+            ALOGE("%s: SessionStatsMap should contain camera %s when logIdle is called",
                 __FUNCTION__, id.c_str());
-        return;
+            return;
+        }
+        sessionStats = mSessionStatsMap[id];
     }
 
     ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d"
@@ -335,4 +403,21 @@
     return ret;
 }
 
+String16 CameraServiceProxyWrapper::updateExtensionStats(
+        const hardware::CameraExtensionSessionStats& extStats) {
+    std::shared_ptr<CameraSessionStatsWrapper> stats;
+    String8 cameraId = String8(extStats.cameraId);
+    {
+        Mutex::Autolock _l(mLock);
+        if (mSessionStatsMap.count(cameraId) == 0) {
+            ALOGE("%s: CameraExtensionSessionStats reported for a camera id that isn't open: %s",
+                  __FUNCTION__, cameraId.c_str());
+            return {};
+        }
+
+        stats = mSessionStatsMap[cameraId];
+        return stats->updateExtensionSessionStats(extStats);
+    }
+}
+
 }  // namespace android
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index d47c738..e32580c 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -65,6 +65,8 @@
                 const std::string& userTag, int32_t videoStabilizationMode,
                 const std::vector<hardware::CameraStreamStats>& streamStats);
 
+        String16 updateExtensionSessionStats(const hardware::CameraExtensionSessionStats& extStats);
+
         // Returns the logId associated with this event.
         int64_t getLogId();
     };
@@ -127,6 +129,9 @@
     // frameworks/av/camera/include/camera/CameraSessionStats.h for more details about this
     // identifier. Returns a non-0 value on success.
     int64_t getCurrentLogIdForCamera(const String8& cameraId);
+
+    // Update the stored extension stats to the latest values
+    String16 updateExtensionStats(const hardware::CameraExtensionSessionStats& extStats);
 };
 
 } // android
diff --git a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
new file mode 100644
index 0000000..92a1030
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SchedulingPolicyUtils.h"
+
+#include <errno.h>
+#include <pthread.h>
+#include <sched.h>
+
+#include "CameraThreadState.h"
+#include <private/android_filesystem_config.h>
+#include <processgroup/processgroup.h>
+#include <processgroup/sched_policy.h>
+#include <procinfo/process.h>
+#include <utils/Log.h>
+
+namespace android {
+namespace camera3 {
+namespace SchedulingPolicyUtils {
+
+int requestPriorityDirect(int pid, int tid, int prio) {
+    android::procinfo::ProcessInfo processInfo;
+    static const int kMinPrio = 1;
+    static const int kMaxPrio = 3;
+
+    if (!android::procinfo::GetProcessInfo(tid, &processInfo)) {
+        ALOGE("%s: Error getting process info", __FUNCTION__);
+        return -EPERM;
+    }
+
+    if (prio < kMinPrio || prio > kMaxPrio || processInfo.pid != pid) {
+        ALOGE("%s: Invalid parameter prio=%d pid=%d procinfo.pid=%d", __FUNCTION__, prio, pid,
+                processInfo.pid);
+        return -EPERM;
+    }
+
+    // Set the thread group to the audio system thread group, consistent with the
+    // implementation in SchedulingPolicyService.java when isApp is false in the
+    // requestPriority method.
+    if (!SetTaskProfiles(tid, {get_sched_policy_profile_name(SP_AUDIO_SYS)},
+            /*use_fd_cache*/ true)) {
+        ALOGE("%s:Error in  SetTaskProfiles", __FUNCTION__);
+        return -EPERM;
+    }
+
+    struct sched_param param;
+    param.sched_priority = prio;
+    return sched_setscheduler(tid, SCHED_FIFO | SCHED_RESET_ON_FORK, &param);
+}
+
+} // namespace SchedulingPolicyUtils
+} // namespace camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h
new file mode 100644
index 0000000..f71fddf
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_SERVICE_CAMERA_SCHEDULING_POLICY_UTILS_H
+#define ANDROID_SERVICE_CAMERA_SCHEDULING_POLICY_UTILS_H
+
+namespace android {
+namespace camera3 {
+namespace SchedulingPolicyUtils {
+
+/**
+ * Request elevated priority for thread tid, whose thread group leader must be pid.
+ * Instead of using the scheduling policy service, this method uses direct system calls.
+ * The priority parameter is currently restricted to the range 1 to 3, matching the
+ * scheduling policy service implementation.
+ */
+int requestPriorityDirect(int pid, int tid, int prio);
+
+} // SchedulingPolicyUtils
+} // camera3
+} // android
+
+#endif
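A hedged usage sketch for requestPriorityDirect (hypothetical call site; the real callers live elsewhere in the camera service): elevate the calling thread to the middle of the allowed 1..3 range and fall back to default scheduling on failure.

    #include <unistd.h>
    #include "SchedulingPolicyUtils.h"

    // Hypothetical caller: request SCHED_FIFO priority 2 for the current thread.
    // A nonzero return value means the task profile or scheduler change failed.
    static void elevateCurrentThreadPriority() {
        int err = android::camera3::SchedulingPolicyUtils::requestPriorityDirect(
                getpid(), gettid(), /*prio*/ 2);
        if (err != 0) {
            // Keep the default scheduling policy on failure.
        }
    }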
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 48b27be..ffd9f01 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -161,8 +161,13 @@
             getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
     const int32_t heicSizesTag =
             getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t jpegRSizesTag = getAppropriateModeTag(
+            ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
 
+    bool isJpegRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
+                ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R));
     camera_metadata_ro_entry streamConfigs =
+            (isJpegRDataSpace) ? info.find(jpegRSizesTag) :
             (dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
             (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
             info.find(heicSizesTag) :
@@ -380,6 +385,23 @@
     }
 }
 
+bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace) {
+    switch (colorSpace) {
+        case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB:
+            *dataSpace = HAL_DATASPACE_V0_SRGB;
+            return true;
+        case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3:
+            *dataSpace = HAL_DATASPACE_DISPLAY_P3;
+            return true;
+        case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG:
+            *(reinterpret_cast<int32_t*>(dataSpace)) = HAL_DATASPACE_BT2020_HLG;
+            return true;
+        default:
+            ALOGE("%s: Unsupported color space %d", __FUNCTION__, colorSpace);
+            return false;
+    }
+}
+
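Roughly, the helper above gives non-BLOB outputs a default dataspace derived from the requested color space; a minimal illustrative call (assumes the enclosing SessionConfigurationUtils namespace, not code from this patch):

    // Illustrative: a DISPLAY_P3 color space resolves to HAL_DATASPACE_DISPLAY_P3;
    // unsupported values leave dataSpace untouched and the call returns false.
    android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
    if (dataSpaceFromColorSpace(&dataSpace,
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3)) {
        // dataSpace == HAL_DATASPACE_DISPLAY_P3
    }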
 bool isStreamUseCaseSupported(int64_t streamUseCase,
         const CameraMetadata &deviceInfo) {
     camera_metadata_ro_entry_t availableStreamUseCases =
@@ -472,6 +494,16 @@
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
 
+    if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
+            format != HAL_PIXEL_FORMAT_BLOB) {
+        if (!dataSpaceFromColorSpace(&dataSpace, colorSpace)) {
+            String8 msg = String8::format("Camera %s: color space %d not supported, failed to "
+                    "convert to data space", logicalCameraId.string(), colorSpace);
+            ALOGE("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        }
+    }
+
     // FIXME: remove this override since the default format should be
     //       IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
     if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
@@ -483,7 +515,7 @@
     }
     std::unordered_set<int32_t> overriddenSensorPixelModes;
     if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
-            physicalCameraMetadata, flexibleConsumer, &overriddenSensorPixelModes) != OK) {
+            physicalCameraMetadata, &overriddenSensorPixelModes) != OK) {
         String8 msg = String8::format("Camera %s: sensor pixel modes for stream with "
                 "format %#x are not valid",logicalCameraId.string(), format);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
@@ -646,6 +678,7 @@
 convertToHALStreamCombination(
         const SessionConfiguration& sessionConfiguration,
         const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
+        bool isCompositeJpegRDisabled,
         metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
         aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
         bool overrideForPerfClass, bool *earlyExit) {
@@ -757,7 +790,7 @@
             streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
             if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
                     streamInfo.format, streamInfo.width,
-                    streamInfo.height, metadataChosen, false /*flexibleConsumer*/,
+                    streamInfo.height, metadataChosen,
                     &streamInfo.sensorPixelModesUsed) != OK) {
                         ALOGE("%s: Deferred surface sensor pixel modes not valid",
                                 __FUNCTION__);
@@ -789,7 +822,8 @@
                 bool isHeicCompositeStream =
                         camera3::HeicCompositeStream::isHeicCompositeStream(surface);
                 bool isJpegRCompositeStream =
-                        camera3::JpegRCompositeStream::isJpegRCompositeStream(surface);
+                        camera3::JpegRCompositeStream::isJpegRCompositeStream(surface) &&
+                        !isCompositeJpegRDisabled;
                 if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
                     // We need to take in to account that composite streams can have
                     // additional internal camera streams.
@@ -934,7 +968,7 @@
 
 status_t checkAndOverrideSensorPixelModesUsed(
         const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
-        const CameraMetadata &staticInfo, bool flexibleConsumer,
+        const CameraMetadata &staticInfo,
         std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {
 
     const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
@@ -943,6 +977,8 @@
         if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
                 sensorPixelModesUsedSet.end()) {
             // invalid value for non ultra high res sensors
+            ALOGE("%s ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION used on a device which doesn't "
+                    "support ultra high resolution capture", __FUNCTION__);
             return BAD_VALUE;
         }
         overriddenSensorPixelModesUsed->clear();
@@ -973,27 +1009,30 @@
             overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
             return OK;
         }
-        // We don't allow flexible consumer for max resolution mode.
         if (isInMaximumResolutionStreamConfigurationMap) {
-            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
-            return OK;
-        }
-        if (isInDefaultStreamConfigurationMap || (flexibleConsumer && width < ROUNDING_WIDTH_CAP)) {
+            overriddenSensorPixelModesUsed->insert(
+                    ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+        } else {
             overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
-            return OK;
         }
-        return BAD_VALUE;
+        return OK;
     }
 
     // Case2: The app has set sensorPixelModesUsed, we need to verify that they
     // are valid / err out.
     if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
             sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
+        ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_DEFAULT set by client, but stream f: %d size %d x %d"
+                " isn't present in default stream configuration map", __FUNCTION__, format, width,
+                height);
         return BAD_VALUE;
     }
 
    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
             sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
+        ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION set by client, but stream f: "
+                "%d size %d x %d isn't present in max resolution stream configuration map", __FUNCTION__,
+                format, width, height);
         return BAD_VALUE;
     }
     *overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
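With the flexible-consumer special case removed, the no-override path above reduces to a single decision. Conceptually (illustrative sketch using the two lookup flags from the surrounding code, not a verbatim excerpt):

    // If the app did not set sensorPixelModesUsed, pick MAXIMUM_RESOLUTION when the
    // stream configuration appears in the maximum resolution map, otherwise DEFAULT.
    int32_t resolvedPixelMode = isInMaximumResolutionStreamConfigurationMap
            ? ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION
            : ANDROID_SENSOR_PIXEL_MODE_DEFAULT;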
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index b5654ac..dc143da 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -115,6 +115,8 @@
 bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
         int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
 
+bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace);
+
 bool isStreamUseCaseSupported(int64_t streamUseCase, const CameraMetadata &deviceInfo);
 
 void mapStreamInfo(const OutputStreamInfo &streamInfo,
@@ -136,7 +138,7 @@
 binder::Status
 convertToHALStreamCombination(
     const SessionConfiguration& sessionConfiguration,
-    const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
+    const String8 &logicalCameraId, const CameraMetadata &deviceInfo, bool isCompositeJpegRDisabled,
     metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
     aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
     bool overrideForPerfClass, bool *earlyExit);
@@ -145,7 +147,7 @@
 
 status_t checkAndOverrideSensorPixelModesUsed(
         const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
-        const CameraMetadata &staticInfo, bool flexibleConsumer,
+        const CameraMetadata &staticInfo,
         std::unordered_set<int32_t> *overriddenSensorPixelModesUsed);
 
 bool targetPerfClassPrimaryCamera(
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index 5444f2a..111c1bf 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -111,8 +111,8 @@
         bool overrideForPerfClass, bool *earlyExit) {
     aidl::android::hardware::camera::device::StreamConfiguration aidlStreamConfiguration;
     auto ret = convertToHALStreamCombination(sessionConfiguration, logicalCameraId, deviceInfo,
-            getMetadata, physicalCameraIds, aidlStreamConfiguration, overrideForPerfClass,
-            earlyExit);
+            false /*isCompositeJpegRDisabled*/, getMetadata, physicalCameraIds,
+            aidlStreamConfiguration, overrideForPerfClass, earlyExit);
     if (!ret.isOk()) {
         return ret;
     }
diff --git a/services/camera/libcameraservice/utils/TagMonitor.cpp b/services/camera/libcameraservice/utils/TagMonitor.cpp
index fe87ed6..d1e54ab 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.cpp
+++ b/services/camera/libcameraservice/utils/TagMonitor.cpp
@@ -346,7 +346,7 @@
                         == OK) {
                         returnStr += value_string_tmp;
                     } else {
-                        returnStr.appendFormat("%hhu", *(data_ptr + index));
+                        returnStr.appendFormat("%hhu ", *(data_ptr + index));
                     }
                     break;
                 case TYPE_INT32:
@@ -363,7 +363,7 @@
                     }
                     break;
                 case TYPE_FLOAT:
-                    returnStr.appendFormat("%0.8f", *(float*)(data_ptr + index));
+                    returnStr.appendFormat("%0.8f ", *(float*)(data_ptr + index));
                     break;
                 case TYPE_INT64:
                     returnStr.appendFormat("%" PRId64 " ", *(int64_t*)(data_ptr + index));
diff --git a/services/mediaextractor/Android.bp b/services/mediaextractor/Android.bp
index acafe56..e22d749 100644
--- a/services/mediaextractor/Android.bp
+++ b/services/mediaextractor/Android.bp
@@ -89,3 +89,25 @@
         "code_coverage.policy",
     ],
 }
+
+cc_fuzz {
+    name: "mediaextractor_service_fuzzer",
+    shared_libs: [
+        "libmedia",
+        "libmediaextractorservice",
+        "libmediautils",
+        "liblog",
+        "libavservices_minijail",
+    ],
+    defaults: [
+        "service_fuzzer_defaults",
+        "fuzzer_disable_leaks",
+    ],
+    srcs: ["fuzzers/MediaExtractorServiceFuzzer.cpp"],
+    fuzz_config: {
+        cc: [
+            "android-media-playback+bugs@google.com",
+        ],
+        triage_assignee: "waghpawan@google.com",
+    },
+}
\ No newline at end of file
diff --git a/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp b/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp
new file mode 100644
index 0000000..d329e54
--- /dev/null
+++ b/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <fuzzbinder/libbinder_driver.h>
+
+#include "MediaExtractorService.h"
+
+using ::android::fuzzService;
+using ::android::sp;
+using ::android::MediaExtractorService;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    auto service = sp<MediaExtractorService>::make();
+    fuzzService(service, FuzzedDataProvider(data, size));
+    return 0;
+}
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
index 9ff0ce4..c96c37b 100644
--- a/services/medialog/fuzzer/Android.bp
+++ b/services/medialog/fuzzer/Android.bp
@@ -38,5 +38,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of the libmedialogservice library",
+        vector: "local_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "future_version",
     },
 }
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index adb2217..af1372b 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -524,6 +524,8 @@
                                      "audiotrack",
                                      // other media
                                      "codec",
+                                     "freeze",
+                                     "judder",
                                      "extractor",
                                      "mediadrm",
                                      "mediaparser",
diff --git a/services/mediametrics/StringUtils.cpp b/services/mediametrics/StringUtils.cpp
index d1c7a18..5766f1c 100644
--- a/services/mediametrics/StringUtils.cpp
+++ b/services/mediametrics/StringUtils.cpp
@@ -15,11 +15,13 @@
  */
 
 //#define LOG_NDEBUG 0
-#define LOG_TAG "MediaMetricsService::stringutils"
+#define LOG_TAG "mediametrics::stringutils"
 #include <utils/Log.h>
 
 #include "StringUtils.h"
 
+#include <charconv>
+
 #include "AudioTypes.h"
 
 namespace android::mediametrics::stringutils {
@@ -54,6 +56,26 @@
     }
 }
 
+bool parseVector(const std::string &str, std::vector<int32_t> *vector) {
+    std::vector<int32_t> values;
+    const char *p = str.c_str();
+    const char *last = p + str.size();
+    while (p != last) {
+        if (*p == ',' || *p == '{' || *p == '}') {
+            p++;
+        }
+        int32_t value = -1;
+        auto [ptr, error] = std::from_chars(p, last, value);
+        if (error == std::errc::invalid_argument || error == std::errc::result_out_of_range) {
+            return false;
+        }
+        p = ptr;
+        values.push_back(value);
+    }
+    *vector = std::move(values);
+    return true;
+}
+
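A quick usage sketch for the parser above (hypothetical call site, not part of this patch). Note that the parser expects no whitespace between elements, since std::from_chars does not skip spaces:

    #include <cstdint>
    #include <vector>
    #include "StringUtils.h"

    // Hypothetical caller: parse a brace-delimited list such as "{4,300,0,-112,1}".
    static bool demoParseVector() {
        std::vector<int32_t> values;
        bool ok = android::mediametrics::stringutils::parseVector(
                "{4,300,0,-112,1}", &values);
        // On success, values == {4, 300, 0, -112, 1}.
        return ok;
    }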
 std::vector<std::pair<std::string, std::string>> getDeviceAddressPairs(const std::string& devices)
 {
     std::vector<std::pair<std::string, std::string>> result;
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
index 8b33f10..99703e3 100644
--- a/services/mediametrics/fuzzer/Android.bp
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -27,13 +27,8 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_fuzz {
-    name: "mediametrics_service_fuzzer",
-
-    srcs: [
-        "mediametrics_service_fuzzer.cpp",
-    ],
-
+cc_defaults {
+    name: "mediametrics_service_fuzzer_defaults",
     static_libs: [
         "libmediametrics",
         "libmediametricsservice",
@@ -68,5 +63,36 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libmediametricsservice",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
+
+cc_fuzz {
+    name: "mediametrics_service_fuzzer",
+
+    srcs: [
+        "mediametrics_service_fuzzer.cpp",
+    ],
+    defaults: [
+        "mediametrics_service_fuzzer_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "mediametrics_aidl_fuzzer",
+    srcs: [
+        "mediametrics_aidl_fuzzer.cpp",
+    ],
+    defaults: [
+        "service_fuzzer_defaults",
+        "fuzzer_disable_leaks",
+        "mediametrics_service_fuzzer_defaults",
+    ],
+}
diff --git a/services/mediametrics/fuzzer/mediametrics_aidl_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_aidl_fuzzer.cpp
new file mode 100644
index 0000000..c7468c7
--- /dev/null
+++ b/services/mediametrics/fuzzer/mediametrics_aidl_fuzzer.cpp
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <fuzzbinder/libbinder_driver.h>
+
+#include <mediametricsservice/MediaMetricsService.h>
+
+using ::android::fuzzService;
+using ::android::sp;
+using ::android::MediaMetricsService;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    auto service = sp<MediaMetricsService>::make();
+    fuzzService(service, FuzzedDataProvider(data, size));
+    return 0;
+}
diff --git a/services/mediametrics/include/mediametricsservice/StatsdLog.h b/services/mediametrics/include/mediametricsservice/StatsdLog.h
index e207bac..5d5009e 100644
--- a/services/mediametrics/include/mediametricsservice/StatsdLog.h
+++ b/services/mediametrics/include/mediametricsservice/StatsdLog.h
@@ -16,11 +16,13 @@
 
 #pragma once
 
-#include <audio_utils/SimpleLog.h>
 #include <map>
 #include <mutex>
 #include <sstream>
 
+#include <android-base/thread_annotations.h>
+#include <audio_utils/SimpleLog.h>
+
 namespace android::mediametrics {
 
 class StatsdLog {
@@ -61,9 +63,9 @@
    }
 
 private:
+    mutable std::mutex mLock;
     SimpleLog mSimpleLog; // internally locked
     std::map<int /* atom */, size_t /* count */> mCountMap GUARDED_BY(mLock); // sorted
-    mutable std::mutex mLock;
 };
 
 } // namespace android::mediametrics
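
A side note on the member reorder above, as a minimal sketch with hypothetical names: declaring the mutex before the members it guards means the mutex is constructed first and destroyed last, so the guarded state never outlives its lock.

#include <cstddef>
#include <map>
#include <mutex>

#include <android-base/thread_annotations.h>

class ExampleCounter {
public:
    void add(int atom) {
        std::lock_guard lock(mLock);
        ++mCountMap[atom];
    }
private:
    mutable std::mutex mLock;  // declared before the state it protects
    std::map<int, size_t> mCountMap GUARDED_BY(mLock);
};
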
diff --git a/services/mediametrics/include/mediametricsservice/StringUtils.h b/services/mediametrics/include/mediametricsservice/StringUtils.h
index 78c25ff..ed2cf2e 100644
--- a/services/mediametrics/include/mediametricsservice/StringUtils.h
+++ b/services/mediametrics/include/mediametricsservice/StringUtils.h
@@ -72,6 +72,12 @@
 std::vector<std::string> split(const std::string& flags, const char *delim);
 
 /**
+ * Parses a vector of integers using ',' '{' and '}' as delimiters. Leaves
+ * vector unmodified if the parsing fails.
+ */
+bool parseVector(const std::string &str, std::vector<int32_t> *vector);
+
+/**
  * Parse the devices string and return a vector of device address pairs.
  *
  * A failure to parse returns early with the contents that were able to be parsed.
diff --git a/services/mediametrics/include/mediametricsservice/iface_statsd.h b/services/mediametrics/include/mediametricsservice/iface_statsd.h
index 5bc293b..34d71ba 100644
--- a/services/mediametrics/include/mediametricsservice/iface_statsd.h
+++ b/services/mediametrics/include/mediametricsservice/iface_statsd.h
@@ -15,7 +15,9 @@
  */
 
 #include <memory>
+
 #include <stats_event.h>
+#include <StatsdLog.h>
 
 namespace android {
 namespace mediametrics {
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index cb5e783..ea76bcd 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -23,6 +23,7 @@
 #include <pthread.h>
 #include <pwd.h>
 #include <stdint.h>
+#include <string>
 #include <string.h>
 #include <sys/stat.h>
 #include <sys/time.h>
@@ -32,14 +33,149 @@
 #include <stats_media_metrics.h>
 #include <stats_event.h>
 
-#include "cleaner.h"
-#include "MediaMetricsService.h"
-#include "ValidateId.h"
-#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
-#include "iface_statsd.h"
+#include <frameworks/proto_logging/stats/message/mediametrics_message.pb.h>
+#include <mediametricsservice/cleaner.h>
+#include <mediametricsservice/iface_statsd.h>
+#include <mediametricsservice/MediaMetricsService.h>
+#include <mediametricsservice/StringUtils.h>
+#include <mediametricsservice/ValidateId.h>
 
 namespace android {
 
+using stats::media_metrics::stats_write;
+using stats::media_metrics::MEDIA_CODEC_RENDERED;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_UNKNOWN;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_INVALID;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNKNOWN;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNDETERMINED;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_24_3_2_PULLDOWN;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_NONE;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HLG;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10_PLUS;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_DOLBY_VISION;
+
+static const int BITRATE_UNKNOWN =
+        stats::media_metrics::MEDIA_CODEC_RENDERED__BITRATE__BITRATE_UNKNOWN;
+
+static const std::pair<char const *, int> CODEC_LOOKUP[] = {
+    { "avc", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AVC },
+    { "h264", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AVC },
+    { "hevc", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_HEVC },
+    { "h265", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_HEVC },
+    { "vp8", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_VP8 },
+    { "vp9", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_VP9 },
+    { "av1", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AV1 },
+    { "av01", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AV1 },
+    { "dolby-vision", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_HEVC },
+};
+
+static const int32_t RESOLUTION_LOOKUP[] = {
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_MAX_SIZE,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_32K,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_16K,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_8K_UHD,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_8K_UHD_ALMOST,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_4K_UHD_ALMOST,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1440X2560,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080X2400,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080X2340,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080P_FHD,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080P_FHD_ALMOST,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_720P_HD,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_720P_HD_ALMOST,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_576X1024,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_540X960,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_480X854,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_480X640,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_360X640,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_352X640,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_VERY_LOW,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_SMALLEST,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO,
+};
+
+static const int32_t FRAMERATE_LOOKUP[] = {
+    stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_24,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_25,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_30,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_50,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_60,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_120,
+};
+
+static int32_t getMetricsCodecEnum(const std::string &mime, const std::string &componentName) {
+    for (const auto & codecStrAndEnum : CODEC_LOOKUP) {
+        if (strcasestr(mime.c_str(), codecStrAndEnum.first) != nullptr ||
+            strcasestr(componentName.c_str(), codecStrAndEnum.first) != nullptr) {
+            return codecStrAndEnum.second;
+        }
+    }
+    return MEDIA_CODEC_RENDERED__CODEC__CODEC_UNKNOWN;
+}
+
+static int32_t getMetricsResolutionEnum(int32_t width, int32_t height) {
+    if (width == 0 || height == 0) {
+        return MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO;
+    }
+    int64_t pixels = int64_t(width) * height / 1000;
+    if (width < 0 || height < 0 || pixels > RESOLUTION_LOOKUP[0]) {
+        return MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_INVALID;
+    }
+    for (int32_t resolutionEnum : RESOLUTION_LOOKUP) {
+        if (pixels > resolutionEnum) {
+            return resolutionEnum;
+        }
+    }
+    return MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO;
+}
+
+static int32_t getMetricsFramerateEnum(float inFramerate) {
+    if (inFramerate == -1.0f) {
+        return MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNDETERMINED;
+    }
+    if (inFramerate == -2.0f) {
+        return MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_24_3_2_PULLDOWN;
+    }
+    int framerate = int(inFramerate * 100); // Table is in hundredths of frames per second
+    static const int framerateTolerance = 40; // tolerance of 0.4 fps; the table is in hundredths
+    for (int32_t framerateEnum : FRAMERATE_LOOKUP) {
+        if (abs(framerate - framerateEnum) < framerateTolerance) {
+            return framerateEnum;
+        }
+    }
+    return MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNKNOWN;
+}
+
+static int32_t getMetricsHdrFormatEnum(std::string &mime, std::string &componentName,
+                                       int32_t configColorTransfer, int32_t parsedColorTransfer,
+                                       int32_t hdr10StaticInfo, int32_t hdr10PlusInfo) {
+    if (hdr10PlusInfo) {
+        return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10_PLUS;
+    }
+    if (hdr10StaticInfo) {
+        return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10;
+    }
+    // 7 = COLOR_TRANSFER_HLG in MediaCodecConstants.h
+    if (configColorTransfer == 7 || parsedColorTransfer == 7) {
+        return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HLG;
+    }
+    if (strcasestr(mime.c_str(), "dolby-vision") != nullptr ||
+        strcasestr(componentName.c_str(), "dvhe") != nullptr ||
+        strcasestr(componentName.c_str(), "dvav") != nullptr ||
+        strcasestr(componentName.c_str(), "dav1") != nullptr) {
+        return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_DOLBY_VISION;
+    }
+    return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_NONE;
+}
+
+static void parseVector(const std::string &str, std::vector<int32_t> *vector) {
+    if (!mediametrics::stringutils::parseVector(str, vector)) {
+        ALOGE("failed to parse integer vector from '%s'", str.c_str());
+    }
+}
+
 bool statsd_codec(const std::shared_ptr<const mediametrics::Item>& item,
         const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
@@ -48,17 +184,17 @@
     AStatsEvent* event = AStatsEvent_obtain();
     AStatsEvent_setAtomId(event, stats::media_metrics::MEDIA_CODEC_REPORTED);
 
-    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
-    AStatsEvent_writeInt64(event, timestamp_nanos);
+    const nsecs_t timestampNanos = MediaMetricsService::roundTime(item->getTimestamp());
+    AStatsEvent_writeInt64(event, timestampNanos);
 
-    std::string package_name = item->getPkgName();
-    AStatsEvent_writeString(event, package_name.c_str());
+    std::string packageName = item->getPkgName();
+    AStatsEvent_writeString(event, packageName.c_str());
 
-    int64_t package_version_code = item->getPkgVersionCode();
-    AStatsEvent_writeInt64(event, package_version_code);
+    int64_t packageVersionCode = item->getPkgVersionCode();
+    AStatsEvent_writeInt64(event, packageVersionCode);
 
-    int64_t media_apex_version = 0;
-    AStatsEvent_writeInt64(event, media_apex_version);
+    int64_t mediaApexVersion = 0;
+    AStatsEvent_writeInt64(event, mediaApexVersion);
 
     // the rest into our own proto
     //
@@ -84,17 +220,25 @@
     }
     AStatsEvent_writeString(event, mode.c_str());
 
-    int32_t encoder = -1;
-    if (item->getInt32("android.media.mediacodec.encoder", &encoder)) {
-        metrics_proto.set_encoder(encoder);
+    int32_t isEncoder = -1;
+    if (item->getInt32("android.media.mediacodec.encoder", &isEncoder)) {
+        metrics_proto.set_encoder(isEncoder);
     }
-    AStatsEvent_writeInt32(event, encoder);
+    AStatsEvent_writeInt32(event, isEncoder);
 
-    int32_t secure = -1;
-    if (item->getInt32("android.media.mediacodec.secure", &secure)) {
-        metrics_proto.set_secure(secure);
+    int32_t isSecure = -1;
+    if (item->getInt32("android.media.mediacodec.secure", &isSecure)) {
+        metrics_proto.set_secure(isSecure);
     }
-    AStatsEvent_writeInt32(event, secure);
+    AStatsEvent_writeInt32(event, isSecure);
+
+    int32_t isHardware = -1;
+    item->getInt32("android.media.mediacodec.hardware", &isHardware);
+    // not logged to MediaCodecReported or MediametricsCodecReported
+
+    int32_t isTunneled = -1;
+    item->getInt32("android.media.mediacodec.tunneled", &isTunneled);
+    // not logged to MediaCodecReported or MediametricsCodecReported
 
     int32_t width = -1;
     if (item->getInt32("android.media.mediacodec.width", &width)) {
@@ -133,79 +277,78 @@
     AStatsEvent_writeInt32(event, level);
 
 
-    int32_t max_width = -1;
-    if ( item->getInt32("android.media.mediacodec.maxwidth", &max_width)) {
-        metrics_proto.set_max_width(max_width);
+    int32_t maxWidth = -1;
+    if ( item->getInt32("android.media.mediacodec.maxwidth", &maxWidth)) {
+        metrics_proto.set_max_width(maxWidth);
     }
-    AStatsEvent_writeInt32(event, max_width);
+    AStatsEvent_writeInt32(event, maxWidth);
 
-    int32_t max_height = -1;
-    if ( item->getInt32("android.media.mediacodec.maxheight", &max_height)) {
-        metrics_proto.set_max_height(max_height);
+    int32_t maxHeight = -1;
+    if ( item->getInt32("android.media.mediacodec.maxheight", &maxHeight)) {
+        metrics_proto.set_max_height(maxHeight);
     }
-    AStatsEvent_writeInt32(event, max_height);
+    AStatsEvent_writeInt32(event, maxHeight);
 
-    int32_t error_code = -1;
-    if ( item->getInt32("android.media.mediacodec.errcode", &error_code)) {
-        metrics_proto.set_error_code(error_code);
+    int32_t errorCode = -1;
+    if ( item->getInt32("android.media.mediacodec.errcode", &errorCode)) {
+        metrics_proto.set_error_code(errorCode);
     }
-    AStatsEvent_writeInt32(event, error_code);
+    AStatsEvent_writeInt32(event, errorCode);
 
-    std::string error_state;
-    if ( item->getString("android.media.mediacodec.errstate", &error_state)) {
-        metrics_proto.set_error_state(error_state);
+    std::string errorState;
+    if ( item->getString("android.media.mediacodec.errstate", &errorState)) {
+        metrics_proto.set_error_state(errorState);
     }
-    AStatsEvent_writeString(event, error_state.c_str());
+    AStatsEvent_writeString(event, errorState.c_str());
 
-    int64_t latency_max = -1;
-    if (item->getInt64("android.media.mediacodec.latency.max", &latency_max)) {
-        metrics_proto.set_latency_max(latency_max);
+    int64_t latencyMax = -1;
+    if (item->getInt64("android.media.mediacodec.latency.max", &latencyMax)) {
+        metrics_proto.set_latency_max(latencyMax);
     }
-    AStatsEvent_writeInt64(event, latency_max);
+    AStatsEvent_writeInt64(event, latencyMax);
 
-    int64_t latency_min = -1;
-    if (item->getInt64("android.media.mediacodec.latency.min", &latency_min)) {
-        metrics_proto.set_latency_min(latency_min);
+    int64_t latencyMin = -1;
+    if (item->getInt64("android.media.mediacodec.latency.min", &latencyMin)) {
+        metrics_proto.set_latency_min(latencyMin);
     }
-    AStatsEvent_writeInt64(event, latency_min);
+    AStatsEvent_writeInt64(event, latencyMin);
 
-    int64_t latency_avg = -1;
-    if (item->getInt64("android.media.mediacodec.latency.avg", &latency_avg)) {
-        metrics_proto.set_latency_avg(latency_avg);
+    int64_t latencyAvg = -1;
+    if (item->getInt64("android.media.mediacodec.latency.avg", &latencyAvg)) {
+        metrics_proto.set_latency_avg(latencyAvg);
     }
-    AStatsEvent_writeInt64(event, latency_avg);
+    AStatsEvent_writeInt64(event, latencyAvg);
 
-    int64_t latency_count = -1;
-    if (item->getInt64("android.media.mediacodec.latency.n", &latency_count)) {
-        metrics_proto.set_latency_count(latency_count);
+    int64_t latencyCount = -1;
+    if (item->getInt64("android.media.mediacodec.latency.n", &latencyCount)) {
+        metrics_proto.set_latency_count(latencyCount);
     }
-    AStatsEvent_writeInt64(event, latency_count);
+    AStatsEvent_writeInt64(event, latencyCount);
 
-    int64_t latency_unknown = -1;
-    if (item->getInt64("android.media.mediacodec.latency.unknown", &latency_unknown)) {
-        metrics_proto.set_latency_unknown(latency_unknown);
+    int64_t latencyUnknown = -1;
+    if (item->getInt64("android.media.mediacodec.latency.unknown", &latencyUnknown)) {
+        metrics_proto.set_latency_unknown(latencyUnknown);
     }
-    AStatsEvent_writeInt64(event, latency_unknown);
+    AStatsEvent_writeInt64(event, latencyUnknown);
 
-    int32_t queue_secure_input_buffer_error = -1;
+    int32_t queueSecureInputBufferError = -1;
     if (item->getInt32("android.media.mediacodec.queueSecureInputBufferError",
-            &queue_secure_input_buffer_error)) {
-        metrics_proto.set_queue_secure_input_buffer_error(queue_secure_input_buffer_error);
+            &queueSecureInputBufferError)) {
+        metrics_proto.set_queue_secure_input_buffer_error(queueSecureInputBufferError);
     }
-    AStatsEvent_writeInt32(event, queue_secure_input_buffer_error);
+    AStatsEvent_writeInt32(event, queueSecureInputBufferError);
 
-    int32_t queue_input_buffer_error = -1;
-    if (item->getInt32("android.media.mediacodec.queueInputBufferError",
-            &queue_input_buffer_error)) {
-        metrics_proto.set_queue_input_buffer_error(queue_input_buffer_error);
+    int32_t queueInputBufferError = -1;
+    if (item->getInt32("android.media.mediacodec.queueInputBufferError", &queueInputBufferError)) {
+        metrics_proto.set_queue_input_buffer_error(queueInputBufferError);
     }
-    AStatsEvent_writeInt32(event, queue_input_buffer_error);
+    AStatsEvent_writeInt32(event, queueInputBufferError);
 
-    std::string bitrate_mode;
-    if (item->getString("android.media.mediacodec.bitrate_mode", &bitrate_mode)) {
-        metrics_proto.set_bitrate_mode(bitrate_mode);
+    std::string bitrateMode;
+    if (item->getString("android.media.mediacodec.bitrate_mode", &bitrateMode)) {
+        metrics_proto.set_bitrate_mode(bitrateMode);
     }
-    AStatsEvent_writeString(event, bitrate_mode.c_str());
+    AStatsEvent_writeString(event, bitrateMode.c_str());
 
     int32_t bitrate = -1;
     if (item->getInt32("android.media.mediacodec.bitrate", &bitrate)) {
@@ -213,18 +356,18 @@
     }
     AStatsEvent_writeInt32(event, bitrate);
 
-    int64_t lifetime_millis = -1;
-    if (item->getInt64("android.media.mediacodec.lifetimeMs", &lifetime_millis)) {
-        lifetime_millis = mediametrics::bucket_time_minutes(lifetime_millis);
-        metrics_proto.set_lifetime_millis(lifetime_millis);
+    int64_t lifetimeMillis = -1;
+    if (item->getInt64("android.media.mediacodec.lifetimeMs", &lifetimeMillis)) {
+        lifetimeMillis = mediametrics::bucket_time_minutes(lifetimeMillis);
+        metrics_proto.set_lifetime_millis(lifetimeMillis);
     }
-    AStatsEvent_writeInt64(event, lifetime_millis);
+    AStatsEvent_writeInt64(event, lifetimeMillis);
 
-    int64_t playback_duration_sec = -1;
-    item->getInt64("android.media.mediacodec.playback-duration-sec", &playback_duration_sec);
+    int64_t playbackDurationSec = -1;
+    item->getInt64("android.media.mediacodec.playback-duration-sec", &playbackDurationSec);
     // DO NOT record  playback-duration in the metrics_proto - it should only
     // exist in the flattened atom
-    AStatsEvent_writeInt64(event, playback_duration_sec);
+    AStatsEvent_writeInt64(event, playbackDurationSec);
 
     std::string sessionId;
     if (item->getString("android.media.mediacodec.log-session-id", &sessionId)) {
@@ -438,7 +581,7 @@
     }
     AStatsEvent_writeInt32(event, hdr10PlusInfo);
 
-    int32_t hdrFormat= -1;
+    int32_t hdrFormat = -1;
     if (item->getInt32("android.media.mediacodec.hdr-format", &hdrFormat)) {
         metrics_proto.set_hdr_format(hdrFormat);
     }
@@ -450,61 +593,243 @@
     }
     AStatsEvent_writeInt64(event, codecId);
 
+    int32_t arrayMode = -1;
+    if (item->getInt32("android.media.mediacodec.array-mode", &arrayMode)) {
+        metrics_proto.set_array_mode(arrayMode);
+    }
+    AStatsEvent_writeInt32(event, arrayMode);
+
+    int32_t operationMode = -1;
+    if (item->getInt32("android.media.mediacodec.operation-mode", &operationMode)) {
+        metrics_proto.set_operation_mode(operationMode);
+    }
+    AStatsEvent_writeInt32(event, operationMode);
+
+    int32_t outputSurface = -1;
+    if (item->getInt32("android.media.mediacodec.output-surface", &outputSurface)) {
+        metrics_proto.set_output_surface(outputSurface);
+    }
+    AStatsEvent_writeInt32(event, outputSurface);
+
+    int32_t appMaxInputSize = -1;
+    if (item->getInt32("android.media.mediacodec.app-max-input-size", &appMaxInputSize)) {
+        metrics_proto.set_app_max_input_size(appMaxInputSize);
+    }
+    AStatsEvent_writeInt32(event, appMaxInputSize);
+
+    int32_t usedMaxInputSize = -1;
+    if (item->getInt32("android.media.mediacodec.used-max-input-size", &usedMaxInputSize)) {
+        metrics_proto.set_used_max_input_size(usedMaxInputSize);
+    }
+    AStatsEvent_writeInt32(event, usedMaxInputSize);
+
+    int32_t codecMaxInputSize = -1;
+    if (item->getInt32("android.media.mediacodec.codec-max-input-size", &codecMaxInputSize)) {
+        metrics_proto.set_codec_max_input_size(codecMaxInputSize);
+    }
+    AStatsEvent_writeInt32(event, codecMaxInputSize);
+
+    int32_t flushCount = -1;
+    if (item->getInt32("android.media.mediacodec.flush-count", &flushCount)) {
+        metrics_proto.set_flush_count(flushCount);
+    }
+    AStatsEvent_writeInt32(event, flushCount);
+
+    int32_t setSurfaceCount = -1;
+    if (item->getInt32("android.media.mediacodec.set-surface-count", &setSurfaceCount)) {
+        metrics_proto.set_set_surface_count(setSurfaceCount);
+    }
+    AStatsEvent_writeInt32(event, setSurfaceCount);
+
+    int32_t resolutionChangeCount = -1;
+    if (item->getInt32("android.media.mediacodec.resolution-change-count",
+            &resolutionChangeCount)) {
+        metrics_proto.set_resolution_change_count(resolutionChangeCount);
+    }
+    AStatsEvent_writeInt32(event, resolutionChangeCount);
+
+    int32_t componentColorFormat = -1;
+    if (item->getInt32("android.media.mediacodec.component-color-format", &componentColorFormat)) {
+        metrics_proto.set_component_color_format(componentColorFormat);
+    }
+    AStatsEvent_writeInt32(event, componentColorFormat);
+
+    int64_t firstRenderTimeUs = -1;
+    item->getInt64("android.media.mediacodec.first-render-time-us", &firstRenderTimeUs);
+    int64_t framesReleased = -1;
+    item->getInt64("android.media.mediacodec.frames-released", &framesReleased);
+    int64_t framesRendered = -1;
+    item->getInt64("android.media.mediacodec.frames-rendered", &framesRendered);
+    int64_t framesDropped = -1;
+    item->getInt64("android.media.mediacodec.frames-dropped", &framesDropped);
+    int64_t framesSkipped = -1;
+    item->getInt64("android.media.mediacodec.frames-skipped", &framesSkipped);
+    double framerateContent = -1;
+    item->getDouble("android.media.mediacodec.framerate-content", &framerateContent);
+    double framerateActual = -1;
+    item->getDouble("android.media.mediacodec.framerate-actual", &framerateActual);
+    int64_t freezeScore = -1;
+    item->getInt64("android.media.mediacodec.freeze-score", &freezeScore);
+    double freezeRate = -1;
+    item->getDouble("android.media.mediacodec.freeze-rate", &freezeRate);
+    std::string freezeScoreHistogramStr;
+    item->getString("android.media.mediacodec.freeze-score-histogram", &freezeScoreHistogramStr);
+    std::string freezeScoreHistogramBucketsStr;
+    item->getString("android.media.mediacodec.freeze-score-histogram-buckets",
+                    &freezeScoreHistogramBucketsStr);
+    std::string freezeDurationMsHistogramStr;
+    item->getString("android.media.mediacodec.freeze-duration-ms-histogram",
+                    &freezeDurationMsHistogramStr);
+    std::string freezeDurationMsHistogramBucketsStr;
+    item->getString("android.media.mediacodec.freeze-duration-ms-histogram-buckets",
+                    &freezeDurationMsHistogramBucketsStr);
+    std::string freezeDistanceMsHistogramStr;
+    item->getString("android.media.mediacodec.freeze-distance-ms-histogram",
+                    &freezeDistanceMsHistogramStr);
+    std::string freezeDistanceMsHistogramBucketsStr;
+    item->getString("android.media.mediacodec.freeze-distance-ms-histogram-buckets",
+                    &freezeDistanceMsHistogramBucketsStr);
+    int64_t judderScore = -1;
+    item->getInt64("android.media.mediacodec.judder-score", &judderScore);
+    double judderRate = -1;
+    item->getDouble("android.media.mediacodec.judder-rate", &judderRate);
+    std::string judderScoreHistogramStr;
+    item->getString("android.media.mediacodec.judder-score-histogram", &judderScoreHistogramStr);
+    std::string judderScoreHistogramBucketsStr;
+    item->getString("android.media.mediacodec.judder-score-histogram-buckets",
+                    &judderScoreHistogramBucketsStr);
+
     int err = AStatsEvent_write(event);
     if (err < 0) {
       ALOGE("Failed to write codec metrics to statsd (%d)", err);
     }
     AStatsEvent_release(event);
 
+    if (framesRendered > 0) {
+        int32_t statsUid = item->getUid();
+        int64_t statsCodecId = codecId;
+        char const *statsLogSessionId = sessionId.c_str();
+        int32_t statsIsHardware = isHardware;
+        int32_t statsIsSecure = isSecure;
+        int32_t statsIsTunneled = isTunneled;
+        int32_t statsCodec = getMetricsCodecEnum(mime, codec);
+        int32_t statsResolution = getMetricsResolutionEnum(width, height);
+        int32_t statsBitrate = BITRATE_UNKNOWN;
+        int32_t statsContentFramerate = getMetricsFramerateEnum(framerateContent);
+        int32_t statsActualFramerate = getMetricsFramerateEnum(framerateActual);
+        int32_t statsHdrFormat = getMetricsHdrFormatEnum(mime, codec, configColorTransfer,
+                                                         parsedColorTransfer, hdrStaticInfo,
+                                                         hdr10PlusInfo);
+        int64_t statsFirstRenderTimeUs = firstRenderTimeUs;
+        int64_t statsPlaybackDurationSeconds = playbackDurationSec;
+        int64_t statsFramesTotal = framesReleased + framesSkipped;
+        int64_t statsFramesReleased = framesReleased;
+        int64_t statsFramesRendered = framesRendered;
+        int64_t statsFramesDropped = framesDropped;
+        int64_t statsFramesSkipped = framesSkipped;
+        float statsFrameDropRate = float(double(framesDropped) / statsFramesTotal);
+        float statsFrameSkipRate = float(double(framesSkipped) / statsFramesTotal);
+        float statsFrameSkipDropRate = float(double(framesSkipped + framesDropped) /
+                                             statsFramesTotal);
+        int64_t statsFreezeScore = freezeScore;
+        float statsFreezeRate = freezeRate;
+        std::vector<int32_t> statsFreezeDurationMsHistogram;
+        parseVector(freezeDurationMsHistogramStr, &statsFreezeDurationMsHistogram);
+        std::vector<int32_t> statsFreezeDurationMsHistogramBuckets;
+        parseVector(freezeDurationMsHistogramBucketsStr, &statsFreezeDurationMsHistogramBuckets);
+        std::vector<int32_t> statsFreezeDistanceMsHistogram;
+        parseVector(freezeDistanceMsHistogramStr, &statsFreezeDistanceMsHistogram);
+        std::vector<int32_t> statsFreezeDistanceMsHistogramBuckets;
+        parseVector(freezeDistanceMsHistogramBucketsStr, &statsFreezeDistanceMsHistogramBuckets);
+        int64_t statsJudderScore = judderScore;
+        float statsJudderRate = judderRate;
+        std::vector<int32_t> statsJudderScoreHistogram;
+        parseVector(judderScoreHistogramStr, &statsJudderScoreHistogram);
+        std::vector<int32_t> statsJudderScoreHistogramBuckets;
+        parseVector(judderScoreHistogramBucketsStr, &statsJudderScoreHistogramBuckets);
+        int result = stats_write(
+            MEDIA_CODEC_RENDERED,
+            statsUid,
+            statsCodecId,
+            statsLogSessionId,
+            statsIsHardware,
+            statsIsSecure,
+            statsIsTunneled,
+            statsCodec,
+            statsResolution,
+            statsBitrate,
+            statsContentFramerate,
+            statsActualFramerate,
+            statsHdrFormat,
+            statsFirstRenderTimeUs,
+            statsPlaybackDurationSeconds,
+            statsFramesTotal,
+            statsFramesReleased,
+            statsFramesRendered,
+            statsFramesDropped,
+            statsFramesSkipped,
+            statsFrameDropRate,
+            statsFrameSkipRate,
+            statsFrameSkipDropRate,
+            statsFreezeScore,
+            statsFreezeRate,
+            statsFreezeDurationMsHistogram,
+            statsFreezeDurationMsHistogramBuckets,
+            statsFreezeDistanceMsHistogram,
+            statsFreezeDistanceMsHistogramBuckets,
+            statsJudderScore,
+            statsJudderRate,
+            statsJudderScoreHistogram,
+            statsJudderScoreHistogramBuckets);
+        ALOGE_IF(result < 0, "Failed to record MEDIA_CODEC_RENDERED atom (%d)", result);
+    }
+
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
         ALOGE("Failed to serialize codec metrics");
         return false;
     }
-    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const stats::media_metrics::BytesField bf_serialized(serialized.c_str(), serialized.size());
     const int result = stats::media_metrics::stats_write(stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED,
-                               timestamp_nanos, package_name.c_str(), package_version_code,
-                               media_apex_version,
+                               timestampNanos, packageName.c_str(), packageVersionCode,
+                               mediaApexVersion,
                                bf_serialized);
 
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_codec_reported:"
             << stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED
-            << " timestamp_nanos:" << timestamp_nanos
-            << " package_name:" << package_name
-            << " package_version_code:" << package_version_code
-            << " media_apex_version:" << media_apex_version
-
+            << " timestamp_nanos:" << timestampNanos
+            << " package_name:" << packageName
+            << " package_version_code:" << packageVersionCode
+            << " media_apex_version:" << mediaApexVersion
             << " codec:" << codec
             << " mime:" << mime
             << " mode:" << mode
-            << " encoder:" << encoder
-            << " secure:" << secure
+            << " encoder:" << isEncoder
+            << " secure:" << isSecure
             << " width:" << width
             << " height:" << height
             << " rotation:" << rotation
             << " crypto:" << crypto
             << " profile:" << profile
-
             << " level:" << level
-            << " max_width:" << max_width
-            << " max_height:" << max_height
-            << " error_code:" << error_code
-            << " error_state:" << error_state
-            << " latency_max:" << latency_max
-            << " latency_min:" << latency_min
-            << " latency_avg:" << latency_avg
-            << " latency_count:" << latency_count
-            << " latency_unknown:" << latency_unknown
-
-            << " queue_input_buffer_error:" << queue_input_buffer_error
-            << " queue_secure_input_buffer_error:" << queue_secure_input_buffer_error
-            << " bitrate_mode:" << bitrate_mode
+            << " max_width:" << maxWidth
+            << " max_height:" << maxHeight
+            << " error_code:" << errorCode
+            << " error_state:" << errorState
+            << " latency_max:" << latencyMax
+            << " latency_min:" << latencyMin
+            << " latency_avg:" << latencyAvg
+            << " latency_count:" << latencyCount
+            << " latency_unknown:" << latencyUnknown
+            << " queue_input_buffer_error:" << queueInputBufferError
+            << " queue_secure_input_buffer_error:" << queueSecureInputBufferError
+            << " bitrate_mode:" << bitrateMode
             << " bitrate:" << bitrate
             << " original_bitrate:" << originalBitrate
-            << " lifetime_millis:" << lifetime_millis
-            << " playback_duration_seconds:" << playback_duration_sec
+            << " lifetime_millis:" << lifetimeMillis
+            << " playback_duration_seconds:" << playbackDurationSec
             << " log_session_id:" << sessionId
             << " channel_count:" << channelCount
             << " sample_rate:" << sampleRate
@@ -517,7 +842,6 @@
             << " operating_rate:" << operatingRate
             << " priority:" << priority
             << " shaping_enhanced:" << shapingEnhanced
-
             << " qp_i_min:" << qpIMin
             << " qp_i_max:" << qpIMax
             << " qp_p_min:" << qpPMin
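
As a worked example of the framerate bucketing introduced above (the table values here mirror the 24/25/30/50/60/120 fps entries but are written out as plain integers, not the real atom constants): 29.97 fps becomes roughly 2997 hundredths, which is within the 40-hundredth tolerance of 3000 and therefore lands in the 30 fps bucket, while 27 fps is not close to any entry and stays unknown.

// Sketch only; the real code compares against stats::media_metrics enum constants.
#include <cstdint>
#include <cstdlib>

static const int32_t kFrameratesInHundredths[] = { 2400, 2500, 3000, 5000, 6000, 12000 };

int32_t framerateBucket(float fps) {
    const int hundredths = int(fps * 100);
    for (int32_t candidate : kFrameratesInHundredths) {
        if (std::abs(hundredths - candidate) < 40) {  // 0.4 fps tolerance
            return candidate;
        }
    }
    return -1;  // stands in for FRAMERATE_UNKNOWN
}
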
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index 9f08eca..e5f7190 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -281,9 +281,9 @@
     const int32_t uid = IPCThreadState::self()->getCallingUid();
     int32_t frontend = 0;
     if (!item->getInt32("frontend", &frontend)) return false;
-    int32_t requested_security_level = -1;
+    int32_t requested_security_level = 0;
     if (!item->getInt32("requested_security_level", &requested_security_level)) return false;
-    int32_t opened_security_level = -1;
+    int32_t opened_security_level = 0;
     if (!item->getInt32("opened_security_level", &opened_security_level)) return false;
 
     // Optional to be included
@@ -325,12 +325,10 @@
     if (!item->getInt32("frontend", &frontend)) return false;
     std::string object_nonce = "";
     if (!item->getString("object_nonce", &object_nonce)) return false;
-    int32_t security_level = -1;
-    if (!item->getInt32("security_level", &security_level)) return false;
     std::string api_str = "";
     if (!item->getString("api", &api_str)) return false;
     const int32_t api = MediaDrmStatsdHelper::findDrmApi(api_str);
-    int32_t error_code = -1;
+    int32_t error_code = 0;
     if (!item->getInt32("error_code", &error_code)) return false;
 
     // Optional to be included
@@ -338,12 +336,14 @@
     item->getString("version", &version);
     std::string session_nonce = "";
     item->getString("session_nonce", &session_nonce);
+    int32_t security_level = 0;
+    item->getInt32("security_level", &security_level);
 
     int32_t cdm_err = 0;
     item->getInt32("cdm_err", &cdm_err);
     int32_t oem_err = 0;
     item->getInt32("oem_err", &oem_err);
-    int32_t error_context = -1;
+    int32_t error_context = 0;
     item->getInt32("error_context", &error_context);
 
     const int result = stats_write(stats::media_metrics::MEDIA_DRM_ERRORED, scheme, uuid_lsb,
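
The statsd_drm changes above follow a consistent pattern, sketched below with hypothetical key names: required fields abort the handler when absent, while optional fields (such as security_level after this change) keep a 0 default, which is also what statsd receives when the key was never set.

#include <cstdint>
#include <memory>

#include <media/MediaMetricsItem.h>

static bool readFields(const std::shared_ptr<const android::mediametrics::Item>& item) {
    int32_t requiredField = 0;
    if (!item->getInt32("example.required", &requiredField)) return false;  // drop the event

    int32_t optionalField = 0;
    item->getInt32("example.optional", &optionalField);  // keep the 0 default if missing
    return true;
}
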
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index bc7b47b..4a6aee4 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -17,9 +17,10 @@
 #define LOG_TAG "mediametrics_tests"
 #include <utils/Log.h>
 
-
 #include <stdio.h>
+#include <string>
 #include <unordered_set>
+#include <vector>
 
 #include <gtest/gtest.h>
 #include <media/MediaMetricsItem.h>
@@ -30,6 +31,7 @@
 #include <system/audio.h>
 
 using namespace android;
+using android::mediametrics::stringutils::parseVector;
 
 static size_t countNewlines(const char *s) {
     size_t count = 0;
@@ -57,6 +59,35 @@
   ASSERT_EQ(false, android::mediametrics::startsWith(s, std::string("est")));
 }
 
+TEST(mediametrics_tests, parseVector) {
+    {
+        std::vector<int32_t> values;
+        EXPECT_EQ(true, parseVector("0{4,300,0,-112343,350}9", &values));
+        EXPECT_EQ(values, std::vector<int32_t>({0, 4, 300, 0, -112343, 350, 9}));
+    }
+    {
+        std::vector<int32_t> values;
+        EXPECT_EQ(true, parseVector("53", &values));
+        EXPECT_EQ(values, std::vector<int32_t>({53}));
+    }
+    {
+        std::vector<int32_t> values;
+        EXPECT_EQ(false, parseVector("5{3,6*3}3", &values));
+        EXPECT_EQ(values, std::vector<int32_t>({}));
+    }
+    {
+        std::vector<int32_t> values = {1}; // should still be this when parsing fails
+        std::vector<int32_t> expected = {1};
+        EXPECT_EQ(false, parseVector("51342abcd,1232", &values));
+        EXPECT_EQ(values, expected);
+    }
+    {
+        std::vector<int32_t> values = {2}; // should still be this when parsing fails
+        EXPECT_EQ(false, parseVector("12345678901234,12345678901234", &values));
+        EXPECT_EQ(values, std::vector<int32_t>({2}));
+    }
+}
+
 TEST(mediametrics_tests, defer) {
   bool check = false;
   {
diff --git a/services/mediaresourcemanager/OWNERS b/services/mediaresourcemanager/OWNERS
index 82abf8f..4fc3728 100644
--- a/services/mediaresourcemanager/OWNERS
+++ b/services/mediaresourcemanager/OWNERS
@@ -1 +1,3 @@
-dwkang@google.com
+girishshetty@google.com
+lajos@google.com
+wonsik@google.com
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index b60e734..f8cdb80 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -146,18 +146,21 @@
 
 void ResourceManagerMetrics::notifyClientReleased(const ClientInfoParcel& clientInfo) {
     bool stopCalled = true;
-    ClientConfigMap::iterator found;
+    ClientConfigParcel clientConfig;
     {
         std::scoped_lock lock(mLock);
-        found = mClientConfigMap.find(clientInfo.id);
+        ClientConfigMap::iterator found = mClientConfigMap.find(clientInfo.id);
         if (found != mClientConfigMap.end()) {
             // Release is called without Stop!
             stopCalled = false;
+            clientConfig = found->second;
+            // Update the timestamp for stopping the codec.
+            clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
         }
     }
     if (!stopCalled) {
         // call Stop to update the metrics.
-        notifyClientStopped(found->second);
+        notifyClientStopped(clientConfig);
     }
     {
         std::scoped_lock lock(mLock);
@@ -172,6 +175,22 @@
     }
 }
 
+void ResourceManagerMetrics::notifyClientConfigChanged(const ClientConfigParcel& clientConfig) {
+    std::scoped_lock lock(mLock);
+    ClientConfigMap::iterator entry = mClientConfigMap.find(clientConfig.clientInfo.id);
+    if (entry != mClientConfigMap.end() &&
+        (clientConfig.codecType == MediaResourceSubType::kVideoCodec ||
+        clientConfig.codecType == MediaResourceSubType::kImageCodec)) {
+        int pid = clientConfig.clientInfo.pid;
+        // Update the pixel count for this process
+        updatePixelCount(pid, clientConfig.width * (long)clientConfig.height,
+                         entry->second.width * (long)entry->second.height);
+        // Update the resolution in the record.
+        entry->second.width = clientConfig.width;
+        entry->second.height = clientConfig.height;
+    }
+}
+
 void ResourceManagerMetrics::notifyClientStarted(const ClientConfigParcel& clientConfig) {
     std::scoped_lock lock(mLock);
     int pid = clientConfig.clientInfo.pid;
@@ -194,9 +213,15 @@
     }
 
     // System concurrent codec usage
-    int systemConcurrentCodecCount = mConcurrentCodecsMap[codecBucket];
+    int systemConcurrentCodecs = mConcurrentCodecsMap[codecBucket];
     // Process/Application concurrent codec usage for this type of codec
-    int appConcurrentCodecCount = mProcessConcurrentCodecsMap[pid].mCurrent[codecBucket];
+    const ConcurrentCodecs& concurrentCodecs = mProcessConcurrentCodecsMap[pid];
+    int appConcurrentCodecs = concurrentCodecs.mCurrent[codecBucket];
+    int hwVideoCodecs = concurrentCodecs.mHWVideoCodecs;
+    int swVideoCodecs = concurrentCodecs.mSWVideoCodecs;
+    int videoCodecs = concurrentCodecs.mVideoCodecs;
+    int audioCodecs = concurrentCodecs.mAudioCodecs;
+    int imageCodecs = concurrentCodecs.mImageCodecs;
     // Process/Application's current pixel count.
     long pixelCount = 0;
     std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
@@ -213,9 +238,14 @@
          clientConfig.isEncoder,
          clientConfig.isHardware,
          clientConfig.width, clientConfig.height,
-         systemConcurrentCodecCount,
-         appConcurrentCodecCount,
-         pixelCount);
+         systemConcurrentCodecs,
+         appConcurrentCodecs,
+         pixelCount,
+         hwVideoCodecs,
+         swVideoCodecs,
+         videoCodecs,
+         audioCodecs,
+         imageCodecs);
 
     ALOGV("%s: Pushed MEDIA_CODEC_STARTED atom: "
           "Process[pid(%d): uid(%d)] "
@@ -223,6 +253,7 @@
           "Timestamp: %jd "
           "Resolution: %d x %d "
           "ConcurrentCodec[%d]={System: %d App: %d} "
+          "AppConcurrentCodecs{Video: %d(HW[%d] SW[%d]) Audio: %d Image: %d} "
           "result: %d",
           __func__,
           pid, clientConfig.clientInfo.uid,
@@ -233,7 +264,8 @@
           clientConfig.isEncoder? "encoder" : "decoder",
           clientConfig.timeStamp,
           clientConfig.width, clientConfig.height,
-          codecBucket, systemConcurrentCodecCount, appConcurrentCodecCount,
+          codecBucket, systemConcurrentCodecs, appConcurrentCodecs,
+          videoCodecs, hwVideoCodecs, swVideoCodecs, audioCodecs, imageCodecs,
           result);
 }
 
@@ -253,12 +285,12 @@
     }
 
     // System concurrent codec usage
-    int systemConcurrentCodecCount = mConcurrentCodecsMap[codecBucket];
+    int systemConcurrentCodecs = mConcurrentCodecsMap[codecBucket];
     // Process/Application concurrent codec usage for this type of codec
-    int appConcurrentCodecCount = 0;
+    int appConcurrentCodecs = 0;
     std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
     if (found != mProcessConcurrentCodecsMap.end()) {
-        appConcurrentCodecCount = found->second.mCurrent[codecBucket];
+        appConcurrentCodecs = found->second.mCurrent[codecBucket];
     }
     // Process/Application's current pixel count.
     long pixelCount = 0;
@@ -289,8 +321,8 @@
          clientConfig.isEncoder,
          clientConfig.isHardware,
          clientConfig.width, clientConfig.height,
-         systemConcurrentCodecCount,
-         appConcurrentCodecCount,
+         systemConcurrentCodecs,
+         appConcurrentCodecs,
          pixelCount,
          usageTime);
     ALOGV("%s: Pushed MEDIA_CODEC_STOPPED atom: "
@@ -309,7 +341,7 @@
           clientConfig.isEncoder? "encoder" : "decoder",
           clientConfig.timeStamp, usageTime,
           clientConfig.width, clientConfig.height,
-          codecBucket, systemConcurrentCodecCount, appConcurrentCodecCount,
+          codecBucket, systemConcurrentCodecs, appConcurrentCodecs,
           result);
 }
 
@@ -481,7 +513,8 @@
         ConcurrentCodecs codecs;
         codecs.mCurrent[codecBucket] = 1;
         codecs.mPeak[codecBucket] = 1;
-        mProcessConcurrentCodecsMap.emplace(pid, codecs);
+        auto added = mProcessConcurrentCodecsMap.emplace(pid, codecs);
+        found = added.first;
     } else {
         found->second.mCurrent[codecBucket]++;
         // Check if it's the peak count for this slot.
@@ -489,6 +522,34 @@
             found->second.mPeak[codecBucket] = found->second.mCurrent[codecBucket];
         }
     }
+
+    switch (codecBucket) {
+        case HwVideoEncoder:
+        case HwVideoDecoder:
+        case SwVideoEncoder:
+        case SwVideoDecoder:
+            if (codecBucket == HwVideoEncoder || codecBucket == HwVideoDecoder) {
+                found->second.mHWVideoCodecs++;
+            } else {
+                found->second.mSWVideoCodecs++;
+            }
+            found->second.mVideoCodecs++;
+            break;
+        case HwAudioEncoder:
+        case HwAudioDecoder:
+        case SwAudioEncoder:
+        case SwAudioDecoder:
+            found->second.mAudioCodecs++;
+            break;
+        case HwImageEncoder:
+        case HwImageDecoder:
+        case SwImageEncoder:
+        case SwImageDecoder:
+            found->second.mImageCodecs++;
+            break;
+        default:
+            break;
+    }
 }
 
 void ResourceManagerMetrics::decreaseConcurrentCodecs(int32_t pid,
@@ -504,6 +565,34 @@
         if (found->second.mCurrent[codecBucket] > 0) {
             found->second.mCurrent[codecBucket]--;
         }
+
+        switch (codecBucket) {
+            case HwVideoEncoder:
+            case HwVideoDecoder:
+            case SwVideoEncoder:
+            case SwVideoDecoder:
+                if (codecBucket == HwVideoEncoder || codecBucket == HwVideoDecoder) {
+                    found->second.mHWVideoCodecs--;
+                } else {
+                    found->second.mSWVideoCodecs--;
+                }
+                found->second.mVideoCodecs--;
+                break;
+            case HwAudioEncoder:
+            case HwAudioDecoder:
+            case SwAudioEncoder:
+            case SwAudioDecoder:
+                found->second.mAudioCodecs--;
+                break;
+            case HwImageEncoder:
+            case HwImageDecoder:
+            case SwImageEncoder:
+            case SwImageDecoder:
+                found->second.mImageCodecs--;
+                break;
+            default:
+                break;
+        }
     }
 }
 
@@ -525,6 +614,13 @@
     }
 }
 
+void ResourceManagerMetrics::updatePixelCount(int32_t pid, long newPixels, long lastPixels) {
+    // The resolution changed: decrease the count by the previous pixel count and
+    // increase it by the new one.
+    decreasePixelCount(pid, lastPixels);
+    increasePixelCount(pid, newPixels);
+}
+
 void ResourceManagerMetrics::decreasePixelCount(int32_t pid, long pixels) {
     // Now update the current pixel usage for this (pid) process.
     std::map<int32_t, PixelCount>::iterator found = mProcessPixelsMap.find(pid);
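
The notifyClientReleased change above replaces a map iterator that was being dereferenced after the lock had been dropped with a copy taken while the lock is still held; a minimal sketch of the same pattern with hypothetical types:

#include <map>
#include <mutex>

struct Config { long pixels = 0; };

class Tracker {
public:
    void onReleased(int id) {
        Config snapshot;
        bool found = false;
        {
            std::scoped_lock lock(mLock);
            auto it = mConfigs.find(id);
            if (it != mConfigs.end()) {
                snapshot = it->second;  // copy while protected
                found = true;
            }
        }  // the iterator must not be used past this point
        if (found) {
            reportStopped(snapshot);  // may take mLock again internally
        }
    }
private:
    void reportStopped(const Config&) {}
    std::mutex mLock;
    std::map<int, Config> mConfigs;
};
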
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
index b7810e5..3124aa2 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.h
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -77,6 +77,16 @@
 struct ConcurrentCodecs {
     ConcurrentCodecsMap mCurrent;
     ConcurrentCodecsMap mPeak;
+    // concurrent HW Video codecs.
+    int mHWVideoCodecs = 0;
+    // concurrent SW Video codecs.
+    int mSWVideoCodecs = 0;
+    // concurrent Video codecs.
+    int mVideoCodecs = 0;
+    // concurrent Audio codecs.
+    int mAudioCodecs = 0;
+    // concurrent Image codecs.
+    int mImageCodecs = 0;
 };
 
 // Current and Peak pixel count for a process.
@@ -119,6 +129,9 @@
     // To be called when a client is stopped.
     void notifyClientStopped(const ClientConfigParcel& clientConfig);
 
+    // To be called when a client's configuration has changed.
+    void notifyClientConfigChanged(const ClientConfigParcel& clientConfig);
+
     // To be called when after a reclaim event.
     void pushReclaimAtom(const ClientInfoParcel& clientInfo,
                          const std::vector<int>& priorities,
@@ -143,8 +156,9 @@
     void increaseConcurrentCodecs(int32_t pid, CodecBucket codecBucket);
     void decreaseConcurrentCodecs(int32_t pid, CodecBucket codecBucket);
 
-    // To increase/decrease the concurrent pixels usage for a process.
+    // To increase/update/decrease the concurrent pixels usage for a process.
     void increasePixelCount(int32_t pid, long pixels);
+    void updatePixelCount(int32_t pid, long newPixels, long lastPixels);
     void decreasePixelCount(int32_t pid, long pixels);
 
     // Issued when the process/application with given pid/uid is terminated.
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 6822b06..23b23cc 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -133,7 +133,7 @@
 }
 
 static bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
-        MediaResourceParcel resource) {
+        const MediaResourceParcel& resource) {
     if (type != resource.type) {
       return false;
     }
@@ -1208,6 +1208,11 @@
     return Status::ok();
 }
 
+Status ResourceManagerService::notifyClientConfigChanged(const ClientConfigParcel& clientConfig) {
+    mResourceManagerMetrics->notifyClientConfigChanged(clientConfig);
+    return Status::ok();
+}
+
 long ResourceManagerService::getPeakConcurrentPixelCount(int pid) const {
     return mResourceManagerMetrics->getPeakConcurrentPixelCount(pid);
 }
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index b9756ae..1519e0e 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -127,6 +127,8 @@
 
     Status notifyClientStopped(const ClientConfigParcel& clientConfig) override;
 
+    Status notifyClientConfigChanged(const ClientConfigParcel& clientConfig) override;
+
 private:
     friend class ResourceManagerServiceTest;
     friend class DeathNotifier;
diff --git a/services/mediaresourcemanager/UidObserver.cpp b/services/mediaresourcemanager/UidObserver.cpp
index f321ebc..4bcd325 100644
--- a/services/mediaresourcemanager/UidObserver.cpp
+++ b/services/mediaresourcemanager/UidObserver.cpp
@@ -170,7 +170,7 @@
                                     int32_t /*capability*/) {
 }
 
-void UidObserver::onUidProcAdjChanged(uid_t /*uid*/) {
+void UidObserver::onUidProcAdjChanged(uid_t /*uid*/, int32_t /*adj*/) {
 }
 
 void UidObserver::binderDied(const wp<IBinder>& /*who*/) {
diff --git a/services/mediaresourcemanager/UidObserver.h b/services/mediaresourcemanager/UidObserver.h
index ed76839..b5cc3b9 100644
--- a/services/mediaresourcemanager/UidObserver.h
+++ b/services/mediaresourcemanager/UidObserver.h
@@ -74,7 +74,7 @@
     void onUidIdle(uid_t uid, bool disabled) override;
     void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
             int32_t capability) override;
-    void onUidProcAdjChanged(uid_t uid) override;
+    void onUidProcAdjChanged(uid_t uid, int32_t adj) override;
 
     // IServiceManager::LocalRegistrationCallback implementation.
     void onServiceRegistration(const String16& name,
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
index fcade38..5071fa3 100644
--- a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
@@ -156,4 +156,16 @@
      * @param clientConfig Configuration information of the client.
      */
     void notifyClientStopped(in ClientConfigParcel clientConfig);
+
+    /**
+     * Notify that the client's configuration has changed.
+     *
+     * This call is made to collect (concurrent) metrics about the resources
+     * associated with the codec (and also DRM sessions).
+     * It is called after notifyClientStarted (and before notifyClientStopped)
+     * to report changes to some of the client's configuration.
+     *
+     * @param clientConfig Configuration information of the client.
+     */
+    void notifyClientConfigChanged(in ClientConfigParcel clientConfig);
 }
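
A hedged caller-side sketch of the new method, assuming an already-connected NDK-backend proxy and a ClientConfigParcel that was previously passed to notifyClientStarted; the header paths and the surrounding function are illustrative, only the field names follow the usage in ResourceManagerMetrics.cpp:

#include <cstdint>
#include <memory>

#include <aidl/android/media/ClientConfigParcel.h>
#include <aidl/android/media/IResourceManagerService.h>

void onOutputResolutionChanged(
        const std::shared_ptr<::aidl::android::media::IResourceManagerService>& service,
        ::aidl::android::media::ClientConfigParcel config,
        int32_t newWidth, int32_t newHeight) {
    config.width = newWidth;
    config.height = newHeight;
    // Must land after notifyClientStarted and before notifyClientStopped so the
    // concurrent pixel-count metrics stay consistent.
    const ::ndk::ScopedAStatus status = service->notifyClientConfigChanged(config);
    (void)status;
}
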
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index d98974f..f38a085 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -27,21 +27,18 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_fuzz {
-    name: "mediaresourcemanager_fuzzer",
-    srcs: [
-        "mediaresourcemanager_fuzzer.cpp",
+cc_defaults {
+    name: "mediaresourcemanager_fuzzer_defaults",
+    defaults: [
+        "service_fuzzer_defaults",
     ],
     static_libs: [
         "liblog",
         "libresourcemanagerservice",
     ],
     shared_libs: [
-        "libbinder",
-        "libbinder_ndk",
         "libmedia",
         "libmediautils",
-        "libutils",
         "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
@@ -54,3 +51,39 @@
         componentid: 155276,
     },
 }
+
+cc_fuzz {
+    name: "mediaresourcemanager_fuzzer",
+    defaults: [
+        "mediaresourcemanager_fuzzer_defaults",
+    ],
+    srcs: [
+        "mediaresourcemanager_fuzzer.cpp",
+    ],
+}
+
+cc_fuzz {
+    name: "resourcemanager_service_fuzzer",
+    defaults: [
+        "mediaresourcemanager_fuzzer_defaults",
+    ],
+    srcs: [
+        "resourcemanager_service_fuzzer.cpp",
+    ],
+}
+
+cc_fuzz {
+    name: "resourceobserver_service_fuzzer",
+    defaults: [
+        "mediaresourcemanager_fuzzer_defaults",
+    ],
+    static_libs: [
+        "resourceobserver_aidl_interface-V1-ndk",
+    ],
+    srcs: [
+        "resourceobserver_service_fuzzer.cpp",
+    ],
+    fuzz_config: {
+        triage_assignee: "waghpawan@google.com",
+    },
+}
diff --git a/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
new file mode 100644
index 0000000..ca10d20
--- /dev/null
+++ b/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/binder_interface_utils.h>
+
+#include <fuzzbinder/libbinder_ndk_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "ResourceManagerService.h"
+
+using android::fuzzService;
+using android::ResourceManagerService;
+using ndk::SharedRefBase;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    auto service = SharedRefBase::make<ResourceManagerService>();
+    fuzzService(service->asBinder().get(), FuzzedDataProvider(data, size));
+    return 0;
+}
diff --git a/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp
new file mode 100644
index 0000000..e69368d
--- /dev/null
+++ b/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/binder_interface_utils.h>
+
+#include <fuzzbinder/libbinder_ndk_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "ResourceObserverService.h"
+
+using android::fuzzService;
+using android::ResourceObserverService;
+using ndk::SharedRefBase;
+
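+// Fuzz entry point: instantiate ResourceObserverService and let fuzzService
+// drive randomly generated binder transactions against it.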
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    auto service = SharedRefBase::make<ResourceObserverService>();
+    fuzzService(service->asBinder().get(), FuzzedDataProvider(data, size));
+    return 0;
+}
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index 2b94dbf..e7d14a0 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -196,11 +196,11 @@
             : AUDIO_SOURCE_DEFAULT;
     audio_flags_mask_t flags;
     if (direction == AAUDIO_DIRECTION_OUTPUT) {
-        flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
-                | AAudioConvert_allowCapturePolicyToAudioFlagsMask(
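+        // Derive the audio flags from the capture policy, the spatialization
+        // settings and the FAST output flag.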
+        flags = AAudio_computeAudioFlagsMask(
                         params->getAllowedCapturePolicy(),
                         params->getSpatializationBehavior(),
-                        params->isContentSpatialized()));
+                        params->isContentSpatialized(),
+                        AUDIO_OUTPUT_FLAG_FAST);
     } else {
         flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
                 | AAudioConvert_privacySensitiveToAudioFlagsMask(params->isPrivacySensitive()));
diff --git a/services/oboeservice/AAudioThread.cpp b/services/oboeservice/AAudioThread.cpp
index 549fa59..502d773 100644
--- a/services/oboeservice/AAudioThread.cpp
+++ b/services/oboeservice/AAudioThread.cpp
@@ -75,7 +75,9 @@
 
 aaudio_result_t AAudioThread::stop() {
     if (!mHasThread) {
-        ALOGE("stop() but no thread running");
+        // There are cases where the thread has been created but never started.
+        // Log a warning to attract attention; this is not a serious error.
+        ALOGW("stop() but no thread running");
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index c5080a4..3521979 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -78,12 +78,38 @@
 
 ]
 
+cc_defaults {
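+    // Dependency list for libaaudioservice, kept in a defaults block so it
+    // can be reused by other modules (for example, fuzzers or tests).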
+    name: "libaaudioservice_dependencies",
 
-cc_library {
+    shared_libs: [
+        "libaaudio_internal",
+        "libaudioclient",
+        "libaudioutils",
+        "libmedia_helper",
+        "libmediametrics",
+        "libmediautils",
+        "libbase",
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libutils",
+        "aaudio-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "packagemanager_aidl-cpp",
+    ],
+
+    static_libs: [
+        "libaudioflinger",
+    ]
+}
+
+cc_library_static {
 
     name: "libaaudioservice",
 
     defaults: [
+        "libaaudioservice_dependencies",
         "latest_android_media_audio_common_types_cpp_shared",
     ],
 
@@ -116,25 +142,6 @@
         "-Werror",
     ],
 
-    shared_libs: [
-        "libaaudio_internal",
-        "libaudioclient",
-        "libaudioflinger",
-        "libaudioutils",
-        "libmedia_helper",
-        "libmediametrics",
-        "libmediautils",
-        "libbase",
-        "libbinder",
-        "libcutils",
-        "liblog",
-        "libutils",
-        "aaudio-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "libaudioclient_aidl_conversion",
-        "packagemanager_aidl-cpp",
-    ],
-
     export_shared_lib_headers: [
         "libaaudio_internal",
         "framework-permission-aidl-cpp",
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
index f047065..f5c2e6c 100644
--- a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -403,13 +403,6 @@
 
     request.getConfiguration().setBufferCapacity(fdp.ConsumeIntegral<int32_t>());
 
-    request.getConfiguration().setHardwareSampleRate(fdp.ConsumeIntegral<int32_t>());
-    request.getConfiguration().setHardwareSamplesPerFrame(fdp.ConsumeIntegral<int32_t>());
-    request.getConfiguration().setHardwareFormat((audio_format_t)(
-        fdp.ConsumeBool()
-            ? fdp.ConsumeIntegral<int32_t>()
-            : kAAudioFormats[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioFormats - 1)]));
-
     auto streamHandleInfo = mClient->openStream(request, configurationOutput);
     if (streamHandleInfo.getHandle() < 0) {
         // invalid request, stream not opened.
diff --git a/services/tuner/TunerService.cpp b/services/tuner/TunerService.cpp
index e5bcf1f..9a1e8bb 100644
--- a/services/tuner/TunerService.cpp
+++ b/services/tuner/TunerService.cpp
@@ -100,8 +100,11 @@
 
     if (fallBackToOpenDemux) {
         auto status = mTuner->openDemux(&ids, &demux);
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
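+        // Wrap the demux in a TunerDemux only when openDemux succeeded;
+        // otherwise propagate the HAL status to the caller.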
+        if (status.isOk()) {
+            *_aidl_return = ::ndk::SharedRefBase::make<TunerDemux>(demux, ids[0],
+                                                                   this->ref<TunerService>());
+        }
+        return status;
     } else {
         int id = TunerHelper::getResourceIdFromHandle(in_demuxHandle, DEMUX);
         auto status = mTuner->openDemuxById(id, &demux);
diff --git a/services/tuner/hidl/TunerHidlFilter.cpp b/services/tuner/hidl/TunerHidlFilter.cpp
index 1789028..617622d 100644
--- a/services/tuner/hidl/TunerHidlFilter.cpp
+++ b/services/tuner/hidl/TunerHidlFilter.cpp
@@ -368,7 +368,7 @@
     }
 
     hidl_handle handle;
-    handle.setTo(makeFromAidl(in_handle), true);
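+    // Do not pass ownership of the native handle to the hidl_handle.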
+    handle.setTo(makeFromAidl(in_handle));
     HidlResult res = mFilter->releaseAvHandle(handle, in_avDataId);
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
diff --git a/services/tuner/mediatuner.rc b/services/tuner/mediatuner.rc
index 90a0090..8e5d100 100644
--- a/services/tuner/mediatuner.rc
+++ b/services/tuner/mediatuner.rc
@@ -4,6 +4,7 @@
 service media.tuner /system/bin/mediatuner
     class main
     group media
+    user root
     ioprio rt 4
     task_profiles ProcessCapacityHigh HighPerformance
     interface aidl media.tuner