Merge "Include module dependencies under `data`"
diff --git a/camera/Android.bp b/camera/Android.bp
index e44202b..3e28e4f 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -93,6 +93,7 @@
         "libgui",
         "libcamera_metadata",
         "libnativewindow",
+        "lib-platform-compat-native-api",
     ],
 
     include_dirs: [
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 604dbb8..d1618e4 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -71,10 +71,10 @@
 }
 
 sp<Camera> Camera::connect(int cameraId, const String16& clientPackageName,
-        int clientUid, int clientPid, int targetSdkVersion)
+        int clientUid, int clientPid, int targetSdkVersion, bool overrideToPortrait)
 {
     return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
-            clientPid, targetSdkVersion);
+            clientPid, targetSdkVersion, overrideToPortrait);
 }
 
 status_t Camera::reconnect()
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 24c9108..0a5bc12 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -23,6 +23,7 @@
 #include <cutils/properties.h>
 
 #include <android/hardware/ICameraService.h>
+#include <com/android/internal/compat/IPlatformCompatNative.h>
 
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
@@ -161,7 +162,8 @@
 template <typename TCam, typename TCamTraits>
 sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
                                                const String16& clientPackageName,
-                                               int clientUid, int clientPid, int targetSdkVersion)
+                                               int clientUid, int clientPid, int targetSdkVersion,
+                                               bool overrideToPortrait)
 {
     ALOGV("%s: connect", __FUNCTION__);
     sp<TCam> c = new TCam(cameraId);
@@ -171,8 +173,9 @@
     binder::Status ret;
     if (cs != nullptr) {
         TCamConnectService fnConnectService = TCamTraits::fnConnectService;
+        ALOGI("Connect camera (legacy API) - overrideToPortrait %d", overrideToPortrait);
         ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
-                                               clientPid, targetSdkVersion, /*out*/ &c->mCamera);
+                clientPid, targetSdkVersion, overrideToPortrait, /*out*/ &c->mCamera);
     }
     if (ret.isOk() && c->mCamera != nullptr) {
         IInterface::asBinder(c->mCamera)->linkToDeath(c);
@@ -273,10 +276,11 @@
 // this can be in BaseCamera but it should be an instance method
 template <typename TCam, typename TCamTraits>
 status_t CameraBase<TCam, TCamTraits>::getCameraInfo(int cameraId,
+        bool overrideToPortrait,
         struct hardware::CameraInfo* cameraInfo) {
     const sp<::android::hardware::ICameraService> cs = getCameraService();
     if (cs == 0) return UNKNOWN_ERROR;
-    binder::Status res = cs->getCameraInfo(cameraId, cameraInfo);
+    binder::Status res = cs->getCameraInfo(cameraId, overrideToPortrait, cameraInfo);
     return res.isOk() ? OK : res.serviceSpecificErrorCode();
 }
 
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 1e748c7..01baba1 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -67,7 +67,7 @@
     /**
      * Fetch basic camera information for a camera device
      */
-    CameraInfo getCameraInfo(int cameraId);
+    CameraInfo getCameraInfo(int cameraId, boolean overrideToPortrait);
 
     /**
      * Default UID/PID values for non-privileged callers of
@@ -83,7 +83,8 @@
             int cameraId,
             String opPackageName,
             int clientUid, int clientPid,
-            int targetSdkVersion);
+            int targetSdkVersion,
+            boolean overrideToPortrait);
 
     /**
      * Open a camera device through the new camera API
@@ -94,7 +95,8 @@
             String opPackageName,
             @nullable String featureId,
             int clientUid, int oomScoreOffset,
-            int targetSdkVersion);
+            int targetSdkVersion,
+            boolean overrideToPortrait);
 
     /**
      * Add listener for changes to camera device and flashlight state.
@@ -135,7 +137,8 @@
      * Read the static camera metadata for a camera device.
      * Only supported for device HAL versions >= 3.2
      */
-    CameraMetadataNative getCameraCharacteristics(String cameraId, int targetSdkVersion);
+    CameraMetadataNative getCameraCharacteristics(String cameraId, int targetSdkVersion,
+            boolean overrideToPortrait);
 
     /**
      * Read in the vendor tag descriptors from the camera module HAL.
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 58ccd69..26c36a7 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -58,7 +58,7 @@
     typedef ::android::hardware::ICameraClient TCamCallbacks;
     typedef ::android::binder::Status(::android::hardware::ICameraService::*TCamConnectService)
         (const sp<::android::hardware::ICameraClient>&,
-        int, const String16&, int, int, int,
+        int, const String16&, int, int, int, bool,
         /*out*/
         sp<::android::hardware::ICamera>*);
     static TCamConnectService     fnConnectService;
@@ -81,7 +81,8 @@
     static  sp<Camera>  create(const sp<::android::hardware::ICamera>& camera);
     static  sp<Camera>  connect(int cameraId,
                                 const String16& clientPackageName,
-                                int clientUid, int clientPid, int targetSdkVersion);
+                                int clientUid, int clientPid, int targetSdkVersion,
+                                bool overrideToPortrait);
 
             virtual     ~Camera();
 
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 8e53968..9d0721b 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -119,7 +119,8 @@
 
     static sp<TCam>      connect(int cameraId,
                                  const String16& clientPackageName,
-                                 int clientUid, int clientPid, int targetSdkVersion);
+                                 int clientUid, int clientPid, int targetSdkVersion,
+                                 bool overrideToPortrait);
     virtual void         disconnect();
 
     void                 setListener(const sp<TCamListener>& listener);
@@ -127,6 +128,7 @@
     static int           getNumberOfCameras();
 
     static status_t      getCameraInfo(int cameraId,
+                                       bool overrideToPortrait,
                                        /*out*/
                                        struct hardware::CameraInfo* cameraInfo);
 
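A minimal caller sketch (not part of the patch) exercising the updated native signatures declared above; the package name, camera id, and targetSdkVersion value are placeholders.

// Hedged sketch: relies only on the Camera/CameraBase declarations shown in this patch.
#include <android/api-level.h>
#include <android/hardware/ICameraService.h>
#include <camera/Camera.h>
#include <utils/Errors.h>
#include <utils/String16.h>

using namespace android;

static void openLegacyCameraWithOverride() {
    hardware::CameraInfo info;
    // New argument: query the sensor orientation without the portrait override applied.
    if (Camera::getCameraInfo(/*cameraId*/ 0, /*overrideToPortrait*/ false, &info) != OK) {
        return;
    }

    sp<Camera> camera = Camera::connect(/*cameraId*/ 0,
            String16("com.example.app"),                    // placeholder package name
            hardware::ICameraService::USE_CALLING_UID,
            hardware::ICameraService::USE_CALLING_PID,
            /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
            /*overrideToPortrait*/ false);
    if (camera != nullptr) {
        camera->disconnect();
    }
}

For comparison, the NDK path in ACameraManager.cpp (next file) always passes overrideToPortrait as true, while the binder tests in this patch pass false.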
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 5892f1a..23d90cc 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -692,10 +692,11 @@
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
+
     CameraMetadata rawMetadata;
     int targetSdkVersion = android_get_application_target_sdk_version();
     binder::Status serviceRet = cs->getCameraCharacteristics(String16(cameraIdStr),
-            targetSdkVersion, &rawMetadata);
+            targetSdkVersion, /*overrideToPortrait*/true, &rawMetadata);
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
             case hardware::ICameraService::ERROR_DISCONNECTED:
@@ -747,7 +748,7 @@
     binder::Status serviceRet = cs->connectDevice(
             callbacks, String16(cameraId), String16(""), {},
             hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
-            targetSdkVersion, /*out*/&deviceRemote);
+            targetSdkVersion, /*overrideToPortrait*/true, /*out*/&deviceRemote);
 
     if (!serviceRet.isOk()) {
         ALOGE("%s: connect camera device failed: %s", __FUNCTION__, serviceRet.toString8().string());
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 0d156a5..9174adf 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -9215,24 +9215,25 @@
      * camera's crop region is set to maximum size, the FOV of the physical streams for the
      * ultrawide lens will be the same as the logical stream, by making the crop region
      * smaller than its active array size to compensate for the smaller focal length.</p>
-     * <p>There are two ways for the application to capture RAW images from a logical camera
-     * with RAW capability:</p>
+     * <p>For a logical camera, typically the underlying physical cameras have different RAW
+     * capabilities (such as resolution or CFA pattern). There are two ways for the
+     * application to capture RAW images from the logical camera:</p>
      * <ul>
-     * <li>Because the underlying physical cameras may have different RAW capabilities (such
-     * as resolution or CFA pattern), to maintain backward compatibility, when a RAW stream
-     * is configured, the camera device makes sure the default active physical camera remains
-     * active and does not switch to other physical cameras. (One exception is that, if the
-     * logical camera consists of identical image sensors and advertises multiple focalLength
-     * due to different lenses, the camera device may generate RAW images from different
-     * physical cameras based on the focalLength being set by the application.) This
-     * backward-compatible approach usually results in loss of optical zoom, to telephoto
-     * lens or to ultrawide lens.</li>
-     * <li>Alternatively, to take advantage of the full zoomRatio range of the logical camera,
-     * the application should use <a href="https://developer.android.com/reference/android/hardware/camera2/MultiResolutionImageReader.html">MultiResolutionImageReader</a>
-     * to capture RAW images from the currently active physical camera. Because different
-     * physical camera may have different RAW characteristics, the application needs to use
-     * the characteristics and result metadata of the active physical camera for the
-     * relevant RAW metadata.</li>
+     * <li>If the logical camera has RAW capability, the application can create and use RAW
+     * streams in the same way as before. In case a RAW stream is configured, to maintain
+     * backward compatibility, the camera device makes sure the default active physical
+     * camera remains active and does not switch to other physical cameras. (One exception
+     * is that, if the logical camera consists of identical image sensors and advertises
+     * multiple focalLength due to different lenses, the camera device may generate RAW
+     * images from different physical cameras based on the focalLength being set by the
+     * application.) This backward-compatible approach usually results in loss of optical
+     * zoom, to telephoto lens or to ultrawide lens.</li>
+     * <li>Alternatively, if supported by the device,
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/MultiResolutionImageReader.html">MultiResolutionImageReader</a>
+     * can be used to capture RAW images from one of the underlying physical cameras (
+     * depending on current zoom level). Because different physical cameras may have
+     * different RAW characteristics, the application needs to use the characteristics
+     * and result metadata of the active physical camera for the relevant RAW metadata.</li>
      * </ul>
      * <p>The capture request and result metadata tags required for backward compatible camera
      * functionalities will be solely based on the logical camera capability. On the other
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 17ea512..1af5637 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -370,7 +370,7 @@
         // Check metadata binder call
         CameraMetadata metadata;
         res = service->getCameraCharacteristics(cameraId,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &metadata);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_FALSE(metadata.isEmpty());
 
@@ -386,7 +386,8 @@
         sp<hardware::camera2::ICameraDeviceUser> device;
         res = service->connectDevice(callbacks, cameraId, String16("meeeeeeeee!"),
                 {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&device);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                /*overrideToPortrait*/false, /*out*/&device);
         EXPECT_TRUE(res.isOk()) << res;
         ASSERT_NE(nullptr, device.get());
         device->disconnect();
@@ -429,7 +430,8 @@
             SCOPED_TRACE("openNewDevice");
             binder::Status res = service->connectDevice(callbacks, deviceId, String16("meeeeeeeee!"),
                     {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
-                    /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&device);
+                    /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                    /*overrideToPortrait*/false, /*out*/&device);
             EXPECT_TRUE(res.isOk()) << res;
         }
         auto p = std::make_pair(callbacks, device);
diff --git a/camera/tests/CameraCharacteristicsPermission.cpp b/camera/tests/CameraCharacteristicsPermission.cpp
index 76dc38c..f2fa48c 100644
--- a/camera/tests/CameraCharacteristicsPermission.cpp
+++ b/camera/tests/CameraCharacteristicsPermission.cpp
@@ -74,7 +74,8 @@
         CameraMetadata metadata;
         std::vector<int32_t> tagsNeedingPermission;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                /*overrideToPortrait*/false, &metadata);
         ASSERT_TRUE(rc.isOk());
         EXPECT_FALSE(metadata.isEmpty());
         EXPECT_EQ(metadata.removePermissionEntries(CAMERA_METADATA_INVALID_VENDOR_ID,
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index efd9dae..bdfb84a 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -182,7 +182,8 @@
 
         CameraMetadata metadata;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                &metadata);
         if (!rc.isOk()) {
             // The test is relevant only for cameras with Hal 3.x
             // support.
@@ -209,7 +210,8 @@
         rc = mCameraService->connect(this, cameraId,
                 String16("ZSLTest"), hardware::ICameraService::USE_CALLING_UID,
                 hardware::ICameraService::USE_CALLING_PID,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraDevice);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                /*overrideToPortrait*/false, &cameraDevice);
         EXPECT_TRUE(rc.isOk());
 
         CameraParameters params(cameraDevice->getParameters());
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 2e0b678..d866c18 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -13,6 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#include <algorithm>
+#include <string_view>
+#include <type_traits>
 
 #include <assert.h>
 #include <ctype.h>
@@ -100,7 +103,6 @@
 static const uint32_t kFallbackHeight = 720;
 static const char* kMimeTypeAvc = "video/avc";
 static const char* kMimeTypeApplicationOctetstream = "application/octet-stream";
-static const char* kWinscopeMagicString = "#VV1NSC0PET1ME!#";
 
 // Command-line parameters.
 static bool gVerbose = false;           // chatty on stdout
@@ -354,14 +356,15 @@
 }
 
 /*
- * Writes an unsigned integer byte-by-byte in little endian order regardless
+ * Writes an unsigned/signed integer byte-by-byte in little endian order regardless
  * of the platform endianness.
  */
-template <typename UINT>
-static void writeValueLE(UINT value, uint8_t* buffer) {
-    for (int i = 0; i < sizeof(UINT); ++i) {
-        buffer[i] = static_cast<uint8_t>(value);
-        value >>= 8;
+template <typename T>
+static void writeValueLE(T value, uint8_t* buffer) {
+    std::remove_const_t<T> temp = value;
+    for (int i = 0; i < sizeof(T); ++i) {
+        buffer[i] = static_cast<std::uint8_t>(temp & 0xff);
+        temp >>= 8;
     }
 }
 
@@ -377,16 +380,18 @@
  * - for every frame its presentation time relative to the elapsed realtime clock in microseconds
  *   (as little endian uint64).
  */
-static status_t writeWinscopeMetadata(const Vector<int64_t>& timestamps,
+static status_t writeWinscopeMetadataLegacy(const Vector<int64_t>& timestamps,
         const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
-    ALOGV("Writing metadata");
+    static constexpr auto kWinscopeMagicStringLegacy = "#VV1NSC0PET1ME!#";
+
+    ALOGV("Writing winscope metadata legacy");
     int64_t systemTimeToElapsedTimeOffsetMicros = (android::elapsedRealtimeNano()
         - systemTime(SYSTEM_TIME_MONOTONIC)) / 1000;
     sp<ABuffer> buffer = new ABuffer(timestamps.size() * sizeof(int64_t)
-        + sizeof(uint32_t) + strlen(kWinscopeMagicString));
+        + sizeof(uint32_t) + strlen(kWinscopeMagicStringLegacy));
     uint8_t* pos = buffer->data();
-    strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicString);
-    pos += strlen(kWinscopeMagicString);
+    strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicStringLegacy);
+    pos += strlen(kWinscopeMagicStringLegacy);
     writeValueLE<uint32_t>(timestamps.size(), pos);
     pos += sizeof(uint32_t);
     for (size_t idx = 0; idx < timestamps.size(); ++idx) {
@@ -395,10 +400,79 @@
         pos += sizeof(uint64_t);
     }
     AMediaCodecBufferInfo bufferInfo = {
-        0,
+        0 /* offset */,
         static_cast<int32_t>(buffer->size()),
-        timestamps[0],
-        0
+        timestamps[0] /* presentationTimeUs */,
+        0 /* flags */
+    };
+    return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
+}
+
+/*
+ * Saves metadata needed by Winscope to synchronize the screen recording playback with other traces.
+ *
+ * The metadata (version 2) is written as a binary array with the following format:
+ * - winscope magic string (#VV1NSC0PET1ME2#, 16B).
+ * - the metadata version number (4B little endian).
+ * - Realtime-to-elapsed time offset in nanoseconds (8B little endian).
+ * - the recorded frames count (4B little endian)
+ * - for each recorded frame:
+ *     - System time in elapsed clock timebase in nanoseconds (8B little endian).
+ *
+ *
+ * Metadata version 2 changes
+ *
+ * Use elapsed time for compatibility with other UI traces (most of them):
+ * - Realtime-to-elapsed time offset (instead of realtime-to-monotonic)
+ * - Frame timestamps in elapsed clock timebase (instead of monotonic)
+ */
+static status_t writeWinscopeMetadata(const Vector<std::int64_t>& timestampsMonotonicUs,
+        const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
+    ALOGV("Writing winscope metadata");
+
+    static constexpr auto kWinscopeMagicString = std::string_view {"#VV1NSC0PET1ME2#"};
+    static constexpr std::uint32_t metadataVersion = 2;
+
+    const auto elapsedTimeNs = android::elapsedRealtimeNano();
+    const std::int64_t elapsedToMonotonicTimeOffsetNs =
+            elapsedTimeNs - systemTime(SYSTEM_TIME_MONOTONIC);
+    const std::int64_t realToElapsedTimeOffsetNs =
+            systemTime(SYSTEM_TIME_REALTIME) - elapsedTimeNs;
+    const std::uint32_t framesCount = static_cast<std::uint32_t>(timestampsMonotonicUs.size());
+
+    sp<ABuffer> buffer = new ABuffer(
+        kWinscopeMagicString.size() +
+        sizeof(decltype(metadataVersion)) +
+        sizeof(decltype(realToElapsedTimeOffsetNs)) +
+        sizeof(decltype(framesCount)) +
+        framesCount * sizeof(std::uint64_t)
+    );
+    std::uint8_t* pos = buffer->data();
+
+    std::copy(kWinscopeMagicString.cbegin(), kWinscopeMagicString.cend(), pos);
+    pos += kWinscopeMagicString.size();
+
+    writeValueLE(metadataVersion, pos);
+    pos += sizeof(decltype(metadataVersion));
+
+    writeValueLE(realToElapsedTimeOffsetNs, pos);
+    pos += sizeof(decltype(realToElapsedTimeOffsetNs));
+
+    writeValueLE(framesCount, pos);
+    pos += sizeof(decltype(framesCount));
+
+    for (const auto timestampMonotonicUs : timestampsMonotonicUs) {
+        const auto timestampElapsedNs =
+                elapsedToMonotonicTimeOffsetNs + timestampMonotonicUs * 1000;
+        writeValueLE<std::uint64_t>(timestampElapsedNs, pos);
+        pos += sizeof(std::uint64_t);
+    }
+
+    AMediaCodecBufferInfo bufferInfo = {
+        0 /* offset */,
+        static_cast<std::int32_t>(buffer->size()),
+        timestampsMonotonicUs[0] /* presentationTimeUs */,
+        0 /* flags */
     };
     return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
 }
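A standalone reader sketch of the version-2 layout emitted above, for illustration only (not part of the patch). Field widths follow the writer code: uint32 version, int64 realtime-to-elapsed offset, uint32 frame count, then one uint64 elapsed-time timestamp per frame, all little endian after the 16-byte magic string.

#include <cstdint>
#include <cstring>
#include <optional>
#include <string_view>
#include <type_traits>
#include <vector>

struct WinscopeMetadataV2 {
    std::int64_t realToElapsedTimeOffsetNs;
    std::vector<std::uint64_t> frameTimesElapsedNs;
};

// Mirror of writeValueLE(): reassemble an integer from little-endian bytes.
template <typename T>
static T readValueLE(const std::uint8_t* p) {
    std::make_unsigned_t<T> value{};
    for (std::size_t i = 0; i < sizeof(T); ++i) {
        value |= static_cast<std::make_unsigned_t<T>>(p[i]) << (8 * i);
    }
    return static_cast<T>(value);
}

static std::optional<WinscopeMetadataV2> parseWinscopeMetadataV2(
        const std::uint8_t* data, std::size_t size) {
    static constexpr std::string_view kMagic{"#VV1NSC0PET1ME2#"};
    constexpr std::size_t kHeaderSize = 16 + 4 + 8 + 4;  // magic, version, offset, count
    if (size < kHeaderSize || std::memcmp(data, kMagic.data(), kMagic.size()) != 0) {
        return std::nullopt;
    }
    const std::uint8_t* pos = data + kMagic.size();
    if (readValueLE<std::uint32_t>(pos) != 2) return std::nullopt;  // version check
    pos += sizeof(std::uint32_t);

    WinscopeMetadataV2 out;
    out.realToElapsedTimeOffsetNs = readValueLE<std::int64_t>(pos);
    pos += sizeof(std::int64_t);

    const std::uint32_t framesCount = readValueLE<std::uint32_t>(pos);
    pos += sizeof(std::uint32_t);
    if (size < kHeaderSize + static_cast<std::size_t>(framesCount) * sizeof(std::uint64_t)) {
        return std::nullopt;
    }
    for (std::uint32_t i = 0; i < framesCount; ++i, pos += sizeof(std::uint64_t)) {
        out.frameTimesElapsedNs.push_back(readValueLE<std::uint64_t>(pos));
    }
    return out;
}

The legacy v1 writer above is kept unchanged, so a recording carries both metadata tracks side by side (see the muxer changes further down).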
@@ -418,11 +492,12 @@
     static int kTimeout = 250000;   // be responsive on signal
     status_t err;
     ssize_t trackIdx = -1;
+    ssize_t metaLegacyTrackIdx = -1;
     ssize_t metaTrackIdx = -1;
     uint32_t debugNumFrames = 0;
     int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
     int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
-    Vector<int64_t> timestamps;
+    Vector<int64_t> timestampsMonotonicUs;
     bool firstFrame = true;
 
     assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));
@@ -520,9 +595,9 @@
                     sp<ABuffer> buffer = new ABuffer(
                             buffers[bufIndex]->data(), buffers[bufIndex]->size());
                     AMediaCodecBufferInfo bufferInfo = {
-                        0,
+                        0 /* offset */,
                         static_cast<int32_t>(buffer->size()),
-                        ptsUsec,
+                        ptsUsec /* presentationTimeUs */,
                         flags
                     };
                     err = AMediaMuxer_writeSampleData(muxer, trackIdx, buffer->data(), &bufferInfo);
@@ -532,7 +607,7 @@
                         return err;
                     }
                     if (gOutputFormat == FORMAT_MP4) {
-                        timestamps.add(ptsUsec);
+                        timestampsMonotonicUs.add(ptsUsec);
                     }
                 }
                 debugNumFrames++;
@@ -565,6 +640,7 @@
                     if (gOutputFormat == FORMAT_MP4) {
                         AMediaFormat *metaFormat = AMediaFormat_new();
                         AMediaFormat_setString(metaFormat, AMEDIAFORMAT_KEY_MIME, kMimeTypeApplicationOctetstream);
+                        metaLegacyTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
                         metaTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
                         AMediaFormat_delete(metaFormat);
                     }
@@ -604,10 +680,16 @@
                         systemTime(CLOCK_MONOTONIC) - startWhenNsec));
         fflush(stdout);
     }
-    if (metaTrackIdx >= 0 && !timestamps.isEmpty()) {
-        err = writeWinscopeMetadata(timestamps, metaTrackIdx, muxer);
+    if (metaLegacyTrackIdx >= 0 && metaTrackIdx >= 0 && !timestampsMonotonicUs.isEmpty()) {
+        err = writeWinscopeMetadataLegacy(timestampsMonotonicUs, metaLegacyTrackIdx, muxer);
         if (err != NO_ERROR) {
-            fprintf(stderr, "Failed writing metadata to muxer (err=%d)\n", err);
+            fprintf(stderr, "Failed writing legacy winscope metadata to muxer (err=%d)\n", err);
+            return err;
+        }
+
+        err = writeWinscopeMetadata(timestampsMonotonicUs, metaTrackIdx, muxer);
+        if (err != NO_ERROR) {
+            fprintf(stderr, "Failed writing winscope metadata to muxer (err=%d)\n", err);
             return err;
         }
     }
diff --git a/drm/TEST_MAPPING b/drm/TEST_MAPPING
index 3642898..b2d4d6e 100644
--- a/drm/TEST_MAPPING
+++ b/drm/TEST_MAPPING
@@ -1,5 +1,5 @@
 {
-  "presubmit-large": [
+  "presubmit": [
     // The following tests validate codec and drm path.
     {
       "name": "GtsMediaTestCases",
diff --git a/drm/libmediadrm/DrmHalAidl.cpp b/drm/libmediadrm/DrmHalAidl.cpp
index 1844acb..5ec7337 100644
--- a/drm/libmediadrm/DrmHalAidl.cpp
+++ b/drm/libmediadrm/DrmHalAidl.cpp
@@ -459,7 +459,7 @@
 
 DrmStatus DrmHalAidl::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
     Mutex::Autolock autoLock(mLock);
-
+    if (mInitCheck == ERROR_UNSUPPORTED) return mInitCheck;
     Uuid uuidAidl = DrmUtils::toAidlUuid(uuid);
     std::string appPackageNameAidl = toStdString(appPackageName);
     std::shared_ptr<IDrmPluginAidl> pluginAidl;
@@ -1216,7 +1216,7 @@
     closeOpenSessions();
 
     Mutex::Autolock autoLock(mLock);
-    reportFrameworkMetrics(reportPluginMetrics());
+    if (mInitCheck == OK) reportFrameworkMetrics(reportPluginMetrics());
 
     setListener(NULL);
     mInitCheck = NO_INIT;
diff --git a/drm/libmediadrm/DrmHalHidl.cpp b/drm/libmediadrm/DrmHalHidl.cpp
index 6010739..6106aa7 100644
--- a/drm/libmediadrm/DrmHalHidl.cpp
+++ b/drm/libmediadrm/DrmHalHidl.cpp
@@ -557,6 +557,7 @@
 DrmStatus DrmHalHidl::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
     Mutex::Autolock autoLock(mLock);
 
+    if (mInitCheck == ERROR_UNSUPPORTED) return mInitCheck;
     for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
         auto hResult = mFactories[i]->isCryptoSchemeSupported(uuid);
         if (hResult.isOk() && hResult) {
diff --git a/drm/libmediadrm/DrmMetricsLogger.cpp b/drm/libmediadrm/DrmMetricsLogger.cpp
index 89b1dcc..bc004c8 100644
--- a/drm/libmediadrm/DrmMetricsLogger.cpp
+++ b/drm/libmediadrm/DrmMetricsLogger.cpp
@@ -41,6 +41,70 @@
 
 DrmMetricsLogger::~DrmMetricsLogger() {}
 
+int MediaErrorToJavaError(status_t err) {
+#define STATUS_CASE(status) \
+    case status: \
+        return J##status
+
+    switch (err) {
+        STATUS_CASE(ERROR_DRM_UNKNOWN);
+        STATUS_CASE(ERROR_DRM_NO_LICENSE);
+        STATUS_CASE(ERROR_DRM_LICENSE_EXPIRED);
+        STATUS_CASE(ERROR_DRM_RESOURCE_BUSY);
+        STATUS_CASE(ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION);
+        STATUS_CASE(ERROR_DRM_SESSION_NOT_OPENED);
+        STATUS_CASE(ERROR_DRM_CANNOT_HANDLE);
+        STATUS_CASE(ERROR_DRM_INSUFFICIENT_SECURITY);
+        STATUS_CASE(ERROR_DRM_FRAME_TOO_LARGE);
+        STATUS_CASE(ERROR_DRM_SESSION_LOST_STATE);
+        STATUS_CASE(ERROR_DRM_CERTIFICATE_MALFORMED);
+        STATUS_CASE(ERROR_DRM_CERTIFICATE_MISSING);
+        STATUS_CASE(ERROR_DRM_CRYPTO_LIBRARY);
+        STATUS_CASE(ERROR_DRM_GENERIC_OEM);
+        STATUS_CASE(ERROR_DRM_GENERIC_PLUGIN);
+        STATUS_CASE(ERROR_DRM_INIT_DATA);
+        STATUS_CASE(ERROR_DRM_KEY_NOT_LOADED);
+        STATUS_CASE(ERROR_DRM_LICENSE_PARSE);
+        STATUS_CASE(ERROR_DRM_LICENSE_POLICY);
+        STATUS_CASE(ERROR_DRM_LICENSE_RELEASE);
+        STATUS_CASE(ERROR_DRM_LICENSE_REQUEST_REJECTED);
+        STATUS_CASE(ERROR_DRM_LICENSE_RESTORE);
+        STATUS_CASE(ERROR_DRM_LICENSE_STATE);
+        STATUS_CASE(ERROR_DRM_MEDIA_FRAMEWORK);
+        STATUS_CASE(ERROR_DRM_PROVISIONING_CERTIFICATE);
+        STATUS_CASE(ERROR_DRM_PROVISIONING_CONFIG);
+        STATUS_CASE(ERROR_DRM_PROVISIONING_PARSE);
+        STATUS_CASE(ERROR_DRM_PROVISIONING_REQUEST_REJECTED);
+        STATUS_CASE(ERROR_DRM_PROVISIONING_RETRY);
+        STATUS_CASE(ERROR_DRM_RESOURCE_CONTENTION);
+        STATUS_CASE(ERROR_DRM_SECURE_STOP_RELEASE);
+        STATUS_CASE(ERROR_DRM_STORAGE_READ);
+        STATUS_CASE(ERROR_DRM_STORAGE_WRITE);
+        STATUS_CASE(ERROR_DRM_ZERO_SUBSAMPLES);
+#undef STATUS_CASE
+    }
+    return static_cast<int>(err);
+}
+
+int DrmPluginSecurityLevelToJavaSecurityLevel(DrmPlugin::SecurityLevel securityLevel) {
+#define STATUS_CASE(status) \
+    case DrmPlugin::k##status: \
+        return J##status
+
+    switch (securityLevel) {
+        STATUS_CASE(SecurityLevelUnknown);
+        STATUS_CASE(SecurityLevelSwSecureCrypto);
+        STATUS_CASE(SecurityLevelSwSecureDecode);
+        STATUS_CASE(SecurityLevelHwSecureCrypto);
+        STATUS_CASE(SecurityLevelHwSecureDecode);
+        STATUS_CASE(SecurityLevelHwSecureAll);
+        STATUS_CASE(SecurityLevelMax);
+#undef STATUS_CASE
+    }
+    return static_cast<int>(securityLevel);
+}
+
+
 DrmStatus DrmMetricsLogger::initCheck() const {
     DrmStatus status = mImpl->initCheck();
     if (status != OK) {
@@ -63,6 +127,8 @@
 DrmStatus DrmMetricsLogger::createPlugin(const uint8_t uuid[IDRM_UUID_SIZE],
                                          const String8& appPackageName) {
     std::memcpy(mUuid.data(), uuid, IDRM_UUID_SIZE);
+    mUuid[0] = betoh64(mUuid[0]);
+    mUuid[1] = betoh64(mUuid[1]);
     if (kUuidSchemeMap.count(mUuid)) {
         mScheme = kUuidSchemeMap.at(mUuid);
     } else {
@@ -73,6 +139,10 @@
     }
     DrmStatus status = mImpl->createPlugin(uuid, appPackageName);
     if (status == OK) {
+        String8 version8;
+        if (getPropertyString(String8("version"), version8) == OK) {
+            mVersion = version8.string();
+        }
         reportMediaDrmCreated();
     } else {
         reportMediaDrmErrored(status, __func__);
@@ -101,6 +171,9 @@
         if (getSecurityLevel(sessionId, &ctx.mActualSecurityLevel) != OK) {
             ctx.mActualSecurityLevel = DrmPlugin::kSecurityLevelUnknown;
         }
+        if (!mVersion.empty()) {
+            ctx.mVersion = mVersion;
+        }
         {
             const std::lock_guard<std::mutex> lock(mSessionMapMutex);
             mSessionMap.insert({sessionKey, ctx});
@@ -460,10 +533,11 @@
 void DrmMetricsLogger::reportMediaDrmCreated() const {
     mediametrics_handle_t handle(mediametrics_create("mediadrm.created"));
     mediametrics_setCString(handle, "scheme", mScheme.c_str());
-    mediametrics_setInt64(handle, "uuid_msb", be64toh(mUuid[0]));
-    mediametrics_setInt64(handle, "uuid_lsb", be64toh(mUuid[1]));
+    mediametrics_setInt64(handle, "uuid_msb", mUuid[0]);
+    mediametrics_setInt64(handle, "uuid_lsb", mUuid[1]);
     mediametrics_setInt32(handle, "frontend", mFrontend);
     mediametrics_setCString(handle, "object_nonce", mObjNonce.c_str());
+    mediametrics_setCString(handle, "version", mVersion.c_str());
     mediametrics_selfRecord(handle);
     mediametrics_delete(handle);
 }
@@ -471,16 +545,19 @@
 void DrmMetricsLogger::reportMediaDrmSessionOpened(const std::vector<uint8_t>& sessionId) const {
     mediametrics_handle_t handle(mediametrics_create("mediadrm.session_opened"));
     mediametrics_setCString(handle, "scheme", mScheme.c_str());
-    mediametrics_setInt64(handle, "uuid_msb", be64toh(mUuid[0]));
-    mediametrics_setInt64(handle, "uuid_lsb", be64toh(mUuid[1]));
+    mediametrics_setInt64(handle, "uuid_msb", mUuid[0]);
+    mediametrics_setInt64(handle, "uuid_lsb", mUuid[1]);
     mediametrics_setInt32(handle, "frontend", mFrontend);
+    mediametrics_setCString(handle, "version", mVersion.c_str());
     mediametrics_setCString(handle, "object_nonce", mObjNonce.c_str());
     const std::lock_guard<std::mutex> lock(mSessionMapMutex);
     auto it = mSessionMap.find(sessionId);
     if (it != mSessionMap.end()) {
         mediametrics_setCString(handle, "session_nonce", it->second.mNonce.c_str());
-        mediametrics_setInt64(handle, "requested_seucrity_level", it->second.mTargetSecurityLevel);
-        mediametrics_setInt64(handle, "opened_seucrity_level", it->second.mActualSecurityLevel);
+        mediametrics_setInt32(handle, "requested_security_level",
+                    DrmPluginSecurityLevelToJavaSecurityLevel(it->second.mTargetSecurityLevel));
+        mediametrics_setInt32(handle, "opened_security_level",
+                    DrmPluginSecurityLevelToJavaSecurityLevel(it->second.mActualSecurityLevel));
     }
     mediametrics_selfRecord(handle);
     mediametrics_delete(handle);
@@ -490,20 +567,22 @@
                                              const std::vector<uint8_t>& sessionId) const {
     mediametrics_handle_t handle(mediametrics_create("mediadrm.errored"));
     mediametrics_setCString(handle, "scheme", mScheme.c_str());
-    mediametrics_setInt64(handle, "uuid_msb", be64toh(mUuid[0]));
-    mediametrics_setInt64(handle, "uuid_lsb", be64toh(mUuid[1]));
+    mediametrics_setInt64(handle, "uuid_msb", mUuid[0]);
+    mediametrics_setInt64(handle, "uuid_lsb", mUuid[1]);
     mediametrics_setInt32(handle, "frontend", mFrontend);
+    mediametrics_setCString(handle, "version", mVersion.c_str());
     mediametrics_setCString(handle, "object_nonce", mObjNonce.c_str());
     if (!sessionId.empty()) {
         const std::lock_guard<std::mutex> lock(mSessionMapMutex);
         auto it = mSessionMap.find(sessionId);
         if (it != mSessionMap.end()) {
             mediametrics_setCString(handle, "session_nonce", it->second.mNonce.c_str());
-            mediametrics_setInt64(handle, "seucrity_level", it->second.mActualSecurityLevel);
+            mediametrics_setInt32(handle, "security_level",
+                        DrmPluginSecurityLevelToJavaSecurityLevel(it->second.mActualSecurityLevel));
         }
     }
     mediametrics_setCString(handle, "api", api);
-    mediametrics_setInt32(handle, "error_code", error_code);
+    mediametrics_setInt32(handle, "error_code", MediaErrorToJavaError(error_code));
     mediametrics_setInt32(handle, "cdm_err", error_code.getCdmErr());
     mediametrics_setInt32(handle, "oem_err", error_code.getOemErr());
     mediametrics_setInt32(handle, "error_context", error_code.getContext());
diff --git a/drm/libmediadrm/DrmSessionManager.cpp b/drm/libmediadrm/DrmSessionManager.cpp
index e31395d..301538f 100644
--- a/drm/libmediadrm/DrmSessionManager.cpp
+++ b/drm/libmediadrm/DrmSessionManager.cpp
@@ -34,6 +34,7 @@
 namespace android {
 
 using aidl::android::media::MediaResourceParcel;
+using aidl::android::media::ClientInfoParcel;
 
 namespace {
 void ResourceManagerServiceDied(void* cookie) {
@@ -137,7 +138,10 @@
 
     static int64_t clientId = 0;
     mSessionMap[toStdVec(sessionId)] = (SessionInfo){pid, uid, clientId};
-    mService->addResource(pid, uid, clientId++, drm, toResourceVec(sessionId, INT64_MAX));
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(pid),
+                                .uid = static_cast<int32_t>(uid),
+                                .id = clientId++};
+    mService->addResource(clientInfo, drm, toResourceVec(sessionId, INT64_MAX));
 }
 
 void DrmSessionManager::useSession(const Vector<uint8_t> &sessionId) {
@@ -150,7 +154,10 @@
     }
 
     auto info = it->second;
-    mService->addResource(info.pid, info.uid, info.clientId, NULL, toResourceVec(sessionId, -1));
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(info.pid),
+                                .uid = static_cast<int32_t>(info.uid),
+                                .id = info.clientId};
+    mService->addResource(clientInfo, NULL, toResourceVec(sessionId, -1));
 }
 
 void DrmSessionManager::removeSession(const Vector<uint8_t> &sessionId) {
@@ -164,7 +171,10 @@
 
     auto info = it->second;
     // removeClient instead of removeSession because each client has only one session
-    mService->removeClient(info.pid, info.clientId);
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(info.pid),
+                                .uid = static_cast<int32_t>(info.uid),
+                                .id = info.clientId};
+    mService->removeClient(clientInfo);
     mSessionMap.erase(it);
 }
 
@@ -182,9 +192,13 @@
 
     // cannot update mSessionMap because we do not know which sessionId is reclaimed;
     // we rely on IResourceManagerClient to removeSession in reclaimResource
-    Vector<uint8_t> dummy;
+    Vector<uint8_t> placeHolder;
     bool success;
-    ScopedAStatus status = service->reclaimResource(callingPid, toResourceVec(dummy, INT64_MAX), &success);
+    uid_t uid = AIBinder_getCallingUid();
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(callingPid),
+                                .uid = static_cast<int32_t>(uid)};
+    ScopedAStatus status = service->reclaimResource(
+        clientInfo, toResourceVec(placeHolder, INT64_MAX), &success);
     return status.isOk() && success;
 }
 
diff --git a/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h b/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h
index f4e3c3e..7666f04 100644
--- a/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h
+++ b/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h
@@ -25,10 +25,58 @@
 
 namespace android {
 
+enum {
+    JERROR_DRM_UNKNOWN = 0,
+    JERROR_DRM_NO_LICENSE = 1,
+    JERROR_DRM_LICENSE_EXPIRED = 2,
+    JERROR_DRM_RESOURCE_BUSY = 3,
+    JERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION = 4,
+    JERROR_DRM_SESSION_NOT_OPENED = 5,
+    JERROR_DRM_CANNOT_HANDLE = 6,
+    JERROR_DRM_INSUFFICIENT_SECURITY = 7,
+    JERROR_DRM_FRAME_TOO_LARGE = 8,
+    JERROR_DRM_SESSION_LOST_STATE = 9,
+    JERROR_DRM_CERTIFICATE_MALFORMED = 10,
+    JERROR_DRM_CERTIFICATE_MISSING = 11,
+    JERROR_DRM_CRYPTO_LIBRARY = 12,
+    JERROR_DRM_GENERIC_OEM = 13,
+    JERROR_DRM_GENERIC_PLUGIN = 14,
+    JERROR_DRM_INIT_DATA = 15,
+    JERROR_DRM_KEY_NOT_LOADED = 16,
+    JERROR_DRM_LICENSE_PARSE = 17,
+    JERROR_DRM_LICENSE_POLICY = 18,
+    JERROR_DRM_LICENSE_RELEASE = 19,
+    JERROR_DRM_LICENSE_REQUEST_REJECTED = 20,
+    JERROR_DRM_LICENSE_RESTORE = 21,
+    JERROR_DRM_LICENSE_STATE = 22,
+    JERROR_DRM_MEDIA_FRAMEWORK = 23,
+    JERROR_DRM_PROVISIONING_CERTIFICATE = 24,
+    JERROR_DRM_PROVISIONING_CONFIG = 25,
+    JERROR_DRM_PROVISIONING_PARSE = 26,
+    JERROR_DRM_PROVISIONING_REQUEST_REJECTED = 27,
+    JERROR_DRM_PROVISIONING_RETRY = 28,
+    JERROR_DRM_RESOURCE_CONTENTION = 29,
+    JERROR_DRM_SECURE_STOP_RELEASE = 30,
+    JERROR_DRM_STORAGE_READ = 31,
+    JERROR_DRM_STORAGE_WRITE = 32,
+    JERROR_DRM_ZERO_SUBSAMPLES = 33,
+};
+
+enum {
+    JSecurityLevelUnknown = 0,
+    JSecurityLevelSwSecureCrypto = 1,
+    JSecurityLevelSwSecureDecode = 2,
+    JSecurityLevelHwSecureCrypto = 3,
+    JSecurityLevelHwSecureDecode = 4,
+    JSecurityLevelHwSecureAll = 5,
+    JSecurityLevelMax = 6,
+};
+
 struct SessionContext {
     std::string mNonce;
-    int64_t mTargetSecurityLevel;
+    DrmPlugin::SecurityLevel mTargetSecurityLevel;
     DrmPlugin::SecurityLevel mActualSecurityLevel;
+    std::string mVersion;
 };
 
 class DrmMetricsLogger : public IDrm {
@@ -161,6 +209,7 @@
     std::array<int64_t, 2> mUuid;
     std::string mObjNonce;
     std::string mScheme;
+    std::string mVersion;
     std::map<std::vector<uint8_t>, SessionContext> mSessionMap;
     mutable std::mutex mSessionMapMutex;
     IDrmFrontend mFrontend;
diff --git a/drm/libmediadrm/interface/mediadrm/DrmUtils.h b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
index 94cf743..2510f4e 100644
--- a/drm/libmediadrm/interface/mediadrm/DrmUtils.h
+++ b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
@@ -37,6 +37,7 @@
 #include <ctime>
 #include <deque>
 #include <endian.h>
+#include <inttypes.h>
 #include <iterator>
 #include <mutex>
 #include <string>
@@ -105,9 +106,9 @@
 void LogToBuffer(android_LogPriority level, const uint8_t uuid[16], const char *fmt, Args... args) {
     uint64_t uuid2[2] = {};
     std::memcpy(uuid2, uuid, sizeof(uuid2));
-    std::string uuidFmt("uuid=[%lx %lx] ");
+    std::string uuidFmt("uuid=[%" PRIx64 " %" PRIx64 "] ");
     uuidFmt += fmt;
-    LogToBuffer(level, uuidFmt.c_str(), htobe64(uuid2[0]), htobe64(uuid2[1]), args...);
+    LogToBuffer(level, uuidFmt.c_str(), betoh64(uuid2[0]), betoh64(uuid2[1]), args...);
 }
 
 #ifndef LOG2BE
@@ -281,7 +282,7 @@
               });
 
     logs.appendVector(allLogs);
-    return OK;
+    return toStatusT(err);
 }
 
 std::string GetExceptionMessage(const DrmStatus & err, const char *defaultMsg,
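A standalone illustration of the format-string fix above: uint64_t is unsigned long on LP64 and unsigned long long on 32-bit ABIs, so the PRIx64 macro from <inttypes.h> replaces the hard-coded "%lx"; the value below is arbitrary.

#include <cinttypes>
#include <cstdio>

int main() {
    const std::uint64_t uuidMsb = 0x0123456789abcdefULL;  // arbitrary example value
    std::printf("uuid=[%" PRIx64 "]\n", uuidMsb);
    return 0;
}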
diff --git a/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp
index afc9b6a..a63471f 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp
@@ -137,6 +137,8 @@
         *_aidl_return = static_cast<ssize_t>(offset);
         return toNdkScopedAStatus(Status::OK);
     } else if (in_args.mode == Mode::AES_CTR) {
+        if (!mSession) return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE,
+                    "session not found");
         size_t bytesDecrypted{};
         std::vector<int32_t> clearDataLengths;
         std::vector<int32_t> encryptedDataLengths;
@@ -149,6 +151,7 @@
             detailedError = "invalid decrypt parameter size";
             return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE, detailedError);
         }
+
         auto res =
                 mSession->decrypt(in_args.keyId.data(), in_args.iv.data(),
                                   srcPtr, static_cast<uint8_t*>(destPtr),
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
index 7bc320d..64a43b0 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
@@ -211,6 +211,10 @@
             _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0, "invalid decrypt parameter size");
             return Void();
         }
+        if (!mSession) {
+            _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0, "session not found");
+            return Void();
+        }
         Status_V1_2 res = mSession->decrypt(keyId.data(), iv.data(), srcPtr,
                 static_cast<uint8_t*>(destPtr), toVector(subSamples), &bytesDecrypted);
         if (res == Status_V1_2::OK) {
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index a22ec19..9aa896c 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -34,12 +34,15 @@
                 }
             ],
             "file_patterns": ["(?i)drm|crypto"]
-        }
-    ],
-
-    "imports": [
+        },
         {
-            "path": "frameworks/av/drm/mediadrm/plugins"
+            "name": "CtsMediaDrmFrameworkTestCases",
+            "options" : [
+                {
+                    "include-annotation": "android.platform.test.annotations.Presubmit"
+                }
+            ],
+            "file_patterns": ["(?i)drm|crypto"]
         }
     ],
 
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index b0852f5..8c2b940 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -43,6 +43,9 @@
 
 using ::android::BAD_VALUE;
 using ::android::OK;
+using ::android::String16;
+using ::android::String8;
+using ::android::status_t;
 using ::android::base::unexpected;
 
 using media::audio::common::AudioChannelLayout;
@@ -2184,6 +2187,11 @@
     audio_port_device_ext legacy{};
     RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
                     aidl.device, &legacy.type, legacy.address));
+    legacy.encapsulation_modes = VALUE_OR_RETURN(
+            aidl2legacy_AudioEncapsulationMode_mask(aidl.encapsulationModes));
+    legacy.encapsulation_metadata_types = VALUE_OR_RETURN(
+            aidl2legacy_AudioEncapsulationMetadataType_mask(
+                    aidl.encapsulationMetadataTypes));
     return legacy;
 }
 
@@ -2192,6 +2200,10 @@
     AudioPortDeviceExt aidl;
     aidl.device = VALUE_OR_RETURN(
             legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
+    aidl.encapsulationModes = VALUE_OR_RETURN(
+            legacy2aidl_AudioEncapsulationMode_mask(legacy.encapsulation_modes));
+    aidl.encapsulationMetadataTypes = VALUE_OR_RETURN(
+            legacy2aidl_AudioEncapsulationMetadataType_mask(legacy.encapsulation_metadata_types));
     return aidl;
 }
 
@@ -2699,6 +2711,10 @@
             return AUDIO_LATENCY_MODE_FREE;
         case AudioLatencyMode::LOW:
             return AUDIO_LATENCY_MODE_LOW;
+        case AudioLatencyMode::DYNAMIC_SPATIAL_AUDIO_SOFTWARE:
+            return AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE;
+        case AudioLatencyMode::DYNAMIC_SPATIAL_AUDIO_HARDWARE:
+            return AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE;
     }
     return unexpected(BAD_VALUE);
 }
@@ -2709,6 +2725,10 @@
             return AudioLatencyMode::FREE;
         case AUDIO_LATENCY_MODE_LOW:
             return AudioLatencyMode::LOW;
+        case AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE:
+            return AudioLatencyMode::DYNAMIC_SPATIAL_AUDIO_SOFTWARE;
+        case AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE:
+            return AudioLatencyMode::DYNAMIC_SPATIAL_AUDIO_HARDWARE;
     }
     return unexpected(BAD_VALUE);
 }
diff --git a/media/audioaidlconversion/AidlConversionEffect.cpp b/media/audioaidlconversion/AidlConversionEffect.cpp
index 2df97d1..611cfab 100644
--- a/media/audioaidlconversion/AidlConversionEffect.cpp
+++ b/media/audioaidlconversion/AidlConversionEffect.cpp
@@ -14,12 +14,16 @@
  * limitations under the License.
  */
 
+#include <cstdint>
+#include <inttypes.h>
 #include <utility>
 
 #define LOG_TAG "AidlConversionEffect"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
+#include <aidl/android/hardware/audio/effect/DefaultExtension.h>
+#include <aidl/android/hardware/audio/effect/VendorExtension.h>
 #include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionEffect.h>
 
@@ -32,16 +36,23 @@
 using ::aidl::android::hardware::audio::effect::AcousticEchoCanceler;
 using ::aidl::android::hardware::audio::effect::AutomaticGainControlV2;
 using ::aidl::android::hardware::audio::effect::BassBoost;
+using ::aidl::android::hardware::audio::effect::DefaultExtension;
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::Downmix;
 using ::aidl::android::hardware::audio::effect::DynamicsProcessing;
 using ::aidl::android::hardware::audio::effect::Flags;
 using ::aidl::android::hardware::audio::effect::Parameter;
 using ::aidl::android::hardware::audio::effect::PresetReverb;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::aidl::android::hardware::audio::effect::Visualizer;
 using ::aidl::android::media::audio::common::AudioDeviceDescription;
 
 using ::android::BAD_VALUE;
+using ::android::OK;
+using ::android::status_t;
 using ::android::base::unexpected;
+using ::android::effect::utils::EffectParamReader;
+using ::android::effect::utils::EffectParamWriter;
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // Converters
@@ -349,5 +360,116 @@
     return static_cast<int32_t>(aidl);
 }
 
+ConversionResult<uint32_t> aidl2legacy_Parameter_Visualizer_ScalingMode_uint32(
+        Visualizer::ScalingMode aidl) {
+    switch (aidl) {
+        case Visualizer::ScalingMode::NORMALIZED: {
+            return 0;
+        }
+        case Visualizer::ScalingMode::AS_PLAYED: {
+            return 1;
+        }
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Visualizer::ScalingMode> legacy2aidl_Parameter_Visualizer_uint32_ScalingMode(
+        uint32_t legacy) {
+    if (legacy == 0) {
+        return Visualizer::ScalingMode::NORMALIZED;
+    } else if (legacy == 1) {
+        return Visualizer::ScalingMode::AS_PLAYED;
+    } else {
+        return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_Visualizer_MeasurementMode_uint32(
+        Visualizer::MeasurementMode aidl) {
+    switch (aidl) {
+        case Visualizer::MeasurementMode::NONE: {
+            return 0;
+        }
+        case Visualizer::MeasurementMode::PEAK_RMS: {
+            return 1;
+        }
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Visualizer::MeasurementMode>
+legacy2aidl_Parameter_Visualizer_uint32_MeasurementMode(uint32_t legacy) {
+    if (legacy == 0) {
+        return Visualizer::MeasurementMode::NONE;
+    } else if (legacy == 1) {
+        return Visualizer::MeasurementMode::PEAK_RMS;
+    } else {
+        return unexpected(BAD_VALUE);
+    }
+}
+
+/**
+ * Copy the parameter area of effect_param_t to DefaultExtension::bytes.
+ */
+ConversionResult<VendorExtension> legacy2aidl_EffectParameterReader_Param_VendorExtension(
+        EffectParamReader& param) {
+    size_t len = param.getParameterSize();
+    DefaultExtension defaultExt;
+    defaultExt.bytes.resize(len);
+    RETURN_IF_ERROR(param.readFromParameter(defaultExt.bytes.data(), len));
+
+    VendorExtension ext;
+    ext.extension.setParcelable(defaultExt);
+    return ext;
+}
+
+/**
+ * Copy the data area of effect_param_t to DefaultExtension::bytes.
+ */
+ConversionResult<VendorExtension> legacy2aidl_EffectParameterReader_Data_VendorExtension(
+        EffectParamReader& param) {
+    size_t len = param.getValueSize();
+    DefaultExtension defaultExt;
+    defaultExt.bytes.resize(len);
+    RETURN_IF_ERROR(param.readFromValue(defaultExt.bytes.data(), len));
+
+    VendorExtension ext;
+    ext.extension.setParcelable(defaultExt);
+    return ext;
+}
+
+/**
+ * Copy DefaultExtension::bytes to the data area of effect_param_t.
+ */
+ConversionResult<status_t> aidl2legacy_VendorExtension_EffectParameterWriter_Data(
+        EffectParamWriter& param, VendorExtension ext) {
+    std::optional<DefaultExtension> defaultExt;
+    RETURN_IF_ERROR(ext.extension.getParcelable(&defaultExt));
+    if (!defaultExt.has_value()) {
+        return unexpected(BAD_VALUE);
+    }
+
+    RETURN_IF_ERROR(param.writeToValue(defaultExt->bytes.data(), defaultExt->bytes.size()));
+
+    return OK;
+}
+
+ConversionResult<Parameter> legacy2aidl_EffectParameterReader_ParameterExtension(
+        EffectParamReader& param) {
+    VendorExtension ext =
+            VALUE_OR_RETURN(legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+    return UNION_MAKE(Parameter, specific, UNION_MAKE(Parameter::Specific, vendorEffect, ext));
+}
+
+ConversionResult<::android::status_t> aidl2legacy_ParameterExtension_EffectParameterWriter(
+        const ::aidl::android::hardware::audio::effect::Parameter& aidl,
+        EffectParamWriter& legacy) {
+    VendorExtension ext = VALUE_OR_RETURN(
+            (::aidl::android::getParameterSpecific<Parameter, VendorExtension,
+                                                   Parameter::Specific::vendorEffect>(aidl)));
+    return VALUE_OR_RETURN_STATUS(
+            aidl2legacy_VendorExtension_EffectParameterWriter_Data(legacy, ext));
+}
+
 }  // namespace android
 }  // aidl
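A minimal sketch (not part of the patch) of the packaging convention the new helpers rely on: an opaque vendor parameter blob travels as DefaultExtension::bytes inside the ParcelableHolder field of VendorExtension.

#include <cstdint>
#include <vector>

#include <aidl/android/hardware/audio/effect/DefaultExtension.h>
#include <aidl/android/hardware/audio/effect/VendorExtension.h>

using ::aidl::android::hardware::audio::effect::DefaultExtension;
using ::aidl::android::hardware::audio::effect::VendorExtension;

static VendorExtension wrapVendorBytes(const std::vector<std::uint8_t>& blob) {
    DefaultExtension defaultExt;
    defaultExt.bytes = blob;                  // raw parameter/value bytes from effect_param_t
    VendorExtension ext;
    ext.extension.setParcelable(defaultExt);  // same call the conversion helpers make
    return ext;
}

aidl2legacy_VendorExtension_EffectParameterWriter_Data() above performs the reverse step, unpacking the same bytes back into the effect_param_t value area.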
diff --git a/media/audioaidlconversion/AidlConversionNdk.cpp b/media/audioaidlconversion/AidlConversionNdk.cpp
index 7c63339..71c547c 100644
--- a/media/audioaidlconversion/AidlConversionNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionNdk.cpp
@@ -14,14 +14,18 @@
  * limitations under the License.
  */
 
+#include <sstream>
 #include <utility>
 
+#include <system/audio.h>
 #define LOG_TAG "AidlConversionNdk"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
+#include <utils/Errors.h>
 
 #include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionNdk.h>
+#include <Utils.h>
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AIDL NDK backend to legacy audio data structure conversion utilities.
@@ -29,6 +33,51 @@
 namespace aidl {
 namespace android {
 
+using hardware::audio::common::PlaybackTrackMetadata;
+using hardware::audio::common::RecordTrackMetadata;
+using ::android::BAD_VALUE;
+using ::android::OK;
+
+namespace {
+
+::android::status_t combineString(
+        const std::vector<std::string>& v, char separator, std::string* result) {
+    std::ostringstream oss;
+    for (const auto& s : v) {
+        if (oss.tellp() > 0) {
+            oss << separator;
+        }
+        if (s.find(separator) == std::string::npos) {
+            oss << s;
+        } else {
+            ALOGE("%s: string \"%s\" contains separator character \"%c\"",
+                    __func__, s.c_str(), separator);
+            return BAD_VALUE;
+        }
+    }
+    *result = oss.str();
+    return OK;
+}
+
+std::vector<std::string> splitString(const std::string& s, char separator) {
+    std::istringstream iss(s);
+    std::string t;
+    std::vector<std::string> result;
+    while (std::getline(iss, t, separator)) {
+        result.push_back(std::move(t));
+    }
+    return result;
+}
+
+std::vector<std::string> filterOutNonVendorTags(const std::vector<std::string>& tags) {
+    std::vector<std::string> result;
+    std::copy_if(tags.begin(), tags.end(), std::back_inserter(result),
+            ::aidl::android::hardware::audio::common::maybeVendorExtension);
+    return result;
+}
+
+}  // namespace
+
 // buffer_provider_t is not supported thus skipped
 ConversionResult<buffer_config_t> aidl2legacy_AudioConfigBase_buffer_config_t(
         const media::audio::common::AudioConfigBase& aidl, bool isInput) {
@@ -68,5 +117,79 @@
     return aidl;
 }
 
+::android::status_t aidl2legacy_AudioAttributesTags(
+        const std::vector<std::string>& aidl, char* legacy) {
+    std::string aidlTags;
+    RETURN_STATUS_IF_ERROR(combineString(
+                    filterOutNonVendorTags(aidl), AUDIO_ATTRIBUTES_TAGS_SEPARATOR, &aidlTags));
+    RETURN_STATUS_IF_ERROR(aidl2legacy_string(aidlTags, legacy, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE));
+    return OK;
+}
+
+ConversionResult<std::vector<std::string>> legacy2aidl_AudioAttributesTags(const char* legacy) {
+    std::string legacyTags = VALUE_OR_RETURN(legacy2aidl_string(
+                    legacy, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE));
+    return filterOutNonVendorTags(splitString(legacyTags, AUDIO_ATTRIBUTES_TAGS_SEPARATOR));
+}
+
+ConversionResult<playback_track_metadata_v7>
+aidl2legacy_PlaybackTrackMetadata_playback_track_metadata_v7(const PlaybackTrackMetadata& aidl) {
+    playback_track_metadata_v7 legacy;
+    legacy.base.usage = VALUE_OR_RETURN(aidl2legacy_AudioUsage_audio_usage_t(aidl.usage));
+    legacy.base.content_type = VALUE_OR_RETURN(aidl2legacy_AudioContentType_audio_content_type_t(
+                    aidl.contentType));
+    legacy.base.gain = aidl.gain;
+    legacy.channel_mask = VALUE_OR_RETURN(aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                    aidl.channelMask, false /*isInput*/));
+    RETURN_IF_ERROR(aidl2legacy_AudioAttributesTags(aidl.tags, legacy.tags));
+    return legacy;
+}
+
+ConversionResult<PlaybackTrackMetadata>
+legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata(
+        const playback_track_metadata_v7& legacy) {
+    PlaybackTrackMetadata aidl;
+    aidl.usage = VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.base.usage));
+    aidl.contentType = VALUE_OR_RETURN(legacy2aidl_audio_content_type_t_AudioContentType(
+                    legacy.base.content_type));
+    aidl.gain = legacy.base.gain;
+    aidl.channelMask = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                    legacy.channel_mask, false /*isInput*/));
+    aidl.tags = VALUE_OR_RETURN(legacy2aidl_AudioAttributesTags(legacy.tags));
+    return aidl;
+}
+
+ConversionResult<record_track_metadata_v7>
+aidl2legacy_RecordTrackMetadata_record_track_metadata_v7(const RecordTrackMetadata& aidl) {
+    record_track_metadata_v7 legacy;
+    legacy.base.source = VALUE_OR_RETURN(aidl2legacy_AudioSource_audio_source_t(aidl.source));
+    legacy.base.gain = aidl.gain;
+    if (aidl.destinationDevice.has_value()) {
+        RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(aidl.destinationDevice.value(),
+                        &legacy.base.dest_device, legacy.base.dest_device_address));
+    } else {
+        legacy.base.dest_device = AUDIO_DEVICE_NONE;
+    }
+    legacy.channel_mask = VALUE_OR_RETURN(aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                    aidl.channelMask, true /*isInput*/));
+    RETURN_IF_ERROR(aidl2legacy_AudioAttributesTags(aidl.tags, legacy.tags));
+    return legacy;
+}
+
+ConversionResult<RecordTrackMetadata>
+legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(const record_track_metadata_v7& legacy) {
+    RecordTrackMetadata aidl;
+    aidl.source = VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSource(legacy.base.source));
+    aidl.gain = legacy.base.gain;
+    if (legacy.base.dest_device != AUDIO_DEVICE_NONE) {
+        aidl.destinationDevice = VALUE_OR_RETURN(legacy2aidl_audio_device_AudioDevice(
+                        legacy.base.dest_device, legacy.base.dest_device_address));
+    }
+    aidl.channelMask = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                    legacy.channel_mask, true /*isInput*/));
+    aidl.tags = VALUE_OR_RETURN(legacy2aidl_AudioAttributesTags(legacy.tags));
+    return aidl;
+}
+
 }  // namespace android
 }  // aidl
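
The tag helpers above apply `filterOutNonVendorTags` in both directions, so only tags that follow the vendor-extension naming convention survive the AIDL/legacy round trip. A minimal usage sketch, assuming the NDK conversion library is linked and that "VX_FOO_42" matches the vendor pattern while "oem=1" does not (both example tags are made up):

    // Sketch only; everything except the two conversion functions is illustrative.
    #include <string>
    #include <vector>

    #include <media/AidlConversionNdk.h>
    #include <system/audio.h>

    void tagRoundTripSketch() {
        char legacyTags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE] = {};
        const std::vector<std::string> aidlTags{"VX_FOO_42", "oem=1"};
        if (::aidl::android::aidl2legacy_AudioAttributesTags(aidlTags, legacyTags) == ::android::OK) {
            // Expected to contain only "VX_FOO_42"; "oem=1" is filtered out.
            auto roundTripped = ::aidl::android::legacy2aidl_AudioAttributesTags(legacyTags);
            (void)roundTripped;
        }
    }
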
diff --git a/media/audioaidlconversion/Android.bp b/media/audioaidlconversion/Android.bp
index c0024ef..bdb3a2c 100644
--- a/media/audioaidlconversion/Android.bp
+++ b/media/audioaidlconversion/Android.bp
@@ -135,12 +135,16 @@
     ],
     defaults: [
         "audio_aidl_conversion_common_default",
+        "latest_android_hardware_audio_common_ndk_shared",
         "latest_android_media_audio_common_types_ndk_shared",
     ],
     shared_libs: [
         "libbinder_ndk",
         "libbase",
     ],
+    static_libs: [
+        "libaudioaidlcommon",
+    ],
     cflags: [
         "-DBACKEND_NDK",
     ],
diff --git a/media/audioaidlconversion/TEST_MAPPING b/media/audioaidlconversion/TEST_MAPPING
new file mode 100644
index 0000000..a0c9759
--- /dev/null
+++ b/media/audioaidlconversion/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+  "presubmit": [
+    {
+      "name": "audio_aidl_ndk_conversion_tests"
+    }
+  ]
+}
diff --git a/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h b/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
new file mode 100644
index 0000000..9100892
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
@@ -0,0 +1,433 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// WARNING: This file is intended for multiple inclusion.
+// Do not include directly, use 'AidlConversionCppNdk.h'.
+#if (defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_NDK)) || \
+    (!defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_CPP))
+#if defined(BACKEND_NDK_IMPL)
+#define AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_NDK
+#else
+#define AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_CPP
+#endif  // BACKEND_NDK_IMPL
+
+#include <limits>
+#include <type_traits>
+
+/**
+ * Can handle conversion between AIDL (both the CPP and NDK backends) and legacy types.
+ * The backend is selected by preprocessor macros set via cflags in Android.bp.
+ */
+#if defined(BACKEND_NDK_IMPL)
+#define PREFIX(f) <aidl/f>
+#else
+#define PREFIX(f) <f>
+#endif
+
+#include PREFIX(android/media/audio/common/AudioChannelLayout.h)
+#include PREFIX(android/media/audio/common/AudioConfig.h)
+#include PREFIX(android/media/audio/common/AudioConfigBase.h)
+#include PREFIX(android/media/audio/common/AudioContentType.h)
+#include PREFIX(android/media/audio/common/AudioDeviceDescription.h)
+#include PREFIX(android/media/audio/common/AudioDualMonoMode.h)
+#include PREFIX(android/media/audio/common/AudioEncapsulationMetadataType.h)
+#include PREFIX(android/media/audio/common/AudioEncapsulationMode.h)
+#include PREFIX(android/media/audio/common/AudioEncapsulationType.h)
+#include PREFIX(android/media/audio/common/AudioFormatDescription.h)
+#include PREFIX(android/media/audio/common/AudioGain.h)
+#include PREFIX(android/media/audio/common/AudioGainConfig.h)
+#include PREFIX(android/media/audio/common/AudioGainMode.h)
+#include PREFIX(android/media/audio/common/AudioInputFlags.h)
+#include PREFIX(android/media/audio/common/AudioIoFlags.h)
+#include PREFIX(android/media/audio/common/AudioLatencyMode.h)
+#include PREFIX(android/media/audio/common/AudioMode.h)
+#include PREFIX(android/media/audio/common/AudioOffloadInfo.h)
+#include PREFIX(android/media/audio/common/AudioOutputFlags.h)
+#include PREFIX(android/media/audio/common/AudioPort.h)
+#include PREFIX(android/media/audio/common/AudioPortConfig.h)
+#include PREFIX(android/media/audio/common/AudioPortExt.h)
+#include PREFIX(android/media/audio/common/AudioPortMixExt.h)
+#include PREFIX(android/media/audio/common/AudioPlaybackRate.h)
+#include PREFIX(android/media/audio/common/AudioProfile.h)
+#include PREFIX(android/media/audio/common/AudioSource.h)
+#include PREFIX(android/media/audio/common/AudioStandard.h)
+#include PREFIX(android/media/audio/common/AudioUsage.h)
+#include PREFIX(android/media/audio/common/AudioUuid.h)
+#include PREFIX(android/media/audio/common/ExtraAudioDescriptor.h)
+#include PREFIX(android/media/audio/common/Int.h)
+#include PREFIX(android/media/audio/common/MicrophoneDynamicInfo.h)
+#include PREFIX(android/media/audio/common/MicrophoneInfo.h)
+#undef PREFIX
+
+#include <system/audio.h>
+#include <system/audio_effect.h>
+
+#if defined(BACKEND_NDK_IMPL)
+namespace aidl {
+#endif
+
+namespace android {
+
+// maxSize is the size of the C-string buffer (including the 0-terminator), NOT the max length of
+// the string.
+::android::status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize);
+ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize);
+
+ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_module_handle_t_int32_t(audio_module_handle_t legacy);
+
+ConversionResult<audio_io_handle_t> aidl2legacy_int32_t_audio_io_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_io_handle_t_int32_t(audio_io_handle_t legacy);
+
+ConversionResult<audio_port_handle_t> aidl2legacy_int32_t_audio_port_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_port_handle_t_int32_t(audio_port_handle_t legacy);
+
+ConversionResult<audio_patch_handle_t> aidl2legacy_int32_t_audio_patch_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_patch_handle_t_int32_t(audio_patch_handle_t legacy);
+
+ConversionResult<audio_unique_id_t> aidl2legacy_int32_t_audio_unique_id_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_unique_id_t_int32_t(audio_unique_id_t legacy);
+
+ConversionResult<audio_hw_sync_t> aidl2legacy_int32_t_audio_hw_sync_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_hw_sync_t_int32_t(audio_hw_sync_t legacy);
+
+ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy);
+
+ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy);
+
+ConversionResult<uid_t> aidl2legacy_int32_t_uid_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_uid_t_int32_t(uid_t legacy);
+
+ConversionResult<::android::String8> aidl2legacy_string_view_String8(std::string_view aidl);
+ConversionResult<std::string> legacy2aidl_String8_string(const ::android::String8& legacy);
+
+ConversionResult<::android::String16> aidl2legacy_string_view_String16(std::string_view aidl);
+ConversionResult<std::string> legacy2aidl_String16_string(const ::android::String16& legacy);
+
+ConversionResult<std::optional<::android::String16>>
+aidl2legacy_optional_string_view_optional_String16(std::optional<std::string_view> aidl);
+ConversionResult<std::optional<std::string_view>>
+legacy2aidl_optional_String16_optional_string(std::optional<::android::String16> legacy);
+
+ConversionResult<audio_channel_mask_t> aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+        const media::audio::common::AudioChannelLayout& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioChannelLayout>
+legacy2aidl_audio_channel_mask_t_AudioChannelLayout(audio_channel_mask_t legacy, bool isInput);
+
+audio_channel_mask_t aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
+        int aidlLayout, bool isInput);
+int legacy2aidl_audio_channel_mask_t_bits_AudioChannelLayout_layout(
+        audio_channel_mask_t legacy, bool isInput);
+
+enum class AudioPortDirection {
+    INPUT, OUTPUT
+};
+ConversionResult<AudioPortDirection> portDirection(audio_port_role_t role, audio_port_type_t type);
+ConversionResult<audio_port_role_t> portRole(AudioPortDirection direction, audio_port_type_t type);
+
+ConversionResult<audio_config_t>
+aidl2legacy_AudioConfig_audio_config_t(const media::audio::common::AudioConfig& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioConfig>
+legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy, bool isInput);
+
+ConversionResult<audio_config_base_t>
+aidl2legacy_AudioConfigBase_audio_config_base_t(
+        const media::audio::common::AudioConfigBase& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioConfigBase>
+legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy, bool isInput);
+
+ConversionResult<audio_input_flags_t>
+aidl2legacy_AudioInputFlags_audio_input_flags_t(media::audio::common::AudioInputFlags aidl);
+ConversionResult<media::audio::common::AudioInputFlags>
+legacy2aidl_audio_input_flags_t_AudioInputFlags(audio_input_flags_t legacy);
+
+ConversionResult<audio_output_flags_t>
+aidl2legacy_AudioOutputFlags_audio_output_flags_t(media::audio::common::AudioOutputFlags aidl);
+ConversionResult<media::audio::common::AudioOutputFlags>
+legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
+
+ConversionResult<audio_input_flags_t> aidl2legacy_int32_t_audio_input_flags_t_mask(
+        int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_input_flags_t_int32_t_mask(
+        audio_input_flags_t legacy);
+
+ConversionResult<audio_output_flags_t> aidl2legacy_int32_t_audio_output_flags_t_mask(
+        int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_output_flags_t_int32_t_mask(
+        audio_output_flags_t legacy);
+
+ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
+        const media::audio::common::AudioIoFlags& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+        const audio_io_flags& legacy, bool isInput);
+
+ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy);
+
+ConversionResult<audio_content_type_t>
+aidl2legacy_AudioContentType_audio_content_type_t(
+        media::audio::common::AudioContentType aidl);
+ConversionResult<media::audio::common::AudioContentType>
+legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy);
+
+ConversionResult<audio_devices_t> aidl2legacy_AudioDeviceDescription_audio_devices_t(
+        const media::audio::common::AudioDeviceDescription& aidl);
+ConversionResult<media::audio::common::AudioDeviceDescription>
+legacy2aidl_audio_devices_t_AudioDeviceDescription(audio_devices_t legacy);
+
+::android::status_t aidl2legacy_AudioDevice_audio_device(
+        const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
+        char* legacyAddress);
+::android::status_t aidl2legacy_AudioDevice_audio_device(
+        const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
+        ::android::String8* legacyAddress);
+::android::status_t aidl2legacy_AudioDevice_audio_device(
+        const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
+        std::string* legacyAddress);
+
+ConversionResult<media::audio::common::AudioDevice> legacy2aidl_audio_device_AudioDevice(
+        audio_devices_t legacyType, const char* legacyAddress);
+ConversionResult<media::audio::common::AudioDevice> legacy2aidl_audio_device_AudioDevice(
+        audio_devices_t legacyType, const ::android::String8& legacyAddress);
+
+ConversionResult<audio_extra_audio_descriptor>
+aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
+        const media::audio::common::ExtraAudioDescriptor& aidl);
+
+ConversionResult<media::audio::common::ExtraAudioDescriptor>
+legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
+        const audio_extra_audio_descriptor& legacy);
+
+ConversionResult<audio_encapsulation_metadata_type_t>
+aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(
+        media::audio::common::AudioEncapsulationMetadataType aidl);
+ConversionResult<media::audio::common::AudioEncapsulationMetadataType>
+legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
+        audio_encapsulation_metadata_type_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_AudioEncapsulationMetadataType_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy);
+
+ConversionResult<audio_encapsulation_mode_t>
+aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(
+        media::audio::common::AudioEncapsulationMode aidl);
+ConversionResult<media::audio::common::AudioEncapsulationMode>
+legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(audio_encapsulation_mode_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_AudioEncapsulationMode_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_AudioEncapsulationMode_mask(uint32_t legacy);
+
+ConversionResult<audio_encapsulation_type_t>
+aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
+        const media::audio::common::AudioEncapsulationType& aidl);
+ConversionResult<media::audio::common::AudioEncapsulationType>
+legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
+        const audio_encapsulation_type_t& legacy);
+
+ConversionResult<audio_format_t> aidl2legacy_AudioFormatDescription_audio_format_t(
+        const media::audio::common::AudioFormatDescription& aidl);
+ConversionResult<media::audio::common::AudioFormatDescription>
+legacy2aidl_audio_format_t_AudioFormatDescription(audio_format_t legacy);
+
+ConversionResult<audio_gain_mode_t>
+aidl2legacy_AudioGainMode_audio_gain_mode_t(media::audio::common::AudioGainMode aidl);
+ConversionResult<media::audio::common::AudioGainMode>
+legacy2aidl_audio_gain_mode_t_AudioGainMode(audio_gain_mode_t legacy);
+
+ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy);
+
+ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
+        const media::audio::common::AudioGainConfig& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioGainConfig>
+legacy2aidl_audio_gain_config_AudioGainConfig(const audio_gain_config& legacy, bool isInput);
+
+ConversionResult<audio_gain>
+aidl2legacy_AudioGain_audio_gain(const media::audio::common::AudioGain& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioGain>
+legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy, bool isInput);
+
+ConversionResult<audio_input_flags_t>
+aidl2legacy_AudioInputFlags_audio_input_flags_t(media::audio::common::AudioInputFlags aidl);
+ConversionResult<media::audio::common::AudioInputFlags>
+legacy2aidl_audio_input_flags_t_AudioInputFlags(audio_input_flags_t legacy);
+
+ConversionResult<audio_mode_t>
+aidl2legacy_AudioMode_audio_mode_t(media::audio::common::AudioMode aidl);
+ConversionResult<media::audio::common::AudioMode>
+legacy2aidl_audio_mode_t_AudioMode(audio_mode_t legacy);
+
+ConversionResult<audio_offload_info_t>
+aidl2legacy_AudioOffloadInfo_audio_offload_info_t(
+        const media::audio::common::AudioOffloadInfo& aidl);
+ConversionResult<media::audio::common::AudioOffloadInfo>
+legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy);
+
+ConversionResult<audio_output_flags_t>
+aidl2legacy_AudioOutputFlags_audio_output_flags_t(media::audio::common::AudioOutputFlags aidl);
+ConversionResult<media::audio::common::AudioOutputFlags>
+legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
+ConversionResult<audio_port_config_mix_ext_usecase>
+aidl2legacy_AudioPortMixExtUseCase_audio_port_config_mix_ext_usecase(
+        const media::audio::common::AudioPortMixExtUseCase& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioPortMixExtUseCase>
+legacy2aidl_audio_port_config_mix_ext_usecase_AudioPortMixExtUseCase(
+        const audio_port_config_mix_ext_usecase& legacy, bool isInput);
+
+ConversionResult<audio_port_config_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
+        const media::audio::common::AudioPortDeviceExt& aidl);
+ConversionResult<media::audio::common::AudioPortDeviceExt>
+        legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
+        const audio_port_config_device_ext& legacy);
+
+::android::status_t aidl2legacy_AudioPortConfig_audio_port_config(
+        const media::audio::common::AudioPortConfig& aidl, bool isInput,
+        audio_port_config* legacy, int32_t* portId);
+ConversionResult<media::audio::common::AudioPortConfig>
+legacy2aidl_audio_port_config_AudioPortConfig(
+        const audio_port_config& legacy, bool isInput, int32_t portId);
+
+ConversionResult<audio_port_mix_ext> aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
+        const media::audio::common::AudioPortMixExt& aidl);
+ConversionResult<media::audio::common::AudioPortMixExt>
+legacy2aidl_audio_port_mix_ext_AudioPortMixExt(
+        const audio_port_mix_ext& legacy);
+
+ConversionResult<audio_port_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
+        const media::audio::common::AudioPortDeviceExt& aidl);
+ConversionResult<media::audio::common::AudioPortDeviceExt>
+legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
+        const audio_port_device_ext& legacy);
+
+ConversionResult<audio_port_v7>
+aidl2legacy_AudioPort_audio_port_v7(
+        const media::audio::common::AudioPort& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioPort>
+legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy, bool isInput);
+
+ConversionResult<audio_profile> aidl2legacy_AudioProfile_audio_profile(
+        const media::audio::common::AudioProfile& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioProfile> legacy2aidl_audio_profile_AudioProfile(
+        const audio_profile& legacy, bool isInput);
+
+ConversionResult<audio_standard_t> aidl2legacy_AudioStandard_audio_standard_t(
+        media::audio::common::AudioStandard aidl);
+ConversionResult<media::audio::common::AudioStandard> legacy2aidl_audio_standard_t_AudioStandard(
+        audio_standard_t legacy);
+
+ConversionResult<audio_source_t> aidl2legacy_AudioSource_audio_source_t(
+        media::audio::common::AudioSource aidl);
+ConversionResult<media::audio::common::AudioSource> legacy2aidl_audio_source_t_AudioSource(
+        audio_source_t legacy);
+
+ConversionResult<audio_usage_t> aidl2legacy_AudioUsage_audio_usage_t(
+        media::audio::common::AudioUsage aidl);
+ConversionResult<media::audio::common::AudioUsage> legacy2aidl_audio_usage_t_AudioUsage(
+        audio_usage_t legacy);
+
+ConversionResult<audio_uuid_t> aidl2legacy_AudioUuid_audio_uuid_t(
+        const media::audio::common::AudioUuid &aidl);
+ConversionResult<media::audio::common::AudioUuid> legacy2aidl_audio_uuid_t_AudioUuid(
+        const audio_uuid_t& legacy);
+
+ConversionResult<audio_dual_mono_mode_t>
+aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(media::audio::common::AudioDualMonoMode aidl);
+ConversionResult<media::audio::common::AudioDualMonoMode>
+legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(audio_dual_mono_mode_t legacy);
+
+ConversionResult<audio_timestretch_fallback_mode_t>
+aidl2legacy_TimestretchFallbackMode_audio_timestretch_fallback_mode_t(
+        media::audio::common::AudioPlaybackRate::TimestretchFallbackMode aidl);
+ConversionResult<media::audio::common::AudioPlaybackRate::TimestretchFallbackMode>
+legacy2aidl_audio_timestretch_fallback_mode_t_TimestretchFallbackMode(
+        audio_timestretch_fallback_mode_t legacy);
+
+ConversionResult<audio_timestretch_stretch_mode_t>
+aidl2legacy_TimestretchMode_audio_timestretch_stretch_mode_t(
+        media::audio::common::AudioPlaybackRate::TimestretchMode aidl);
+ConversionResult<media::audio::common::AudioPlaybackRate::TimestretchMode>
+legacy2aidl_audio_timestretch_stretch_mode_t_TimestretchMode(
+        audio_timestretch_stretch_mode_t legacy);
+
+ConversionResult<audio_playback_rate_t>
+aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(
+        const media::audio::common::AudioPlaybackRate& aidl);
+ConversionResult<media::audio::common::AudioPlaybackRate>
+legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(const audio_playback_rate_t& legacy);
+
+ConversionResult<audio_latency_mode_t>
+aidl2legacy_AudioLatencyMode_audio_latency_mode_t(media::audio::common::AudioLatencyMode aidl);
+ConversionResult<media::audio::common::AudioLatencyMode>
+legacy2aidl_audio_latency_mode_t_AudioLatencyMode(audio_latency_mode_t legacy);
+
+ConversionResult<audio_microphone_location_t>
+aidl2legacy_MicrophoneInfoLocation_audio_microphone_location_t(
+        media::audio::common::MicrophoneInfo::Location aidl);
+ConversionResult<media::audio::common::MicrophoneInfo::Location>
+legacy2aidl_audio_microphone_location_t_MicrophoneInfoLocation(audio_microphone_location_t legacy);
+
+ConversionResult<audio_microphone_group_t> aidl2legacy_int32_t_audio_microphone_group_t(
+        int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_microphone_group_t_int32_t(
+        audio_microphone_group_t legacy);
+
+ConversionResult<audio_microphone_directionality_t>
+aidl2legacy_MicrophoneInfoDirectionality_audio_microphone_directionality_t(
+        media::audio::common::MicrophoneInfo::Directionality aidl);
+ConversionResult<media::audio::common::MicrophoneInfo::Directionality>
+legacy2aidl_audio_microphone_directionality_t_MicrophoneInfoDirectionality(
+        audio_microphone_directionality_t legacy);
+
+ConversionResult<audio_microphone_coordinate>
+aidl2legacy_MicrophoneInfoCoordinate_audio_microphone_coordinate(
+        const media::audio::common::MicrophoneInfo::Coordinate& aidl);
+ConversionResult<media::audio::common::MicrophoneInfo::Coordinate>
+legacy2aidl_audio_microphone_coordinate_MicrophoneInfoCoordinate(
+        const audio_microphone_coordinate& legacy);
+
+ConversionResult<audio_microphone_channel_mapping_t>
+aidl2legacy_MicrophoneDynamicInfoChannelMapping_audio_microphone_channel_mapping_t(
+        media::audio::common::MicrophoneDynamicInfo::ChannelMapping aidl);
+ConversionResult<media::audio::common::MicrophoneDynamicInfo::ChannelMapping>
+legacy2aidl_audio_microphone_channel_mapping_t_MicrophoneDynamicInfoChannelMapping(
+        audio_microphone_channel_mapping_t legacy);
+
+ConversionResult<audio_microphone_characteristic_t>
+aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t(
+        const media::audio::common::MicrophoneInfo& aidlInfo,
+        const media::audio::common::MicrophoneDynamicInfo& aidlDynamic);
+::android::status_t
+legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfos(
+        const audio_microphone_characteristic_t& legacy,
+        media::audio::common::MicrophoneInfo* aidlInfo,
+        media::audio::common::MicrophoneDynamicInfo* aidlDynamic);
+
+}  // namespace android
+
+#if defined(BACKEND_NDK_IMPL)
+} // aidl
+#endif
+
+// (defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_NDK)) || \
+// (!defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_CPP))
+#endif
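
The split include guard above is what makes double inclusion from the wrapper header work: the pass with BACKEND_NDK_IMPL defined emits the declarations inside `namespace aidl::android` against the NDK AIDL types, and the pass without it emits the same declarations inside `namespace android` against the CPP AIDL types. A hedged sketch of a client built with both backends (function and variable names are illustrative):

    // Assumes the including target is compiled with -DBACKEND_CPP_NDK.
    #include <media/AidlConversionCppNdk.h>
    #include <system/audio.h>

    void bothBackendsSketch() {
        // CPP backend: the result type lives under ::android::media::audio::common.
        auto cpp = ::android::legacy2aidl_audio_usage_t_AudioUsage(AUDIO_USAGE_MEDIA);
        // NDK backend: the result type lives under ::aidl::android::media::audio::common.
        auto ndk = ::aidl::android::legacy2aidl_audio_usage_t_AudioUsage(AUDIO_USAGE_MEDIA);
        (void)cpp;
        (void)ndk;
    }
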
diff --git a/media/audioaidlconversion/include/media/AidlConversionCppNdk.h b/media/audioaidlconversion/include/media/AidlConversionCppNdk.h
index abf0231..ea168a4 100644
--- a/media/audioaidlconversion/include/media/AidlConversionCppNdk.h
+++ b/media/audioaidlconversion/include/media/AidlConversionCppNdk.h
@@ -16,407 +16,19 @@
 
 #pragma once
 
-#include <limits>
-#include <type_traits>
-#include <system/audio.h>
-
-/**
- * Can handle conversion between AIDL (both CPP and NDK backend) and legacy type.
- * Controlled by the cflags preprocessor in Android.bp.
- */
-#if defined(BACKEND_NDK)
-#define PREFIX(f) <aidl/f>
-#else
-#define PREFIX(f) <f>
-#endif
-
-#include PREFIX(android/media/audio/common/AudioChannelLayout.h)
-#include PREFIX(android/media/audio/common/AudioConfig.h)
-#include PREFIX(android/media/audio/common/AudioConfigBase.h)
-#include PREFIX(android/media/audio/common/AudioContentType.h)
-#include PREFIX(android/media/audio/common/AudioDeviceDescription.h)
-#include PREFIX(android/media/audio/common/AudioDualMonoMode.h)
-#include PREFIX(android/media/audio/common/AudioEncapsulationMetadataType.h)
-#include PREFIX(android/media/audio/common/AudioEncapsulationMode.h)
-#include PREFIX(android/media/audio/common/AudioEncapsulationType.h)
-#include PREFIX(android/media/audio/common/AudioFormatDescription.h)
-#include PREFIX(android/media/audio/common/AudioGain.h)
-#include PREFIX(android/media/audio/common/AudioGainConfig.h)
-#include PREFIX(android/media/audio/common/AudioGainMode.h)
-#include PREFIX(android/media/audio/common/AudioInputFlags.h)
-#include PREFIX(android/media/audio/common/AudioIoFlags.h)
-#include PREFIX(android/media/audio/common/AudioLatencyMode.h)
-#include PREFIX(android/media/audio/common/AudioMode.h)
-#include PREFIX(android/media/audio/common/AudioOffloadInfo.h)
-#include PREFIX(android/media/audio/common/AudioOutputFlags.h)
-#include PREFIX(android/media/audio/common/AudioPort.h)
-#include PREFIX(android/media/audio/common/AudioPortConfig.h)
-#include PREFIX(android/media/audio/common/AudioPortExt.h)
-#include PREFIX(android/media/audio/common/AudioPortMixExt.h)
-#include PREFIX(android/media/audio/common/AudioPlaybackRate.h)
-#include PREFIX(android/media/audio/common/AudioProfile.h)
-#include PREFIX(android/media/audio/common/AudioSource.h)
-#include PREFIX(android/media/audio/common/AudioStandard.h)
-#include PREFIX(android/media/audio/common/AudioUsage.h)
-#include PREFIX(android/media/audio/common/AudioUuid.h)
-#include PREFIX(android/media/audio/common/ExtraAudioDescriptor.h)
-#include PREFIX(android/media/audio/common/Int.h)
-#include PREFIX(android/media/audio/common/MicrophoneDynamicInfo.h)
-#include PREFIX(android/media/audio/common/MicrophoneInfo.h)
-#undef PREFIX
-
+// Since conversion functions use ConversionResult, pull it in here.
 #include <media/AidlConversionUtil.h>
-#include <system/audio.h>
-#include <system/audio_effect.h>
 
-using ::android::String16;
-using ::android::String8;
-using ::android::status_t;
+// Include 'AidlConversionCppNdk-impl.h' once if 'BACKEND_NDK' is defined,
+// or if no 'BACKEND_*' macro is defined (C++ backend). Include it twice if
+// 'BACKEND_CPP_NDK' is defined: once with 'BACKEND_NDK_IMPL' defined, once without.
 
-#if defined(BACKEND_NDK)
-namespace aidl {
+#if defined(BACKEND_CPP_NDK) || defined(BACKEND_NDK)
+#define BACKEND_NDK_IMPL
+#include <media/AidlConversionCppNdk-impl.h>
+#undef BACKEND_NDK_IMPL
 #endif
 
-namespace android {
-
-// maxSize is the size of the C-string buffer (including the 0-terminator), NOT the max length of
-// the string.
-status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize);
-ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize);
-
-ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_module_handle_t_int32_t(audio_module_handle_t legacy);
-
-ConversionResult<audio_io_handle_t> aidl2legacy_int32_t_audio_io_handle_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_io_handle_t_int32_t(audio_io_handle_t legacy);
-
-ConversionResult<audio_port_handle_t> aidl2legacy_int32_t_audio_port_handle_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_port_handle_t_int32_t(audio_port_handle_t legacy);
-
-ConversionResult<audio_patch_handle_t> aidl2legacy_int32_t_audio_patch_handle_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_patch_handle_t_int32_t(audio_patch_handle_t legacy);
-
-ConversionResult<audio_unique_id_t> aidl2legacy_int32_t_audio_unique_id_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_unique_id_t_int32_t(audio_unique_id_t legacy);
-
-ConversionResult<audio_hw_sync_t> aidl2legacy_int32_t_audio_hw_sync_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_hw_sync_t_int32_t(audio_hw_sync_t legacy);
-
-ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy);
-
-ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy);
-
-ConversionResult<uid_t> aidl2legacy_int32_t_uid_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_uid_t_int32_t(uid_t legacy);
-
-ConversionResult<String8> aidl2legacy_string_view_String8(std::string_view aidl);
-ConversionResult<std::string> legacy2aidl_String8_string(const String8& legacy);
-
-ConversionResult<String16> aidl2legacy_string_view_String16(std::string_view aidl);
-ConversionResult<std::string> legacy2aidl_String16_string(const String16& legacy);
-
-ConversionResult<std::optional<String16>>
-aidl2legacy_optional_string_view_optional_String16(std::optional<std::string_view> aidl);
-ConversionResult<std::optional<std::string_view>>
-legacy2aidl_optional_String16_optional_string(std::optional<String16> legacy);
-
-ConversionResult<audio_channel_mask_t> aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
-        const media::audio::common::AudioChannelLayout& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioChannelLayout>
-legacy2aidl_audio_channel_mask_t_AudioChannelLayout(audio_channel_mask_t legacy, bool isInput);
-
-enum class AudioPortDirection {
-    INPUT, OUTPUT
-};
-ConversionResult<AudioPortDirection> portDirection(audio_port_role_t role, audio_port_type_t type);
-ConversionResult<audio_port_role_t> portRole(AudioPortDirection direction, audio_port_type_t type);
-
-ConversionResult<audio_config_t>
-aidl2legacy_AudioConfig_audio_config_t(const media::audio::common::AudioConfig& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioConfig>
-legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy, bool isInput);
-
-ConversionResult<audio_config_base_t>
-aidl2legacy_AudioConfigBase_audio_config_base_t(
-        const media::audio::common::AudioConfigBase& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioConfigBase>
-legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy, bool isInput);
-
-ConversionResult<audio_input_flags_t>
-aidl2legacy_AudioInputFlags_audio_input_flags_t(media::audio::common::AudioInputFlags aidl);
-ConversionResult<media::audio::common::AudioInputFlags>
-legacy2aidl_audio_input_flags_t_AudioInputFlags(audio_input_flags_t legacy);
-
-ConversionResult<audio_output_flags_t>
-aidl2legacy_AudioOutputFlags_audio_output_flags_t(media::audio::common::AudioOutputFlags aidl);
-ConversionResult<media::audio::common::AudioOutputFlags>
-legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
-
-ConversionResult<audio_input_flags_t> aidl2legacy_int32_t_audio_input_flags_t_mask(
-        int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_input_flags_t_int32_t_mask(
-        audio_input_flags_t legacy);
-
-ConversionResult<audio_output_flags_t> aidl2legacy_int32_t_audio_output_flags_t_mask(
-        int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_output_flags_t_int32_t_mask(
-        audio_output_flags_t legacy);
-
-ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
-        const media::audio::common::AudioIoFlags& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
-        const audio_io_flags& legacy, bool isInput);
-
-ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy);
-
-ConversionResult<audio_content_type_t>
-aidl2legacy_AudioContentType_audio_content_type_t(
-        media::audio::common::AudioContentType aidl);
-ConversionResult<media::audio::common::AudioContentType>
-legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy);
-
-ConversionResult<audio_devices_t> aidl2legacy_AudioDeviceDescription_audio_devices_t(
-        const media::audio::common::AudioDeviceDescription& aidl);
-ConversionResult<media::audio::common::AudioDeviceDescription>
-legacy2aidl_audio_devices_t_AudioDeviceDescription(audio_devices_t legacy);
-
-status_t aidl2legacy_AudioDevice_audio_device(
-        const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
-        char* legacyAddress);
-status_t aidl2legacy_AudioDevice_audio_device(
-        const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
-        String8* legacyAddress);
-status_t aidl2legacy_AudioDevice_audio_device(
-        const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
-        std::string* legacyAddress);
-
-ConversionResult<media::audio::common::AudioDevice> legacy2aidl_audio_device_AudioDevice(
-        audio_devices_t legacyType, const char* legacyAddress);
-ConversionResult<media::audio::common::AudioDevice> legacy2aidl_audio_device_AudioDevice(
-        audio_devices_t legacyType, const String8& legacyAddress);
-
-ConversionResult<audio_extra_audio_descriptor>
-aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
-        const media::audio::common::ExtraAudioDescriptor& aidl);
-
-ConversionResult<media::audio::common::ExtraAudioDescriptor>
-legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
-        const audio_extra_audio_descriptor& legacy);
-
-ConversionResult<audio_encapsulation_metadata_type_t>
-aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(
-        media::audio::common::AudioEncapsulationMetadataType aidl);
-ConversionResult<media::audio::common::AudioEncapsulationMetadataType>
-legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
-        audio_encapsulation_metadata_type_t legacy);
-
-ConversionResult<uint32_t> aidl2legacy_AudioEncapsulationMetadataType_mask(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy);
-
-ConversionResult<audio_encapsulation_mode_t>
-aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(
-        media::audio::common::AudioEncapsulationMode aidl);
-ConversionResult<media::audio::common::AudioEncapsulationMode>
-legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(audio_encapsulation_mode_t legacy);
-
-ConversionResult<uint32_t> aidl2legacy_AudioEncapsulationMode_mask(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_AudioEncapsulationMode_mask(uint32_t legacy);
-
-ConversionResult<audio_encapsulation_type_t>
-aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
-        const media::audio::common::AudioEncapsulationType& aidl);
-ConversionResult<media::audio::common::AudioEncapsulationType>
-legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
-        const audio_encapsulation_type_t& legacy);
-
-ConversionResult<audio_format_t> aidl2legacy_AudioFormatDescription_audio_format_t(
-        const media::audio::common::AudioFormatDescription& aidl);
-ConversionResult<media::audio::common::AudioFormatDescription>
-legacy2aidl_audio_format_t_AudioFormatDescription(audio_format_t legacy);
-
-ConversionResult<audio_gain_mode_t>
-aidl2legacy_AudioGainMode_audio_gain_mode_t(media::audio::common::AudioGainMode aidl);
-ConversionResult<media::audio::common::AudioGainMode>
-legacy2aidl_audio_gain_mode_t_AudioGainMode(audio_gain_mode_t legacy);
-
-ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t_mask(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy);
-
-ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
-        const media::audio::common::AudioGainConfig& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioGainConfig>
-legacy2aidl_audio_gain_config_AudioGainConfig(const audio_gain_config& legacy, bool isInput);
-
-ConversionResult<audio_gain>
-aidl2legacy_AudioGain_audio_gain(const media::audio::common::AudioGain& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioGain>
-legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy, bool isInput);
-
-ConversionResult<audio_input_flags_t>
-aidl2legacy_AudioInputFlags_audio_input_flags_t(media::audio::common::AudioInputFlags aidl);
-ConversionResult<media::audio::common::AudioInputFlags>
-legacy2aidl_audio_input_flags_t_AudioInputFlags(audio_input_flags_t legacy);
-
-ConversionResult<audio_mode_t>
-aidl2legacy_AudioMode_audio_mode_t(media::audio::common::AudioMode aidl);
-ConversionResult<media::audio::common::AudioMode>
-legacy2aidl_audio_mode_t_AudioMode(audio_mode_t legacy);
-
-ConversionResult<audio_offload_info_t>
-aidl2legacy_AudioOffloadInfo_audio_offload_info_t(
-        const media::audio::common::AudioOffloadInfo& aidl);
-ConversionResult<media::audio::common::AudioOffloadInfo>
-legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy);
-
-ConversionResult<audio_output_flags_t>
-aidl2legacy_AudioOutputFlags_audio_output_flags_t(media::audio::common::AudioOutputFlags aidl);
-ConversionResult<media::audio::common::AudioOutputFlags>
-legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
-
-// This type is unnamed in the original definition, thus we name it here.
-using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
-ConversionResult<audio_port_config_mix_ext_usecase>
-aidl2legacy_AudioPortMixExtUseCase_audio_port_config_mix_ext_usecase(
-        const media::audio::common::AudioPortMixExtUseCase& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioPortMixExtUseCase>
-legacy2aidl_audio_port_config_mix_ext_usecase_AudioPortMixExtUseCase(
-        const audio_port_config_mix_ext_usecase& legacy, bool isInput);
-
-ConversionResult<audio_port_config_device_ext>
-aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
-        const media::audio::common::AudioPortDeviceExt& aidl);
-ConversionResult<media::audio::common::AudioPortDeviceExt>
-        legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
-        const audio_port_config_device_ext& legacy);
-
-status_t aidl2legacy_AudioPortConfig_audio_port_config(
-        const media::audio::common::AudioPortConfig& aidl, bool isInput,
-        audio_port_config* legacy, int32_t* portId);
-ConversionResult<media::audio::common::AudioPortConfig>
-legacy2aidl_audio_port_config_AudioPortConfig(
-        const audio_port_config& legacy, bool isInput, int32_t portId);
-
-ConversionResult<audio_port_mix_ext> aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
-        const media::audio::common::AudioPortMixExt& aidl);
-ConversionResult<media::audio::common::AudioPortMixExt>
-legacy2aidl_audio_port_mix_ext_AudioPortMixExt(
-        const audio_port_mix_ext& legacy);
-
-ConversionResult<audio_port_device_ext>
-aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
-        const media::audio::common::AudioPortDeviceExt& aidl);
-ConversionResult<media::audio::common::AudioPortDeviceExt>
-legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
-        const audio_port_device_ext& legacy);
-
-ConversionResult<audio_port_v7>
-aidl2legacy_AudioPort_audio_port_v7(
-        const media::audio::common::AudioPort& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioPort>
-legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy, bool isInput);
-
-ConversionResult<audio_profile> aidl2legacy_AudioProfile_audio_profile(
-        const media::audio::common::AudioProfile& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioProfile> legacy2aidl_audio_profile_AudioProfile(
-        const audio_profile& legacy, bool isInput);
-
-ConversionResult<audio_standard_t> aidl2legacy_AudioStandard_audio_standard_t(
-        media::audio::common::AudioStandard aidl);
-ConversionResult<media::audio::common::AudioStandard> legacy2aidl_audio_standard_t_AudioStandard(
-        audio_standard_t legacy);
-
-ConversionResult<audio_source_t> aidl2legacy_AudioSource_audio_source_t(
-        media::audio::common::AudioSource aidl);
-ConversionResult<media::audio::common::AudioSource> legacy2aidl_audio_source_t_AudioSource(
-        audio_source_t legacy);
-
-ConversionResult<audio_usage_t> aidl2legacy_AudioUsage_audio_usage_t(
-        media::audio::common::AudioUsage aidl);
-ConversionResult<media::audio::common::AudioUsage> legacy2aidl_audio_usage_t_AudioUsage(
-        audio_usage_t legacy);
-
-ConversionResult<audio_uuid_t> aidl2legacy_AudioUuid_audio_uuid_t(
-        const media::audio::common::AudioUuid &aidl);
-ConversionResult<media::audio::common::AudioUuid> legacy2aidl_audio_uuid_t_AudioUuid(
-        const audio_uuid_t& legacy);
-
-ConversionResult<audio_dual_mono_mode_t>
-aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(media::audio::common::AudioDualMonoMode aidl);
-ConversionResult<media::audio::common::AudioDualMonoMode>
-legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(audio_dual_mono_mode_t legacy);
-
-ConversionResult<audio_timestretch_fallback_mode_t>
-aidl2legacy_TimestretchFallbackMode_audio_timestretch_fallback_mode_t(
-        media::audio::common::AudioPlaybackRate::TimestretchFallbackMode aidl);
-ConversionResult<media::audio::common::AudioPlaybackRate::TimestretchFallbackMode>
-legacy2aidl_audio_timestretch_fallback_mode_t_TimestretchFallbackMode(
-        audio_timestretch_fallback_mode_t legacy);
-
-ConversionResult<audio_timestretch_stretch_mode_t>
-aidl2legacy_TimestretchMode_audio_timestretch_stretch_mode_t(
-        media::audio::common::AudioPlaybackRate::TimestretchMode aidl);
-ConversionResult<media::audio::common::AudioPlaybackRate::TimestretchMode>
-legacy2aidl_audio_timestretch_stretch_mode_t_TimestretchMode(
-        audio_timestretch_stretch_mode_t legacy);
-
-ConversionResult<audio_playback_rate_t>
-aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(
-        const media::audio::common::AudioPlaybackRate& aidl);
-ConversionResult<media::audio::common::AudioPlaybackRate>
-legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(const audio_playback_rate_t& legacy);
-
-ConversionResult<audio_latency_mode_t>
-aidl2legacy_AudioLatencyMode_audio_latency_mode_t(media::audio::common::AudioLatencyMode aidl);
-ConversionResult<media::audio::common::AudioLatencyMode>
-legacy2aidl_audio_latency_mode_t_AudioLatencyMode(audio_latency_mode_t legacy);
-
-ConversionResult<audio_microphone_location_t>
-aidl2legacy_MicrophoneInfoLocation_audio_microphone_location_t(
-        media::audio::common::MicrophoneInfo::Location aidl);
-ConversionResult<media::audio::common::MicrophoneInfo::Location>
-legacy2aidl_audio_microphone_location_t_MicrophoneInfoLocation(audio_microphone_location_t legacy);
-
-ConversionResult<audio_microphone_group_t> aidl2legacy_int32_t_audio_microphone_group_t(
-        int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_microphone_group_t_int32_t(
-        audio_microphone_group_t legacy);
-
-ConversionResult<audio_microphone_directionality_t>
-aidl2legacy_MicrophoneInfoDirectionality_audio_microphone_directionality_t(
-        media::audio::common::MicrophoneInfo::Directionality aidl);
-ConversionResult<media::audio::common::MicrophoneInfo::Directionality>
-legacy2aidl_audio_microphone_directionality_t_MicrophoneInfoDirectionality(
-        audio_microphone_directionality_t legacy);
-
-ConversionResult<audio_microphone_coordinate>
-aidl2legacy_MicrophoneInfoCoordinate_audio_microphone_coordinate(
-        const media::audio::common::MicrophoneInfo::Coordinate& aidl);
-ConversionResult<media::audio::common::MicrophoneInfo::Coordinate>
-legacy2aidl_audio_microphone_coordinate_MicrophoneInfoCoordinate(
-        const audio_microphone_coordinate& legacy);
-
-ConversionResult<audio_microphone_channel_mapping_t>
-aidl2legacy_MicrophoneDynamicInfoChannelMapping_audio_microphone_channel_mapping_t(
-        media::audio::common::MicrophoneDynamicInfo::ChannelMapping aidl);
-ConversionResult<media::audio::common::MicrophoneDynamicInfo::ChannelMapping>
-legacy2aidl_audio_microphone_channel_mapping_t_MicrophoneDynamicInfoChannelMapping(
-        audio_microphone_channel_mapping_t legacy);
-
-ConversionResult<audio_microphone_characteristic_t>
-aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t(
-        const media::audio::common::MicrophoneInfo& aidlInfo,
-        const media::audio::common::MicrophoneDynamicInfo& aidlDynamic);
-status_t
-legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfos(
-        const audio_microphone_characteristic_t& legacy,
-        media::audio::common::MicrophoneInfo* aidlInfo,
-        media::audio::common::MicrophoneDynamicInfo* aidlDynamic);
-
-}  // namespace android
-
-#if defined(BACKEND_NDK)
-} // aidl
+#if defined(BACKEND_CPP_NDK) || !defined(BACKEND_NDK)
+#include <media/AidlConversionCppNdk-impl.h>
 #endif
diff --git a/media/audioaidlconversion/include/media/AidlConversionEffect.h b/media/audioaidlconversion/include/media/AidlConversionEffect.h
index 83aa614..5e245a7 100644
--- a/media/audioaidlconversion/include/media/AidlConversionEffect.h
+++ b/media/audioaidlconversion/include/media/AidlConversionEffect.h
@@ -26,6 +26,7 @@
 #include <hardware/audio_effect.h>
 #include <media/AidlConversionUtil.h>
 #include <system/audio_effect.h>
+#include <system/audio_effects/audio_effects_utils.h>
 
 #include <aidl/android/hardware/audio/effect/IEffect.h>
 
@@ -45,19 +46,39 @@
     return VALUE_OR_RETURN((unionGetField<T, field>(spec)));
 }
 
-#define GET_PARAMETER_SPECIFIC_FIELD(u, specific, tag, field, fieldType)                        \
-    getParameterSpecificField<std::decay_t<decltype(u)>, specific,                              \
-                              aidl::android::hardware::audio::effect::Parameter::Specific::tag, \
-                              specific::field, fieldType>(u)
+#define GET_PARAMETER_SPECIFIC_FIELD(_u, _effect, _tag, _field, _fieldType)                      \
+    getParameterSpecificField<std::decay_t<decltype(_u)>, _effect,                               \
+                              aidl::android::hardware::audio::effect::Parameter::Specific::_tag, \
+                              _effect::_field, _fieldType>(_u)
 
-#define MAKE_SPECIFIC_PARAMETER(spec, tag, field, value)                                    \
-    UNION_MAKE(aidl::android::hardware::audio::effect::Parameter, specific,                 \
-               UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Specific, tag, \
-                          UNION_MAKE(spec, field, value)))
+#define MAKE_SPECIFIC_PARAMETER(_spec, _tag, _field, _value)                                 \
+    UNION_MAKE(aidl::android::hardware::audio::effect::Parameter, specific,                  \
+               UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Specific, _tag, \
+                          UNION_MAKE(_spec, _field, _value)))
 
-#define MAKE_SPECIFIC_PARAMETER_ID(spec, tag, field)                       \
-    UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Id, tag, \
-               UNION_MAKE(spec::Id, commonTag, field))
+#define MAKE_SPECIFIC_PARAMETER_ID(_spec, _tag, _field)                     \
+    UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Id, _tag, \
+               UNION_MAKE(_spec::Id, commonTag, _field))
+
+#define MAKE_EXTENSION_PARAMETER_ID(_effect, _tag, _field)                  \
+    UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Id, _tag, \
+               UNION_MAKE(_effect::Id, vendorExtensionTag, _field))
+
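+// Expands to a block that reads the vendor-extension parameter id from '_param'
+// (an ::android::effect::utils::EffectParamWriter), queries the wrapped HAL effect
+// via 'mEffect->getParameter()', and writes the returned parameter back into
+// '_param'. Must be expanded inside a method that has 'mEffect' in scope and
+// returns ::android::status_t.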
+#define VENDOR_EXTENSION_GET_AND_RETURN(_effect, _tag, _param)                                    \
+    {                                                                                             \
+        aidl::android::hardware::audio::effect::VendorExtension _extId = VALUE_OR_RETURN_STATUS(  \
+                aidl::android::legacy2aidl_EffectParameterReader_Param_VendorExtension(_param));  \
+        aidl::android::hardware::audio::effect::Parameter::Id _id =                               \
+                MAKE_EXTENSION_PARAMETER_ID(_effect, _tag##Tag, _extId);                          \
+        aidl::android::hardware::audio::effect::Parameter _aidlParam;                             \
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(_id, &_aidlParam))); \
+        aidl::android::hardware::audio::effect::VendorExtension _ext =                            \
+                VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(                              \
+                        _aidlParam, _effect, _tag, _effect::vendor, VendorExtension));            \
+        return VALUE_OR_RETURN_STATUS(                                                            \
+                aidl::android::aidl2legacy_ParameterExtension_EffectParameterWriter(_aidlParam,   \
+                                                                                    _param));     \
+    }
 
 ConversionResult<uint32_t> aidl2legacy_Flags_Type_uint32(
         ::aidl::android::hardware::audio::effect::Flags::Type type);
@@ -126,5 +147,36 @@
 ConversionResult<int32_t> aidl2legacy_DynamicsProcessing_ResolutionPreference_int32(
         ::aidl::android::hardware::audio::effect::DynamicsProcessing::ResolutionPreference aidl);
 
+ConversionResult<uint32_t> aidl2legacy_Parameter_Visualizer_ScalingMode_uint32(
+        ::aidl::android::hardware::audio::effect::Visualizer::ScalingMode aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Visualizer::ScalingMode>
+legacy2aidl_Parameter_Visualizer_uint32_ScalingMode(uint32_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_Visualizer_MeasurementMode_uint32(
+        ::aidl::android::hardware::audio::effect::Visualizer::MeasurementMode aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Visualizer::MeasurementMode>
+legacy2aidl_Parameter_Visualizer_uint32_MeasurementMode(uint32_t legacy);
+
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_EffectParameterReader_ParameterExtension(
+        ::android::effect::utils::EffectParamReader& param);
+ConversionResult<::android::status_t> aidl2legacy_ParameterExtension_EffectParameterWriter(
+        const ::aidl::android::hardware::audio::effect::Parameter& aidl,
+        ::android::effect::utils::EffectParamWriter& legacy);
+
+ConversionResult<::aidl::android::hardware::audio::effect::VendorExtension>
+legacy2aidl_EffectParameterReader_Param_VendorExtension(
+        ::android::effect::utils::EffectParamReader& param);
+ConversionResult<::aidl::android::hardware::audio::effect::VendorExtension>
+legacy2aidl_EffectParameterReader_Data_VendorExtension(
+        ::android::effect::utils::EffectParamReader& param);
+
+ConversionResult<::android::status_t> aidl2legacy_VendorExtension_EffectParameterWriter_Data(
+        ::android::effect::utils::EffectParamWriter& param,
+        ::aidl::android::hardware::audio::effect::VendorExtension ext);
+
 }  // namespace android
 }  // namespace aidl
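
As a usage illustration for the macros above, `VENDOR_EXTENSION_GET_AND_RETURN` is expanded inside an effect wrapper that owns the HAL effect proxy. A hedged sketch under that assumption; the wrapper class, its members, and the choice of `Downmix`/`downmix` are illustrative, not part of this change:

    #include <memory>

    #include <media/AidlConversionEffect.h>

    using namespace ::aidl::android;                           // conversion helpers
    using namespace ::aidl::android::hardware::audio::effect;  // IEffect, Downmix, ...

    class DownmixWrapperSketch {                               // hypothetical wrapper
        std::shared_ptr<IEffect> mEffect;                      // proxy to the HAL effect instance
      public:
        ::android::status_t getVendorParameter(::android::effect::utils::EffectParamWriter& param) {
            // 'Downmix' is the AIDL effect union, 'downmix' its Parameter::Specific tag.
            VENDOR_EXTENSION_GET_AND_RETURN(Downmix, downmix, param);
        }
    };
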
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdk.h b/media/audioaidlconversion/include/media/AidlConversionNdk.h
index 98a7d41..e92f1a9 100644
--- a/media/audioaidlconversion/include/media/AidlConversionNdk.h
+++ b/media/audioaidlconversion/include/media/AidlConversionNdk.h
@@ -16,17 +16,20 @@
 
 #pragma once
 
-#include <android/binder_auto_utils.h>
-#include <android/binder_manager.h>
-#include <android/binder_process.h>
-
 /**
- * Can only handle conversion between AIDL (NDK backend) and legacy type.
+ * Can only handle conversion between AIDL (NDK backend) and legacy types.
  */
+
+#include <string>
+#include <vector>
+
 #include <hardware/audio_effect.h>
-#include <media/AidlConversionUtil.h>
 #include <system/audio_effect.h>
+
+#include <aidl/android/hardware/audio/common/PlaybackTrackMetadata.h>
+#include <aidl/android/hardware/audio/common/RecordTrackMetadata.h>
 #include <aidl/android/media/audio/common/AudioConfig.h>
+#include <media/AidlConversionUtil.h>
 
 namespace aidl {
 namespace android {
@@ -36,5 +39,22 @@
 ConversionResult<media::audio::common::AudioConfigBase> legacy2aidl_buffer_config_t_AudioConfigBase(
         const buffer_config_t& legacy, bool isInput);
 
+::android::status_t aidl2legacy_AudioAttributesTags(
+        const std::vector<std::string>& aidl, char* legacy);
+ConversionResult<std::vector<std::string>> legacy2aidl_AudioAttributesTags(const char* legacy);
+
+ConversionResult<playback_track_metadata_v7>
+aidl2legacy_PlaybackTrackMetadata_playback_track_metadata_v7(
+        const hardware::audio::common::PlaybackTrackMetadata& aidl);
+ConversionResult<hardware::audio::common::PlaybackTrackMetadata>
+legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata(
+        const playback_track_metadata_v7& legacy);
+
+ConversionResult<record_track_metadata_v7>
+aidl2legacy_RecordTrackMetadata_record_track_metadata_v7(
+        const hardware::audio::common::RecordTrackMetadata& aidl);
+ConversionResult<hardware::audio::common::RecordTrackMetadata>
+legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(const record_track_metadata_v7& legacy);
+
 }  // namespace android
 }  // namespace aidl
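
A typical call site for the new metadata declarations converts one AIDL struct at a time and surfaces conversion failures as a status code. A short sketch, assuming only what is declared above (the helper name is illustrative):

    #include <media/AidlConversionNdk.h>
    #include <system/audio.h>

    using ::aidl::android::hardware::audio::common::PlaybackTrackMetadata;

    ::android::status_t toLegacyMetadataSketch(const PlaybackTrackMetadata& aidl,
                                               playback_track_metadata_v7* out) {
        *out = VALUE_OR_RETURN_STATUS(
                ::aidl::android::aidl2legacy_PlaybackTrackMetadata_playback_track_metadata_v7(aidl));
        return ::android::OK;
    }
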
diff --git a/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h b/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
new file mode 100644
index 0000000..ed91e2c
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
@@ -0,0 +1,438 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// WARNING: This file is intended for multiple inclusion, one time
+// with BACKEND_NDK_IMPL defined, one time without it.
+// Do not include directly, use 'AidlConversionUtil.h'.
+#if (defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_NDK)) || \
+    (!defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_CPP))
+#if defined(BACKEND_NDK_IMPL)
+#define AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_NDK
+#else
+#define AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_CPP
+#endif  // BACKEND_NDK_IMPL
+
+#include <limits>
+#include <type_traits>
+#include <utility>
+
+#include <android-base/expected.h>
+#include <binder/Status.h>
+
+#if defined(BACKEND_NDK_IMPL)
+#include <android/binder_auto_utils.h>
+#include <android/binder_enums.h>
+#include <android/binder_status.h>
+
+namespace aidl {
+#else
+#include <binder/Enums.h>
+#endif  // BACKEND_NDK_IMPL
+namespace android {
+
+#if defined(BACKEND_NDK_IMPL)
+// This adds `::aidl::android::ConversionResult` for convenience.
+// Otherwise, it would be required to write `::android::ConversionResult` everywhere.
+template <typename T>
+using ConversionResult = ::android::ConversionResult<T>;
+#endif  // BACKEND_NDK_IMPL
+
+/**
+ * A generic template to safely cast between integral types, respecting limits of the destination
+ * type.
+ */
+template<typename To, typename From>
+ConversionResult<To> convertIntegral(From from) {
+    // Special handling is required for signed vs. unsigned comparisons, since otherwise the
+    // signed value may get converted to unsigned and produce wrong results.
+    if (std::is_signed_v<From> && !std::is_signed_v<To>) {
+        if (from < 0 || from > std::numeric_limits<To>::max()) {
+            return ::android::base::unexpected(::android::BAD_VALUE);
+        }
+    } else if (std::is_signed_v<To> && !std::is_signed_v<From>) {
+        if (from > std::numeric_limits<To>::max()) {
+            return ::android::base::unexpected(::android::BAD_VALUE);
+        }
+    } else {
+        if (from < std::numeric_limits<To>::min() || from > std::numeric_limits<To>::max()) {
+            return ::android::base::unexpected(::android::BAD_VALUE);
+        }
+    }
+    return static_cast<To>(from);
+}
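+
+// For example, convertIntegral<uint8_t>(-1) and convertIntegral<uint8_t>(300) both fail with
+// ::android::BAD_VALUE, while convertIntegral<uint8_t>(42) succeeds and returns 42.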
+
+/**
+ * A generic template to safely cast between types, that are intended to be the same size, but
+ * interpreted differently.
+ */
+template<typename To, typename From>
+ConversionResult<To> convertReinterpret(From from) {
+    static_assert(sizeof(From) == sizeof(To));
+    return static_cast<To>(from);
+}
+
+/**
+ * A generic template that helps convert containers of convertible types, using iterators.
+ */
+template<typename InputIterator, typename OutputIterator, typename Func>
+::android::status_t convertRange(InputIterator start,
+                      InputIterator end,
+                      OutputIterator out,
+                      const Func& itemConversion) {
+    for (InputIterator iter = start; iter != end; ++iter, ++out) {
+        *out = VALUE_OR_RETURN_STATUS(itemConversion(*iter));
+    }
+    return ::android::OK;
+}
+
+/**
+ * A generic template that helps convert containers of convertible types, using iterators.
+ * Uses a limit as maximum conversion items.
+ */
+template<typename InputIterator, typename OutputIterator, typename Func>
+::android::status_t convertRangeWithLimit(InputIterator start,
+                      InputIterator end,
+                      OutputIterator out,
+                      const Func& itemConversion,
+                      const size_t limit) {
+    InputIterator last = end;
+    if (end - start > limit) {
+        last = start + limit;
+    }
+    for (InputIterator iter = start; (iter != last); ++iter, ++out) {
+        *out = VALUE_OR_RETURN_STATUS(itemConversion(*iter));
+    }
+    return ::android::OK;
+}
+
+/**
+ * A generic template that helps convert containers of convertible types.
+ */
+template<typename OutputContainer, typename InputContainer, typename Func>
+ConversionResult<OutputContainer>
+convertContainer(const InputContainer& input, const Func& itemConversion) {
+    OutputContainer output;
+    auto ins = std::inserter(output, output.begin());
+    for (const auto& item : input) {
+        *ins = VALUE_OR_RETURN(itemConversion(item));
+    }
+    return output;
+}
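+
+// Sketch: converting an entire vector using an existing per-item conversion function
+// (the converter name here is illustrative):
+//   ConversionResult<std::vector<audio_usage_t>> legacyUsages =
+//           convertContainer<std::vector<audio_usage_t>>(
+//                   aidlUsages, aidl2legacy_AudioUsage_audio_usage_t);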
+
+/**
+ * A generic template that helps convert containers of convertible types
+ * using an item conversion function with an additional parameter.
+ */
+template<typename OutputContainer, typename InputContainer, typename Func, typename Parameter>
+ConversionResult<OutputContainer>
+convertContainer(const InputContainer& input, const Func& itemConversion, const Parameter& param) {
+    OutputContainer output;
+    auto ins = std::inserter(output, output.begin());
+    for (const auto& item : input) {
+        *ins = VALUE_OR_RETURN(itemConversion(item, param));
+    }
+    return output;
+}
+
+/**
+ * A generic template that helps to "zip" two input containers of the same size
+ * into a single vector of converted types. The conversion function must
+ * thus accept two arguments.
+ */
+template<typename OutputContainer, typename InputContainer1,
+        typename InputContainer2, typename Func>
+ConversionResult<OutputContainer>
+convertContainers(const InputContainer1& input1, const InputContainer2& input2,
+        const Func& itemConversion) {
+    auto iter2 = input2.begin();
+    OutputContainer output;
+    auto ins = std::inserter(output, output.begin());
+    for (const auto& item1 : input1) {
+        RETURN_IF_ERROR(iter2 != input2.end() ? ::android::OK : ::android::BAD_VALUE);
+        *ins = VALUE_OR_RETURN(itemConversion(item1, *iter2++));
+    }
+    return output;
+}
+
+/**
+ * A generic template that helps to "unzip" a per-element conversion into
+ * a pair of elements into a pair of containers. The conversion function
+ * must emit a pair of elements.
+ */
+template<typename OutputContainer1, typename OutputContainer2,
+        typename InputContainer, typename Func>
+ConversionResult<std::pair<OutputContainer1, OutputContainer2>>
+convertContainerSplit(const InputContainer& input, const Func& itemConversion) {
+    OutputContainer1 output1;
+    OutputContainer2 output2;
+    auto ins1 = std::inserter(output1, output1.begin());
+    auto ins2 = std::inserter(output2, output2.begin());
+    for (const auto& item : input) {
+        auto out_pair = VALUE_OR_RETURN(itemConversion(item));
+        *ins1 = out_pair.first;
+        *ins2 = out_pair.second;
+    }
+    return std::make_pair(output1, output2);
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// The code below establishes:
+// IntegralTypeOf<T>, which works for either integral types (in which case it evaluates to T), or
+// enum types (in which case it evaluates to std::underlying_type_t<T>).
+
+template<typename T, typename = std::enable_if_t<std::is_integral_v<T> || std::is_enum_v<T>>>
+struct IntegralTypeOfStruct {
+    using Type = T;
+};
+
+template<typename T>
+struct IntegralTypeOfStruct<T, std::enable_if_t<std::is_enum_v<T>>> {
+    using Type = std::underlying_type_t<T>;
+};
+
+template<typename T>
+using IntegralTypeOf = typename IntegralTypeOfStruct<T>::Type;
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities for handling bitmasks.
+
+template<typename Enum>
+Enum indexToEnum_index(int index) {
+    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+    return static_cast<Enum>(index);
+}
+
+template<typename Enum>
+Enum indexToEnum_bitmask(int index) {
+    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+    return static_cast<Enum>(1 << index);
+}
+
+template<typename Mask, typename Enum>
+Mask enumToMask_bitmask(Enum e) {
+    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+    static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
+    return static_cast<Mask>(e);
+}
+
+template<typename Mask, typename Enum>
+Mask enumToMask_index(Enum e) {
+    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+    static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
+    return static_cast<Mask>(static_cast<std::make_unsigned_t<IntegralTypeOf<Mask>>>(1)
+            << static_cast<int>(e));
+}
+
+template<typename DestMask, typename SrcMask, typename DestEnum, typename SrcEnum>
+ConversionResult<DestMask> convertBitmask(
+        SrcMask src, const std::function<ConversionResult<DestEnum>(SrcEnum)>& enumConversion,
+        const std::function<SrcEnum(int)>& srcIndexToEnum,
+        const std::function<DestMask(DestEnum)>& destEnumToMask) {
+    using UnsignedDestMask = std::make_unsigned_t<IntegralTypeOf<DestMask>>;
+    using UnsignedSrcMask = std::make_unsigned_t<IntegralTypeOf<SrcMask>>;
+
+    UnsignedDestMask dest = static_cast<UnsignedDestMask>(0);
+    UnsignedSrcMask usrc = static_cast<UnsignedSrcMask>(src);
+
+    int srcBitIndex = 0;
+    while (usrc != 0) {
+        if (usrc & 1) {
+            SrcEnum srcEnum = srcIndexToEnum(srcBitIndex);
+            DestEnum destEnum = VALUE_OR_RETURN(enumConversion(srcEnum));
+            DestMask destMask = destEnumToMask(destEnum);
+            dest |= destMask;
+        }
+        ++srcBitIndex;
+        usrc >>= 1;
+    }
+    return static_cast<DestMask>(dest);
+}
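+
+// Usage sketch (the enum and mask type names below are illustrative): convert a legacy bitmask
+// whose bits are indexed by a legacy enum into an AIDL int bitmask indexed by the AIDL enum:
+//   ConversionResult<int32_t> aidlMask =
+//           convertBitmask<int32_t, uint32_t, AidlFlag, legacy_flag_t>(
+//                   legacyMask, legacy2aidl_flag, indexToEnum_index<legacy_flag_t>,
+//                   enumToMask_index<int32_t, AidlFlag>);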
+
+template<typename Mask, typename Enum>
+bool bitmaskIsSet(Mask mask, Enum index) {
+    return (mask & enumToMask_index<Mask, Enum>(index)) != 0;
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities for working with AIDL unions.
+// UNION_GET(obj, fieldname) returns a ConversionResult<T> containing either the strongly-typed
+//   value of the respective field, or ::android::BAD_VALUE if the union is not set to the requested
+//   field.
+// UNION_SET(obj, fieldname, value) sets the requested field to the given value.
+
+template<typename T, typename T::Tag tag>
+using UnionFieldType = std::decay_t<decltype(std::declval<T>().template get<tag>())>;
+
+template<typename T, typename T::Tag tag>
+ConversionResult<UnionFieldType<T, tag>> unionGetField(const T& u) {
+    if (u.getTag() != tag) {
+        return ::android::base::unexpected(::android::BAD_VALUE);
+    }
+    return u.template get<tag>();
+}
+
+#define UNION_GET(u, field) \
+    unionGetField<std::decay_t<decltype(u)>, std::decay_t<decltype(u)>::Tag::field>(u)
+
+#define UNION_SET(u, field, value) \
+    (u).set<std::decay_t<decltype(u)>::Tag::field>(value)
+
+#define UNION_MAKE(u, field, value) u::make<u::Tag::field>(value)
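+
+// Minimal sketch, assuming a hypothetical AIDL union 'MyUnion' with an int field 'intValue':
+//   MyUnion u = UNION_MAKE(MyUnion, intValue, 42);
+//   ConversionResult<int32_t> v = UNION_GET(u, intValue);  // BAD_VALUE if another field is set
+//   UNION_SET(u, intValue, 7);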
+
+namespace aidl_utils {
+
+/**
+ * Return true if the value is valid for the AIDL enumeration.
+ */
+template <typename T>
+bool isValidEnum(T value) {
+#if defined(BACKEND_NDK_IMPL)
+    constexpr ndk::enum_range<T> er{};
+#else
+    constexpr ::android::enum_range<T> er{};
+#endif
+    return std::find(er.begin(), er.end(), value) != er.end();
+}
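+
+// e.g. for some AIDL enum E, isValidEnum(static_cast<E>(12345)) is expected to be false
+// when 12345 is not one of E's declared enumerator values.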
+
+// T is a "container" of enum binder types with a toString().
+template <typename T>
+std::string enumsToString(const T& t) {
+    std::string s;
+    for (const auto item : t) {
+        if (s.empty()) {
+            s = toString(item);
+        } else {
+            s.append("|").append(toString(item));
+        }
+    }
+    return s;
+}
+
+/**
+ * Return the equivalent Android ::android::status_t from a binder exception code.
+ *
+ * Generally one should use statusTFromBinderStatus() instead.
+ *
+ * Exception codes can be generated from a remote Java service exception; translate
+ * them for use on the native side.
+ *
+ * Note: for EX_TRANSACTION_FAILED and EX_SERVICE_SPECIFIC a more detailed error code
+ * can be found from transactionError() or serviceSpecificErrorCode().
+ */
+static inline ::android::status_t statusTFromExceptionCode(int32_t exceptionCode) {
+    using namespace ::android::binder;
+    switch (exceptionCode) {
+        case Status::EX_NONE:
+            return ::android::OK;
+        case Status::EX_SECURITY:  // Java SecurityException, rethrows locally in Java
+            return ::android::PERMISSION_DENIED;
+        case Status::EX_BAD_PARCELABLE:  // Java BadParcelableException, rethrows in Java
+        case Status::EX_ILLEGAL_ARGUMENT:  // Java IllegalArgumentException, rethrows in Java
+        case Status::EX_NULL_POINTER:  // Java NullPointerException, rethrows in Java
+            return ::android::BAD_VALUE;
+        case Status::EX_ILLEGAL_STATE:  // Java IllegalStateException, rethrows in Java
+        case Status::EX_UNSUPPORTED_OPERATION:  // Java UnsupportedOperationException, rethrows
+            return ::android::INVALID_OPERATION;
+        case Status::EX_HAS_REPLY_HEADER: // Native strictmode violation
+        case Status::EX_PARCELABLE:  // Java bootclass loader (not standard exception), rethrows
+        case Status::EX_NETWORK_MAIN_THREAD:  // Java NetworkOnMainThreadException, rethrows
+        case Status::EX_TRANSACTION_FAILED: // Native - see error code
+        case Status::EX_SERVICE_SPECIFIC:   // Java ServiceSpecificException,
+                                            // rethrows in Java with integer error code
+            return ::android::UNKNOWN_ERROR;
+    }
+    return ::android::UNKNOWN_ERROR;
+}
+
+/**
+ * Return the equivalent Android ::android::status_t from a binder status.
+ *
+ * Used to handle errors from an AIDL method declaration
+ *
+ * [oneway] void method(type0 param0, ...)
+ *
+ * or the following (where return_type is not a status_t)
+ *
+ * return_type method(type0 param0, ...)
+ */
+static inline ::android::status_t statusTFromBinderStatus(const ::android::binder::Status &status) {
+    return status.isOk() ? ::android::OK // check ::android::OK,
+        : status.serviceSpecificErrorCode() // service-side error, not standard Java exception
+                                            // (fromServiceSpecificError)
+        ?: status.transactionError() // a native binder transaction error (fromStatusT)
+        ?: statusTFromExceptionCode(status.exceptionCode()); // a service-side error with a
+                                                    // standard Java exception (fromExceptionCode)
+}
+
+#if defined(BACKEND_NDK_IMPL)
+static inline ::android::status_t statusTFromBinderStatus(const ::ndk::ScopedAStatus &status) {
+    // What we want to do is to 'return statusTFromBinderStatus(status.get()->get())'
+    // However, since the definition of AStatus is not exposed, we have to do the same
+    // via methods of ScopedAStatus:
+    return status.isOk() ? ::android::OK // check ::android::OK,
+        : status.getServiceSpecificError() // service-side error, not standard Java exception
+                                           // (fromServiceSpecificError)
+        ?: status.getStatus() // a native binder transaction error (fromStatusT)
+        ?: statusTFromExceptionCode(status.getExceptionCode()); // a service-side error with a
+                                                     // standard Java exception (fromExceptionCode)
+}
+#endif
+
+/**
+ * Return a binder::Status from native service status.
+ *
+ * This is used for methods not returning an explicit status_t,
+ * where Java callers expect an exception, not an integer return value.
+ */
+static inline ::android::binder::Status binderStatusFromStatusT(
+        ::android::status_t status, const char *optionalMessage = nullptr) {
+    const char * const emptyIfNull = optionalMessage == nullptr ? "" : optionalMessage;
+    // From binder::Status instructions:
+    //  Prefer a generic exception code when possible, then a service specific
+    //  code, and finally a ::android::status_t for low level failures or legacy support.
+    //  Exception codes and service specific errors map to nicer exceptions for
+    //  Java clients.
+
+    using namespace ::android::binder;
+    switch (status) {
+        case ::android::OK:
+            return Status::ok();
+        case ::android::PERMISSION_DENIED: // throw SecurityException on Java side
+            return Status::fromExceptionCode(Status::EX_SECURITY, emptyIfNull);
+        case ::android::BAD_VALUE: // throw IllegalArgumentException on Java side
+            return Status::fromExceptionCode(Status::EX_ILLEGAL_ARGUMENT, emptyIfNull);
+        case ::android::INVALID_OPERATION: // throw IllegalStateException on Java side
+            return Status::fromExceptionCode(Status::EX_ILLEGAL_STATE, emptyIfNull);
+    }
+
+    // A service specific error will not show on status.transactionError() so
+    // be sure to use statusTFromBinderStatus() for reliable error handling.
+
+    // throw a ServiceSpecificException.
+    return Status::fromServiceSpecificError(status, emptyIfNull);
+}
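+
+// Usage sketch (the proxy object 'mModule' and its 'open' method are illustrative):
+//   ::android::binder::Status st = mModule->open();
+//   ::android::status_t result = statusTFromBinderStatus(st);  // OK or a status_t error
+// and, on the service side, mapping a native status back into a binder Status:
+//   return binderStatusFromStatusT(result, "open failed");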
+
+} // namespace aidl_utils
+
+}  // namespace android
+
+#if defined(BACKEND_NDK_IMPL)
+}  // namespace aidl
+#endif
+
+// (defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_NDK)) || \
+// (!defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_CPP))
+#endif
diff --git a/media/audioaidlconversion/include/media/AidlConversionUtil.h b/media/audioaidlconversion/include/media/AidlConversionUtil.h
index 8b2e0de..b846436 100644
--- a/media/audioaidlconversion/include/media/AidlConversionUtil.h
+++ b/media/audioaidlconversion/include/media/AidlConversionUtil.h
@@ -16,407 +16,26 @@
 
 #pragma once
 
-#include <limits>
-#include <type_traits>
-#include <utility>
-
-#include <android-base/expected.h>
-#include <binder/Status.h>
 #include <error/Result.h>
 
-#if defined(BACKEND_NDK)
-#include <android/binder_auto_utils.h>
-#include <android/binder_enums.h>
-#include <android/binder_status.h>
-
-namespace aidl {
-#else
-#include <binder/Enums.h>
-#endif
-
+namespace android {
+// `ConversionResult` is always defined in the `::android` namespace,
+// so that it can be found from any nested namespace.
+// See below for the convenience alias specific to the NDK backend.
 template <typename T>
 using ConversionResult = ::android::error::Result<T>;
-
-namespace android {
-/**
- * A generic template to safely cast between integral types, respecting limits of the destination
- * type.
- */
-template<typename To, typename From>
-ConversionResult<To> convertIntegral(From from) {
-    // Special handling is required for signed / vs. unsigned comparisons, since otherwise we may
-    // have the signed converted to unsigned and produce wrong results.
-    if (std::is_signed_v<From> && !std::is_signed_v<To>) {
-        if (from < 0 || from > std::numeric_limits<To>::max()) {
-            return ::android::base::unexpected(::android::BAD_VALUE);
-        }
-    } else if (std::is_signed_v<To> && !std::is_signed_v<From>) {
-        if (from > std::numeric_limits<To>::max()) {
-            return ::android::base::unexpected(::android::BAD_VALUE);
-        }
-    } else {
-        if (from < std::numeric_limits<To>::min() || from > std::numeric_limits<To>::max()) {
-            return ::android::base::unexpected(::android::BAD_VALUE);
-        }
-    }
-    return static_cast<To>(from);
-}
-
-/**
- * A generic template to safely cast between types, that are intended to be the same size, but
- * interpreted differently.
- */
-template<typename To, typename From>
-ConversionResult<To> convertReinterpret(From from) {
-    static_assert(sizeof(From) == sizeof(To));
-    return static_cast<To>(from);
-}
-
-/**
- * A generic template that helps convert containers of convertible types, using iterators.
- */
-template<typename InputIterator, typename OutputIterator, typename Func>
-::android::status_t convertRange(InputIterator start,
-                      InputIterator end,
-                      OutputIterator out,
-                      const Func& itemConversion) {
-    for (InputIterator iter = start; iter != end; ++iter, ++out) {
-        *out = VALUE_OR_RETURN_STATUS(itemConversion(*iter));
-    }
-    return ::android::OK;
-}
-
-/**
- * A generic template that helps convert containers of convertible types, using iterators.
- * Uses a limit as maximum conversion items.
- */
-template<typename InputIterator, typename OutputIterator, typename Func>
-::android::status_t convertRangeWithLimit(InputIterator start,
-                      InputIterator end,
-                      OutputIterator out,
-                      const Func& itemConversion,
-                      const size_t limit) {
-    InputIterator last = end;
-    if (end - start > limit) {
-        last = start + limit;
-    }
-    for (InputIterator iter = start; (iter != last); ++iter, ++out) {
-        *out = VALUE_OR_RETURN_STATUS(itemConversion(*iter));
-    }
-    return ::android::OK;
-}
-
-/**
- * A generic template that helps convert containers of convertible types.
- */
-template<typename OutputContainer, typename InputContainer, typename Func>
-ConversionResult<OutputContainer>
-convertContainer(const InputContainer& input, const Func& itemConversion) {
-    OutputContainer output;
-    auto ins = std::inserter(output, output.begin());
-    for (const auto& item : input) {
-        *ins = VALUE_OR_RETURN(itemConversion(item));
-    }
-    return output;
-}
-
-/**
- * A generic template that helps convert containers of convertible types
- * using an item conversion function with an additional parameter.
- */
-template<typename OutputContainer, typename InputContainer, typename Func, typename Parameter>
-ConversionResult<OutputContainer>
-convertContainer(const InputContainer& input, const Func& itemConversion, const Parameter& param) {
-    OutputContainer output;
-    auto ins = std::inserter(output, output.begin());
-    for (const auto& item : input) {
-        *ins = VALUE_OR_RETURN(itemConversion(item, param));
-    }
-    return output;
-}
-
-/**
- * A generic template that helps to "zip" two input containers of the same size
- * into a single vector of converted types. The conversion function must
- * thus accept two arguments.
- */
-template<typename OutputContainer, typename InputContainer1,
-        typename InputContainer2, typename Func>
-ConversionResult<OutputContainer>
-convertContainers(const InputContainer1& input1, const InputContainer2& input2,
-        const Func& itemConversion) {
-    auto iter2 = input2.begin();
-    OutputContainer output;
-    auto ins = std::inserter(output, output.begin());
-    for (const auto& item1 : input1) {
-        RETURN_IF_ERROR(iter2 != input2.end() ? ::android::OK : ::android::BAD_VALUE);
-        *ins = VALUE_OR_RETURN(itemConversion(item1, *iter2++));
-    }
-    return output;
-}
-
-/**
- * A generic template that helps to "unzip" a per-element conversion into
- * a pair of elements into a pair of containers. The conversion function
- * must emit a pair of elements.
- */
-template<typename OutputContainer1, typename OutputContainer2,
-        typename InputContainer, typename Func>
-ConversionResult<std::pair<OutputContainer1, OutputContainer2>>
-convertContainerSplit(const InputContainer& input, const Func& itemConversion) {
-    OutputContainer1 output1;
-    OutputContainer2 output2;
-    auto ins1 = std::inserter(output1, output1.begin());
-    auto ins2 = std::inserter(output2, output2.begin());
-    for (const auto& item : input) {
-        auto out_pair = VALUE_OR_RETURN(itemConversion(item));
-        *ins1 = out_pair.first;
-        *ins2 = out_pair.second;
-    }
-    return std::make_pair(output1, output2);
-}
-
-////////////////////////////////////////////////////////////////////////////////////////////////////
-// The code below establishes:
-// IntegralTypeOf<T>, which works for either integral types (in which case it evaluates to T), or
-// enum types (in which case it evaluates to std::underlying_type_T<T>).
-
-template<typename T, typename = std::enable_if_t<std::is_integral_v<T> || std::is_enum_v<T>>>
-struct IntegralTypeOfStruct {
-    using Type = T;
-};
-
-template<typename T>
-struct IntegralTypeOfStruct<T, std::enable_if_t<std::is_enum_v<T>>> {
-    using Type = std::underlying_type_t<T>;
-};
-
-template<typename T>
-using IntegralTypeOf = typename IntegralTypeOfStruct<T>::Type;
-
-////////////////////////////////////////////////////////////////////////////////////////////////////
-// Utilities for handling bitmasks.
-
-template<typename Enum>
-Enum indexToEnum_index(int index) {
-    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
-    return static_cast<Enum>(index);
-}
-
-template<typename Enum>
-Enum indexToEnum_bitmask(int index) {
-    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
-    return static_cast<Enum>(1 << index);
-}
-
-template<typename Mask, typename Enum>
-Mask enumToMask_bitmask(Enum e) {
-    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
-    static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
-    return static_cast<Mask>(e);
-}
-
-template<typename Mask, typename Enum>
-Mask enumToMask_index(Enum e) {
-    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
-    static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
-    return static_cast<Mask>(static_cast<std::make_unsigned_t<IntegralTypeOf<Mask>>>(1)
-            << static_cast<int>(e));
-}
-
-template<typename DestMask, typename SrcMask, typename DestEnum, typename SrcEnum>
-ConversionResult<DestMask> convertBitmask(
-        SrcMask src, const std::function<ConversionResult<DestEnum>(SrcEnum)>& enumConversion,
-        const std::function<SrcEnum(int)>& srcIndexToEnum,
-        const std::function<DestMask(DestEnum)>& destEnumToMask) {
-    using UnsignedDestMask = std::make_unsigned_t<IntegralTypeOf<DestMask>>;
-    using UnsignedSrcMask = std::make_unsigned_t<IntegralTypeOf<SrcMask>>;
-
-    UnsignedDestMask dest = static_cast<UnsignedDestMask>(0);
-    UnsignedSrcMask usrc = static_cast<UnsignedSrcMask>(src);
-
-    int srcBitIndex = 0;
-    while (usrc != 0) {
-        if (usrc & 1) {
-            SrcEnum srcEnum = srcIndexToEnum(srcBitIndex);
-            DestEnum destEnum = VALUE_OR_RETURN(enumConversion(srcEnum));
-            DestMask destMask = destEnumToMask(destEnum);
-            dest |= destMask;
-        }
-        ++srcBitIndex;
-        usrc >>= 1;
-    }
-    return static_cast<DestMask>(dest);
-}
-
-template<typename Mask, typename Enum>
-bool bitmaskIsSet(Mask mask, Enum index) {
-    return (mask & enumToMask_index<Mask, Enum>(index)) != 0;
-}
-
-////////////////////////////////////////////////////////////////////////////////////////////////////
-// Utilities for working with AIDL unions.
-// UNION_GET(obj, fieldname) returns a ConversionResult<T> containing either the strongly-typed
-//   value of the respective field, or ::android::BAD_VALUE if the union is not set to the requested
-//   field.
-// UNION_SET(obj, fieldname, value) sets the requested field to the given value.
-
-template<typename T, typename T::Tag tag>
-using UnionFieldType = std::decay_t<decltype(std::declval<T>().template get<tag>())>;
-
-template<typename T, typename T::Tag tag>
-ConversionResult<UnionFieldType<T, tag>> unionGetField(const T& u) {
-    if (u.getTag() != tag) {
-        return ::android::base::unexpected(::android::BAD_VALUE);
-    }
-    return u.template get<tag>();
-}
-
-#define UNION_GET(u, field) \
-    unionGetField<std::decay_t<decltype(u)>, std::decay_t<decltype(u)>::Tag::field>(u)
-
-#define UNION_SET(u, field, value) \
-    (u).set<std::decay_t<decltype(u)>::Tag::field>(value)
-
-#define UNION_MAKE(u, field, value) u::make<u::Tag::field>(value)
-
-namespace aidl_utils {
-
-/**
- * Return true if the value is valid for the AIDL enumeration.
- */
-template <typename T>
-bool isValidEnum(T value) {
-#if defined(BACKEND_NDK)
-    constexpr ndk::enum_range<T> er{};
-#else
-    constexpr ::android::enum_range<T> er{};
-#endif
-    return std::find(er.begin(), er.end(), value) != er.end();
-}
-
-// T is a "container" of enum binder types with a toString().
-template <typename T>
-std::string enumsToString(const T& t) {
-    std::string s;
-    for (const auto item : t) {
-        if (s.empty()) {
-            s = toString(item);
-        } else {
-            s.append("|").append(toString(item));
-        }
-    }
-    return s;
-}
-
-/**
- * Return the equivalent Android ::android::status_t from a binder exception code.
- *
- * Generally one should use statusTFromBinderStatus() instead.
- *
- * Exception codes can be generated from a remote Java service exception, translate
- * them for use on the Native side.
- *
- * Note: for EX_TRANSACTION_FAILED and EX_SERVICE_SPECIFIC a more detailed error code
- * can be found from transactionError() or serviceSpecificErrorCode().
- */
-static inline ::android::status_t statusTFromExceptionCode(int32_t exceptionCode) {
-    using namespace ::android::binder;
-    switch (exceptionCode) {
-        case Status::EX_NONE:
-            return ::android::OK;
-        case Status::EX_SECURITY:  // Java SecurityException, rethrows locally in Java
-            return ::android::PERMISSION_DENIED;
-        case Status::EX_BAD_PARCELABLE:  // Java BadParcelableException, rethrows in Java
-        case Status::EX_ILLEGAL_ARGUMENT:  // Java IllegalArgumentException, rethrows in Java
-        case Status::EX_NULL_POINTER:  // Java NullPointerException, rethrows in Java
-            return ::android::BAD_VALUE;
-        case Status::EX_ILLEGAL_STATE:  // Java IllegalStateException, rethrows in Java
-        case Status::EX_UNSUPPORTED_OPERATION:  // Java UnsupportedOperationException, rethrows
-            return ::android::INVALID_OPERATION;
-        case Status::EX_HAS_REPLY_HEADER: // Native strictmode violation
-        case Status::EX_PARCELABLE:  // Java bootclass loader (not standard exception), rethrows
-        case Status::EX_NETWORK_MAIN_THREAD:  // Java NetworkOnMainThreadException, rethrows
-        case Status::EX_TRANSACTION_FAILED: // Native - see error code
-        case Status::EX_SERVICE_SPECIFIC:   // Java ServiceSpecificException,
-                                            // rethrows in Java with integer error code
-            return ::android::UNKNOWN_ERROR;
-    }
-    return ::android::UNKNOWN_ERROR;
-}
-
-/**
- * Return the equivalent Android ::android::status_t from a binder status.
- *
- * Used to handle errors from a AIDL method declaration
- *
- * [oneway] void method(type0 param0, ...)
- *
- * or the following (where return_type is not a status_t)
- *
- * return_type method(type0 param0, ...)
- */
-static inline ::android::status_t statusTFromBinderStatus(const ::android::binder::Status &status) {
-    return status.isOk() ? ::android::OK // check ::android::OK,
-        : status.serviceSpecificErrorCode() // service-side error, not standard Java exception
-                                            // (fromServiceSpecificError)
-        ?: status.transactionError() // a native binder transaction error (fromStatusT)
-        ?: statusTFromExceptionCode(status.exceptionCode()); // a service-side error with a
-                                                    // standard Java exception (fromExceptionCode)
-}
-
-#if defined(BACKEND_NDK)
-static inline ::android::status_t statusTFromBinderStatus(const ::ndk::ScopedAStatus &status) {
-    // What we want to do is to 'return statusTFromBinderStatus(status.get()->get())'
-    // However, since the definition of AStatus is not exposed, we have to do the same
-    // via methods of ScopedAStatus:
-    return status.isOk() ? ::android::OK // check ::android::OK,
-        : status.getServiceSpecificError() // service-side error, not standard Java exception
-                                           // (fromServiceSpecificError)
-        ?: status.getStatus() // a native binder transaction error (fromStatusT)
-        ?: statusTFromExceptionCode(status.getExceptionCode()); // a service-side error with a
-                                                     // standard Java exception (fromExceptionCode)
-}
-#endif
-
-/**
- * Return a binder::Status from native service status.
- *
- * This is used for methods not returning an explicit status_t,
- * where Java callers expect an exception, not an integer return value.
- */
-static inline ::android::binder::Status binderStatusFromStatusT(
-        ::android::status_t status, const char *optionalMessage = nullptr) {
-    const char * const emptyIfNull = optionalMessage == nullptr ? "" : optionalMessage;
-    // From binder::Status instructions:
-    //  Prefer a generic exception code when possible, then a service specific
-    //  code, and finally a ::android::status_t for low level failures or legacy support.
-    //  Exception codes and service specific errors map to nicer exceptions for
-    //  Java clients.
-
-    using namespace ::android::binder;
-    switch (status) {
-        case ::android::OK:
-            return Status::ok();
-        case ::android::PERMISSION_DENIED: // throw SecurityException on Java side
-            return Status::fromExceptionCode(Status::EX_SECURITY, emptyIfNull);
-        case ::android::BAD_VALUE: // throw IllegalArgumentException on Java side
-            return Status::fromExceptionCode(Status::EX_ILLEGAL_ARGUMENT, emptyIfNull);
-        case ::android::INVALID_OPERATION: // throw IllegalStateException on Java side
-            return Status::fromExceptionCode(Status::EX_ILLEGAL_STATE, emptyIfNull);
-    }
-
-    // A service specific error will not show on status.transactionError() so
-    // be sure to use statusTFromBinderStatus() for reliable error handling.
-
-    // throw a ServiceSpecificException.
-    return Status::fromServiceSpecificError(status, emptyIfNull);
-}
-
-} // namespace aidl_utils
-
 }  // namespace android
 
-#if defined(BACKEND_NDK)
-}  // namespace aidl
+// Include 'AidlConversionUtil-impl.h' once if 'BACKEND_NDK' is defined,
+// or if no 'BACKEND_*' macro is defined (C++ backend). Include it twice if
+// 'BACKEND_CPP_NDK' is defined: once with 'BACKEND_NDK_IMPL' defined, once without.
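+// For example, the NDK conversion tests added in this change build with '-DBACKEND_NDK',
+// while a module bridging both backends would build with '-DBACKEND_CPP_NDK'.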
+
+#if defined(BACKEND_CPP_NDK) || defined(BACKEND_NDK)
+#define BACKEND_NDK_IMPL
+#include <media/AidlConversionUtil-impl.h>
+#undef BACKEND_NDK_IMPL
+#endif
+
+#if defined(BACKEND_CPP_NDK) || !defined(BACKEND_NDK)
+#include <media/AidlConversionUtil-impl.h>
 #endif
diff --git a/media/audioaidlconversion/tests/Android.bp b/media/audioaidlconversion/tests/Android.bp
new file mode 100644
index 0000000..de7c8a2
--- /dev/null
+++ b/media/audioaidlconversion/tests/Android.bp
@@ -0,0 +1,46 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "libaudio_aidl_conversion_tests_defaults",
+    test_suites: ["device-tests"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
+
+cc_test {
+    name: "audio_aidl_ndk_conversion_tests",
+
+    defaults: [
+        "latest_android_media_audio_common_types_ndk_static",
+        "latest_android_hardware_audio_common_ndk_static",
+        "libaudio_aidl_conversion_tests_defaults",
+    ],
+    srcs: ["audio_aidl_ndk_conversion_tests.cpp"],
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+    static_libs: [
+        "libaudio_aidl_conversion_common_ndk",
+    ],
+    cflags: [
+        "-DBACKEND_NDK",
+    ],
+}
diff --git a/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp b/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
new file mode 100644
index 0000000..c505e60
--- /dev/null
+++ b/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+#include <type_traits>
+
+#include <gtest/gtest.h>
+
+#include <media/AidlConversionNdk.h>
+
+namespace {
+template<typename> struct mf_traits {};
+template<class T, class U> struct mf_traits<U T::*> {
+    using member_type = U;
+};
+}  // namespace
+
+// Provide value printers for types generated from AIDL
+// They need to be in the same namespace as the types we intend to print
+namespace aidl::android::hardware::audio::common {
+    template <typename P>
+    std::enable_if_t<std::is_function_v<typename mf_traits<decltype(&P::toString)>::member_type>,
+            std::ostream&> operator<<(std::ostream& os, const P& p) {
+        return os << p.toString();
+    }
+    template <typename E>
+    std::enable_if_t<std::is_enum_v<E>, std::ostream&> operator<<(std::ostream& os, const E& e) {
+        return os << toString(e);
+    }
+}  // namespace aidl::android::hardware::audio::common
+
+using aidl::android::hardware::audio::common::PlaybackTrackMetadata;
+using aidl::android::hardware::audio::common::RecordTrackMetadata;
+using aidl::android::media::audio::common::AudioSource;
+using aidl::android::media::audio::common::AudioUsage;
+using namespace aidl::android;   // for conversion functions
+
+TEST(AudioPlaybackTrackMetadata, Aidl2Legacy2Aidl) {
+    const PlaybackTrackMetadata initial{ .usage = AudioUsage::UNKNOWN };
+    auto conv = aidl2legacy_PlaybackTrackMetadata_playback_track_metadata_v7(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+
+TEST(AudioPlaybackTrackMetadata, NonVendorTags) {
+    PlaybackTrackMetadata initial{ .usage = AudioUsage::UNKNOWN };
+    initial.tags.emplace_back("random string");  // Must be filtered out.
+    initial.tags.emplace_back("VX_GOOGLE_42");
+    auto conv = aidl2legacy_PlaybackTrackMetadata_playback_track_metadata_v7(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    ASSERT_EQ(1, convBack.value().tags.size());
+    EXPECT_EQ(initial.tags[1], convBack.value().tags[0]);
+}
+
+TEST(AudioRecordTrackMetadata, Aidl2Legacy2Aidl) {
+    const RecordTrackMetadata initial{ .source = AudioSource::DEFAULT };
+    auto conv = aidl2legacy_RecordTrackMetadata_record_track_metadata_v7(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+
+TEST(AudioRecordTrackMetadata, NonVendorTags) {
+    RecordTrackMetadata initial{ .source = AudioSource::DEFAULT };
+    initial.tags.emplace_back("random string");  // Must be filtered out.
+    initial.tags.emplace_back("VX_GOOGLE_42");
+    auto conv = aidl2legacy_RecordTrackMetadata_record_track_metadata_v7(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    ASSERT_EQ(1, convBack.value().tags.size());
+    EXPECT_EQ(initial.tags[1], convBack.value().tags[0]);
+}
diff --git a/media/codec2/TEST_MAPPING b/media/codec2/TEST_MAPPING
index 90bb054..8a894f3 100644
--- a/media/codec2/TEST_MAPPING
+++ b/media/codec2/TEST_MAPPING
@@ -8,17 +8,6 @@
   ],
   "presubmit-large": [
     {
-      "name": "CtsMediaMiscTestCases",
-      "options": [
-        {
-          "include-annotation": "android.platform.test.annotations.Presubmit"
-        },
-        {
-          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
-        }
-      ]
-    },
-    {
       "name": "CtsMediaAudioTestCases",
       "options": [
         {
@@ -35,50 +24,6 @@
           "exclude-filter": "android.media.audio.cts.AudioRecordTest"
         }
       ]
-    },
-    {
-      "name": "CtsMediaDecoderTestCases",
-      "options": [
-        {
-          "include-annotation": "android.platform.test.annotations.Presubmit"
-        },
-        {
-          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
-        }
-      ]
-    },
-    {
-      "name": "CtsMediaEncoderTestCases",
-      "options": [
-        {
-          "include-annotation": "android.platform.test.annotations.Presubmit"
-        },
-        {
-          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
-        }
-      ]
-    },
-    {
-      "name": "CtsMediaCodecTestCases",
-      "options": [
-        {
-          "include-annotation": "android.platform.test.annotations.Presubmit"
-        },
-        {
-          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
-        }
-      ]
-    },
-    {
-      "name": "CtsMediaPlayerTestCases",
-      "options": [
-        {
-          "include-annotation": "android.platform.test.annotations.Presubmit"
-        },
-        {
-          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
-        }
-      ]
     }
   ]
 }
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 6ff3dbc..417b261 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -2503,7 +2503,8 @@
  * Note: This parameter allows a decoder to ignore the video peek machinery and
  * to revert to its preferred behavior.
  */
-typedef C2StreamParam<C2Tuning, C2EasyEnum<C2PlatformConfig::tunnel_peek_mode_t>,
+typedef C2StreamParam<C2Tuning,
+        C2SimpleValueStruct<C2EasyEnum<C2PlatformConfig::tunnel_peek_mode_t>>,
         kParamIndexTunnelPeekMode> C2StreamTunnelPeekModeTuning;
 constexpr char C2_PARAMKEY_TUNNEL_PEEK_MODE[] =
         "output.tunnel-peek-mode";
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 0acf7d7..9359e29 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -1582,6 +1582,10 @@
     return mOutputBufferQueue->outputBuffer(block, input, output);
 }
 
+void Codec2Client::Component::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
+    mOutputBufferQueue->pollForRenderedFrames(delta);
+}
+
 void Codec2Client::Component::setOutputSurfaceMaxDequeueCount(
         int maxDequeueCount) {
     mOutputBufferQueue->updateMaxDequeueBufferCount(maxDequeueCount);
diff --git a/media/codec2/hidl/client/include/codec2/hidl/client.h b/media/codec2/hidl/client/include/codec2/hidl/client.h
index 49d9b28..2fdca29 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/client.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/client.h
@@ -23,6 +23,7 @@
 #include <C2Param.h>
 #include <C2.h>
 
+#include <gui/FrameTimestamps.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <hidl/HidlSupport.h>
 #include <utils/StrongPointer.h>
@@ -408,6 +409,9 @@
             const QueueBufferInput& input,
             QueueBufferOutput* output);
 
+    // Retrieve frame event history from the output surface.
+    void pollForRenderedFrames(FrameEventHistoryDelta* delta);
+
     // Set max dequeue count for output surface.
     void setOutputSurfaceMaxDequeueCount(int maxDequeueCount);
 
diff --git a/media/codec2/hidl/client/include/codec2/hidl/output.h b/media/codec2/hidl/client/include/codec2/hidl/output.h
index a13edf3..35a0224 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/output.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/output.h
@@ -17,6 +17,7 @@
 #ifndef CODEC2_HIDL_V1_0_UTILS_OUTPUT_BUFFER_QUEUE
 #define CODEC2_HIDL_V1_0_UTILS_OUTPUT_BUFFER_QUEUE
 
+#include <gui/FrameTimestamps.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <codec2/hidl/1.0/types.h>
 #include <codec2/hidl/1.2/types.h>
@@ -60,6 +61,9 @@
             const BnGraphicBufferProducer::QueueBufferInput& input,
             BnGraphicBufferProducer::QueueBufferOutput* output);
 
+    // Retrieve frame event history from the output surface.
+    void pollForRenderedFrames(FrameEventHistoryDelta* delta);
+
     // Call holdBufferQueueBlock() on output blocks in the given workList.
     // The OutputBufferQueue will take the ownership of output blocks.
     //
diff --git a/media/codec2/hidl/client/output.cpp b/media/codec2/hidl/client/output.cpp
index f789030..dd10691 100644
--- a/media/codec2/hidl/client/output.cpp
+++ b/media/codec2/hidl/client/output.cpp
@@ -476,6 +476,12 @@
     return OK;
 }
 
+void OutputBufferQueue::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
+    if (mIgbp) {
+        mIgbp->getFrameTimestamps(delta);
+    }
+}
+
 void OutputBufferQueue::holdBufferQueueBlocks(
         const std::list<std::unique_ptr<C2Work>>& workList) {
     forEachBlock(workList,
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 4bf8dce..fc82426 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -899,7 +899,7 @@
     }
 
     // TODO: revisit this after C2Fence implementation.
-    android::IGraphicBufferProducer::QueueBufferInput qbi(
+    IGraphicBufferProducer::QueueBufferInput qbi(
             timestampNs,
             false, // droppable
             dataSpace,
@@ -963,9 +963,9 @@
     }
     SetMetadataToGralloc4Handle(dataSpace, hdrStaticInfo, hdrDynamicInfo, block.handle());
 
-    // we don't have dirty regions
-    qbi.setSurfaceDamage(Region::INVALID_REGION);
-    android::IGraphicBufferProducer::QueueBufferOutput qbo;
+    qbi.setSurfaceDamage(Region::INVALID_REGION); // we don't have dirty regions
+    qbi.getFrameTimestamps = true; // we need to know when a frame is rendered
+    IGraphicBufferProducer::QueueBufferOutput qbo;
     status_t result = mComponent->queueToOutputSurface(block, qbi, &qbo);
     if (result != OK) {
         ALOGI("[%s] queueBuffer failed: %d", mName, result);
@@ -984,10 +984,107 @@
     int64_t mediaTimeUs = 0;
     (void)buffer->meta()->findInt64("timeUs", &mediaTimeUs);
     mCCodecCallback->onOutputFramesRendered(mediaTimeUs, timestampNs);
+    trackReleasedFrame(qbo, mediaTimeUs, timestampNs);
+    processRenderedFrames(qbo.frameTimestamps);
 
     return OK;
 }
 
+void CCodecBufferChannel::initializeFrameTrackingFor(ANativeWindow * window) {
+    int hasPresentFenceTimes = 0;
+    window->query(window, NATIVE_WINDOW_FRAME_TIMESTAMPS_SUPPORTS_PRESENT, &hasPresentFenceTimes);
+    mHasPresentFenceTimes = hasPresentFenceTimes == 1;
+    if (!mHasPresentFenceTimes) {
+        ALOGI("Using latch times for frame rendered signals - present fences not supported");
+    }
+    mTrackedFrames.clear();
+}
+
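+// Remembers a frame that was just queued to the output surface so that processRenderedFrames()
+// can later match it against the surface's frame event history and report its actual render
+// time through onOutputFramesRendered().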
+void CCodecBufferChannel::trackReleasedFrame(const IGraphicBufferProducer::QueueBufferOutput& qbo,
+                                             int64_t mediaTimeUs, int64_t desiredRenderTimeNs) {
+    // If the render time is earlier than now, then we're suggesting it should be rendered ASAP,
+    // so track the frame as if the desired render time is now.
+    int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
+    if (desiredRenderTimeNs < nowNs) {
+        desiredRenderTimeNs = nowNs;
+    }
+    // We've just released a frame to the surface, so keep track of it and later check to see if it
+    // is actually rendered.
+    TrackedFrame frame;
+    frame.number = qbo.nextFrameNumber - 1;
+    frame.mediaTimeUs = mediaTimeUs;
+    frame.desiredRenderTimeNs = desiredRenderTimeNs;
+    frame.latchTime = -1;
+    frame.presentFence = nullptr;
+    mTrackedFrames.push_back(frame);
+}
+
+void CCodecBufferChannel::processRenderedFrames(const FrameEventHistoryDelta& deltas) {
+    // Grab the latch times and present fences from the frame event deltas
+    for (const auto& delta : deltas) {
+        for (auto& frame : mTrackedFrames) {
+            if (delta.getFrameNumber() == frame.number) {
+                delta.getLatchTime(&frame.latchTime);
+                delta.getDisplayPresentFence(&frame.presentFence);
+            }
+        }
+    }
+
+    // Scan all frames and check to see if the frames that SHOULD have been rendered by now, have,
+    // in fact, been rendered.
+    int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
+    while (!mTrackedFrames.empty()) {
+        TrackedFrame & frame = mTrackedFrames.front();
+        // Frames that should have been rendered at least 100ms in the past are checked
+        if (frame.desiredRenderTimeNs > nowNs - 100*1000*1000LL) {
+            break;
+        }
+
+        // If we don't have a render time by now, then consider the frame as dropped
+        int64_t renderTimeNs = getRenderTimeNs(frame);
+        if (renderTimeNs != -1) {
+            mCCodecCallback->onOutputFramesRendered(frame.mediaTimeUs, renderTimeNs);
+        }
+        mTrackedFrames.pop_front();
+    }
+}
+
+int64_t CCodecBufferChannel::getRenderTimeNs(const TrackedFrame& frame) {
+    // If the device doesn't have accurate present fence times, then use the latch time as a proxy
+    if (!mHasPresentFenceTimes) {
+        if (frame.latchTime == -1) {
+            ALOGD("no latch time for frame %d", (int) frame.number);
+            return -1;
+        }
+        return frame.latchTime;
+    }
+
+    if (frame.presentFence == nullptr) {
+        ALOGW("no present fence for frame %d", (int) frame.number);
+        return -1;
+    }
+
+    nsecs_t actualRenderTimeNs = frame.presentFence->getSignalTime();
+
+    if (actualRenderTimeNs == Fence::SIGNAL_TIME_INVALID) {
+        ALOGW("invalid signal time for frame %d", (int) frame.number);
+        return -1;
+    }
+
+    if (actualRenderTimeNs == Fence::SIGNAL_TIME_PENDING) {
+        ALOGD("present fence has not fired for frame %d", (int) frame.number);
+        return -1;
+    }
+
+    return actualRenderTimeNs;
+}
+
+void CCodecBufferChannel::pollForRenderedBuffers() {
+    FrameEventHistoryDelta delta;
+    mComponent->pollForRenderedFrames(&delta);
+    processRenderedFrames(delta);
+}
+
 status_t CCodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
     ALOGV("[%s] discardBuffer: %p", mName, buffer.get());
     bool released = false;
@@ -1604,6 +1701,8 @@
         Mutexed<Output>::Locked output(mOutput);
         output->buffers.reset();
     }
+    // reset the frames that are being tracked for onFrameRendered callbacks
+    mTrackedFrames.clear();
 }
 
 void CCodecBufferChannel::release() {
@@ -1672,6 +1771,8 @@
             output->buffers->flushStash();
         }
     }
+    // reset the frames that are being tracked for onFrameRendered callbacks
+    mTrackedFrames.clear();
 }
 
 void CCodecBufferChannel::onWorkDone(
@@ -2140,7 +2241,7 @@
         output->surface = newSurface;
         output->generation = generation;
     }
-
+    initializeFrameTrackingFor(static_cast<ANativeWindow *>(newSurface.get()));
     return OK;
 }
 
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 61fb06f..73299d7 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -18,6 +18,7 @@
 
 #define CCODEC_BUFFER_CHANNEL_H_
 
+#include <deque>
 #include <map>
 #include <memory>
 #include <vector>
@@ -88,6 +89,7 @@
             const sp<MediaCodecBuffer> &buffer) override;
     virtual status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
+    virtual void pollForRenderedBuffers() override;
     virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
     virtual void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
     virtual void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
@@ -263,6 +265,14 @@
         bool mRunning;
     };
 
+    struct TrackedFrame {
+        uint64_t number;
+        int64_t mediaTimeUs;
+        int64_t desiredRenderTimeNs;
+        nsecs_t latchTime;
+        sp<Fence> presentFence;
+    };
+
     void feedInputBufferIfAvailable();
     void feedInputBufferIfAvailableInternal();
     status_t queueInputBufferInternal(sp<MediaCodecBuffer> buffer,
@@ -275,6 +285,12 @@
     void ensureDecryptDestination(size_t size);
     int32_t getHeapSeqNum(const sp<hardware::HidlMemory> &memory);
 
+    void initializeFrameTrackingFor(ANativeWindow * window);
+    void trackReleasedFrame(const IGraphicBufferProducer::QueueBufferOutput& qbo,
+                            int64_t mediaTimeUs, int64_t desiredRenderTimeNs);
+    void processRenderedFrames(const FrameEventHistoryDelta& delta);
+    int64_t getRenderTimeNs(const TrackedFrame& frame);
+
     QueueSync mSync;
     sp<MemoryDealer> mDealer;
     sp<IMemory> mDecryptDestination;
@@ -316,6 +332,9 @@
 
     sp<MemoryDealer> makeMemoryDealer(size_t heapSize);
 
+    std::deque<TrackedFrame> mTrackedFrames;
+    bool mHasPresentFenceTimes;
+
     struct OutputSurface {
         sp<Surface> surface;
         uint32_t generation;
diff --git a/media/codec2/vndk/internal/C2BlockInternal.h b/media/codec2/vndk/internal/C2BlockInternal.h
index c510fca..6bcad4a 100644
--- a/media/codec2/vndk/internal/C2BlockInternal.h
+++ b/media/codec2/vndk/internal/C2BlockInternal.h
@@ -238,7 +238,7 @@
      *   - Local migration on blockpool side will be done automatically by
      *     blockpool.
      *   - Before attachBuffer(), BeginAttachBlockToBufferQueue() should be called
-     *     to test eligiblity.
+     *     to test eligibility.
      *   - After attachBuffer() is called, EndAttachBlockToBufferQueue() should
      *     be called. This will set "held" status to true. If it returned
      *     false, cancelBuffer() should be called.
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 270bbf4..f2cd585 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -432,6 +432,10 @@
         if (fence) {
             static constexpr int kFenceWaitTimeMs = 10;
 
+            if (bufferNeedsReallocation) {
+                mBuffers[slot].clear();
+            }
+
             status_t status = fence->wait(kFenceWaitTimeMs);
             if (status == -ETIME) {
                 // fence is not signalled yet.
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
index 99bccac..bf4ca32 100644
--- a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -64,6 +64,11 @@
     }
 
     HandleSyncMem *o = static_cast<HandleSyncMem*>(handle);
+    if (o->size() < sizeof(C2SyncVariables)) {
+        android_errorWriteLog(0x534e4554, "240140929");
+        return nullptr;
+    }
+
     void *ptr = mmap(NULL, o->size(), PROT_READ | PROT_WRITE, MAP_SHARED, o->memFd(), 0);
 
     if (ptr == MAP_FAILED) {
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 6fff568..4affaed 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -108,7 +108,7 @@
     aaudio_result_t    outputError = AAUDIO_OK;
 
     GlitchAnalyzer     sineAnalyzer;
-    PulseLatencyAnalyzer echoAnalyzer;
+    WhiteNoiseLatencyAnalyzer echoAnalyzer;
     AudioRecording     audioRecording;
     LoopbackProcessor *loopbackProcessor;
 
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
index 3a5ba78..b32667e 100644
--- a/media/libaudioclient/AidlConversion.cpp
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -724,11 +724,6 @@
             aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(aidl));
     legacy.hw_module = VALUE_OR_RETURN(
             aidl2legacy_int32_t_audio_module_handle_t(aidlSys.hwModule));
-    legacy.encapsulation_modes = VALUE_OR_RETURN(
-            aidl2legacy_AudioEncapsulationMode_mask(aidlSys.encapsulationModes));
-    legacy.encapsulation_metadata_types = VALUE_OR_RETURN(
-            aidl2legacy_AudioEncapsulationMetadataType_mask(
-                    aidlSys.encapsulationMetadataTypes));
     return legacy;
 }
 
@@ -738,10 +733,6 @@
     *aidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(legacy));
     aidlDeviceExt->hwModule = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
-    aidlDeviceExt->encapsulationModes = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioEncapsulationMode_mask(legacy.encapsulation_modes));
-    aidlDeviceExt->encapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioEncapsulationMetadataType_mask(legacy.encapsulation_metadata_types));
     return OK;
 }
 
diff --git a/media/libaudioclient/aidl/android/media/AudioPortDeviceExtSys.aidl b/media/libaudioclient/aidl/android/media/AudioPortDeviceExtSys.aidl
index 0f5a9b6..24ec230 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortDeviceExtSys.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortDeviceExtSys.aidl
@@ -22,8 +22,4 @@
 parcelable AudioPortDeviceExtSys {
     /** Module the device is attached to. Interpreted as audio_module_handle_t. */
     int hwModule;
-    /** Bitmask, indexed by AudioEncapsulationMode. */
-    int encapsulationModes;
-    /** Bitmask, indexed by AudioEncapsulationMetadataType. */
-    int encapsulationMetadataTypes;
 }
diff --git a/media/libaudioclient/aidl/android/media/ISpatializer.aidl b/media/libaudioclient/aidl/android/media/ISpatializer.aidl
index a61ad58..250c450 100644
--- a/media/libaudioclient/aidl/android/media/ISpatializer.aidl
+++ b/media/libaudioclient/aidl/android/media/ISpatializer.aidl
@@ -96,17 +96,33 @@
 
     /**
      * Sets the display orientation.
+     *
+     * This is the rotation of the displayed content relative to its natural orientation.
+     *
      * Orientation is expressed in the angle of rotation from the physical "up" side of the screen
      * to the logical "up" side of the content displayed on the screen. Counterclockwise angles, as
      * viewed while facing the screen are positive.
+     *
+     * Note: DisplayManager currently only returns this in increments of 90 degrees,
+     * so the values will be 0, PI/2, PI, 3PI/2.
      */
     void setDisplayOrientation(float physicalToLogicalAngle);
 
     /**
      * Sets the hinge angle for foldable devices.
+     *
+     * Per the hinge angle sensor, this returns a value from 0 to 2PI.
+     * The value of 0 is considered closed, and PI is considered flat open.
      */
     void setHingeAngle(float hingeAngle);
 
+    /**
+     * Sets whether a foldable is considered "folded" or not.
+     *
+     * The fold state may affect which physical screen is active for display.
+     */
+    void setFoldState(boolean folded);
+
     /** Reports the list of supported spatialization modes (see SpatializationMode.aidl).
      * The list should never be empty if an ISpatializer interface was successfully
      * retrieved with IAudioPolicyService.getSpatializer().
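
Because the orientation arrives in 90-degree steps, a caller of setDisplayOrientation() only needs to map a rotation quadrant to radians. A minimal sketch under that assumption; rotationStepToRadians is an illustrative helper, not an AOSP API:

    // PI/2 written out to avoid relying on the non-standard M_PI_2 macro.
    constexpr float kHalfPi = 1.5707963267948966f;

    // Maps a counterclockwise 90-degree rotation step (0..3) to the radian
    // angle documented above: 0, PI/2, PI, 3PI/2.
    float rotationStepToRadians(int step) {
        return kHalfPi * static_cast<float>(step & 3);
    }
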
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index b7a2d60..682f43e 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -214,8 +214,11 @@
         GTEST_SKIP() << "No ports returned by the audio system";
     }
 
+    bool sourceFound = false;
     for (const auto& port : ports) {
         if (port.role != AUDIO_PORT_ROLE_SOURCE || port.type != AUDIO_PORT_TYPE_DEVICE) continue;
+        if (port.ext.device.type != AUDIO_DEVICE_IN_FM_TUNER) continue;
+        sourceFound = true;
         sourcePortConfig = port.active_config;
 
         bool patchFound;
@@ -223,8 +226,9 @@
         // start audio source.
         status_t ret =
                 AudioSystem::startAudioSource(&sourcePortConfig, &attributes, &sourcePortHandle);
-        EXPECT_EQ(OK, ret) << "AudioSystem::startAudioSource for source " << port.ext.device.address
-                           << " failed";
+        EXPECT_EQ(OK, ret) << "AudioSystem::startAudioSource for source "
+                           << audio_device_to_string(port.ext.device.type) << " failed";
+        if (ret != OK) continue;
 
         // verify that patch is established by the source port.
         ASSERT_NO_FATAL_FAILURE(anyPatchContainsInputDevice(port.id, patchFound));
@@ -233,13 +237,17 @@
 
         if (sourcePortHandle != AUDIO_PORT_HANDLE_NONE) {
             ret = AudioSystem::stopAudioSource(sourcePortHandle);
-            EXPECT_EQ(OK, ret) << "AudioSystem::stopAudioSource for handle failed";
+            EXPECT_EQ(OK, ret) << "AudioSystem::stopAudioSource failed for handle "
+                               << sourcePortHandle;
         }
 
         // verify that no source port patch exists.
         ASSERT_NO_FATAL_FAILURE(anyPatchContainsInputDevice(port.id, patchFound));
         EXPECT_EQ(false, patchFound);
     }
+    if (!sourceFound) {
+        GTEST_SKIP() << "No ports suitable for testing";
+    }
 }
 
 TEST_F(AudioSystemTest, CreateAndReleaseAudioPatch) {
diff --git a/media/libaudiofoundation/DeviceDescriptorBase.cpp b/media/libaudiofoundation/DeviceDescriptorBase.cpp
index 4185b5f..9ffc75b 100644
--- a/media/libaudiofoundation/DeviceDescriptorBase.cpp
+++ b/media/libaudiofoundation/DeviceDescriptorBase.cpp
@@ -186,12 +186,12 @@
     deviceExt.encodedFormats = VALUE_OR_RETURN_STATUS(
             convertContainer<std::vector<media::audio::common::AudioFormatDescription>>(
                     mEncodedFormats, legacy2aidl_audio_format_t_AudioFormatDescription));
+    deviceExt.encapsulationModes = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_AudioEncapsulationMode_mask(mEncapsulationModes));
+    deviceExt.encapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_AudioEncapsulationMetadataType_mask(mEncapsulationMetadataTypes));
     UNION_SET(parcelable->hal.ext, device, deviceExt);
     media::AudioPortDeviceExtSys deviceSys;
-    deviceSys.encapsulationModes = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioEncapsulationMode_mask(mEncapsulationModes));
-    deviceSys.encapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioEncapsulationMetadataType_mask(mEncapsulationMetadataTypes));
     UNION_SET(parcelable->sys.ext, device, deviceSys);
     return OK;
 }
@@ -214,12 +214,12 @@
     mEncodedFormats = VALUE_OR_RETURN_STATUS(
             convertContainer<FormatVector>(deviceExt.encodedFormats,
                     aidl2legacy_AudioFormatDescription_audio_format_t));
+    mEncapsulationModes = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioEncapsulationMode_mask(deviceExt.encapsulationModes));
+    mEncapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioEncapsulationMetadataType_mask(deviceExt.encapsulationMetadataTypes));
     media::AudioPortDeviceExtSys deviceSys = VALUE_OR_RETURN_STATUS(
             UNION_GET(parcelable.sys.ext, device));
-    mEncapsulationModes = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioEncapsulationMode_mask(deviceSys.encapsulationModes));
-    mEncapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioEncapsulationMetadataType_mask(deviceSys.encapsulationMetadataTypes));
     return OK;
 }
 
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index 3f19219..d5f6598 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -256,6 +256,7 @@
         "EffectBufferHalAidl.cpp",
         "EffectHalAidl.cpp",
         "effectsAidlConversion/AidlConversionAec.cpp",
+        "effectsAidlConversion/AidlConversionAgc1.cpp",
         "effectsAidlConversion/AidlConversionAgc2.cpp",
         "effectsAidlConversion/AidlConversionBassBoost.cpp",
         "effectsAidlConversion/AidlConversionDownmix.cpp",
@@ -280,6 +281,7 @@
     ],
     shared_libs: [
         "libbinder_ndk",
+        "libaudio_aidl_conversion_common_cpp",
         "libaudio_aidl_conversion_common_ndk",
         "libaudio_aidl_conversion_effect_ndk",
         "libaudioaidlcommon",
@@ -292,6 +294,6 @@
         "-Wextra",
         "-Werror",
         "-Wthread-safety",
-        "-DBACKEND_NDK",
+        "-DBACKEND_CPP_NDK",
     ],
 }
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 32ebe36..8b88f24 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -45,18 +45,22 @@
 using aidl::android::media::audio::common::AudioPort;
 using aidl::android::media::audio::common::AudioPortConfig;
 using aidl::android::media::audio::common::AudioPortDeviceExt;
+using aidl::android::media::audio::common::AudioPortMixExt;
+using aidl::android::media::audio::common::AudioPortMixExtUseCase;
 using aidl::android::media::audio::common::AudioPortExt;
 using aidl::android::media::audio::common::AudioSource;
 using aidl::android::media::audio::common::Int;
 using aidl::android::media::audio::common::Float;
+using aidl::android::hardware::audio::common::getFrameSizeInBytes;
+using aidl::android::hardware::audio::common::isBitPositionFlagSet;
+using aidl::android::hardware::audio::common::makeBitPositionFlagMask;
+using aidl::android::media::audio::common::MicrophoneDynamicInfo;
+using aidl::android::media::audio::common::MicrophoneInfo;
 using aidl::android::hardware::audio::common::RecordTrackMetadata;
 using aidl::android::hardware::audio::core::AudioPatch;
 using aidl::android::hardware::audio::core::IModule;
 using aidl::android::hardware::audio::core::ITelephony;
 using aidl::android::hardware::audio::core::StreamDescriptor;
-using android::hardware::audio::common::getFrameSizeInBytes;
-using android::hardware::audio::common::isBitPositionFlagSet;
-using android::hardware::audio::common::makeBitPositionFlagMask;
 
 namespace android {
 
@@ -248,13 +252,14 @@
             ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, true /*isInput*/));
     AudioDevice aidlDevice;
     aidlDevice.type.type = AudioDeviceType::IN_DEFAULT;
+    AudioSource aidlSource = AudioSource::DEFAULT;
     AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::input>(0);
     AudioPortConfig mixPortConfig;
     Cleanups cleanups;
     audio_config writableConfig = *config;
     int32_t nominalLatency;
-    RETURN_STATUS_IF_ERROR(prepareToOpenStream(0 /*handle*/, aidlDevice, aidlFlags, &writableConfig,
-                    &cleanups, &aidlConfig, &mixPortConfig, &nominalLatency));
+    RETURN_STATUS_IF_ERROR(prepareToOpenStream(0 /*handle*/, aidlDevice, aidlFlags, aidlSource,
+                    &writableConfig, &cleanups, &aidlConfig, &mixPortConfig, &nominalLatency));
     *size = aidlConfig.frameCount *
             getFrameSizeInBytes(aidlConfig.base.format, aidlConfig.base.channelMask);
     // Do not disarm cleanups to release temporary port configs.
@@ -263,7 +268,7 @@
 
 status_t DeviceHalAidl::prepareToOpenStream(
         int32_t aidlHandle, const AudioDevice& aidlDevice, const AudioIoFlags& aidlFlags,
-        struct audio_config* config,
+        AudioSource aidlSource, struct audio_config* config,
         Cleanups* cleanups, AudioConfig* aidlConfig, AudioPortConfig* mixPortConfig,
         int32_t* nominalLatency) {
     const bool isInput = aidlFlags.getTag() == AudioIoFlags::Tag::input;
@@ -275,7 +280,7 @@
     if (created) {
         cleanups->emplace_front(this, &DeviceHalAidl::resetPortConfig, devicePortConfig.id);
     }
-    RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(*aidlConfig, aidlFlags, aidlHandle,
+    RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(*aidlConfig, aidlFlags, aidlHandle, aidlSource,
                     mixPortConfig, &created));
     if (created) {
         cleanups->emplace_front(this, &DeviceHalAidl::resetPortConfig, mixPortConfig->id);
@@ -441,8 +446,9 @@
     AudioPortConfig mixPortConfig;
     Cleanups cleanups;
     int32_t nominalLatency;
-    RETURN_STATUS_IF_ERROR(prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags, config,
-                    &cleanups, &aidlConfig, &mixPortConfig, &nominalLatency));
+    RETURN_STATUS_IF_ERROR(prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags,
+                    AudioSource::SYS_RESERVED_INVALID /*only needed for input*/,
+                    config, &cleanups, &aidlConfig, &mixPortConfig, &nominalLatency));
     ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamArguments args;
     args.portConfigId = mixPortConfig.id;
     const bool isOffload = isBitPositionFlagSet(
@@ -460,7 +466,7 @@
     args.eventCallback = eventCb;
     ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamReturn ret;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openOutputStream(args, &ret)));
-    StreamContextAidl context(ret.desc);
+    StreamContextAidl context(ret.desc, isOffload);
     if (!context.isValid()) {
         ALOGE("%s: Failed to created a valid stream context from the descriptor: %s",
                 __func__, ret.desc.toString().c_str());
@@ -505,8 +511,8 @@
     AudioPortConfig mixPortConfig;
     Cleanups cleanups;
     int32_t nominalLatency;
-    RETURN_STATUS_IF_ERROR(prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags, config,
-                    &cleanups, &aidlConfig, &mixPortConfig, &nominalLatency));
+    RETURN_STATUS_IF_ERROR(prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags, aidlSource,
+                    config, &cleanups, &aidlConfig, &mixPortConfig, &nominalLatency));
     ::aidl::android::hardware::audio::core::IModule::OpenInputStreamArguments args;
     args.portConfigId = mixPortConfig.id;
     RecordTrackMetadata aidlTrackMetadata{
@@ -520,14 +526,14 @@
     args.bufferSizeFrames = aidlConfig.frameCount;
     ::aidl::android::hardware::audio::core::IModule::OpenInputStreamReturn ret;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openInputStream(args, &ret)));
-    StreamContextAidl context(ret.desc);
+    StreamContextAidl context(ret.desc, false /*isAsynchronous*/);
     if (!context.isValid()) {
         ALOGE("%s: Failed to created a valid stream context from the descriptor: %s",
                 __func__, ret.desc.toString().c_str());
         return NO_INIT;
     }
     *inStream = sp<StreamInHalAidl>::make(*config, std::move(context), nominalLatency,
-            std::move(ret.stream));
+            std::move(ret.stream), this /*micInfoProvider*/);
     cleanups.disarmAll();
     return OK;
 }
@@ -549,8 +555,19 @@
         sources == nullptr || sinks == nullptr || patch == nullptr) {
         return BAD_VALUE;
     }
-    // Note that the patch handle (*patch) is provided by the framework.
-    // In tests it's possible that its value is AUDIO_PATCH_HANDLE_NONE.
+    // When the patch handle (*patch) is AUDIO_PATCH_HANDLE_NONE, it means
+    // the framework wants to create a new patch. The handle has to be generated
+    // by the HAL. Since handles generated this way can only be unique within
+    // a HAL module, the framework generates a globally unique handle, and maps
+    // it to the <HAL module, patch handle> pair.
+    // When the patch handle is set, it means the framework intends to update
+    // an existing patch.
+    //
+    // This matches the behavior of the HAL module, with the only difference being
+    // that the HAL module uses `int32_t` for patch IDs. The following assert ensures
+    // that both the framework and the HAL use the same value for "no ID":
+    static_assert(AUDIO_PATCH_HANDLE_NONE == 0);
+    int32_t halPatchId = static_cast<int32_t>(*patch);
 
     // Upon conversion, mix port configs contain audio configuration, while
     // device port configs contain device address. This data is used to find
@@ -573,17 +590,12 @@
                                 sinks[i], isInput, 0)));
     }
     Cleanups cleanups;
-    auto existingPatchIt = mPatches.end();
-    auto fwkHandlesIt = *patch != AUDIO_PATCH_HANDLE_NONE ?
-            mFwkHandles.find(*patch) : mFwkHandles.end();
+    auto existingPatchIt = halPatchId != 0 ? mPatches.find(halPatchId): mPatches.end();
     AudioPatch aidlPatch;
-    if (fwkHandlesIt != mFwkHandles.end()) {
-        existingPatchIt = mPatches.find(fwkHandlesIt->second);
-        if (existingPatchIt != mPatches.end()) {
-            aidlPatch = existingPatchIt->second;
-            aidlPatch.sourcePortConfigIds.clear();
-            aidlPatch.sinkPortConfigIds.clear();
-        }
+    if (existingPatchIt != mPatches.end()) {
+        aidlPatch = existingPatchIt->second;
+        aidlPatch.sourcePortConfigIds.clear();
+        aidlPatch.sinkPortConfigIds.clear();
     }
     ALOGD("%s: sources: %s, sinks: %s",
             __func__, ::android::internal::ToString(aidlSources).c_str(),
@@ -611,20 +623,8 @@
         bool created = false;
         RETURN_STATUS_IF_ERROR(findOrCreatePatch(aidlPatch, &aidlPatch, &created));
         // Since no cleanup of the patch is needed, 'created' is ignored.
-        if (fwkHandlesIt != mFwkHandles.end()) {
-            fwkHandlesIt->second = aidlPatch.id;
-            // Patch handle (*patch) stays the same.
-        } else {
-            if (*patch == AUDIO_PATCH_HANDLE_NONE) {
-                // This isn't good as the module can't provide a handle which is really unique.
-                // However, this situation should only happen in tests.
-                *patch = aidlPatch.id;
-                LOG_ALWAYS_FATAL_IF(mFwkHandles.count(*patch) > 0,
-                        "%s: patch id %d clashes with another framework patch handle",
-                        __func__, *patch);
-            }
-            mFwkHandles.emplace(*patch, aidlPatch.id);
-        }
+        halPatchId = aidlPatch.id;
+        *patch = static_cast<audio_patch_handle_t>(halPatchId);
     }
     cleanups.disarmAll();
     return OK;
@@ -634,12 +634,18 @@
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mModule) return NO_INIT;
-    auto idMapIt = mFwkHandles.find(patch);
-    if (idMapIt == mFwkHandles.end()) {
+    static_assert(AUDIO_PATCH_HANDLE_NONE == 0);
+    if (patch == AUDIO_PATCH_HANDLE_NONE) {
         return BAD_VALUE;
     }
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->resetAudioPatch(idMapIt->second)));
-    mFwkHandles.erase(idMapIt);
+    int32_t halPatchId = static_cast<int32_t>(patch);
+    auto patchIt = mPatches.find(halPatchId);
+    if (patchIt == mPatches.end()) {
+        ALOGE("%s: patch with id %d not found", __func__, halPatchId);
+        return BAD_VALUE;
+    }
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->resetAudioPatch(halPatchId)));
+    mPatches.erase(patchIt);
     return OK;
 }
 
@@ -662,11 +668,45 @@
     return OK;
 }
 
+MicrophoneInfoProvider::Info const* DeviceHalAidl::getMicrophoneInfo() {
+    if (mMicrophones.status == Microphones::Status::UNKNOWN) {
+        TIME_CHECK();
+        std::vector<MicrophoneInfo> aidlInfo;
+        status_t status = statusTFromBinderStatus(mModule->getMicrophones(&aidlInfo));
+        if (status == OK) {
+            mMicrophones.status = Microphones::Status::QUERIED;
+            mMicrophones.info = std::move(aidlInfo);
+        } else if (status == INVALID_OPERATION) {
+            mMicrophones.status = Microphones::Status::NOT_SUPPORTED;
+        } else {
+            ALOGE("%s: Unexpected status from 'IModule.getMicrophones': %d", __func__, status);
+            return {};
+        }
+    }
+    if (mMicrophones.status == Microphones::Status::QUERIED) {
+        return &mMicrophones.info;
+    }
+    return {};  // NOT_SUPPORTED
+}
+
 status_t DeviceHalAidl::getMicrophones(
-        std::vector<audio_microphone_characteristic_t>* microphones __unused) {
+        std::vector<audio_microphone_characteristic_t>* microphones) {
+    if (!microphones) {
+        return BAD_VALUE;
+    }
     TIME_CHECK();
     if (!mModule) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
+    auto staticInfo = getMicrophoneInfo();
+    if (!staticInfo) return INVALID_OPERATION;
+    std::vector<MicrophoneDynamicInfo> emptyDynamicInfo;
+    emptyDynamicInfo.reserve(staticInfo->size());
+    std::transform(staticInfo->begin(), staticInfo->end(), std::back_inserter(emptyDynamicInfo),
+            [](const auto& info) { return MicrophoneDynamicInfo{ .id = info.id }; });
+    *microphones = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::convertContainers<std::vector<audio_microphone_characteristic_t>>(
+                    *staticInfo, emptyDynamicInfo,
+                    ::aidl::android::aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t)
+    );
     return OK;
 }
 
@@ -713,8 +753,11 @@
 
 error::Result<audio_hw_sync_t> DeviceHalAidl::getHwAvSync() {
     TIME_CHECK();
-    ALOGE("%s not implemented yet", __func__);
-    return base::unexpected(INVALID_OPERATION);
+    if (!mModule) return NO_INIT;
+    int32_t aidlHwAvSync;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->generateHwAvSyncId(&aidlHwAvSync)));
+    return VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_int32_t_audio_hw_sync_t(aidlHwAvSync));
 }
 
 status_t DeviceHalAidl::dump(int fd, const Vector<String16>& args) {
@@ -744,22 +787,27 @@
     return p.ext.get<AudioPortExt::Tag::device>().device == device;
 }
 
-status_t DeviceHalAidl::createPortConfig(const AudioPortConfig& requestedPortConfig,
-        AudioPortConfig* appliedPortConfig) {
+status_t DeviceHalAidl::createPortConfig(
+        const AudioPortConfig& requestedPortConfig, PortConfigs::iterator* result) {
     TIME_CHECK();
+    AudioPortConfig appliedPortConfig;
     bool applied = false;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->setAudioPortConfig(
-                            requestedPortConfig, appliedPortConfig, &applied)));
+                            requestedPortConfig, &appliedPortConfig, &applied)));
     if (!applied) {
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->setAudioPortConfig(
-                                *appliedPortConfig, appliedPortConfig, &applied)));
+                                appliedPortConfig, &appliedPortConfig, &applied)));
         if (!applied) {
             ALOGE("%s: module %s did not apply suggested config %s",
-                    __func__, mInstance.c_str(), appliedPortConfig->toString().c_str());
+                    __func__, mInstance.c_str(), appliedPortConfig.toString().c_str());
             return NO_INIT;
         }
     }
-    mPortConfigs.emplace(appliedPortConfig->id, *appliedPortConfig);
+    auto id = appliedPortConfig.id;
+    auto [it, inserted] = mPortConfigs.emplace(std::move(id), std::move(appliedPortConfig));
+    LOG_ALWAYS_FATAL_IF(!inserted, "%s: port config with id %d already exists",
+            __func__, it->first);
+    *result = it;
     return OK;
 }
 
@@ -806,10 +854,7 @@
         }
         AudioPortConfig requestedPortConfig;
         requestedPortConfig.portId = portsIt->first;
-        AudioPortConfig appliedPortConfig;
-        RETURN_STATUS_IF_ERROR(createPortConfig(requestedPortConfig, &appliedPortConfig));
-        portConfigIt = mPortConfigs.insert(
-                mPortConfigs.end(), std::make_pair(appliedPortConfig.id, appliedPortConfig));
+        RETURN_STATUS_IF_ERROR(createPortConfig(requestedPortConfig, &portConfigIt));
         *created = true;
     } else {
         *created = false;
@@ -820,24 +865,46 @@
 
 status_t DeviceHalAidl::findOrCreatePortConfig(
         const AudioConfig& config, const std::optional<AudioIoFlags>& flags, int32_t ioHandle,
-        AudioPortConfig* portConfig, bool* created) {
+        AudioSource source, AudioPortConfig* portConfig, bool* created) {
+    // These flags get removed one by one in this order when retrying port finding.
+    static const std::vector<AudioInputFlags> kOptionalInputFlags{
+        AudioInputFlags::FAST, AudioInputFlags::RAW };
     auto portConfigIt = findPortConfig(config, flags, ioHandle);
     if (portConfigIt == mPortConfigs.end() && flags.has_value()) {
-        auto portsIt = findPort(config, flags.value());
+        auto optionalInputFlagsIt = kOptionalInputFlags.begin();
+        AudioIoFlags matchFlags = flags.value();
+        auto portsIt = findPort(config, matchFlags);
+        while (portsIt == mPorts.end() && matchFlags.getTag() == AudioIoFlags::Tag::input
+                && optionalInputFlagsIt != kOptionalInputFlags.end()) {
+            if (!isBitPositionFlagSet(
+                            matchFlags.get<AudioIoFlags::Tag::input>(), *optionalInputFlagsIt)) {
+                ++optionalInputFlagsIt;
+                continue;
+            }
+            matchFlags.set<AudioIoFlags::Tag::input>(matchFlags.get<AudioIoFlags::Tag::input>() &
+                    ~makeBitPositionFlagMask(*optionalInputFlagsIt++));
+            portsIt = findPort(config, matchFlags);
+            ALOGI("%s: mix port for config %s, flags %s was not found in the module %s, "
+                    "retried with flags %s", __func__, config.toString().c_str(),
+                    flags.value().toString().c_str(), mInstance.c_str(),
+                    matchFlags.toString().c_str());
+        }
         if (portsIt == mPorts.end()) {
             ALOGE("%s: mix port for config %s, flags %s is not found in the module %s",
-                    __func__, config.toString().c_str(), flags.value().toString().c_str(),
+                    __func__, config.toString().c_str(), matchFlags.toString().c_str(),
                     mInstance.c_str());
             return BAD_VALUE;
         }
         AudioPortConfig requestedPortConfig;
         requestedPortConfig.portId = portsIt->first;
         setPortConfigFromConfig(&requestedPortConfig, config);
-        AudioPortConfig appliedPortConfig;
-        RETURN_STATUS_IF_ERROR(createPortConfig(requestedPortConfig, &appliedPortConfig));
-        appliedPortConfig.ext.get<AudioPortExt::Tag::mix>().handle = ioHandle;
-        portConfigIt = mPortConfigs.insert(
-                mPortConfigs.end(), std::make_pair(appliedPortConfig.id, appliedPortConfig));
+        requestedPortConfig.ext = AudioPortMixExt{ .handle = ioHandle };
+        if (matchFlags.getTag() == AudioIoFlags::Tag::input
+                && source != AudioSource::SYS_RESERVED_INVALID) {
+            requestedPortConfig.ext.get<AudioPortExt::Tag::mix>().usecase =
+                    AudioPortMixExtUseCase::make<AudioPortMixExtUseCase::Tag::source>(source);
+        }
+        RETURN_STATUS_IF_ERROR(createPortConfig(requestedPortConfig, &portConfigIt));
         *created = true;
     } else if (!flags.has_value()) {
         ALOGW("%s: mix port config for %s, handle %d not found in the module %s, "
@@ -864,8 +931,12 @@
         }
         AudioConfig config;
         setConfigFromPortConfig(&config, requestedPortConfig);
+        AudioSource source = requestedPortConfig.ext.get<Tag::mix>().usecase.getTag() ==
+                AudioPortMixExtUseCase::Tag::source ?
+                requestedPortConfig.ext.get<Tag::mix>().usecase.
+                get<AudioPortMixExtUseCase::Tag::source>() : AudioSource::SYS_RESERVED_INVALID;
         return findOrCreatePortConfig(config, requestedPortConfig.flags,
-                requestedPortConfig.ext.get<Tag::mix>().handle, portConfig, created);
+                requestedPortConfig.ext.get<Tag::mix>().handle, source, portConfig, created);
     } else if (requestedPortConfig.ext.getTag() == Tag::device) {
         return findOrCreatePortConfig(
                 requestedPortConfig.ext.get<Tag::device>().device, portConfig, created);
@@ -899,12 +970,10 @@
 
 DeviceHalAidl::Ports::iterator DeviceHalAidl::findPort(
             const AudioConfig& config, const AudioIoFlags& flags) {
-    using Tag = AudioPortExt::Tag;
-    AudioIoFlags matchFlags = flags;
     auto matcher = [&](const auto& pair) {
         const auto& p = pair.second;
-        return p.ext.getTag() == Tag::mix &&
-                p.flags == matchFlags &&
+        return p.ext.getTag() == AudioPortExt::Tag::mix &&
+                p.flags == flags &&
                 std::find_if(p.profiles.begin(), p.profiles.end(),
                         [&](const auto& prof) {
                             return prof.format == config.base.format &&
@@ -913,15 +982,7 @@
                                     std::find(prof.sampleRates.begin(), prof.sampleRates.end(),
                                             config.base.sampleRate) != prof.sampleRates.end();
                         }) != p.profiles.end(); };
-    auto it = std::find_if(mPorts.begin(), mPorts.end(), matcher);
-    if (it == mPorts.end() && flags.getTag() == AudioIoFlags::Tag::input &&
-            isBitPositionFlagSet(flags.get<AudioIoFlags::Tag::input>(), AudioInputFlags::FAST)) {
-        // "Fast" input is not a mandatory flag, try without it.
-        matchFlags.set<AudioIoFlags::Tag::input>(flags.get<AudioIoFlags::Tag::input>() &
-                ~makeBitPositionFlagMask(AudioInputFlags::FAST));
-        it = std::find_if(mPorts.begin(), mPorts.end(), matcher);
-    }
-    return it;
+    return std::find_if(mPorts.begin(), mPorts.end(), matcher);
 }
 
 DeviceHalAidl::PortConfigs::iterator DeviceHalAidl::findPortConfig(const AudioDevice& device) {
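
The retry loop added to findOrCreatePortConfig() relaxes the requested input flags one optional flag at a time (FAST, then RAW) until a mix port matches. A minimal sketch of that idea with the AIDL types abstracted away; relaxFlags and portExistsFor are illustrative names, not part of the HAL API:

    #include <cstdint>
    #include <functional>
    #include <vector>

    // Clears optional flag bits one by one until the predicate reports a match
    // or no optional flags remain; the caller performs the final port lookup.
    uint32_t relaxFlags(uint32_t flags, const std::vector<uint32_t>& optionalMasks,
                        const std::function<bool(uint32_t)>& portExistsFor) {
        for (uint32_t mask : optionalMasks) {
            if (portExistsFor(flags)) break;    // a port already matches, stop relaxing
            if ((flags & mask) == 0) continue;  // flag was not requested, nothing to drop
            flags &= ~mask;                     // drop one optional flag and retry
        }
        return flags;
    }
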
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index 6f16daf..0a86ddc 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -56,8 +56,16 @@
             void* cookie, const sp<StreamOutHalInterfaceLatencyModeCallback>&) = 0;
 };
 
+class MicrophoneInfoProvider : public virtual RefBase {
+  public:
+    using Info = std::vector<::aidl::android::media::audio::common::MicrophoneInfo>;
+    virtual ~MicrophoneInfoProvider() = default;
+    // Returns a nullptr if the HAL does not support microphone info retrieval.
+    virtual Info const* getMicrophoneInfo() = 0;
+};
+
 class DeviceHalAidl : public DeviceHalInterface, public ConversionHelperAidl,
-                      public CallbackBroker {
+                      public CallbackBroker, public MicrophoneInfoProvider {
   public:
     // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
     status_t getSupportedDevices(uint32_t *devices) override;
@@ -158,6 +166,11 @@
         wp<StreamOutHalInterfaceEventCallback> event;
         wp<StreamOutHalInterfaceLatencyModeCallback> latency;
     };
+    struct Microphones {
+        enum Status { UNKNOWN, NOT_SUPPORTED, QUERIED };
+        Status status = Status::UNKNOWN;
+        MicrophoneInfoProvider::Info info;
+    };
     using Patches = std::map<int32_t /*patch ID*/,
             ::aidl::android::hardware::audio::core::AudioPatch>;
     using PortConfigs = std::map<int32_t /*port config ID*/,
@@ -179,7 +192,7 @@
             const ::aidl::android::media::audio::common::AudioPortConfig& p);
     status_t createPortConfig(
             const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
-            ::aidl::android::media::audio::common::AudioPortConfig* appliedPortConfig);
+            PortConfigs::iterator* result);
     status_t findOrCreatePatch(
         const std::set<int32_t>& sourcePortConfigIds,
         const std::set<int32_t>& sinkPortConfigIds,
@@ -195,6 +208,7 @@
             const ::aidl::android::media::audio::common::AudioConfig& config,
             const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
             int32_t ioHandle,
+            ::aidl::android::media::audio::common::AudioSource aidlSource,
             ::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
     status_t findOrCreatePortConfig(
         const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
@@ -218,6 +232,7 @@
         int32_t aidlHandle,
         const ::aidl::android::media::audio::common::AudioDevice& aidlDevice,
         const ::aidl::android::media::audio::common::AudioIoFlags& aidlFlags,
+        ::aidl::android::media::audio::common::AudioSource aidlSource,
         struct audio_config* config,
         Cleanups* cleanups,
         ::aidl::android::media::audio::common::AudioConfig* aidlConfig,
@@ -241,6 +256,9 @@
     template<class C> sp<C> getCallbackImpl(void* cookie, wp<C> Callbacks::* field);
     template<class C> void setCallbackImpl(void* cookie, wp<C> Callbacks::* field, const sp<C>& cb);
 
+    // MicrophoneInfoProvider implementation
+    MicrophoneInfoProvider::Info const* getMicrophoneInfo() override;
+
     const std::string mInstance;
     const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
     Ports mPorts;
@@ -248,7 +266,7 @@
     int32_t mDefaultOutputPortId = -1;
     PortConfigs mPortConfigs;
     Patches mPatches;
-    std::map<audio_patch_handle_t, int32_t /*patch ID*/> mFwkHandles;
+    Microphones mMicrophones;
     std::mutex mLock;
     std::map<void*, Callbacks> mCallbacks GUARDED_BY(mLock);
 };
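
The Microphones struct added above caches the outcome of the one-time IModule.getMicrophones() query in three states, so an unsupported query is never repeated. A minimal sketch of the same tri-state pattern with illustrative names and a plain int payload standing in for MicrophoneInfo:

    #include <vector>

    enum class QueryStatus { UNKNOWN, NOT_SUPPORTED, QUERIED };

    struct CachedList {
        QueryStatus status = QueryStatus::UNKNOWN;
        std::vector<int> info;  // stand-in for the queried entries

        // Runs the query at most once; returns nullptr when unsupported.
        const std::vector<int>* get(bool (*query)(std::vector<int>*)) {
            if (status == QueryStatus::UNKNOWN) {
                status = query(&info) ? QueryStatus::QUERIED : QueryStatus::NOT_SUPPORTED;
            }
            return status == QueryStatus::QUERIED ? &info : nullptr;
        }
    };
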
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
index b452fa3..2eaaf5d 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
@@ -48,7 +48,7 @@
     // however currently we still get the list of module names from the config.
     // Since the example service does not have all modules, the SM will wait
     // for the missing ones forever.
-    if (strcmp(name, "primary") == 0 || strcmp(name, "r_submix") == 0) {
+    if (strcmp(name, "primary") == 0 || strcmp(name, "r_submix") == 0 || strcmp(name, "usb") == 0) {
         if (strcmp(name, "primary") == 0) name = "default";
         auto serviceName = std::string(IModule::descriptor) + "/" + name;
         service = IModule::fromBinder(
diff --git a/media/libaudiohal/impl/EffectBufferHalAidl.cpp b/media/libaudiohal/impl/EffectBufferHalAidl.cpp
index 5af8e24..a701852 100644
--- a/media/libaudiohal/impl/EffectBufferHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectBufferHalAidl.cpp
@@ -14,26 +14,39 @@
  * limitations under the License.
  */
 
+#include <algorithm>
+#include <cstdint>
+#include <cstring>
+#include <sys/mman.h>
 #define LOG_TAG "EffectBufferHalAidl"
 //#define LOG_NDEBUG 0
 
+#include <cutils/ashmem.h>
 #include <utils/Log.h>
 
 #include "EffectBufferHalAidl.h"
 
+using ndk::ScopedFileDescriptor;
+
 namespace android {
 namespace effect {
 
 // static
 status_t EffectBufferHalAidl::allocate(size_t size, sp<EffectBufferHalInterface>* buffer) {
-    ALOGE("%s not implemented yet %zu %p", __func__, size, buffer);
     return mirror(nullptr, size, buffer);
 }
 
 status_t EffectBufferHalAidl::mirror(void* external, size_t size,
                                      sp<EffectBufferHalInterface>* buffer) {
-    // buffer->setExternalData(external);
-    ALOGW("%s not implemented yet %p %zu %p", __func__, external, size, buffer);
+    sp<EffectBufferHalAidl> tempBuffer = new EffectBufferHalAidl(size);
+    status_t status = tempBuffer.get()->init();
+    if (status != OK) {
+        ALOGE("%s init failed %d", __func__, status);
+        return status;
+    }
+
+    tempBuffer->setExternalData(external);
+    *buffer = tempBuffer;
     return OK;
 }
 
@@ -48,7 +61,22 @@
 }
 
 status_t EffectBufferHalAidl::init() {
-    ALOGW("%s not implemented yet", __func__);
+    int fd = ashmem_create_region("audioEffectAidl", mBufferSize);
+    if (fd < 0) {
+        ALOGE("%s create ashmem failed %d", __func__, fd);
+        return fd;
+    }
+
+    ScopedFileDescriptor tempFd(fd);
+    mAudioBuffer.raw = mmap(nullptr /* address */, mBufferSize /* length */, PROT_READ | PROT_WRITE,
+                            MAP_SHARED, fd, 0 /* offset */);
+    if (mAudioBuffer.raw == MAP_FAILED) {
+        ALOGE("mmap failed for fd %d", fd);
+        mAudioBuffer.raw = nullptr;
+        return INVALID_OPERATION;
+    }
+
+    mMemory = {std::move(tempFd), static_cast<int64_t>(mBufferSize)};
     return OK;
 }
 
@@ -76,11 +104,26 @@
 }
 
 void EffectBufferHalAidl::update() {
-    ALOGW("%s not implemented yet", __func__);
+    update(mBufferSize);
 }
 
 void EffectBufferHalAidl::commit() {
-    ALOGW("%s not implemented yet", __func__);
+    commit(mBufferSize);
+}
+
+void EffectBufferHalAidl::copy(void* dst, const void* src, size_t n) const {
+    if (!dst || !src) {
+        return;
+    }
+    std::memcpy(dst, src, std::min(n, mBufferSize));
+}
+
+void EffectBufferHalAidl::update(size_t n) {
+    copy(mAudioBuffer.raw, mExternalData, n);
+}
+
+void EffectBufferHalAidl::commit(size_t n) {
+    copy(mExternalData, mAudioBuffer.raw, n);
 }
 
 } // namespace effect
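
init() above backs the effect buffer with an ashmem region that is then mapped into the process, while the Ashmem parcelable keeps the descriptor alive. A minimal sketch of that lifecycle, including the unmap/close teardown the hunk does not show; SharedBuffer is an illustrative type, not the class's actual layout:

    #include <cutils/ashmem.h>
    #include <sys/mman.h>
    #include <unistd.h>
    #include <cstddef>

    struct SharedBuffer {
        int fd = -1;
        void* addr = nullptr;
        size_t size = 0;

        bool allocate(size_t bytes) {
            fd = ashmem_create_region("exampleEffectBuffer", bytes);
            if (fd < 0) return false;
            addr = mmap(nullptr, bytes, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
            if (addr == MAP_FAILED) { addr = nullptr; return false; }
            size = bytes;
            return true;
        }

        ~SharedBuffer() {
            if (addr != nullptr) munmap(addr, size);
            if (fd >= 0) close(fd);
        }
    };
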
diff --git a/media/libaudiohal/impl/EffectBufferHalAidl.h b/media/libaudiohal/impl/EffectBufferHalAidl.h
index f488708..035314b 100644
--- a/media/libaudiohal/impl/EffectBufferHalAidl.h
+++ b/media/libaudiohal/impl/EffectBufferHalAidl.h
@@ -16,6 +16,8 @@
 
 #pragma once
 
+#include <aidl/android/hardware/common/Ashmem.h>
+
 #include <media/audiohal/EffectBufferHalInterface.h>
 #include <system/audio_effect.h>
 
@@ -44,16 +46,18 @@
   private:
     friend class EffectBufferHalInterface;
 
+    // buffer size in bytes
     const size_t mBufferSize;
     bool mFrameCountChanged;
     void* mExternalData;
+    aidl::android::hardware::common::Ashmem mMemory;
     audio_buffer_t mAudioBuffer;
 
     // Can not be constructed directly by clients.
     explicit EffectBufferHalAidl(size_t size);
 
     ~EffectBufferHalAidl();
-
+    void copy(void* dst, const void* src, size_t n) const;
     status_t init();
 };
 
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
index 7e25b04..519b871 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -24,6 +24,7 @@
 #include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_visualizer.h>
 
 #include <utils/Log.h>
 
@@ -35,6 +36,7 @@
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
 using ::aidl::android::hardware::audio::effect::CommandId;
 using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::Flags;
 using ::aidl::android::hardware::audio::effect::Parameter;
 using ::aidl::android::media::audio::common::AudioDeviceDescription;
 using ::aidl::android::media::audio::common::AudioMode;
@@ -59,12 +61,18 @@
                 {EFFECT_CMD_SET_INPUT_DEVICE, &EffectConversionHelperAidl::handleSetDevice},
                 {EFFECT_CMD_SET_VOLUME, &EffectConversionHelperAidl::handleSetVolume},
                 {EFFECT_CMD_OFFLOAD, &EffectConversionHelperAidl::handleSetOffload},
-                {EFFECT_CMD_FIRST_PROPRIETARY, &EffectConversionHelperAidl::handleFirstPriority}};
+                // Only the visualizer supports these commands; they reuse EFFECT_CMD_FIRST_PROPRIETARY
+                {VISUALIZER_CMD_CAPTURE, &EffectConversionHelperAidl::handleVisualizerCapture},
+                {VISUALIZER_CMD_MEASURE, &EffectConversionHelperAidl::handleVisualizerMeasure}};
 
 EffectConversionHelperAidl::EffectConversionHelperAidl(
         std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
         int32_t sessionId, int32_t ioId, const Descriptor& desc)
-    : mSessionId(sessionId), mIoId(ioId), mDesc(desc), mEffect(std::move(effect)) {
+    : mSessionId(sessionId),
+      mIoId(ioId),
+      mDesc(desc),
+      mEffect(std::move(effect)),
+      mIsInputStream(mDesc.common.flags.type == Flags::Type::PRE_PROC) {
     mCommon.session = sessionId;
     mCommon.ioHandle = ioId;
     mCommon.input = mCommon.output = kDefaultAudioConfig;
@@ -136,16 +144,32 @@
     return ret;
 }
 
-status_t EffectConversionHelperAidl::handleSetConfig(uint32_t cmdSize,
-                                                     const void* pCmdData __unused,
+status_t EffectConversionHelperAidl::handleSetConfig(uint32_t cmdSize, const void* pCmdData,
                                                      uint32_t* replySize, void* pReplyData) {
     if (!replySize || *replySize != sizeof(int) || !pReplyData ||
         cmdSize != sizeof(effect_config_t)) {
+        ALOGE("%s parameter invalid %u %p %p %p", __func__, cmdSize, pCmdData, replySize,
+              pReplyData);
         return BAD_VALUE;
     }
 
-    // TODO: need to implement setConfig with setParameter(common)
-    return *static_cast<int32_t*>(pReplyData) = OK;
+    effect_config_t* config = (effect_config_t*)pCmdData;
+    Parameter::Common aidlCommon = {
+            .session = mSessionId,
+            .ioHandle = mIoId,
+            .input = {.base = VALUE_OR_RETURN_STATUS(
+                              ::aidl::android::legacy2aidl_buffer_config_t_AudioConfigBase(
+                                      config->inputCfg, mIsInputStream))},
+            .output = {.base = VALUE_OR_RETURN_STATUS(
+                               ::aidl::android::legacy2aidl_buffer_config_t_AudioConfigBase(
+                                       config->outputCfg, mIsInputStream))}};
+
+    Parameter aidlParam = UNION_MAKE(Parameter, common, aidlCommon);
+
+    status_t ret = statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+    EffectParamWriter writer(*(effect_param_t*)pReplyData);
+    writer.setStatus(ret);
+    return ret;
 }
 
 status_t EffectConversionHelperAidl::handleGetConfig(uint32_t cmdSize __unused,
@@ -250,17 +274,17 @@
     return *static_cast<int32_t*>(pReplyData) = OK;
 }
 status_t EffectConversionHelperAidl::handleSetVolume(uint32_t cmdSize, const void* pCmdData,
-                                                     uint32_t* replySize, void* pReplyData) {
-    if (cmdSize != 2 * sizeof(uint32_t) || !pCmdData || !replySize || !pReplyData) {
-        ALOGE("%s parameter invalid %u %p %p %p", __func__, cmdSize, pCmdData, replySize,
-              pReplyData);
+                                                     uint32_t* replySize __unused,
+                                                     void* pReplyData __unused) {
+    if (cmdSize != 2 * sizeof(uint32_t) || !pCmdData) {
+        ALOGE("%s parameter invalid %u %p", __func__, cmdSize, pCmdData);
         return BAD_VALUE;
     }
     Parameter::VolumeStereo volume = {.left = (float)(*(uint32_t*)pCmdData) / (1 << 24),
                                       .right = (float)(*((uint32_t*)pCmdData + 1)) / (1 << 24)};
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             mEffect->setParameter(Parameter::make<Parameter::volumeStereo>(volume))));
-    return *static_cast<int32_t*>(pReplyData) = OK;
+    return OK;
 }
 
 status_t EffectConversionHelperAidl::handleSetOffload(uint32_t cmdSize, const void* pCmdData,
@@ -274,16 +298,44 @@
     return *static_cast<int32_t*>(pReplyData) = OK;
 }
 
-status_t EffectConversionHelperAidl::handleFirstPriority(uint32_t cmdSize __unused,
-                                                         const void* pCmdData __unused,
-                                                         uint32_t* replySize, void* pReplyData) {
+status_t EffectConversionHelperAidl::handleVisualizerCapture(uint32_t cmdSize __unused,
+                                                             const void* pCmdData __unused,
+                                                             uint32_t* replySize,
+                                                             void* pReplyData) {
     if (!replySize || !pReplyData) {
         ALOGE("%s parameter invalid %p %p", __func__, replySize, pReplyData);
         return BAD_VALUE;
     }
 
-    // TODO to be implemented
-    return OK;
+    const auto& uuid = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioUuid_audio_uuid_t(mDesc.common.id.type));
+    if (0 != memcmp(&uuid, SL_IID_VISUALIZATION, sizeof(effect_uuid_t))) {
+        ALOGE("%s visualizer command not supported by %s", __func__,
+              mDesc.common.id.toString().c_str());
+        return BAD_VALUE;
+    }
+
+    return visualizerCapture(replySize, pReplyData);
+}
+
+status_t EffectConversionHelperAidl::handleVisualizerMeasure(uint32_t cmdSize __unused,
+                                                             const void* pCmdData __unused,
+                                                             uint32_t* replySize,
+                                                             void* pReplyData) {
+    if (!replySize || !pReplyData) {
+        ALOGE("%s parameter invalid %p %p", __func__, replySize, pReplyData);
+        return BAD_VALUE;
+    }
+
+    const auto& uuid = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioUuid_audio_uuid_t(mDesc.common.id.type));
+    if (0 != memcmp(&uuid, SL_IID_VISUALIZATION, sizeof(effect_uuid_t))) {
+        ALOGE("%s visualizer command not supported by %s", __func__,
+              mDesc.common.id.toString().c_str());
+        return BAD_VALUE;
+    }
+
+    return visualizerMeasure(replySize, pReplyData);
 }
 
 }  // namespace effect
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.h b/media/libaudiohal/impl/EffectConversionHelperAidl.h
index 94435c6..54df1b8 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.h
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.h
@@ -30,12 +30,18 @@
     status_t handleCommand(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData, uint32_t* replySize,
                            void* pReplyData);
     virtual ~EffectConversionHelperAidl() {}
+    const ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn&
+    getEffectReturnParam() const {
+        return mOpenReturn;
+    }
 
   protected:
     const int32_t mSessionId;
     const int32_t mIoId;
     const ::aidl::android::hardware::audio::effect::Descriptor mDesc;
     const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> mEffect;
+    // whether the effect is instantiated on an input stream
+    const bool mIsInputStream;
     ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn mOpenReturn;
     ::aidl::android::hardware::audio::effect::Parameter::Common mCommon;
 
@@ -92,12 +98,20 @@
                              void* pReplyData);
     status_t handleSetOffload(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
                               void* pReplyData);
-    status_t handleFirstPriority(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
-                                 void* pReplyData);
+    status_t handleVisualizerCapture(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                                     void* pReplyData);
+    status_t handleVisualizerMeasure(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                                     void* pReplyData);
 
     // implemented by conversion of each effect
     virtual status_t setParameter(utils::EffectParamReader& param) = 0;
     virtual status_t getParameter(utils::EffectParamWriter& param) = 0;
+    virtual status_t visualizerCapture(uint32_t* replySize __unused, void* pReplyData __unused) {
+        return BAD_VALUE;
+    }
+    virtual status_t visualizerMeasure(uint32_t* replySize __unused, void* pReplyData __unused) {
+        return BAD_VALUE;
+    }
 };
 
 }  // namespace effect
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index 8fa301a..0c19ac8 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <cstddef>
 #define LOG_TAG "EffectHalAidl"
 //#define LOG_NDEBUG 0
 
@@ -23,10 +24,10 @@
 #include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionEffect.h>
 #include <media/AidlConversionUtil.h>
-#include <media/audiohal/AudioEffectUuid.h>
 #include <media/EffectsFactoryApi.h>
 #include <mediautils/TimeCheck.h>
 #include <system/audio.h>
+#include <system/audio_effects/effect_uuid.h>
 #include <utils/Log.h>
 
 #include "EffectHalAidl.h"
@@ -34,6 +35,7 @@
 #include <aidl/android/hardware/audio/effect/IEffect.h>
 
 #include "effectsAidlConversion/AidlConversionAec.h"
+#include "effectsAidlConversion/AidlConversionAgc1.h"
 #include "effectsAidlConversion/AidlConversionAgc2.h"
 #include "effectsAidlConversion/AidlConversionBassBoost.h"
 #include "effectsAidlConversion/AidlConversionDownmix.h"
@@ -50,20 +52,16 @@
 #include "effectsAidlConversion/AidlConversionVisualizer.h"
 
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
-using ::aidl::android::hardware::audio::effect::CommandId;
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::IFactory;
-using ::aidl::android::hardware::audio::effect::Parameter;
 
 namespace android {
 namespace effect {
 
-EffectHalAidl::EffectHalAidl(
-        const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory>& factory,
-        const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& effect,
-        uint64_t effectId, int32_t sessionId, int32_t ioId,
-        const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+EffectHalAidl::EffectHalAidl(const std::shared_ptr<IFactory>& factory,
+                             const std::shared_ptr<IEffect>& effect, uint64_t effectId,
+                             int32_t sessionId, int32_t ioId, const Descriptor& desc)
     : mFactory(factory),
       mEffect(effect),
       mEffectId(effectId),
@@ -80,51 +78,65 @@
 }
 
 status_t EffectHalAidl::createAidlConversion(
-        std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+        std::shared_ptr<IEffect> effect,
         int32_t sessionId, int32_t ioId,
-        const ::aidl::android::hardware::audio::effect::Descriptor& desc) {
+        const Descriptor& desc) {
     const auto& typeUuid = desc.common.id.type;
     ALOGI("%s create UUID %s", __func__, typeUuid.toString().c_str());
-    if (typeUuid == kAcousticEchoCancelerTypeUUID) {
+    if (typeUuid ==
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidAcousticEchoCanceler()) {
         mConversion =
                 std::make_unique<android::effect::AidlConversionAec>(effect, sessionId, ioId, desc);
-    } else if (typeUuid == kAutomaticGainControl2TypeUUID) {
+    } else if (typeUuid == ::aidl::android::hardware::audio::effect::
+                                   getEffectTypeUuidAutomaticGainControlV1()) {
+        mConversion = std::make_unique<android::effect::AidlConversionAgc1>(effect, sessionId, ioId,
+                                                                            desc);
+    } else if (typeUuid == ::aidl::android::hardware::audio::effect::
+                                   getEffectTypeUuidAutomaticGainControlV2()) {
         mConversion = std::make_unique<android::effect::AidlConversionAgc2>(effect, sessionId, ioId,
                                                                             desc);
-    } else if (typeUuid == kBassBoostTypeUUID) {
+    } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidBassBoost()) {
         mConversion = std::make_unique<android::effect::AidlConversionBassBoost>(effect, sessionId,
                                                                                  ioId, desc);
-    } else if (typeUuid == kDownmixTypeUUID) {
+    } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidDownmix()) {
         mConversion = std::make_unique<android::effect::AidlConversionDownmix>(effect, sessionId,
                                                                                ioId, desc);
-    } else if (typeUuid == kDynamicsProcessingTypeUUID) {
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidDynamicsProcessing()) {
         mConversion =
                 std::make_unique<android::effect::AidlConversionDp>(effect, sessionId, ioId, desc);
-    } else if (typeUuid == kEnvReverbTypeUUID) {
+    } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidEnvReverb()) {
         mConversion = std::make_unique<android::effect::AidlConversionEnvReverb>(effect, sessionId,
                                                                                  ioId, desc);
-    } else if (typeUuid == kEqualizerTypeUUID) {
+    } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidEqualizer()) {
         mConversion =
                 std::make_unique<android::effect::AidlConversionEq>(effect, sessionId, ioId, desc);
-    } else if (typeUuid == kHapticGeneratorTypeUUID) {
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator()) {
         mConversion = std::make_unique<android::effect::AidlConversionHapticGenerator>(
                 effect, sessionId, ioId, desc);
-    } else if (typeUuid == kLoudnessEnhancerTypeUUID) {
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer()) {
         mConversion = std::make_unique<android::effect::AidlConversionLoudnessEnhancer>(
                 effect, sessionId, ioId, desc);
-    } else if (typeUuid == kNoiseSuppressionTypeUUID) {
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidNoiseSuppression()) {
         mConversion = std::make_unique<android::effect::AidlConversionNoiseSuppression>(
                 effect, sessionId, ioId, desc);
-    } else if (typeUuid == kPresetReverbTypeUUID) {
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidPresetReverb()) {
         mConversion = std::make_unique<android::effect::AidlConversionPresetReverb>(
                 effect, sessionId, ioId, desc);
-    } else if (typeUuid == kSpatializerTypeUUID) {
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidSpatializer()) {
         mConversion = std::make_unique<android::effect::AidlConversionSpatializer>(
                 effect, sessionId, ioId, desc);
-    } else if (typeUuid == kVirtualizerTypeUUID) {
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidVirtualizer()) {
         mConversion = std::make_unique<android::effect::AidlConversionVirtualizer>(
                 effect, sessionId, ioId, desc);
-    } else if (typeUuid == kVisualizerTypeUUID) {
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidVisualizer()) {
         mConversion = std::make_unique<android::effect::AidlConversionVisualizer>(effect, sessionId,
                                                                                   ioId, desc);
     } else {
@@ -136,24 +148,51 @@
 }
 
 status_t EffectHalAidl::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
-    if (buffer == nullptr) {
-        return BAD_VALUE;
-    }
-    ALOGW("%s not implemented yet", __func__);
+    mInBuffer = buffer;
     return OK;
 }
 
 status_t EffectHalAidl::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
-    if (buffer == nullptr) {
-        return BAD_VALUE;
-    }
-    ALOGW("%s not implemented yet", __func__);
+    mOutBuffer = buffer;
     return OK;
 }
 
+
+// write to input FMQ here, wait for statusMQ STATUS_OK, and read from output FMQ
 status_t EffectHalAidl::process() {
-    ALOGW("%s not implemented yet", __func__);
-    // write to input FMQ here, and wait for statusMQ STATUS_OK
+    size_t available = mInputQ->availableToWrite();
+    size_t floatsToWrite = std::min(available, mInBuffer->getSize() / sizeof(float));
+    if (floatsToWrite == 0) {
+        ALOGW("%s not able to write, floats in buffer %zu, space in FMQ %zu", __func__,
+              mInBuffer->getSize() / sizeof(float), available);
+        return INVALID_OPERATION;
+    }
+    if (!mInputQ->write((float*)mInBuffer->ptr(), floatsToWrite)) {
+        ALOGW("%s failed to write %zu into inputQ", __func__, floatsToWrite);
+        return INVALID_OPERATION;
+    }
+
+    IEffect::Status retStatus{};
+    if (!mStatusQ->readBlocking(&retStatus, 1) || retStatus.status != OK ||
+        (size_t)retStatus.fmqConsumed != floatsToWrite || retStatus.fmqProduced == 0) {
+        ALOGW("%s read status failed: %s", __func__, retStatus.toString().c_str());
+        return INVALID_OPERATION;
+    }
+
+    available = mOutputQ->availableToRead();
+    size_t floatsToRead = std::min(available, mOutBuffer->getSize() / sizeof(float));
+    if (floatsToRead == 0) {
+        ALOGW("%s not able to read, buffer space %zu, floats in FMQ %zu", __func__,
+              mOutBuffer->getSize() / sizeof(float), available);
+        return INVALID_OPERATION;
+    }
+    if (!mOutputQ->read((float*)mOutBuffer->ptr(), floatsToRead)) {
+        ALOGW("%s failed to read %zu from outputQ", __func__, floatsToRead);
+        return INVALID_OPERATION;
+    }
+
+    ALOGD("%s %s consumed %zu produced %zu", __func__, mDesc.common.name.c_str(), floatsToWrite,
+          floatsToRead);
     return OK;
 }
 
@@ -165,14 +204,32 @@
 
 status_t EffectHalAidl::command(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
                                 uint32_t* replySize, void* pReplyData) {
-    return mConversion
-                   ? mConversion->handleCommand(cmdCode, cmdSize, pCmdData, replySize, pReplyData)
-                   : INVALID_OPERATION;
+    TIME_CHECK();
+    if (!mConversion) {
+        ALOGE("%s can not handle command %d when conversion not exist", __func__, cmdCode);
+        return INVALID_OPERATION;
+    }
+
+    status_t ret = mConversion->handleCommand(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+    // retrieve the data/status FMQs once the effect has been opened successfully
+    if (ret == OK && cmdCode == EFFECT_CMD_INIT) {
+        const auto& retParam = mConversion->getEffectReturnParam();
+        mStatusQ = std::make_unique<StatusMQ>(retParam.statusMQ);
+        mInputQ = std::make_unique<DataMQ>(retParam.inputDataMQ);
+        mOutputQ = std::make_unique<DataMQ>(retParam.outputDataMQ);
+        if (!mStatusQ->isValid() || !mInputQ->isValid() || !mOutputQ->isValid()) {
+            ALOGE("%s return with invalid FMQ", __func__);
+            return NO_INIT;
+        }
+    }
+
+    return ret;
 }
 
 status_t EffectHalAidl::getDescriptor(effect_descriptor_t* pDescriptor) {
-    ALOGW("%s %p", __func__, pDescriptor);
+    TIME_CHECK();
     if (pDescriptor == nullptr) {
+        ALOGE("%s null descriptor pointer", __func__);
         return BAD_VALUE;
     }
     Descriptor aidlDesc;
@@ -184,12 +241,13 @@
 }
 
 status_t EffectHalAidl::close() {
+    TIME_CHECK();
     return statusTFromBinderStatus(mEffect->close());
 }
 
 status_t EffectHalAidl::dump(int fd) {
-    ALOGW("%s not implemented yet, fd %d", __func__, fd);
-    return OK;
+    TIME_CHECK();
+    return mEffect->dump(fd, nullptr, 0);
 }
 
 } // namespace effect
diff --git a/media/libaudiohal/impl/EffectHalAidl.h b/media/libaudiohal/impl/EffectHalAidl.h
index 83b644b..194150d 100644
--- a/media/libaudiohal/impl/EffectHalAidl.h
+++ b/media/libaudiohal/impl/EffectHalAidl.h
@@ -16,11 +16,13 @@
 
 #pragma once
 
+#include <memory>
+
 #include <aidl/android/hardware/audio/effect/IEffect.h>
 #include <aidl/android/hardware/audio/effect/IFactory.h>
+#include <fmq/AidlMessageQueue.h>
 #include <media/audiohal/EffectHalInterface.h>
 #include <system/audio_effect.h>
-#include <memory>
 
 #include "EffectConversionHelperAidl.h"
 
@@ -29,6 +31,12 @@
 
 class EffectHalAidl : public EffectHalInterface {
   public:
+    using StatusMQ = ::android::AidlMessageQueue<
+            ::aidl::android::hardware::audio::effect::IEffect::Status,
+            ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
+    using DataMQ = ::android::AidlMessageQueue<
+            float, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
+
     // Set the input buffer.
     status_t setInBuffer(const sp<EffectBufferHalInterface>& buffer) override;
 
@@ -63,6 +71,9 @@
         return mEffect;
     }
 
+    // for TIME_CHECK
+    const std::string getClassName() const { return "EffectHalAidl"; }
+
   private:
     friend class sp<EffectHalAidl>;
 
@@ -73,6 +84,8 @@
     const int32_t mIoId;
     const ::aidl::android::hardware::audio::effect::Descriptor mDesc;
     std::unique_ptr<EffectConversionHelperAidl> mConversion;
+    std::unique_ptr<StatusMQ> mStatusQ;
+    std::unique_ptr<DataMQ> mInputQ, mOutputQ;
 
     sp<EffectBufferHalInterface> mInBuffer, mOutBuffer;
     effect_config_t mConfig;
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index ed952a3..7ecdbd2 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -107,13 +107,13 @@
 }
 
 status_t EffectHalHidl::process() {
-    // TIME_CHECK();  // TODO(b/238654698) reenable only when optimized.
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
 
     return processImpl(static_cast<uint32_t>(MessageQueueFlagBits::REQUEST_PROCESS));
 }
 
 status_t EffectHalHidl::processReverse() {
-    // TIME_CHECK();  // TODO(b/238654698) reenable only when optimized.
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
 
     return processImpl(static_cast<uint32_t>(MessageQueueFlagBits::REQUEST_PROCESS_REVERSE));
 }
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 0aae87b..f289f24 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -20,7 +20,6 @@
 #define LOG_TAG "EffectsFactoryHalAidl"
 //#define LOG_NDEBUG 0
 
-#include <aidl/android/hardware/audio/effect/IFactory.h>
 #include <error/expected_utils.h>
 #include <android/binder_manager.h>
 #include <media/AidlConversionCppNdk.h>
@@ -90,7 +89,8 @@
         return BAD_VALUE;
     }
 
-    AudioUuid uuid = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(*halUuid));
+    AudioUuid uuid = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halUuid));
     std::lock_guard lg(mLock);
     return getHalDescriptorWithImplUuid_l(uuid, pDescriptor);
 }
@@ -101,7 +101,8 @@
         return BAD_VALUE;
     }
 
-    AudioUuid type = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(*halType));
+    AudioUuid type = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halType));
     std::lock_guard lg(mLock);
     return getHalDescriptorWithTypeUuid_l(type, descriptors);
 }
@@ -118,7 +119,8 @@
 
     ALOGI("%s session %d ioId %d", __func__, sessionId, ioId);
 
-    AudioUuid aidlUuid = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
+    AudioUuid aidlUuid = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
     std::shared_ptr<IEffect> aidlEffect;
     Descriptor desc;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mFactory->createEffect(aidlUuid, &aidlEffect)));
@@ -139,20 +141,18 @@
 }
 
 status_t EffectsFactoryHalAidl::dumpEffects(int fd) {
-    ALOGE("%s not implemented yet, fd %d", __func__, fd);
-    return INVALID_OPERATION;
+    // TODO: add a proxy dump here because the AIDL EffectFactory service doesn't have a proxy handle
+    return mFactory->dump(fd, nullptr, 0);
 }
 
 status_t EffectsFactoryHalAidl::allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) {
     ALOGI("%s size %zu buffer %p", __func__, size, buffer);
-    // Buffer doesn't allocated here for AIDL, instead each effect open will return I/O data FMQ.
     return EffectBufferHalAidl::allocate(size, buffer);
 }
 
 status_t EffectsFactoryHalAidl::mirrorBuffer(void* external, size_t size,
                                              sp<EffectBufferHalInterface>* buffer) {
     ALOGI("%s extern %p size %zu buffer %p", __func__, external, size, buffer);
-    // TODO: implement with FMQ
     return EffectBufferHalAidl::mirror(external, size, buffer);
 }
 
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.h b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
index 1e85da9..9c3643b 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
@@ -20,6 +20,7 @@
 #include <memory>
 #include <mutex>
 
+#include <aidl/android/hardware/audio/effect/IFactory.h>
 #include <android-base/thread_annotations.h>
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <system/thread_defs.h>
@@ -59,6 +60,9 @@
 
     detail::AudioHalVersionInfo getHalVersion() const override;
 
+    // for TIME_CHECK
+    const std::string getClassName() const { return "EffectsFactoryHalAidl"; }
+
   private:
     std::mutex mLock;
     const std::shared_ptr<IFactory> mFactory;
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 7aa8231..cbc1578 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -21,16 +21,26 @@
 #include <cstdint>
 
 #include <audio_utils/clock.h>
+#include <media/AidlConversion.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionUtil.h>
+#include <media/AudioParameter.h>
 #include <mediautils/TimeCheck.h>
+#include <system/audio.h>
 #include <utils/Log.h>
 
 #include "DeviceHalAidl.h"
 #include "StreamHalAidl.h"
 
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::common::PlaybackTrackMetadata;
+using ::aidl::android::hardware::audio::common::RecordTrackMetadata;
 using ::aidl::android::hardware::audio::core::IStreamCommon;
 using ::aidl::android::hardware::audio::core::IStreamIn;
 using ::aidl::android::hardware::audio::core::IStreamOut;
 using ::aidl::android::hardware::audio::core::StreamDescriptor;
+using ::aidl::android::media::audio::common::MicrophoneDynamicInfo;
 
 namespace android {
 
@@ -110,11 +120,45 @@
     return OK;
 }
 
-status_t StreamHalAidl::setParameters(const String8& kvPairs __unused) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+namespace {
+
+// 'action' must accept a value of type 'T' and return 'status_t'.
+// The function returns 'true' if the parameter was found and the action succeeded.
+// The function returns 'false' if the parameter was not found.
+// Errors are propagated; an error implies the parameter was found but could not be applied.
+template<typename T, typename F>
+error::Result<bool> filterOutAndProcessParameter(
+        AudioParameter& parameters, const String8& key, const F& action) {
+    if (parameters.containsKey(key)) {
+        T value;
+        status_t status = parameters.get(key, value);
+        if (status == OK) {
+            parameters.remove(key);
+            status = action(value);
+            if (status == OK) return true;
+        }
+        return base::unexpected(status);
+    }
+    return false;
+}
+
+}  // namespace
+
+status_t StreamHalAidl::setParameters(const String8& kvPairs) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
+
+    AudioParameter parameters(kvPairs);
+    ALOGD("%s: parameters: %s", __func__, parameters.toString().c_str());
+
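+    // currently only the HW A/V sync ID is handled here; any remaining keys are logged and
+    // ignored below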
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+            parameters, String8(AudioParameter::keyStreamHwAvSync),
+            [&](int hwAvSyncId) {
+                return statusTFromBinderStatus(mStream->updateHwAvSyncId(hwAvSyncId));
+            }));
+
+    ALOGW_IF(parameters.size() != 0, "%s: unknown parameters, ignored: %s",
+            __func__, parameters.toString().c_str());
     return OK;
 }
 
@@ -172,10 +216,17 @@
             FALLTHROUGH_INTENDED;
         case StreamDescriptor::State::PAUSED:
         case StreamDescriptor::State::DRAIN_PAUSED:
-            return flush();
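+            // For output streams, flush() transitions PAUSED/DRAIN_PAUSED to IDLE; verify the
+            // new state and fall through to send the standby command from IDLE.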
+            if (mIsInput) return flush();
+            if (status_t status = flush(&reply); status != OK) return status;
+            if (reply.state != StreamDescriptor::State::IDLE) {
+                ALOGE("%s: unexpected stream state: %s (expected IDLE)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            FALLTHROUGH_INTENDED;
         case StreamDescriptor::State::IDLE:
             if (status_t status = sendCommand(makeHalCommand<HalCommand::Tag::standby>(),
-                            &reply); status != OK) {
+                            &reply, true /*safeFromNonWorkerThread*/); status != OK) {
                 return status;
             }
             if (reply.state != StreamDescriptor::State::STANDBY) {
@@ -252,7 +303,7 @@
 
 status_t StreamHalAidl::transfer(void *buffer, size_t bytes, size_t *transferred) {
     ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
-    // TIME_CHECK();  // TODO(b/238654698) reenable only when optimized.
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (!mStream || mContext.getDataMQ() == nullptr) return NO_INIT;
     mWorkerTid.store(gettid(), std::memory_order_release);
     // Switch the stream into an active state if needed.
@@ -291,9 +342,9 @@
         LOG_ALWAYS_FATAL_IF(*transferred > bytes,
                 "%s: HAL module read %zu bytes, which exceeds requested count %zu",
                 __func__, *transferred, bytes);
-        if (!mContext.getDataMQ()->read(static_cast<int8_t*>(buffer),
-                                        mContext.getDataMQ()->availableToRead())) {
-            ALOGE("%s: failed to read %zu bytes to data MQ", __func__, *transferred);
+        if (auto toRead = mContext.getDataMQ()->availableToRead();
+                toRead != 0 && !mContext.getDataMQ()->read(static_cast<int8_t*>(buffer), toRead)) {
+            ALOGE("%s: failed to read %zu bytes to data MQ", __func__, toRead);
             return NOT_ENOUGH_DATA;
         }
     }
@@ -316,6 +367,26 @@
     if (mIsInput) {
         return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), reply);
     } else {
+        if (mContext.isAsynchronous()) {
+            // Handle pause-flush-resume sequence. 'flush' from PAUSED goes to
+            // IDLE. We move here from IDLE to ACTIVE (same as 'start' from PAUSED).
+            const auto state = getState();
+            if (state == StreamDescriptor::State::IDLE) {
+                StreamDescriptor::Reply localReply{};
+                StreamDescriptor::Reply* innerReply = reply ?: &localReply;
+                if (status_t status =
+                        sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply);
+                        status != OK) {
+                    return status;
+                }
+                if (innerReply->state != StreamDescriptor::State::ACTIVE) {
+                    ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+                            __func__, toString(innerReply->state).c_str());
+                    return INVALID_OPERATION;
+                }
+                return OK;
+            }
+        }
         return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
     }
 }
@@ -327,7 +398,8 @@
     return sendCommand(makeHalCommand<HalCommand::Tag::drain>(
                     mIsInput ? StreamDescriptor::DrainMode::DRAIN_UNSPECIFIED :
                     earlyNotify ? StreamDescriptor::DrainMode::DRAIN_EARLY_NOTIFY :
-                    StreamDescriptor::DrainMode::DRAIN_ALL), reply);
+                    StreamDescriptor::DrainMode::DRAIN_ALL), reply,
+                    true /*safeFromNonWorkerThread*/);
 }
 
 status_t StreamHalAidl::flush(StreamDescriptor::Reply* reply) {
@@ -397,7 +469,7 @@
         const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
         ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
         bool safeFromNonWorkerThread) {
-    // TIME_CHECK();  // TODO(b/238654698) reenable only when optimized.
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (!safeFromNonWorkerThread) {
         const pid_t workerTid = mWorkerTid.load(std::memory_order_acquire);
         LOG_ALWAYS_FATAL_IF(workerTid != gettid(),
@@ -448,12 +520,30 @@
     return OK;
 }
 
+// static
+ConversionResult<::aidl::android::hardware::audio::common::SourceMetadata>
+StreamOutHalAidl::legacy2aidl_SourceMetadata(const StreamOutHalInterface::SourceMetadata& legacy) {
+    ::aidl::android::hardware::audio::common::SourceMetadata aidl;
+    aidl.tracks = VALUE_OR_RETURN(
+            ::aidl::android::convertContainer<std::vector<PlaybackTrackMetadata>>(
+                    legacy.tracks,
+                    ::aidl::android::legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata));
+    return aidl;
+}
+
 StreamOutHalAidl::StreamOutHalAidl(
         const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
         const std::shared_ptr<IStreamOut>& stream, const sp<CallbackBroker>& callbackBroker)
         : StreamHalAidl("StreamOutHalAidl", false /*isInput*/, config, nominalLatency,
                 std::move(context), getStreamCommon(stream)),
-          mStream(stream), mCallbackBroker(callbackBroker) {}
+          mStream(stream), mCallbackBroker(callbackBroker) {
+    // Initialize the offload metadata
+    mOffloadMetadata.sampleRate = static_cast<int32_t>(config.sample_rate);
+    mOffloadMetadata.channelMask = VALUE_OR_FATAL(
+            ::aidl::android::legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                    config.channel_mask, false));
+    mOffloadMetadata.averageBitRatePerSecond = static_cast<int32_t>(config.offload_info.bit_rate);
+}
 
 StreamOutHalAidl::~StreamOutHalAidl() {
     if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
@@ -461,15 +551,27 @@
     }
 }
 
+status_t StreamOutHalAidl::setParameters(const String8& kvPairs) {
+    if (!mStream) return NO_INIT;
+
+    AudioParameter parameters(kvPairs);
+    ALOGD("%s parameters: %s", __func__, parameters.toString().c_str());
+
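+    // Offload metadata keys are consumed by the filter below; all remaining keys are forwarded
+    // to StreamHalAidl::setParameters.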
+    if (status_t status = filterAndUpdateOffloadMetadata(parameters); status != OK) {
+        ALOGW("%s filtering or updating offload metadata failed: %d", __func__, status);
+    }
+
+    return StreamHalAidl::setParameters(parameters.toString());
+}
+
 status_t StreamOutHalAidl::getLatency(uint32_t *latency) {
     return StreamHalAidl::getLatency(latency);
 }
 
-status_t StreamOutHalAidl::setVolume(float left __unused, float right __unused) {
+status_t StreamOutHalAidl::setVolume(float left, float right) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    return statusTFromBinderStatus(mStream->setHwVolume({left, right}));
 }
 
 status_t StreamOutHalAidl::selectPresentation(int presentationId __unused, int programId __unused) {
@@ -507,6 +609,10 @@
 status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
+    if (!mContext.isAsynchronous()) {
+        ALOGE("%s: the callback is intended for asynchronous streams only", __func__);
+        return INVALID_OPERATION;
+    }
     if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
         if (auto cb = callback.promote(); cb != nullptr) {
             broker->setStreamOutCallback(this, cb);
@@ -570,11 +676,12 @@
 }
 
 status_t StreamOutHalAidl::updateSourceMetadata(
-        const StreamOutHalInterface::SourceMetadata& sourceMetadata __unused) {
+        const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    ::aidl::android::hardware::audio::common::SourceMetadata aidlMetadata =
+              VALUE_OR_RETURN_STATUS(legacy2aidl_SourceMetadata(sourceMetadata));
+    return statusTFromBinderStatus(mStream->updateMetadata(aidlMetadata));
 }
 
 status_t StreamOutHalAidl::getDualMonoMode(audio_dual_mono_mode_t* mode __unused) {
@@ -610,7 +717,7 @@
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     ALOGE("%s not implemented yet", __func__);
-    return OK;
+    return BAD_VALUE;
 }
 
 status_t StreamOutHalAidl::setPlaybackRateParameters(
@@ -618,7 +725,7 @@
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     ALOGE("%s not implemented yet", __func__);
-    return OK;
+    return BAD_VALUE;
 }
 
 status_t StreamOutHalAidl::setEventCallback(
@@ -660,12 +767,83 @@
     return StreamHalAidl::exit();
 }
 
+status_t StreamOutHalAidl::filterAndUpdateOffloadMetadata(AudioParameter &parameters) {
+    TIME_CHECK();
+    bool updateMetadata = false;
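+    // Translate any recognized legacy offload keys into AudioOffloadMetadata fields; the
+    // aggregated metadata is sent to the HAL only if at least one field was updated.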
+    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                parameters, String8(AudioParameter::keyOffloadCodecAverageBitRate),
+                [&](int value) {
+                    return value > 0 ?
+                            mOffloadMetadata.averageBitRatePerSecond = value, OK : BAD_VALUE;
+                }))) {
+        updateMetadata = true;
+    }
+    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                parameters, String8(AudioParameter::keyOffloadCodecSampleRate),
+                [&](int value) {
+                    return value > 0 ? mOffloadMetadata.sampleRate = value, OK : BAD_VALUE;
+                }))) {
+        updateMetadata = true;
+    }
+    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                parameters, String8(AudioParameter::keyOffloadCodecChannels),
+                [&](int value) -> status_t {
+                    if (value > 0) {
+                        audio_channel_mask_t channel_mask = audio_channel_out_mask_from_count(
+                                static_cast<uint32_t>(value));
+                        if (channel_mask == AUDIO_CHANNEL_INVALID) return BAD_VALUE;
+                        mOffloadMetadata.channelMask = VALUE_OR_RETURN_STATUS(
+                                ::aidl::android::legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                                        channel_mask, false /*isInput*/));
+                    }
+                    return BAD_VALUE;
+                }))) {
+        updateMetadata = true;
+    }
+    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                parameters, String8(AudioParameter::keyOffloadCodecDelaySamples),
+                [&](int value) {
+                    // The legacy keys are misnamed; the value is in frames.
+                    return value > 0 ? mOffloadMetadata.delayFrames = value, OK : BAD_VALUE;
+                }))) {
+        updateMetadata = true;
+    }
+    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                parameters, String8(AudioParameter::keyOffloadCodecPaddingSamples),
+                [&](int value) {
+                    // The legacy keys are misnamed; the value is in frames.
+                    return value > 0 ? mOffloadMetadata.paddingFrames = value, OK : BAD_VALUE;
+                }))) {
+        updateMetadata = true;
+    }
+    if (updateMetadata) {
+        ALOGD("%s set offload metadata %s", __func__, mOffloadMetadata.toString().c_str());
+        if (status_t status = statusTFromBinderStatus(
+                        mStream->updateOffloadMetadata(mOffloadMetadata)); status != OK) {
+            ALOGE("%s: updateOffloadMetadata failed %d", __func__, status);
+            return status;
+        }
+    }
+    return OK;
+}
+
+// static
+ConversionResult<::aidl::android::hardware::audio::common::SinkMetadata>
+StreamInHalAidl::legacy2aidl_SinkMetadata(const StreamInHalInterface::SinkMetadata& legacy) {
+    ::aidl::android::hardware::audio::common::SinkMetadata aidl;
+    aidl.tracks = VALUE_OR_RETURN(
+            ::aidl::android::convertContainer<std::vector<RecordTrackMetadata>>(
+                    legacy.tracks,
+                    ::aidl::android::legacy2aidl_record_track_metadata_v7_RecordTrackMetadata));
+    return aidl;
+}
+
 StreamInHalAidl::StreamInHalAidl(
         const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
-        const std::shared_ptr<IStreamIn>& stream)
+        const std::shared_ptr<IStreamIn>& stream, const sp<MicrophoneInfoProvider>& micInfoProvider)
         : StreamHalAidl("StreamInHalAidl", true /*isInput*/, config, nominalLatency,
                 std::move(context), getStreamCommon(stream)),
-          mStream(stream) {}
+          mStream(stream), mMicInfoProvider(micInfoProvider) {}
 
 status_t StreamInHalAidl::setGain(float gain __unused) {
     TIME_CHECK();
@@ -700,20 +878,48 @@
     return getObservablePosition(frames, time);
 }
 
-status_t StreamInHalAidl::getActiveMicrophones(
-        std::vector<media::MicrophoneInfoFw> *microphones __unused) {
+status_t StreamInHalAidl::getActiveMicrophones(std::vector<media::MicrophoneInfoFw> *microphones) {
+    if (!microphones) {
+        return BAD_VALUE;
+    }
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
+    sp<MicrophoneInfoProvider> micInfoProvider = mMicInfoProvider.promote();
+    if (!micInfoProvider) return NO_INIT;
+    auto staticInfo = micInfoProvider->getMicrophoneInfo();
+    if (!staticInfo) return INVALID_OPERATION;
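+    // Combine the static microphone info from the module with the dynamic info reported by
+    // the stream for the currently active microphones.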
+    std::vector<MicrophoneDynamicInfo> dynamicInfo;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mStream->getActiveMicrophones(&dynamicInfo)));
+    std::vector<media::MicrophoneInfoFw> result;
+    result.reserve(dynamicInfo.size());
+    for (const auto& d : dynamicInfo) {
+        const auto staticInfoIt = std::find_if(staticInfo->begin(), staticInfo->end(),
+                [&](const auto& s) { return s.id == d.id; });
+        if (staticInfoIt != staticInfo->end()) {
+            // Convert into the c++ backend type from the ndk backend type via the legacy structure.
+            audio_microphone_characteristic_t legacy = VALUE_OR_RETURN_STATUS(
+                    ::aidl::android::aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t(
+                            *staticInfoIt, d));
+            media::MicrophoneInfoFw info = VALUE_OR_RETURN_STATUS(
+                    ::android::legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfoFw(
+                            legacy));
+            // Note: info.portId is not filled in because it is framework-side information.
+            result.push_back(std::move(info));
+        } else {
+            ALOGE("%s: no static info for active microphone with id '%s'", __func__, d.id.c_str());
+        }
+    }
+    *microphones = std::move(result);
     return OK;
 }
 
 status_t StreamInHalAidl::updateSinkMetadata(
-        const StreamInHalInterface::SinkMetadata& sinkMetadata  __unused) {
+        const StreamInHalInterface::SinkMetadata& sinkMetadata) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    ALOGE("%s not implemented yet", __func__);
-    return OK;
+    ::aidl::android::hardware::audio::common::SinkMetadata aidlMetadata =
+              VALUE_OR_RETURN_STATUS(legacy2aidl_SinkMetadata(sinkMetadata));
+    return statusTFromBinderStatus(mStream->updateMetadata(aidlMetadata));
 }
 
 status_t StreamInHalAidl::setPreferredMicrophoneDirection(
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index f43c8e2..157e8bb 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -21,16 +21,20 @@
 #include <mutex>
 #include <string_view>
 
+#include <aidl/android/hardware/audio/common/AudioOffloadMetadata.h>
 #include <aidl/android/hardware/audio/core/BpStreamCommon.h>
 #include <aidl/android/hardware/audio/core/BpStreamIn.h>
 #include <aidl/android/hardware/audio/core/BpStreamOut.h>
 #include <fmq/AidlMessageQueue.h>
 #include <media/audiohal/EffectHalInterface.h>
 #include <media/audiohal/StreamHalInterface.h>
+#include <media/AudioParameter.h>
 
 #include "ConversionHelperAidl.h"
 #include "StreamPowerLog.h"
 
+using ::aidl::android::hardware::audio::common::AudioOffloadMetadata;
+
 namespace android {
 
 class StreamContextAidl {
@@ -43,24 +47,28 @@
             ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> DataMQ;
 
     explicit StreamContextAidl(
-            const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor)
+            const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor,
+            bool isAsynchronous)
         : mFrameSizeBytes(descriptor.frameSizeBytes),
           mCommandMQ(new CommandMQ(descriptor.command)),
           mReplyMQ(new ReplyMQ(descriptor.reply)),
           mBufferSizeFrames(descriptor.bufferSizeFrames),
-          mDataMQ(maybeCreateDataMQ(descriptor)) {}
+          mDataMQ(maybeCreateDataMQ(descriptor)),
+          mIsAsynchronous(isAsynchronous) {}
     StreamContextAidl(StreamContextAidl&& other) :
             mFrameSizeBytes(other.mFrameSizeBytes),
             mCommandMQ(std::move(other.mCommandMQ)),
             mReplyMQ(std::move(other.mReplyMQ)),
             mBufferSizeFrames(other.mBufferSizeFrames),
-            mDataMQ(std::move(other.mDataMQ)) {}
+            mDataMQ(std::move(other.mDataMQ)),
+            mIsAsynchronous(other.mIsAsynchronous) {}
     StreamContextAidl& operator=(StreamContextAidl&& other) {
         mFrameSizeBytes = other.mFrameSizeBytes;
         mCommandMQ = std::move(other.mCommandMQ);
         mReplyMQ = std::move(other.mReplyMQ);
         mBufferSizeFrames = other.mBufferSizeFrames;
         mDataMQ = std::move(other.mDataMQ);
+        mIsAsynchronous = other.mIsAsynchronous;
         return *this;
     }
     bool isValid() const {
@@ -78,6 +86,7 @@
     DataMQ* getDataMQ() const { return mDataMQ.get(); }
     size_t getFrameSizeBytes() const { return mFrameSizeBytes; }
     ReplyMQ* getReplyMQ() const { return mReplyMQ.get(); }
+    bool isAsynchronous() const { return mIsAsynchronous; }
 
   private:
     static std::unique_ptr<DataMQ> maybeCreateDataMQ(
@@ -94,6 +103,7 @@
     std::unique_ptr<ReplyMQ> mReplyMQ;
     size_t mBufferSizeFrames;
     std::unique_ptr<DataMQ> mDataMQ;
+    bool mIsAsynchronous;
 };
 
 class StreamHalAidl : public virtual StreamHalInterface, public ConversionHelperAidl {
@@ -224,6 +234,9 @@
 
 class StreamOutHalAidl : public StreamOutHalInterface, public StreamHalAidl {
   public:
+    // Extract the output stream parameters and set them via the AIDL APIs.
+    status_t setParameters(const String8& kvPairs) override;
+
     // Return the audio hardware driver estimated latency in milliseconds.
     status_t getLatency(uint32_t *latency) override;
 
@@ -300,9 +313,14 @@
   private:
     friend class sp<StreamOutHalAidl>;
 
+    static ConversionResult<::aidl::android::hardware::audio::common::SourceMetadata>
+    legacy2aidl_SourceMetadata(const StreamOutHalInterface::SourceMetadata& legacy);
+
     const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut> mStream;
     const wp<CallbackBroker> mCallbackBroker;
 
+    AudioOffloadMetadata mOffloadMetadata;
+
     // Can not be constructed directly by clients.
     StreamOutHalAidl(
             const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
@@ -310,8 +328,14 @@
             const sp<CallbackBroker>& callbackBroker);
 
     ~StreamOutHalAidl() override;
+
+    // Filter and update the offload metadata. Parameters related to the offload metadata are
+    // removed from 'parameters' after filtering.
+    status_t filterAndUpdateOffloadMetadata(AudioParameter &parameters);
 };
 
+class MicrophoneInfoProvider;
+
 class StreamInHalAidl : public StreamInHalInterface, public StreamHalAidl {
   public:
     // Set the input gain for the audio driver.
@@ -343,12 +367,17 @@
   private:
     friend class sp<StreamInHalAidl>;
 
+    static ConversionResult<::aidl::android::hardware::audio::common::SinkMetadata>
+    legacy2aidl_SinkMetadata(const StreamInHalInterface::SinkMetadata& legacy);
+
     const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamIn> mStream;
+    const wp<MicrophoneInfoProvider> mMicInfoProvider;
 
     // Can not be constructed directly by clients.
     StreamInHalAidl(
             const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
-            const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamIn>& stream);
+            const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamIn>& stream,
+            const sp<MicrophoneInfoProvider>& micInfoProvider);
 
     ~StreamInHalAidl() override = default;
 };
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 07c6df5..192790c 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -441,7 +441,7 @@
 #endif
 
 status_t StreamOutHalHidl::write(const void *buffer, size_t bytes, size_t *written) {
-    // TIME_CHECK();  // TODO(b/238654698) reenable only when optimized.
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
     *written = 0;
 
@@ -587,7 +587,7 @@
 }
 
 status_t StreamOutHalHidl::getRenderPosition(uint32_t *dspFrames) {
-    // TIME_CHECK();  // TODO(b/238654698) reenable only when optimized.
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getRenderPosition(
@@ -668,7 +668,7 @@
 }
 
 status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
-    // TIME_CHECK();  // TODO(b/238654698) reenable only when optimized.
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
     if (mWriterClient == gettid() && mCommandMQ) {
         return callWriterThread(
@@ -1012,7 +1012,7 @@
 }
 
 status_t StreamInHalHidl::read(void *buffer, size_t bytes, size_t *read) {
-    // TIME_CHECK();  // TODO(b/238654698) reenable only when optimized.
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
     *read = 0;
 
@@ -1146,7 +1146,7 @@
 }
 
 status_t StreamInHalHidl::getCapturePosition(int64_t *frames, int64_t *time) {
-    // TIME_CHECK();  // TODO(b/238654698) reenable only when optimized.
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
     if (mReaderClient == gettid() && mCommandMQ) {
         ReadParameters params;
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.cpp
index 15768b3..92b77d8 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.cpp
@@ -23,7 +23,6 @@
 #include <error/expected_utils.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
 #include <system/audio_effects/effect_aec.h>
 
 #include <utils/Log.h>
@@ -33,9 +32,11 @@
 namespace android {
 namespace effect {
 
+using ::aidl::android::getParameterSpecificField;
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
 using ::aidl::android::hardware::audio::effect::AcousticEchoCanceler;
 using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
 using ::android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
@@ -64,8 +65,13 @@
             break;
         }
         default: {
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            // vendor extension: copy the data area into a DefaultExtension; param field ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(AcousticEchoCanceler, acousticEchoCanceler, vendor,
+                                                ext);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
+            break;
         }
     }
 
@@ -73,7 +79,7 @@
 }
 
 status_t AidlConversionAec::getParameter(EffectParamWriter& param) {
-    uint32_t type = 0, value = 0;
+    uint32_t type = 0;
     if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
         OK != param.readFromParameter(&type)) {
         param.setStatus(BAD_VALUE);
@@ -85,29 +91,30 @@
         case AEC_PARAM_ECHO_DELAY:
             FALLTHROUGH_INTENDED;
         case AEC_PARAM_PROPERTIES: {
+            int32_t delay = 0;
             Parameter::Id id =
                     MAKE_SPECIFIC_PARAMETER_ID(AcousticEchoCanceler, acousticEchoCancelerTag,
                                                AcousticEchoCanceler::echoDelayUs);
             RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
-            value = VALUE_OR_RETURN_STATUS(
+            delay = VALUE_OR_RETURN_STATUS(
                     aidl::android::aidl2legacy_Parameter_aec_uint32_echoDelay(aidlParam));
-            break;
+            return param.writeToValue(&delay);
         }
         case AEC_PARAM_MOBILE_MODE: {
+            int32_t mode = 0;
             Parameter::Id id =
                     MAKE_SPECIFIC_PARAMETER_ID(AcousticEchoCanceler, acousticEchoCancelerTag,
                                                AcousticEchoCanceler::mobileMode);
             RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
-            value = VALUE_OR_RETURN_STATUS(
+            mode = VALUE_OR_RETURN_STATUS(
                     aidl::android::aidl2legacy_Parameter_aec_uint32_mobileMode(aidlParam));
-            break;
+            return param.writeToValue(&mode);
         }
-        default:
-            // use vendor extension implementation
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+        default: {
+            // vendor extension path; the first 32 bits (param type) are not passed to the HAL
+            VENDOR_EXTENSION_GET_AND_RETURN(AcousticEchoCanceler, acousticEchoCanceler, param);
+        }
     }
-    return param.writeToValue(&value);
 }
 
 } // namespace effect
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.cpp
new file mode 100644
index 0000000..1363ba4
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionAgc1"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_agc.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionAgc1.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::AutomaticGainControlV1;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionAgc1::setParameterLevel(EffectParamReader& param) {
+    int16_t level;
+    RETURN_STATUS_IF_ERROR(param.readFromValue(&level));
+    Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV1, automaticGainControlV1,
+                                                  targetPeakLevelDbFs, level);
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionAgc1::setParameterGain(EffectParamReader& param) {
+    int16_t gain;
+    RETURN_STATUS_IF_ERROR(param.readFromValue(&gain));
+    Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV1, automaticGainControlV1,
+                                                  maxCompressionGainDb, gain);
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionAgc1::setParameterLimiterEnable(EffectParamReader& param) {
+    bool enable;
+    RETURN_STATUS_IF_ERROR(param.readFromValue(&enable));
+    Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV1, automaticGainControlV1,
+                                                  enableLimiter, enable);
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionAgc1::setParameter(EffectParamReader& param) {
+    uint32_t type = 0;
+    if (OK != param.readFromParameter(&type)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    switch (type) {
+        case AGC_PARAM_TARGET_LEVEL: {
+            return setParameterLevel(param);
+        }
+        case AGC_PARAM_COMP_GAIN: {
+            return setParameterGain(param);
+        }
+        case AGC_PARAM_LIMITER_ENA: {
+            return setParameterLimiterEnable(param);
+        }
+        case AGC_PARAM_PROPERTIES: {
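+            // the legacy AGC properties blob packs target level, compression gain and limiter
+            // enable sequentially; read and forward them in that order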
+            RETURN_STATUS_IF_ERROR(setParameterLevel(param));
+            RETURN_STATUS_IF_ERROR(setParameterGain(param));
+            RETURN_STATUS_IF_ERROR(setParameterLimiterEnable(param));
+            return OK;
+        }
+        default: {
+            // vendor extension: copy the data area into a DefaultExtension; param field ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV1,
+                                                          automaticGainControlV1, vendor, ext);
+            return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+        }
+    }
+}
+
+status_t AidlConversionAgc1::getParameterLevel(EffectParamWriter& param) {
+    Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(AutomaticGainControlV1, automaticGainControlV1Tag,
+                                                  AutomaticGainControlV1::targetPeakLevelDbFs);
+    Parameter aidlParam;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+    int32_t level = VALUE_OR_RETURN_STATUS(
+            GET_PARAMETER_SPECIFIC_FIELD(aidlParam, AutomaticGainControlV1, automaticGainControlV1,
+                                         AutomaticGainControlV1::targetPeakLevelDbFs, int32_t));
+    return param.writeToValue(&level);
+}
+
+status_t AidlConversionAgc1::getParameterGain(EffectParamWriter& param) {
+    Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(AutomaticGainControlV1, automaticGainControlV1Tag,
+                                                  AutomaticGainControlV1::maxCompressionGainDb);
+    Parameter aidlParam;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+    int32_t gain = VALUE_OR_RETURN_STATUS(
+            GET_PARAMETER_SPECIFIC_FIELD(aidlParam, AutomaticGainControlV1, automaticGainControlV1,
+                                         AutomaticGainControlV1::maxCompressionGainDb, int32_t));
+    return param.writeToValue(&gain);
+}
+
+status_t AidlConversionAgc1::getParameterLimiterEnable(EffectParamWriter& param) {
+    Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(AutomaticGainControlV1, automaticGainControlV1Tag,
+                                                  AutomaticGainControlV1::enableLimiter);
+    Parameter aidlParam;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+    bool enable = VALUE_OR_RETURN_STATUS(
+            GET_PARAMETER_SPECIFIC_FIELD(aidlParam, AutomaticGainControlV1, automaticGainControlV1,
+                                         AutomaticGainControlV1::enableLimiter, bool));
+    return param.writeToValue(&enable);
+}
+
+status_t AidlConversionAgc1::getParameter(EffectParamWriter& param) {
+    uint32_t type = 0;
+    if (OK != param.readFromParameter(&type)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    switch (type) {
+        case AGC_PARAM_TARGET_LEVEL: {
+            return getParameterLevel(param);
+        }
+        case AGC_PARAM_COMP_GAIN: {
+            return getParameterGain(param);
+        }
+        case AGC_PARAM_LIMITER_ENA: {
+            return getParameterLimiterEnable(param);
+        }
+        case AGC_PARAM_PROPERTIES: {
+            RETURN_STATUS_IF_ERROR(getParameterLevel(param));
+            RETURN_STATUS_IF_ERROR(getParameterGain(param));
+            RETURN_STATUS_IF_ERROR(getParameterLimiterEnable(param));
+            return OK;
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(AutomaticGainControlV1, automaticGainControlV1, param);
+        }
+    }
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.h
new file mode 100644
index 0000000..b0509fd
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionAgc1 : public EffectConversionHelperAidl {
+  public:
+    AidlConversionAgc1(std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+                       int32_t sessionId, int32_t ioId,
+                       const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionAgc1() {}
+
+  private:
+    status_t setParameterLevel(utils::EffectParamReader& param);
+    status_t setParameterGain(utils::EffectParamReader& param);
+    status_t setParameterLimiterEnable(utils::EffectParamReader& param);
+    status_t setParameter(utils::EffectParamReader& param) override;
+
+    status_t getParameterLevel(utils::EffectParamWriter& param);
+    status_t getParameterGain(utils::EffectParamWriter& param);
+    status_t getParameterLimiterEnable(utils::EffectParamWriter& param);
+    status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.cpp
index b736936..b35a1c6 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.cpp
@@ -23,7 +23,6 @@
 #include <error/expected_utils.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
 #include <system/audio_effects/effect_agc2.h>
 
 #include <utils/Log.h>
@@ -33,9 +32,11 @@
 namespace android {
 namespace effect {
 
+using ::aidl::android::getParameterSpecificField;
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
 using ::aidl::android::hardware::audio::effect::AutomaticGainControlV2;
 using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
 using ::android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
@@ -65,8 +66,12 @@
             break;
         }
         default: {
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            // vendor extension: copy the data area into a DefaultExtension; param field ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV2, automaticGainControlV2,
+                                                vendor, ext);
+            break;
         }
     }
 
@@ -110,8 +115,7 @@
             break;
         }
         default: {
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            VENDOR_EXTENSION_GET_AND_RETURN(AutomaticGainControlV2, automaticGainControlV2, param);
         }
     }
 
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.cpp
index 91c3dea..7c6a5a2 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.cpp
@@ -23,7 +23,6 @@
 #include <error/expected_utils.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
 #include <system/audio_effects/aidl_effects_utils.h>
 #include <system/audio_effects/effect_bassboost.h>
 
@@ -35,10 +34,12 @@
 namespace effect {
 
 using ::aidl::android::convertIntegral;
+using ::aidl::android::getParameterSpecificField;
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
 using ::aidl::android::hardware::audio::effect::BassBoost;
 using ::aidl::android::hardware::audio::effect::Parameter;
 using ::aidl::android::hardware::audio::effect::Range;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
 using ::android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
@@ -63,8 +64,11 @@
             return BAD_VALUE;
         }
         default: {
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            // vendor extension: copy the data area into a DefaultExtension; param field ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(BassBoost, bassBoost, vendor, ext);
+            break;
         }
     }
 
@@ -82,7 +86,7 @@
     Parameter aidlParam;
     switch (type) {
         case BASSBOOST_PARAM_STRENGTH: {
-            uint32_t value;
+            uint16_t value;
             Parameter::Id id =
                     MAKE_SPECIFIC_PARAMETER_ID(BassBoost, bassBoostTag, BassBoost::strengthPm);
             RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
@@ -92,14 +96,13 @@
         }
         case BASSBOOST_PARAM_STRENGTH_SUPPORTED: {
             // an invalid range indicates not setting support for this parameter
-            uint16_t value =
+            uint32_t value =
                     ::aidl::android::hardware::audio::effect::isRangeValid<Range::Tag::bassBoost>(
                             BassBoost::strengthPm, mDesc.capability);
             return param.writeToValue(&value);
         }
         default: {
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            VENDOR_EXTENSION_GET_AND_RETURN(BassBoost, bassBoost, param);
         }
     }
 }
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.cpp
index 17cedf7..b57971c 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.cpp
@@ -23,7 +23,6 @@
 #include <error/expected_utils.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
 #include <system/audio_effects/effect_downmix.h>
 
 #include <system/audio_effect.h>
@@ -34,9 +33,11 @@
 namespace android {
 namespace effect {
 
+using ::aidl::android::getParameterSpecificField;
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
 using ::aidl::android::hardware::audio::effect::Downmix;
 using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
 using ::android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
@@ -57,8 +58,10 @@
             break;
         }
         default: {
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            // vendor extension: copy the data area into a DefaultExtension; param field ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Downmix, downmix, vendor, ext);
         }
     }
 
@@ -83,8 +86,7 @@
             break;
         }
         default: {
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            VENDOR_EXTENSION_GET_AND_RETURN(Downmix, downmix, param);
         }
     }
 
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp
index 4555c9f..fe845ab 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp
@@ -24,10 +24,9 @@
 #include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
 #include <system/audio_effect.h>
 #include <system/audio_effects/effect_dynamicsprocessing.h>
-
+#include <Utils.h>
 #include <utils/Log.h>
 
 #include "AidlConversionDynamicsProcessing.h"
@@ -36,30 +35,26 @@
 namespace effect {
 
 using ::aidl::android::convertIntegral;
+using ::aidl::android::getParameterSpecificField;
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
 using ::aidl::android::hardware::audio::effect::Capability;
 using ::aidl::android::hardware::audio::effect::DynamicsProcessing;
 using ::aidl::android::hardware::audio::effect::Parameter;
 using ::aidl::android::hardware::audio::effect::toString;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
 using ::android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
 
 status_t AidlConversionDp::setParameter(EffectParamReader& param) {
     uint32_t type = 0;
-    if (OK != param.readFromParameter(&type)) {
-        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
-        return BAD_VALUE;
-    }
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&type));
     Parameter aidlParam;
     switch (type) {
         case DP_PARAM_INPUT_GAIN: {
             DynamicsProcessing::InputGain inputGainAidl;
-            if (OK != param.readFromParameter(&inputGainAidl.channel) ||
-                OK != param.readFromValue(&inputGainAidl.gainDb)) {
-                ALOGE("%s invalid inputGain %s", __func__, param.toString().c_str());
-                return BAD_VALUE;
-            }
+            RETURN_STATUS_IF_ERROR(param.readFromParameter(&inputGainAidl.channel));
+            RETURN_STATUS_IF_ERROR(param.readFromValue(&inputGainAidl.gainDb));
             aidlParam = MAKE_SPECIFIC_PARAMETER(DynamicsProcessing, dynamicsProcessing, inputGain,
                                                 {inputGainAidl});
             break;
@@ -122,8 +117,12 @@
             break;
         }
         default: {
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            // vendor extension: copy the data area into a DefaultExtension; param field ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam =
+                    MAKE_SPECIFIC_PARAMETER(DynamicsProcessing, dynamicsProcessing, vendor, ext);
+            break;
         }
     }
 
@@ -132,17 +131,12 @@
 
 status_t AidlConversionDp::getParameter(EffectParamWriter& param) {
     uint32_t type = 0;
-    if (OK != param.readFromParameter(&type)) {
-        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
-    }
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&type));
     Parameter aidlParam;
     switch (type) {
         case DP_PARAM_INPUT_GAIN: {
             int32_t channel;
-            if (OK != param.readFromParameter(&channel)) {
-                ALOGE("%s invalid inputGain %s", __func__, param.toString().c_str());
-                return BAD_VALUE;
-            }
+            RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
             Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag,
                                                           DynamicsProcessing::inputGain);
             RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
@@ -161,11 +155,6 @@
             return BAD_VALUE;
         }
         case DP_PARAM_ENGINE_ARCHITECTURE: {
-            int32_t channel;
-            if (OK != param.readFromParameter(&channel)) {
-                ALOGE("%s invalid inputGain %s", __func__, param.toString().c_str());
-                return BAD_VALUE;
-            }
             Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag,
                                                           DynamicsProcessing::engineArchitecture);
             RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
@@ -186,18 +175,15 @@
                     VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(engine.postEqStage.inUse));
             int32_t limiterInUse =
                     VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(engine.limiterInUse));
-            if (OK != param.writeToValue(&resolution) ||
-                OK != param.writeToValue(&engine.preferredProcessingDurationMs) ||
-                OK != param.writeToValue(&preEqInUse) ||
-                OK != param.writeToValue(&engine.preEqStage.bandCount) ||
-                OK != param.writeToValue(&mbcInUse) ||
-                OK != param.writeToValue(&engine.mbcStage.bandCount) ||
-                OK != param.writeToValue(&postEqInUse) ||
-                OK != param.writeToValue(&engine.postEqStage.bandCount) ||
-                OK != param.writeToValue(&limiterInUse)) {
-                ALOGE("%s invalid engineArchitecture %s", __func__, param.toString().c_str());
-                return BAD_VALUE;
-            }
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&resolution));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&engine.preferredProcessingDurationMs));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&preEqInUse));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&engine.preEqStage.bandCount));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&mbcInUse));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&engine.mbcStage.bandCount));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&postEqInUse));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&engine.postEqStage.bandCount));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&limiterInUse));
             mEngine = engine;
             return OK;
         }
@@ -223,110 +209,94 @@
             return getLimiterConfig(param);
         }
         case DP_PARAM_GET_CHANNEL_COUNT: {
-            uint32_t channel = VALUE_OR_RETURN_STATUS(
-                    aidl::android::aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
-                            mCommon.input.base.channelMask, true /* input */));
-            if (OK != param.writeToValue(&channel)) {
-                ALOGE("%s write channel number %d to param failed %s", __func__, channel,
-                      param.toString().c_str());
-                return BAD_VALUE;
-            }
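+            // derive the channel count directly from the stored input channel mask (mCommon)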
+            uint32_t channel = ::aidl::android::hardware::audio::common::getChannelCount(
+                    mCommon.input.base.channelMask);
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&channel));
             return OK;
         }
         default: {
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            VENDOR_EXTENSION_GET_AND_RETURN(DynamicsProcessing, dynamicsProcessing, param);
         }
     }
 }
 
-aidl::ConversionResult<DynamicsProcessing::ChannelConfig>
+ConversionResult<DynamicsProcessing::ChannelConfig>
 AidlConversionDp::readChannelConfigFromParam(EffectParamReader& param) {
     int32_t enable, channel;
-    if (OK != param.readFromParameter(&channel) || OK != param.readFromValue(&enable)) {
-        ALOGE("%s invalid channel config param %s", __func__, param.toString().c_str());
-        return ::android::base::unexpected(::android::BAD_VALUE);
-    }
+    RETURN_IF_ERROR(param.readFromParameter(&channel));
+    RETURN_IF_ERROR(param.readFromValue(&enable));
+
     return DynamicsProcessing::ChannelConfig(
             {.enable = VALUE_OR_RETURN(convertIntegral<bool>(enable)), .channel = channel});
 }
 
-aidl::ConversionResult<DynamicsProcessing::EqBandConfig>
+ConversionResult<DynamicsProcessing::EqBandConfig>
 AidlConversionDp::readEqBandConfigFromParam(EffectParamReader& param) {
     DynamicsProcessing::EqBandConfig config;
     int32_t enable;
-    if (OK != param.readFromParameter(&config.channel) ||
-        OK != param.readFromParameter(&config.band) ||
-        OK != param.readFromValue(&enable) ||
-        OK != param.readFromValue(&config.cutoffFrequencyHz) ||
-        OK != param.readFromValue(&config.gainDb)) {
-        ALOGE("%s invalid eq band param %s", __func__, param.toString().c_str());
-        return ::android::base::unexpected(::android::BAD_VALUE);
-    }
+    RETURN_IF_ERROR(param.readFromParameter(&config.channel));
+    RETURN_IF_ERROR(param.readFromParameter(&config.band));
+    RETURN_IF_ERROR(param.readFromValue(&enable));
+    RETURN_IF_ERROR(param.readFromValue(&config.cutoffFrequencyHz));
+    RETURN_IF_ERROR(param.readFromValue(&config.gainDb));
+
     config.enable = VALUE_OR_RETURN(convertIntegral<bool>(enable));
     return config;
 }
 
-aidl::ConversionResult<DynamicsProcessing::MbcBandConfig>
+ConversionResult<DynamicsProcessing::MbcBandConfig>
 AidlConversionDp::readMbcBandConfigFromParam(EffectParamReader& param) {
     DynamicsProcessing::MbcBandConfig config;
     int32_t enable;
-    if (OK != param.readFromParameter(&config.channel) ||
-        OK != param.readFromParameter(&config.band) ||
-        OK != param.readFromValue(&enable) ||
-        OK != param.readFromValue(&config.cutoffFrequencyHz) ||
-        OK != param.readFromValue(&config.attackTimeMs) ||
-        OK != param.readFromValue(&config.releaseTimeMs) ||
-        OK != param.readFromValue(&config.ratio) ||
-        OK != param.readFromValue(&config.thresholdDb) ||
-        OK != param.readFromValue(&config.kneeWidthDb) ||
-        OK != param.readFromValue(&config.noiseGateThresholdDb) ||
-        OK != param.readFromValue(&config.expanderRatio) ||
-        OK != param.readFromValue(&config.preGainDb) ||
-        OK != param.readFromValue(&config.postGainDb)) {
-        ALOGE("%s invalid mbc band config param %s", __func__, param.toString().c_str());
-        return ::android::base::unexpected(::android::BAD_VALUE);
-    }
+    RETURN_IF_ERROR(param.readFromParameter(&config.channel));
+    RETURN_IF_ERROR(param.readFromParameter(&config.band));
+    RETURN_IF_ERROR(param.readFromValue(&enable));
+    RETURN_IF_ERROR(param.readFromValue(&config.cutoffFrequencyHz));
+    RETURN_IF_ERROR(param.readFromValue(&config.attackTimeMs));
+    RETURN_IF_ERROR(param.readFromValue(&config.releaseTimeMs));
+    RETURN_IF_ERROR(param.readFromValue(&config.ratio));
+    RETURN_IF_ERROR(param.readFromValue(&config.thresholdDb));
+    RETURN_IF_ERROR(param.readFromValue(&config.kneeWidthDb));
+    RETURN_IF_ERROR(param.readFromValue(&config.noiseGateThresholdDb));
+    RETURN_IF_ERROR(param.readFromValue(&config.expanderRatio));
+    RETURN_IF_ERROR(param.readFromValue(&config.preGainDb));
+    RETURN_IF_ERROR(param.readFromValue(&config.postGainDb));
+
     config.enable = VALUE_OR_RETURN(convertIntegral<bool>(enable));
     return config;
 }
 
-aidl::ConversionResult<DynamicsProcessing::LimiterConfig>
+ConversionResult<DynamicsProcessing::LimiterConfig>
 AidlConversionDp::readLimiterConfigFromParam(EffectParamReader& param) {
     DynamicsProcessing::LimiterConfig config;
     int32_t enable, inUse;
-    if (OK != param.readFromParameter(&config.channel) ||
-        OK != param.readFromValue(&inUse) ||
-        OK != param.readFromValue(&enable) ||
-        OK != param.readFromValue(&config.linkGroup) ||
-        OK != param.readFromValue(&config.attackTimeMs) ||
-        OK != param.readFromValue(&config.releaseTimeMs) ||
-        OK != param.readFromValue(&config.ratio) ||
-        OK != param.readFromValue(&config.thresholdDb) ||
-        OK != param.readFromValue(&config.postGainDb)) {
-        ALOGE("%s invalid limiter config param %s", __func__, param.toString().c_str());
-        return ::android::base::unexpected(::android::BAD_VALUE);
-    }
+    RETURN_IF_ERROR(param.readFromParameter(&config.channel));
+    RETURN_IF_ERROR(param.readFromValue(&inUse));
+    RETURN_IF_ERROR(param.readFromValue(&enable));
+    RETURN_IF_ERROR(param.readFromValue(&config.linkGroup));
+    RETURN_IF_ERROR(param.readFromValue(&config.attackTimeMs));
+    RETURN_IF_ERROR(param.readFromValue(&config.releaseTimeMs));
+    RETURN_IF_ERROR(param.readFromValue(&config.ratio));
+    RETURN_IF_ERROR(param.readFromValue(&config.thresholdDb));
+    RETURN_IF_ERROR(param.readFromValue(&config.postGainDb));
+
     config.enable = VALUE_OR_RETURN(convertIntegral<bool>(enable));
     return config;
 }
 
-aidl::ConversionResult<DynamicsProcessing::EngineArchitecture>
+ConversionResult<DynamicsProcessing::EngineArchitecture>
 AidlConversionDp::readEngineArchitectureFromParam(EffectParamReader& param) {
     DynamicsProcessing::EngineArchitecture engine;
     int32_t variant, preEqInUse, mbcInUse, postEqInUse, limiterInUse;
-    if (OK != param.readFromValue(&variant) &&
-        OK != param.readFromValue(&engine.preferredProcessingDurationMs) &&
-        OK != param.readFromValue(&preEqInUse) &&
-        OK != param.readFromValue(&engine.preEqStage.bandCount) &&
-        OK != param.readFromValue(&mbcInUse) &&
-        OK != param.readFromValue(&engine.mbcStage.bandCount) &&
-        OK != param.readFromValue(&postEqInUse) &&
-        OK != param.readFromValue(&engine.postEqStage.bandCount) &&
-        OK != param.readFromValue(&limiterInUse)) {
-        ALOGE("%s invalid engineArchitecture %s", __func__, param.toString().c_str());
-        return ::android::base::unexpected(::android::BAD_VALUE);
-    }
+    RETURN_IF_ERROR(param.readFromValue(&variant));
+    RETURN_IF_ERROR(param.readFromValue(&engine.preferredProcessingDurationMs));
+    RETURN_IF_ERROR(param.readFromValue(&preEqInUse));
+    RETURN_IF_ERROR(param.readFromValue(&engine.preEqStage.bandCount));
+    RETURN_IF_ERROR(param.readFromValue(&mbcInUse));
+    RETURN_IF_ERROR(param.readFromValue(&engine.mbcStage.bandCount));
+    RETURN_IF_ERROR(param.readFromValue(&postEqInUse));
+    RETURN_IF_ERROR(param.readFromValue(&engine.postEqStage.bandCount));
+    RETURN_IF_ERROR(param.readFromValue(&limiterInUse));
 
     engine.resolutionPreference = VALUE_OR_RETURN(
             aidl::android::legacy2aidl_int32_DynamicsProcessing_ResolutionPreference(variant));
@@ -339,10 +309,7 @@
 
 status_t AidlConversionDp::getChannelConfig(DynamicsProcessing::Tag tag, EffectParamWriter& param) {
     int32_t channel;
-    if (OK != param.readFromParameter(&channel)) {
-        ALOGE("%s invalid parameter %s", __func__, param.toString().c_str());
-        return BAD_VALUE;
-    }
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
 
     Parameter aidlParam;
     Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag, tag);
@@ -384,13 +351,9 @@
     for (const auto& ch : channels) {
         if (ch.channel == channel) {
             int32_t enable = ch.enable;
-            if (OK != param.writeToValue(&inUse) ||
-                OK != param.writeToValue(&enable) ||
-                OK != param.writeToValue(&bandCount)) {
-                ALOGE("%s failed to write into param value %s", __func__,
-                      param.toString().c_str());
-                return BAD_VALUE;
-            }
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&inUse));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&enable));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandCount));
             return OK;
         }
     }
@@ -400,10 +363,8 @@
 
 status_t AidlConversionDp::getEqBandConfig(DynamicsProcessing::Tag tag, EffectParamWriter& param) {
     int32_t channel, band;
-    if (OK != param.readFromParameter(&channel) || OK != param.readFromParameter(&band)) {
-        ALOGE("%s invalid parameter %s", __func__, param.toString().c_str());
-        return BAD_VALUE;
-    }
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&band));
 
     Parameter aidlParam;
     Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag, tag);
@@ -425,12 +386,9 @@
     for (const auto& bandIt : bands) {
         if (bandIt.channel == channel && bandIt.band == band) {
             int32_t enable = bandIt.enable;
-            if (OK != param.writeToValue(&enable) ||
-                OK != param.writeToValue(&bandIt.cutoffFrequencyHz) ||
-                OK != param.writeToValue(&bandIt.gainDb)) {
-                ALOGE("%s failed to write into param value %s", __func__, param.toString().c_str());
-                return BAD_VALUE;
-            }
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&enable));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.cutoffFrequencyHz));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.gainDb));
             return OK;
         }
     }
@@ -440,10 +398,8 @@
 
 status_t AidlConversionDp::getMbcBandConfig(EffectParamWriter& param) {
     int32_t channel, band;
-    if (OK != param.readFromParameter(&channel) || OK != param.readFromParameter(&band)) {
-        ALOGE("%s invalid parameter %s", __func__, param.toString().c_str());
-        return BAD_VALUE;
-    }
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&band));
     Parameter aidlParam;
     Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag,
                                                   DynamicsProcessing::mbcBand);
@@ -457,20 +413,17 @@
     for (const auto& bandIt : bands) {
         if (bandIt.channel == channel && bandIt.band == band) {
             int32_t enable = bandIt.enable;
-            if (OK != param.writeToValue(&enable) ||
-                OK != param.writeToValue(&bandIt.cutoffFrequencyHz) ||
-                OK != param.writeToValue(&bandIt.attackTimeMs) ||
-                OK != param.writeToValue(&bandIt.releaseTimeMs) ||
-                OK != param.writeToValue(&bandIt.ratio) ||
-                OK != param.writeToValue(&bandIt.thresholdDb) ||
-                OK != param.writeToValue(&bandIt.kneeWidthDb) ||
-                OK != param.writeToValue(&bandIt.noiseGateThresholdDb) ||
-                OK != param.writeToValue(&bandIt.expanderRatio) ||
-                OK != param.writeToValue(&bandIt.preGainDb) ||
-                OK != param.writeToValue(&bandIt.postGainDb)) {
-                ALOGE("%s failed to write into param value %s", __func__, param.toString().c_str());
-                return BAD_VALUE;
-            }
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&enable));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.cutoffFrequencyHz));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.attackTimeMs));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.releaseTimeMs));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.ratio));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.thresholdDb));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.kneeWidthDb));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.noiseGateThresholdDb));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.expanderRatio));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.preGainDb));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.postGainDb));
             return OK;
         }
     }
@@ -480,10 +433,7 @@
 
 status_t AidlConversionDp::getLimiterConfig(EffectParamWriter& param) {
     int32_t channel;
-    if (OK != param.readFromParameter(&channel)) {
-        ALOGE("%s invalid parameter %s", __func__, param.toString().c_str());
-        return BAD_VALUE;
-    }
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
     Parameter aidlParam;
     Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag,
                                                   DynamicsProcessing::limiter);
@@ -498,17 +448,14 @@
         if (config.channel == channel) {
             int32_t inUse = mEngine.limiterInUse;
             int32_t enable = config.enable;
-            if (OK != param.writeToValue(&inUse) ||
-                OK != param.writeToValue(&enable) ||
-                OK != param.writeToValue(&config.linkGroup) ||
-                OK != param.writeToValue(&config.attackTimeMs) ||
-                OK != param.writeToValue(&config.releaseTimeMs) ||
-                OK != param.writeToValue(&config.ratio) ||
-                OK != param.writeToValue(&config.thresholdDb) ||
-                OK != param.writeToValue(&config.postGainDb)) {
-                ALOGE("%s failed to write into param value %s", __func__, param.toString().c_str());
-                return BAD_VALUE;
-            }
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&inUse));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&enable));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&config.linkGroup));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&config.attackTimeMs));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&config.releaseTimeMs));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&config.ratio));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&config.thresholdDb));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&config.postGainDb));
             return OK;
         }
     }
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h
index 6bab18d..c5d5a54 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h
@@ -36,18 +36,18 @@
     status_t setParameter(utils::EffectParamReader& param) override;
     status_t getParameter(utils::EffectParamWriter& param) override;
 
-    aidl::ConversionResult<
+    ConversionResult<
             aidl::android::hardware::audio::effect::DynamicsProcessing::ChannelConfig>
     readChannelConfigFromParam(utils::EffectParamReader& param);
-    aidl::ConversionResult<aidl::android::hardware::audio::effect::DynamicsProcessing::EqBandConfig>
+    ConversionResult<aidl::android::hardware::audio::effect::DynamicsProcessing::EqBandConfig>
     readEqBandConfigFromParam(utils::EffectParamReader& param);
-    aidl::ConversionResult<
+    ConversionResult<
             aidl::android::hardware::audio::effect::DynamicsProcessing::MbcBandConfig>
     readMbcBandConfigFromParam(utils::EffectParamReader& param);
-    aidl::ConversionResult<
+    ConversionResult<
             aidl::android::hardware::audio::effect::DynamicsProcessing::LimiterConfig>
     readLimiterConfigFromParam(utils::EffectParamReader& param);
-    aidl::ConversionResult<
+    ConversionResult<
             aidl::android::hardware::audio::effect::DynamicsProcessing::EngineArchitecture>
     readEngineArchitectureFromParam(utils::EffectParamReader& param);
 
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.cpp
index 960273b..754da43 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.cpp
@@ -24,7 +24,6 @@
 #include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
 #include <system/audio_effects/effect_environmentalreverb.h>
 
 #include <utils/Log.h>
@@ -39,161 +38,218 @@
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
 using ::aidl::android::hardware::audio::effect::EnvironmentalReverb;
 using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
 using ::android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
 
-#define MAKE_AIDL_PARAMETER(aidlParam, param, value, tag)                            \
-    {                                                                                \
-        if (OK != param.readFromValue(&value)) {                                     \
-            ALOGE("%s invalid parameter %s %d", __func__, #tag, value);              \
-            return BAD_VALUE;                                                        \
-        }                                                                            \
-        aidlParam = MAKE_SPECIFIC_PARAMETER(                                         \
-                EnvironmentalReverb, environmentalReverb, tag,                       \
-                VALUE_OR_RETURN_STATUS(aidl::android::convertIntegral<int>(value))); \
+/**
+ * Macro to read a parameter value from the effect_param_t wrapper and set it on the AIDL effect.
+ *
+ * Returns from the enclosing function on any error, otherwise execution continues.
+ *
+ * @param param EffectParamReader, a reader wrapper around effect_param_t.
+ * @param aidlType Type of the AIDL parameter field, used to construct the AIDL Parameter union.
+ * @param valueType Type of the value read from effect_param_t.
+ * @param tag The AIDL parameter union field tag.
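+ *
+ * Example (mirrors the REVERB_PARAM_ROOM_LEVEL case below):
+ *   SET_AIDL_PARAMETER(param, int32_t, int16_t, roomLevelMb);
+ * reads an int16_t from the legacy value area, converts it to the int32_t AIDL field and
+ * calls mEffect->setParameter() with EnvironmentalReverb::roomLevelMb.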
+ */
+#define SET_AIDL_PARAMETER(param, aidlType, valueType, tag)                                \
+    {                                                                                      \
+        Parameter aidlParam;                                                               \
+        valueType value;                                                                   \
+        if (status_t status = param.readFromValue(&value); status != OK) {                 \
+            ALOGE("%s  %s read from parameter failed, ret %d", __func__, #tag, status);    \
+            return status;                                                                 \
+        }                                                                                  \
+        aidlParam = MAKE_SPECIFIC_PARAMETER(                                               \
+                EnvironmentalReverb, environmentalReverb, tag,                             \
+                VALUE_OR_RETURN_STATUS(aidl::android::convertIntegral<aidlType>(value)));  \
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam))); \
     }
 
-#define GET_AIDL_PARAMETER(tag, value, param)                                                      \
+/**
+ * Macro to get a parameter from the AIDL effect and write its value through the effect_param_t
+ * wrapper.
+ *
+ * Returns from the enclosing function on any error, otherwise execution continues.
+ *
+ * @param param EffectParamWriter, a writer wrapper around effect_param_t.
+ * @param aidlType Type of the AIDL parameter field, used to construct the AIDL Parameter union.
+ * @param valueType Type of the value written to effect_param_t.
+ * @param tag The AIDL parameter union field tag.
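+ *
+ * Example (mirrors the REVERB_PARAM_DECAY_TIME case below):
+ *   GET_AIDL_PARAMETER(param, int32_t, uint32_t, decayTimeMs);
+ * queries EnvironmentalReverb::decayTimeMs via mEffect->getParameter() and writes it back to
+ * the legacy value area as a uint32_t.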
+ */
+#define GET_AIDL_PARAMETER(param, aidlType, valueType, tag)                                        \
     {                                                                                              \
+        aidlType value;                                                                            \
         Parameter aidlParam;                                                                       \
         Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(EnvironmentalReverb, environmentalReverbTag, \
                                                       EnvironmentalReverb::tag);                   \
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));    \
-        value = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(                               \
-                aidlParam, EnvironmentalReverb, environmentalReverb, EnvironmentalReverb::tag,     \
-                std::decay_t<decltype(value)>));                                                   \
-        return param.writeToValue(&value);                                                         \
+        value = VALUE_OR_RETURN_STATUS(                                                            \
+                GET_PARAMETER_SPECIFIC_FIELD(aidlParam, EnvironmentalReverb, environmentalReverb,  \
+                                             EnvironmentalReverb::tag, std::decay_t<aidlType>));   \
+        if (status_t status = param.writeToValue((valueType*)&value); status != OK) {              \
+            param.setStatus(status);                                                               \
+            ALOGE("%s %s write to parameter failed %d, ret %d", __func__, #tag, value, status);    \
+            return status;                                                                         \
+        }                                                                                          \
     }
 
 status_t AidlConversionEnvReverb::setParameter(EffectParamReader& param) {
     uint32_t type = 0;
-    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
-        OK != param.readFromParameter(&type)) {
-        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+    if (status_t status = param.readFromParameter(&type); status != OK) {
+        ALOGE("%s failed to read type from %s, ret %d", __func__, param.toString().c_str(), status);
         return BAD_VALUE;
     }
-    Parameter aidlParam;
-    uint16_t value16;
-    uint32_t value32;
+
     switch (type) {
         case REVERB_PARAM_ROOM_LEVEL: {
-            MAKE_AIDL_PARAMETER(aidlParam, param, value16, roomLevelMb);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, roomLevelMb);
             break;
         }
         case REVERB_PARAM_ROOM_HF_LEVEL: {
-            MAKE_AIDL_PARAMETER(aidlParam, param, value16, roomHfLevelMb);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, roomHfLevelMb);
             break;
         }
         case REVERB_PARAM_DECAY_TIME: {
-            MAKE_AIDL_PARAMETER(aidlParam, param, value32, decayTimeMs);
+            SET_AIDL_PARAMETER(param, int32_t, uint32_t, decayTimeMs);
             break;
         }
         case REVERB_PARAM_DECAY_HF_RATIO: {
-            MAKE_AIDL_PARAMETER(aidlParam, param, value16, decayHfRatioPm);
-            break;
-        }
-        case REVERB_PARAM_REVERB_LEVEL: {
-            MAKE_AIDL_PARAMETER(aidlParam, param, value16, levelMb);
-            break;
-        }
-        case REVERB_PARAM_REVERB_DELAY: {
-            MAKE_AIDL_PARAMETER(aidlParam, param, value32, delayMs);
-            break;
-        }
-        case REVERB_PARAM_DIFFUSION: {
-            MAKE_AIDL_PARAMETER(aidlParam, param, value16, diffusionPm);
-            break;
-        }
-        case REVERB_PARAM_DENSITY: {
-            MAKE_AIDL_PARAMETER(aidlParam, param, value16, densityPm);
-            break;
-        }
-        case REVERB_PARAM_BYPASS: {
-            if (OK != param.readFromValue(&value32)) {
-                ALOGE("%s invalid bypass parameter %d", __func__, value32);
-                return BAD_VALUE;
-            }
-            bool isByPass = VALUE_OR_RETURN_STATUS(aidl::android::convertIntegral<bool>(value32));
-            aidlParam = MAKE_SPECIFIC_PARAMETER(EnvironmentalReverb, environmentalReverb, bypass,
-                                                isByPass);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, decayHfRatioPm);
             break;
         }
         case REVERB_PARAM_REFLECTIONS_LEVEL: {
-            // TODO
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, reflectionsLevelMb);
             break;
         }
         case REVERB_PARAM_REFLECTIONS_DELAY: {
-            // TODO
+            SET_AIDL_PARAMETER(param, int32_t, uint32_t, reflectionsDelayMs);
+            break;
+        }
+        case REVERB_PARAM_REVERB_LEVEL: {
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, levelMb);
+            break;
+        }
+        case REVERB_PARAM_REVERB_DELAY: {
+            SET_AIDL_PARAMETER(param, int32_t, uint32_t, delayMs);
+            break;
+        }
+        case REVERB_PARAM_DIFFUSION: {
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, diffusionPm);
+            break;
+        }
+        case REVERB_PARAM_DENSITY: {
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, densityPm);
+            break;
+        }
+        case REVERB_PARAM_BYPASS: {
+            SET_AIDL_PARAMETER(param, bool, int32_t, bypass);
             break;
         }
         case REVERB_PARAM_PROPERTIES: {
-            // TODO
+            if (sizeof(t_reverb_settings) > param.getValueSize()) {
+                ALOGE("%s vsize %zu less than t_reverb_settings size %zu", __func__,
+                      param.getValueSize(), sizeof(t_reverb_settings));
+                return BAD_VALUE;
+            }
+            // this sequence needs to stay aligned with the t_reverb_settings field order
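+            // (room level, room HF level, decay time, decay HF ratio, reflections level,
+            //  reflections delay, reverb level, reverb delay, diffusion, density)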
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, roomLevelMb);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, roomHfLevelMb);
+            SET_AIDL_PARAMETER(param, int32_t, uint32_t, decayTimeMs);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, decayHfRatioPm);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, reflectionsLevelMb);
+            SET_AIDL_PARAMETER(param, int32_t, uint32_t, reflectionsDelayMs);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, levelMb);
+            SET_AIDL_PARAMETER(param, int32_t, uint32_t, delayMs);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, diffusionPm);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, densityPm);
             break;
         }
         default: {
-            // TODO: handle with vendor extension
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(EnvironmentalReverb,
+                                                          environmentalReverb, vendor, ext);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
+            break;
         }
     }
-    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+    return OK;
 }
 
 status_t AidlConversionEnvReverb::getParameter(EffectParamWriter& param) {
     uint32_t type = 0;
-    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
-        OK != param.readFromParameter(&type)) {
-        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
-        param.setStatus(BAD_VALUE);
-        return BAD_VALUE;
+    if (status_t status = param.readFromParameter(&type); status != OK) {
+        ALOGE("%s failed to read type from %s", __func__, param.toString().c_str());
+        param.setStatus(status);
+        return status;
     }
-    uint16_t value16;
-    uint32_t value32;
+
     switch (type) {
         case REVERB_PARAM_ROOM_LEVEL: {
-            GET_AIDL_PARAMETER(roomLevelMb, value16, param);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, roomLevelMb);
+            break;
         }
         case REVERB_PARAM_ROOM_HF_LEVEL: {
-            GET_AIDL_PARAMETER(roomHfLevelMb, value16, param);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, roomHfLevelMb);
+            break;
         }
         case REVERB_PARAM_DECAY_TIME: {
-            GET_AIDL_PARAMETER(decayTimeMs, value32, param);
+            GET_AIDL_PARAMETER(param, int32_t, uint32_t, decayTimeMs);
+            break;
         }
         case REVERB_PARAM_DECAY_HF_RATIO: {
-            GET_AIDL_PARAMETER(decayHfRatioPm, value16, param);
-        }
-        case REVERB_PARAM_REVERB_LEVEL: {
-            GET_AIDL_PARAMETER(levelMb, value16, param);
-        }
-        case REVERB_PARAM_REVERB_DELAY: {
-            GET_AIDL_PARAMETER(delayMs, value32, param);
-        }
-        case REVERB_PARAM_DIFFUSION: {
-            GET_AIDL_PARAMETER(diffusionPm, value16, param);
-        }
-        case REVERB_PARAM_DENSITY: {
-            GET_AIDL_PARAMETER(densityPm, value16, param);
-        }
-        case REVERB_PARAM_BYPASS: {
-            bool isByPass;
-            GET_AIDL_PARAMETER(bypass, isByPass, param);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, decayHfRatioPm);
+            break;
         }
         case REVERB_PARAM_REFLECTIONS_LEVEL: {
-            // TODO
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, reflectionsLevelMb);
             break;
         }
         case REVERB_PARAM_REFLECTIONS_DELAY: {
-            // TODO
+            GET_AIDL_PARAMETER(param, int32_t, uint32_t, reflectionsDelayMs);
+            break;
+        }
+        case REVERB_PARAM_REVERB_LEVEL: {
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, levelMb);
+            break;
+        }
+        case REVERB_PARAM_REVERB_DELAY: {
+            GET_AIDL_PARAMETER(param, int32_t, uint32_t, delayMs);
+            break;
+        }
+        case REVERB_PARAM_DIFFUSION: {
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, diffusionPm);
+            break;
+        }
+        case REVERB_PARAM_DENSITY: {
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, densityPm);
+            break;
+        }
+        case REVERB_PARAM_BYPASS: {
+            GET_AIDL_PARAMETER(param, bool, int32_t, bypass);
             break;
         }
         case REVERB_PARAM_PROPERTIES: {
-            // TODO
+            // this sequence needs to stay aligned with the t_reverb_settings field order (same as in setParameter above)
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, roomLevelMb);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, roomHfLevelMb);
+            GET_AIDL_PARAMETER(param, int32_t, uint32_t, decayTimeMs);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, decayHfRatioPm);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, reflectionsLevelMb);
+            GET_AIDL_PARAMETER(param, int32_t, uint32_t, reflectionsDelayMs);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, levelMb);
+            GET_AIDL_PARAMETER(param, int32_t, uint32_t, delayMs);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, diffusionPm);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, densityPm);
             break;
         }
         default: {
-            // TODO: handle with vendor extension
+            VENDOR_EXTENSION_GET_AND_RETURN(EnvironmentalReverb, environmentalReverb, param);
         }
     }
-    return BAD_VALUE;
+    return OK;
 }
 
 } // namespace effect
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
index a10d271..45b98a1 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
@@ -23,7 +23,6 @@
 #include <error/expected_utils.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
 #include <system/audio_effects/effect_equalizer.h>
 
 #include <utils/Log.h>
@@ -37,16 +36,16 @@
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
 using ::aidl::android::hardware::audio::effect::Equalizer;
 using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::Range;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::base::unexpected;
 using ::android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
 
 status_t AidlConversionEq::setParameter(EffectParamReader& param) {
     uint32_t type;
-    uint16_t value = 0;
-    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
-        OK != param.readFromParameter(&type) ||
-        OK != param.readFromValue(&value)) {
+    if (OK != param.readFromParameter(&type)) {
         ALOGE("%s invalid param %s", __func__, param.toString().c_str());
         return BAD_VALUE;
     }
@@ -54,13 +53,18 @@
     Parameter aidlParam;
     switch (type) {
         case EQ_PARAM_CUR_PRESET: {
+            uint16_t value = 0;
+            if (OK != param.readFromValue(&value)) {
+                ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+                return BAD_VALUE;
+            }
             aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, preset, (int)value);
             break;
         }
         case EQ_PARAM_BAND_LEVEL: {
             int32_t band;
-            uint16_t level;
-            if (OK != param.readFromParameter(&band) || OK != param.readFromParameter(&level)) {
+            int16_t level;
+            if (OK != param.readFromParameter(&band) || OK != param.readFromValue(&level)) {
                 ALOGE("%s invalid bandLevel param %s", __func__, param.toString().c_str());
                 return BAD_VALUE;
             }
@@ -69,49 +73,233 @@
             break;
         }
         case EQ_PARAM_PROPERTIES: {
-            // TODO: handle properties setting
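+            // legacy value layout (as parsed below): int16 preset, int16 numBands,
+            // int16 bandLevels[numBands]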
+            int16_t num;
+            if (OK != param.readFromValue(&num)) {
+                ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+                return BAD_VALUE;
+            }
+            // set preset if it's valid
+            if (num >= 0) {
+                aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, preset, (int)num);
+                break;
+            }
+            // set bandLevel if no preset was set
+            if (OK != param.readFromValue(&num)) {
+                ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+                return BAD_VALUE;
+            }
+            std::vector<Equalizer::BandLevel> bandLevels;
+            for (int i = 0; i < num; i++) {
+                Equalizer::BandLevel level({.index = i});
+                if (OK != param.readFromValue((uint16_t*)&level.levelMb)) {
+                    ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+                    return BAD_VALUE;
+                }
+                bandLevels.push_back(level);
+            }
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, bandLevels, bandLevels);
             break;
         }
         default: {
-            // TODO: implement vendor extension parameters
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, vendor, ext);
+            break;
         }
     }
 
     return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
 }
 
-aidl::ConversionResult<Parameter> AidlConversionEq::getAidlParameter(Equalizer::Tag tag) {
+ConversionResult<Parameter> AidlConversionEq::getAidlParameter(Equalizer::Tag tag) {
     Parameter aidlParam;
     Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Equalizer, equalizerTag, tag);
     RETURN_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
     return aidlParam;
 }
 
+ConversionResult<int32_t> AidlConversionEq::getParameterPreset() {
+    Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::preset));
+    return VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(aidlParam, Equalizer, equalizer,
+                                                               Equalizer::preset, int32_t));
+}
+
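+// Map the preset index read from the legacy parameter to its human-readable preset name.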
+ConversionResult<std::string> AidlConversionEq::getParameterPresetName(
+        EffectParamWriter& param) {
+    int32_t presetIdx;
+    if (OK != param.readFromParameter(&presetIdx)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return unexpected(BAD_VALUE);
+    }
+    Parameter aidlParam = VALUE_OR_RETURN(getAidlParameter(Equalizer::presets));
+    const auto& presets = VALUE_OR_RETURN(GET_PARAMETER_SPECIFIC_FIELD(
+            aidlParam, Equalizer, equalizer, Equalizer::presets, std::vector<Equalizer::Preset>));
+    for (const auto& preset : presets) {
+        if (presetIdx == preset.index) {
+            return preset.name;
+        }
+    }
+    return unexpected(BAD_VALUE);
+}
+
 status_t AidlConversionEq::getParameter(EffectParamWriter& param) {
-    uint32_t type = 0, value = 0;
-    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
-        OK != param.readFromParameter(&type)) {
+    uint32_t type = 0;
+    if (OK != param.readFromParameter(&type)) {
         param.setStatus(BAD_VALUE);
         ALOGE("%s invalid param %s", __func__, param.toString().c_str());
         return BAD_VALUE;
     }
-    Parameter aidlParam;
+
     switch (type) {
         case EQ_PARAM_NUM_BANDS: {
-            aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::bandLevels));
-            auto bandLevels = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+            Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::bandLevels));
+            const auto& bandLevels = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
                     aidlParam, Equalizer, equalizer, Equalizer::bandLevels,
                     std::vector<Equalizer::BandLevel>));
-            uint32_t num = bandLevels.size();
+            uint16_t bands = bandLevels.size();
+            return param.writeToValue(&bands);
+        }
+        case EQ_PARAM_LEVEL_RANGE: {
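+            // derive the [min, max] band level range (in mB) from the equalizer capability ranges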
+            const auto& ranges = mDesc.capability.range.get<Range::equalizer>();
+            for (const auto& r : ranges) {
+                if (r.min.getTag() == Equalizer::bandLevels &&
+                    r.max.getTag() == Equalizer::bandLevels) {
+                    const auto& aidlMin = r.min.get<Equalizer::bandLevels>();
+                    const auto& aidlMax = r.max.get<Equalizer::bandLevels>();
+                    int16_t min =
+                            std::min_element(aidlMin.begin(), aidlMin.end(), [](auto& a, auto& b) {
+                                return a.levelMb < b.levelMb;
+                            })->levelMb;
+                    int16_t max =
+                            std::max_element(aidlMax.begin(), aidlMax.end(), [](auto& a, auto& b) {
+                                return a.levelMb < b.levelMb;
+                            })->levelMb;
+                    return (OK == param.writeToValue(&min) && OK == param.writeToValue(&max))
+                                   ? OK
+                                   : BAD_VALUE;
+                }
+            }
+            break;
+        }
+        case EQ_PARAM_BAND_LEVEL: {
+            int32_t bandIdx;
+            if (OK != param.readFromParameter(&bandIdx)) {
+                break;
+            }
+
+            Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::bandLevels));
+            const auto& bandLevels = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Equalizer, equalizer, Equalizer::bandLevels,
+                    std::vector<Equalizer::BandLevel>));
+            for (const auto& band : bandLevels) {
+                if (band.index == bandIdx) {
+                    return param.writeToValue((uint16_t *)&band.levelMb);
+                }
+            }
+            break;
+        }
+        case EQ_PARAM_CENTER_FREQ: {
+            int32_t index;
+            if (OK != param.readFromParameter(&index)) {
+                break;
+            }
+
+            Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::centerFreqMh));
+            const auto& freqs = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Equalizer, equalizer, Equalizer::centerFreqMh, std::vector<int>));
+            if ((size_t)index >= freqs.size()) {
+                ALOGE("%s index %d exceed size %zu", __func__, index, freqs.size());
+                break;
+            }
+            return param.writeToValue(&freqs[index]);
+        }
+        case EQ_PARAM_BAND_FREQ_RANGE: {
+            int32_t index;
+            if (OK != param.readFromParameter(&index)) {
+                break;
+            }
+
+            Parameter aidlParam =
+                    VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::bandFrequencies));
+            const auto& bands = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Equalizer, equalizer, Equalizer::bandFrequencies,
+                    std::vector<Equalizer::BandFrequency>));
+            for (const auto& band : bands) {
+                if (band.index == index) {
+                    return (OK == param.writeToValue(&band.minMh) &&
+                            OK == param.writeToValue(&band.maxMh))
+                                   ? OK
+                                   : BAD_VALUE;
+                }
+            }
+            break;
+        }
+        case EQ_PARAM_GET_BAND: {
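+            // return the index of the band whose [minMh, maxMh] range contains the requested frequency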
+            int32_t freq;
+            if (OK != param.readFromParameter(&freq)) {
+                break;
+            }
+
+            Parameter aidlParam =
+                    VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::bandFrequencies));
+            const auto& bands = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Equalizer, equalizer, Equalizer::bandFrequencies,
+                    std::vector<Equalizer::BandFrequency>));
+            for (const auto& band : bands) {
+                if (freq >= band.minMh && freq <= band.maxMh) {
+                    return param.writeToValue((uint16_t*)&band.index);
+                }
+            }
+            break;
+        }
+        case EQ_PARAM_CUR_PRESET: {
+            int32_t preset = VALUE_OR_RETURN_STATUS(getParameterPreset());
+            return param.writeToValue((uint16_t*)&preset);
+        }
+        case EQ_PARAM_GET_NUM_OF_PRESETS: {
+            Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::presets));
+            const auto& presets = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Equalizer, equalizer, Equalizer::presets,
+                    std::vector<Equalizer::Preset>));
+            uint16_t num = presets.size();
             return param.writeToValue(&num);
         }
-        default:
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+        case EQ_PARAM_GET_PRESET_NAME: {
+            std::string name = VALUE_OR_RETURN_STATUS(getParameterPresetName(param));
+            return param.writeToValue(name.c_str(), name.length());
+        }
+        case EQ_PARAM_PROPERTIES: {
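+            // legacy reply layout: int16 preset, int16 numBands, int16 bandLevels[numBands],
+            // band levels sorted by band index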
+            int32_t preset = VALUE_OR_RETURN_STATUS(getParameterPreset());
+            if (OK != param.writeToValue((uint16_t*)&preset)) {
+                break;
+            }
+            Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::bandLevels));
+            std::vector<Equalizer::BandLevel> bandLevels =
+                    VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                            aidlParam, Equalizer, equalizer, Equalizer::bandLevels,
+                            std::vector<Equalizer::BandLevel>));
+            uint16_t bands = bandLevels.size();
+            if (OK != param.writeToValue(&bands)) {
+                break;
+            }
+            std::sort(bandLevels.begin(), bandLevels.end(),
+                      [](const auto& a, const auto& b) { return a.index < b.index; });
+            for (const auto& level : bandLevels) {
+                if (status_t status = param.writeToValue((uint16_t*)&level.levelMb); status != OK) {
+                    return status;
+                }
+            }
+            return OK;
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(Equalizer, equalizer, param);
+        }
     }
-    return param.writeToValue(&value);
+
+    param.setStatus(BAD_VALUE);
+    ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+    return BAD_VALUE;
 }
 
 } // namespace effect
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h
index 0433965..f94556c 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h
@@ -33,8 +33,10 @@
   private:
     status_t setParameter(utils::EffectParamReader& param) override;
     status_t getParameter(utils::EffectParamWriter& param) override;
-    aidl::ConversionResult<::aidl::android::hardware::audio::effect::Parameter> getAidlParameter(
+    ConversionResult<::aidl::android::hardware::audio::effect::Parameter> getAidlParameter(
             ::aidl::android::hardware::audio::effect::Equalizer::Tag tag);
+    ConversionResult<int32_t> getParameterPreset();
+    ConversionResult<std::string> getParameterPresetName(utils::EffectParamWriter& param);
 };
 
 }  // namespace effect
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp
index 9575e7d..73430ba 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp
@@ -23,7 +23,6 @@
 #include <error/expected_utils.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
 #include <system/audio_effects/effect_hapticgenerator.h>
 
 #include <utils/Log.h>
@@ -33,9 +32,11 @@
 namespace android {
 namespace effect {
 
+using ::aidl::android::getParameterSpecificField;
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
 using ::aidl::android::hardware::audio::effect::HapticGenerator;
 using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
 using ::android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
@@ -76,9 +77,11 @@
             break;
         }
         default: {
-            // TODO: implement vendor extension parameters
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(HapticGenerator, hapticGenerator, vendor, ext);
+            break;
         }
     }
 
@@ -86,8 +89,8 @@
 }
 
-// No parameter to get for HapticGenerator
+// HapticGenerator has no effect-specific parameters to get; only the vendor extension is handled
-status_t AidlConversionHapticGenerator::getParameter(EffectParamWriter& param __unused) {
-    return OK;
+status_t AidlConversionHapticGenerator::getParameter(EffectParamWriter& param) {
+    VENDOR_EXTENSION_GET_AND_RETURN(HapticGenerator, hapticGenerator, param);
 }
 
 } // namespace effect
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.cpp
index e3c898f..31eec65 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.cpp
@@ -23,7 +23,6 @@
 #include <error/expected_utils.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
 #include <system/audio_effects/effect_loudnessenhancer.h>
 
 #include <utils/Log.h>
@@ -37,6 +36,7 @@
 using ::aidl::android::getParameterSpecificField;
 using ::aidl::android::hardware::audio::effect::LoudnessEnhancer;
 using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
 using ::android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
@@ -56,9 +56,11 @@
             break;
         }
         default: {
-            // TODO: implement vendor extension parameters
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(LoudnessEnhancer, loudnessEnhancer, vendor, ext);
+            break;
         }
     }
     return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
@@ -84,9 +86,7 @@
             return param.writeToValue(&gain);
         }
         default: {
-            // TODO: implement vendor extension parameters
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            VENDOR_EXTENSION_GET_AND_RETURN(LoudnessEnhancer, loudnessEnhancer, param);
         }
     }
 }
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.cpp
index 69184cf..7c34ed7 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.cpp
@@ -23,7 +23,6 @@
 #include <error/expected_utils.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
 #include <system/audio_effects/effect_ns.h>
 
 #include <utils/Log.h>
@@ -33,10 +32,11 @@
 namespace android {
 namespace effect {
 
-using ::aidl::android::aidl_utils::statusTFromBinderStatus;
 using ::aidl::android::getParameterSpecificField;
-using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
 using ::aidl::android::hardware::audio::effect::NoiseSuppression;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
 using ::android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
@@ -61,9 +61,11 @@
             break;
         }
         default: {
-            // TODO: implement vendor extension parameters
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(NoiseSuppression, noiseSuppression, vendor, ext);
+            break;
         }
     }
     return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
@@ -100,9 +102,7 @@
             break;
         }
         default: {
-            // TODO: implement vendor extension parameters
-            ALOGW("%s unknown param %s", __func__, param.toString().c_str());
-            return BAD_VALUE;
+            VENDOR_EXTENSION_GET_AND_RETURN(NoiseSuppression, noiseSuppression, param);
         }
     }
     return param.writeToValue(&value);
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
index 3e9bf4b..e936aef 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
@@ -23,7 +23,6 @@
 #include <error/expected_utils.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
 #include <system/audio_effects/effect_presetreverb.h>
 
 #include <utils/Log.h>
@@ -38,6 +37,7 @@
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
 using ::aidl::android::hardware::audio::effect::Parameter;
 using ::aidl::android::hardware::audio::effect::PresetReverb;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
 using ::android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
@@ -59,7 +59,10 @@
         aidlParam = MAKE_SPECIFIC_PARAMETER(PresetReverb, presetReverb, preset,
                                             static_cast<PresetReverb::Presets>(value));
     } else {
-        // handle vendor extension
+        // vendor extension: wrap the data area into DefaultExtension; parameter tag ignored
+        VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+        aidlParam = MAKE_SPECIFIC_PARAMETER(PresetReverb, presetReverb, vendor, ext);
     }
 
     return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
@@ -86,6 +89,7 @@
         value = static_cast<uint16_t>(aidlPreset);
     } else {
         // handle vendor extension
+        VENDOR_EXTENSION_GET_AND_RETURN(PresetReverb, presetReverb, param);
     }
     return param.writeToValue(&value);
 }
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
index 1dac479..eadd6c3 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
@@ -20,10 +20,11 @@
 #define LOG_TAG "AidlConversionSpatializer"
 //#define LOG_NDEBUG 0
 
+#include <aidl/android/hardware/audio/effect/DefaultExtension.h>
+#include <aidl/android/hardware/audio/effect/VendorExtension.h>
 #include <error/expected_utils.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
 #include <system/audio_effects/effect_spatializer.h>
 
 #include <utils/Log.h>
@@ -34,34 +35,37 @@
 namespace effect {
 
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::DefaultExtension;
 using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
 using ::android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
 
 status_t AidlConversionSpatializer::setParameter(EffectParamReader& param) {
-    uint32_t type = 0;
-    uint16_t value = 0;
-    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
-        OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
-        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
-        return BAD_VALUE;
-    }
-    Parameter aidlParam;
-    // TODO
+    Parameter aidlParam = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_EffectParameterReader_ParameterExtension(param));
     return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
 }
 
 status_t AidlConversionSpatializer::getParameter(EffectParamWriter& param) {
-    uint32_t type = 0, value = 0;
-    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
-        OK != param.readFromParameter(&type)) {
+    DefaultExtension defaultExt;
+    defaultExt.bytes.resize(param.getParameterSize());  // size the buffer before reading into it
+    if (OK != param.readFromParameter(defaultExt.bytes.data(), defaultExt.bytes.size())) {
         ALOGE("%s invalid param %s", __func__, param.toString().c_str());
         param.setStatus(BAD_VALUE);
         return BAD_VALUE;
     }
-    // TODO
-    return param.writeToValue(&value);
+
+    VendorExtension idTag;
+    idTag.extension.setParcelable(defaultExt);
+    Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, idTag);
+    Parameter aidlParam;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+    // copy the AIDL extension data back to effect_param_t
+    return VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_ParameterExtension_EffectParameterWriter(aidlParam,
+                                                                                  param));
 }
 
 } // namespace effect
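
On the get path, the vendor bytes returned by the HAL have to fit into the caller-supplied value area, so the write back into the legacy buffer should be bounded by the reply capacity; the shared aidl2legacy_ParameterExtension_EffectParameterWriter helper is expected to enforce that. A small sketch of such a bounded copy, with plain buffers in place of EffectParamWriter:

    // Minimal sketch: copy returned extension bytes into a fixed-capacity reply
    // area, failing if the data would not fit (maps to BAD_VALUE in the wrapper).
    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Returns the number of bytes written, or -1 if the reply area is too small.
    int writeVendorReply(const std::vector<uint8_t>& returned,
                         uint8_t* reply, size_t replyCapacity) {
        if (returned.size() > replyCapacity) {
            return -1;
        }
        std::memcpy(reply, returned.data(), returned.size());
        return static_cast<int>(returned.size());
    }
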
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.cpp
index 3baf72e..488d5cd 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.cpp
@@ -22,6 +22,7 @@
 //#define LOG_NDEBUG 0
 
 #include <aidl/android/hardware/audio/effect/DefaultExtension.h>
+#include <aidl/android/hardware/audio/effect/VendorExtension.h>
 #include <error/expected_utils.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
@@ -50,48 +51,21 @@
  * pass down in Parameter as is.
  */
 status_t AidlConversionVendorExtension::setParameter(EffectParamReader& param) {
-    size_t len = param.getValueSize();
-    DefaultExtension ext;
-    ext.bytes.resize(len);
-    if (OK != param.readFromValue(ext.bytes.data(), len)) {
-        ALOGE("%s read value from param %s failed", __func__, param.toString().c_str());
-        return BAD_VALUE;
-    }
-    VendorExtension effectParam;
-    effectParam.extension.setParcelable(ext);
-    Parameter aidlParam = UNION_MAKE(Parameter, specific,
-                                     UNION_MAKE(Parameter::Specific, vendorEffect, effectParam));
+    Parameter aidlParam = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_EffectParameterReader_ParameterExtension(param));
     return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
 }
 
 status_t AidlConversionVendorExtension::getParameter(EffectParamWriter& param) {
-    int32_t tag;
-    if (OK != param.readFromParameter(&tag)) {
-        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
-        param.setStatus(BAD_VALUE);
-        return BAD_VALUE;
-    }
-
+    VendorExtension extId = VALUE_OR_RETURN_STATUS(
+            aidl::android::legacy2aidl_EffectParameterReader_Param_VendorExtension(param));
+    Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, extId);
     Parameter aidlParam;
-    Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, tag /* parameter tag */);
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
-    VendorExtension effectParam = VALUE_OR_RETURN_STATUS(
-            (::aidl::android::getParameterSpecific<Parameter, VendorExtension,
-                                                   Parameter::Specific::vendorEffect>(aidlParam)));
-    std::optional<DefaultExtension> ext;
-    if (STATUS_OK != effectParam.extension.getParcelable(&ext) || !ext.has_value()) {
-        ALOGE("%s get extension parcelable failed", __func__);
-        param.setStatus(BAD_VALUE);
-        return BAD_VALUE;
-    }
-    const auto& extBytes = ext.value().bytes;
-    if (param.getValueSize() < extBytes.size()) {
-        ALOGE("%s extension return data %zu exceed vsize %zu", __func__, extBytes.size(),
-              param.getValueSize());
-        param.setStatus(BAD_VALUE);
-        return BAD_VALUE;
-    }
-    return param.writeToValue(extBytes.data(), extBytes.size());
+    // copy the AIDL extension data back to effect_param_t
+    return VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_ParameterExtension_EffectParameterWriter(aidlParam,
+                                                                                  param));
 }
 
 } // namespace effect
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
index 482114d..c95c3a9 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
@@ -21,10 +21,11 @@
 //#define LOG_NDEBUG 0
 
 #include <error/expected_utils.h>
+#include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
-#include <system/audio_effects/effect_spatializer.h>
+#include <system/audio_effects/aidl_effects_utils.h>
+#include <system/audio_effects/effect_virtualizer.h>
 
 #include <utils/Log.h>
 
@@ -34,34 +35,129 @@
 namespace effect {
 
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::getParameterSpecificField;
 using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::Range;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::aidl::android::hardware::audio::effect::Virtualizer;
+using ::aidl::android::media::audio::common::AudioDeviceDescription;
 using ::android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
 
 status_t AidlConversionVirtualizer::setParameter(EffectParamReader& param) {
     uint32_t type = 0;
-    uint16_t value = 0;
-    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
-        OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
+    if (OK != param.readFromParameter(&type)) {
         ALOGE("%s invalid param %s", __func__, param.toString().c_str());
         return BAD_VALUE;
     }
     Parameter aidlParam;
-    // TODO
+    switch (type) {
+        case VIRTUALIZER_PARAM_STRENGTH: {
+            int16_t strength = 0;
+            if (OK != param.readFromValue(&strength)) {
+                ALOGE("%s invalid param %s for type %d", __func__, param.toString().c_str(), type);
+                return BAD_VALUE;
+            }
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Virtualizer, virtualizer, strengthPm, strength);
+            break;
+        }
+        case VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE: {
+            audio_devices_t deviceType;
+            if (OK != param.readFromValue(&deviceType)) {
+                ALOGE("%s invalid param %s for type %d", __func__, param.toString().c_str(), type);
+                return BAD_VALUE;
+            }
+            AudioDeviceDescription deviceDesc = VALUE_OR_RETURN_STATUS(
+                    ::aidl::android::legacy2aidl_audio_devices_t_AudioDeviceDescription(
+                            deviceType));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Virtualizer, virtualizer, device, deviceDesc);
+            break;
+        }
+        default: {
+            // vendor extension: wrap the data area into DefaultExtension; parameter tag ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Virtualizer, virtualizer, vendor, ext);
+            break;
+        }
+    }
     return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
 }
 
 status_t AidlConversionVirtualizer::getParameter(EffectParamWriter& param) {
-    uint32_t type = 0, value = 0;
-    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
-        OK != param.readFromParameter(&type)) {
+    uint32_t type = 0;
+    if (OK != param.readFromParameter(&type)) {
         ALOGE("%s invalid param %s", __func__, param.toString().c_str());
         param.setStatus(BAD_VALUE);
         return BAD_VALUE;
     }
-    // TODO
-    return param.writeToValue(&value);
+    Parameter aidlParam;
+    switch (type) {
+        case VIRTUALIZER_PARAM_STRENGTH_SUPPORTED: {
+            // an invalid range indicates not setting support for this parameter
+            uint32_t support =
+                    ::aidl::android::hardware::audio::effect::isRangeValid<Range::Tag::virtualizer>(
+                            Virtualizer::strengthPm, mDesc.capability);
+            return param.writeToValue(&support);
+        }
+        case VIRTUALIZER_PARAM_STRENGTH: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Virtualizer, virtualizerTag,
+                                                          Virtualizer::strengthPm);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            int16_t strength = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Virtualizer, virtualizer, Virtualizer::strengthPm, int32_t));
+            return param.writeToValue(&strength);
+        }
+        case VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES: {
+            audio_channel_mask_t mask;
+            audio_devices_t device;
+            if (OK != param.readFromParameter(&mask) || OK != param.readFromParameter(&device)) {
+                ALOGW("%s illegal param %s", __func__, param.toString().c_str());
+                return BAD_VALUE;
+            }
+            Virtualizer::SpeakerAnglesPayload payload = {
+                    .layout = VALUE_OR_RETURN_STATUS(
+                            ::aidl::android::legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                                    mask, false)),
+                    .device = VALUE_OR_RETURN_STATUS(
+                            ::aidl::android::legacy2aidl_audio_devices_t_AudioDeviceDescription(
+                                    device))};
+            Virtualizer::Id vId = UNION_MAKE(Virtualizer::Id, speakerAnglesPayload, payload);
+            Parameter::Id id = UNION_MAKE(Parameter::Id, virtualizerTag, vId);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            const auto& angles = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Virtualizer, virtualizer, Virtualizer::speakerAngles,
+                    std::vector<Virtualizer::ChannelAngle>));
+            for (const auto& angle : angles) {
+                const audio_channel_mask_t chMask = ::aidl::android::
+                        aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
+                                angle.channel, false);
+                ALOGV("%s aidl channel %d -> legacy mask %#x", __func__, angle.channel, chMask);
+                if (OK != param.writeToValue(&chMask) ||
+                    OK != param.writeToValue(&angle.azimuthDegree) ||
+                    OK != param.writeToValue(&angle.elevationDegree)) {
+                    ALOGW("%s can't write angles to param %s", __func__, param.toString().c_str());
+                    return BAD_VALUE;
+                }
+            }
+            return OK;
+        }
+        case VIRTUALIZER_PARAM_VIRTUALIZATION_MODE: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Virtualizer, virtualizerTag,
+                                                          Virtualizer::device);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            AudioDeviceDescription device = VALUE_OR_RETURN_STATUS(
+                    GET_PARAMETER_SPECIFIC_FIELD(aidlParam, Virtualizer, virtualizer,
+                                                 Virtualizer::device, AudioDeviceDescription));
+            const audio_devices_t deviceType = VALUE_OR_RETURN_STATUS(
+                    ::aidl::android::aidl2legacy_AudioDeviceDescription_audio_devices_t(device));
+            return param.writeToValue(&deviceType);
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(Virtualizer, virtualizer, param);
+        }
+    }
 }
 
 } // namespace effect
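
The VIRTUAL_SPEAKER_ANGLES query above writes one (channel mask, azimuth, elevation) triple per channel into the legacy value area. A standalone sketch of that packing, assuming 32-bit fields for all three values (the exact field widths come from Virtualizer::ChannelAngle in the AIDL definition and are an assumption here):

    // Minimal sketch: flatten per-channel speaker angles into the triple layout
    // used by the legacy VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES reply.
    #include <cstdint>
    #include <vector>

    struct ChannelAngle {        // stand-in for Virtualizer::ChannelAngle (assumed 32-bit fields)
        int32_t channelMask;     // single-channel legacy mask bit
        int32_t azimuthDegree;
        int32_t elevationDegree;
    };

    std::vector<int32_t> packSpeakerAngles(const std::vector<ChannelAngle>& angles) {
        std::vector<int32_t> out;
        out.reserve(angles.size() * 3);
        for (const auto& a : angles) {
            out.push_back(a.channelMask);     // one triple per channel, in channel order
            out.push_back(a.azimuthDegree);
            out.push_back(a.elevationDegree);
        }
        return out;
    }
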
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
index 9ed601f..2d5af59 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <cstddef>
 #include <cstdint>
 #include <cstring>
 #include <optional>
@@ -23,7 +24,6 @@
 #include <error/expected_utils.h>
 #include <media/AidlConversionNdk.h>
 #include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
 #include <system/audio_effects/effect_visualizer.h>
 
 #include <utils/Log.h>
@@ -33,35 +33,144 @@
 namespace android {
 namespace effect {
 
+using ::aidl::android::getParameterSpecificField;
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
 using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::aidl::android::hardware::audio::effect::Visualizer;
 using ::android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
 
 status_t AidlConversionVisualizer::setParameter(EffectParamReader& param) {
-    uint32_t type = 0;
-    uint16_t value = 0;
-    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
+    uint32_t type = 0, value = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
         OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
         ALOGE("%s invalid param %s", __func__, param.toString().c_str());
         return BAD_VALUE;
     }
     Parameter aidlParam;
-    // TODO
+    switch (type) {
+        case VISUALIZER_PARAM_CAPTURE_SIZE: {
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Visualizer, visualizer, captureSamples, value);
+            break;
+        }
+        case VISUALIZER_PARAM_SCALING_MODE: {
+            Visualizer::ScalingMode mode = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_Parameter_Visualizer_uint32_ScalingMode(value));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Visualizer, visualizer, scalingMode, mode);
+            break;
+        }
+        case VISUALIZER_PARAM_LATENCY: {
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Visualizer, visualizer, latencyMs, value);
+            break;
+        }
+        case VISUALIZER_PARAM_MEASUREMENT_MODE: {
+            Visualizer::MeasurementMode mode = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_Parameter_Visualizer_uint32_MeasurementMode(value));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Visualizer, visualizer, measurementMode, mode);
+            break;
+        }
+        default: {
+            // vendor extension: wrap the data area into DefaultExtension; parameter tag ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Visualizer, visualizer, vendor, ext);
+            break;
+        }
+    }
     return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
 }
 
 status_t AidlConversionVisualizer::getParameter(EffectParamWriter& param) {
     uint32_t type = 0, value = 0;
-    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(int32_t)) ||
         OK != param.readFromParameter(&type)) {
         ALOGE("%s invalid param %s", __func__, param.toString().c_str());
         param.setStatus(BAD_VALUE);
         return BAD_VALUE;
     }
-    // TODO
-    return param.writeToValue(&value);
+    Parameter aidlParam;
+    switch (type) {
+        case VISUALIZER_PARAM_CAPTURE_SIZE: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Visualizer, visualizerTag,
+                                                          Visualizer::captureSamples);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            value = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Visualizer, visualizer, Visualizer::captureSamples, int32_t));
+            mCaptureSize = value;
+            return param.writeToValue(&value);
+        }
+        case VISUALIZER_PARAM_SCALING_MODE: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Visualizer, visualizerTag,
+                                                          Visualizer::scalingMode);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            Visualizer::ScalingMode mode = VALUE_OR_RETURN_STATUS(
+                    GET_PARAMETER_SPECIFIC_FIELD(aidlParam, Visualizer, visualizer,
+                                                 Visualizer::scalingMode, Visualizer::ScalingMode));
+            value = VALUE_OR_RETURN_STATUS(
+                    aidl::android::aidl2legacy_Parameter_Visualizer_ScalingMode_uint32(mode));
+            return param.writeToValue(&value);
+        }
+        case VISUALIZER_PARAM_LATENCY: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Visualizer, visualizerTag,
+                                                          Visualizer::latencyMs);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            value = (int32_t)VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Visualizer, visualizer, Visualizer::latencyMs, int32_t));
+            return param.writeToValue(&value);
+        }
+        case VISUALIZER_PARAM_MEASUREMENT_MODE: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Visualizer, visualizerTag,
+                                                          Visualizer::measurementMode);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            Visualizer::MeasurementMode mode = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Visualizer, visualizer, Visualizer::measurementMode,
+                    Visualizer::MeasurementMode));
+            value = VALUE_OR_RETURN_STATUS(
+                    aidl::android::aidl2legacy_Parameter_Visualizer_MeasurementMode_uint32(mode));
+            return param.writeToValue(&value);
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(Visualizer, visualizer, param);
+        }
+    }
+}
+
+status_t AidlConversionVisualizer::visualizerCapture(uint32_t* replySize, void* pReplyData) {
+    if (!replySize || !pReplyData || *replySize != mCaptureSize) {
+        ALOGE("%s illegal param replySize %p pReplyData %p", __func__, replySize, pReplyData);
+        return BAD_VALUE;
+    }
+
+    Parameter aidlParam;
+    Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Visualizer, visualizerTag,
+                                                    Visualizer::captureSampleBuffer);
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+    const auto& samples = VALUE_OR_RETURN_STATUS(
+            GET_PARAMETER_SPECIFIC_FIELD(aidlParam, Visualizer, visualizer,
+                                         Visualizer::captureSampleBuffer, std::vector<uint8_t>));
+    size_t len = std::min((size_t)*replySize, samples.size());
+    std::memcpy(pReplyData, samples.data(), *replySize = len);
+    return OK;
+}
+
+status_t AidlConversionVisualizer::visualizerMeasure(uint32_t* replySize, void* pReplyData) {
+    if (!replySize || !pReplyData || *replySize != 2 * sizeof(int32_t)) {
+        ALOGE("%s illegal param replySize %p pReplyData %p", __func__, replySize, pReplyData);
+        return BAD_VALUE;
+    }
+
+    Parameter aidlParam;
+    Parameter::Id id =
+            MAKE_SPECIFIC_PARAMETER_ID(Visualizer, visualizerTag, Visualizer::measurement);
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+    const auto& measure = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+            aidlParam, Visualizer, visualizer, Visualizer::measurement, Visualizer::Measurement));
+    int32_t* reply = (int32_t *) pReplyData;
+    *reply++ = measure.rms;
+    *reply = measure.peak;
+    return OK;
 }
 
 } // namespace effect
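
The two new command handlers reduce to reply-buffer plumbing: visualizerCapture clamps the returned waveform to the caller's reply size, and visualizerMeasure writes the RMS and peak values as two consecutive int32_t. A standalone sketch of both, with plain buffers in place of the AIDL Parameter round trip:

    // Minimal sketch of the visualizer reply packing, independent of the AIDL HAL.
    #include <algorithm>
    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Copy at most *replySize captured bytes and report how many were written,
    // mirroring the clamp in visualizerCapture() above.
    void fillCaptureReply(const std::vector<uint8_t>& samples,
                          uint32_t* replySize, void* replyData) {
        const size_t len = std::min(static_cast<size_t>(*replySize), samples.size());
        std::memcpy(replyData, samples.data(), len);
        *replySize = static_cast<uint32_t>(len);
    }

    // Measurement reply: two int32_t values, written RMS first and peak second,
    // matching the order used in visualizerMeasure() above.
    void fillMeasureReply(int32_t rms, int32_t peak, void* replyData) {
        int32_t* reply = static_cast<int32_t*>(replyData);
        reply[0] = rms;
        reply[1] = peak;
    }
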
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.h
index a7e4ea1..e380bc6 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.h
@@ -32,8 +32,11 @@
     ~AidlConversionVisualizer() {}
 
   private:
+    uint32_t mCaptureSize = 0;
     status_t setParameter(utils::EffectParamReader& param) override;
     status_t getParameter(utils::EffectParamWriter& param) override;
+    status_t visualizerCapture(uint32_t* replySize, void* pReplyData) override;
+    status_t visualizerMeasure(uint32_t* replySize, void* pReplyData) override;
 };
 
 }  // namespace effect
diff --git a/media/libaudiohal/include/media/audiohal/AudioEffectUuid.h b/media/libaudiohal/include/media/audiohal/AudioEffectUuid.h
deleted file mode 100644
index b21e4c9..0000000
--- a/media/libaudiohal/include/media/audiohal/AudioEffectUuid.h
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <aidl/android/media/audio/common/AudioUuid.h>
-
-namespace android {
-namespace effect {
-
-using ::aidl::android::media::audio::common::AudioUuid;
-
-// 7b491460-8d4d-11e0-bd61-0002a5d5c51b.
-static const AudioUuid kAcousticEchoCancelerTypeUUID = {static_cast<int32_t>(0x7b491460),
-                                                        0x8d4d,
-                                                        0x11e0,
-                                                        0xbd61,
-                                                        {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
-// 0xae3c653b-be18-4ab8-8938-418f0a7f06ac
-static const AudioUuid kAutomaticGainControl2TypeUUID = {static_cast<int32_t>(0xae3c653b),
-                                                         0xbe18,
-                                                         0x4ab8,
-                                                         0x8938,
-                                                         {0x41, 0x8f, 0x0a, 0x7f, 0x06, 0xac}};
-// 0634f220-ddd4-11db-a0fc-0002a5d5c51b
-static const AudioUuid kBassBoostTypeUUID = {static_cast<int32_t>(0x0634f220),
-                                             0xddd4,
-                                             0x11db,
-                                             0xa0fc,
-                                             {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
-// fa81862a-588b-11ed-9b6a-0242ac120002
-static const AudioUuid kDownmixTypeUUID = {static_cast<int32_t>(0xfa81862a),
-                                           0x588b,
-                                           0x11ed,
-                                           0x9b6a,
-                                           {0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
-// 7261676f-6d75-7369-6364-28e2fd3ac39e
-static const AudioUuid kDynamicsProcessingTypeUUID = {static_cast<int32_t>(0x7261676f),
-                                                      0x6d75,
-                                                      0x7369,
-                                                      0x6364,
-                                                      {0x28, 0xe2, 0xfd, 0x3a, 0xc3, 0x9e}};
-// 0bed4300-ddd6-11db-8f34-0002a5d5c51b.
-static const AudioUuid kEqualizerTypeUUID = {static_cast<int32_t>(0x0bed4300),
-                                             0xddd6,
-                                             0x11db,
-                                             0x8f34,
-                                             {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
-// 1411e6d6-aecd-4021-a1cf-a6aceb0d71e5
-static const AudioUuid kHapticGeneratorTypeUUID = {static_cast<int32_t>(0x1411e6d6),
-                                                   0xaecd,
-                                                   0x4021,
-                                                   0xa1cf,
-                                                   {0xa6, 0xac, 0xeb, 0x0d, 0x71, 0xe5}};
-// fe3199be-aed0-413f-87bb-11260eb63cf1
-static const AudioUuid kLoudnessEnhancerTypeUUID = {static_cast<int32_t>(0xfe3199be),
-                                                    0xaed0,
-                                                    0x413f,
-                                                    0x87bb,
-                                                    {0x11, 0x26, 0x0e, 0xb6, 0x3c, 0xf1}};
-// c2e5d5f0-94bd-4763-9cac-4e234d06839e
-static const AudioUuid kEnvReverbTypeUUID = {static_cast<int32_t>(0xc2e5d5f0),
-                                             0x94bd,
-                                             0x4763,
-                                             0x9cac,
-                                             {0x4e, 0x23, 0x4d, 0x06, 0x83, 0x9e}};
-// 58b4b260-8e06-11e0-aa8e-0002a5d5c51b
-static const AudioUuid kNoiseSuppressionTypeUUID = {static_cast<int32_t>(0x58b4b260),
-                                                    0x8e06,
-                                                    0x11e0,
-                                                    0xaa8e,
-                                                    {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
-// 47382d60-ddd8-11db-bf3a-0002a5d5c51b
-static const AudioUuid kPresetReverbTypeUUID = {static_cast<int32_t>(0x47382d60),
-                                                0xddd8,
-                                                0x11db,
-                                                0xbf3a,
-                                                {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
-// ccd4cf09-a79d-46c2-9aae-06a1698d6c8f
-static const AudioUuid kSpatializerTypeUUID = {static_cast<int32_t>(0xccd4cf09),
-                                                0xa79d,
-                                                0x46c2,
-                                                0x9aae,
-                                                {0x06, 0xa1, 0x69, 0x8d, 0x6c, 0x8f}};
-// 37cc2c00-dddd-11db-8577-0002a5d5c51b
-static const AudioUuid kVirtualizerTypeUUID = {static_cast<int32_t>(0x37cc2c00),
-                                               0xdddd,
-                                               0x11db,
-                                               0x8577,
-                                               {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
-// fa819f3e-588b-11ed-9b6a-0242ac120002
-static const AudioUuid kVisualizerTypeUUID = {static_cast<int32_t>(0xfa819f3e),
-                                              0x588b,
-                                              0x11ed,
-                                              0x9b6a,
-                                              {0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
-// fa81a2b8-588b-11ed-9b6a-0242ac120002
-static const AudioUuid kVolumeTypeUUID = {static_cast<int32_t>(0xfa81a2b8),
-                                          0x588b,
-                                          0x11ed,
-                                          0x9b6a,
-                                          {0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
-
-}  // namespace effect
-}  // namespace android
diff --git a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
index 71c7586..c076ccc 100644
--- a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
+++ b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
@@ -27,6 +27,7 @@
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <system/audio_effects/audio_effects_utils.h>
 #include <system/audio_effects/effect_aec.h>
+#include <system/audio_effects/effect_agc.h>
 #include <system/audio_effects/effect_agc2.h>
 #include <system/audio_effects/effect_bassboost.h>
 #include <system/audio_effects/effect_downmix.h>
@@ -157,6 +158,9 @@
         std::make_tuple(FX_IID_AEC,
                         createEffectParamCombination(AEC_PARAM_ECHO_DELAY, 0xff /* echoDelayMs */,
                                                      sizeof(int32_t) /* returnValueSize */)),
+        std::make_tuple(FX_IID_AGC,
+                        createEffectParamCombination(AGC_PARAM_TARGET_LEVEL, 20 /* targetLevel */,
+                                                     sizeof(int16_t) /* returnValueSize */)),
         std::make_tuple(FX_IID_AGC2, createEffectParamCombination(
                                              AGC2_PARAM_FIXED_DIGITAL_GAIN, 15 /* digitalGainDb */,
                                              sizeof(int32_t) /* returnValueSize */)),
@@ -165,7 +169,7 @@
                                                      sizeof(int32_t) /* returnValueSize */)),
         std::make_tuple(EFFECT_UIID_DOWNMIX,
                         createEffectParamCombination(DOWNMIX_PARAM_TYPE, DOWNMIX_TYPE_FOLD,
-                                                     sizeof(int32_t) /* returnValueSize */)),
+                                                     sizeof(int16_t) /* returnValueSize */)),
         std::make_tuple(SL_IID_DYNAMICSPROCESSING,
                         createEffectParamCombination(
                                 std::array<uint32_t, 2>({DP_PARAM_INPUT_GAIN, 0 /* channel */}),
@@ -264,7 +268,8 @@
         if (mCombination->valueSize) {
             std::vector<uint8_t> response(mCombination->valueSize);
             EXPECT_EQ(OK, parameterGet.readFromValue(response.data(), mCombination->valueSize))
-                << parameterGet.toString();
+                    << " failed to read valueSize " << mCombination->valueSize << " from "
+                    << parameterGet.toString();
             EXPECT_EQ(response, mExpectedValue);
         }
     }
diff --git a/media/libeffects/downmix/aidl/DownmixContext.cpp b/media/libeffects/downmix/aidl/DownmixContext.cpp
index 43bfeed..ac893d8 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.cpp
+++ b/media/libeffects/downmix/aidl/DownmixContext.cpp
@@ -21,8 +21,8 @@
 #include "DownmixContext.h"
 
 using aidl::android::hardware::audio::effect::IEffect;
-using ::aidl::android::media::audio::common::AudioChannelLayout;
-using ::android::hardware::audio::common::getChannelCount;
+using aidl::android::hardware::audio::common::getChannelCount;
+using aidl::android::media::audio::common::AudioChannelLayout;
 
 namespace aidl::android::hardware::audio::effect {
 
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
index 17d0736..7068c5c 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.cpp
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -17,19 +17,20 @@
 #define LOG_TAG "AHAL_DownmixImpl"
 
 #include <android-base/logging.h>
+#include <system/audio_effects/effect_uuid.h>
 
 #include "EffectDownmix.h"
 
 using aidl::android::hardware::audio::effect::Descriptor;
 using aidl::android::hardware::audio::effect::DownmixImpl;
+using aidl::android::hardware::audio::effect::getEffectImplUuidDownmix;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidDownmix;
 using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::kDownmixImplUUID;
-using aidl::android::hardware::audio::effect::kDownmixTypeUUID;
 using aidl::android::media::audio::common::AudioUuid;
 
 extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                            std::shared_ptr<IEffect>* instanceSpp) {
-    if (!in_impl_uuid || *in_impl_uuid != kDownmixImplUUID) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDownmix()) {
         LOG(ERROR) << __func__ << "uuid not supported";
         return EX_ILLEGAL_ARGUMENT;
     }
@@ -44,7 +45,7 @@
 }
 
 extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
-    if (!in_impl_uuid || *in_impl_uuid != kDownmixImplUUID) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDownmix()) {
         LOG(ERROR) << __func__ << "uuid not supported";
         return EX_ILLEGAL_ARGUMENT;
     }
@@ -56,11 +57,12 @@
 
 const std::string DownmixImpl::kEffectName = "Multichannel Downmix To Stereo";
 const Descriptor DownmixImpl::kDescriptor = {
-        .common = {
-                .id = {.type = kDownmixTypeUUID, .uuid = kDownmixImplUUID, .proxy = std::nullopt},
-                .flags = {.type = Flags::Type::INSERT, .insert = Flags::Insert::FIRST},
-                .name = DownmixImpl::kEffectName,
-                .implementor = "The Android Open Source Project"}};
+        .common = {.id = {.type = getEffectTypeUuidDownmix(),
+                          .uuid = getEffectImplUuidDownmix(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::INSERT, .insert = Flags::Insert::FIRST},
+                   .name = DownmixImpl::kEffectName,
+                   .implementor = "The Android Open Source Project"}};
 
 ndk::ScopedAStatus DownmixImpl::getDescriptor(Descriptor* _aidl_return) {
     RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.h b/media/libeffects/downmix/aidl/EffectDownmix.h
index d590133..812d26b 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.h
+++ b/media/libeffects/downmix/aidl/EffectDownmix.h
@@ -21,7 +21,6 @@
 
 #include "DownmixContext.h"
 #include "effect-impl/EffectImpl.h"
-#include "effect-impl/EffectUUID.h"
 
 namespace aidl::android::hardware::audio::effect {
 
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index 4af5fd8..e508d48 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -17,6 +17,7 @@
 #define LOG_TAG "AHAL_DynamicsProcessingLibEffects"
 
 #include <android-base/logging.h>
+#include <system/audio_effects/effect_uuid.h>
 
 #include "DynamicsProcessing.h"
 
@@ -25,15 +26,16 @@
 
 using aidl::android::hardware::audio::effect::Descriptor;
 using aidl::android::hardware::audio::effect::DynamicsProcessingImpl;
+using aidl::android::hardware::audio::effect::getEffectImplUuidDynamicsProcessing;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidDynamicsProcessing;
 using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::kDynamicsProcessingImplUUID;
 using aidl::android::hardware::audio::effect::State;
 using aidl::android::media::audio::common::AudioUuid;
 using aidl::android::media::audio::common::PcmType;
 
 extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                            std::shared_ptr<IEffect>* instanceSpp) {
-    if (!in_impl_uuid || *in_impl_uuid != kDynamicsProcessingImplUUID) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDynamicsProcessing()) {
         LOG(ERROR) << __func__ << "uuid not supported";
         return EX_ILLEGAL_ARGUMENT;
     }
@@ -48,7 +50,7 @@
 }
 
 extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
-    if (!in_impl_uuid || *in_impl_uuid != kDynamicsProcessingImplUUID) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDynamicsProcessing()) {
         LOG(ERROR) << __func__ << "uuid not supported";
         return EX_ILLEGAL_ARGUMENT;
     }
@@ -88,8 +90,8 @@
 const Capability DynamicsProcessingImpl::kCapability = {.range = {DynamicsProcessingImpl::kRange}};
 
 const Descriptor DynamicsProcessingImpl::kDescriptor = {
-        .common = {.id = {.type = kDynamicsProcessingTypeUUID,
-                          .uuid = kDynamicsProcessingImplUUID,
+        .common = {.id = {.type = getEffectTypeUuidDynamicsProcessing(),
+                          .uuid = getEffectImplUuidDynamicsProcessing(),
                           .proxy = std::nullopt},
                    .flags = {.type = Flags::Type::INSERT,
                              .insert = Flags::Insert::LAST,
@@ -221,7 +223,7 @@
                       EX_ILLEGAL_ARGUMENT, "setInputGainFailed");
             return ndk::ScopedAStatus::ok();
         }
-        case DynamicsProcessing::vendorExtension: {
+        case DynamicsProcessing::vendor: {
             LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
             return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                     EX_ILLEGAL_ARGUMENT, "DPVendorExtensionTagNotSupported");
@@ -301,7 +303,7 @@
                             mContext->getInputGain()));
             return ndk::ScopedAStatus::ok();
         }
-        case DynamicsProcessing::vendorExtension: {
+        case DynamicsProcessing::vendor: {
             LOG(ERROR) << __func__ << " wrong vendor tag in CommonTag: " << toString(tag);
             return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                     EX_ILLEGAL_ARGUMENT, "DPVendorExtensionTagInWrongId");
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
index 26b6ead..d094c69 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
@@ -19,7 +19,6 @@
 #include <aidl/android/hardware/audio/effect/BnEffect.h>
 
 #include "effect-impl/EffectImpl.h"
-#include "effect-impl/EffectUUID.h"
 #include "DynamicsProcessingContext.h"
 
 namespace aidl::android::hardware::audio::effect {
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index 7978cc5..69ff522 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -64,6 +64,7 @@
 RetCode DynamicsProcessingContext::setCommon(const Parameter::Common& common) {
     mCommon = common;
     init();
+    LOG(INFO) << __func__ << common.toString();
     return RetCode::SUCCESS;
 }
 
@@ -287,8 +288,8 @@
 void DynamicsProcessingContext::init() {
     std::lock_guard lg(mMutex);
     mState = DYNAMICS_PROCESSING_STATE_INITIALIZED;
-    mChannelCount =
-            ::android::hardware::audio::common::getChannelCount(mCommon.input.base.channelMask);
+    mChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
+            mCommon.input.base.channelMask);
 }
 
 dp_fx::DPChannel* DynamicsProcessingContext::getChannel_l(int channel) {
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
index 7e22482..031477f 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
@@ -16,20 +16,22 @@
 
 #define LOG_TAG "AHAL_HapticGeneratorImpl"
 
-#include "EffectHapticGenerator.h"
-
 #include <android-base/logging.h>
 #include <audio_effects/effect_hapticgenerator.h>
+#include <system/audio_effects/effect_uuid.h>
+
+#include "EffectHapticGenerator.h"
 
 using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::getEffectImplUuidHapticGenerator;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator;
 using aidl::android::hardware::audio::effect::HapticGeneratorImpl;
 using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::kHapticGeneratorImplUUID;
 using aidl::android::media::audio::common::AudioUuid;
 
 extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                            std::shared_ptr<IEffect>* instanceSpp) {
-    if (!in_impl_uuid || *in_impl_uuid != kHapticGeneratorImplUUID) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidHapticGenerator()) {
         LOG(ERROR) << __func__ << "uuid not supported";
         return EX_ILLEGAL_ARGUMENT;
     }
@@ -44,7 +46,7 @@
 }
 
 extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
-    if (!in_impl_uuid || *in_impl_uuid != kHapticGeneratorImplUUID) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidHapticGenerator()) {
         LOG(ERROR) << __func__ << "uuid not supported";
         return EX_ILLEGAL_ARGUMENT;
     }
@@ -56,8 +58,8 @@
 
 const std::string HapticGeneratorImpl::kEffectName = "Haptic Generator";
 const Descriptor HapticGeneratorImpl::kDescriptor = {
-        .common = {.id = {.type = kHapticGeneratorTypeUUID,
-                          .uuid = kHapticGeneratorImplUUID,
+        .common = {.id = {.type = getEffectTypeUuidHapticGenerator(),
+                          .uuid = getEffectImplUuidHapticGenerator(),
                           .proxy = std::nullopt},
                    .flags = {.type = Flags::Type::INSERT, .insert = Flags::Insert::FIRST},
                    .name = HapticGeneratorImpl::kEffectName,
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
index 02ca392..fe9616a 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
@@ -20,7 +20,6 @@
 
 #include "HapticGeneratorContext.h"
 #include "effect-impl/EffectImpl.h"
-#include "effect-impl/EffectUUID.h"
 
 namespace aidl::android::hardware::audio::effect {
 
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index 8ed579b..de44e05 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -17,6 +17,7 @@
 #define LOG_TAG "AHAL_HapticGeneratorContext"
 
 #include <Utils.h>
+#include <android-base/logging.h>
 #include <android-base/parsedouble.h>
 #include <android-base/properties.h>
 
@@ -193,9 +194,9 @@
     mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
     mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
 
-    mParams.mAudioChannelCount = ::android::hardware::audio::common::getChannelCount(
+    mParams.mAudioChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
             inputChMask, ~media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
-    mParams.mHapticChannelCount = ::android::hardware::audio::common::getChannelCount(
+    mParams.mHapticChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
             outputChMask, media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
     LOG_ALWAYS_FATAL_IF(mParams.mHapticChannelCount > 2, "haptic channel count is too large");
     for (size_t i = 0; i < mParams.mHapticChannelCount; ++i) {
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
index 9d8bc80..a7d9282 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
@@ -17,19 +17,21 @@
 #define LOG_TAG "AHAL_LoudnessEnhancerImpl"
 
 #include <android-base/logging.h>
+#include <system/audio_effects/effect_uuid.h>
 
 #include "EffectLoudnessEnhancer.h"
 
 using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::getEffectImplUuidLoudnessEnhancer;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer;
 using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::kLoudnessEnhancerImplUUID;
 using aidl::android::hardware::audio::effect::LoudnessEnhancerImpl;
 using aidl::android::hardware::audio::effect::State;
 using aidl::android::media::audio::common::AudioUuid;
 
 extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                            std::shared_ptr<IEffect>* instanceSpp) {
-    if (!in_impl_uuid || *in_impl_uuid != kLoudnessEnhancerImplUUID) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidLoudnessEnhancer()) {
         LOG(ERROR) << __func__ << "uuid not supported";
         return EX_ILLEGAL_ARGUMENT;
     }
@@ -44,7 +46,7 @@
 }
 
 extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
-    if (!in_impl_uuid || *in_impl_uuid != kLoudnessEnhancerImplUUID) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidLoudnessEnhancer()) {
         LOG(ERROR) << __func__ << "uuid not supported";
         return EX_ILLEGAL_ARGUMENT;
     }
@@ -56,8 +58,8 @@
 
 const std::string LoudnessEnhancerImpl::kEffectName = "Loudness Enhancer";
 const Descriptor LoudnessEnhancerImpl::kDescriptor = {
-        .common = {.id = {.type = kLoudnessEnhancerTypeUUID,
-                          .uuid = kLoudnessEnhancerImplUUID,
+        .common = {.id = {.type = getEffectTypeUuidLoudnessEnhancer(),
+                          .uuid = getEffectImplUuidLoudnessEnhancer(),
                           .proxy = std::nullopt},
                    .flags = {.type = Flags::Type::INSERT, .insert = Flags::Insert::FIRST},
                    .name = LoudnessEnhancerImpl::kEffectName,
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
index 6402fd2..5b9e924 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
@@ -19,7 +19,6 @@
 #include <aidl/android/hardware/audio/effect/BnEffect.h>
 
 #include "effect-impl/EffectImpl.h"
-#include "effect-impl/EffectUUID.h"
 #include "LoudnessEnhancerContext.h"
 
 namespace aidl::android::hardware::audio::effect {
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
index 033b222..bc3fa45 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
@@ -14,6 +14,10 @@
  * limitations under the License.
  */
 
+#define LOG_TAG "LoudnessEnhancerContext"
+
+#include <Utils.h>
+
 #include "LoudnessEnhancerContext.h"
 
 namespace aidl::android::hardware::audio::effect {
@@ -21,17 +25,15 @@
 LoudnessEnhancerContext::LoudnessEnhancerContext(int statusDepth, const Parameter::Common& common)
     : EffectContext(statusDepth, common) {
     LOG(DEBUG) << __func__;
-    mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
-    mSampleRate = common.input.base.sampleRate;
     init_params();
 }
 
 LoudnessEnhancerContext::~LoudnessEnhancerContext() {
     LOG(DEBUG) << __func__;
-    mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
 }
 
 RetCode LoudnessEnhancerContext::enable() {
+    std::lock_guard lg(mMutex);
     if (mState != LOUDNESS_ENHANCER_STATE_INITIALIZED) {
         return RetCode::ERROR_EFFECT_LIB_ERROR;
     }
@@ -40,6 +42,7 @@
 }
 
 RetCode LoudnessEnhancerContext::disable() {
+    std::lock_guard lg(mMutex);
     if (mState != LOUDNESS_ENHANCER_STATE_ACTIVE) {
         return RetCode::ERROR_EFFECT_LIB_ERROR;
     }
@@ -49,12 +52,10 @@
 
 void LoudnessEnhancerContext::reset() {
     float targetAmp = pow(10, mGain / 2000.0f);  // mB to linear amplification
-    {
-        std::lock_guard lg(mMutex);
-        if (mCompressor != nullptr) {
-            // Get samplingRate from input
-            mCompressor->Initialize(targetAmp, mSampleRate);
-        }
+    std::lock_guard lg(mMutex);
+    if (mCompressor != nullptr) {
+        // Get samplingRate from input
+        mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
     }
 }
 
@@ -75,39 +76,41 @@
     auto frameSize = getInputFrameSize();
     RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
 
+    std::lock_guard lg(mMutex);
+    status = {STATUS_INVALID_OPERATION, 0, 0};
+    RETURN_VALUE_IF(mState != LOUDNESS_ENHANCER_STATE_ACTIVE, status, "stateNotActive");
+
     LOG(DEBUG) << __func__ << " start processing";
-    {
-        std::lock_guard lg(mMutex);
-        // PcmType is always expected to be Float 32 bit.
-        constexpr float scale = 1 << 15;  // power of 2 is lossless conversion to int16_t range
-        constexpr float inverseScale = 1.f / scale;
-        const float inputAmp = pow(10, mGain / 2000.0f) * scale;
-        float leftSample, rightSample;
-        if (mCompressor != nullptr) {
-            for (int inIdx = 0; inIdx < samples; inIdx += 2) {
-                // makeup gain is applied on the input of the compressor
-                leftSample = inputAmp * in[inIdx];
-                rightSample = inputAmp * in[inIdx + 1];
-                mCompressor->Compress(&leftSample, &rightSample);
-                in[inIdx] = leftSample * inverseScale;
-                in[inIdx + 1] = rightSample * inverseScale;
-            }
-        } else {
-            for (int inIdx = 0; inIdx < samples; inIdx += 2) {
-                leftSample = inputAmp * in[inIdx];
-                rightSample = inputAmp * in[inIdx + 1];
-                in[inIdx] = leftSample * inverseScale;
-                in[inIdx + 1] = rightSample * inverseScale;
-            }
+    // PcmType is always expected to be Float 32 bit.
+    constexpr float scale = 1 << 15;  // power of 2 is lossless conversion to int16_t range
+    constexpr float inverseScale = 1.f / scale;
+    const float inputAmp = pow(10, mGain / 2000.0f) * scale;
+    float leftSample, rightSample;
+
+    if (mCompressor != nullptr) {
+        for (int inIdx = 0; inIdx < samples; inIdx += 2) {
+            // makeup gain is applied on the input of the compressor
+            leftSample = inputAmp * in[inIdx];
+            rightSample = inputAmp * in[inIdx + 1];
+            mCompressor->Compress(&leftSample, &rightSample);
+            in[inIdx] = leftSample * inverseScale;
+            in[inIdx + 1] = rightSample * inverseScale;
         }
-        bool accumulate = false;
-        if (in != out) {
-            for (int i = 0; i < samples; i++) {
-                if (accumulate) {
-                    out[i] += in[i];
-                } else {
-                    out[i] = in[i];
-                }
+    } else {
+        for (int inIdx = 0; inIdx < samples; inIdx += 2) {
+            leftSample = inputAmp * in[inIdx];
+            rightSample = inputAmp * in[inIdx + 1];
+            in[inIdx] = leftSample * inverseScale;
+            in[inIdx + 1] = rightSample * inverseScale;
+        }
+    }
+    bool accumulate = false;
+    if (in != out) {
+        for (int i = 0; i < samples; i++) {
+            if (accumulate) {
+                out[i] += in[i];
+            } else {
+                out[i] = in[i];
             }
         }
     }
@@ -115,15 +118,17 @@
 }
 
 void LoudnessEnhancerContext::init_params() {
+    int channelCount = ::aidl::android::hardware::audio::common::getChannelCount(
+            mCommon.input.base.channelMask);
+    LOG_ALWAYS_FATAL_IF(channelCount != 2, "channel count %d not supported", channelCount);
+
     mGain = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
     float targetAmp = pow(10, mGain / 2000.0f);  // mB to linear amplification
     LOG(DEBUG) << __func__ << "Target gain = " << mGain << "mB <=> factor = " << targetAmp;
 
-    {
-        std::lock_guard lg(mMutex);
-        mCompressor = std::make_unique<le_fx::AdaptiveDynamicRangeCompression>();
-        mCompressor->Initialize(targetAmp, mSampleRate);
-    }
+    std::lock_guard lg(mMutex);
+    mCompressor = std::make_unique<le_fx::AdaptiveDynamicRangeCompression>();
+    mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
     mState = LOUDNESS_ENHANCER_STATE_INITIALIZED;
 }
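
For reference, the gain math in the processing loop above converts a target gain in millibels to a linear amplitude (10^(mB/2000)) and rides it on a power-of-two scale so the float samples land in int16_t range for the compressor and come back losslessly. A standalone sketch, not part of the patch and using a made-up 600 mB gain:

#include <cmath>
#include <cstdio>

int main() {
    constexpr float scale = 1 << 15;           // lossless power-of-two mapping into int16_t range
    constexpr float inverseScale = 1.f / scale;
    const int gainMb = 600;                    // hypothetical target gain: 600 mB == 6 dB
    const float linearGain = std::pow(10.f, gainMb / 2000.0f);  // amplitude = 10^(mB/2000) ~= 2.0
    const float inputAmp = linearGain * scale;
    const float sample = 0.25f;                // one float PCM sample in [-1.0, 1.0]
    const float restored = (inputAmp * sample) * inverseScale;  // what the no-compressor path computes
    std::printf("gain %.3f, in %.2f -> out %.3f\n", linearGain, sample, restored);
    return 0;
}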
 
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
index b478b27..9a1ec4c 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
@@ -46,9 +46,8 @@
 
   private:
     std::mutex mMutex;
-    LoudnessEnhancerState mState;
-    int mSampleRate;
-    int mGain;
+    LoudnessEnhancerState mState GUARDED_BY(mMutex) = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
+    int mGain = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
     // In this implementation, there is no coupling between the compression on the left and right
     // channels
     std::unique_ptr<le_fx::AdaptiveDynamicRangeCompression> mCompressor GUARDED_BY(mMutex);
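
The fields above are annotated with GUARDED_BY, which clang's -Wthread-safety analysis (enabled elsewhere in this change) checks at compile time. A minimal sketch of the pattern with illustrative names, assuming the usual android-base thread_annotations.h as the source of the macro:

#include <mutex>
#include <android-base/thread_annotations.h>  // assumed provider of GUARDED_BY

class Counter {
  public:
    void increment() {
        std::lock_guard lg(mMutex);  // omit this and -Wthread-safety flags the write below
        ++mValue;
    }
    int value() {
        std::lock_guard lg(mMutex);
        return mValue;
    }

  private:
    std::mutex mMutex;
    int mValue GUARDED_BY(mMutex) = 0;
};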
diff --git a/media/libeffects/lvm/tests/EffectReverbTest.cpp b/media/libeffects/lvm/tests/EffectReverbTest.cpp
index 59453eb..aaac782 100644
--- a/media/libeffects/lvm/tests/EffectReverbTest.cpp
+++ b/media/libeffects/lvm/tests/EffectReverbTest.cpp
@@ -33,6 +33,27 @@
 
 constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
 
+static constexpr audio_channel_mask_t kChMasks[] = {
+        AUDIO_CHANNEL_OUT_MONO,          AUDIO_CHANNEL_OUT_STEREO,
+        AUDIO_CHANNEL_OUT_2POINT1,       AUDIO_CHANNEL_OUT_5POINT1,
+        AUDIO_CHANNEL_OUT_7POINT1POINT4, AUDIO_CHANNEL_INDEX_MASK_23,
+        AUDIO_CHANNEL_OUT_22POINT2,
+};
+
+static constexpr size_t kNumChMasks = std::size(kChMasks);
+
+static constexpr size_t kSampleRates[] = {8000, 11025, 44100, 48000, 192000};
+
+static constexpr size_t kNumSampleRates = std::size(kSampleRates);
+
+static constexpr size_t kFrameCounts[] = {4, 512};
+
+static constexpr size_t kNumFrameCounts = std::size(kFrameCounts);
+
+static constexpr size_t kLoopCounts[] = {1, 4};
+
+static constexpr size_t kNumLoopCounts = std::size(kLoopCounts);
+
 static bool isAuxMode(const effect_uuid_t* uuid) {
     // Update this, if the order of effects in kEffectUuids is updated
     return (uuid == &kEffectUuids[2] || uuid == &kEffectUuids[3]);
@@ -50,15 +71,15 @@
 class SingleEffectTest : public ::testing::TestWithParam<SingleEffectTestParam> {
   public:
     SingleEffectTest()
-        : mSampleRate(EffectTestHelper::kSampleRates[std::get<1>(GetParam())]),
-          mFrameCount(EffectTestHelper::kFrameCounts[std::get<2>(GetParam())]),
-          mLoopCount(EffectTestHelper::kLoopCounts[std::get<3>(GetParam())]),
+        : mSampleRate(kSampleRates[std::get<1>(GetParam())]),
+          mFrameCount(kFrameCounts[std::get<2>(GetParam())]),
+          mLoopCount(kLoopCounts[std::get<3>(GetParam())]),
           mTotalFrameCount(mFrameCount * mLoopCount),
           mUuid(&kEffectUuids[std::get<4>(GetParam())]),
           mInChMask(isAuxMode(mUuid) ? AUDIO_CHANNEL_OUT_MONO
-                                     : EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+                                     : kChMasks[std::get<0>(GetParam())]),
           mInChannelCount(audio_channel_count_from_out_mask(mInChMask)),
-          mOutChMask(EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+          mOutChMask(kChMasks[std::get<0>(GetParam())]),
           mOutChannelCount(audio_channel_count_from_out_mask(mOutChMask)),
           mPreset(kPresets[std::get<5>(GetParam())]) {}
 
@@ -100,10 +121,10 @@
 
 INSTANTIATE_TEST_SUITE_P(
         EffectReverbTestAll, SingleEffectTest,
-        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumChMasks),
-                           ::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
-                           ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
-                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+        ::testing::Combine(::testing::Range(0, (int)kNumChMasks),
+                           ::testing::Range(0, (int)kNumSampleRates),
+                           ::testing::Range(0, (int)kNumFrameCounts),
+                           ::testing::Range(0, (int)kNumLoopCounts),
                            ::testing::Range(0, (int)kNumEffectUuids),
                            ::testing::Range(0, (int)kNumPresets)));
 
@@ -112,9 +133,9 @@
     : public ::testing::TestWithParam<SingleEffectComparisonTestParam> {
   public:
     SingleEffectComparisonTest()
-        : mSampleRate(EffectTestHelper::kSampleRates[std::get<0>(GetParam())]),
-          mFrameCount(EffectTestHelper::kFrameCounts[std::get<1>(GetParam())]),
-          mLoopCount(EffectTestHelper::kLoopCounts[std::get<2>(GetParam())]),
+        : mSampleRate(kSampleRates[std::get<0>(GetParam())]),
+          mFrameCount(kFrameCounts[std::get<1>(GetParam())]),
+          mLoopCount(kLoopCounts[std::get<2>(GetParam())]),
           mTotalFrameCount(mFrameCount * mLoopCount),
           mUuid(&kEffectUuids[std::get<3>(GetParam())]),
           mPreset(kPresets[std::get<4>(GetParam())]) {}
@@ -173,7 +194,7 @@
     std::vector<int16_t> monoRefI16(mTotalFrameCount);
     memcpy_to_i16_from_float(monoRefI16.data(), monoOutput.data(), mTotalFrameCount);
 
-    for (size_t outChMask : EffectTestHelper::kChMasks) {
+    for (size_t outChMask : kChMasks) {
         size_t outChannelCount = audio_channel_count_from_out_mask(outChMask);
         size_t inChMask = isAuxMode(mUuid) ? AUDIO_CHANNEL_OUT_MONO : outChMask;
 
@@ -225,9 +246,9 @@
 
 INSTANTIATE_TEST_SUITE_P(
         EffectReverbTestAll, SingleEffectComparisonTest,
-        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
-                           ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
-                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+        ::testing::Combine(::testing::Range(0, (int)kNumSampleRates),
+                           ::testing::Range(0, (int)kNumFrameCounts),
+                           ::testing::Range(0, (int)kNumLoopCounts),
                            ::testing::Range(0, (int)kNumEffectUuids),
                            ::testing::Range(0, (int)kNumPresets)));
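
The two suites above enumerate every combination by ranging over indices into the file-local constant tables and mapping them back in the fixture constructors. A reduced, self-contained sketch of the same Combine-over-index-ranges pattern, with made-up tables:

#include <cstddef>
#include <iterator>
#include <tuple>

#include <gtest/gtest.h>

static constexpr size_t kRates[] = {8000, 48000};
static constexpr size_t kCounts[] = {4, 512};

class ComboTest : public ::testing::TestWithParam<std::tuple<int, int>> {};

TEST_P(ComboTest, CoversEveryPair) {
    const size_t rate = kRates[std::get<0>(GetParam())];
    const size_t frames = kCounts[std::get<1>(GetParam())];
    EXPECT_GT(rate * frames, 0u);  // body runs once per (rate, frame count) pair
}

INSTANTIATE_TEST_SUITE_P(AllPairs, ComboTest,
                         ::testing::Combine(::testing::Range(0, (int)std::size(kRates)),
                                            ::testing::Range(0, (int)std::size(kCounts))));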
 
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index 6124356..d026e2b 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -15,7 +15,9 @@
  */
 
 #include <cstddef>
+
 #define LOG_TAG "BundleContext"
+#include <android-base/logging.h>
 #include <Utils.h>
 
 #include "BundleContext.h"
@@ -690,7 +692,7 @@
 std::vector<Virtualizer::ChannelAngle> BundleContext::getSpeakerAngles(
         const Virtualizer::SpeakerAnglesPayload payload) {
     std::vector<Virtualizer::ChannelAngle> angles;
-    auto chCount = ::android::hardware::audio::common::getChannelCount(payload.layout);
+    auto chCount = ::aidl::android::hardware::audio::common::getChannelCount(payload.layout);
     RETURN_VALUE_IF(!isConfigSupportedVirtualizer(chCount, payload.device), angles,
                     "payloadNotSupported");
 
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
index 4652d8d..b3371a3 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
@@ -18,7 +18,8 @@
 #include <array>
 
 #include <aidl/android/hardware/audio/effect/BnEffect.h>
-#include "effect-impl/EffectUUID.h"
+#include <system/audio_effects/effect_uuid.h>
+
 #include "effect-impl/EffectTypes.h"
 #include "LVM.h"
 
@@ -70,12 +71,11 @@
 
 const std::vector<Range::EqualizerRange> kEqRanges = {
         MAKE_RANGE(Equalizer, preset, 0, MAX_NUM_PRESETS - 1),
-        MAKE_RANGE(
-                Equalizer, bandLevels,
-                std::vector<Equalizer::BandLevel>{Equalizer::BandLevel(
-                        {.index = 0, .levelMb = std::numeric_limits<int>::min()})},
-                std::vector<Equalizer::BandLevel>{Equalizer::BandLevel(
-                        {.index = MAX_NUM_BANDS - 1, .levelMb = std::numeric_limits<int>::max()})}),
+        MAKE_RANGE(Equalizer, bandLevels,
+                   std::vector<Equalizer::BandLevel>{
+                           Equalizer::BandLevel({.index = 0, .levelMb = -15})},
+                   std::vector<Equalizer::BandLevel>{
+                           Equalizer::BandLevel({.index = MAX_NUM_BANDS - 1, .levelMb = 15})}),
         /* capability definition */
         MAKE_RANGE(Equalizer, bandFrequencies, kEqBandFrequency, kEqBandFrequency),
         MAKE_RANGE(Equalizer, presets, kEqPresets, kEqPresets),
@@ -83,33 +83,36 @@
         MAKE_RANGE(Equalizer, centerFreqMh, std::vector<int>({1}), std::vector<int>({}))};
 static const Capability kEqCap = {.range = kEqRanges};
 static const std::string kEqualizerEffectName = "EqualizerBundle";
-static const Descriptor kEqualizerDesc = {.common = {.id = {.type = kEqualizerTypeUUID,
-                                                            .uuid = kEqualizerBundleImplUUID,
-                                                            .proxy = kEqualizerProxyUUID},
-                                                     .flags = {.type = Flags::Type::INSERT,
-                                                               .insert = Flags::Insert::FIRST,
-                                                               .volume = Flags::Volume::CTRL},
-                                                     .name = kEqualizerEffectName,
-                                                     .implementor = "NXP Software Ltd."},
-                                          .capability = kEqCap};
+static const Descriptor kEqualizerDesc = {
+        .common = {.id = {.type = getEffectTypeUuidEqualizer(),
+                          .uuid = getEffectImplUuidEqualizerBundle(),
+                          .proxy = getEffectImplUuidEqualizerProxy()},
+
+                   .flags = {.type = Flags::Type::INSERT,
+                             .insert = Flags::Insert::FIRST,
+                             .volume = Flags::Volume::CTRL},
+                   .name = kEqualizerEffectName,
+                   .implementor = "NXP Software Ltd."},
+        .capability = kEqCap};
 
 static const int mMaxStrengthSupported = 1000;
 static const std::vector<Range::BassBoostRange> kBassBoostRanges = {
         MAKE_RANGE(BassBoost, strengthPm, 0, mMaxStrengthSupported)};
 static const Capability kBassBoostCap = {.range = kBassBoostRanges};
 static const std::string kBassBoostEffectName = "Dynamic Bass Boost";
-static const Descriptor kBassBoostDesc = {.common = {.id = {.type = kBassBoostTypeUUID,
-                                                            .uuid = kBassBoostBundleImplUUID,
-                                                            .proxy = kBassBoostProxyUUID},
-                                                     .flags = {.type = Flags::Type::INSERT,
-                                                               .insert = Flags::Insert::FIRST,
-                                                               .volume = Flags::Volume::CTRL,
-                                                               .deviceIndication = true},
-                                                     .cpuLoad = BASS_BOOST_CUP_LOAD_ARM9E,
-                                                     .memoryUsage = BUNDLE_MEM_USAGE,
-                                                     .name = kBassBoostEffectName,
-                                                     .implementor = "NXP Software Ltd."},
-                                          .capability = kBassBoostCap};
+static const Descriptor kBassBoostDesc = {
+        .common = {.id = {.type = getEffectTypeUuidBassBoost(),
+                          .uuid = getEffectImplUuidBassBoostBundle(),
+                          .proxy = getEffectImplUuidBassBoostProxy()},
+                   .flags = {.type = Flags::Type::INSERT,
+                             .insert = Flags::Insert::FIRST,
+                             .volume = Flags::Volume::CTRL,
+                             .deviceIndication = true},
+                   .cpuLoad = BASS_BOOST_CUP_LOAD_ARM9E,
+                   .memoryUsage = BUNDLE_MEM_USAGE,
+                   .name = kBassBoostEffectName,
+                   .implementor = "NXP Software Ltd."},
+        .capability = kBassBoostCap};
 
 static const std::vector<Range::VirtualizerRange> kVirtualizerRanges = {
         MAKE_RANGE(Virtualizer, strengthPm, 0, mMaxStrengthSupported)};
@@ -117,9 +120,9 @@
 static const std::string kVirtualizerEffectName = "Virtualizer";
 
 static const Descriptor kVirtualizerDesc = {
-        .common = {.id = {.type = kVirtualizerTypeUUID,
-                          .uuid = kVirtualizerBundleImplUUID,
-                          .proxy = kVirtualizerProxyUUID},
+        .common = {.id = {.type = getEffectTypeUuidVirtualizer(),
+                          .uuid = getEffectImplUuidVirtualizerBundle(),
+                          .proxy = getEffectImplUuidVirtualizerProxy()},
                    .flags = {.type = Flags::Type::INSERT,
                              .insert = Flags::Insert::LAST,
                              .volume = Flags::Volume::CTRL,
@@ -134,17 +137,18 @@
         MAKE_RANGE(Volume, levelDb, -9600, 0)};
 static const Capability kVolumeCap = {.range = kVolumeRanges};
 static const std::string kVolumeEffectName = "Volume";
-static const Descriptor kVolumeDesc = {.common = {.id = {.type = kVolumeTypeUUID,
-                                                         .uuid = kVolumeBundleImplUUID,
-                                                         .proxy = std::nullopt},
-                                                  .flags = {.type = Flags::Type::INSERT,
-                                                            .insert = Flags::Insert::LAST,
-                                                            .volume = Flags::Volume::CTRL},
-                                                  .cpuLoad = VOLUME_CUP_LOAD_ARM9E,
-                                                  .memoryUsage = BUNDLE_MEM_USAGE,
-                                                  .name = kVolumeEffectName,
-                                                  .implementor = "NXP Software Ltd."},
-                                       .capability = kVolumeCap};
+static const Descriptor kVolumeDesc = {
+        .common = {.id = {.type = getEffectTypeUuidVolume(),
+                          .uuid = getEffectImplUuidVolumeBundle(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::INSERT,
+                             .insert = Flags::Insert::LAST,
+                             .volume = Flags::Volume::CTRL},
+                   .cpuLoad = VOLUME_CUP_LOAD_ARM9E,
+                   .memoryUsage = BUNDLE_MEM_USAGE,
+                   .name = kVolumeEffectName,
+                   .implementor = "NXP Software Ltd."},
+        .capability = kVolumeCap};
 
 /* The following tables have been computed using the actual levels measured by the output of
  * white noise or pink noise (IEC268-1) for the EQ and BassBoost Effects. These are estimates of
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
index dc52c16..cd9fb60 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
@@ -30,19 +30,21 @@
 #include <LVM.h>
 #include <limits.h>
 
+using aidl::android::hardware::audio::effect::getEffectImplUuidBassBoostBundle;
 using aidl::android::hardware::audio::effect::Descriptor;
 using aidl::android::hardware::audio::effect::EffectBundleAidl;
+using aidl::android::hardware::audio::effect::getEffectImplUuidEqualizerBundle;
 using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::kBassBoostBundleImplUUID;
-using aidl::android::hardware::audio::effect::kEqualizerBundleImplUUID;
-using aidl::android::hardware::audio::effect::kVirtualizerBundleImplUUID;
-using aidl::android::hardware::audio::effect::kVolumeBundleImplUUID;
 using aidl::android::hardware::audio::effect::State;
+using aidl::android::hardware::audio::effect::getEffectImplUuidVirtualizerBundle;
+using aidl::android::hardware::audio::effect::getEffectImplUuidVolumeBundle;
 using aidl::android::media::audio::common::AudioUuid;
 
 bool isUuidSupported(const AudioUuid* uuid) {
-    return (*uuid == kEqualizerBundleImplUUID || *uuid == kBassBoostBundleImplUUID ||
-            *uuid == kVirtualizerBundleImplUUID || *uuid == kVolumeBundleImplUUID);
+    return (*uuid == getEffectImplUuidBassBoostBundle() ||
+            *uuid == getEffectImplUuidEqualizerBundle() ||
+            *uuid == getEffectImplUuidVirtualizerBundle() ||
+            *uuid == getEffectImplUuidVolumeBundle());
 }
 
 extern "C" binder_exception_t createEffect(const AudioUuid* uuid,
@@ -66,13 +68,13 @@
         LOG(ERROR) << __func__ << "uuid not supported";
         return EX_ILLEGAL_ARGUMENT;
     }
-    if (*in_impl_uuid == kEqualizerBundleImplUUID) {
+    if (*in_impl_uuid == getEffectImplUuidEqualizerBundle()) {
         *_aidl_return = aidl::android::hardware::audio::effect::lvm::kEqualizerDesc;
-    } else if (*in_impl_uuid == kBassBoostBundleImplUUID) {
+    } else if (*in_impl_uuid == getEffectImplUuidBassBoostBundle()) {
         *_aidl_return = aidl::android::hardware::audio::effect::lvm:: kBassBoostDesc;
-    } else if (*in_impl_uuid == kVirtualizerBundleImplUUID) {
+    } else if (*in_impl_uuid == getEffectImplUuidVirtualizerBundle()) {
         *_aidl_return = aidl::android::hardware::audio::effect::lvm::kVirtualizerDesc;
-    } else if (*in_impl_uuid == kVolumeBundleImplUUID) {
+    } else if (*in_impl_uuid == getEffectImplUuidVolumeBundle()) {
         *_aidl_return = aidl::android::hardware::audio::effect::lvm::kVolumeDesc;
     }
     return EX_NONE;
@@ -82,19 +84,19 @@
 
 EffectBundleAidl::EffectBundleAidl(const AudioUuid& uuid) {
     LOG(DEBUG) << __func__ << uuid.toString();
-    if (uuid == kEqualizerBundleImplUUID) {
+    if (uuid == getEffectImplUuidEqualizerBundle()) {
         mType = lvm::BundleEffectType::EQUALIZER;
         mDescriptor = &lvm::kEqualizerDesc;
         mEffectName = &lvm::kEqualizerEffectName;
-    } else if (uuid == kBassBoostBundleImplUUID) {
+    } else if (uuid == getEffectImplUuidBassBoostBundle()) {
         mType = lvm::BundleEffectType::BASS_BOOST;
         mDescriptor = &lvm::kBassBoostDesc;
         mEffectName = &lvm::kBassBoostEffectName;
-    } else if (uuid == kVirtualizerBundleImplUUID) {
+    } else if (uuid == getEffectImplUuidVirtualizerBundle()) {
         mType = lvm::BundleEffectType::VIRTUALIZER;
         mDescriptor = &lvm::kVirtualizerDesc;
         mEffectName = &lvm::kVirtualizerEffectName;
-    } else if (uuid == kVolumeBundleImplUUID) {
+    } else if (uuid == getEffectImplUuidVolumeBundle()) {
         mType = lvm::BundleEffectType::VOLUME;
         mDescriptor = &lvm::kVolumeDesc;
         mEffectName = &lvm::kVolumeEffectName;
@@ -296,11 +298,19 @@
             eqParam.set<Equalizer::preset>(mContext->getEqualizerPreset());
             break;
         }
+        case Equalizer::bandFrequencies: {
+            eqParam.set<Equalizer::bandFrequencies>(lvm::kEqBandFrequency);
+            break;
+        }
+        case Equalizer::presets: {
+            eqParam.set<Equalizer::presets>(lvm::kEqPresets);
+            break;
+        }
         case Equalizer::centerFreqMh: {
             eqParam.set<Equalizer::centerFreqMh>(mContext->getEqualizerCenterFreqs());
             break;
         }
-        default: {
+        case Equalizer::vendor: {
             LOG(ERROR) << __func__ << " not handled tag: " << toString(tag);
             return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                     EX_ILLEGAL_ARGUMENT, "unsupportedTag");
@@ -365,8 +375,9 @@
 
 ndk::ScopedAStatus EffectBundleAidl::getParameterVirtualizer(const Virtualizer::Id& id,
                                                              Parameter::Specific* specific) {
-    RETURN_IF(id.getTag() != Virtualizer::Id::commonTag, EX_ILLEGAL_ARGUMENT,
-              "VirtualizerTagNotSupported");
+    RETURN_IF((id.getTag() != Virtualizer::Id::commonTag) &&
+                      (id.getTag() != Virtualizer::Id::speakerAnglesPayload),
+              EX_ILLEGAL_ARGUMENT, "VirtualizerTagNotSupported");
 
     RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
     Virtualizer vrParam;
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
index 0330e5a..ec1abe8 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
@@ -23,7 +23,6 @@
 #include <android-base/logging.h>
 
 #include "effect-impl/EffectImpl.h"
-#include "effect-impl/EffectUUID.h"
 
 #include "BundleContext.h"
 #include "BundleTypes.h"
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
index 018f3bc..73141b6 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
@@ -31,17 +31,19 @@
 
 using aidl::android::hardware::audio::effect::Descriptor;
 using aidl::android::hardware::audio::effect::EffectReverb;
+using aidl::android::hardware::audio::effect::getEffectImplUuidAuxEnvReverb;
+using aidl::android::hardware::audio::effect::getEffectImplUuidAuxPresetReverb;
+using aidl::android::hardware::audio::effect::getEffectImplUuidInsertEnvReverb;
+using aidl::android::hardware::audio::effect::getEffectImplUuidInsertPresetReverb;
 using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::kAuxEnvReverbImplUUID;
-using aidl::android::hardware::audio::effect::kAuxPresetReverbImplUUID;
-using aidl::android::hardware::audio::effect::kInsertEnvReverbImplUUID;
-using aidl::android::hardware::audio::effect::kInsertPresetReverbImplUUID;
 using aidl::android::hardware::audio::effect::State;
 using aidl::android::media::audio::common::AudioUuid;
 
 bool isReverbUuidSupported(const AudioUuid* uuid) {
-    return (*uuid == kAuxEnvReverbImplUUID || *uuid == kInsertEnvReverbImplUUID ||
-            *uuid == kAuxPresetReverbImplUUID || *uuid == kInsertPresetReverbImplUUID);
+    return (*uuid == getEffectImplUuidAuxEnvReverb() ||
+            *uuid == getEffectImplUuidAuxPresetReverb() ||
+            *uuid == getEffectImplUuidInsertEnvReverb() ||
+            *uuid == getEffectImplUuidInsertPresetReverb());
 }
 
 extern "C" binder_exception_t createEffect(const AudioUuid* uuid,
@@ -61,19 +63,18 @@
 }
 
 extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
-    if (!in_impl_uuid || !isReverbUuidSupported(in_impl_uuid)) {
+    if (*in_impl_uuid == getEffectImplUuidAuxEnvReverb()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kAuxEnvReverbDesc;
+    } else if (*in_impl_uuid == getEffectImplUuidInsertEnvReverb()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kInsertEnvReverbDesc;
+    } else if (*in_impl_uuid == getEffectImplUuidAuxPresetReverb()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kAuxPresetReverbDesc;
+    } else if (*in_impl_uuid == getEffectImplUuidInsertPresetReverb()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kInsertPresetReverbDesc;
+    } else {
         LOG(ERROR) << __func__ << "uuid not supported";
         return EX_ILLEGAL_ARGUMENT;
     }
-    if (*in_impl_uuid == kAuxEnvReverbImplUUID) {
-        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kAuxEnvReverbDesc;
-    } else if (*in_impl_uuid == kInsertEnvReverbImplUUID) {
-        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kInsertEnvReverbDesc;
-    } else if (*in_impl_uuid == kAuxPresetReverbImplUUID) {
-        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kAuxPresetReverbDesc;
-    } else if (*in_impl_uuid == kInsertPresetReverbImplUUID) {
-        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kInsertPresetReverbDesc;
-    }
     return EX_NONE;
 }
 
@@ -81,19 +82,19 @@
 
 EffectReverb::EffectReverb(const AudioUuid& uuid) {
     LOG(DEBUG) << __func__ << uuid.toString();
-    if (uuid == kAuxEnvReverbImplUUID) {
+    if (uuid == getEffectImplUuidAuxEnvReverb()) {
         mType = lvm::ReverbEffectType::AUX_ENV;
         mDescriptor = &lvm::kAuxEnvReverbDesc;
         mEffectName = &lvm::kAuxEnvReverbEffectName;
-    } else if (uuid == kInsertEnvReverbImplUUID) {
+    } else if (uuid == getEffectImplUuidInsertEnvReverb()) {
         mType = lvm::ReverbEffectType::INSERT_ENV;
         mDescriptor = &lvm::kInsertEnvReverbDesc;
         mEffectName = &lvm::kInsertEnvReverbEffectName;
-    } else if (uuid == kAuxPresetReverbImplUUID) {
+    } else if (uuid == getEffectImplUuidAuxPresetReverb()) {
         mType = lvm::ReverbEffectType::AUX_PRESET;
         mDescriptor = &lvm::kAuxPresetReverbDesc;
         mEffectName = &lvm::kAuxPresetReverbEffectName;
-    } else if (uuid == kInsertPresetReverbImplUUID) {
+    } else if (uuid == getEffectImplUuidInsertPresetReverb()) {
         mType = lvm::ReverbEffectType::INSERT_PRESET;
         mDescriptor = &lvm::kInsertPresetReverbDesc;
         mEffectName = &lvm::kInsertPresetReverbEffectName;
@@ -184,6 +185,20 @@
                     EX_ILLEGAL_ARGUMENT, "setDecayHfRatioFailed");
             return ndk::ScopedAStatus::ok();
         }
+        case EnvironmentalReverb::reflectionsLevelMb: {
+            RETURN_IF(mContext->setReflectionsLevel(
+                              erParam.get<EnvironmentalReverb::reflectionsLevelMb>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setReflectionsLevelFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case EnvironmentalReverb::reflectionsDelayMs: {
+            RETURN_IF(mContext->setReflectionsDelay(
+                              erParam.get<EnvironmentalReverb::reflectionsDelayMs>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setReflectionsDelayFailed");
+            return ndk::ScopedAStatus::ok();
+        }
         case EnvironmentalReverb::levelMb: {
             RETURN_IF(mContext->setEnvironmentalReverbLevel(
                               erParam.get<EnvironmentalReverb::levelMb>()) != RetCode::SUCCESS,
@@ -292,6 +307,14 @@
                     mContext->getEnvironmentalReverbDecayHfRatio());
             break;
         }
+        case EnvironmentalReverb::reflectionsLevelMb: {
+            erParam.set<EnvironmentalReverb::reflectionsLevelMb>(mContext->getReflectionsLevel());
+            break;
+        }
+        case EnvironmentalReverb::reflectionsDelayMs: {
+            erParam.set<EnvironmentalReverb::reflectionsDelayMs>(mContext->getReflectionsDelay());
+            break;
+        }
         case EnvironmentalReverb::levelMb: {
             erParam.set<EnvironmentalReverb::levelMb>(mContext->getEnvironmentalReverbLevel());
             break;
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 87aa12b..79e67f2 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -15,7 +15,9 @@
  */
 
 #include <cstddef>
+
 #define LOG_TAG "ReverbContext"
+#include <android-base/logging.h>
 #include <Utils.h>
 
 #include "ReverbContext.h"
@@ -301,7 +303,7 @@
     /* General parameters */
     params.OperatingMode = LVM_MODE_ON;
     params.SampleRate = LVM_FS_44100;
-    params.SourceFormat = (::android::hardware::audio::common::getChannelCount(
+    params.SourceFormat = (::aidl::android::hardware::audio::common::getChannelCount(
                                    mCommon.input.base.channelMask) == 1
                                    ? LVM_MONO
                                    : LVM_STEREO);
@@ -363,10 +365,10 @@
     LOG(DEBUG) << __func__ << " start processing";
     std::lock_guard lg(mMutex);
 
-    int channels =
-            ::android::hardware::audio::common::getChannelCount(mCommon.input.base.channelMask);
-    int outChannels =
-            ::android::hardware::audio::common::getChannelCount(mCommon.output.base.channelMask);
+    int channels = ::aidl::android::hardware::audio::common::getChannelCount(
+            mCommon.input.base.channelMask);
+    int outChannels = ::aidl::android::hardware::audio::common::getChannelCount(
+            mCommon.output.base.channelMask);
     int frameCount = mCommon.input.frameCount;
 
     // Reverb only effects the stereo channels in multichannel source.
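
Both wrapper libraries now resolve getChannelCount() from the NDK AIDL namespace (::aidl::android::hardware::audio::common) instead of the old ::android::hardware::audio::common one. A small illustrative snippet of that helper, assuming the NDK-backend AudioChannelLayout union; the stereo layout is a made-up input, not taken from the patch:

#include <Utils.h>  // ::aidl::android::hardware::audio::common::getChannelCount
#include <aidl/android/media/audio/common/AudioChannelLayout.h>

using ::aidl::android::media::audio::common::AudioChannelLayout;

int stereoChannelCount() {
    const auto layout = AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
            AudioChannelLayout::LAYOUT_STEREO);
    return static_cast<int>(
            ::aidl::android::hardware::audio::common::getChannelCount(layout));  // == 2
}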
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
index af49a25..9bb0b1a 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
@@ -83,6 +83,18 @@
     RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) override;
     Parameter::VolumeStereo getVolumeStereo() override { return mVolumeStereo; }
 
+    RetCode setReflectionsDelay(int delay) {
+        mReflectionsDelayMs = delay;
+        return RetCode::SUCCESS;
+    }
+    int getReflectionsDelay() const { return mReflectionsDelayMs; }
+
+    RetCode setReflectionsLevel(int level) {
+        mReflectionsLevelMb = level;
+        return RetCode::SUCCESS;
+    }
+    int getReflectionsLevel() const { return mReflectionsLevelMb; }
+
     IEffect::Status lvmProcess(float* in, float* out, int samples);
 
   private:
@@ -146,15 +158,17 @@
     bool mEnabled = false;
     LVREV_Handle_t mInstance GUARDED_BY(mMutex);
 
-    int mRoomLevel;
-    int mRoomHfLevel;
-    int mDecayTime;
-    int mDecayHfRatio;
-    int mLevel;
-    int mDelay;
-    int mDiffusion;
-    int mDensity;
-    bool mBypass;
+    int mRoomLevel = 0;
+    int mRoomHfLevel = 0;
+    int mDecayTime = 0;
+    int mDecayHfRatio = 0;
+    int mLevel = 0;
+    int mDelay = 0;
+    int mDiffusion = 0;
+    int mDensity = 0;
+    bool mBypass = false;
+    int mReflectionsLevelMb = 0;
+    int mReflectionsDelayMs = 0;
 
     PresetReverb::Presets mPreset;
     PresetReverb::Presets mNextPreset;
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbTypes.h b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbTypes.h
index 8dcda87..37f9287 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbTypes.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbTypes.h
@@ -20,7 +20,8 @@
 #include <android/binder_enums.h>
 #include <audio_effects/effect_environmentalreverb.h>
 #include <audio_effects/effect_presetreverb.h>
-#include "effect-impl/EffectUUID.h"
+#include <system/audio_effects/effect_uuid.h>
+
 #include "effect-impl/EffectTypes.h"
 // from Reverb/lib
 #include "LVREV.h"
@@ -50,29 +51,31 @@
 
 // NXP SW auxiliary environmental reverb
 static const std::string kAuxEnvReverbEffectName = "Auxiliary Environmental Reverb";
-static const Descriptor kAuxEnvReverbDesc = {.common = {.id = {.type = kEnvReverbTypeUUID,
-                                                               .uuid = kAuxEnvReverbImplUUID,
-                                                               .proxy = std::nullopt},
-                                                        .flags = {.type = Flags::Type::AUXILIARY},
-                                                        .cpuLoad = kCpuLoadARM9E,
-                                                        .memoryUsage = kMemUsage,
-                                                        .name = kAuxEnvReverbEffectName,
-                                                        .implementor = "NXP Software Ltd."},
-                                             .capability = kEnvReverbCap};
+static const Descriptor kAuxEnvReverbDesc = {
+        .common = {.id = {.type = getEffectTypeUuidEnvReverb(),
+                          .uuid = getEffectImplUuidAuxEnvReverb(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::AUXILIARY},
+                   .cpuLoad = kCpuLoadARM9E,
+                   .memoryUsage = kMemUsage,
+                   .name = kAuxEnvReverbEffectName,
+                   .implementor = "NXP Software Ltd."},
+        .capability = kEnvReverbCap};
 
 // NXP SW insert environmental reverb
 static const std::string kInsertEnvReverbEffectName = "Insert Environmental Reverb";
-static const Descriptor kInsertEnvReverbDesc = {.common = {.id = {.type = kEnvReverbTypeUUID,
-                                                                  .uuid = kInsertEnvReverbImplUUID,
-                                                                  .proxy = std::nullopt},
-                                                           .flags = {.type = Flags::Type::INSERT,
-                                                                     .insert = Flags::Insert::FIRST,
-                                                                     .volume = Flags::Volume::CTRL},
-                                                           .cpuLoad = kCpuLoadARM9E,
-                                                           .memoryUsage = kMemUsage,
-                                                           .name = kInsertEnvReverbEffectName,
-                                                           .implementor = "NXP Software Ltd."},
-                                                .capability = kEnvReverbCap};
+static const Descriptor kInsertEnvReverbDesc = {
+        .common = {.id = {.type = getEffectTypeUuidEnvReverb(),
+                          .uuid = getEffectImplUuidInsertEnvReverb(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::INSERT,
+                             .insert = Flags::Insert::FIRST,
+                             .volume = Flags::Volume::CTRL},
+                   .cpuLoad = kCpuLoadARM9E,
+                   .memoryUsage = kMemUsage,
+                   .name = kInsertEnvReverbEffectName,
+                   .implementor = "NXP Software Ltd."},
+        .capability = kEnvReverbCap};
 
 static const std::vector<PresetReverb::Presets> kSupportedPresets{
         ndk::enum_range<PresetReverb::Presets>().begin(),
@@ -85,8 +88,8 @@
 // NXP SW auxiliary preset reverb
 static const std::string kAuxPresetReverbEffectName = "Auxiliary Preset Reverb";
 static const Descriptor kAuxPresetReverbDesc = {
-        .common = {.id = {.type = kPresetReverbTypeUUID,
-                          .uuid = kAuxPresetReverbImplUUID,
+        .common = {.id = {.type = getEffectTypeUuidPresetReverb(),
+                          .uuid = getEffectImplUuidAuxPresetReverb(),
                           .proxy = std::nullopt},
                    .flags = {.type = Flags::Type::AUXILIARY},
                    .cpuLoad = kCpuLoadARM9E,
@@ -98,8 +101,8 @@
 // NXP SW insert preset reverb
 static const std::string kInsertPresetReverbEffectName = "Insert Preset Reverb";
 static const Descriptor kInsertPresetReverbDesc = {
-        .common = {.id = {.type = kPresetReverbTypeUUID,
-                          .uuid = kInsertPresetReverbImplUUID,
+        .common = {.id = {.type = getEffectTypeUuidPresetReverb(),
+                          .uuid = getEffectImplUuidInsertPresetReverb(),
                           .proxy = std::nullopt},
                    .flags = {.type = Flags::Type::INSERT,
                              .insert = Flags::Insert::FIRST,
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index c6e036a..d018c47 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -58,3 +58,39 @@
         "libwebrtc_absl_headers",
     ],
 }
+
+cc_library_shared {
+    name: "libpreprocessingaidl",
+    srcs: [
+        "aidl/PreProcessingContext.cpp",
+        "aidl/EffectPreProcessing.cpp",
+        ":effectCommonFile",
+    ],
+    defaults: [
+        "aidlaudioservice_defaults",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+    ],
+    local_include_dirs: ["aidl"],
+    shared_libs: [
+        "liblog",
+        "libutils",
+        "libaudioutils",
+    ],
+    static_libs: [
+        "webrtc_audio_processing",
+    ],
+    header_libs: [
+        "libwebrtc_absl_headers",
+        "libaudioeffects",
+        "libhardware_headers",
+    ],
+    cflags: [
+        "-Wthread-safety",
+        "-Wno-unused-parameter",
+    ],
+    relative_install_path: "soundfx",
+    visibility: [
+        "//hardware/interfaces/audio/aidl/default",
+    ],
+}
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
new file mode 100644
index 0000000..e8ae8b3
--- /dev/null
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
@@ -0,0 +1,456 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectPreProcessing"
+#include <algorithm>
+#include <unordered_set>
+
+#include <Utils.h>
+#include <android-base/logging.h>
+#include <fmq/AidlMessageQueue.h>
+
+#include "EffectPreProcessing.h"
+
+using aidl::android::hardware::audio::effect::getEffectImplUuidAcousticEchoCancelerSw;
+using aidl::android::hardware::audio::effect::getEffectImplUuidAutomaticGainControlV1Sw;
+using aidl::android::hardware::audio::effect::getEffectImplUuidAutomaticGainControlV2Sw;
+using aidl::android::hardware::audio::effect::getEffectImplUuidNoiseSuppressionSw;
+
+using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::EffectPreProcessing;
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::hardware::audio::effect::State;
+using aidl::android::media::audio::common::AudioUuid;
+
+bool isPreProcessingUuidSupported(const AudioUuid& uuid) {
+    return uuid == getEffectImplUuidAcousticEchoCancelerSw() ||
+           uuid == getEffectImplUuidAutomaticGainControlV1Sw() ||
+           uuid == getEffectImplUuidAutomaticGainControlV2Sw() ||
+           uuid == getEffectImplUuidNoiseSuppressionSw();
+}
+
+extern "C" binder_exception_t createEffect(const AudioUuid* uuid,
+                                           std::shared_ptr<IEffect>* instanceSpp) {
+    if (!uuid || !isPreProcessingUuidSupported(*uuid)) {
+        LOG(ERROR) << __func__ << "uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    if (instanceSpp) {
+        *instanceSpp = ndk::SharedRefBase::make<EffectPreProcessing>(*uuid);
+        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
+        return EX_NONE;
+    } else {
+        LOG(ERROR) << __func__ << " invalid input parameter!";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+}
+
+extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
+    if (!in_impl_uuid || !isPreProcessingUuidSupported(*in_impl_uuid)) {
+        LOG(ERROR) << __func__ << "uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    if (*in_impl_uuid == getEffectImplUuidAcousticEchoCancelerSw()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::kAcousticEchoCancelerDesc;
+    } else if (*in_impl_uuid == getEffectImplUuidAutomaticGainControlV1Sw()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::kAutomaticGainControlV1Desc;
+    } else if (*in_impl_uuid == getEffectImplUuidAutomaticGainControlV2Sw()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::kAutomaticGainControlV2Desc;
+    } else if (*in_impl_uuid == getEffectImplUuidNoiseSuppressionSw()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::kNoiseSuppressionDesc;
+    }
+    return EX_NONE;
+}
+
+namespace aidl::android::hardware::audio::effect {
+
+EffectPreProcessing::EffectPreProcessing(const AudioUuid& uuid) {
+    LOG(DEBUG) << __func__ << uuid.toString();
+    if (uuid == getEffectImplUuidAcousticEchoCancelerSw()) {
+        mType = PreProcessingEffectType::ACOUSTIC_ECHO_CANCELLATION;
+        mDescriptor = &kAcousticEchoCancelerDesc;
+        mEffectName = &kAcousticEchoCancelerEffectName;
+    } else if (uuid == getEffectImplUuidAutomaticGainControlV1Sw()) {
+        mType = PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V1;
+        mDescriptor = &kAutomaticGainControlV1Desc;
+        mEffectName = &kAutomaticGainControlV1EffectName;
+    } else if (uuid == getEffectImplUuidAutomaticGainControlV2Sw()) {
+        mType = PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V2;
+        mDescriptor = &kAutomaticGainControlV2Desc;
+        mEffectName = &kAutomaticGainControlV2EffectName;
+    } else if (uuid == getEffectImplUuidNoiseSuppressionSw()) {
+        mType = PreProcessingEffectType::NOISE_SUPPRESSION;
+        mDescriptor = &kNoiseSuppressionDesc;
+        mEffectName = &kNoiseSuppressionEffectName;
+    } else {
+        LOG(ERROR) << __func__ << uuid.toString() << " not supported!";
+    }
+}
+
+EffectPreProcessing::~EffectPreProcessing() {
+    cleanUp();
+    LOG(DEBUG) << __func__;
+}
+
+ndk::ScopedAStatus EffectPreProcessing::getDescriptor(Descriptor* _aidl_return) {
+    RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
+    LOG(DEBUG) << _aidl_return->toString();
+    *_aidl_return = *mDescriptor;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectPreProcessing::setParameterSpecific(const Parameter::Specific& specific) {
+    LOG(DEBUG) << __func__ << " specific " << specific.toString();
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    auto tag = specific.getTag();
+    switch (tag) {
+        case Parameter::Specific::acousticEchoCanceler:
+            return setParameterAcousticEchoCanceler(specific);
+        case Parameter::Specific::automaticGainControlV1:
+            return setParameterAutomaticGainControlV1(specific);
+        case Parameter::Specific::automaticGainControlV2:
+            return setParameterAutomaticGainControlV2(specific);
+        case Parameter::Specific::noiseSuppression:
+            return setParameterNoiseSuppression(specific);
+        default:
+            LOG(ERROR) << __func__ << " unsupported tag " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "specificParamNotSupported");
+    }
+}
+
+ndk::ScopedAStatus EffectPreProcessing::setParameterAcousticEchoCanceler(
+        const Parameter::Specific& specific) {
+    auto& param = specific.get<Parameter::Specific::acousticEchoCanceler>();
+    RETURN_IF(!inRange(param, kAcousticEchoCancelerRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
+    auto tag = param.getTag();
+
+    switch (tag) {
+        case AcousticEchoCanceler::echoDelayUs: {
+            RETURN_IF(mContext->setAcousticEchoCancelerEchoDelay(
+                              param.get<AcousticEchoCanceler::echoDelayUs>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "echoDelayNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        case AcousticEchoCanceler::mobileMode: {
+            RETURN_IF(mContext->setAcousticEchoCancelerMobileMode(
+                              param.get<AcousticEchoCanceler::mobileMode>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "SettingMobileModeNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "AcousticEchoCancelerTagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus EffectPreProcessing::setParameterAutomaticGainControlV1(
+        const Parameter::Specific& specific) {
+    auto& param = specific.get<Parameter::Specific::automaticGainControlV1>();
+    RETURN_IF(!inRange(param, kAutomaticGainControlV1Ranges), EX_ILLEGAL_ARGUMENT, "outOfRange");
+    auto tag = param.getTag();
+
+    switch (tag) {
+        case AutomaticGainControlV1::targetPeakLevelDbFs: {
+            RETURN_IF(mContext->setAutomaticGainControlV1TargetPeakLevel(
+                              param.get<AutomaticGainControlV1::targetPeakLevelDbFs>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "targetPeakLevelNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        case AutomaticGainControlV1::maxCompressionGainDb: {
+            RETURN_IF(mContext->setAutomaticGainControlV1MaxCompressionGain(
+                              param.get<AutomaticGainControlV1::maxCompressionGainDb>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "maxCompressionGainNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        case AutomaticGainControlV1::enableLimiter: {
+            RETURN_IF(
+                    mContext->setAutomaticGainControlV1EnableLimiter(
+                            param.get<AutomaticGainControlV1::enableLimiter>()) != RetCode::SUCCESS,
+                    EX_ILLEGAL_ARGUMENT, "enableLimiterNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV1TagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus EffectPreProcessing::setParameterAutomaticGainControlV2(
+        const Parameter::Specific& specific) {
+    auto& param = specific.get<Parameter::Specific::automaticGainControlV2>();
+    RETURN_IF(!inRange(param, kAutomaticGainControlV2Ranges), EX_ILLEGAL_ARGUMENT, "outOfRange");
+    auto tag = param.getTag();
+
+    switch (tag) {
+        case AutomaticGainControlV2::fixedDigitalGainMb: {
+            RETURN_IF(mContext->setAutomaticGainControlV2DigitalGain(
+                              param.get<AutomaticGainControlV2::fixedDigitalGainMb>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "digitalGainNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        case AutomaticGainControlV2::levelEstimator: {
+            RETURN_IF(mContext->setAutomaticGainControlV2LevelEstimator(
+                              param.get<AutomaticGainControlV2::levelEstimator>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "levelEstimatorNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        case AutomaticGainControlV2::saturationMarginMb: {
+            RETURN_IF(mContext->setAutomaticGainControlV2SaturationMargin(
+                              param.get<AutomaticGainControlV2::saturationMarginMb>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "saturationMarginNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV2TagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus EffectPreProcessing::setParameterNoiseSuppression(
+        const Parameter::Specific& specific) {
+    auto& param = specific.get<Parameter::Specific::noiseSuppression>();
+    auto tag = param.getTag();
+
+    switch (tag) {
+        case NoiseSuppression::level: {
+            RETURN_IF(mContext->setNoiseSuppressionLevel(param.get<NoiseSuppression::level>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "levelNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "NoiseSuppressionTagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus EffectPreProcessing::getParameterSpecific(const Parameter::Id& id,
+                                                             Parameter::Specific* specific) {
+    RETURN_IF(!specific, EX_NULL_POINTER, "nullPtr");
+    auto tag = id.getTag();
+
+    switch (tag) {
+        case Parameter::Id::acousticEchoCancelerTag:
+            return getParameterAcousticEchoCanceler(
+                    id.get<Parameter::Id::acousticEchoCancelerTag>(), specific);
+        case Parameter::Id::automaticGainControlV1Tag:
+            return getParameterAutomaticGainControlV1(
+                    id.get<Parameter::Id::automaticGainControlV1Tag>(), specific);
+        case Parameter::Id::automaticGainControlV2Tag:
+            return getParameterAutomaticGainControlV2(
+                    id.get<Parameter::Id::automaticGainControlV2Tag>(), specific);
+        case Parameter::Id::noiseSuppressionTag:
+            return getParameterNoiseSuppression(id.get<Parameter::Id::noiseSuppressionTag>(),
+                                                specific);
+        default:
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "wrongIdTag");
+    }
+}
+
+ndk::ScopedAStatus EffectPreProcessing::getParameterAcousticEchoCanceler(
+        const AcousticEchoCanceler::Id& id, Parameter::Specific* specific) {
+    RETURN_IF(id.getTag() != AcousticEchoCanceler::Id::commonTag, EX_ILLEGAL_ARGUMENT,
+              "AcousticEchoCancelerTagNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    AcousticEchoCanceler param;
+    auto tag = id.get<AcousticEchoCanceler::Id::commonTag>();
+    switch (tag) {
+        case AcousticEchoCanceler::echoDelayUs: {
+            param.set<AcousticEchoCanceler::echoDelayUs>(
+                    mContext->getAcousticEchoCancelerEchoDelay());
+            break;
+        }
+        case AcousticEchoCanceler::mobileMode: {
+            param.set<AcousticEchoCanceler::mobileMode>(
+                    mContext->getAcousticEchoCancelerMobileMode());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "AcousticEchoCancelerTagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::acousticEchoCanceler>(param);
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectPreProcessing::getParameterAutomaticGainControlV1(
+        const AutomaticGainControlV1::Id& id, Parameter::Specific* specific) {
+    RETURN_IF(id.getTag() != AutomaticGainControlV1::Id::commonTag, EX_ILLEGAL_ARGUMENT,
+              "AutomaticGainControlV1TagNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    AutomaticGainControlV1 param;
+
+    auto tag = id.get<AutomaticGainControlV1::Id::commonTag>();
+    switch (tag) {
+        case AutomaticGainControlV1::targetPeakLevelDbFs: {
+            param.set<AutomaticGainControlV1::targetPeakLevelDbFs>(
+                    mContext->getAutomaticGainControlV1TargetPeakLevel());
+            break;
+        }
+        case AutomaticGainControlV1::maxCompressionGainDb: {
+            param.set<AutomaticGainControlV1::maxCompressionGainDb>(
+                    mContext->getAutomaticGainControlV1MaxCompressionGain());
+            break;
+        }
+        case AutomaticGainControlV1::enableLimiter: {
+            param.set<AutomaticGainControlV1::enableLimiter>(
+                    mContext->getAutomaticGainControlV1EnableLimiter());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV1TagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::automaticGainControlV1>(param);
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectPreProcessing::getParameterAutomaticGainControlV2(
+        const AutomaticGainControlV2::Id& id, Parameter::Specific* specific) {
+    RETURN_IF(id.getTag() != AutomaticGainControlV2::Id::commonTag, EX_ILLEGAL_ARGUMENT,
+              "AutomaticGainControlV2TagNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    AutomaticGainControlV2 param;
+
+    auto tag = id.get<AutomaticGainControlV2::Id::commonTag>();
+    switch (tag) {
+        case AutomaticGainControlV2::fixedDigitalGainMb: {
+            param.set<AutomaticGainControlV2::fixedDigitalGainMb>(
+                    mContext->getAutomaticGainControlV2DigitalGain());
+            break;
+        }
+        case AutomaticGainControlV2::levelEstimator: {
+            param.set<AutomaticGainControlV2::levelEstimator>(
+                    mContext->getAutomaticGainControlV2LevelEstimator());
+            break;
+        }
+        case AutomaticGainControlV2::saturationMarginMb: {
+            param.set<AutomaticGainControlV2::saturationMarginMb>(
+                    mContext->getAutomaticGainControlV2SaturationMargin());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV2TagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::automaticGainControlV2>(param);
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectPreProcessing::getParameterNoiseSuppression(
+        const NoiseSuppression::Id& id, Parameter::Specific* specific) {
+    RETURN_IF(id.getTag() != NoiseSuppression::Id::commonTag, EX_ILLEGAL_ARGUMENT,
+              "NoiseSuppressionTagNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    NoiseSuppression param;
+
+    auto tag = id.get<NoiseSuppression::Id::commonTag>();
+    switch (tag) {
+        case NoiseSuppression::level: {
+            param.set<NoiseSuppression::level>(mContext->getNoiseSuppressionLevel());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "NoiseSuppressionTagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::noiseSuppression>(param);
+    return ndk::ScopedAStatus::ok();
+}
+
+std::shared_ptr<EffectContext> EffectPreProcessing::createContext(const Parameter::Common& common) {
+    if (mContext) {
+        LOG(DEBUG) << __func__ << " context already exists";
+    } else {
+        // PreProcessingSession is a singleton
+        mContext = PreProcessingSession::getPreProcessingSession().createSession(
+                mType, 1 /* statusFmqDepth */, common);
+    }
+
+    return mContext;
+}
+
+std::shared_ptr<EffectContext> EffectPreProcessing::getContext() {
+    return mContext;
+}
+
+RetCode EffectPreProcessing::releaseContext() {
+    if (mContext) {
+        PreProcessingSession::getPreProcessingSession().releaseSession(mType,
+                                                                       mContext->getSessionId());
+        mContext.reset();
+    }
+    return RetCode::SUCCESS;
+}
+
+ndk::ScopedAStatus EffectPreProcessing::commandImpl(CommandId command) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    switch (command) {
+        case CommandId::START:
+            mContext->enable();
+            break;
+        case CommandId::STOP:
+            mContext->disable();
+            break;
+        case CommandId::RESET:
+            mContext->disable();
+            mContext->resetBuffer();
+            break;
+        default:
+            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "commandIdNotSupported");
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+// Processing method running in EffectWorker thread.
+IEffect::Status EffectPreProcessing::effectProcessImpl(float* in, float* out, int sampleToProcess) {
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!mContext, status, "nullContext");
+    return mContext->lvmProcess(in, out, sampleToProcess);
+}
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.h b/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
new file mode 100644
index 0000000..fad848a
--- /dev/null
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+
+#include "PreProcessingContext.h"
+#include "PreProcessingSession.h"
+#include "effect-impl/EffectImpl.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+class EffectPreProcessing final : public EffectImpl {
+  public:
+    explicit EffectPreProcessing(const AudioUuid& uuid);
+    ~EffectPreProcessing() override;
+
+    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
+
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
+                                            Parameter::Specific* specific) override;
+
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
+    std::shared_ptr<EffectContext> getContext() override;
+    RetCode releaseContext() override;
+
+    IEffect::Status effectProcessImpl(float* in, float* out, int samples) override;
+
+    ndk::ScopedAStatus commandImpl(CommandId command) override;
+
+    std::string getEffectName() override { return *mEffectName; }
+
+  private:
+    std::shared_ptr<PreProcessingContext> mContext;
+    const Descriptor* mDescriptor;
+    const std::string* mEffectName;
+    PreProcessingEffectType mType;
+
+    ndk::ScopedAStatus setParameterAcousticEchoCanceler(const Parameter::Specific& specific);
+    ndk::ScopedAStatus getParameterAcousticEchoCanceler(const AcousticEchoCanceler::Id& id,
+                                                        Parameter::Specific* specific);
+
+    ndk::ScopedAStatus setParameterAutomaticGainControlV1(const Parameter::Specific& specific);
+    ndk::ScopedAStatus getParameterAutomaticGainControlV1(const AutomaticGainControlV1::Id& id,
+                                                          Parameter::Specific* specific);
+
+    ndk::ScopedAStatus setParameterAutomaticGainControlV2(const Parameter::Specific& specific);
+    ndk::ScopedAStatus getParameterAutomaticGainControlV2(const AutomaticGainControlV2::Id& id,
+                                                          Parameter::Specific* specific);
+
+    ndk::ScopedAStatus setParameterNoiseSuppression(const Parameter::Specific& specific);
+    ndk::ScopedAStatus getParameterNoiseSuppression(const NoiseSuppression::Id& id,
+                                                    Parameter::Specific* specific);
+};
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
new file mode 100644
index 0000000..c1e4eda
--- /dev/null
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
@@ -0,0 +1,311 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstddef>
+#define LOG_TAG "PreProcessingContext"
+#include <Utils.h>
+
+#include "PreProcessingContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+using aidl::android::media::audio::common::AudioDeviceDescription;
+using aidl::android::media::audio::common::AudioDeviceType;
+
+RetCode PreProcessingContext::init(const Parameter::Common& common) {
+    std::lock_guard lg(mMutex);
+    webrtc::AudioProcessingBuilder apBuilder;
+    mAudioProcessingModule = apBuilder.Create();
+    if (mAudioProcessingModule == nullptr) {
+        LOG(ERROR) << "init could not get apm engine";
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+
+    updateConfigs(common);
+
+    mEnabledMsk = 0;
+    mProcessedMsk = 0;
+    mRevEnabledMsk = 0;
+    mRevProcessedMsk = 0;
+
+    auto config = mAudioProcessingModule->GetConfig();
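+    // Apply per-effect defaults to the newly created audio processing module.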
+    switch (mType) {
+        case PreProcessingEffectType::ACOUSTIC_ECHO_CANCELLATION:
+            config.echo_canceller.mobile_mode = true;
+            break;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V1:
+            config.gain_controller1.target_level_dbfs = kAgcDefaultTargetLevel;
+            config.gain_controller1.compression_gain_db = kAgcDefaultCompGain;
+            config.gain_controller1.enable_limiter = kAgcDefaultLimiter;
+            break;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V2:
+            config.gain_controller2.fixed_digital.gain_db = 0.f;
+            break;
+        case PreProcessingEffectType::NOISE_SUPPRESSION:
+            config.noise_suppression.level = kNsDefaultLevel;
+            break;
+    }
+    mAudioProcessingModule->ApplyConfig(config);
+    mState = PRE_PROC_STATE_INITIALIZED;
+    return RetCode::SUCCESS;
+}
+
+RetCode PreProcessingContext::deInit() {
+    std::lock_guard lg(mMutex);
+    mAudioProcessingModule = nullptr;
+    mState = PRE_PROC_STATE_UNINITIALIZED;
+    return RetCode::SUCCESS;
+}
+
+RetCode PreProcessingContext::enable() {
+    if (mState != PRE_PROC_STATE_INITIALIZED) {
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+    int typeMsk = (1 << int(mType));
+    std::lock_guard lg(mMutex);
+    // Check if effect is already enabled.
+    if ((mEnabledMsk & typeMsk) == typeMsk) {
+        return RetCode::ERROR_ILLEGAL_PARAMETER;
+    }
+    mEnabledMsk |= typeMsk;
+    auto config = mAudioProcessingModule->GetConfig();
+    switch (mType) {
+        case PreProcessingEffectType::ACOUSTIC_ECHO_CANCELLATION:
+            config.echo_canceller.enabled = true;
+            // AEC has reverse stream
+            mRevEnabledMsk |= typeMsk;
+            mRevProcessedMsk = 0;
+            break;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V1:
+            config.gain_controller1.enabled = true;
+            break;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V2:
+            config.gain_controller2.enabled = true;
+            break;
+        case PreProcessingEffectType::NOISE_SUPPRESSION:
+            config.noise_suppression.enabled = true;
+            break;
+    }
+    mProcessedMsk = 0;
+    mAudioProcessingModule->ApplyConfig(config);
+    mState = PRE_PROC_STATE_ACTIVE;
+    return RetCode::SUCCESS;
+}
+
+RetCode PreProcessingContext::disable() {
+    if (mState != PRE_PROC_STATE_ACTIVE) {
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+    int typeMsk = (1 << int(mType));
+    std::lock_guard lg(mMutex);
+    // Check if effect is already disabled.
+    if ((mEnabledMsk & typeMsk) != typeMsk) {
+        return RetCode::ERROR_ILLEGAL_PARAMETER;
+    }
+    mEnabledMsk &= ~typeMsk;
+    auto config = mAudioProcessingModule->GetConfig();
+    switch (mType) {
+        case PreProcessingEffectType::ACOUSTIC_ECHO_CANCELLATION:
+            config.echo_canceller.enabled = false;
+            // AEC has reverse stream
+            mRevEnabledMsk &= ~typeMsk;
+            mRevProcessedMsk = 0;
+            break;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V1:
+            config.gain_controller1.enabled = false;
+            break;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V2:
+            config.gain_controller2.enabled = false;
+            break;
+        case PreProcessingEffectType::NOISE_SUPPRESSION:
+            config.noise_suppression.enabled = false;
+            break;
+    }
+    mProcessedMsk = 0;
+    mAudioProcessingModule->ApplyConfig(config);
+    mState = PRE_PROC_STATE_INITIALIZED;
+    return RetCode::SUCCESS;
+}
+
+RetCode PreProcessingContext::setCommon(const Parameter::Common& common) {
+    mCommon = common;
+    updateConfigs(common);
+    return RetCode::SUCCESS;
+}
+
+void PreProcessingContext::updateConfigs(const Parameter::Common& common) {
+    mInputConfig.set_sample_rate_hz(common.input.base.sampleRate);
+    mInputConfig.set_num_channels(::aidl::android::hardware::audio::common::getChannelCount(
+                    common.input.base.channelMask));
+    mOutputConfig.set_sample_rate_hz(common.input.base.sampleRate);
+    mOutputConfig.set_num_channels(::aidl::android::hardware::audio::common::getChannelCount(
+                    common.output.base.channelMask));
+}
+
+RetCode PreProcessingContext::setAcousticEchoCancelerEchoDelay(int echoDelayUs) {
+    mEchoDelayUs = echoDelayUs;
+    std::lock_guard lg(mMutex);
+    mAudioProcessingModule->set_stream_delay_ms(mEchoDelayUs / 1000);
+    return RetCode::SUCCESS;
+}
+
+int PreProcessingContext::getAcousticEchoCancelerEchoDelay() const {
+    return mEchoDelayUs;
+}
+
+RetCode PreProcessingContext::setAcousticEchoCancelerMobileMode(bool mobileMode) {
+    mMobileMode = mobileMode;
+    std::lock_guard lg(mMutex);
+    auto config = mAudioProcessingModule->GetConfig();
+    config.echo_canceller.mobile_mode = mobileMode;
+    mAudioProcessingModule->ApplyConfig(config);
+    return RetCode::SUCCESS;
+}
+
+bool PreProcessingContext::getAcousticEchoCancelerMobileMode() const {
+    return mMobileMode;
+}
+
+RetCode PreProcessingContext::setAutomaticGainControlV1TargetPeakLevel(int targetPeakLevel) {
+    mTargetPeakLevel = targetPeakLevel;
+    std::lock_guard lg(mMutex);
+    auto config = mAudioProcessingModule->GetConfig();
+    config.gain_controller1.target_level_dbfs = -(mTargetPeakLevel / 100);
+    mAudioProcessingModule->ApplyConfig(config);
+    return RetCode::SUCCESS;
+}
+
+int PreProcessingContext::getAutomaticGainControlV1TargetPeakLevel() const {
+    return mTargetPeakLevel;
+}
+
+RetCode PreProcessingContext::setAutomaticGainControlV1MaxCompressionGain(int maxCompressionGain) {
+    mMaxCompressionGain = maxCompressionGain;
+    std::lock_guard lg(mMutex);
+    auto config = mAudioProcessingModule->GetConfig();
+    config.gain_controller1.compression_gain_db = mMaxCompressionGain / 100;
+    mAudioProcessingModule->ApplyConfig(config);
+    return RetCode::SUCCESS;
+}
+
+int PreProcessingContext::getAutomaticGainControlV1MaxCompressionGain() const {
+    return mMaxCompressionGain;
+}
+
+RetCode PreProcessingContext::setAutomaticGainControlV1EnableLimiter(bool enableLimiter) {
+    mEnableLimiter = enableLimiter;
+    std::lock_guard lg(mMutex);
+    auto config = mAudioProcessingModule->GetConfig();
+    config.gain_controller1.enable_limiter = mEnableLimiter;
+    mAudioProcessingModule->ApplyConfig(config);
+    return RetCode::SUCCESS;
+}
+
+bool PreProcessingContext::getAutomaticGainControlV1EnableLimiter() const {
+    return mEnableLimiter;
+}
+
+RetCode PreProcessingContext::setAutomaticGainControlV2DigitalGain(int gain) {
+    mDigitalGain = gain;
+    std::lock_guard lg(mMutex);
+    auto config = mAudioProcessingModule->GetConfig();
+    config.gain_controller2.fixed_digital.gain_db = mDigitalGain;
+    mAudioProcessingModule->ApplyConfig(config);
+    return RetCode::SUCCESS;
+}
+
+int PreProcessingContext::getAutomaticGainControlV2DigitalGain() const {
+    return mDigitalGain;
+}
+
+RetCode PreProcessingContext::setAutomaticGainControlV2LevelEstimator(
+        AutomaticGainControlV2::LevelEstimator levelEstimator) {
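+    // Cached only; the webrtc APM currently supports just the RMS level estimator.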
+    mLevelEstimator = levelEstimator;
+    return RetCode::SUCCESS;
+}
+
+AutomaticGainControlV2::LevelEstimator
+PreProcessingContext::getAutomaticGainControlV2LevelEstimator() const {
+    return mLevelEstimator;
+}
+
+RetCode PreProcessingContext::setAutomaticGainControlV2SaturationMargin(int saturationMargin) {
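+    // Cached only; the extra saturation margin is no longer configurable in the webrtc APM.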
+    mSaturationMargin = saturationMargin;
+    return RetCode::SUCCESS;
+}
+
+int PreProcessingContext::getAutomaticGainControlV2SaturationMargin() const {
+    return mSaturationMargin;
+}
+
+RetCode PreProcessingContext::setNoiseSuppressionLevel(NoiseSuppression::Level level) {
+    mLevel = level;
+    std::lock_guard lg(mMutex);
+    auto config = mAudioProcessingModule->GetConfig();
+    config.noise_suppression.level =
+            (webrtc::AudioProcessing::Config::NoiseSuppression::Level)level;
+    mAudioProcessingModule->ApplyConfig(config);
+    return RetCode::SUCCESS;
+}
+
+NoiseSuppression::Level PreProcessingContext::getNoiseSuppressionLevel() const {
+    return mLevel;
+}
+
+IEffect::Status PreProcessingContext::lvmProcess(float* in, float* out, int samples) {
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!in, status, "nullInput");
+    RETURN_VALUE_IF(!out, status, "nullOutput");
+    status = {EX_ILLEGAL_STATE, 0, 0};
+    int64_t inputFrameCount = getCommon().input.frameCount;
+    int64_t outputFrameCount = getCommon().output.frameCount;
+    RETURN_VALUE_IF(inputFrameCount != outputFrameCount, status, "FrameCountMismatch");
+    RETURN_VALUE_IF(0 == getInputFrameSize(), status, "zeroFrameSize");
+
+    LOG(DEBUG) << __func__ << " start processing";
+    std::lock_guard lg(mMutex);
+
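+    // Mark this effect as processed; ProcessStream() runs once all enabled effects have been
+    // marked for the current round.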
+    mProcessedMsk |= (1 << int(mType));
+
+    // The webrtc implementation clears was_stream_delay_set after every ProcessStream() call,
+    // so the stream delay must be set again before each round.
+    mAudioProcessingModule->set_stream_delay_ms(mEchoDelayUs / 1000);
+
+    if ((mProcessedMsk & mEnabledMsk) == mEnabledMsk) {
+        mProcessedMsk = 0;
+        int processStatus = mAudioProcessingModule->ProcessStream(
+                (const int16_t* const)in, mInputConfig, mOutputConfig, (int16_t* const)out);
+        if (processStatus != 0) {
+            LOG(ERROR) << "Process stream failed with error " << processStatus;
+            return status;
+        }
+    }
+
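+    // Mirror the bookkeeping for the reverse (far-end) stream, which is enabled by AEC.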
+    mRevProcessedMsk |= (1 << int(mType));
+
+    if ((mRevProcessedMsk & mRevEnabledMsk) == mRevEnabledMsk) {
+        mRevProcessedMsk = 0;
+        int revProcessStatus = mAudioProcessingModule->ProcessReverseStream(
+                (const int16_t* const)in, mInputConfig, mInputConfig, (int16_t* const)out);
+        if (revProcessStatus != 0) {
+            LOG(ERROR) << "Process reverse stream failed with error " << revProcessStatus;
+            return status;
+        }
+    }
+
+    return {STATUS_OK, samples, samples};
+}
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.h b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
new file mode 100644
index 0000000..9ba1bbe
--- /dev/null
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/logging.h>
+#include <android-base/thread_annotations.h>
+#include <audio_processing.h>
+#include <unordered_map>
+
+#include "PreProcessingTypes.h"
+#include "effect-impl/EffectContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+enum PreProcEffectState {
+    PRE_PROC_STATE_UNINITIALIZED,
+    PRE_PROC_STATE_INITIALIZED,
+    PRE_PROC_STATE_ACTIVE,
+};
+
+class PreProcessingContext final : public EffectContext {
+  public:
+    PreProcessingContext(int statusDepth, const Parameter::Common& common,
+                         const PreProcessingEffectType& type)
+        : EffectContext(statusDepth, common), mType(type) {
+        LOG(DEBUG) << __func__ << type;
+        mState = PRE_PROC_STATE_UNINITIALIZED;
+    }
+    ~PreProcessingContext() override { LOG(DEBUG) << __func__; }
+
+    RetCode init(const Parameter::Common& common);
+    RetCode deInit();
+
+    PreProcessingEffectType getPreProcessingType() const { return mType; }
+
+    RetCode enable();
+    RetCode disable();
+
+    RetCode setCommon(const Parameter::Common& common) override;
+    void updateConfigs(const Parameter::Common& common);
+
+    RetCode setAcousticEchoCancelerEchoDelay(int echoDelayUs);
+    int getAcousticEchoCancelerEchoDelay() const;
+    RetCode setAcousticEchoCancelerMobileMode(bool mobileMode);
+    bool getAcousticEchoCancelerMobileMode() const;
+
+    RetCode setAutomaticGainControlV1TargetPeakLevel(int targetPeakLevel);
+    int getAutomaticGainControlV1TargetPeakLevel() const;
+    RetCode setAutomaticGainControlV1MaxCompressionGain(int maxCompressionGain);
+    int getAutomaticGainControlV1MaxCompressionGain() const;
+    RetCode setAutomaticGainControlV1EnableLimiter(bool enableLimiter);
+    bool getAutomaticGainControlV1EnableLimiter() const;
+
+    RetCode setAutomaticGainControlV2DigitalGain(int gain);
+    int getAutomaticGainControlV2DigitalGain() const;
+    RetCode setAutomaticGainControlV2LevelEstimator(
+            AutomaticGainControlV2::LevelEstimator levelEstimator);
+    AutomaticGainControlV2::LevelEstimator getAutomaticGainControlV2LevelEstimator() const;
+    RetCode setAutomaticGainControlV2SaturationMargin(int saturationMargin);
+    int getAutomaticGainControlV2SaturationMargin() const;
+
+    RetCode setNoiseSuppressionLevel(NoiseSuppression::Level level);
+    NoiseSuppression::Level getNoiseSuppressionLevel() const;
+
+    IEffect::Status lvmProcess(float* in, float* out, int samples);
+
+  private:
+    static constexpr inline int kAgcDefaultTargetLevel = 3;
+    static constexpr inline int kAgcDefaultCompGain = 9;
+    static constexpr inline bool kAgcDefaultLimiter = true;
+    static constexpr inline webrtc::AudioProcessing::Config::NoiseSuppression::Level
+            kNsDefaultLevel = webrtc::AudioProcessing::Config::NoiseSuppression::kModerate;
+
+    std::mutex mMutex;
+    const PreProcessingEffectType mType;
+    PreProcEffectState mState;  // current state
+
+    // handle on webRTC audio processing module (APM)
+    rtc::scoped_refptr<webrtc::AudioProcessing> mAudioProcessingModule GUARDED_BY(mMutex);
+
+    int mEnabledMsk GUARDED_BY(mMutex);       // bit field containing IDs of enabled pre processors
+    int mProcessedMsk GUARDED_BY(mMutex);     // bit field containing IDs of pre processors already
+                                              // processed in current round
+    int mRevEnabledMsk GUARDED_BY(mMutex);    // bit field containing IDs of enabled pre processors
+                                              // with reverse channel
+    int mRevProcessedMsk GUARDED_BY(mMutex);  // bit field containing IDs of pre processors with
+                                              // reverse channel already processed in current round
+
+    webrtc::StreamConfig mInputConfig;   // input stream configuration
+    webrtc::StreamConfig mOutputConfig;  // output stream configuration
+
+    // Acoustic Echo Canceler
+    int mEchoDelayUs = 0;
+    bool mMobileMode = false;
+
+    // Automatic Gain Control V1
+    int mTargetPeakLevel = 0;
+    int mMaxCompressionGain = 0;
+    bool mEnableLimiter = false;
+
+    // Automatic Gain Control V2
+    int mDigitalGain = 0;
+    AutomaticGainControlV2::LevelEstimator mLevelEstimator =
+            AutomaticGainControlV2::LevelEstimator::RMS;
+    int mSaturationMargin = 2;
+
+    // NoiseSuppression
+    NoiseSuppression::Level mLevel = NoiseSuppression::Level::LOW;
+};
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingSession.h b/media/libeffects/preprocessing/aidl/PreProcessingSession.h
new file mode 100644
index 0000000..877292f
--- /dev/null
+++ b/media/libeffects/preprocessing/aidl/PreProcessingSession.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <algorithm>
+#include <memory>
+#include <unordered_map>
+
+#include <android-base/logging.h>
+#include <android-base/thread_annotations.h>
+
+#include "PreProcessingContext.h"
+#include "PreProcessingTypes.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+/**
+ * @brief Maintain all effect pre-processing sessions.
+ *
+ * Sessions are identified by session ID; the pre-processing implementation supports at most
+ * MAX_PRE_PROC_SESSIONS sessions.
+ */
+class PreProcessingSession {
+  public:
+    static PreProcessingSession& getPreProcessingSession() {
+        static PreProcessingSession instance;
+        return instance;
+    }
+
+    static bool findPreProcessingTypeInList(
+            std::vector<std::shared_ptr<PreProcessingContext>>& list,
+            const PreProcessingEffectType& type, bool remove = false) {
+        auto itor = std::find_if(list.begin(), list.end(),
+                                 [type](const std::shared_ptr<PreProcessingContext>& bundle) {
+                                     return bundle->getPreProcessingType() == type;
+                                 });
+        if (itor == list.end()) {
+            return false;
+        }
+        if (remove) {
+            (*itor)->deInit();
+            list.erase(itor);
+        }
+        return true;
+    }
+
+    /**
+     * Create a PreProcessingContext of the given type, wrapped in a shared_ptr. Each session must
+     * not hold more than one context of each type.
+     */
+    std::shared_ptr<PreProcessingContext> createSession(const PreProcessingEffectType& type,
+                                                        int statusDepth,
+                                                        const Parameter::Common& common) {
+        int sessionId = common.session;
+        LOG(DEBUG) << __func__ << type << " with sessionId " << sessionId;
+        std::lock_guard lg(mMutex);
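+        // Reject a new session ID once the map is full; existing sessions may still add types.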
+        if (mSessionMap.count(sessionId) == 0 && mSessionMap.size() >= MAX_PRE_PROC_SESSIONS) {
+            LOG(ERROR) << __func__ << " exceeded the maximum number of pre-processing sessions";
+            return nullptr;
+        }
+
+        if (mSessionMap.count(sessionId)) {
+            if (findPreProcessingTypeInList(mSessionMap[sessionId], type)) {
+                LOG(ERROR) << __func__ << type << " already exist in session " << sessionId;
+                return nullptr;
+            }
+        }
+
+        auto& list = mSessionMap[sessionId];
+        auto context = std::make_shared<PreProcessingContext>(statusDepth, common, type);
+        RETURN_VALUE_IF(!context, nullptr, "failedToCreateContext");
+
+        RetCode ret = context->init(common);
+        if (RetCode::SUCCESS != ret) {
+            LOG(ERROR) << __func__ << " context init ret " << ret;
+            return nullptr;
+        }
+        list.push_back(context);
+        return context;
+    }
+
+    void releaseSession(const PreProcessingEffectType& type, int sessionId) {
+        LOG(DEBUG) << __func__ << type << " sessionId " << sessionId;
+        std::lock_guard lg(mMutex);
+        if (mSessionMap.count(sessionId)) {
+            auto& list = mSessionMap[sessionId];
+            if (!findPreProcessingTypeInList(list, type, true /* remove */)) {
+                LOG(ERROR) << __func__ << " can't find " << type << " in session " << sessionId;
+                return;
+            }
+            if (list.empty()) {
+                mSessionMap.erase(sessionId);
+            }
+        }
+    }
+
+  private:
+    // Lock for mSessionMap access.
+    std::mutex mMutex;
+    // Max session number supported.
+    static constexpr int MAX_PRE_PROC_SESSIONS = 8;
+    std::unordered_map<int /* session ID */, std::vector<std::shared_ptr<PreProcessingContext>>>
+            mSessionMap GUARDED_BY(mMutex);
+};
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingTypes.h b/media/libeffects/preprocessing/aidl/PreProcessingTypes.h
new file mode 100644
index 0000000..4c2b8ba
--- /dev/null
+++ b/media/libeffects/preprocessing/aidl/PreProcessingTypes.h
@@ -0,0 +1,113 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <optional>
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+
+#include <audio_effects/effect_aec.h>
+#include <audio_effects/effect_agc.h>
+#include <audio_effects/effect_agc2.h>
+#include <audio_effects/effect_ns.h>
+#include <system/audio_effects/effect_uuid.h>
+
+#include "effect-impl/EffectTypes.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+// Acoustic Echo Cancellation
+static const std::string kAcousticEchoCancelerEffectName = "Acoustic Echo Canceler";
+static const std::vector<Range::AcousticEchoCancelerRange> kAcousticEchoCancelerRanges = {
+        MAKE_RANGE(AcousticEchoCanceler, AcousticEchoCanceler::echoDelayUs, 0, 500)};
+static const Capability kAcousticEchoCancelerCap = {.range = kAcousticEchoCancelerRanges};
+static const Descriptor kAcousticEchoCancelerDesc = {
+        .common = {.id = {.type = getEffectTypeUuidAcousticEchoCanceler(),
+                          .uuid = getEffectImplUuidAcousticEchoCancelerSw(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::PRE_PROC, .deviceIndication = true},
+                   .name = kAcousticEchoCancelerEffectName,
+                   .implementor = "The Android Open Source Project"},
+        .capability = kAcousticEchoCancelerCap};
+
+// Automatic Gain Control 1
+static const std::string kAutomaticGainControlV1EffectName = "Automatic Gain Control V1";
+static const std::vector<Range::AutomaticGainControlV1Range> kAutomaticGainControlV1Ranges = {
+        MAKE_RANGE(AutomaticGainControlV1, AutomaticGainControlV1::targetPeakLevelDbFs, -3100, 0),
+        MAKE_RANGE(AutomaticGainControlV1, AutomaticGainControlV1::maxCompressionGainDb, 0, 9000)};
+static const Capability kAutomaticGainControlV1Cap = {.range = kAutomaticGainControlV1Ranges};
+static const Descriptor kAutomaticGainControlV1Desc = {
+        .common = {.id = {.type = getEffectTypeUuidAutomaticGainControlV1(),
+                          .uuid = getEffectImplUuidAutomaticGainControlV1Sw(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::PRE_PROC, .deviceIndication = true},
+                   .name = kAutomaticGainControlV1EffectName,
+                   .implementor = "The Android Open Source Project"},
+        .capability = kAutomaticGainControlV1Cap};
+
+// Automatic Gain Control 2
+static const std::string kAutomaticGainControlV2EffectName = "Automatic Gain Control V2";
+const std::vector<Range::AutomaticGainControlV2Range> kAutomaticGainControlV2Ranges = {
+        MAKE_RANGE(AutomaticGainControlV2, AutomaticGainControlV2::fixedDigitalGainMb, 0, 90),
+        // extra_saturation_margin_db is no longer configurable in webrtc
+        MAKE_RANGE(AutomaticGainControlV2, AutomaticGainControlV2::saturationMarginMb, 2, 2),
+        // WebRTC only supports RMS level estimator now
+        MAKE_RANGE(AutomaticGainControlV2, AutomaticGainControlV2::levelEstimator,
+                   AutomaticGainControlV2::LevelEstimator::RMS,
+                   AutomaticGainControlV2::LevelEstimator::RMS)};
+static const Capability kAutomaticGainControlV2Cap = {.range = kAutomaticGainControlV2Ranges};
+static const Descriptor kAutomaticGainControlV2Desc = {
+        .common = {.id = {.type = getEffectTypeUuidAutomaticGainControlV2(),
+                          .uuid = getEffectImplUuidAutomaticGainControlV2Sw(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::PRE_PROC, .deviceIndication = true},
+                   .name = kAutomaticGainControlV2EffectName,
+                   .implementor = "The Android Open Source Project"},
+        .capability = kAutomaticGainControlV2Cap};
+
+// Noise suppression
+static const std::string kNoiseSuppressionEffectName = "Noise Suppression";
+static const Descriptor kNoiseSuppressionDesc = {
+        .common = {.id = {.type = getEffectTypeUuidNoiseSuppression(),
+                          .uuid = getEffectImplUuidNoiseSuppressionSw(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::PRE_PROC, .deviceIndication = true},
+                   .name = kNoiseSuppressionEffectName,
+                   .implementor = "The Android Open Source Project"}};
+
+enum class PreProcessingEffectType {
+    ACOUSTIC_ECHO_CANCELLATION,
+    AUTOMATIC_GAIN_CONTROL_V1,
+    AUTOMATIC_GAIN_CONTROL_V2,
+    NOISE_SUPPRESSION,
+};
+
+inline std::ostream& operator<<(std::ostream& out, const PreProcessingEffectType& type) {
+    switch (type) {
+        case PreProcessingEffectType::ACOUSTIC_ECHO_CANCELLATION:
+            return out << kAcousticEchoCancelerEffectName;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V1:
+            return out << kAutomaticGainControlV1EffectName;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V2:
+            return out << kAutomaticGainControlV2EffectName;
+        case PreProcessingEffectType::NOISE_SUPPRESSION:
+            return out << kNoiseSuppressionEffectName;
+    }
+    return out << "EnumPreProcessingEffectTypeError";
+}
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/visualizer/aidl/Visualizer.cpp b/media/libeffects/visualizer/aidl/Visualizer.cpp
index 6e7833c..53bfb41 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.cpp
+++ b/media/libeffects/visualizer/aidl/Visualizer.cpp
@@ -17,18 +17,21 @@
 #define LOG_TAG "AHAL_VisualizerLibEffects"
 
 #include <android-base/logging.h>
+#include <system/audio_effects/effect_uuid.h>
+
 #include "Visualizer.h"
 
 using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::getEffectImplUuidVisualizer;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidVisualizer;
 using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::VisualizerImpl;
-using aidl::android::hardware::audio::effect::kVisualizerImplUUID;
 using aidl::android::hardware::audio::effect::State;
+using aidl::android::hardware::audio::effect::VisualizerImpl;
 using aidl::android::media::audio::common::AudioUuid;
 
 extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                            std::shared_ptr<IEffect>* instanceSpp) {
-    if (!in_impl_uuid || *in_impl_uuid != kVisualizerImplUUID) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidVisualizer()) {
         LOG(ERROR) << __func__ << "uuid not supported";
         return EX_ILLEGAL_ARGUMENT;
     }
@@ -43,7 +46,7 @@
 }
 
 extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
-    if (!in_impl_uuid || *in_impl_uuid != kVisualizerImplUUID) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidVisualizer()) {
         LOG(ERROR) << __func__ << "uuid not supported";
         return EX_ILLEGAL_ARGUMENT;
     }
@@ -65,8 +68,8 @@
 const Capability VisualizerImpl::kCapability = {
         .range = Range::make<Range::visualizer>(VisualizerImpl::kRanges)};
 const Descriptor VisualizerImpl::kDescriptor = {
-        .common = {.id = {.type = kVisualizerTypeUUID,
-                          .uuid = kVisualizerImplUUID,
+        .common = {.id = {.type = getEffectTypeUuidVisualizer(),
+                          .uuid = getEffectImplUuidVisualizer(),
                           .proxy = std::nullopt},
                    .flags = {.type = Flags::Type::INSERT,
                              .insert = Flags::Insert::LAST,
diff --git a/media/libeffects/visualizer/aidl/Visualizer.h b/media/libeffects/visualizer/aidl/Visualizer.h
index f6e1d6d..ec725db 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.h
+++ b/media/libeffects/visualizer/aidl/Visualizer.h
@@ -19,7 +19,6 @@
 #include <aidl/android/hardware/audio/effect/BnEffect.h>
 
 #include "effect-impl/EffectImpl.h"
-#include "effect-impl/EffectUUID.h"
 
 #include "VisualizerContext.h"
 
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.cpp b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
index 4405407..5d0d08d 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.cpp
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
@@ -17,18 +17,19 @@
 #include "VisualizerContext.h"
 
 #include <algorithm>
+#include <math.h>
+#include <time.h>
+
 #include <android/binder_status.h>
 #include <audio_utils/primitives.h>
-#include <math.h>
 #include <system/audio.h>
-#include <time.h>
 #include <Utils.h>
 
 #ifndef BUILD_FLOAT
         #error AIDL Visualizer only support float 32bits, make sure add cflags -DBUILD_FLOAT,
 #endif
 
-using android::hardware::audio::common::getChannelCount;
+using aidl::android::hardware::audio::common::getChannelCount;
 
 namespace aidl::android::hardware::audio::effect {
 
@@ -191,9 +192,15 @@
 std::vector<uint8_t> VisualizerContext::capture() {
     std::vector<uint8_t> result;
     std::lock_guard lg(mMutex);
-    RETURN_VALUE_IF(mState != State::ACTIVE, result, "illegalState");
-    const uint32_t deltaMs = getDeltaTimeMsFromUpdatedTime_l();
+    // cts android.media.audio.cts.VisualizerTest expecting silence data when effect not running
+    // RETURN_VALUE_IF(mState != State::ACTIVE, result, "illegalState");
+    if (mState != State::ACTIVE) {
+        result.resize(mCaptureSamples);
+        memset(result.data(), 0x80, mCaptureSamples);
+        return result;
+    }
 
+    const uint32_t deltaMs = getDeltaTimeMsFromUpdatedTime_l();
     // if audio framework has stopped playing audio although the effect is still active we must
     // clear the capture buffer to return silence
     if ((mLastCaptureIdx == mCaptureIdx) && (mBufferUpdateTime.tv_sec != 0) &&
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.h b/media/libeffects/visualizer/aidl/VisualizerContext.h
index 3cb711e..958035f 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.h
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.h
@@ -83,7 +83,7 @@
     uint32_t mLastCaptureIdx GUARDED_BY(mMutex) = 0;
     Visualizer::ScalingMode mScalingMode GUARDED_BY(mMutex) = Visualizer::ScalingMode::NORMALIZED;
     struct timespec mBufferUpdateTime GUARDED_BY(mMutex);
-    // capture buf with 8 bits PCM
+    // capture buf with 8 bits mono PCM samples
     std::array<uint8_t, kMaxCaptureBufSize> mCaptureBuf GUARDED_BY(mMutex);
     uint32_t mDownstreamLatency GUARDED_BY(mMutex) = 0;
     uint32_t mCaptureSamples GUARDED_BY(mMutex) = kMaxCaptureBufSize;
diff --git a/media/libheadtracking/Android.bp b/media/libheadtracking/Android.bp
index f64aedf..9955862 100644
--- a/media/libheadtracking/Android.bp
+++ b/media/libheadtracking/Android.bp
@@ -16,11 +16,13 @@
       "Pose.cpp",
       "PoseBias.cpp",
       "PoseDriftCompensator.cpp",
+      "PosePredictor.cpp",
       "PoseRateLimiter.cpp",
       "QuaternionUtil.cpp",
       "ScreenHeadFusion.cpp",
       "StillnessDetector.cpp",
       "Twist.cpp",
+      "VectorRecorder.cpp",
     ],
     shared_libs: [
         "libaudioutils",
@@ -35,6 +37,15 @@
     export_header_lib_headers: [
         "libeigen",
     ],
+    cflags: [
+        "-Wthread-safety",
+    ],
+    product_variables: {
+        debuggable: {
+            // enable experiments only in userdebug and eng builds
+            cflags: ["-DENABLE_VERIFICATION"],
+        },
+    },
 }
 
 cc_library {
@@ -76,6 +87,7 @@
         "Pose-test.cpp",
         "PoseBias-test.cpp",
         "PoseDriftCompensator-test.cpp",
+        "PosePredictor.cpp",
         "PoseRateLimiter-test.cpp",
         "QuaternionUtil-test.cpp",
         "ScreenHeadFusion-test.cpp",
@@ -84,6 +96,7 @@
     ],
     shared_libs: [
         "libaudioutils",
+        "libbase", // StringAppendF
         "libheadtracking",
     ],
 }
diff --git a/media/libheadtracking/HeadTrackingProcessor-test.cpp b/media/libheadtracking/HeadTrackingProcessor-test.cpp
index 299192f..5190f52 100644
--- a/media/libheadtracking/HeadTrackingProcessor-test.cpp
+++ b/media/libheadtracking/HeadTrackingProcessor-test.cpp
@@ -15,10 +15,10 @@
  */
 
 #include "media/HeadTrackingProcessor.h"
+#include "media/QuaternionUtil.h"
 
 #include <gtest/gtest.h>
 
-#include "QuaternionUtil.h"
 #include "TestUtil.h"
 
 namespace android {
@@ -82,6 +82,8 @@
     std::unique_ptr<HeadTrackingProcessor> processor = createHeadTrackingProcessor(
             Options{.predictionDuration = 2.f}, HeadTrackingMode::WORLD_RELATIVE);
 
+    processor->setPosePredictorType(PosePredictorType::TWIST);
+
     // Establish a baseline for the drift compensators.
     processor->setWorldToHeadPose(0, Pose3f(), Twist3f());
     processor->setWorldToScreenPose(0, Pose3f());
diff --git a/media/libheadtracking/HeadTrackingProcessor.cpp b/media/libheadtracking/HeadTrackingProcessor.cpp
index 101b825..8502af0 100644
--- a/media/libheadtracking/HeadTrackingProcessor.cpp
+++ b/media/libheadtracking/HeadTrackingProcessor.cpp
@@ -18,10 +18,11 @@
 #include <android-base/stringprintf.h>
 #include <audio_utils/SimpleLog.h>
 #include "media/HeadTrackingProcessor.h"
+#include "media/QuaternionUtil.h"
 
 #include "ModeSelector.h"
 #include "PoseBias.h"
-#include "QuaternionUtil.h"
+#include "PosePredictor.h"
 #include "ScreenHeadFusion.h"
 #include "StillnessDetector.h"
 
@@ -59,8 +60,8 @@
 
     void setWorldToHeadPose(int64_t timestamp, const Pose3f& worldToHead,
                             const Twist3f& headTwist) override {
-        Pose3f predictedWorldToHead =
-                worldToHead * integrate(headTwist, mOptions.predictionDuration);
+        const Pose3f predictedWorldToHead = mPosePredictor.predict(
+                timestamp, worldToHead, headTwist, mOptions.predictionDuration);
         mHeadPoseBias.setInput(predictedWorldToHead);
         mHeadStillnessDetector.setInput(timestamp, predictedWorldToHead);
         mWorldToHeadTimestamp = timestamp;
@@ -161,6 +162,10 @@
         }
     }
 
+    void setPosePredictorType(PosePredictorType type) override {
+        mPosePredictor.setPosePredictorType(type);
+    }
+
     std::string toString_l(unsigned level) const override {
         std::string prefixSpace(level, ' ');
         std::string ss = prefixSpace + "HeadTrackingProcessor:\n";
@@ -186,6 +191,7 @@
                       prefixSpace.c_str(), mOptions.screenStillnessRotationalThreshold);
         ss += mModeSelector.toString(level + 1);
         ss += mRateLimiter.toString(level + 1);
+        ss += mPosePredictor.toString(level + 1);
         ss.append(prefixSpace + "ReCenterHistory:\n");
         ss += mLocalLog.dumpToString((prefixSpace + " ").c_str(), mMaxLocalLogLine);
         return ss;
@@ -207,6 +213,7 @@
     ScreenHeadFusion mScreenHeadFusion;
     ModeSelector mModeSelector;
     PoseRateLimiter mRateLimiter;
+    PosePredictor mPosePredictor;
     static constexpr std::size_t mMaxLocalLogLine = 10;
     SimpleLog mLocalLog{mMaxLocalLogLine};
 };
@@ -230,5 +237,26 @@
     return "EnumNotImplemented";
 };
 
+std::string toString(PosePredictorType posePredictorType) {
+    switch (posePredictorType) {
+        case PosePredictorType::AUTO: return "AUTO";
+        case PosePredictorType::LAST: return "LAST";
+        case PosePredictorType::TWIST: return "TWIST";
+        case PosePredictorType::LEAST_SQUARES: return "LEAST_SQUARES";
+    }
+    return "UNKNOWN" + std::to_string((int)posePredictorType);
+}
+
+bool isValidPosePredictorType(PosePredictorType posePredictorType) {
+    switch (posePredictorType) {
+        case PosePredictorType::AUTO:
+        case PosePredictorType::LAST:
+        case PosePredictorType::TWIST:
+        case PosePredictorType::LEAST_SQUARES:
+            return true;
+    }
+    return false;
+}
+
 }  // namespace media
 }  // namespace android
diff --git a/media/libheadtracking/ModeSelector-test.cpp b/media/libheadtracking/ModeSelector-test.cpp
index a136e6b..6925908 100644
--- a/media/libheadtracking/ModeSelector-test.cpp
+++ b/media/libheadtracking/ModeSelector-test.cpp
@@ -18,7 +18,7 @@
 
 #include <gtest/gtest.h>
 
-#include "QuaternionUtil.h"
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 namespace android {
diff --git a/media/libheadtracking/ModeSelector.cpp b/media/libheadtracking/ModeSelector.cpp
index 6277090..7ee21b3 100644
--- a/media/libheadtracking/ModeSelector.cpp
+++ b/media/libheadtracking/ModeSelector.cpp
@@ -117,10 +117,12 @@
 std::string ModeSelector::toString(unsigned level) const {
     std::string prefixSpace(level, ' ');
     std::string ss(prefixSpace);
-    StringAppendF(&ss, "ModeSelector: ScreenToStage %s\n",
-                    mScreenToStage.toString().c_str());
-    ss.append(prefixSpace + "Mode downgrade history:\n");
-    ss += mLocalLog.dumpToString((prefixSpace + " ").c_str(), sMaxLocalLogLine);
+    ss.append("ModeSelector: ScreenToStage ")
+        .append(mScreenToStage.toString())
+        .append("\n")
+        .append(prefixSpace)
+        .append("Mode change history:\n")
+        .append(mLocalLog.dumpToString((prefixSpace + " ").c_str(), sMaxLocalLogLine));
     return ss;
 }
 
diff --git a/media/libheadtracking/Pose-test.cpp b/media/libheadtracking/Pose-test.cpp
index a9e18ce..29dba29 100644
--- a/media/libheadtracking/Pose-test.cpp
+++ b/media/libheadtracking/Pose-test.cpp
@@ -18,7 +18,7 @@
 
 #include <gtest/gtest.h>
 
-#include "QuaternionUtil.h"
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 using android::media::Pose3f;
diff --git a/media/libheadtracking/Pose.cpp b/media/libheadtracking/Pose.cpp
index 4a4b56a..e03725b 100644
--- a/media/libheadtracking/Pose.cpp
+++ b/media/libheadtracking/Pose.cpp
@@ -16,8 +16,8 @@
 #include <android-base/stringprintf.h>
 
 #include "media/Pose.h"
+#include "media/QuaternionUtil.h"
 #include "media/Twist.h"
-#include "QuaternionUtil.h"
 
 namespace android {
 namespace media {
diff --git a/media/libheadtracking/PoseBias-test.cpp b/media/libheadtracking/PoseBias-test.cpp
index 9f42a2c..659dda0 100644
--- a/media/libheadtracking/PoseBias-test.cpp
+++ b/media/libheadtracking/PoseBias-test.cpp
@@ -17,7 +17,8 @@
 #include <gtest/gtest.h>
 
 #include "PoseBias.h"
-#include "QuaternionUtil.h"
+
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 namespace android {
diff --git a/media/libheadtracking/PoseDriftCompensator-test.cpp b/media/libheadtracking/PoseDriftCompensator-test.cpp
index df0a05f..521e3eb 100644
--- a/media/libheadtracking/PoseDriftCompensator-test.cpp
+++ b/media/libheadtracking/PoseDriftCompensator-test.cpp
@@ -18,7 +18,8 @@
 #include <cmath>
 
 #include "PoseDriftCompensator.h"
-#include "QuaternionUtil.h"
+
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 namespace android {
diff --git a/media/libheadtracking/PoseDriftCompensator.cpp b/media/libheadtracking/PoseDriftCompensator.cpp
index 0e90cad..2775790 100644
--- a/media/libheadtracking/PoseDriftCompensator.cpp
+++ b/media/libheadtracking/PoseDriftCompensator.cpp
@@ -18,7 +18,7 @@
 
 #include <cmath>
 
-#include "QuaternionUtil.h"
+#include "media/QuaternionUtil.h"
 
 namespace android {
 namespace media {
diff --git a/media/libheadtracking/PosePredictor.cpp b/media/libheadtracking/PosePredictor.cpp
new file mode 100644
index 0000000..f67a966
--- /dev/null
+++ b/media/libheadtracking/PosePredictor.cpp
@@ -0,0 +1,238 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PosePredictor.h"
+
+namespace android::media {
+
+namespace {
+#ifdef ENABLE_VERIFICATION
+constexpr bool kEnableVerification = true;
+constexpr std::array<int, 3> kLookAheadMs{ 50, 100, 200 };
+#else
+constexpr bool kEnableVerification = false;
+constexpr std::array<int, 0> kLookAheadMs{};
+#endif
+
+} // namespace
+
+void LeastSquaresPredictor::add(int64_t atNs, const Pose3f& pose, const Twist3f& twist)
+{
+    (void)twist;
+    mLastAtNs = atNs;
+    mLastPose = pose;
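+    // Feed each quaternion component into its own weighted least squares fit against time.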
+    const auto q = pose.rotation();
+    const double datNs = static_cast<double>(atNs);
+    mRw.add({datNs, q.w()});
+    mRx.add({datNs, q.x()});
+    mRy.add({datNs, q.y()});
+    mRz.add({datNs, q.z()});
+}
+
+Pose3f LeastSquaresPredictor::predict(int64_t atNs) const
+{
+    if (mRw.getN() < kMinimumSamplesForPrediction) return mLastPose;
+
+    /*
+     * Using parametric form, we have q(t) = { w(t), x(t), y(t), z(t) }.
+     * We compute the least squares prediction of w, x, y, z.
+     */
+    const double dLookahead = static_cast<double>(atNs);
+    Eigen::Quaternionf lsq(
+        mRw.getYFromX(dLookahead),
+        mRx.getYFromX(dLookahead),
+        mRy.getYFromX(dLookahead),
+        mRz.getYFromX(dLookahead));
+
+    /*
+     * We cheat here, since the result lsq is the least squares prediction
+     * in H (arbitrary quaternion), not the least squares prediction in
+     * SO(3) (unit quaternion).
+     *
+     * In other words, the result for lsq is most likely not a unit quaternion.
+     * To solve this, we normalize, thereby selecting the closest unit quaternion
+     * in SO(3) to the prediction in H.
+     */
+    lsq.normalize();
+    return Pose3f(lsq);
+}
+
+void LeastSquaresPredictor::reset() {
+    mLastAtNs = {};
+    mLastPose = {};
+    mRw.reset();
+    mRx.reset();
+    mRy.reset();
+    mRz.reset();
+}
+
+std::string LeastSquaresPredictor::toString(size_t index) const {
+    std::string s(index, ' ');
+    s.append("LeastSquaresPredictor using alpha: ")
+        .append(std::to_string(mAlpha))
+        .append(" last pose: ")
+        .append(mLastPose.toString())
+        .append("\n");
+    return s;
+}
+
+// Formatting
+static inline std::vector<size_t> createDelimiterIdx(size_t predictors, size_t lookaheads) {
+    if (predictors == 0) return {};
+    --predictors;
+    std::vector<size_t> delimiterIdx(predictors);
+    for (size_t i = 0; i < predictors; ++i) {
+        delimiterIdx[i] = (i + 1) * lookaheads;
+    }
+    return delimiterIdx;
+}
+
+PosePredictor::PosePredictor()
+    : mPredictors{  // must match switch in getCurrentPredictor()
+            std::make_shared<LastPredictor>(),
+            std::make_shared<TwistPredictor>(),
+            std::make_shared<LeastSquaresPredictor>(),
+        }
+    , mLookaheadMs(kLookAheadMs.begin(), kLookAheadMs.end())
+    , mVerifiers(std::size(mLookaheadMs) * std::size(mPredictors))
+    , mDelimiterIdx(createDelimiterIdx(std::size(mPredictors), std::size(mLookaheadMs)))
+    , mPredictionRecorder(
+        std::size(mVerifiers) /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
+        mDelimiterIdx)
+    , mPredictionDurableRecorder(
+        std::size(mVerifiers) /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
+        mDelimiterIdx)
+    {
+}
+
+Pose3f PosePredictor::predict(
+        int64_t timestampNs, const Pose3f& pose, const Twist3f& twist, float predictionDurationNs)
+{
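+    // Reset all predictors if the history is stale (too long since the last sample).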
+    if (timestampNs - mLastTimestampNs > kMaximumSampleIntervalBeforeResetNs) {
+        for (const auto& predictor : mPredictors) {
+            predictor->reset();
+        }
+        ++mResets;
+    }
+    mLastTimestampNs = timestampNs;
+
+    auto selectedPredictor = getCurrentPredictor();
+    if constexpr (kEnableVerification) {
+        // Update all Predictors
+        for (const auto& predictor : mPredictors) {
+            predictor->add(timestampNs, pose, twist);
+        }
+
+        // Update Verifiers and calculate errors
+        std::vector<float> error(std::size(mVerifiers));
+        for (size_t i = 0; i < mLookaheadMs.size(); ++i) {
+            constexpr float RADIAN_TO_DEGREES = 180 / M_PI;
+            const int64_t atNs =
+                    timestampNs + mLookaheadMs[i] * PosePredictorVerifier::kMillisToNanos;
+
+            for (size_t j = 0; j < mPredictors.size(); ++j) {
+                const size_t idx = i * std::size(mPredictors) + j;
+                mVerifiers[idx].verifyActualPose(timestampNs, pose);
+                mVerifiers[idx].addPredictedPose(atNs, mPredictors[j]->predict(atNs));
+                error[idx] = RADIAN_TO_DEGREES * mVerifiers[idx].lastError();
+            }
+        }
+        // Record errors
+        mPredictionRecorder.record(error);
+        mPredictionDurableRecorder.record(error);
+    } else /* constexpr */ {
+        selectedPredictor->add(timestampNs, pose, twist);
+    }
+
+    // Deliver prediction
+    const int64_t predictionTimeNs = timestampNs + (int64_t)predictionDurationNs;
+    return selectedPredictor->predict(predictionTimeNs);
+}
+
+void PosePredictor::setPosePredictorType(PosePredictorType type) {
+    if (!isValidPosePredictorType(type)) return;
+    if (type == mSetType) return;
+    mSetType = type;
+    if (type == android::media::PosePredictorType::AUTO) {
+        type = android::media::PosePredictorType::LEAST_SQUARES;
+    }
+    if (type != mCurrentType) {
+        mCurrentType = type;
+        if constexpr (!kEnableVerification) {
+            // Verification keeps all predictors up-to-date.
+            // If we don't enable verification, we must reset the current predictor.
+            getCurrentPredictor()->reset();
+        }
+    }
+}
+
+std::string PosePredictor::toString(size_t index) const {
+    std::string prefixSpace(index, ' ');
+    std::string ss(prefixSpace);
+    ss.append("PosePredictor:\n")
+        .append(prefixSpace)
+        .append(" Current Prediction Type: ")
+        .append(android::media::toString(mCurrentType))
+        .append("\n")
+        .append(prefixSpace)
+        .append(" Resets: ")
+        .append(std::to_string(mResets))
+        .append("\n")
+        .append(getCurrentPredictor()->toString(index + 1));
+    if constexpr (kEnableVerification) {
+        // dump verification
+        ss.append(prefixSpace)
+            .append(" Prediction abs error (L1) degrees [ type (last twist least-squares) x ( ");
+        for (size_t i = 0; i < mLookaheadMs.size(); ++i) {
+            if (i > 0) ss.append(" : ");
+            ss.append(std::to_string(mLookaheadMs[i]));
+        }
+        std::vector<float> cumulativeAverageErrors(std::size(mVerifiers));
+        for (size_t i = 0; i < cumulativeAverageErrors.size(); ++i) {
+            cumulativeAverageErrors[i] = mVerifiers[i].cumulativeAverageError();
+        }
+        ss.append(" ) ms ]\n")
+            .append(prefixSpace)
+            .append("  Cumulative Average Error:\n")
+            .append(prefixSpace)
+            .append("   ")
+            .append(VectorRecorder::toString(cumulativeAverageErrors, mDelimiterIdx, "%.3g"))
+            .append("\n")
+            .append(prefixSpace)
+            .append("  PerMinuteHistory:\n")
+            .append(mPredictionDurableRecorder.toString(index + 3))
+            .append(prefixSpace)
+            .append("  PerSecondHistory:\n")
+            .append(mPredictionRecorder.toString(index + 3));
+    }
+    return ss;
+}
+
+std::shared_ptr<PredictorBase> PosePredictor::getCurrentPredictor() const {
+    // We don't use a map here; look the predictor up directly (order matches the constructor).
+    switch (mCurrentType) {
+    default:
+    case android::media::PosePredictorType::LAST:
+        return mPredictors[0];
+    case android::media::PosePredictorType::TWIST:
+        return mPredictors[1];
+    case android::media::PosePredictorType::AUTO: // shouldn't occur here.
+    case android::media::PosePredictorType::LEAST_SQUARES:
+        return mPredictors[2];
+    }
+}
+
+} // namespace android::media
diff --git a/media/libheadtracking/PosePredictor.h b/media/libheadtracking/PosePredictor.h
new file mode 100644
index 0000000..06983cc
--- /dev/null
+++ b/media/libheadtracking/PosePredictor.h
@@ -0,0 +1,203 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "PosePredictorVerifier.h"
+#include <memory>
+#include <audio_utils/Statistics.h>
+#include <media/PosePredictorType.h>
+#include <media/Twist.h>
+#include <media/VectorRecorder.h>
+
+namespace android::media {
+
+// Interface for generic pose predictors
+class PredictorBase {
+public:
+    virtual ~PredictorBase() = default;
+    virtual void add(int64_t atNs, const Pose3f& pose, const Twist3f& twist) = 0;
+    virtual Pose3f predict(int64_t atNs) const = 0;
+    virtual void reset() = 0;
+    virtual std::string toString(size_t index) const = 0;
+};
+
+/**
+ * LastPredictor uses the last sample Pose for prediction
+ *
+ * This class is not thread-safe.
+ */
+class LastPredictor : public PredictorBase {
+public:
+    void add(int64_t atNs, const Pose3f& pose, const Twist3f& twist) override {
+        (void)atNs;
+        (void)twist;
+        mLastPose = pose;
+    }
+
+    Pose3f predict(int64_t atNs) const override {
+        (void)atNs;
+        return mLastPose;
+    }
+
+    void reset() override {
+        mLastPose = {};
+    }
+
+    std::string toString(size_t index) const override {
+        std::string s(index, ' ');
+        s.append("LastPredictor using last pose: ")
+            .append(mLastPose.toString())
+            .append("\n");
+        return s;
+    }
+
+private:
+    Pose3f mLastPose;
+};
+
+/**
+ * TwistPredictor uses the last sample Twist and Pose for prediction
+ *
+ * This class is not thread-safe.
+ */
+class TwistPredictor : public PredictorBase {
+public:
+    void add(int64_t atNs, const Pose3f& pose, const Twist3f& twist) override {
+        mLastAtNs = atNs;
+        mLastPose = pose;
+        mLastTwist = twist;
+    }
+
+    Pose3f predict(int64_t atNs) const override {
+        return mLastPose * integrate(mLastTwist, atNs - mLastAtNs);
+    }
+
+    void reset() override {
+        mLastAtNs = {};
+        mLastPose = {};
+        mLastTwist = {};
+    }
+
+    std::string toString(size_t index) const override {
+        std::string s(index, ' ');
+        s.append("TwistPredictor using last pose: ")
+            .append(mLastPose.toString())
+            .append(" last twist: ")
+            .append(mLastTwist.toString())
+            .append("\n");
+        return s;
+    }
+
+private:
+    int64_t mLastAtNs{};
+    Pose3f mLastPose;
+    Twist3f mLastTwist;
+};
+
+
+/**
+ * LeastSquaresPredictor uses the Pose history for prediction.
+ *
+ * An exponentially weighted least squares fit is used.
+ *
+ * This class is not thread-safe.
+ */
+class LeastSquaresPredictor : public PredictorBase {
+public:
+    // alpha is the exponential decay.
+    LeastSquaresPredictor(double alpha = kDefaultAlphaEstimator)
+        : mAlpha(alpha)
+        , mRw(alpha)
+        , mRx(alpha)
+        , mRy(alpha)
+        , mRz(alpha)
+        {}
+
+    void add(int64_t atNs, const Pose3f& pose, const Twist3f& twist) override;
+    Pose3f predict(int64_t atNs) const override;
+    void reset() override;
+    std::string toString(size_t index) const override;
+
+private:
+    const double mAlpha;
+    int64_t mLastAtNs{};
+    Pose3f mLastPose;
+    static constexpr double kDefaultAlphaEstimator = 0.5;
+    static constexpr size_t kMinimumSamplesForPrediction = 4;
+    audio_utils::LinearLeastSquaresFit<double> mRw;
+    audio_utils::LinearLeastSquaresFit<double> mRx;
+    audio_utils::LinearLeastSquaresFit<double> mRy;
+    audio_utils::LinearLeastSquaresFit<double> mRz;
+};
+
+/*
+ * PosePredictor predicts the pose given sensor input at a time in the future.
+ *
+ * This class is not thread-safe.
+ */
+class PosePredictor {
+public:
+    PosePredictor();
+
+    Pose3f predict(int64_t timestampNs, const Pose3f& pose, const Twist3f& twist,
+            float predictionDurationNs);
+
+    void setPosePredictorType(PosePredictorType type);
+
+    // convert predictions to a printable string
+    std::string toString(size_t index) const;
+
+private:
+    static constexpr int64_t kMaximumSampleIntervalBeforeResetNs =
+            300'000'000;
+
+    // Predictors
+    const std::vector<std::shared_ptr<PredictorBase>> mPredictors;
+
+    // Verifiers: one is created per future lookahead interval (mLookaheadMs) for comparison.
+    const std::vector<int> mLookaheadMs;
+
+    std::vector<PosePredictorVerifier> mVerifiers;
+
+    const std::vector<size_t> mDelimiterIdx;
+
+    // Recorders
+    media::VectorRecorder mPredictionRecorder{
+        std::size(mVerifiers) /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
+        mDelimiterIdx};
+    media::VectorRecorder mPredictionDurableRecorder{
+        std::size(mVerifiers) /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
+        mDelimiterIdx};
+
+    // Status
+
+    // SetType is the externally set predictor type.  It may be AUTO.
+    PosePredictorType mSetType = PosePredictorType::LEAST_SQUARES;
+
+    // CurrentType is the actual predictor type used by this class.
+    // It does not include AUTO because that metatype means the class
+    // chooses the best predictor type based on sensor statistics.
+    PosePredictorType mCurrentType = PosePredictorType::LEAST_SQUARES;
+
+    int64_t mResets{};
+    int64_t mLastTimestampNs{};
+
+    // Returns current predictor
+    std::shared_ptr<PredictorBase> getCurrentPredictor() const;
+};
+
+}  // namespace android::media
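
Example (not part of this patch): a minimal sketch of how a caller might drive PosePredictor, assuming the Pose3f/Twist3f types from media/Pose.h and media/Twist.h; the timestamps and prediction horizon below are illustrative only.

    #include "PosePredictor.h"

    // Feed the latest sensor sample and ask for a pose ~20 ms into the future.
    android::media::Pose3f predictAhead(android::media::PosePredictor& predictor,
                                        int64_t nowNs,
                                        const android::media::Pose3f& sensorPose,
                                        const android::media::Twist3f& sensorTwist) {
        // AUTO currently resolves to LEAST_SQUARES (see setPosePredictorType above).
        predictor.setPosePredictorType(android::media::PosePredictorType::AUTO);
        // predict() both records the sample and returns the extrapolated pose.
        return predictor.predict(nowNs, sensorPose, sensorTwist,
                                 /* predictionDurationNs */ 20'000'000.f);
    }
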
diff --git a/media/libheadtracking/PosePredictorVerifier.h b/media/libheadtracking/PosePredictorVerifier.h
new file mode 100644
index 0000000..6b4a357
--- /dev/null
+++ b/media/libheadtracking/PosePredictorVerifier.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <deque>
+#include <string>
+
+#include <audio_utils/Statistics.h>
+#include <media/Pose.h>
+
+namespace android::media {
+
+/**
+ * PosePredictorVerifier is used to validate predictions
+ *
+ * This class is not thread-safe
+ */
+class PosePredictorVerifier {
+public:
+    std::string toString() const {
+         return mErrorStats.toString();
+    }
+
+    static constexpr int64_t kMillisToNanos = 1000000;
+
+    void verifyActualPose(int64_t timestampNs, const Pose3f& pose) {
+        for (auto it = mPredictions.begin(); it != mPredictions.end();) {
+            if (it->first < timestampNs) {
+                it = mPredictions.erase(it);
+            } else {
+                int64_t dt = it->first - timestampNs;
+                if (std::abs(dt) < 10 * kMillisToNanos) {
+                    const float angle = pose.rotation().angularDistance(it->second.rotation());
+                    const float error = std::abs(angle); // L1 (absolute difference) here.
+                    mLastError = error;
+                    mErrorStats.add(error);
+                }
+                break;
+            }
+        }
+    }
+
+    void addPredictedPose(int64_t atNs, const Pose3f& pose) {
+        mPredictions.emplace_back(atNs, pose);
+    }
+
+    float lastError() const {
+        return mLastError;
+    }
+
+    float cumulativeAverageError() const {
+        return mErrorStats.getMean();
+    }
+
+private:
+    static constexpr double kCumulativeErrorAlpha = 0.999;
+    std::deque<std::pair<int64_t, Pose3f>> mPredictions;
+    float mLastError{};
+    android::audio_utils::Statistics<double> mErrorStats{kCumulativeErrorAlpha};
+};
+
+}  // namespace android::media
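
Example (not part of this patch): a sketch of the intended verification flow, with placeholder timestamps and poses; the verifier pairs each recorded prediction with the first actual sample that lands within 10 ms of it.

    #include "PosePredictorVerifier.h"

    using android::media::Pose3f;
    using android::media::PosePredictorVerifier;

    void verifySketch(PosePredictorVerifier& verifier,
                      int64_t nowNs, const Pose3f& predictedPose,
                      int64_t actualNs, const Pose3f& actualPose) {
        // Record a prediction 50 ms ahead of the current sample time.
        verifier.addPredictedPose(nowNs + 50 * PosePredictorVerifier::kMillisToNanos,
                                  predictedPose);
        // Later, when the sensor reports the pose near that target time:
        verifier.verifyActualPose(actualNs, actualPose);
        const float meanAbsError = verifier.cumulativeAverageError();  // running mean of |error|
        (void)meanAbsError;
    }
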
diff --git a/media/libheadtracking/PoseRateLimiter-test.cpp b/media/libheadtracking/PoseRateLimiter-test.cpp
index f306183..ded874a 100644
--- a/media/libheadtracking/PoseRateLimiter-test.cpp
+++ b/media/libheadtracking/PoseRateLimiter-test.cpp
@@ -17,7 +17,8 @@
 #include <gtest/gtest.h>
 
 #include "PoseRateLimiter.h"
-#include "QuaternionUtil.h"
+
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 namespace android {
diff --git a/media/libheadtracking/QuaternionUtil-test.cpp b/media/libheadtracking/QuaternionUtil-test.cpp
index e79e54a..cfeca00 100644
--- a/media/libheadtracking/QuaternionUtil-test.cpp
+++ b/media/libheadtracking/QuaternionUtil-test.cpp
@@ -16,7 +16,7 @@
 
 #include <gtest/gtest.h>
 
-#include "QuaternionUtil.h"
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 using Eigen::Quaternionf;
@@ -51,6 +51,92 @@
     EXPECT_EQ(vec, quaternionToRotationVector(rotationVectorToQuaternion(vec)));
 }
 
+// Float precision necessitates this tolerance (1e-4f fails).
+constexpr float NEAR = 1e-3f;
+
+TEST(QuaternionUtil, quaternionToAngles_basic) {
+    float pitch, roll, yaw;
+
+    // Angles as reported.
+    // Choose 11 angles between -M_PI / 2 and M_PI / 2.
+    for (int step = -5; step <= 5; ++step) {
+        const float angle = M_PI * step * 0.1f;
+
+        quaternionToAngles(rotationVectorToQuaternion({angle, 0.f, 0.f}), &pitch, &roll, &yaw);
+        EXPECT_NEAR(angle, pitch, NEAR);
+        EXPECT_NEAR(0.f, roll, NEAR);
+        EXPECT_NEAR(0.f, yaw, NEAR);
+
+        quaternionToAngles(rotationVectorToQuaternion({0.f, angle, 0.f}), &pitch, &roll, &yaw);
+        EXPECT_NEAR(0.f, pitch, NEAR);
+        EXPECT_NEAR(angle, roll, NEAR);
+        EXPECT_NEAR(0.f, yaw, NEAR);
+
+        quaternionToAngles(rotationVectorToQuaternion({0.f, 0.f, angle}), &pitch, &roll, &yaw);
+        EXPECT_NEAR(0.f, pitch, NEAR);
+        EXPECT_NEAR(0.f, roll, NEAR);
+        EXPECT_NEAR(angle, yaw, NEAR);
+    }
+
+    // Generates a debug string
+    const std::string s = quaternionToAngles<true /* DEBUG */>(
+            rotationVectorToQuaternion({M_PI, 0.f, 0.f}), &pitch, &roll, &yaw);
+    ASSERT_FALSE(s.empty());
+}
+
+TEST(QuaternionUtil, quaternionToAngles_zaxis) {
+    float pitch, roll, yaw;
+
+    for (int rot_step = -10; rot_step <= 10; ++rot_step) {
+        const float rot_angle = M_PI * rot_step * 0.1f;
+        // pitch independent of world Z rotation
+
+        // We don't test the boundaries of pitch +-M_PI/2 as roll can become
+        // degenerate and atan(0, 0) may report 0, PI, or -PI.
+        for (int step = -4; step <= 4; ++step) {
+            const float angle = M_PI * step * 0.1f;
+            auto q = rotationVectorToQuaternion({angle, 0.f, 0.f});
+            auto world_z = rotationVectorToQuaternion({0.f, 0.f, rot_angle});
+
+            // Sequential active rotations (on world frame) compose as R_2 * R_1.
+            quaternionToAngles(world_z * q, &pitch, &roll, &yaw);
+
+            EXPECT_NEAR(angle, pitch, NEAR);
+            EXPECT_NEAR(0.f, roll, NEAR);
+        }
+
+        // roll independent of world Z rotation
+        for (int step = -5; step <= 5; ++step) {
+            const float angle = M_PI * step * 0.1f;
+            auto q = rotationVectorToQuaternion({0.f, angle, 0.f});
+            auto world_z = rotationVectorToQuaternion({0.f, 0.f, rot_angle});
+
+            // Sequential active rotations (on world frame) compose as R_2 * R_1.
+            quaternionToAngles(world_z * q, &pitch, &roll, &yaw);
+
+            EXPECT_NEAR(0.f, pitch, NEAR);
+            EXPECT_NEAR(angle, roll, NEAR);
+
+            // Convert extrinsic (world-based) active rotations to a sequence of
+            // intrinsic rotations (each rotation based off of previous rotation
+            // frame).
+            //
+            // R_1 * R_intrinsic = R_extrinsic * R_1
+            //    implies
+            // R_intrinsic = (R_1)^-1 R_extrinsic R_1
+            //
+            auto world_z_intrinsic = rotationVectorToQuaternion(
+                    q.inverse() * Vector3f(0.f, 0.f, rot_angle));
+
+            // Sequential intrinsic rotations compose as R_1 * R_2.
+            quaternionToAngles(q * world_z_intrinsic, &pitch, &roll, &yaw);
+
+            EXPECT_NEAR(0.f, pitch, NEAR);
+            EXPECT_NEAR(angle, roll, NEAR);
+        }
+    }
+}
+
 }  // namespace
 }  // namespace media
 }  // namespace android
diff --git a/media/libheadtracking/QuaternionUtil.cpp b/media/libheadtracking/QuaternionUtil.cpp
index 5d090de..e245c80 100644
--- a/media/libheadtracking/QuaternionUtil.cpp
+++ b/media/libheadtracking/QuaternionUtil.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#include "QuaternionUtil.h"
+#include "media/QuaternionUtil.h"
 
 #include <cassert>
 
diff --git a/media/libheadtracking/QuaternionUtil.h b/media/libheadtracking/QuaternionUtil.h
deleted file mode 100644
index f7a2ca9..0000000
--- a/media/libheadtracking/QuaternionUtil.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#pragma once
-
-#include <Eigen/Geometry>
-
-namespace android {
-namespace media {
-
-/**
- * Converts a rotation vector to an equivalent quaternion.
- * The rotation vector is given as a 3-vector whose direction represents the rotation axis and its
- * magnitude the rotation angle (in radians) around that axis.
- */
-Eigen::Quaternionf rotationVectorToQuaternion(const Eigen::Vector3f& rotationVector);
-
-/**
- * Converts a quaternion to an equivalent rotation vector.
- * The rotation vector is given as a 3-vector whose direction represents the rotation axis and its
- * magnitude the rotation angle (in radians) around that axis.
- */
-Eigen::Vector3f quaternionToRotationVector(const Eigen::Quaternionf& quaternion);
-
-/**
- * Returns a quaternion representing a rotation around the X-axis with the given amount (in
- * radians).
- */
-Eigen::Quaternionf rotateX(float angle);
-
-/**
- * Returns a quaternion representing a rotation around the Y-axis with the given amount (in
- * radians).
- */
-Eigen::Quaternionf rotateY(float angle);
-
-/**
- * Returns a quaternion representing a rotation around the Z-axis with the given amount (in
- * radians).
- */
-Eigen::Quaternionf rotateZ(float angle);
-
-}  // namespace media
-}  // namespace android
diff --git a/media/libheadtracking/SensorPoseProvider.cpp b/media/libheadtracking/SensorPoseProvider.cpp
index 31d469c..8a29027 100644
--- a/media/libheadtracking/SensorPoseProvider.cpp
+++ b/media/libheadtracking/SensorPoseProvider.cpp
@@ -32,7 +32,7 @@
 #include <sensor/SensorManager.h>
 #include <utils/Looper.h>
 
-#include "QuaternionUtil.h"
+#include "media/QuaternionUtil.h"
 
 namespace android {
 namespace media {
diff --git a/media/libheadtracking/StillnessDetector-test.cpp b/media/libheadtracking/StillnessDetector-test.cpp
index b6cd479..56e7b4e 100644
--- a/media/libheadtracking/StillnessDetector-test.cpp
+++ b/media/libheadtracking/StillnessDetector-test.cpp
@@ -16,8 +16,9 @@
 
 #include <gtest/gtest.h>
 
-#include "QuaternionUtil.h"
 #include "StillnessDetector.h"
+
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 namespace android {
diff --git a/media/libheadtracking/Twist-test.cpp b/media/libheadtracking/Twist-test.cpp
index 7984e1e..9fbf81f 100644
--- a/media/libheadtracking/Twist-test.cpp
+++ b/media/libheadtracking/Twist-test.cpp
@@ -16,9 +16,7 @@
 
 #include "media/Twist.h"
 
-#include <gtest/gtest.h>
-
-#include "QuaternionUtil.h"
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 using Eigen::Quaternionf;
diff --git a/media/libheadtracking/Twist.cpp b/media/libheadtracking/Twist.cpp
index 664c4d5..fdec694 100644
--- a/media/libheadtracking/Twist.cpp
+++ b/media/libheadtracking/Twist.cpp
@@ -15,8 +15,8 @@
  */
 
 #include "media/Twist.h"
-
-#include "QuaternionUtil.h"
+#include <android-base/stringprintf.h>
+#include "media/QuaternionUtil.h"
 
 namespace android {
 namespace media {
@@ -39,5 +39,11 @@
     return os;
 }
 
+std::string Twist3f::toString() const {
+    return base::StringPrintf("[%0.2f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f]",
+        mTranslationalVelocity[0], mTranslationalVelocity[1], mTranslationalVelocity[2],
+        mRotationalVelocity[0], mRotationalVelocity[1], mRotationalVelocity[2]);
+}
+
 }  // namespace media
 }  // namespace android
diff --git a/media/libheadtracking/VectorRecorder.cpp b/media/libheadtracking/VectorRecorder.cpp
new file mode 100644
index 0000000..5c87d05
--- /dev/null
+++ b/media/libheadtracking/VectorRecorder.cpp
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "media/VectorRecorder.h"
+
+namespace android::media {
+
+// Convert data to string with level indentation.
+// No need for a lock as the SimpleLog is thread-safe.
+std::string VectorRecorder::toString(size_t indent) const {
+    return mRecordLog.dumpToString(std::string(indent, ' ').c_str(), mMaxLocalLogLine);
+}
+
+// Record into local log when it is time.
+void VectorRecorder::record(const std::vector<float>& record) {
+    if (record.size() != mVectorSize) return;
+
+    // Protect against concurrent calls to record().
+    std::lock_guard lg(mLock);
+
+    // if it is time, record average data and reset.
+    if (shouldRecordLog_l()) {
+        sumToAverage_l();
+        mRecordLog.log(
+                "mean: %s, min: %s, max: %s, calculated from %zu samples in %0.4f second(s)",
+                toString(mSum, mDelimiterIdx, mFormatString.c_str()).c_str(),
+                toString(mMin, mDelimiterIdx, mFormatString.c_str()).c_str(),
+                toString(mMax, mDelimiterIdx, mFormatString.c_str()).c_str(),
+                mNumberOfSamples,
+                mNumberOfSecondsSinceFirstSample.count());
+        resetRecord_l();
+    }
+
+    // update stream average.
+    if (mNumberOfSamples++ == 0) {
+        mFirstSampleTimestamp = std::chrono::steady_clock::now();
+        for (size_t i = 0; i < mVectorSize; ++i) {
+            const float value = record[i];
+            mSum[i] += value;
+            mMax[i] = value;
+            mMin[i] = value;
+        }
+    } else {
+        for (size_t i = 0; i < mVectorSize; ++i) {
+            const float value = record[i];
+            mSum[i] += value;
+            mMax[i] = std::max(mMax[i], value);
+            mMin[i] = std::min(mMin[i], value);
+        }
+    }
+}
+
+bool VectorRecorder::shouldRecordLog_l() {
+    mNumberOfSecondsSinceFirstSample = std::chrono::duration_cast<std::chrono::seconds>(
+            std::chrono::steady_clock::now() - mFirstSampleTimestamp);
+    return mNumberOfSecondsSinceFirstSample >= mRecordThreshold;
+}
+
+void VectorRecorder::resetRecord_l() {
+    mSum.assign(mVectorSize, 0);
+    mMax.assign(mVectorSize, 0);
+    mMin.assign(mVectorSize, 0);
+    mNumberOfSamples = 0;
+    mNumberOfSecondsSinceFirstSample = std::chrono::seconds(0);
+}
+
+void VectorRecorder::sumToAverage_l() {
+    if (mNumberOfSamples == 0) return;
+    const float reciprocal = 1.f / mNumberOfSamples;
+    for (auto& p : mSum) {
+        p *= reciprocal;
+    }
+}
+
+}  // namespace android::media
diff --git a/media/libheadtracking/include/media/HeadTrackingProcessor.h b/media/libheadtracking/include/media/HeadTrackingProcessor.h
index 8ef8ab0..a3c1e97 100644
--- a/media/libheadtracking/include/media/HeadTrackingProcessor.h
+++ b/media/libheadtracking/include/media/HeadTrackingProcessor.h
@@ -19,6 +19,7 @@
 
 #include "HeadTrackingMode.h"
 #include "Pose.h"
+#include "PosePredictorType.h"
 #include "Twist.h"
 
 namespace android {
@@ -98,6 +99,11 @@
     virtual void recenter(bool recenterHead = true, bool recenterScreen = true) = 0;
 
     /**
+     * Set the predictor type.
+     */
+    virtual void setPosePredictorType(PosePredictorType type) = 0;
+
+    /**
      * Dump HeadTrackingProcessor parameters under caller lock.
      */
     virtual std::string toString_l(unsigned level) const = 0;
diff --git a/media/libheadtracking/include/media/PosePredictorType.h b/media/libheadtracking/include/media/PosePredictorType.h
new file mode 100644
index 0000000..aa76d5d
--- /dev/null
+++ b/media/libheadtracking/include/media/PosePredictorType.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <string>
+
+namespace android::media {
+
+enum class PosePredictorType {
+    /** Use best predictor determined from sensor input */
+    AUTO,
+
+    /** Use last pose for future prediction */
+    LAST,
+
+    /** Use twist angular velocity for future prediction */
+    TWIST,
+
+    /** Use weighted least squares history of prior poses (ignoring twist) */
+    LEAST_SQUARES,
+};
+
+std::string toString(PosePredictorType posePredictorType);
+bool isValidPosePredictorType(PosePredictorType posePredictorType);
+
+}  // namespace android::media
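
Example (not part of this patch): a sketch of how the declared helpers compose; the log tag/macro below is assumed, and the string returned by toString() is defined in the accompanying .cpp, not here.

    #define LOG_TAG "PosePredictorTypeExample"  // illustrative tag
    #include <utils/Log.h>
    #include <media/PosePredictorType.h>

    void logPredictorType(android::media::PosePredictorType type) {
        if (android::media::isValidPosePredictorType(type)) {
            ALOGV("predictor type: %s", android::media::toString(type).c_str());
        }
    }
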
diff --git a/media/libheadtracking/include/media/QuaternionUtil.h b/media/libheadtracking/include/media/QuaternionUtil.h
new file mode 100644
index 0000000..a711d17
--- /dev/null
+++ b/media/libheadtracking/include/media/QuaternionUtil.h
@@ -0,0 +1,297 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <algorithm>
+#include <cmath>
+
+#include <android-base/stringprintf.h>
+#include <Eigen/Geometry>
+#include <media/Pose.h>
+
+namespace android {
+namespace media {
+
+/**
+ * Converts a rotation vector to an equivalent quaternion.
+ * The rotation vector is given as a 3-vector whose direction represents the rotation axis and its
+ * magnitude the rotation angle (in radians) around that axis.
+ */
+Eigen::Quaternionf rotationVectorToQuaternion(const Eigen::Vector3f& rotationVector);
+
+/**
+ * Converts a quaternion to an equivalent rotation vector.
+ * The rotation vector is given as a 3-vector whose direction represents the rotation axis and its
+ * magnitude the rotation angle (in radians) around that axis.
+ */
+Eigen::Vector3f quaternionToRotationVector(const Eigen::Quaternionf& quaternion);
+
+/**
+ * Returns a quaternion representing a rotation around the X-axis with the given amount (in
+ * radians).
+ */
+Eigen::Quaternionf rotateX(float angle);
+
+/**
+ * Returns a quaternion representing a rotation around the Y-axis with the given amount (in
+ * radians).
+ */
+Eigen::Quaternionf rotateY(float angle);
+
+/**
+ * Returns a quaternion representing a rotation around the Z-axis with the given amount (in
+ * radians).
+ */
+Eigen::Quaternionf rotateZ(float angle);
+
+/**
+ * Compute separate roll, pitch, and yaw angles from a quaternion
+ *
+ * The roll, pitch, and yaw follow standard 3DOF virtual reality definitions
+ * with angles increasing counter-clockwise by the right hand rule.
+ *
+ * https://en.wikipedia.org/wiki/Six_degrees_of_freedom
+ *
+ * The roll, pitch, and yaw angles are calculated separately from the device frame
+ * rotation from the world frame.  This is not to be confused with the
+ * intrinsic Euler xyz roll, pitch, yaw 'nautical' angles.
+ *
+ * The input quaternion is the active rotation that transforms the
+ * World/Stage frame to the Head/Screen frame.
+ *
+ * The input quaternion may come from two principal sensors: DEVICE and HEADSET
+ * and are interpreted as below.
+ *
+ * DEVICE SENSOR
+ *
+ * The Android sensor stack assumes device coordinates along the x/y axes.
+ *
+ * https://developer.android.com/reference/android/hardware/SensorEvent#sensor.type_rotation_vector:
+ *
+ * Looking down from the clouds. Android Device coordinate system (not used)
+ *        DEVICE --> X (Y goes through top speaker towards the observer)
+ *           | Z
+ *           V
+ *         USER
+ *
+ * Internally within this library, we transform the device sensor coordinate
+ * system by rotating the coordinate system around the X axis by -M_PI/2.
+ * This aligns the device coordinate system to match that of the
+ * Head Tracking sensor (see below), should the user be facing the device in
+ * natural (phone == portrait, tablet == ?) orientation.
+ *
+ * Looking down from the clouds. Spatializer device frame.
+ *           Y
+ *           ^
+ *           |
+ *        DEVICE --> X (Z goes through top of the DEVICE towards the observer)
+ *
+ *         USER
+ *
+ * The reference world frame is the device in vertical
+ * natural (phone == portrait) orientation with the top pointing straight
+ * up from the ground and the front-to-back direction facing north.
+ * The world frame is presumed locally fixed by magnetic and gravitational reference.
+ *
+ * HEADSET SENSOR
+ * https://developer.android.com/reference/android/hardware/SensorEvent#sensor.type_head_tracker:
+ *
+ * Looking down from the clouds. Headset frame.
+ *           Y
+ *           ^
+ *           |
+ *         USER ---> X
+ *         (Z goes through the top of the USER head towards the observer)
+ *
+ * The Z axis goes from the neck to the top of the head, the X axis goes
+ * from the left ear to the right ear, the Y axis goes from the back of the
+ * head through the nose.
+ *
+ * Typically for a headset sensor, the X and Y axes have some arbitrary fixed
+ * reference.
+ *
+ * ROLL
+ * Roll is the counter-clockwise L/R motion around the Y axis (hence ZX plane).
+ * The right hand convention means the plane is ZX not XZ.
+ * This can be considered the azimuth angle in spherical coordinates
+ * with Pitch being the elevation angle.
+ *
+ * Roll has a range of -M_PI to M_PI radians.
+ *
+ * Rolling a device changes between portrait and landscape
+ * modes, and for L/R speakers will limit the amount of crosstalk cancellation.
+ * Roll increases as the device (if vertical like a coin) rolls from left to right.
+ *
+ * By this definition, Roll is less accurate when the device is flat
+ * on a table rather than standing on edge.
+ * When perfectly flat on the table, roll may report as 0, M_PI, or -M_PI
+ * due to the ambiguity / degeneracy of atan(0, 0) in this case (the device Y axis aligns with
+ * the world Z axis), but exactly flat rarely occurs.
+ *
+ * Roll for a headset is the angle the head is inclined to the right side
+ * (like sleeping).
+ *
+ * PITCH
+ * Pitch is the deviation of the surface normal (device Y) along the world Z axis
+ * (pointing away from the earth).
+ * This can be considered the elevation angle in spherical coordinates using
+ * Roll as the azimuth angle.
+ *
+ * Pitch for a device determines whether the device is "upright" or lying
+ * flat on the table (i.e. surface normal).  Pitch is 0 when upright, decreases
+ * as the device top moves away from the user to -M_PI/2 when lying down face up.
+ * Pitch increases from 0 to M_PI/2 when the device tilts towards the user, and is
+ * M_PI/2 radians when face down.
+ *
+ * Pitch for a headset is the user tilting the head/chin up or down,
+ * like nodding.
+ *
+ * Pitch has a range of -M_PI/2 to M_PI/2 radians.
+ *
+ * YAW
+ * Yaw is the rotational component along the earth's XY tangential plane,
+ * where the Z axis points radially away from the earth.
+ *
+ * Yaw has a range of -M_PI to M_PI radians.  If used for azimuth angle in
+ * spherical coordinates, the elevation angle may be derived from the Z axis.
+ *
+ * A positive increase means the phone is rotating from right to left
+ * when considered flat on the table.
+ * (headset: the user is rotating their head to look left).
+ * If left speaker or right earbud is pointing straight up or down,
+ * this value is imprecise and Pitch or Roll is a more useful measure.
+ *
+ * Yaw for a device is like spinning a vertical device along the axis of
+ * gravity, like spinning a coin.  Yaw increases as the coin / device
+ * spins from right to left, rotating around the Z axis.
+ *
+ * Yaw for a headset is the user turning the head to look left or right
+ * like shaking the head for no. Yaw is the primary angle for a binaural
+ * head tracking device.
+ *
+ * @param q input active rotation Eigen quaternion.
+ * @param pitch output set to pitch if not nullptr
+ * @param roll output set to roll if not nullptr
+ * @param yaw output set to yaw if not nullptr
+ * @return (DEBUG==true) a debug string with intermediate transformation matrix
+ *                       interpreted as the unit basis vectors.
+ */
+
+// When DEBUG is true, a debug string is returned for analysis.
+// Keeping the DEBUG option constexpr avoids unneeded rotation matrix computation.
+template <bool DEBUG = false>
+auto quaternionToAngles(const Eigen::Quaternionf& q, float *pitch, float *roll, float *yaw) {
+    /*
+     * The quaternion here is the active rotation that transforms from the world frame
+     * to the device frame: the observer remains in the world frame,
+     * and the device (frame) moves.
+     *
+     * We use this to map device coordinates to world coordinates.
+     *
+     * Device:  We transform the device right speaker (X == 1), top speaker (Z == 1),
+     * and surface inwards normal (Y == 1) positions to the world frame.
+     *
+     * Headset: We transform the headset right bud (X == 1), top (Z == 1) and
+     * nose normal (Y == 1) positions to the world frame.
+     *
+     * This is the same as the world frame coordinates of the
+     *  unit device vector in the X dimension (ux),
+     *  unit device vector in the Y dimension (uy),
+     *  unit device vector in the Z dimension (uz).
+     *
+     * Rather than doing the rotation on unit vectors individually,
+     * one can simply use the columns of the rotation matrix of
+     * the world-to-body quaternion, so the computation is exceptionally fast.
+     *
+     * Furthermore, Eigen inlines the "toRotationMatrix" method
+     * and we rely on unused expression removal for efficiency
+     * and any elements not used should not be computed.
+     *
+     * Side note: For applying a rotation to several points,
+     * it is more computationally efficient to extract and
+     * use the rotation matrix form than the quaternion.
+     * So use of the rotation matrix is good for many reasons.
+     */
+    const auto rotation = q.toRotationMatrix();
+
+    /*
+     * World location of unit vector right speaker assuming the phone is situated
+     * in natural (phone == portrait) mode.
+     * (headset: right bud).
+     *
+     * auto ux = q.rotation() * Eigen::Vector3f{1.f, 0.f, 0.f};
+     *         = rotation.col(0);
+     */
+    [[maybe_unused]] const auto ux_0 = rotation.coeff(0, 0);
+    [[maybe_unused]] const auto ux_1 = rotation.coeff(1, 0);
+    [[maybe_unused]] const auto ux_2 = rotation.coeff(2, 0);
+
+    [[maybe_unused]] std::string coordinates;
+    if constexpr (DEBUG) {
+        base::StringAppendF(&coordinates, "ux: %f %f %f", ux_0, ux_1, ux_2);
+    }
+
+    /*
+     * World location of screen-inwards normal assuming the phone is situated
+     * in natural (phone == portrait) mode.
+     * (headset: user nose).
+     *
+     * auto uy = q.rotation() * Eigen::Vector3f{0.f, 1.f, 0.f};
+     *         = rotation.col(1);
+     */
+    [[maybe_unused]] const auto uy_0 = rotation.coeff(0, 1);
+    [[maybe_unused]] const auto uy_1 = rotation.coeff(1, 1);
+    [[maybe_unused]] const auto uy_2 = rotation.coeff(2, 1);
+    if constexpr (DEBUG) {
+        base::StringAppendF(&coordinates, " uy: %f %f %f", uy_0, uy_1, uy_2);
+    }
+
+    /*
+     * World location of unit vector top speaker.
+     * (headset: top of head).
+     * auto uz = q.rotation() * Eigen::Vector3f{0.f, 0.f, 1.f};
+     *         = rotation.col(2);
+     */
+    [[maybe_unused]] const auto uz_0 = rotation.coeff(0, 2);
+    [[maybe_unused]] const auto uz_1 = rotation.coeff(1, 2);
+    [[maybe_unused]] const auto uz_2 = rotation.coeff(2, 2);
+    if constexpr (DEBUG) {
+        base::StringAppendF(&coordinates, " uz: %f %f %f", uz_0, uz_1, uz_2);
+    }
+
+    // pitch computed from nose world Z coordinate;
+    // hence independent of rotation around world Z.
+    if (pitch != nullptr) {
+        *pitch = asin(std::clamp(uy_2, -1.f, 1.f));
+    }
+
+    // roll computed from head/right world Z coordinate;
+    // hence independent of rotation around world Z.
+    if (roll != nullptr) {
+        // atan2 takes care of implicit scale normalization of Z, X.
+        *roll = -atan2(ux_2, uz_2);
+    }
+
+    // yaw computed from right ear angle projected onto world XY plane
+    // where world Z == 0.  This is the rotation around world Z.
+    if (yaw != nullptr) {
+        // atan2 takes care of implicit scale normalization of X, Y.
+        *yaw = atan2(ux_1, ux_0);
+    }
+
+    if constexpr (DEBUG) {
+        return coordinates;
+    }
+}
+
+}  // namespace media
+}  // namespace android
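
Example (not part of this patch): a minimal sketch of the angle extraction above, using the rotateZ() helper declared in this header; per the conventions documented above, a pure world-Z rotation shows up as yaw only.

    #include <cmath>
    #include <media/QuaternionUtil.h>

    void yawOnlyExample() {
        float pitch, roll, yaw;
        const Eigen::Quaternionf q = android::media::rotateZ(M_PI / 4);  // 45 deg about world Z
        android::media::quaternionToAngles(q, &pitch, &roll, &yaw);
        // Expect yaw ~= M_PI / 4, pitch ~= 0, roll ~= 0 (within float precision).
    }
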
diff --git a/media/libheadtracking/include/media/Twist.h b/media/libheadtracking/include/media/Twist.h
index 291cea3..51b83d8 100644
--- a/media/libheadtracking/include/media/Twist.h
+++ b/media/libheadtracking/include/media/Twist.h
@@ -66,6 +66,9 @@
         return Twist3f(mTranslationalVelocity / s, mRotationalVelocity / s);
     }
 
+    // Convert instance to a string representation.
+    std::string toString() const;
+
   private:
     Eigen::Vector3f mTranslationalVelocity;
     Eigen::Vector3f mRotationalVelocity;
diff --git a/media/libheadtracking/include/media/VectorRecorder.h b/media/libheadtracking/include/media/VectorRecorder.h
new file mode 100644
index 0000000..4103a7d
--- /dev/null
+++ b/media/libheadtracking/include/media/VectorRecorder.h
@@ -0,0 +1,151 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/stringprintf.h>
+#include <android-base/thread_annotations.h>
+#include <audio_utils/SimpleLog.h>
+#include <chrono>
+#include <math.h>
+#include <mutex>
+#include <vector>
+
+namespace android::media {
+
+/**
+ * VectorRecorder records a vector of floats computing the average, max, and min
+ * over given time periods.
+ *
+ * The class is thread-safe.
+ */
+class VectorRecorder {
+  public:
+    /**
+     * @param vectorSize is the size of the vector input.
+     *        If the input does not match this size, it is ignored.
+     * @param threshold is the time interval we bucket for averaging.
+     * @param maxLogLine is the number of lines we log.  At this
+     *        threshold, the oldest line will expire when the new line comes in.
+     * @param delimiterIdx is an optional array of delimiter indices that
+     *        replace the ',' with a ':'.  For example, if delimiterIdx = { 3 } then
+     *        [0.00, 0.00, 0.00, -1.29, -0.50, 15.27] would format as
+     *        [0.00, 0.00, 0.00 : -1.29, -0.50, 15.27].
+     * @param formatString is the sprintf format string used for the double-converted
+     *        data.
+     */
+    VectorRecorder(
+        size_t vectorSize, std::chrono::duration<double> threshold, int maxLogLine,
+            std::vector<size_t> delimiterIdx = {},
+            const std::string_view formatString = {})
+        : mVectorSize(vectorSize)
+        , mDelimiterIdx(std::move(delimiterIdx))
+        , mFormatString(formatString)
+        , mRecordLog(maxLogLine)
+        , mRecordThreshold(threshold)
+    {
+        resetRecord_l();  // OK to call - we're in the constructor.
+    }
+
+    /** Convert recorded vector data to string with level indentation */
+    std::string toString(size_t indent) const;
+
+    /**
+     * @brief Record a vector of floats.
+     *
+     * @param record a vector of floats.
+     */
+    void record(const std::vector<float>& record);
+
+    /**
+     * Format vector to a string, [0.00, 0.00, 0.00, -1.29, -0.50, 15.27].
+     *
+     * @param delimiterIdx is an optional array of delimiter indices that
+     *        replace the ',' with a ':'.  For example if delimiterIdx = { 3 } then
+     *        the above example would format as [0.00, 0.00, 0.00 : -1.29, -0.50, 15.27].
+     * @param formatString is the sprintf format string for the double converted data
+     *        to use.
+     */
+    template <typename T>
+    static std::string toString(const std::vector<T>& record,
+            const std::vector<size_t>& delimiterIdx = {},
+            const char * const formatString = nullptr) {
+        if (record.size() == 0) {
+            return "[]";
+        }
+
+        std::string ss = "[";
+        auto nextDelimiter = delimiterIdx.begin();
+        for (size_t i = 0; i < record.size(); ++i) {
+            if (i > 0) {
+                if (nextDelimiter != delimiterIdx.end()
+                        && *nextDelimiter <= i) {
+                     ss.append(" : ");
+                     ++nextDelimiter;
+                } else {
+                    ss.append(", ");
+                }
+            }
+            if (formatString != nullptr && *formatString) {
+                base::StringAppendF(&ss, formatString, static_cast<double>(record[i]));
+            } else {
+                base::StringAppendF(&ss, "%5.2lf", static_cast<double>(record[i]));
+            }
+        }
+        ss.append("]");
+        return ss;
+    }
+
+  private:
+    static constexpr int mMaxLocalLogLine = 10;
+
+    const size_t mVectorSize;
+    const std::vector<size_t> mDelimiterIdx;
+    const std::string mFormatString;
+
+    // Local log for historical vector data.
+    // Locked internally, so does not need mutex below.
+    SimpleLog mRecordLog{mMaxLocalLogLine};
+
+    std::mutex mLock;
+
+    // Time threshold to record vectors in the local log.
+    // Vector data will be recorded into log at least every mRecordThreshold.
+    std::chrono::duration<double> mRecordThreshold GUARDED_BY(mLock);
+
+    // Number of seconds since first sample in mSum.
+    std::chrono::duration<double> mNumberOfSecondsSinceFirstSample GUARDED_BY(mLock);
+
+    // Timestamp of first sample recorded in mSum.
+    std::chrono::time_point<std::chrono::steady_clock> mFirstSampleTimestamp GUARDED_BY(mLock);
+
+    // Number of samples in mSum.
+    size_t mNumberOfSamples GUARDED_BY(mLock) = 0;
+
+    std::vector<double> mSum GUARDED_BY(mLock);
+    std::vector<float> mMax GUARDED_BY(mLock);
+    std::vector<float> mMin GUARDED_BY(mLock);
+
+    // Computes mNumberOfSecondsSinceFirstSample, returns true if time to record.
+    bool shouldRecordLog_l() REQUIRES(mLock);
+
+    // Resets the running mNumberOfSamples, mSum, mMax, mMin.
+    void resetRecord_l() REQUIRES(mLock);
+
+    // Convert mSum to an average.
+    void sumToAverage_l() REQUIRES(mLock);
+};  // VectorRecorder
+
+}  // namespace android::media
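
Example (not part of this patch): a usage sketch of VectorRecorder; the vector size, interval, and delimiter below are arbitrary.

    #include <media/VectorRecorder.h>

    void recorderSketch() {
        // Average 6-element vectors over 1-second buckets, keep 10 log lines,
        // and print elements 0..2 and 3..5 as ':'-separated groups.
        android::media::VectorRecorder recorder(
                6 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
                {3} /* delimiterIdx */);
        recorder.record({0.f, 0.f, 0.f, -1.29f, -0.50f, 15.27f});
        const std::string dump = recorder.toString(2 /* indent */);
        (void)dump;
    }
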
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 85768bd..5aa9adc 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -898,10 +898,9 @@
         }
     }
 
-    for (size_t cameraId = 0; cameraId < mCameraIds.size(); ++cameraId) {
+    for (size_t refIndex = 0; refIndex < mCameraIds.size(); ++refIndex) {
+        const int cameraId = mCameraIds[refIndex];
         for (size_t j = 0; j < kNumRequiredProfiles; ++j) {
-            int refIndex = getRequiredProfileRefIndex(cameraId);
-            CHECK(refIndex != -1);
             RequiredProfileRefInfo *info =
                     &mRequiredProfileRefs[refIndex].mRefs[j];
 
@@ -931,14 +930,14 @@
 
                 int index = getCamcorderProfileIndex(cameraId, profile->mQuality);
                 if (index != -1) {
-                    ALOGV("Profile quality %d for camera %zu already exists",
+                    ALOGV("Profile quality %d for camera %d already exists",
                         profile->mQuality, cameraId);
                     CHECK(index == refIndex);
                     continue;
                 }
 
                 // Insert the new profile
-                ALOGV("Add a profile: quality %d=>%d for camera %zu",
+                ALOGV("Add a profile: quality %d=>%d for camera %d",
                         mCamcorderProfiles[info->mRefProfileIndex]->mQuality,
                         profile->mQuality, cameraId);
 
diff --git a/media/libmediahelper/AudioParameter.cpp b/media/libmediahelper/AudioParameter.cpp
index 382a920..9a8156e 100644
--- a/media/libmediahelper/AudioParameter.cpp
+++ b/media/libmediahelper/AudioParameter.cpp
@@ -61,6 +61,12 @@
         AUDIO_PARAMETER_DEVICE_ADDITIONAL_OUTPUT_DELAY;
 const char * const AudioParameter::keyMaxAdditionalOutputDeviceDelay =
         AUDIO_PARAMETER_DEVICE_MAX_ADDITIONAL_OUTPUT_DELAY;
+const char * const AudioParameter::keyOffloadCodecAverageBitRate = AUDIO_OFFLOAD_CODEC_AVG_BIT_RATE;
+const char * const AudioParameter::keyOffloadCodecSampleRate = AUDIO_OFFLOAD_CODEC_SAMPLE_RATE;
+const char * const AudioParameter::keyOffloadCodecChannels = AUDIO_OFFLOAD_CODEC_NUM_CHANNEL;
+const char * const AudioParameter::keyOffloadCodecDelaySamples = AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES;
+const char * const AudioParameter::keyOffloadCodecPaddingSamples =
+        AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES;
 
 AudioParameter::AudioParameter(const String8& keyValuePairs)
 {
@@ -226,4 +232,9 @@
     }
 }
 
+bool AudioParameter::containsKey(const String8& key) const
+{
+    return mParameters.indexOfKey(key) >= 0;
+}
+
 } // namespace android
diff --git a/media/libmediahelper/include/media/AudioParameter.h b/media/libmediahelper/include/media/AudioParameter.h
index 9a6ca8a..41aff7c 100644
--- a/media/libmediahelper/include/media/AudioParameter.h
+++ b/media/libmediahelper/include/media/AudioParameter.h
@@ -107,6 +107,12 @@
     static const char * const keyAdditionalOutputDeviceDelay;
     static const char * const keyMaxAdditionalOutputDeviceDelay;
 
+    static const char * const keyOffloadCodecAverageBitRate;
+    static const char * const keyOffloadCodecSampleRate;
+    static const char * const keyOffloadCodecChannels;
+    static const char * const keyOffloadCodecDelaySamples;
+    static const char * const keyOffloadCodecPaddingSamples;
+
     String8 toString() const { return toStringImpl(true); }
     String8 keysToString() const { return toStringImpl(false); }
 
@@ -117,6 +123,12 @@
 
     status_t remove(const String8& key);
 
+    status_t get(const String8& key, int& value) const {
+        return getInt(key, value);
+    }
+    status_t get(const String8& key, float& value) const {
+        return getFloat(key, value);
+    }
     status_t get(const String8& key, String8& value) const;
     status_t getInt(const String8& key, int& value) const;
     status_t getFloat(const String8& key, float& value) const;
@@ -125,6 +137,7 @@
 
     size_t size() const { return mParameters.size(); }
 
+    bool containsKey(const String8& key) const;
 private:
     String8 mKeyValuePairs;
     KeyedVector <String8, String8> mParameters;
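
Example (not part of this patch): a sketch of how the new containsKey() and get() overloads combine with the offload codec keys added above; error handling is elided.

    #include <media/AudioParameter.h>

    void offloadDelaySketch() {
        android::AudioParameter params;
        const android::String8 delayKey(
                android::AudioParameter::keyOffloadCodecDelaySamples);
        params.addInt(delayKey, 312);  // illustrative value

        if (params.containsKey(delayKey)) {
            int delaySamples = 0;
            // get() dispatches to getInt() via the int overload above.
            if (params.get(delayKey, delaySamples) == android::NO_ERROR) {
                // delaySamples == 312
            }
        }
    }
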
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 18bbf7b..ec79b99 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -129,6 +129,7 @@
       mRTPCVOExtMap(-1),
       mRTPCVODegrees(0),
       mRTPSockDscp(0),
+      mRTPSockOptEcn(0),
       mRTPSockNetwork(0),
       mLastSeqNo(0),
       mStarted(false),
@@ -910,6 +911,13 @@
     return OK;
 }
 
+status_t StagefrightRecorder::setParamRtpEcn(int32_t ecn) {
+    ALOGV("setParamRtpEcn: %d", ecn);
+
+    mRTPSockOptEcn = ecn;
+    return OK;
+}
+
 status_t StagefrightRecorder::requestIDRFrame() {
     status_t ret = BAD_VALUE;
     if (mVideoEncoderSource != NULL) {
@@ -1091,6 +1099,11 @@
         if (safe_strtoi32(value.string(), &dscp)) {
             return setParamRtpDscp(dscp);
         }
+    } else if (key == "rtp-param-set-socket-ecn") {
+        int32_t targetEcn;
+        if (safe_strtoi32(value.string(), &targetEcn)) {
+            return setParamRtpEcn(targetEcn);
+        }
     } else if (key == "rtp-param-set-socket-network") {
         int64_t networkHandle;
         if (safe_strtoi64(value.string(), &networkHandle)) {
@@ -1272,6 +1285,9 @@
             if (mRTPSockDscp > 0) {
                 meta->setInt32(kKeyRtpDscp, mRTPSockDscp);
             }
+            if (mRTPSockOptEcn > 0) {
+                meta->setInt32(kKeyRtpEcn, mRTPSockOptEcn);
+            }
 
             status = mWriter->start(meta.get());
             break;
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 67c6e20..0b6a5bb 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -153,6 +153,7 @@
     int32_t mRTPCVOExtMap;
     int32_t mRTPCVODegrees;
     int32_t mRTPSockDscp;
+    int32_t mRTPSockOptEcn;
     int64_t mRTPSockNetwork;
     uint32_t mLastSeqNo;
 
@@ -247,6 +248,7 @@
     status_t setRTPCVOExtMap(int32_t extmap);
     status_t setRTPCVODegrees(int32_t cvoDegrees);
     status_t setParamRtpDscp(int32_t dscp);
+    status_t setParamRtpEcn(int32_t ecn);
     status_t setSocketNetwork(int64_t networkHandle);
     status_t requestIDRFrame();
     void clipVideoBitRate();
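
Example (not part of this patch): a sketch of how the new "rtp-param-set-socket-ecn" key could be passed down through the generic parameter string path handled above; any value > 0 enables the socket option (see the mRTPSockOptEcn > 0 check earlier in this patch).

    #include <media/mediarecorder.h>

    void enableRtpEcn(const android::sp<android::MediaRecorder>& recorder) {
        // Parsed by setParameter() and forwarded to setParamRtpEcn().
        recorder->setParameters(android::String8("rtp-param-set-socket-ecn=2"));
    }
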
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
index 6a17972..fd03150 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -115,7 +115,7 @@
 
         int sockRtp, sockRtcp;
         ARTPConnection::MakeRTPSocketPair(&sockRtp, &sockRtcp, info->mLocalIp, info->mRemoteIp,
-                info->mLocalPort, info->mRemotePort, info->mSocketNetwork);
+                info->mLocalPort, info->mRemotePort, info->mSocketNetwork, info->mRtpSockOptEcn);
 
         sp<AMessage> notify = new AMessage('accu', this);
 
@@ -125,6 +125,8 @@
         mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
         mRTPConn->setSelfID(info->mSelfID);
         mRTPConn->setStaticJitterTimeMs(info->mJbTimeMs);
+        mRTPConn->setRtpSockOptEcn(info->mRtpSockOptEcn);
+        mRTPConn->setIsIPv6(info->mLocalIp);
 
         unsigned long PT;
         AString formatDesc, formatParams;
@@ -719,6 +721,8 @@
     } else if (key == "rtp-param-set-socket-network") {
         int64_t networkHandle = atoll(value);
         setSocketNetwork(networkHandle);
+    } else if (key == "rtp-param-set-socket-ecn") {
+        info->mRtpSockOptEcn = atoi(value);
     } else if (key == "rtp-param-jitter-buffer-time") {
         // clamping min at 40, max at 3000
         info->mJbTimeMs = std::min(std::max(40, atoi(value)), 3000);
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
index 7d9bb8f..b2afe86 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
@@ -121,6 +121,8 @@
         uint32_t mSelfID;
         /* extmap:<value> for CVO will be set to here */
         int32_t mCVOExtMap;
+        /* To check whether ECN is supported or not */
+        int32_t mRtpSockOptEcn;
 
         /* a copy of TrackInfo in RTSPSource */
         sp<AnotherPacketSource> mSource;
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index d6028d9..ccbe995 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -6751,6 +6751,8 @@
         info->checkReadFence("onOutputBufferDrained before queueBuffer");
         err = mCodec->mNativeWindow->queueBuffer(
                     mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
+        // TODO(b/266211548): Poll the native window for rendered buffers here, since the
+        // frame event history delta is retrieved when queueing buffers.
         info->mFenceFd = -1;
         if (err == OK) {
             info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
diff --git a/media/libstagefright/ACodecBufferChannel.cpp b/media/libstagefright/ACodecBufferChannel.cpp
index 88b15ae..529ae97 100644
--- a/media/libstagefright/ACodecBufferChannel.cpp
+++ b/media/libstagefright/ACodecBufferChannel.cpp
@@ -477,6 +477,10 @@
     return OK;
 }
 
+void ACodecBufferChannel::pollForRenderedBuffers() {
+    // TODO(b/266211548): Poll the native window for rendered buffers.
+}
+
 status_t ACodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
     std::shared_ptr<const std::vector<const BufferInfo>> array(
             std::atomic_load(&mInputBuffers));
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 9607425..842327d 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -150,7 +150,7 @@
 
     if (camera == 0) {
         mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true);
         if (mCamera == 0) return -EBUSY;
         mCameraFlags &= ~FLAGS_HOT_CAMERA;
     } else {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index ebbbb5f..9b8cc5e 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -88,6 +88,7 @@
 using aidl::android::media::BnResourceManagerClient;
 using aidl::android::media::IResourceManagerClient;
 using aidl::android::media::IResourceManagerService;
+using aidl::android::media::ClientInfoParcel;
 
 // key for media statistics
 static const char *kCodecKeyName = "codec";
@@ -209,8 +210,8 @@
 ////////////////////////////////////////////////////////////////////////////////
 
 struct ResourceManagerClient : public BnResourceManagerClient {
-    explicit ResourceManagerClient(MediaCodec* codec, int32_t pid) :
-            mMediaCodec(codec), mPid(pid) {}
+    explicit ResourceManagerClient(MediaCodec* codec, int32_t pid, int32_t uid) :
+            mMediaCodec(codec), mPid(pid), mUid(uid) {}
 
     Status reclaimResource(bool* _aidl_return) override {
         sp<MediaCodec> codec = mMediaCodec.promote();
@@ -222,7 +223,10 @@
             if (service == nullptr) {
                 ALOGW("MediaCodec::ResourceManagerClient unable to find ResourceManagerService");
             }
-            service->removeClient(mPid, getId(this));
+            ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(this)};
+            service->removeClient(clientInfo);
             *_aidl_return = true;
             return Status::ok();
         }
@@ -260,6 +264,7 @@
 private:
     wp<MediaCodec> mMediaCodec;
     int32_t mPid;
+    int32_t mUid;
 
     DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
 };
@@ -285,10 +290,15 @@
     void markClientForPendingRemoval();
     bool reclaimResource(const std::vector<MediaResourceParcel> &resources);
 
+    inline void setCodecName(const char* name) {
+        mCodecName = name;
+    }
+
 private:
     Mutex mLock;
     pid_t mPid;
     uid_t mUid;
+    std::string mCodecName;
     std::shared_ptr<IResourceManagerService> mService;
     std::shared_ptr<IResourceManagerClient> mClient;
     ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
@@ -353,8 +363,11 @@
 }
 
 //static
-Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
-std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;
+// these are no_destroy to keep them from being destroyed at process exit
+// where some thread calls exit() while other threads are still running.
+// see b/194783918
+[[clang::no_destroy]] Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
+[[clang::no_destroy]] std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;
 
 //static
 void MediaCodec::ResourceManagerServiceProxy::addCookie(void* cookie) {
@@ -392,7 +405,11 @@
     if (mService == nullptr) {
         return;
     }
-    mService->addResource(mPid, mUid, getId(mClient), mClient, resources);
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(mClient),
+                                .name = mCodecName};
+    mService->addResource(clientInfo, mClient, resources);
 }
 
 void MediaCodec::ResourceManagerServiceProxy::removeResource(
@@ -404,7 +421,11 @@
     if (mService == nullptr) {
         return;
     }
-    mService->removeResource(mPid, getId(mClient), resources);
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(mClient),
+                                .name = mCodecName};
+    mService->removeResource(clientInfo, resources);
 }
 
 void MediaCodec::ResourceManagerServiceProxy::removeClient() {
@@ -412,7 +433,11 @@
     if (mService == nullptr) {
         return;
     }
-    mService->removeClient(mPid, getId(mClient));
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(mClient),
+                                .name = mCodecName};
+    mService->removeClient(clientInfo);
 }
 
 void MediaCodec::ResourceManagerServiceProxy::markClientForPendingRemoval() {
@@ -420,7 +445,11 @@
     if (mService == nullptr) {
         return;
     }
-    mService->markClientForPendingRemoval(mPid, getId(mClient));
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(mClient),
+                                .name = mCodecName};
+    mService->markClientForPendingRemoval(clientInfo);
 }
 
 bool MediaCodec::ResourceManagerServiceProxy::reclaimResource(
@@ -430,7 +459,11 @@
         return false;
     }
     bool success;
-    Status status = mService->reclaimResource(mPid, resources, &success);
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(mClient),
+                                .name = mCodecName};
+    Status status = mService->reclaimResource(clientInfo, resources, &success);
     return status.isOk() && success;
 }
 
@@ -500,6 +533,7 @@
     kWhatOutputFramesRendered = 'outR',
     kWhatOutputBuffersChanged = 'outC',
     kWhatFirstTunnelFrameReady = 'ftfR',
+    kWhatPollForRenderedBuffers = 'plrb',
 };
 
 class BufferCallback : public CodecBase::BufferCallback {
@@ -806,9 +840,7 @@
       mWidth(0),
       mHeight(0),
       mRotationDegrees(0),
-      mConfigColorTransfer(-1),
-      mHDRStaticInfo(false),
-      mHDR10PlusInfo(false),
+      mHdrInfoFlags(0),
       mDequeueInputTimeoutGeneration(0),
       mDequeueInputReplyID(0),
       mDequeueOutputTimeoutGeneration(0),
@@ -835,7 +867,7 @@
       mGetCodecBase(getCodecBase),
       mGetCodecInfo(getCodecInfo) {
     mResourceManagerProxy = new ResourceManagerServiceProxy(pid, uid,
-            ::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid));
+            ::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid, uid));
     if (!mGetCodecBase) {
         mGetCodecBase = [](const AString &name, const char *owner) {
             return GetCodecBase(name, owner);
@@ -967,29 +999,73 @@
                               mIndexOfFirstFrameWhenLowLatencyOn);
     }
 
-    mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo, mHDRStaticInfo ? 1 : 0);
-    mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo, mHDR10PlusInfo ? 1 : 0);
 #if 0
     // enable for short term, only while debugging
     updateEphemeralMediametrics(mMetricsHandle);
 #endif
 }
 
-void MediaCodec::updateHDRFormatMetric() {
+void MediaCodec::updateHdrMetrics(bool isConfig) {
+    if ((mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) || mMetricsHandle == 0) {
+        return;
+    }
+
+    int32_t colorStandard = -1;
+    if (mOutputFormat->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
+        mediametrics_setInt32(mMetricsHandle,
+                isConfig ? kCodecConfigColorStandard : kCodecParsedColorStandard, colorStandard);
+    }
+    int32_t colorRange = -1;
+    if (mOutputFormat->findInt32(KEY_COLOR_RANGE, &colorRange)) {
+        mediametrics_setInt32(mMetricsHandle,
+                isConfig ? kCodecConfigColorRange : kCodecParsedColorRange, colorRange);
+    }
+    int32_t colorTransfer = -1;
+    if (mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
+        mediametrics_setInt32(mMetricsHandle,
+                isConfig ? kCodecConfigColorTransfer : kCodecParsedColorTransfer, colorTransfer);
+    }
+    HDRStaticInfo info;
+    if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)
+            && ColorUtils::isHDRStaticInfoValid(&info)) {
+        mHdrInfoFlags |= kFlagHasHdrStaticInfo;
+    }
+    mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo,
+            (mHdrInfoFlags & kFlagHasHdrStaticInfo) ? 1 : 0);
+    sp<ABuffer> hdr10PlusInfo;
+    if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
+            && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
+        mHdrInfoFlags |= kFlagHasHdr10PlusInfo;
+    }
+    mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo,
+            (mHdrInfoFlags & kFlagHasHdr10PlusInfo) ? 1 : 0);
+
+    // hdr format
+    sp<AMessage> codedFormat = (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
+
+    AString mime;
     int32_t profile = -1;
-    AString mediaType;
-    if (mOutputFormat->findInt32(KEY_PROFILE, &profile)
-            && mOutputFormat->findString("mime", &mediaType)) {
-        hdr_format hdrFormat = getHDRFormat(profile, mConfigColorTransfer, mediaType);
+
+    if (codedFormat->findString("mime", &mime)
+            && codedFormat->findInt32(KEY_PROFILE, &profile)
+            && colorTransfer != -1) {
+        hdr_format hdrFormat = getHdrFormat(mime, profile, colorTransfer);
         mediametrics_setInt32(mMetricsHandle, kCodecHDRFormat, static_cast<int>(hdrFormat));
     }
 }
 
-hdr_format MediaCodec::getHDRFormat(const int32_t profile, const int32_t transfer,
-        const AString &mediaType) {
-    switch (transfer) {
+hdr_format MediaCodec::getHdrFormat(const AString &mime, const int32_t profile,
+        const int32_t colorTransfer) {
+    return (mFlags & kFlagIsEncoder)
+            ? getHdrFormatForEncoder(mime, profile, colorTransfer)
+            : getHdrFormatForDecoder(mime, profile, colorTransfer);
+}
+
+hdr_format MediaCodec::getHdrFormatForEncoder(const AString &mime, const int32_t profile,
+        const int32_t colorTransfer) {
+    switch (colorTransfer) {
         case COLOR_TRANSFER_ST2084:
-            if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
+            if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
                 switch (profile) {
                     case VP9Profile2HDR:
                         return HDR_FORMAT_HDR10;
@@ -998,7 +1074,7 @@
                     default:
                         return HDR_FORMAT_NONE;
                 }
-            } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
+            } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
                 switch (profile) {
                     case AV1ProfileMain10HDR10:
                         return HDR_FORMAT_HDR10;
@@ -1007,7 +1083,7 @@
                     default:
                         return HDR_FORMAT_NONE;
                 }
-            } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
+            } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
                 switch (profile) {
                     case HEVCProfileMain10HDR10:
                         return HDR_FORMAT_HDR10;
@@ -1020,7 +1096,7 @@
                 return HDR_FORMAT_NONE;
             }
         case COLOR_TRANSFER_HLG:
-            if (!mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+            if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
                 return HDR_FORMAT_HLG;
             } else {
                 // TODO: DOLBY format
@@ -1031,6 +1107,47 @@
     }
 }
 
+hdr_format MediaCodec::getHdrFormatForDecoder(const AString &mime, const int32_t profile,
+        const int32_t colorTransfer) {
+    switch (colorTransfer) {
+        case COLOR_TRANSFER_ST2084:
+            if (!(mHdrInfoFlags & kFlagHasHdrStaticInfo) || !profileSupport10Bits(mime, profile)) {
+                return HDR_FORMAT_NONE;
+            }
+            return mHdrInfoFlags & kFlagHasHdr10PlusInfo ? HDR_FORMAT_HDR10PLUS : HDR_FORMAT_HDR10;
+        case COLOR_TRANSFER_HLG:
+            if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+                return HDR_FORMAT_HLG;
+            }
+            // TODO: DOLBY format
+    }
+    return HDR_FORMAT_NONE;
+}
+
+bool MediaCodec::profileSupport10Bits(const AString &mime, const int32_t profile) {
+    if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
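+        // all AV1 profiles (Main, High, Professional) include 10-bit support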
+        return true;
+    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
+        switch (profile) {
+            case VP9Profile2:
+            case VP9Profile3:
+            case VP9Profile2HDR:
+            case VP9Profile3HDR:
+            case VP9Profile2HDR10Plus:
+            case VP9Profile3HDR10Plus:
+                return true;
+        }
+    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
+        switch (profile) {
+            case HEVCProfileMain10:
+            case HEVCProfileMain10HDR10:
+            case HEVCProfileMain10HDR10Plus:
+                return true;
+        }
+    }
+    return false;
+}
+
 
 // called to update info being passed back via getMetrics(), which is a
 // unique copy for that call, no concurrent access worries.
@@ -1080,6 +1197,7 @@
 
     // ensure mutex while we do our own work
     Mutex::Autolock _lock(mMetricsLock);
+    mHdrInfoFlags = 0;
     if (mMetricsHandle != 0) {
         if (mediametrics_count(mMetricsHandle) > 0) {
             mediametrics_selfRecord(mMetricsHandle);
@@ -1606,6 +1724,11 @@
 
     std::vector<MediaResourceParcel> resources;
     resources.push_back(MediaResource::CodecResource(secureCodec, toMediaResourceSubType(mDomain)));
+
+    // If the ComponentName is not set yet, use the name passed by the user.
+    if (mComponentName.empty()) {
+        mResourceManagerProxy->setCodecName(name.c_str());
+    }
     for (int i = 0; i <= kMaxRetry; ++i) {
         if (i > 0) {
             // Don't try to reclaim resource for the first time.
@@ -1732,24 +1855,6 @@
                     mediametrics_setInt32(nextMetricsHandle, kCodecPriority, priority);
                 }
             }
-            int32_t colorStandard = -1;
-            if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorStandard, colorStandard);
-            }
-            int32_t colorRange = -1;
-            if (format->findInt32(KEY_COLOR_RANGE, &colorRange)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorRange, colorRange);
-            }
-            int32_t colorTransfer = -1;
-            if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
-                mConfigColorTransfer = colorTransfer;
-                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorTransfer, colorTransfer);
-            }
-            HDRStaticInfo info;
-            if (ColorUtils::getHDRStaticInfoFromFormat(format, &info)
-                    && ColorUtils::isHDRStaticInfoValid(&info)) {
-                mHDRStaticInfo = true;
-            }
         }
 
         // Prevent possible integer overflow in downstream code.
@@ -3387,6 +3492,8 @@
                     if (mComponentName.c_str()) {
                         mediametrics_setCString(mMetricsHandle, kCodecCodec,
                                                 mComponentName.c_str());
+                        // Update the codec name.
+                        mResourceManagerProxy->setCodecName(mComponentName.c_str());
                     }
 
                     const char *owner = mCodecInfo ? mCodecInfo->getOwnerName() : "";
@@ -3436,8 +3543,6 @@
                     CHECK(msg->findMessage("input-format", &mInputFormat));
                     CHECK(msg->findMessage("output-format", &mOutputFormat));
 
-                    updateHDRFormatMetric();
-
                     // limit to confirming the opt-in behavior to minimize any behavioral change
                     if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
                         // signal frame dropping mode in the input format as this may also be
@@ -3480,6 +3585,7 @@
                         if (interestingFormat->findInt32("level", &level)) {
                             mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
                         }
+                        updateHdrMetrics(true /* isConfig */);
                         // bitrate and bitrate mode, encoder only
                         if (mFlags & kFlagIsEncoder) {
                             // encoder specific values
@@ -3519,7 +3625,6 @@
                                 mComponentName.c_str(),
                                 mInputFormat->debugString(4).c_str(),
                                 mOutputFormat->debugString(4).c_str());
-                        updateHDRFormatMetric();
                         CHECK(obj != NULL);
                         response->setObject("input-surface", obj);
                         mHaveInputSurface = true;
@@ -3544,7 +3649,6 @@
                     if (!msg->findInt32("err", &err)) {
                         CHECK(msg->findMessage("input-format", &mInputFormat));
                         CHECK(msg->findMessage("output-format", &mOutputFormat));
-                        updateHDRFormatMetric();
                         mHaveInputSurface = true;
                     } else {
                         response->setInt32("err", err);
@@ -4541,6 +4645,14 @@
             break;
         }
 
+        case kWhatPollForRenderedBuffers:
+        {
+            if (isExecuting()) {
+                mBufferChannel->pollForRenderedBuffers();
+            }
+            break;
+        }
+
         case kWhatSignalEndOfInputStream:
         {
             if (!isExecuting() || !mHaveInputSurface) {
@@ -4757,7 +4869,6 @@
         buffer->meta()->setObject("changedKeys", changedKeys);
     }
     mOutputFormat = format;
-    updateHDRFormatMetric();
     mapFormat(mComponentName, format, nullptr, true);
     ALOGV("[%s] output format changed to: %s",
             mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
@@ -4783,9 +4894,6 @@
             HDRStaticInfo info;
             if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
                 setNativeWindowHdrMetadata(mSurface.get(), &info);
-                if (ColorUtils::isHDRStaticInfoValid(&info)) {
-                    mHDRStaticInfo = true;
-                }
             }
         }
 
@@ -4794,7 +4902,6 @@
                 && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
             native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
                     hdr10PlusInfo->size(), hdr10PlusInfo->data());
-            mHDR10PlusInfo = true;
         }
 
         if (mime.startsWithIgnoreCase("video/")) {
@@ -4840,21 +4947,8 @@
         }
     }
 
-    if (mMetricsHandle != 0) {
-        int32_t colorStandard = -1;
-        if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorStandard, colorStandard);
-        }
-        int32_t colorRange = -1;
-        if (format->findInt32( KEY_COLOR_RANGE, &colorRange)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorRange, colorRange);
-        }
-        int32_t colorTransfer = -1;
-        if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorTransfer, colorTransfer);
-        }
-    }
-}
+    updateHdrMetrics(false /* isConfig */);
+}
 
 void MediaCodec::extractCSD(const sp<AMessage> &format) {
     mCSD.clear();
@@ -5272,7 +5366,6 @@
 size_t MediaCodec::CreateFramesRenderedMessage(
         const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
     size_t index = 0;
-
     for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
             it != done.cend(); ++it) {
         if (it->getRenderTimeNs() < 0) {
@@ -5321,11 +5414,13 @@
         int64_t mediaTimeUs = -1;
         buffer->meta()->findInt64("timeUs", &mediaTimeUs);
 
+        bool noRenderTime = false;
         int64_t renderTimeNs = 0;
         if (!msg->findInt64("timestampNs", &renderTimeNs)) {
             // use media timestamp if client did not request a specific render timestamp
             ALOGV("using buffer PTS of %lld", (long long)mediaTimeUs);
             renderTimeNs = mediaTimeUs * 1000;
+            noRenderTime = true;
         }
 
         if (mSoftRenderer != NULL) {
@@ -5343,6 +5438,29 @@
                 }
             }
         }
+
+        // If rendering to the screen, schedule a poll in the near future to check whether this
+        // frame was actually rendered; the result seeds the onFrameRendered callbacks.
+        if (mIsSurfaceToScreen) {
+            // can't initialize this in the constructor because the Looper parent class needs to be
+            // initialized first
+            if (mMsgPollForRenderedBuffers == nullptr) {
+                mMsgPollForRenderedBuffers = new AMessage(kWhatPollForRenderedBuffers, this);
+            }
+            // Schedule the poll to occur 100ms after the render time - should be safe for
+            // determining if the frame was ever rendered. If no render time was specified, the
+            // presentation timestamp is used instead, which almost certainly occurs in the past,
+            // since it's almost always a zero-based offset from the start of the stream. In these
+            // scenarios, we expect the frame to be rendered with no delay.
+            int64_t delayUs = noRenderTime ? 0 : renderTimeNs / 1000 - ALooper::GetNowUs();
+            delayUs += 100 * 1000; /* 100ms in microseconds */
+            status_t err =
+                    mMsgPollForRenderedBuffers->postUnique(/* token= */ mMsgPollForRenderedBuffers,
+                                                           delayUs);
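+            // postUnique() replaces any message previously posted with the same token,
+            // so at most one poll message is pending at a time.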
+            if (err != OK) {
+                ALOGE("unexpected failure to post pollForRenderedBuffers: %d", err);
+            }
+        }
         status_t err = mBufferChannel->renderOutputBuffer(buffer, renderTimeNs);
 
         if (err == NO_INIT) {
@@ -5584,6 +5702,9 @@
 }
 
 status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
+    if (mState == UNINITIALIZED || mState == INITIALIZING) {
+        return NO_INIT;
+    }
     updateLowLatency(params);
     mapFormat(mComponentName, params, nullptr, false);
     updateTunnelPeek(params);
diff --git a/media/libstagefright/OWNERS b/media/libstagefright/OWNERS
index e67496e..f02e168 100644
--- a/media/libstagefright/OWNERS
+++ b/media/libstagefright/OWNERS
@@ -7,3 +7,5 @@
 
 # go/android-fwk-media-solutions for info on areas of ownership.
 include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
+
+per-file Camera*.cpp = file:/camera/OWNERS
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index c5b5199..863177d 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -798,6 +798,8 @@
         { "dvb-audio-description", kKeyDvbAudioDescription},
         { "dvb-teletext-magazine-number", kKeyDvbTeletextMagazineNumber},
         { "dvb-teletext-page-number", kKeyDvbTeletextPageNumber},
+        { "profile", kKeyAudioProfile },
+        { "level", kKeyAudioLevel },
     }
 };
 
diff --git a/media/libstagefright/include/ACodecBufferChannel.h b/media/libstagefright/include/ACodecBufferChannel.h
index da962d1..f3b0600 100644
--- a/media/libstagefright/include/ACodecBufferChannel.h
+++ b/media/libstagefright/include/ACodecBufferChannel.h
@@ -97,6 +97,7 @@
             const sp<MediaCodecBuffer> &buffer) override;
     virtual status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
+    virtual void pollForRenderedBuffers() override;
     virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
     virtual void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
     virtual void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index 48721ec..aa02151 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -407,6 +407,14 @@
      */
     virtual status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) = 0;
+
+    /**
+     * Poll for updates about rendered buffers.
+     *
+     * Triggers callbacks to CodecCallback::onOutputFramesRendered.
+     */
+    virtual void pollForRenderedBuffers() = 0;
+
     /**
      * Discard a buffer to the underlying CodecBase object.
      *
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 703f7ad..29b196f 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -457,12 +457,19 @@
     int32_t mRotationDegrees;
     int32_t mAllowFrameDroppingBySurface;
 
-    int32_t mConfigColorTransfer;
-    bool mHDRStaticInfo;
-    bool mHDR10PlusInfo;
-    void updateHDRFormatMetric();
-    hdr_format getHDRFormat(const int32_t profile, const int32_t transfer,
-            const AString &mediaType);
+    enum {
+        kFlagHasHdrStaticInfo   = 1,
+        kFlagHasHdr10PlusInfo   = 2,
+    };
+    uint32_t mHdrInfoFlags;
+    void updateHdrMetrics(bool isConfig);
+    hdr_format getHdrFormat(const AString &mime, const int32_t profile,
+            const int32_t colorTransfer);
+    hdr_format getHdrFormatForEncoder(const AString &mime, const int32_t profile,
+            const int32_t colorTransfer);
+    hdr_format getHdrFormatForDecoder(const AString &mime, const int32_t profile,
+            const int32_t colorTransfer);
+    bool profileSupport10Bits(const AString &mime, const int32_t profile);
 
     // initial create parameters
     AString mInitName;
@@ -622,6 +629,9 @@
                                                  // when low latency is on
     int64_t mInputBufferCounter;  // number of input buffers queued since last reset/flush
 
+    // A reschedulable message that periodically polls for rendered buffers
+    sp<AMessage> mMsgPollForRenderedBuffers;
+
     class ReleaseSurface;
     std::unique_ptr<ReleaseSurface> mReleaseSurface;
 
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 33f224c..a7d2eb9 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -117,6 +117,12 @@
     kKeyVideoProfile      = 'vprf',  // int32_t
     kKeyVideoLevel        = 'vlev',  // int32_t
 
+    // audio profile and level
+    // The codec framework doesn't distinguish between video and audio profiles,
+    // so the audio keys reuse the video FourCC values instead of defining new ones.
+    kKeyAudioProfile      = 'vprf',  // int32_t
+    kKeyAudioLevel        = 'vlev',  // int32_t
+
     kKey2ByteNalLength    = '2NAL',  // int32_t (bool)
 
     // Identify the file output format for authoring
@@ -267,6 +273,7 @@
     kKeyRtpExtMap        = 'extm', // int32_t, rtp extension ID for cvo on RTP protocol.
     kKeyRtpCvoDegrees    = 'cvod', // int32_t, rtp cvo degrees as per 3GPP 26.114.
     kKeyRtpDscp          = 'dscp', // int32_t, DSCP(Differentiated services codepoint) of RFC 2474.
+    kKeyRtpEcn           = 'sEcn', // int32_t, ECN (Explicit Congestion Notification) of RFC 3168
     kKeySocketNetwork    = 'sNet', // int64_t, socket will be bound to network handle.
 
     // Slow-motion markers
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index 88f7be7..100c0cd 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -46,7 +46,6 @@
       mFirstIFrameProvided(false),
       mLastCvo(-1),
       mLastIFrameProvidedAtMs(0),
-      mLastRtpTimeJitterDataUs(0),
       mWidth(0),
       mHeight(0) {
 }
@@ -123,20 +122,11 @@
     }
 
     sp<ABuffer> buffer = *queue->begin();
+    uint32_t seqNum = (uint32_t)buffer->int32Data();
     buffer->meta()->setObject("source", source);
 
-    /**
-     * RFC3550 calculates the interarrival jitter time for 'ALL packets'.
-     * But that is not useful as an ingredient of buffering time.
-     * Instead, we calculates the time only for all 'NAL units'.
-     */
     int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
     int64_t nowTimeUs = ALooper::GetNowUs();
-    if (rtpTime != mLastRtpTimeJitterDataUs) {
-        source->putBaseJitterData(rtpTime, nowTimeUs);
-        mLastRtpTimeJitterDataUs = rtpTime;
-    }
-    source->putInterArrivalJitterData(rtpTime, nowTimeUs);
 
     const int64_t startTimeMs = source->mSysAnchorTime / 1000;
     const int64_t nowTimeMs = nowTimeUs / 1000;
@@ -168,7 +158,7 @@
     const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
     const int64_t dynamicJbTimeRtp = MsToRtp(dynamicJbTimeMs, clockRate);
     /* Fundamental jitter time */
-    const int32_t jitterTimeMs = baseJbTimeMs;
+    const int32_t jitterTimeMs = baseJbTimeMs + dynamicJbTimeMs;
     const int64_t jitterTimeRtp = MsToRtp(jitterTimeMs, clockRate);
 
     // Till (T), this assembler waits unconditionally to collect current NAL unit
@@ -177,7 +167,7 @@
     bool isExpired = (diffTimeRtp >= 0);                    // It's expired if T is passed away
 
     // From (T), this assembler tries to complete the NAL till (T + try)
-    int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;
+    int32_t tryJbTimeMs = dynamicJbTimeMs;
     int64_t tryJbTimeRtp = MsToRtp(tryJbTimeMs, clockRate);
     bool isFirstLineBroken = (diffTimeRtp > tryJbTimeRtp);
 
@@ -208,10 +198,10 @@
         String8 info;
         info.appendFormat("RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
                     "Seq# %d \t ExpSeq# %d \t"
-                    "JitterMs %d + (%d + %d * %.3f)",
+                    "JitterMs [%d + (~%d~)] + %d * %.3f",
                     (long long)diffTimeRtp, (long long)totalDiffTimeMs,
-                    buffer->int32Data(), mNextExpectedSeqNo,
-                    jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+                    seqNum, mNextExpectedSeqNo,
+                    baseJbTimeMs, dynamicJbTimeMs, tryJbTimeMs, JITTER_MULTIPLE);
         if (isSecondLineBroken) {
             ALOGE("%s", info.string());
             printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
@@ -223,6 +213,9 @@
     }
 
     if (mNextExpectedSeqNoValid) {
+        if (mNextExpectedSeqNo > seqNum) {
+            ALOGE("Reversed exp seq# %d \t current head %d", mNextExpectedSeqNo, seqNum);
+        }
         mNextExpectedSeqNo = pickStartSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
         int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
 
@@ -241,10 +234,10 @@
 
     if (!mNextExpectedSeqNoValid) {
         mNextExpectedSeqNoValid = true;
-        mNextExpectedSeqNo = (uint32_t)buffer->int32Data();
-    } else if ((uint32_t)buffer->int32Data() != mNextExpectedSeqNo) {
-        ALOGV("Not the sequence number I expected");
-
+        mNextExpectedSeqNo = seqNum;
+    } else if (seqNum != mNextExpectedSeqNo) {
+        ALOGV("Not the sequence number(%d) I expected. Actual seq# is %d",
+                mNextExpectedSeqNo, seqNum);
         return WRONG_SEQUENCE_NUMBER;
     }
 
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index 72dd981..7b5c24a 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -53,7 +53,6 @@
       mFirstIFrameProvided(false),
       mLastCvo(-1),
       mLastIFrameProvidedAtMs(0),
-      mLastRtpTimeJitterDataUs(0),
       mWidth(0),
       mHeight(0) {
 
@@ -133,20 +132,11 @@
     }
 
     sp<ABuffer> buffer = *queue->begin();
+    uint32_t seqNum = (uint32_t)buffer->int32Data();
     buffer->meta()->setObject("source", source);
 
-    /**
-     * RFC3550 calculates the interarrival jitter time for 'ALL packets'.
-     * But that is not useful as an ingredient of buffering time.
-     * Instead, we calculates the time only for all 'NAL units'.
-     */
     int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
     int64_t nowTimeUs = ALooper::GetNowUs();
-    if (rtpTime != mLastRtpTimeJitterDataUs) {
-        source->putBaseJitterData(rtpTime, nowTimeUs);
-        mLastRtpTimeJitterDataUs = rtpTime;
-    }
-    source->putInterArrivalJitterData(rtpTime, nowTimeUs);
 
     const int64_t startTimeMs = source->mSysAnchorTime / 1000;
     const int64_t nowTimeMs = nowTimeUs / 1000;
@@ -178,7 +168,7 @@
     const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
     const int64_t dynamicJbTimeRtp = MsToRtp(dynamicJbTimeMs, clockRate);
     /* Fundamental jitter time */
-    const int32_t jitterTimeMs = baseJbTimeMs;
+    const int32_t jitterTimeMs = baseJbTimeMs + dynamicJbTimeMs;
     const int64_t jitterTimeRtp = MsToRtp(jitterTimeMs, clockRate);
 
     // Till (T), this assembler waits unconditionally to collect current NAL unit
@@ -187,7 +177,7 @@
     bool isExpired = (diffTimeRtp >= 0);                    // It's expired if T is passed away
 
     // From (T), this assembler tries to complete the NAL till (T + try)
-    int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;
+    int32_t tryJbTimeMs = dynamicJbTimeMs;
     int64_t tryJbTimeRtp = MsToRtp(tryJbTimeMs, clockRate);
     bool isFirstLineBroken = (diffTimeRtp > tryJbTimeRtp);
 
@@ -218,10 +208,10 @@
         String8 info;
         info.appendFormat("RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
                     "Seq# %d \t ExpSeq# %d \t"
-                    "JitterMs %d + (%d + %d * %.3f)",
+                    "JitterMs [%d + (~%d~)] + %d * %.3f",
                     (long long)diffTimeRtp, (long long)totalDiffTimeMs,
-                    buffer->int32Data(), mNextExpectedSeqNo,
-                    jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+                    seqNum, mNextExpectedSeqNo,
+                    baseJbTimeMs, dynamicJbTimeMs, tryJbTimeMs, JITTER_MULTIPLE);
         if (isSecondLineBroken) {
             ALOGE("%s", info.string());
             printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
@@ -251,10 +241,10 @@
 
     if (!mNextExpectedSeqNoValid) {
         mNextExpectedSeqNoValid = true;
-        mNextExpectedSeqNo = (uint32_t)buffer->int32Data();
-    } else if ((uint32_t)buffer->int32Data() != mNextExpectedSeqNo) {
-        ALOGV("Not the sequence number I expected");
-
+        mNextExpectedSeqNo = seqNum;
+    } else if (seqNum != mNextExpectedSeqNo) {
+        ALOGV("Not the sequence number(%d) I expected. Actual seq# is %d",
+                mNextExpectedSeqNo, seqNum);
         return WRONG_SEQUENCE_NUMBER;
     }
 
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index a61f48f..165c336 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -16,6 +16,12 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ARTPConnection"
+#define INET_ECN_NOT_ECT    0x00    /* ECN was not enabled */
+#define INET_ECN_ECT_1      0x01    /* ECN capable packet */
+#define INET_ECN_ECT_0      0x02    /* ECN capable packet */
+#define INET_ECN_CE         0x03    /* ECN congestion */
+#define INET_ECN_MASK       0x03    /* Mask of ECN bits */
+
 #include <utils/Log.h>
 
 #include <media/stagefright/rtsp/ARTPAssembler.h>
@@ -56,6 +62,7 @@
 
 // static
 const int64_t ARTPConnection::kSelectTimeoutUs = 1000LL;
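+// minimum interval between repeated notifications (100000 us = 100 ms)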
+const int64_t ARTPConnection::kMinOneSecondNotifyDelayUs = 100000ll;
 
 struct ARTPConnection::StreamInfo {
     bool isIPv6;
@@ -84,7 +91,10 @@
       mPollEventPending(false),
       mLastReceiverReportTimeUs(-1),
       mLastBitrateReportTimeUs(-1),
+      mLastCongestionNotifyTimeUs(-1),
       mTargetBitrate(-1),
+      mRtpSockOptEcn(0),
+      mIsIPv6(false),
       mStaticJitterTimeMs(kStaticJitterTimeMs) {
 }
 
@@ -175,7 +185,7 @@
 // static
 void ARTPConnection::MakeRTPSocketPair(
         int *rtpSocket, int *rtcpSocket, const char *localIp, const char *remoteIp,
-        unsigned localPort, unsigned remotePort, int64_t socketNetwork) {
+        unsigned localPort, unsigned remotePort, int64_t socketNetwork, int32_t sockOptEcn) {
     bool isIPv6 = false;
     if (strchr(localIp, ':') != NULL)
         isIPv6 = true;
@@ -204,6 +214,24 @@
         }
     }
 
+    if (sockOptEcn != 0) {
+        int sockOptForTOS = 1;
+        if (setsockopt(*rtpSocket, isIPv6 ? IPPROTO_IPV6 : IPPROTO_IP,
+               isIPv6 ? IPV6_RECVTCLASS : IP_RECVTOS,
+               (int *)&sockOptForTOS, sizeof(sockOptForTOS)) < 0) {
+            ALOGE("failed to set recv sockopt TOS on rtpsock(%d). err=%s", *rtpSocket,
+                strerror(errno));
+        } else {
+            ALOGD("successfully set recv sockopt TOS on rtpsock(%d)", *rtpSocket);
+            int result = setsockopt(*rtcpSocket, isIPv6 ? IPPROTO_IPV6 : IPPROTO_IP,
+                isIPv6 ? IPV6_RECVTCLASS : IP_RECVTOS,
+                (int *)&sockOptForTOS, sizeof(sockOptForTOS));
+            if (result >= 0) {
+                ALOGD("successfully set recv sockopt TOS on rtcpsock(%d).", *rtcpSocket);
+            }
+        }
+    }
+
     bumpSocketBufferSize(*rtcpSocket);
 
     struct sockaddr *addr;
@@ -593,32 +621,25 @@
 
     sp<ABuffer> buffer = new ABuffer(65536);
 
-    struct sockaddr *pRemoteRTCPAddr;
-    int sizeSockSt;
-    if (s->isIPv6) {
-        pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr6;
-        sizeSockSt = sizeof(struct sockaddr_in6);
-    } else {
-        pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr;
-        sizeSockSt = sizeof(struct sockaddr_in);
-    }
-    socklen_t remoteAddrLen =
-        (!receiveRTP && s->mNumRTCPPacketsReceived == 0)
-            ? sizeSockSt : 0;
+    struct msghdr sMsg = {};
+    struct iovec sIov[1] = {};
 
-    if (mFlags & kViLTEConnection) {
-        remoteAddrLen = 0;
-    }
+    sIov[0].iov_base = (char *) buffer->data();
+    sIov[0].iov_len = buffer->capacity();
+
+    sMsg.msg_iov = sIov;
+    sMsg.msg_iovlen = 1;
+
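+    // Reserve ancillary-data space so the kernel can deliver the TOS / traffic-class
+    // byte requested via IP_RECVTOS / IPV6_RECVTCLASS along with the packet.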
+    int cMsgSize = sizeof(struct cmsghdr) + sizeof(uint8_t);
+    char buf[CMSG_SPACE(cMsgSize)];
+    sMsg.msg_control = buf;
+    sMsg.msg_controllen = sizeof(buf);
+    sMsg.msg_flags = 0;
 
     ssize_t nbytes;
     do {
-        nbytes = recvfrom(
-            receiveRTP ? s->mRTPSocket : s->mRTCPSocket,
-            buffer->data(),
-            buffer->capacity(),
-            0,
-            remoteAddrLen > 0 ? pRemoteRTCPAddr : NULL,
-            remoteAddrLen > 0 ? &remoteAddrLen : NULL);
+        // Use recvmsg() so the TOS byte of the incoming packet is delivered as ancillary data
+        nbytes = recvmsg(receiveRTP ? s->mRTPSocket : s->mRTCPSocket, &sMsg, 0);
         mCumulativeBytes += nbytes;
     } while (nbytes < 0 && errno == EINTR);
 
@@ -633,6 +654,10 @@
         }
     }
 
+    if (nbytes > 0) {
+        handleIpHeadersIfReceived(s, sMsg);
+    }
+
     buffer->setRange(0, nbytes);
 
     // ALOGI("received %d bytes.", buffer->size());
@@ -647,13 +672,68 @@
     return err;
 }
 
+/* Check whether the TOS byte is present in the received IP packet.
+ * If it is present and its CE bit is set, notify the upper layer
+ * about congestion.
+ */
+void ARTPConnection::handleIpHeadersIfReceived(StreamInfo *s, struct msghdr sMsg) {
+    struct cmsghdr *cMsg;
+    cMsg = CMSG_FIRSTHDR(&sMsg);
+
+    if (cMsg == NULL) {
+        ALOGV("cmsg is null");
+    }
+
+    for (; cMsg != NULL; cMsg = CMSG_NXTHDR(&sMsg, cMsg)) {
+        bool isTOSHeader = ((cMsg->cmsg_level == (mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP))
+                              && (cMsg->cmsg_type == (mIsIPv6 ? IPV6_TCLASS : IP_TOS))
+                              && (cMsg->cmsg_len));
+        if (isTOSHeader) {
+            uint8_t receivedTOS;
+            receivedTOS = *((uint8_t *) CMSG_DATA(cMsg));
+            // check whether the CE bit is set
+            bool isCEBitMarked = ((receivedTOS & INET_ECN_MASK) == INET_ECN_CE);
+
+            ALOGV("receivedTos(value -> %d)", receivedTOS);
+
+            if (isCEBitMarked) {
+                ALOGD("receivedTos(value -> %d), is ECN CE marked = %d",
+                    receivedTOS, isCEBitMarked);
+                notifyCongestionToUpperLayerIfNeeded(s);
+            }
+            break;
+        }
+    }
+}
+
+/* Notify the upper layer when congestion is detected in a video call. */
+void ARTPConnection::notifyCongestionToUpperLayerIfNeeded(StreamInfo *s) {
+    int64_t nowUs = ALooper::GetNowUs();
+
+    if (mLastCongestionNotifyTimeUs <= 0) {
+        mLastCongestionNotifyTimeUs = nowUs;
+    }
+
+    bool isNeedToUpdate = (mLastCongestionNotifyTimeUs + kMinOneSecondNotifyDelayUs <= nowUs);
+    ALOGD("ECN info set by upper layer=%d, isNeedToUpdate=%d", mRtpSockOptEcn, isNeedToUpdate);
+
+    if ((mRtpSockOptEcn != 0) && (isNeedToUpdate)) {
+        sp<AMessage> notify = s->mNotifyMsg->dup();
+        notify->setInt32("rtcp-event", 1);
+        notify->setInt32("payload-type", ARTPSource::RTP_QUALITY_CD);
+        notify->post();
+        mLastCongestionNotifyTimeUs = nowUs;
+        ALOGD("Congestion detected in n/w, Notify upper layer");
+    }
+}
+
 ssize_t ARTPConnection::send(const StreamInfo *info, const sp<ABuffer> buffer) {
         struct sockaddr* pRemoteRTCPAddr;
         int sizeSockSt;
 
         /* It seems this isIPv6 variable is useless.
          * We should remove it to prevent confusion */
-        if (info->isIPv6) {
+        if (mIsIPv6) {
             pRemoteRTCPAddr = (struct sockaddr *)&info->mRemoteRTCPAddr6;
             sizeSockSt = sizeof(struct sockaddr_in6);
         } else {
@@ -1215,12 +1295,20 @@
     mTargetBitrate = targetBitrate;
 }
 
+void ARTPConnection::setRtpSockOptEcn(int32_t sockOptEcn) {
+    mRtpSockOptEcn = sockOptEcn;
+}
+
+void ARTPConnection::setIsIPv6(const char *localIp) {
+    mIsIPv6 = (strchr(localIp, ':') != nullptr);
+}
+
 void ARTPConnection::checkRxBitrate(int64_t nowUs) {
     if (mLastBitrateReportTimeUs <= 0) {
         mCumulativeBytes = 0;
         mLastBitrateReportTimeUs = nowUs;
     }
-    else if (mLastEarlyNotifyTimeUs + 100000ll <= nowUs) {
+    else if (mLastEarlyNotifyTimeUs + kMinOneSecondNotifyDelayUs <= nowUs) {
         int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
         int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
         mLastEarlyNotifyTimeUs = nowUs;
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 717d8af..c5b0a1e 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -264,12 +264,12 @@
 
 bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
     int64_t nowUs = ALooper::GetNowUs();
+    int64_t rtpTime = 0;
     uint32_t seqNum = (uint32_t)buffer->int32Data();
-    int32_t ssrc = 0, rtpTime = 0;
+    int32_t ssrc = 0;
 
     buffer->meta()->findInt32("ssrc", &ssrc);
     CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
-    mLatestRtpTime = rtpTime;
 
     if (mNumBuffersReceived++ == 0 && mFirstSysTime == 0) {
         mFirstSysTime = nowUs;
@@ -277,7 +277,7 @@
         mLastSysAnchorTimeUpdatedUs = nowUs;
         mHighestSeqNumber = seqNum;
         mBaseSeqNumber = seqNum;
-        mFirstRtpTime = rtpTime;
+        mFirstRtpTime = (uint32_t)rtpTime;
         mFirstSsrc = ssrc;
         ALOGD("first-rtp arrived: first-rtp-time=%u, sys-time=%lld, seq-num=%u, ssrc=%d",
                 mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber, mFirstSsrc);
@@ -352,6 +352,18 @@
 
     mQueue.insert(it, buffer);
 
+    /**
+     * RFC3550 calculates the interarrival jitter time for 'ALL packets'.
+     * We calculate another jitter value using only the 'Head NAL units'.
+     */
+    ALOGV("<======== Insert %d", seqNum);
+    rtpTime = mAssembler->findRTPTime(mFirstRtpTime, buffer);
+    if (rtpTime != mLatestRtpTime) {
+        mJitterCalc->putBaseData(rtpTime, nowUs);
+    }
+    mJitterCalc->putInterArrivalData(rtpTime, nowUs);
+    mLatestRtpTime = rtpTime;
+
     return true;
 }
 
@@ -680,14 +692,6 @@
     mStaticJbTimeMs = jbTimeMs;
 }
 
-void ARTPSource::putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime) {
-    mJitterCalc->putBaseData(timeStamp, arrivalTime);
-}
-
-void ARTPSource::putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime) {
-    mJitterCalc->putInterArrivalData(timeStamp, arrivalTime);
-}
-
 void ARTPSource::setJbTimer(const sp<AMessage> timer) {
     mJbTimer = timer;
 }
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 8990f0c..41f2d67 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -255,9 +255,34 @@
     if (params->findInt32(kKeyRtpCvoDegrees, &rtpCVODegrees))
         mRTPCVODegrees = rtpCVODegrees;
 
+    bool needToSetSockOpt = false;
     int32_t dscp = 0;
-    if (params->findInt32(kKeyRtpDscp, &dscp))
-        updateSocketDscp(dscp);
+    if (params->findInt32(kKeyRtpDscp, &dscp)) {
+        mRtpLayer3Dscp = dscp << 2;
+        needToSetSockOpt = true;
+    }
+
+    int32_t ecn = 0;
+    if (params->findInt32(kKeyRtpEcn, &ecn)) {
+        /*
+         * @ecn, possible value for ECN.
+         *  +-----+-----+
+         *  | ECN FIELD |
+         *  +-----+-----+
+         *    ECT   CE         [Obsolete] RFC 2481 names for the ECN bits.
+         *     0     0         Not-ECT
+         *     0     1         ECT (ECN-Capable Transport) (1)
+         *     1     0         ECT (ECN-Capable Transport) (0)
+         *     1     1         CE (Congestion Experienced)
+         *
+         */
+        mRtpSockOptEcn = ecn;
+        needToSetSockOpt = true;
+    }
+
+    if (needToSetSockOpt) {
+        updateSocketOpt();
+    }
 
     int64_t sockNetwork = 0;
     if (params->findInt64(kKeySocketNetwork, &sockNetwork))
@@ -1438,18 +1463,29 @@
     mPayloadType = payloadType;
 }
 
-void ARTPWriter::updateSocketDscp(int32_t dscp) {
-    mRtpLayer3Dscp = dscp << 2;
+/*
+ * Set the socket options (DSCP and ECN) that are carried in the
+ * IP header TOS / traffic-class field.
+ */
+void ARTPWriter::updateSocketOpt() {
+    /*
+     * 0     1     2     3     4     5     6     7
+     * +-----+-----+-----+-----+-----+-----+-----+-----+
+     * |          DS FIELD, DSCP           | ECN FIELD |
+     * +-----+-----+-----+-----+-----+-----+-----+-----+
+     */
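+    // mRtpLayer3Dscp occupies bits 2-7 and mRtpSockOptEcn bits 0-1, so XOR below
+    // combines two disjoint bit fields (equivalent to bitwise OR).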
+    int sockOpt = mRtpLayer3Dscp ^ mRtpSockOptEcn;
+    ALOGD("Update socket opt with sockopt=%d, mRtpLayer3Dscp=%d, mRtpSockOptEcn=%d",
+                sockOpt, mRtpLayer3Dscp, mRtpSockOptEcn);
 
-    /* mRtpLayer3Dscp will be mapped to WMM(Wifi) as per operator's requirement */
-    if (setsockopt(mRTPSocket, IPPROTO_IP, IP_TOS,
-                (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp)) < 0) {
-        ALOGE("failed to set dscp on rtpsock. err=%s", strerror(errno));
+    /* sockOpt is written into the IPv4 TOS / IPv6 traffic-class field */
+    if (setsockopt(mRTPSocket, mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP, mIsIPv6 ? IPV6_TCLASS : IP_TOS,
+                (int *)&sockOpt, sizeof(sockOpt)) < 0) {
+        ALOGE("failed to set sockopt on rtpsock. err=%s", strerror(errno));
     } else {
-        ALOGD("successfully set dscp on rtpsock. opt=%d", mRtpLayer3Dscp);
-        setsockopt(mRTCPSocket, IPPROTO_IP, IP_TOS,
-                (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp));
-        ALOGD("successfully set dscp on rtcpsock. opt=%d", mRtpLayer3Dscp);
+        ALOGD("successfully set sockopt. opt=%d", sockOpt);
+        setsockopt(mRTCPSocket, mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP, mIsIPv6 ? IPV6_TCLASS : IP_TOS,
+                (int *)&sockOpt, sizeof(sockOpt));
+        ALOGD("successfully set sockopt rtcpsock. opt=%d", sockOpt);
     }
 }
 
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h
index 2f8b8ba..70ce388 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h
@@ -50,7 +50,6 @@
     bool mFirstIFrameProvided;
     int32_t mLastCvo;
     uint64_t mLastIFrameProvidedAtMs;
-    int64_t mLastRtpTimeJitterDataUs;
     int32_t mWidth;
     int32_t mHeight;
     List<sp<ABuffer> > mNALUnits;
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h
index 9575d8c..ed3f1ae 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h
@@ -51,7 +51,6 @@
     bool mFirstIFrameProvided;
     int32_t mLastCvo;
     uint64_t mLastIFrameProvidedAtMs;
-    int64_t mLastRtpTimeJitterDataUs;
     int32_t mWidth;
     int32_t mHeight;
     List<sp<ABuffer> > mNALUnits;
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
index 39161b6..8f87642 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
@@ -44,6 +44,13 @@
     virtual void onByeReceived() = 0;
     virtual bool initCheck() { return true; }
 
+    // Utility functions
+    inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
+    inline int64_t MsToRtp(int64_t ms, int64_t clockRate);
+    inline int64_t RtpToMs(int64_t rtp, int64_t clockRate);
+    inline void printNowTimeMs(int64_t start, int64_t now, int64_t play);
+    inline void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
+
 protected:
     virtual AssemblyStatus assembleMore(const sp<ARTPSource> &source) = 0;
     virtual void packetLost() = 0;
@@ -64,13 +71,6 @@
     bool mShowQueue;
     int32_t mShowQueueCnt;
 
-    // Utility functions
-    inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
-    inline int64_t MsToRtp(int64_t ms, int64_t clockRate);
-    inline int64_t RtpToMs(int64_t rtp, int64_t clockRate);
-    inline void printNowTimeMs(int64_t start, int64_t now, int64_t play);
-    inline void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
-
 private:
     int64_t mFirstFailureTimeUs;
 
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
index 73d2866..250de71 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
@@ -20,6 +20,7 @@
 
 #include <media/stagefright/foundation/AHandler.h>
 #include <utils/List.h>
+#include <sys/socket.h>
 
 namespace android {
 
@@ -48,6 +49,8 @@
     void setSelfID(const uint32_t selfID);
     void setStaticJitterTimeMs(const uint32_t jbTimeMs);
     void setTargetBitrate(int32_t targetBitrate);
+    void setRtpSockOptEcn(int32_t sockOptEcn);
+    void setIsIPv6(const char *localIp);
 
     // Creates a pair of UDP datagram sockets bound to adjacent ports
     // (the rtpSocket is bound to an even port, the rtcpSocket to the
@@ -60,7 +63,8 @@
     static void MakeRTPSocketPair(
             int *rtpSocket, int *rtcpSocket,
             const char *localIp, const char *remoteIp,
-            unsigned localPort, unsigned remotePort, int64_t socketNetwork = 0);
+            unsigned localPort, unsigned remotePort, int64_t socketNetwork = 0,
+            int32_t sockOptEcn = 0);
 
 protected:
     virtual ~ARTPConnection();
@@ -77,6 +81,7 @@
     };
 
     static const int64_t kSelectTimeoutUs;
+    static const int64_t kMinOneSecondNotifyDelayUs;
 
     uint32_t mFlags;
 
@@ -87,9 +92,12 @@
     int64_t mLastReceiverReportTimeUs;
     int64_t mLastBitrateReportTimeUs;
     int64_t mLastEarlyNotifyTimeUs;
+    int64_t mLastCongestionNotifyTimeUs;
 
     int32_t mSelfID;
     int32_t mTargetBitrate;
+    int32_t mRtpSockOptEcn;
+    bool mIsIPv6;
 
     uint32_t mStaticJitterTimeMs;
 
@@ -103,6 +111,8 @@
     void onInjectPacket(const sp<AMessage> &msg);
     void onSendReceiverReports();
     void checkRxBitrate(int64_t nowUs);
+    void notifyCongestionToUpperLayerIfNeeded(StreamInfo *s);
+    void handleIpHeadersIfReceived(StreamInfo *s, struct msghdr sMsg);
 
     status_t receive(StreamInfo *info, bool receiveRTP);
     ssize_t send(const StreamInfo *info, const sp<ABuffer> buffer);
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
index e9b4942..7d1faf2 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
@@ -50,6 +50,7 @@
         RTCP_FIRST_PACKET = 101,
         RTP_QUALITY = 102,
         RTP_QUALITY_EMC = 103,
+        RTP_QUALITY_CD = 104,
         RTCP_SR = 200,
         RTCP_RR = 201,
         RTCP_TSFB = 205,
@@ -81,8 +82,6 @@
     int32_t getBaseJitterTimeMs();
     int32_t getInterArrivalJitterTimeMs();
     void setStaticJitterTimeMs(const uint32_t jbTimeMs);
-    void putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime);
-    void putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime);
     void setJbTimer(const sp<AMessage> timer);
     void setJbAlarmTime(int64_t nowTimeUs, int64_t alarmAfterUs);
 
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
index 2982cf6..ecd29d0 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
@@ -50,7 +50,7 @@
     virtual status_t pause();
     void updateCVODegrees(int32_t cvoDegrees);
     void updatePayloadType(int32_t payloadType);
-    void updateSocketDscp(int32_t dscp);
+    void updateSocketOpt();
     void updateSocketNetwork(int64_t socketNetwork);
     uint32_t getSequenceNum();
     virtual uint64_t getAccumulativeBytes() override;
@@ -98,6 +98,7 @@
     struct sockaddr_in6 mRTPAddr6;
     struct sockaddr_in6 mRTCPAddr6;
     int32_t mRtpLayer3Dscp;
+    int32_t mRtpSockOptEcn;
     net_handle_t mRTPSockNetwork;
 
     AString mProfileLevel;
diff --git a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
index a8e64b6..ecdaac5 100644
--- a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
+++ b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
@@ -70,6 +70,7 @@
     MOCK_METHOD(status_t, discardBuffer, (const sp<MediaCodecBuffer> &buffer), (override));
     MOCK_METHOD(void, getInputBufferArray, (Vector<sp<MediaCodecBuffer>> *array), (override));
     MOCK_METHOD(void, getOutputBufferArray, (Vector<sp<MediaCodecBuffer>> *array), (override));
+    MOCK_METHOD(void, pollForRenderedBuffers, (), (override));
 };
 
 class MockCodec : public CodecBase {
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
index cdbd745..7d1442b 100644
--- a/media/libstagefright/webm/WebmFrameThread.cpp
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -336,7 +336,6 @@
 }
 
 void WebmFrameMediaSourceThread::run() {
-    int32_t count = 0;
     int64_t timestampUs = 0xdeadbeef;
     int64_t lastTimestampUs = 0; // Previous sample time stamp
     int64_t lastDurationUs = 0; // Previous sample duration
@@ -367,7 +366,6 @@
             buffer = NULL;
             continue;
         }
-        ++count;
 
         // adjust time-stamps after pause/resume
         if (mResumed) {
diff --git a/media/module/extractors/ogg/OggExtractor.cpp b/media/module/extractors/ogg/OggExtractor.cpp
index 1c6f516..4c106b2 100644
--- a/media/module/extractors/ogg/OggExtractor.cpp
+++ b/media/module/extractors/ogg/OggExtractor.cpp
@@ -34,6 +34,9 @@
 #include <system/audio.h>
 #include <utils/String8.h>
 
+#include <inttypes.h>
+#include <stdint.h>
+
 extern "C" {
     #include <Tremolo/codec_internal.h>
 
@@ -346,66 +349,118 @@
         off64_t startOffset, off64_t *pageOffset) {
     *pageOffset = startOffset;
 
-    for (;;) {
-        char signature[4];
-        ssize_t n = mSource->readAt(*pageOffset, &signature, 4);
+    // balance between larger reads and reducing how much we over-read.
+    const int FIND_BUF_SIZE = 2048;
+    const int lenOggS = strlen("OggS");
+    while(1) {
 
-        if (n < 4) {
+        // work with big buffers to amortize readAt() costs
+        char signatureBuffer[FIND_BUF_SIZE];
+        ssize_t n = mSource->readAt(*pageOffset, &signatureBuffer, sizeof(signatureBuffer));
+
+        if (n < lenOggS) {
             *pageOffset = 0;
-
             return (n < 0) ? n : (status_t)ERROR_END_OF_STREAM;
         }
 
-        if (!memcmp(signature, "OggS", 4)) {
-            if (*pageOffset > startOffset) {
-                ALOGV("skipped %lld bytes of junk to reach next frame",
-                     (long long)(*pageOffset - startOffset));
-            }
-
-            return OK;
-        }
-
-        // see how far ahead to skip; avoid some fruitless comparisons
-        unsigned int i;
-        for (i = 1; i < 4 ; i++) {
-            if (signature[i] == 'O')
+        for(int i = 0; i < n - (lenOggS - 1) ; i++) {
+            // fast scan for 1st character in a signature
+            char *p = (char *)memchr(&signatureBuffer[i], 'O', n - (lenOggS - 1) - i);
+            if (p == NULL) {
+                // no signature start in the rest of this buffer.
                 break;
+            }
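+            // advance i to the 'O' that memchr found before comparing the full signature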
+            int jump = (p-&signatureBuffer[i]);
+            i += jump;
+            if (memcmp("OggS", &signatureBuffer[i], lenOggS) == 0) {
+                *pageOffset += i;
+                if (*pageOffset > startOffset) {
+                    ALOGD("skipped %" PRIu64 " bytes of junk to reach next frame",
+                         (*pageOffset - startOffset));
+                }
+                return OK;
+            }
         }
-        *pageOffset += i;
+
+        // on to next block. buffer didn't end with "OggS", but could end with "Ogg".
+        // overlap enough to detect this. n >= lenOggS, so this always advances.
+        *pageOffset += n - (lenOggS - 1);
     }
+    return (status_t)ERROR_END_OF_STREAM;
 }
 
 // Given the offset of the "current" page, find the page immediately preceding
 // it (if any) and return its granule position.
 // To do this we back up from the "current" page's offset until we find any
 // page preceding it and then scan forward to just before the current page.
+//
 status_t MyOggExtractor::findPrevGranulePosition(
         off64_t pageOffset, uint64_t *granulePos) {
     *granulePos = 0;
 
-    off64_t prevPageOffset = 0;
-    off64_t prevGuess = pageOffset;
-    for (;;) {
-        if (prevGuess >= 5000) {
-            prevGuess -= 5000;
+    const int FIND_BUF_SIZE = 2048;
+    const int lenOggS = strlen("OggS");
+
+    if (pageOffset == 0) {
+        ALOGV("no page before the first page");
+        return UNKNOWN_ERROR;
+    }
+
+    off64_t prevPageOffset = pageOffset;
+
+    // we start our search on the byte immediately in front of pageOffset
+    // which could mean "O" immediately before and "ggS" starting at pageOffset
+    //
+    // if there was an "OggS" at pageOffset, we'll have scanned a few extra bytes
+    // but if pageOffset was chosen by a seek operation, we don't know that it
+    // reflects the beginning of a page. By choosing to scan 3 possibly unneeded
+    // bytes at the start we cover both cases.
+    //
+    off64_t firstAfter = pageOffset + lenOggS - 1;    // NOT within our buffer
+    off64_t nextOffset = pageOffset;
+
+    while(prevPageOffset == pageOffset) {
+        // work with big buffers to amortize readAt() costs
+        char signatureBuffer[FIND_BUF_SIZE];
+
+        ssize_t desired = sizeof(signatureBuffer);
+        if (firstAfter >= desired) {
+            nextOffset = firstAfter - desired;
         } else {
-            prevGuess = 0;
+            nextOffset = 0;
+            desired = firstAfter;
         }
+        ssize_t n = mSource->readAt(nextOffset, &signatureBuffer, desired);
 
-        ALOGV("backing up %lld bytes", (long long)(pageOffset - prevGuess));
-
-        status_t err = findNextPage(prevGuess, &prevPageOffset);
-        if (err == ERROR_END_OF_STREAM) {
-            // We are at the last page and didn't back off enough;
-            // back off 5000 bytes more and try again.
-            continue;
-        } else if (err != OK) {
-            return err;
-        }
-
-        if (prevPageOffset < pageOffset || prevGuess == 0) {
+        if (n < lenOggS) {
+            ALOGD("short read, get out");
             break;
         }
+
+        // work backwards
+        // loop control ok for n >= 0
+        for(int i = n - lenOggS; i >= 0 ; i--) {
+            // fast scan for 1st character in the signature
+            char *p = (char *)memrchr(&signatureBuffer[0], 'O', i);
+            if (p == NULL) {
+                // no signature start in the rest of this buffer.
+                break;
+            }
+            i = (p-&signatureBuffer[0]);
+            // loop start chosen to ensure we will always have lenOggS bytes
+            if (memcmp("OggS", &signatureBuffer[i], lenOggS) == 0) {
+                prevPageOffset = nextOffset + i;
+                break;
+            }
+        }
+
+        // back up for next read; make sure we catch overlaps
+        if (nextOffset == 0) {
+            // can't back up any further
+            break;
+        }
+        // current buffer might start with "ggS", include those bytes in the next iteration
+        firstAfter = nextOffset + lenOggS - 1;
     }
 
     if (prevPageOffset == pageOffset) {
@@ -413,8 +468,8 @@
         return UNKNOWN_ERROR;
     }
 
-    ALOGV("prevPageOffset at %lld, pageOffset at %lld",
-            (long long)prevPageOffset, (long long)pageOffset);
+    ALOGV("prevPageOffset at %" PRId64 ", pageOffset at %" PRId64,
+          prevPageOffset, pageOffset);
     uint8_t flag = 0;
     for (;;) {
         Page prevPage;
@@ -993,16 +1048,21 @@
     size_t numerator = mTableOfContents.size();
 
     if (numerator > kMaxNumTOCEntries) {
-        size_t denom = numerator - kMaxNumTOCEntries;
+        Vector<TOCEntry> maxTOC;
+        maxTOC.setCapacity(kMaxNumTOCEntries);
 
+        size_t denom = numerator - kMaxNumTOCEntries;
         size_t accum = 0;
-        for (ssize_t i = mTableOfContents.size(); i > 0; --i) {
+        for (ssize_t i = 0; i < mTableOfContents.size(); i++) {
             accum += denom;
             if (accum >= numerator) {
-                mTableOfContents.removeAt(i);
                 accum -= numerator;
+            } else {
+                maxTOC.push(mTableOfContents.itemAt(i));
             }
         }
+
+        mTableOfContents = maxTOC;
     }
 }
 
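For readers skimming the table-of-contents change above: the rewritten loop is a Bresenham-style decimation that keeps at most kMaxNumTOCEntries entries while dropping the excess evenly across the table. A minimal standalone sketch of the same accumulator trick follows; the function name, the use of std::vector<int>, and the sizes are illustrative assumptions, not part of the extractor.

#include <cstddef>
#include <vector>

// Keep at most maxEntries items from `all`, spread evenly, using the same
// error-accumulator selection as the extractor: every time the running error
// reaches the total count, one entry is dropped.
static std::vector<int> decimateEvenly(const std::vector<int>& all, size_t maxEntries) {
    const size_t numerator = all.size();
    if (numerator <= maxEntries) {
        return all;
    }
    const size_t denom = numerator - maxEntries;  // total number of entries to drop
    std::vector<int> kept;
    kept.reserve(maxEntries);
    size_t accum = 0;
    for (size_t i = 0; i < numerator; ++i) {
        accum += denom;
        if (accum >= numerator) {
            accum -= numerator;        // drop this entry
        } else {
            kept.push_back(all[i]);    // keep this entry
        }
    }
    return kept;                       // exactly maxEntries entries remain
}

Over the whole pass, accum gains denom per entry and sheds numerator once per drop, so exactly denom entries are dropped and maxEntries remain.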
diff --git a/media/module/foundation/ALooper.cpp b/media/module/foundation/ALooper.cpp
index a276722..61bac02 100644
--- a/media/module/foundation/ALooper.cpp
+++ b/media/module/foundation/ALooper.cpp
@@ -69,6 +69,10 @@
     return systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
 }
 
+int64_t ALooper::getNowUs() {
+    return GetNowUs();
+}
+
 ALooper::ALooper()
     : mRunningLocally(false) {
     // clean up stale AHandlers. Doing it here instead of in the destructor avoids
@@ -170,11 +174,11 @@
 
     int64_t whenUs;
     if (delayUs > 0) {
-        int64_t nowUs = GetNowUs();
+        int64_t nowUs = getNowUs();
         whenUs = (delayUs > INT64_MAX - nowUs ? INT64_MAX : nowUs + delayUs);
 
     } else {
-        whenUs = GetNowUs();
+        whenUs = getNowUs();
     }
 
     List<Event>::iterator it = mEventQueue.begin();
@@ -185,6 +189,7 @@
     Event event;
     event.mWhenUs = whenUs;
     event.mMessage = msg;
+    event.mToken = nullptr;
 
     if (it == mEventQueue.begin()) {
         mQueueChangedCondition.signal();
@@ -193,7 +198,57 @@
     mEventQueue.insert(it, event);
 }
 
+status_t ALooper::postUnique(const sp<AMessage> &msg, const sp<RefBase> &token, int64_t delayUs) {
+    if (token == nullptr) {
+        return -EINVAL;
+    }
+    Mutex::Autolock autoLock(mLock);
+
+    int64_t whenUs;
+    if (delayUs > 0) {
+        int64_t nowUs = getNowUs();
+        whenUs = (delayUs > INT64_MAX - nowUs ? INT64_MAX : nowUs + delayUs);
+    } else {
+        whenUs = getNowUs();
+    }
+
+    // We only need to wake the loop up if we're rescheduling to the earliest event in the queue.
+    // This needs to be checked now, before we reschedule the message, in case this message is
+    // already at the beginning of the queue.
+    bool shouldAwakeLoop = mEventQueue.empty() || whenUs < mEventQueue.begin()->mWhenUs;
+
+    // Erase any previously-posted event with this token.
+    for (auto i = mEventQueue.begin(); i != mEventQueue.end();) {
+        if (i->mToken == token) {
+            i = mEventQueue.erase(i);
+        } else {
+            ++i;
+        }
+    }
+
+    // Find the insertion point for the rescheduled message.
+    List<Event>::iterator i = mEventQueue.begin();
+    while (i != mEventQueue.end() && i->mWhenUs <= whenUs) {
+        ++i;
+    }
+
+    Event event;
+    event.mWhenUs = whenUs;
+    event.mMessage = msg;
+    event.mToken = token;
+    mEventQueue.insert(i, event);
+
+    // If we rescheduled the event to be earlier than the first event, then we need to wake up the
+    // looper earlier than it was previously scheduled to be woken up. Otherwise, it can sleep until
+    // the previous wake-up time and then go to sleep again if needed.
+    if (shouldAwakeLoop) {
+        mQueueChangedCondition.signal();
+    }
+    return OK;
+}
+
 bool ALooper::loop() {
+
     Event event;
 
     {
@@ -206,7 +261,7 @@
             return true;
         }
         int64_t whenUs = (*mEventQueue.begin()).mWhenUs;
-        int64_t nowUs = GetNowUs();
+        int64_t nowUs = getNowUs();
 
         if (whenUs > nowUs) {
             int64_t delayUs = whenUs - nowUs;
diff --git a/media/module/foundation/AMessage.cpp b/media/module/foundation/AMessage.cpp
index 5c99cc9..b61dc47 100644
--- a/media/module/foundation/AMessage.cpp
+++ b/media/module/foundation/AMessage.cpp
@@ -430,6 +430,17 @@
     return OK;
 }
 
+status_t AMessage::postUnique(const sp<RefBase> &token, int64_t delayUs) {
+    sp<ALooper> looper = mLooper.promote();
+    if (looper == NULL) {
+        ALOGW("failed to post message as target looper for handler %d is gone.",
+              mTarget);
+        return -ENOENT;
+    }
+
+    return looper->postUnique(this, token, delayUs);
+}
+
 status_t AMessage::postAndAwaitResponse(sp<AMessage> *response) {
     sp<ALooper> looper = mLooper.promote();
     if (looper == NULL) {
diff --git a/media/module/foundation/MediaDefs.cpp b/media/module/foundation/MediaDefs.cpp
index 4a75f90..7abab63 100644
--- a/media/module/foundation/MediaDefs.cpp
+++ b/media/module/foundation/MediaDefs.cpp
@@ -72,6 +72,7 @@
 const char *MEDIA_MIMETYPE_AUDIO_DTS = "audio/vnd.dts";
 const char *MEDIA_MIMETYPE_AUDIO_DTS_HD = "audio/vnd.dts.hd";
 const char *MEDIA_MIMETYPE_AUDIO_DTS_HD_MA = "audio/vnd.dts.hd;profile=dtsma";
+const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD = "audio/vnd.dts.uhd";
 const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1 = "audio/vnd.dts.uhd;profile=p1";
 const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2 = "audio/vnd.dts.uhd;profile=p2";
 const char *MEDIA_MIMETYPE_AUDIO_EVRC = "audio/evrc";
diff --git a/media/module/foundation/include/media/stagefright/foundation/ALooper.h b/media/module/foundation/include/media/stagefright/foundation/ALooper.h
index 09c469b..60bda1f 100644
--- a/media/module/foundation/include/media/stagefright/foundation/ALooper.h
+++ b/media/module/foundation/include/media/stagefright/foundation/ALooper.h
@@ -59,6 +59,9 @@
     }
 
 protected:
+    // overridable by test harness
+    virtual int64_t getNowUs();
+
     virtual ~ALooper();
 
 private:
@@ -67,6 +70,7 @@
     struct Event {
         int64_t mWhenUs;
         sp<AMessage> mMessage;
+        sp<RefBase> mToken;
     };
 
     Mutex mLock;
@@ -87,9 +91,14 @@
 
     // START --- methods used only by AMessage
 
-    // posts a message on this looper with the given timeout
+    // Posts a message on this looper with the given timeout.
     void post(const sp<AMessage> &msg, int64_t delayUs);
 
+    // Post a message uniquely on this looper with the given timeout.
+    // This method ensures that there is exactly one message with the same token pending on
+    // this looper after the call returns. A null token will result in an EINVAL error status.
+    status_t postUnique(const sp<AMessage> &msg, const sp<RefBase> &token, int64_t delayUs);
+
     // creates a reply token to be used with this looper
     sp<AReplyToken> createReplyToken();
     // waits for a response for the reply token.  If status is OK, the response
diff --git a/media/module/foundation/include/media/stagefright/foundation/AMessage.h b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
index 960212a..6f73597 100644
--- a/media/module/foundation/include/media/stagefright/foundation/AMessage.h
+++ b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
@@ -141,6 +141,11 @@
 
     status_t post(int64_t delayUs = 0);
 
+    // Post a message uniquely to its target with the given timeout.
+    // This method ensures that there is exactly one message with the same token posted to its
+    // target after the call returns. A null token will result in an EINVAL error status.
+    status_t postUnique(const sp<RefBase> &token, int64_t delayUs = 0);
+
     // Posts the message to its target and waits for a response (or error)
     // before returning.
     status_t postAndAwaitResponse(sp<AMessage> *response);
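As a usage illustration for the new API above, here is a minimal sketch of a handler that debounces repeated requests with postUnique(); the class, the kWhatRefresh constant, and the 10 ms delay are hypothetical, and the handler is assumed to already be registered on a started ALooper.

#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>

using android::sp;
using android::AHandler;
using android::AMessage;

struct DebouncedHandler : public AHandler {
    void scheduleRefresh() {
        if (mRefreshMsg == nullptr) {
            mRefreshMsg = new AMessage(kWhatRefresh, this);
        }
        // Reposting with the message itself as the token drops any copy still
        // pending in the queue, so at most one refresh is ever scheduled.
        mRefreshMsg->postUnique(mRefreshMsg, 10 * 1000 /* 10 ms */);
    }

protected:
    void onMessageReceived(const sp<AMessage> &msg) override {
        if (msg->what() == kWhatRefresh) {
            // ... perform the coalesced work here ...
        }
    }

private:
    enum { kWhatRefresh = 'rfsh' };
    sp<AMessage> mRefreshMsg;
};

// Usage (hypothetical): sp<DebouncedHandler> h = new DebouncedHandler;
//                       looper->registerHandler(h); h->scheduleRefresh();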
diff --git a/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
index 740336a..05ee7fc 100644
--- a/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
+++ b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -74,6 +74,7 @@
 extern const char *MEDIA_MIMETYPE_AUDIO_DTS;
 extern const char *MEDIA_MIMETYPE_AUDIO_DTS_HD;
 extern const char *MEDIA_MIMETYPE_AUDIO_DTS_HD_MA;
+extern const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD;
 extern const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1;
 extern const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2;
 extern const char *MEDIA_MIMETYPE_AUDIO_EVRC;
diff --git a/media/module/foundation/tests/AMessage_test.cpp b/media/module/foundation/tests/AMessage_test.cpp
index 2b11326..e08ed77 100644
--- a/media/module/foundation/tests/AMessage_test.cpp
+++ b/media/module/foundation/tests/AMessage_test.cpp
@@ -17,18 +17,43 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "AData_test"
 
+#include <gmock/gmock.h>
 #include <gtest/gtest.h>
 #include <utils/RefBase.h>
 
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
 
 using namespace android;
 
-class AMessageTest : public ::testing::Test {
+using ::testing::InSequence;
+using ::testing::NiceMock;
+
+class LooperWithSettableClock : public ALooper {
+public:
+  LooperWithSettableClock() : mClockUs(0) {}
+
+  void setClockUs(int64_t nowUs) {
+    mClockUs = nowUs;
+  }
+
+  int64_t getNowUs() override {
+    return mClockUs;
+  }
+
+private:
+  int64_t mClockUs;
 };
 
+timespec millis100 = {0, 100L*1000*1000};
 
-TEST(AMessage_tests, item_manipulation) {
+class MockHandler : public AHandler {
+public:
+    MOCK_METHOD(void, onMessageReceived, (const sp<AMessage>&), (override));
+};
+
+TEST(AMessage_tests, settersAndGetters) {
   sp<AMessage> m1 = new AMessage();
 
   m1->setInt32("value", 2);
@@ -120,6 +145,171 @@
   EXPECT_TRUE(m1->findInt32("alittlelonger", &i32));
 
   EXPECT_NE(OK, m1->removeEntryByName("notpresent"));
-
 }
 
+TEST(AMessage_tests, deliversMultipleMessagesInOrderImmediately) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msgNow1 = new AMessage(0, mockHandler);
+  msgNow1->post();
+  sp<AMessage> msgNow2 = new AMessage(0, mockHandler);
+  msgNow2->post();
+
+  {
+    InSequence inSequence;
+    EXPECT_CALL(*mockHandler, onMessageReceived(msgNow1)).Times(1);
+    EXPECT_CALL(*mockHandler, onMessageReceived(msgNow2)).Times(1);
+  }
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, doesNotDeliverDelayedMessageImmediately) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msgNow = new AMessage(0, mockHandler);
+  msgNow->post();
+  sp<AMessage> msgDelayed = new AMessage(0, mockHandler);
+  msgDelayed->post(100);
+
+  EXPECT_CALL(*mockHandler, onMessageReceived(msgNow)).Times(1);
+  // note: never called
+  EXPECT_CALL(*mockHandler, onMessageReceived(msgDelayed)).Times(0);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversDelayedMessagesInSequence) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msgIn500 = new AMessage(0, mockHandler);
+  msgIn500->post(500);
+  sp<AMessage> msgNow = new AMessage(0, mockHandler);
+  msgNow->post();
+  sp<AMessage> msgIn100 = new AMessage(0, mockHandler);
+  msgIn100->post(100);
+  // not expected to be received
+  sp<AMessage> msgIn1000 = new AMessage(0, mockHandler);
+  msgIn1000->post(1000);
+
+  looper->setClockUs(500);
+  {
+    InSequence inSequence;
+
+    EXPECT_CALL(*mockHandler, onMessageReceived(msgNow)).Times(1);
+    EXPECT_CALL(*mockHandler, onMessageReceived(msgIn100)).Times(1);
+    EXPECT_CALL(*mockHandler, onMessageReceived(msgIn500)).Times(1);
+  }
+  // note: never called
+  EXPECT_CALL(*mockHandler, onMessageReceived(msgIn1000)).Times(0);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversDelayedUniqueMessage) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msg = new AMessage(0, mockHandler);
+  msg->postUnique(msg, 50);
+
+  looper->setClockUs(50);
+  EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(1);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversImmediateUniqueMessage) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  // note: we don't need to set the clock, but we do want a stable clock that doesn't advance
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msg = new AMessage(0, mockHandler);
+  msg->postUnique(msg, 0);
+
+  EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(1);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, doesNotDeliverUniqueMessageAfterRescheduleLater) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msg = new AMessage(0, mockHandler);
+  msg->postUnique(msg, 50);
+  msg->postUnique(msg, 100); // reschedule for later
+
+  looper->setClockUs(50); // if the message is correctly rescheduled, it should not be delivered
+  // Never called because the message was rescheduled to a later point in time
+  EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(0);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversUniqueMessageAfterRescheduleEarlier) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msg = new AMessage(0, mockHandler);
+  msg->postUnique(msg, 100);
+  msg->postUnique(msg, 50); // reschedule to fire earlier
+
+  looper->setClockUs(50); // if the message is rescheduled correctly, it should be delivered
+  EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(1);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversSameMessageTwice) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msg = new AMessage(0, mockHandler);
+  msg->post(50);
+  msg->post(100);
+
+  looper->setClockUs(100);
+  EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(2);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+// When two messages are posted with the same token, only the most recently posted one is
+// delivered after rescheduling.
+TEST(AMessage_tests, deliversUniqueMessageOnce) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msg1 = new AMessage(0, mockHandler);
+  msg1->postUnique(msg1, 50);
+  sp<AMessage> msg2 = new AMessage(0, mockHandler);
+  msg2->postUnique(msg1, 75); // note, using the same token as msg1
+
+  looper->setClockUs(100);
+  EXPECT_CALL(*mockHandler, onMessageReceived(msg1)).Times(0);
+  EXPECT_CALL(*mockHandler, onMessageReceived(msg2)).Times(1);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, postUnique_withNullToken_returnsInvalidArgument) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<ALooper> looper = new ALooper();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msg = new AMessage(0, mockHandler);
+  EXPECT_EQ(msg->postUnique(nullptr, 0), -EINVAL);
+}
diff --git a/media/module/foundation/tests/Android.bp b/media/module/foundation/tests/Android.bp
index e72ce43..c409dd2 100644
--- a/media/module/foundation/tests/Android.bp
+++ b/media/module/foundation/tests/Android.bp
@@ -20,10 +20,14 @@
 
     shared_libs: [
         "liblog",
-        "libstagefright_foundation",
         "libutils",
     ],
 
+    static_libs: [
+        "libstagefright_foundation",
+        "libgmock",
+    ],
+
     srcs: [
         "AData_test.cpp",
         "AMessage_test.cpp",
diff --git a/media/module/mpeg2ts/ATSParser.cpp b/media/module/mpeg2ts/ATSParser.cpp
index 1482072..6aeea3b 100644
--- a/media/module/mpeg2ts/ATSParser.cpp
+++ b/media/module/mpeg2ts/ATSParser.cpp
@@ -556,7 +556,15 @@
             if (descriptor_length > ES_info_length) {
                 return ERROR_MALFORMED;
             }
-            if (descriptor_tag == DESCRIPTOR_CA && descriptor_length >= 4) {
+
+            // The DTS descriptor is used in the PSI PMT to identify streams which carry
+            // DTS audio (core only). If a DTS descriptor is present, DTS-HD or DTS-UHD
+            // descriptors shall not be present in the same ES_info descriptor loop.
+            if (descriptor_tag == DESCRIPTOR_DTS) {
+                info.mType = STREAMTYPE_DTS;
+                ES_info_length -= descriptor_length;
+                br->skipBits(descriptor_length * 8);
+            } else if (descriptor_tag == DESCRIPTOR_CA && descriptor_length >= 4) {
                 hasStreamCA = true;
                 streamCA.mSystemID = br->getBits(16);
                 streamCA.mPID = br->getBits(16) & 0x1fff;
@@ -575,6 +583,16 @@
                 if (descTagExt == EXT_DESCRIPTOR_DVB_AC4) {
                     info.mTypeExt = EXT_DESCRIPTOR_DVB_AC4;
                     br->skipBits(descriptor_length * 8);
+                } else if (descTagExt == EXT_DESCRIPTOR_DVB_DTS_HD) {
+                    // DTS-HD extended descriptor, which can accommodate core-only formats
+                    // as well as extension-only and core + extension combinations.
+                    info.mTypeExt = EXT_DESCRIPTOR_DVB_DTS_HD;
+                    br->skipBits(descriptor_length * 8);
+                } else if (descTagExt == EXT_DESCRIPTOR_DVB_DTS_UHD) {
+                    // The DTS-UHD descriptor is used in the PSI PMT to identify streams
+                    // which carry DTS-UHD audio.
+                    info.mTypeExt = EXT_DESCRIPTOR_DVB_DTS_UHD;
+                    br->skipBits(descriptor_length * 8);
                 } else if (descTagExt == EXT_DESCRIPTOR_DVB_AUDIO_PRESELECTION &&
                            descriptor_length >= 1) {
                     // DVB BlueBook A038 Table 110
@@ -920,9 +938,17 @@
             mode = ElementaryStreamQueue::EAC3;
             break;
 
+        case STREAMTYPE_DTS:
+            mode = ElementaryStreamQueue::DTS;
+            break;
+
         case STREAMTYPE_PES_PRIVATE_DATA:
             if (mStreamTypeExt == EXT_DESCRIPTOR_DVB_AC4) {
                 mode = ElementaryStreamQueue::AC4;
+            } else if (mStreamTypeExt == EXT_DESCRIPTOR_DVB_DTS_HD) {
+                mode = ElementaryStreamQueue::DTS_HD;
+            } else if (mStreamTypeExt == EXT_DESCRIPTOR_DVB_DTS_UHD) {
+                mode = ElementaryStreamQueue::DTS_UHD;
             }
             break;
 
@@ -1158,9 +1184,12 @@
         case STREAMTYPE_EAC3:
         case STREAMTYPE_AAC_ENCRYPTED:
         case STREAMTYPE_AC3_ENCRYPTED:
+        case STREAMTYPE_DTS:
             return true;
         case STREAMTYPE_PES_PRIVATE_DATA:
-            return mStreamTypeExt == EXT_DESCRIPTOR_DVB_AC4;
+            return (mStreamTypeExt == EXT_DESCRIPTOR_DVB_AC4
+                    || mStreamTypeExt == EXT_DESCRIPTOR_DVB_DTS_HD
+                    || mStreamTypeExt == EXT_DESCRIPTOR_DVB_DTS_UHD);
 
         default:
             return false;
diff --git a/media/module/mpeg2ts/ESQueue.cpp b/media/module/mpeg2ts/ESQueue.cpp
index 192ba77..2dc7b0a 100644
--- a/media/module/mpeg2ts/ESQueue.cpp
+++ b/media/module/mpeg2ts/ESQueue.cpp
@@ -362,6 +362,436 @@
     return OK;
 }
 
+#define RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitstream, size) \
+    do { \
+        if ((bitstream).numBitsLeft() < (size)) { \
+            ALOGE("Not enough bits left for further parsing"); \
+            return ERROR_MALFORMED; } \
+    } while (0)
+
+// Parse DTS Digital Surround and DTS Express(LBR) stream header
+static status_t parseDTSHDSyncFrame(
+    const uint8_t *ptr, size_t size, unsigned &frameSize, sp<MetaData> *metaData) {
+    static const unsigned channelCountTable[] = {1, 2, 2, 2, 2, 3, 3, 4,
+                                                 4, 5, 6, 6, 6, 7, 8, 8};
+    static const unsigned samplingRateTableCoreSS[] = {0, 8000, 16000, 32000, 0, 0, 11025, 22050,
+                                                       44100, 0, 0, 12000, 24000, 48000, 0, 0};
+    static const unsigned samplingRateTableExtSS[] = {8000, 16000, 32000, 64000, 128000,
+                                                      22050, 44100, 88200, 176400, 352800,
+                                                      12000, 24000, 48000, 96000, 192000, 384000};
+
+    const uint32_t DTSHD_SYNC_CORE_16BIT_BE = 0x7ffe8001;
+    const uint32_t DTSHD_SYNC_EXSS_16BIT_BE = 0x64582025;
+
+    uint32_t numChannels = 0, samplingRate = 0;
+    bool isLBR = false;
+
+    ABitReader bits(ptr, size);
+
+    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 32);
+    uint32_t dtshdSyncWord = bits.getBits(32);
+
+    // Expecting DTS Digital Surround or DTS Express(LBR) streams only
+    if (dtshdSyncWord == DTSHD_SYNC_CORE_16BIT_BE) { // DTS Digital Surround Header
+        RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (1 + 5 + 1 + 7 + 14 + 6 + 4 + 15 + 2));
+
+        // FTYPE, SHORT, CRC, NBLKS
+        bits.skipBits(1 + 5 + 1 + 7);
+
+        frameSize = bits.getBits(14) + 1;
+        uint32_t amode = bits.getBits(6);
+        uint32_t freqIndex = bits.getBits(4);
+
+        // RATE, FIXEDBIT, DYNF, TIMEF, AUXF, HDCD, EXT_AUDIO_ID, EXT_AUDIO, ASPF
+        bits.skipBits(5 + 1 + 1 + 1 + 1 + 1 + 3 + 1 + 1);
+
+        uint32_t lfeFlag = bits.getBits(2);
+        numChannels = (amode <= 15) ? channelCountTable[amode] : 0;
+        numChannels += ((lfeFlag == 1) || (lfeFlag == 2)) ? 1 : 0;
+        samplingRate = (freqIndex <= 15) ? samplingRateTableCoreSS[freqIndex] : 0;
+
+        isLBR = false;
+    } else if (dtshdSyncWord == DTSHD_SYNC_EXSS_16BIT_BE) { // DTS Express(LBR) Header
+        RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (8 + 2 + 1));
+
+        uint32_t extHeadersize, extSSFsize;
+        uint32_t numAudioPresent = 1, numAssets = 1;
+        uint32_t nuActiveExSSMask[8];
+
+        // userDefinedBits
+        bits.skipBits(8);
+
+        uint32_t extSSIndex = bits.getBits(2);
+        uint32_t headerSizeType = bits.getBits(1);
+
+        if (headerSizeType == 0) {
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (8 + 16));
+
+            extHeadersize = bits.getBits(8) + 1;
+            extSSFsize = bits.getBits(16) + 1;
+        } else {
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (12 + 20));
+
+            extHeadersize = bits.getBits(12) + 1;
+            extSSFsize = bits.getBits(20) + 1;
+        }
+
+        RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (1));
+
+        uint32_t staticFieldsPresent = bits.getBits(1);
+
+        if (staticFieldsPresent) {
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (2 + 3 + 1));
+
+            // nuRefClockCode, nuExSSFrameDurationCode
+            bits.skipBits(2 + 3);
+
+            if (bits.getBits(1)) {
+                RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (32 + 4));
+
+                bits.skipBits(32 + 4);
+            }
+
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (3 + 3));
+
+            // numAudioPresent, numAssets
+            bits.skipBits(3 + 3);
+
+            for (uint32_t nAuPr = 0; nAuPr < numAudioPresent; nAuPr++) {
+                RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (extSSIndex + 1));
+
+                nuActiveExSSMask[nAuPr] = bits.getBits(extSSIndex + 1);
+            }
+
+            for (uint32_t nAuPr = 0; nAuPr < numAudioPresent; nAuPr++) {
+                for (uint32_t nSS = 0; nSS < extSSIndex + 1; nSS++) {
+                    if (((nuActiveExSSMask[nAuPr] >> nSS) & 0x1) == 1) {
+                        RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 8);
+
+                        // nuActiveAssetMask
+                        bits.skipBits(8);
+                    }
+                }
+            }
+
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 1);
+
+            // bMixMetadataEnbl
+            if (bits.getBits(1)) {
+                RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (2 + 2 + 2));
+
+                // nuMixMetadataAdjLevel
+                bits.skipBits(2);
+
+                uint32_t bits4MixOutMask = (bits.getBits(2) + 1) << 2;
+                uint32_t numMixOutConfigs = bits.getBits(2) + 1;
+
+                for (int ns = 0; ns < numMixOutConfigs; ns++) {
+                    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, bits4MixOutMask);
+
+                    // nuMixOutChMask
+                    bits.skipBits(bits4MixOutMask);
+                }
+            }
+        }
+
+        for (int nAst = 0; nAst < numAssets; nAst++) {
+            int bits4ExSSFsize = (headerSizeType == 0) ? 16 : 20;
+
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, bits4ExSSFsize);
+
+            bits.skipBits(bits4ExSSFsize);
+        }
+
+        /* Asset descriptor */
+        for (int nAst = 0; nAst < numAssets; nAst++) {
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (9 + 3));
+
+            // nuAssetDescriptFsize, nuAssetIndex
+            bits.skipBits(9 + 3);
+
+            if (staticFieldsPresent) {
+                RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 1);
+
+                // bAssetTypeDescrPresent
+                if (bits.getBits(1)) {
+                    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 4);
+
+                    // nuAssetTypeDescriptor
+                    bits.skipBits(4);
+                }
+
+                RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 1);
+
+                // bLanguageDescrPresent
+                if (bits.getBits(1)) {
+                    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 24);
+
+                    // LanguageDescriptor
+                    bits.skipBits(24);
+                }
+
+                RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 1);
+
+                // bInfoTextPresent
+                if (bits.getBits(1)) {
+                    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 10);
+
+                    uint32_t nuInfoTextByteSize = bits.getBits(10) + 1;
+
+                    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (nuInfoTextByteSize * 8));
+
+                    // InfoTextString
+                    bits.skipBits(nuInfoTextByteSize * 8);
+                }
+
+                RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (5 + 4 + 8));
+
+                // nuBitResolution
+                bits.skipBits(5);
+
+                samplingRate = samplingRateTableExtSS[bits.getBits(4)];
+                numChannels = bits.getBits(8) + 1;
+            }
+        }
+
+        frameSize = extHeadersize + extSSFsize;
+        isLBR = true;
+    } else {
+        ALOGE("No valid sync word in DTS/DTSHD header");
+        return ERROR_MALFORMED;
+    }
+
+    if (metaData != NULL) {
+        if (isLBR) {
+            (*metaData)->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_DTS_HD);
+            (*metaData)->setInt32(kKeyAudioProfile, 0x2); // CodecProfileLevel.DTS_HDProfileLBR
+        } else {
+            (*metaData)->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_DTS);
+        }
+        (*metaData)->setInt32(kKeyChannelCount, numChannels);
+        (*metaData)->setInt32(kKeySampleRate, samplingRate);
+    }
+    return OK;
+}
+
+static status_t extractVarLenBitFields(
+    ABitReader *bits, size_t *bitsUsed, uint32_t *value,
+    unsigned ucTable[], bool extractAndAddFlag) {
+
+    static const unsigned bitsUsedTbl[8] = {1, 1, 1, 1, 2, 2, 3, 3}; // prefix code lengths
+    static const unsigned indexTbl[8] = {0, 0, 0, 0, 1, 1, 2, 3}; // code to prefix code index map
+
+    /* Clone the bitstream */
+    ABitReader bitStream(bits->data(), bits->numBitsLeft() / 8);
+    ABitReader bitstreamClone(bits->data(), bits->numBitsLeft() / 8);
+
+    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitstreamClone, 3);
+
+    unsigned code = bitstreamClone.getBits(3);
+    unsigned totalBitsUsed = bitsUsedTbl[code];
+    unsigned unIndex = indexTbl[code];
+
+    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitStream, totalBitsUsed);
+
+    bitStream.skipBits(totalBitsUsed);
+
+    uint32_t unValue = 0;
+    if (ucTable[unIndex] > 0) {
+        if (extractAndAddFlag) {
+            for (unsigned un = 0; un < unIndex; un++) {
+                unValue += (1 << ucTable[un]);
+            }
+
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitStream, ucTable[unIndex]);
+
+            unValue += bitStream.getBits(ucTable[unIndex]);
+            totalBitsUsed += ucTable[unIndex];
+        } else {
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitStream, ucTable[unIndex]);
+
+            unValue += bitStream.getBits(ucTable[unIndex]);
+            totalBitsUsed += ucTable[unIndex];
+        }
+    }
+
+    *bitsUsed = (size_t)totalBitsUsed;
+    *value = unValue;
+    return OK;
+}
+
+// Parse DTS UHD Profile-2 stream header
+static status_t parseDTSUHDSyncFrame(
+    const uint8_t *ptr, size_t size, unsigned &frameSize, sp<MetaData> *metaData) {
+
+    static const uint32_t DTSUHD_SYNC_CORE_16BIT_BE = 0x40411BF2;
+    static const uint32_t DTSUHD_NONSYNC_CORE_16BIT_BE = 0x71C442E8;
+
+    unsigned audioSamplRate = 0;
+
+    ABitReader bits(ptr, size);
+
+    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 32);
+
+    uint32_t syncWord = bits.getBits(32);
+
+    bool isSyncFrameFlag = false;
+    switch (syncWord) {
+        case DTSUHD_SYNC_CORE_16BIT_BE:
+            isSyncFrameFlag = true;
+            break;
+        case DTSUHD_NONSYNC_CORE_16BIT_BE:
+            isSyncFrameFlag = false;
+            break;
+        default:
+            ALOGE("No valid sync word in DTSUHD header");
+            return ERROR_MALFORMED; // invalid sync word
+    }
+
+    unsigned uctable1[4] = { 5, 8, 10, 12 };
+    uint32_t sizeOfFTOCPayload = 0;
+    size_t nuBitsUsed = 0;
+    status_t status = OK;
+
+    status = extractVarLenBitFields(&bits, &nuBitsUsed, &sizeOfFTOCPayload, uctable1, true);
+
+    if (status != OK) {
+        ALOGE("Failed to extractVarLenBitFields from DTSUHD header");
+        return ERROR_MALFORMED;
+    }
+
+    bits.skipBits(nuBitsUsed);
+
+    if (isSyncFrameFlag) {
+        RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (1 + 2 + 3 + 2 + 1));
+
+        // FullChannelBasedMixFlag, ETSI TS 103 491 V1.2.1, Section 6.4.6.1
+        if (!(bits.getBits(1))) {
+            // This implementation only supports full channel mask-based
+            // audio presentation (i.e. 2.0, 5.1, 11.1 mix without objects)
+            ALOGE("Objects not supported, only DTSUHD full channel mask-based mix");
+            return ERROR_MALFORMED;
+        }
+
+        // BaseDuration, FrameDuration
+        bits.skipBits(2 + 3);
+
+        unsigned clockRateIndex = bits.getBits(2);
+        unsigned clockRateHertz = 0;
+
+        switch (clockRateIndex) {
+            case 0:
+                clockRateHertz = 32000;
+                break;
+            case 1:
+                clockRateHertz = 44100;
+                break;
+            case 2:
+                clockRateHertz = 48000;
+                break;
+            default:
+                ALOGE("Invalid clockRateIndex in DTSUHD header");
+                return ERROR_MALFORMED;
+        }
+
+        if (bits.getBits(1)) {
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (32 + 4));
+
+            bits.skipBits(32 + 4);
+        }
+
+        RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 2);
+
+        unsigned samplRateMultiplier = (1 << bits.getBits(2));
+        audioSamplRate = clockRateHertz * samplRateMultiplier;
+    }
+
+    uint32_t chunkPayloadBytes = 0;
+    int numOfMDChunks = isSyncFrameFlag ? 1 : 0; // Metadata chunks
+    for (int nmdc = 0; nmdc < numOfMDChunks; nmdc++) {
+        unsigned uctable2[4] = {6, 9, 12, 15};
+        uint32_t nuMDChunkSize = 0;
+        nuBitsUsed = 0;
+
+        status = extractVarLenBitFields(&bits, &nuBitsUsed, &nuMDChunkSize, uctable2, true);
+        if (status != OK) {
+            ALOGE("Failed to extractVarLenBitFields from DTSUHD header");
+            return ERROR_MALFORMED;
+        }
+
+        bits.skipBits(nuBitsUsed);
+
+        if (nuMDChunkSize > 32767) {
+            ALOGE("Unsupported metadata chunk size in DTSUHD header");
+            return ERROR_MALFORMED;
+        }
+        chunkPayloadBytes += nuMDChunkSize;
+    }
+
+    // Only one audio chunk is supported
+    int numAudioChunks = 1;
+    for (int nac = 0; nac < numAudioChunks; nac++) {
+        uint32_t acID = 256, nuAudioChunkSize = 0;
+
+        // isSyncFrameFlag means that ACID is present
+        if (isSyncFrameFlag) {
+            unsigned uctable3[4] = {2, 4, 6, 8};
+            nuBitsUsed = 0;
+
+            status = extractVarLenBitFields(&bits, &nuBitsUsed, &acID, uctable3, true);
+
+            if (status != OK) {
+                ALOGE("Failed to extractVarLenBitFields from DTSUHD header");
+                return ERROR_MALFORMED;
+            }
+
+            bits.skipBits(nuBitsUsed);
+        }
+
+        nuBitsUsed = 0;
+        if (acID == 0) {
+            nuAudioChunkSize = 0;
+        } else {
+            unsigned uctable4[4] = {9, 11, 13, 16};
+
+            status = extractVarLenBitFields(&bits, &nuBitsUsed, &nuAudioChunkSize, uctable4, true);
+
+            if (status != OK) {
+                ALOGE("Failed to extractVarLenBitFields from DTSUHD header");
+                return ERROR_MALFORMED;
+            }
+        }
+
+        if (nuAudioChunkSize > 65535) {
+            ALOGE("Unsupported audio chunk size in DTSUHD header");
+            return ERROR_MALFORMED;
+        }
+
+        chunkPayloadBytes += nuAudioChunkSize;
+    }
+
+    frameSize = (sizeOfFTOCPayload + 1) + chunkPayloadBytes;
+
+    if (metaData != NULL) {
+        (*metaData)->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_DTS_UHD);
+        (*metaData)->setInt32(kKeyAudioProfile, 0x2); // CodecProfileLevel.DTS_UHDProfileP2
+        (*metaData)->setInt32(kKeyChannelCount, 2); // Setting default channel count as stereo
+        (*metaData)->setInt32(kKeySampleRate, audioSamplRate);
+    }
+
+    return OK;
+}
+
+static status_t isSeeminglyValidDTSHDHeader(const uint8_t *ptr, size_t size, unsigned &frameSize)
+{
+    return parseDTSHDSyncFrame(ptr, size, frameSize, NULL);
+}
+
+static status_t isSeeminglyValidDTSUHDHeader(const uint8_t *ptr, size_t size, unsigned &frameSize)
+{
+    return parseDTSUHDSyncFrame(ptr, size, frameSize, NULL);
+}
+
 static status_t IsSeeminglyValidAC4Header(const uint8_t *ptr, size_t size, unsigned &frameSize) {
     return parseAC4SyncFrame(ptr, size, frameSize, NULL);
 }
@@ -655,6 +1085,70 @@
                 break;
             }
 
+            case DTS: //  Checking for DTS or DTS-HD syncword
+            case DTS_HD:
+            {
+                uint8_t *ptr = (uint8_t *)data;
+                unsigned frameSize = 0;
+                ssize_t startOffset = -1;
+
+                for (size_t i = 0; i < size; ++i) {
+                    if (isSeeminglyValidDTSHDHeader(&ptr[i], size - i, frameSize) == OK) {
+                        startOffset = i;
+                        break;
+                    }
+                }
+
+                if (startOffset < 0) {
+                    return ERROR_MALFORMED;
+                }
+                if (startOffset > 0) {
+                    ALOGI("found something resembling a DTS-HD syncword at "
+                          "offset %zd",
+                          startOffset);
+                }
+
+                if (frameSize != size - startOffset) {
+                    ALOGV("DTS-HD frame size is %u bytes, while the buffer size is %zd bytes.",
+                          frameSize, size - startOffset);
+                }
+
+                data = &ptr[startOffset];
+                size -= startOffset;
+                break;
+            }
+
+            case DTS_UHD:
+            {
+                uint8_t *ptr = (uint8_t *)data;
+                ssize_t startOffset = -1;
+                unsigned frameSize = 0;
+
+                for (size_t i = 0; i < size; ++i) {
+                    if (isSeeminglyValidDTSUHDHeader(&ptr[i], size - i, frameSize) == OK) {
+                        startOffset = i;
+                        break;
+                    }
+                }
+
+                if (startOffset < 0) {
+                    return ERROR_MALFORMED;
+                }
+                if (startOffset > 0) {
+                    ALOGI("found something resembling a DTS-UHD syncword "
+                          "at offset %zd",
+                          startOffset);
+                }
+
+                if (frameSize != size - startOffset) {
+                    ALOGV("DTS-UHD frame size is %u bytes, while the buffer size is %zd bytes.",
+                          frameSize, size - startOffset);
+                }
+                data = &ptr[startOffset];
+                size -= startOffset;
+                break;
+            }
+
             case PCM_AUDIO:
             case METADATA:
             {
@@ -928,6 +1422,11 @@
             return dequeueAccessUnitPCMAudio();
         case METADATA:
             return dequeueAccessUnitMetadata();
+        case DTS: // Using same dequeue function for both DTS and DTS-HD types.
+        case DTS_HD:
+            return dequeueAccessUnitDTSOrDTSHD();
+        case DTS_UHD:
+            return dequeueAccessUnitDTSUHD();
         default:
             if (mMode != MPEG_AUDIO) {
                 ALOGE("Unknown mode");
@@ -937,6 +1436,113 @@
     }
 }
 
+sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitDTSOrDTSHD() {
+    unsigned syncStartPos = 0; // in bytes
+    unsigned payloadSize = 0;
+    sp<MetaData> format = new MetaData;
+
+    ALOGV("dequeueAccessUnitDTSOrDTSHD[%d]: mBuffer %p(%zu)", mAUIndex,
+          mBuffer->data(), mBuffer->size());
+
+    while (true) {
+        if (syncStartPos + 4 >= mBuffer->size()) {
+            return NULL;
+        }
+        uint8_t *ptr = mBuffer->data() + syncStartPos;
+        size_t size = mBuffer->size() - syncStartPos;
+        status_t status = parseDTSHDSyncFrame(ptr, size, payloadSize, &format);
+        if (status == 0) {
+            break;
+        }
+        ++syncStartPos;
+    }
+
+    if (mBuffer->size() < syncStartPos + payloadSize) {
+        ALOGV("Not enough buffer size for DTS/DTS-HD");
+        return NULL;
+    }
+
+    if (mFormat == NULL) {
+        mFormat = format;
+    }
+
+    int64_t timeUs = fetchTimestamp(syncStartPos + payloadSize);
+    if (timeUs < 0LL) {
+        ALOGE("negative timeUs");
+        return NULL;
+    }
+    mAUIndex++;
+
+    sp<ABuffer> accessUnit = new ABuffer(syncStartPos + payloadSize);
+    memcpy(accessUnit->data(), mBuffer->data(), syncStartPos + payloadSize);
+
+    accessUnit->meta()->setInt64("timeUs", timeUs);
+    accessUnit->meta()->setInt32("isSync", 1);
+
+    memmove(
+        mBuffer->data(),
+        mBuffer->data() + syncStartPos + payloadSize,
+        mBuffer->size() - syncStartPos - payloadSize);
+
+    mBuffer->setRange(0, mBuffer->size() - syncStartPos - payloadSize);
+
+    return accessUnit;
+}
+
+sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitDTSUHD()
+{
+    unsigned syncStartPos = 0; // in bytes
+    unsigned payloadSize = 0;
+    sp<MetaData> format = new MetaData;
+
+    ALOGV("dequeueAccessUnitDTSUHD[%d]: mBuffer %p(%zu)", mAUIndex,
+          mBuffer->data(), mBuffer->size());
+
+    while (true) {
+        if (syncStartPos + 4 >= mBuffer->size()) {
+            return NULL;
+        }
+        uint8_t *ptr = mBuffer->data() + syncStartPos;
+        size_t size = mBuffer->size() - syncStartPos;
+        status_t status = parseDTSUHDSyncFrame(ptr, size, payloadSize, &format);
+        if (status == 0) {
+            break;
+        }
+        ++syncStartPos;
+    }
+
+    if (mBuffer->size() < syncStartPos + payloadSize) {
+        ALOGV("Not enough buffer size for DTS-UHD");
+        return NULL;
+    }
+
+    if (mFormat == NULL) {
+        mFormat = format;
+    }
+
+    int64_t timeUs = fetchTimestamp(syncStartPos + payloadSize);
+    if (timeUs < 0LL) {
+        ALOGE("negative timeUs");
+        return NULL;
+    }
+    mAUIndex++;
+
+    sp<ABuffer> accessUnit = new ABuffer(syncStartPos + payloadSize);
+    memcpy(accessUnit->data(), mBuffer->data(), syncStartPos + payloadSize);
+
+    accessUnit->meta()->setInt64("timeUs", timeUs);
+    accessUnit->meta()->setInt32("isSync", 1);
+
+    memmove(
+        mBuffer->data(),
+        mBuffer->data() + syncStartPos + payloadSize,
+        mBuffer->size() - syncStartPos - payloadSize);
+
+    mBuffer->setRange(0, mBuffer->size() - syncStartPos - payloadSize);
+
+    return accessUnit;
+}
+
 sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitEAC3() {
     unsigned syncStartPos = 0;  // in bytes
     unsigned payloadSize = 0;
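For orientation on extractVarLenBitFields() above: the first one to three bits form a truncated-unary prefix that selects one of four field widths (e.g. uctable1 = {5, 8, 10, 12}), and when the extract-and-add flag is set the four value ranges are stacked so they do not overlap. A standalone sketch of that scheme follows; the helper name and the raw-byte bit reader are assumptions for illustration and deliberately avoid ABitReader.

#include <cstddef>
#include <cstdint>

// Decode one variable-length field from `data`, starting at bit position *pos.
// widths[] plays the role of the uctable arrays; `stacked` mirrors extractAndAddFlag.
static uint32_t getVarLenField(const uint8_t *data, size_t *pos,
                               const unsigned widths[4], bool stacked) {
    auto getBit = [&](size_t p) -> unsigned {
        return (data[p / 8] >> (7 - (p % 8))) & 1;
    };

    // Prefix: 0 -> index 0, 10 -> index 1, 110 -> index 2, 111 -> index 3.
    unsigned index = 0;
    while (index < 3 && getBit((*pos)++) == 1) {
        ++index;
    }

    // Read the selected fixed-width field.
    uint32_t field = 0;
    for (unsigned i = 0; i < widths[index]; ++i) {
        field = (field << 1) | getBit((*pos)++);
    }

    uint32_t value = field;
    if (stacked) {
        // Offset by the size of every smaller range so ranges don't overlap,
        // e.g. with widths {5, 8, 10, 12} an index-2 value is offset by 2^5 + 2^8.
        for (unsigned i = 0; i < index; ++i) {
            value += 1u << widths[i];
        }
    }
    return value;
}

The original consumes the prefix and field bits from a cloned reader and reports the total via *bitsUsed so the caller can advance the real ABitReader; the sketch simply advances *pos directly.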
diff --git a/media/module/mpeg2ts/include/mpeg2ts/ATSParser.h b/media/module/mpeg2ts/include/mpeg2ts/ATSParser.h
index 49578d3..b658c5a 100644
--- a/media/module/mpeg2ts/include/mpeg2ts/ATSParser.h
+++ b/media/module/mpeg2ts/include/mpeg2ts/ATSParser.h
@@ -157,6 +157,9 @@
         STREAMTYPE_LPCM_AC3             = 0x83,
         STREAMTYPE_EAC3                 = 0x87,
 
+        // DTS audio stream type which contains only Core substream
+        STREAMTYPE_DTS                  = 0x8A,
+
         //Sample Encrypted types
         STREAMTYPE_H264_ENCRYPTED       = 0xDB,
         STREAMTYPE_AAC_ENCRYPTED        = 0xCF,
@@ -168,6 +171,7 @@
         DESCRIPTOR_CA                   = 0x09,
 
         // DVB BlueBook A038 Table 12
+        DESCRIPTOR_DTS                  = 0x7B,
         DESCRIPTOR_DVB_EXTENSION        = 0x7F,
     };
 
@@ -175,6 +179,8 @@
     enum {
         EXT_DESCRIPTOR_DVB_AC4                  = 0x15,
         EXT_DESCRIPTOR_DVB_AUDIO_PRESELECTION   = 0x19,
+        EXT_DESCRIPTOR_DVB_DTS_HD               = 0x0E,
+        EXT_DESCRIPTOR_DVB_DTS_UHD              = 0x21,
         EXT_DESCRIPTOR_DVB_RESERVED_MAX         = 0x7F,
     };
 
diff --git a/media/module/mpeg2ts/include/mpeg2ts/ESQueue.h b/media/module/mpeg2ts/include/mpeg2ts/ESQueue.h
index a06bd6a..550a0e4 100644
--- a/media/module/mpeg2ts/include/mpeg2ts/ESQueue.h
+++ b/media/module/mpeg2ts/include/mpeg2ts/ESQueue.h
@@ -45,6 +45,9 @@
         MPEG4_VIDEO,
         PCM_AUDIO,
         METADATA,
+        DTS,
+        DTS_HD,
+        DTS_UHD,
     };
 
     enum Flags {
@@ -125,6 +128,8 @@
     sp<ABuffer> dequeueAccessUnitMPEG4Video();
     sp<ABuffer> dequeueAccessUnitPCMAudio();
     sp<ABuffer> dequeueAccessUnitMetadata();
+    sp<ABuffer> dequeueAccessUnitDTSOrDTSHD();
+    sp<ABuffer> dequeueAccessUnitDTSUHD();
 
     // consume a logical (compressed) access unit of size "size",
     // returns its timestamp in us (or -1 if no time information).
diff --git a/media/mtp/tests/MtpFuzzer/mtp_handle_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_handle_fuzzer.cpp
index 676345a..7dcdc3f 100644
--- a/media/mtp/tests/MtpFuzzer/mtp_handle_fuzzer.cpp
+++ b/media/mtp/tests/MtpFuzzer/mtp_handle_fuzzer.cpp
@@ -128,10 +128,10 @@
         std::unique_ptr<IMtpHandle> handle;
         if (mFdp.ConsumeBool()) {
             std::unique_ptr<IMtpHandle> mtpCompactHandle(new MtpFfsCompatHandle(controlFd));
-            handle = move(mtpCompactHandle);
+            handle = std::move(mtpCompactHandle);
         } else {
             std::unique_ptr<IMtpHandle> mtpHandle(new MtpFfsHandle(controlFd));
-            handle = move(mtpHandle);
+            handle = std::move(mtpHandle);
         }
 
         int32_t mtpHandle = mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxMtpHandleAPI);
diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp
index 5005365..0df7636 100644
--- a/media/ndk/NdkMediaDrm.cpp
+++ b/media/ndk/NdkMediaDrm.cpp
@@ -758,6 +758,9 @@
 EXPORT
 media_status_t AMediaDrm_setPropertyByteArray(AMediaDrm *mObj,
         const char *propertyName, const uint8_t *value, size_t valueSize) {
+    if (!mObj || mObj->mDrm == NULL) {
+        return AMEDIA_ERROR_INVALID_OBJECT;
+    }
 
     Vector<uint8_t> byteArray;
     byteArray.appendArray(value, valueSize);
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index da199c4..8437222 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -40,11 +40,11 @@
     int32_t state;
     int32_t score = INVALID_ADJ;
     status_t err = service->getProcessStatesAndOomScoresFromPids(length, &pid, &state, &score);
+    ALOGV("%s: pid:%d state:%d score:%d err:%d", __FUNCTION__, pid, state, score, err);
     if (err != OK) {
         ALOGE("getProcessStatesAndOomScoresFromPids failed");
         return false;
     }
-    ALOGV("pid %d state %d score %d", pid, state, score);
     if (score <= NATIVE_ADJ) {
         std::scoped_lock lock{mOverrideLock};
 
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index c05aac1..ba7c6b6 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -4585,12 +4585,9 @@
 // ----------------------------------------------------------------------------
 
 status_t AudioFlinger::onTransactWrapper(TransactionCode code,
-                                         const Parcel& data,
-                                         uint32_t flags,
+                                         [[maybe_unused]] const Parcel& data,
+                                         [[maybe_unused]] uint32_t flags,
                                          const std::function<status_t()>& delegate) {
-    (void) data;
-    (void) flags;
-
     // make sure transactions reserved to AudioPolicyManager do not come from other processes
     switch (code) {
         case TransactionCode::SET_STREAM_VOLUME:
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index 26bd92d..61dd3f2 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -79,8 +79,6 @@
     mMasterMono(false),
     mThreadIoHandle(parentIoHandle)
 {
-    (void)mThreadIoHandle; // prevent unused warning, see C++17 [[maybe_unused]]
-
     // FIXME pass sInitial as parameter to base class constructor, and make it static local
     mPrevious = &sInitial;
     mCurrent = &sInitial;
diff --git a/services/audioflinger/FastMixer.h b/services/audioflinger/FastMixer.h
index 97ab635..d71519f 100644
--- a/services/audioflinger/FastMixer.h
+++ b/services/audioflinger/FastMixer.h
@@ -107,7 +107,8 @@
     std::atomic<float> mMasterBalance{};
     std::atomic_int_fast64_t mBoottimeOffset;
 
-    const audio_io_handle_t mThreadIoHandle; // parent thread id for debugging purposes
+    // parent thread id for debugging purposes
+    [[maybe_unused]] const audio_io_handle_t mThreadIoHandle;
 #ifdef TEE_SINK
     NBAIO_Tee       mTee;
 #endif
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 45dd258..b54b41f 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -313,12 +313,19 @@
                         patch->sources[0].config_mask & AUDIO_PORT_CONFIG_FLAGS ?
                         patch->sources[0].flags.input : AUDIO_INPUT_FLAG_NONE;
                 audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
+                audio_source_t source = AUDIO_SOURCE_MIC;
+                // For telephony patches, propagate voice communication use case to record side
+                if (patch->num_sources == 2
+                        && patch->sources[1].ext.mix.usecase.stream
+                                == AUDIO_STREAM_VOICE_CALL) {
+                    source = AUDIO_SOURCE_VOICE_COMMUNICATION;
+                }
                 sp<ThreadBase> thread = mAudioFlinger.openInput_l(srcModule,
                                                                     &input,
                                                                     &config,
                                                                     device,
                                                                     address,
-                                                                    AUDIO_SOURCE_MIC,
+                                                                    source,
                                                                     flags,
                                                                     outputDevice,
                                                                     outputDeviceAddress);
@@ -516,9 +523,14 @@
     audio_output_flags_t outputFlags = mAudioPatch.sinks[0].config_mask & AUDIO_PORT_CONFIG_FLAGS ?
             mAudioPatch.sinks[0].flags.output : AUDIO_OUTPUT_FLAG_NONE;
     audio_stream_type_t streamType = AUDIO_STREAM_PATCH;
+    audio_source_t source = AUDIO_SOURCE_DEFAULT;
     if (mAudioPatch.num_sources == 2 && mAudioPatch.sources[1].type == AUDIO_PORT_TYPE_MIX) {
         // "reuse one existing output mix" case
         streamType = mAudioPatch.sources[1].ext.mix.usecase.stream;
+        // For telephony patches, propagate voice communication use case to record side
+        if (streamType == AUDIO_STREAM_VOICE_CALL) {
+            source = AUDIO_SOURCE_VOICE_COMMUNICATION;
+        }
     }
     if (mPlayback.thread()->hasFastMixer()) {
         // Create a fast track if the playback thread has fast mixer to get better performance.
@@ -546,7 +558,8 @@
                                                  inChannelMask,
                                                  format,
                                                  frameCount,
-                                                 inputFlags);
+                                                 inputFlags,
+                                                 source);
     } else {
         // use a pseudo LCM between input and output framecount
         int playbackShift = __builtin_ctz(playbackFrameCount);
@@ -566,7 +579,9 @@
                                                  frameCount,
                                                  nullptr,
                                                  (size_t)0 /* bufferSize */,
-                                                 inputFlags);
+                                                 inputFlags,
+                                                 {} /* timeout */,
+                                                 source);
     }
     status = mRecord.checkTrack(tempRecordTrack.get());
     if (status != NO_ERROR) {
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 076417e..f0a5f76 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -88,6 +88,10 @@
                                     && (flags & AUDIO_INPUT_FLAG_HW_AV_SYNC) == 0;
                         }
 
+            using SinkMetadatas = std::vector<record_track_metadata_v7_t>;
+            using MetadataInserter = std::back_insert_iterator<SinkMetadatas>;
+            virtual void    copyMetadataTo(MetadataInserter& backInserter) const;
+
 private:
     friend class AudioFlinger;  // for mState
 
@@ -135,7 +139,8 @@
                 void *buffer,
                 size_t bufferSize,
                 audio_input_flags_t flags,
-                const Timeout& timeout = {});
+                const Timeout& timeout = {},
+                audio_source_t source = AUDIO_SOURCE_DEFAULT);
     virtual             ~PatchRecord();
 
     virtual Source* getSource() { return nullptr; }
@@ -167,7 +172,8 @@
                         audio_channel_mask_t channelMask,
                         audio_format_t format,
                         size_t frameCount,
-                        audio_input_flags_t flags);
+                        audio_input_flags_t flags,
+                        audio_source_t source = AUDIO_SOURCE_DEFAULT);
 
     Source* getSource() override { return static_cast<Source*>(this); }
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 0de7e7d..37b8fe8 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -2140,9 +2140,19 @@
     if (!isStreamInitialized()) {
         ALOGE("The stream is not open yet"); // This should not happen.
     } else {
-        // setEventCallback will need a strong pointer as a parameter. Calling it
-        // here instead of constructor of PlaybackThread so that the onFirstRef
-        // callback would not be made on an incompletely constructed object.
+        // Callbacks take a strong or weak pointer to the handler as a parameter.
+        // Since PlaybackThread passes itself as the callback handler, registration
+        // can only happen outside of the constructor. Creating weak and especially
+        // strong pointers to a refcounted object in its own constructor is strongly
+        // discouraged; see comments in system/core/libutils/include/utils/RefBase.h.
+        // Even if a function takes a weak pointer, it may still need to convert it
+        // to a strong pointer down the line.
+        if (mOutput->flags & AUDIO_OUTPUT_FLAG_NON_BLOCKING &&
+                mOutput->stream->setCallback(this) == OK) {
+            mUseAsyncWrite = true;
+            mCallbackThread = new AudioFlinger::AsyncCallbackThread(this);
+        }
+
         if (mOutput->stream->setEventCallback(this) != OK) {
             ALOGD("Failed to add event callback");
         }
@@ -3007,13 +3017,6 @@
                 mFrameCount);
     }
 
-    if (mOutput->flags & AUDIO_OUTPUT_FLAG_NON_BLOCKING) {
-        if (mOutput->stream->setCallback(this) == OK) {
-            mUseAsyncWrite = true;
-            mCallbackThread = new AudioFlinger::AsyncCallbackThread(this);
-        }
-    }
-
     mHwSupportsPause = false;
     if (mOutput->flags & AUDIO_OUTPUT_FLAG_DIRECT) {
         bool supportsPause = false, supportsResume = false;
@@ -3290,7 +3293,7 @@
 }
 
 void AudioFlinger::PlaybackThread::threadLoop_removeTracks(
-        const Vector< sp<Track> >& tracksToRemove)
+        [[maybe_unused]] const Vector< sp<Track> >& tracksToRemove)
 {
     // Miscellaneous track cleanup when removed from the active list,
     // called without Thread lock but synchronized with threadLoop processing.
@@ -3301,8 +3304,6 @@
             addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop);
         }
     }
-#else
-    (void)tracksToRemove; // suppress unused warning
 #endif
 }
 
@@ -5826,7 +5827,7 @@
     }
 
     // Push the new FastMixer state if necessary
-    bool pauseAudioWatchdog = false;
+    [[maybe_unused]] bool pauseAudioWatchdog = false;
     if (didModify) {
         state->mFastTracksGen++;
         // if the fast mixer was active, but now there are no fast tracks, then put it in cold idle
@@ -7567,7 +7568,7 @@
     size_t numCounterOffers = 0;
     const NBAIO_Format offers[1] = {Format_from_SR_C(mSampleRate, mChannelCount, mFormat)};
 #if !LOG_NDEBUG
-    ssize_t index =
+    [[maybe_unused]] ssize_t index =
 #else
     (void)
 #endif
@@ -7618,7 +7619,7 @@
         Pipe *pipe = new Pipe(pipeFramesP2, format, pipeBuffer);
         const NBAIO_Format offers[1] = {format};
         size_t numCounterOffers = 0;
-        ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers);
+        [[maybe_unused]] ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers);
         ALOG_ASSERT(index == 0);
         mPipeSink = pipe;
         PipeReader *pipeReader = new PipeReader(*pipe);
@@ -8813,21 +8814,9 @@
         return; // nothing to do
     }
     StreamInHalInterface::SinkMetadata metadata;
+    auto backInserter = std::back_inserter(metadata.tracks);
     for (const sp<RecordTrack> &track : mActiveTracks) {
-        // Do not forward PatchRecord metadata to audio HAL
-        if (track->isPatchTrack()) {
-            continue;
-        }
-        // No track is invalid as this is called after prepareTrack_l in the same critical section
-        record_track_metadata_v7_t trackMetadata;
-        trackMetadata.base = {
-                .source = track->attributes().source,
-                .gain = 1, // capture tracks do not have volumes
-        };
-        trackMetadata.channel_mask = track->channelMask(),
-        strncpy(trackMetadata.tags, track->attributes().tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
-
-        metadata.tracks.push_back(trackMetadata);
+        track->copyMetadataTo(backInserter);
     }
     mInput->stream->updateSinkMetadata(metadata);
 }
@@ -9086,7 +9075,8 @@
     audio_format_t reqFormat = mFormat;
     uint32_t samplingRate = mSampleRate;
     // TODO this may change if we want to support capture from HDMI PCM multi channel (e.g on TVs).
-    audio_channel_mask_t channelMask = audio_channel_in_mask_from_count(mChannelCount);
+    [[maybe_unused]] audio_channel_mask_t channelMask =
+                                audio_channel_in_mask_from_count(mChannelCount);
 
     AudioParameter param = AudioParameter(keyValuePair);
     int value;
@@ -10283,19 +10273,22 @@
 
 void AudioFlinger::MmapThread::checkInvalidTracks_l()
 {
+    sp<MmapStreamCallback> callback;
     for (const sp<MmapTrack> &track : mActiveTracks) {
         if (track->isInvalid()) {
-            sp<MmapStreamCallback> callback = mCallback.promote();
-            if (callback != 0) {
-                mLock.unlock();
-                callback->onTearDown(track->portId());
-                mLock.lock();
-            } else if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
-                ALOGW("Could not notify MMAP stream tear down: no onTearDown callback!");
+            callback = mCallback.promote();
+            if (callback == nullptr && mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
+                ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
                 mNoCallbackWarningCount++;
             }
+            break;
         }
     }
+    if (callback != 0) {
+        mLock.unlock();
+        callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+        mLock.lock();
+    }
 }
 
 void AudioFlinger::MmapThread::dumpInternals_l(int fd, const Vector<String16>& args __unused)
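
The comment added in the first Threads.cpp hunk above relies on a general RefBase rule: never hand out strong or weak pointers to a refcounted object from inside its own constructor. A minimal standalone sketch of the same pitfall, not part of the patch, using std::enable_shared_from_this as a stand-in for android::RefBase (Stream and Handler are illustrative names only):

// refbase_pitfall_sketch.cpp - illustrative only; build with any C++17 compiler.
#include <iostream>
#include <memory>

struct Handler;

struct Stream {
    // The registry keeps a strong reference to its handler, like setCallback()/setEventCallback().
    std::shared_ptr<Handler> callback;
};

struct Handler : std::enable_shared_from_this<Handler> {
    Handler() {
        // WRONG here: shared_from_this() would throw std::bad_weak_ptr because no
        // shared_ptr owns the object yet - the analogue of creating sp<>/wp<> to
        // "this" inside a RefBase constructor.
    }
    void registerWith(Stream& stream) {
        // Correct: called after construction, once ownership is established,
        // mirroring the move of setCallback() out of the constructor path.
        stream.callback = shared_from_this();
    }
};

int main() {
    Stream stream;
    auto handler = std::make_shared<Handler>();
    handler->registerWith(stream);
    std::cout << "callback registered: " << std::boolalpha << (stream.callback != nullptr) << "\n";
    return 0;
}
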
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 077a01a..300ad9f 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1479,7 +1479,7 @@
         }
     }
 
-    metadata.channel_mask = mChannelMask,
+    metadata.channel_mask = mChannelMask;
     strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
     *backInserter++ = metadata;
 }
@@ -2016,7 +2016,6 @@
 {
     Buffer *pInBuffer;
     Buffer inBuffer;
-    bool outputBufferFull = false;
     inBuffer.frameCount = frames;
     inBuffer.raw = data;
 
@@ -2046,7 +2045,6 @@
                 ALOGV("%s(%d): thread %d no more output buffers; status %d",
                         __func__, mId,
                         (int)mThreadIoHandle, status);
-                outputBufferFull = true;
                 break;
             }
             uint32_t waitTimeMs = (uint32_t)ns2ms(systemTime() - startTime);
@@ -2737,6 +2735,25 @@
     }
 }
 
+void AudioFlinger::RecordThread::RecordTrack::copyMetadataTo(MetadataInserter& backInserter) const
+{
+
+    // Do not forward PatchRecord metadata with unspecified audio source
+    if (mAttr.source == AUDIO_SOURCE_DEFAULT) {
+        return;
+    }
+
+    // No track is invalid as this is called after prepareTrack_l in the same critical section
+    record_track_metadata_v7_t metadata;
+    metadata.base = {
+            .source = mAttr.source,
+            .gain = 1, // capture tracks do not have volumes
+    };
+    metadata.channel_mask = mChannelMask;
+    strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+
+    *backInserter++ = metadata;
+}
 
 // ----------------------------------------------------------------------------
 #undef LOG_TAG
@@ -2750,9 +2767,10 @@
                                                      void *buffer,
                                                      size_t bufferSize,
                                                      audio_input_flags_t flags,
-                                                     const Timeout& timeout)
+                                                     const Timeout& timeout,
+                                                     audio_source_t source)
     :   RecordTrack(recordThread, NULL,
-                audio_attributes_t{} /* currently unused for patch track */,
+                audio_attributes_t{ .source = source },
                 sampleRate, format, channelMask, frameCount,
                 buffer, bufferSize, AUDIO_SESSION_NONE, getpid(),
                 audioServerAttributionSource(getpid()), flags, TYPE_PATCH),
@@ -2863,9 +2881,10 @@
         audio_channel_mask_t channelMask,
         audio_format_t format,
         size_t frameCount,
-        audio_input_flags_t flags)
+        audio_input_flags_t flags,
+        audio_source_t source)
         : PatchRecord(recordThread, sampleRate, channelMask, format, frameCount,
-                nullptr /*buffer*/, 0 /*bufferSize*/, flags),
+                nullptr /*buffer*/, 0 /*bufferSize*/, flags, {} /* timeout */, source),
           mPatchRecordAudioBufferProvider(*this),
           mSinkBuffer(allocAligned(32, mFrameCount * mFrameSize)),
           mStubBuffer(allocAligned(32, mFrameCount * mFrameSize))
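
The updateMetadata_l() hunk in Threads.cpp above now delegates per-track filtering to RecordTrack::copyMetadataTo() through a std::back_inserter. A small self-contained sketch of the same pattern, with TrackInfo and Metadata as hypothetical stand-ins for RecordTrack and record_track_metadata_v7_t:

// metadata_inserter_sketch.cpp - illustrative only; build with any C++17 compiler.
#include <iostream>
#include <iterator>
#include <vector>

struct Metadata {
    int source;
    float gain;
};

struct TrackInfo {
    int source;  // 0 stands in for AUDIO_SOURCE_DEFAULT (unspecified)

    // Same shape as RecordTrack::copyMetadataTo(): the track decides whether to emit
    // anything and writes through the caller-provided inserter.
    template <typename OutIt>
    void copyMetadataTo(OutIt& out) const {
        if (source == 0) {
            return;  // skip tracks with an unspecified source, as the patch does
        }
        *out++ = Metadata{source, 1.0f};  // capture tracks do not have volumes
    }
};

int main() {
    const std::vector<TrackInfo> activeTracks{{0}, {1}, {6}};
    std::vector<Metadata> sinkTracks;
    auto backInserter = std::back_inserter(sinkTracks);
    for (const TrackInfo& track : activeTracks) {
        track.copyMetadataTo(backInserter);
    }
    std::cout << sinkTracks.size() << " of " << activeTracks.size()
              << " tracks forwarded to the HAL metadata\n";  // prints "2 of 3"
    return 0;
}
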
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 0431619..7119b85 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -239,12 +239,13 @@
     }
     void setUseSwBridge() { mUseSwBridge = true; }
     bool useSwBridge() const { return mUseSwBridge; }
+    bool canCloseOutput() const { return mCloseOutput; }
     bool isConnected() const { return mPatchHandle != AUDIO_PATCH_HANDLE_NONE; }
     audio_patch_handle_t getPatchHandle() const { return mPatchHandle; }
     sp<DeviceDescriptor> srcDevice() const { return mSrcDevice; }
     sp<DeviceDescriptor> sinkDevice() const { return mSinkDevice; }
     wp<SwAudioOutputDescriptor> swOutput() const { return mSwOutput; }
-    void setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput);
+    void setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput, bool closeOutput = false);
     wp<HwAudioOutputDescriptor> hwOutput() const { return mHwOutput; }
     void setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput);
 
@@ -258,6 +259,15 @@
     wp<SwAudioOutputDescriptor> mSwOutput;
     wp<HwAudioOutputDescriptor> mHwOutput;
     bool mUseSwBridge = false;
+    /**
+     * A HW bridge is associated with a SwOutput for activity / volume reporting, and a SW bridge
+     * additionally uses it for sample rendering. In both cases an existing playback thread may be
+     * reused (e.g. an output not already opened at APM startup, or a Direct output).
+     * When an already opened output is reused and later becomes unused, the AudioFlinger
+     * patch must be updated to refine the output device(s) information and ensure the right
+     * behavior of AudioDeviceCallback.
+     */
+    bool mCloseOutput = false;
 };
 
 /**
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 713b0ac..8b6866e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -105,9 +105,11 @@
 {
 }
 
-void SourceClientDescriptor::setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput)
+void SourceClientDescriptor::setSwOutput(
+        const sp<SwAudioOutputDescriptor>& swOutput, bool closeOutput)
 {
     mSwOutput = swOutput;
+    mCloseOutput = closeOutput;
 }
 
 void SourceClientDescriptor::setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput)
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 3d6bc5b..c64497f 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -785,7 +785,8 @@
     ALOGV("%s between source %s and sink %s", __func__,
             srcDevice->toString().c_str(), sinkDevice->toString().c_str());
     auto callTxSourceClientPortId = PolicyAudioPort::getNextUniqueId();
-    const audio_attributes_t aa = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
+    const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
+
     struct audio_port_config source = {};
     srcDevice->toAudioPortConfig(&source);
     mCallTxSourceClient = new InternalSourceClientDescriptor(
@@ -1530,6 +1531,10 @@
     if ((*flags & (AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)) != 0) {
         return AUDIO_IO_HANDLE_NONE;
     }
+    // A request for Tuner cannot fall back to a mixed output
+    if ((directConfig.offload_info.content_id || directConfig.offload_info.sync_id)) {
+        return AUDIO_IO_HANDLE_NONE;
+    }
 
     // ignoring channel mask due to downmix capability in mixer
 
@@ -3026,6 +3031,10 @@
     status_t status = NO_ERROR;
     IVolumeCurves &curves = getVolumeCurves(attributes);
     VolumeSource vs = toVolumeSource(group);
+    // AUDIO_STREAM_BLUETOOTH_SCO is only used for volume control, so remap it
+    // to AUDIO_STREAM_VOICE_CALL to match the relevant playback activity
+    VolumeSource activityVs = (vs == toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false)) ?
+            toVolumeSource(AUDIO_STREAM_VOICE_CALL, false) : vs;
     product_strategy_t strategy = mEngine->getProductStrategyForAttributes(attributes);
 
     status = setVolumeCurveIndex(index, device, curves);
@@ -3064,7 +3073,8 @@
         if (curDevices.erase(AUDIO_DEVICE_OUT_SPEAKER_SAFE)) {
             curDevices.insert(AUDIO_DEVICE_OUT_SPEAKER);
         }
-        if (!(desc->isActive(vs) || isInCall())) {
+
+        if (!(desc->isActive(activityVs) || isInCallOrScreening())) {
             continue;
         }
         if (device != AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME &&
@@ -3098,7 +3108,7 @@
                 bool isPreempted = false;
                 bool isHigherPriority = productStrategy < strategy;
                 for (const auto &client : activeClients) {
-                    if (isHigherPriority && (client->volumeSource() != vs)) {
+                    if (isHigherPriority && (client->volumeSource() != activityVs)) {
                         ALOGV("%s: Strategy=%d (\nrequester:\n"
                               " group %d, volumeGroup=%d attributes=%s)\n"
                               " higher priority source active:\n"
@@ -3111,7 +3121,7 @@
                         break;
                     }
                     // However, continue for loop to ensure no higher prio clients running on output
-                    if (client->volumeSource() == vs) {
+                    if (client->volumeSource() == activityVs) {
                         applyVolume = true;
                     }
                 }
@@ -4552,7 +4562,7 @@
                 // In case of Hw bridge, it is a Work Around. The mixPort used is the one declared
                 // in config XML to reach the sink so that it can be declared as available.
                 audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-                sp<SwAudioOutputDescriptor> outputDesc = nullptr;
+                sp<SwAudioOutputDescriptor> outputDesc;
                 if (!sourceDesc->isInternal()) {
                     // take care of dynamic routing for SwOutput selection,
                     audio_attributes_t attributes = sourceDesc->attributes();
@@ -4581,7 +4591,8 @@
                         ALOGE("%s output is duplicated", __func__);
                         return INVALID_OPERATION;
                     }
-                    sourceDesc->setSwOutput(outputDesc);
+                    bool closeOutput = outputDesc->mDirectOpenCount != 0;
+                    sourceDesc->setSwOutput(outputDesc, closeOutput);
                 } else {
                     // Same for "raw patches" aka created from createAudioPatch API
                     SortedVector<audio_io_handle_t> outputs =
@@ -4600,7 +4611,7 @@
                               __func__, sinkDevice->toString().c_str());
                         return INVALID_OPERATION;
                     }
-                    sourceDesc->setSwOutput(outputDesc);
+                    sourceDesc->setSwOutput(outputDesc, /* closeOutput= */ false);
                 }
                 // create a software bridge in PatchPanel if:
                 // - source and sink devices are on different HW modules OR
@@ -4622,7 +4633,8 @@
                         audio_port_config srcMixPortConfig = {};
                         outputDesc->toAudioPortConfig(&srcMixPortConfig, nullptr);
                         // for volume control, we may need a valid stream
-                        srcMixPortConfig.ext.mix.usecase.stream = !sourceDesc->isInternal() ?
+                        srcMixPortConfig.ext.mix.usecase.stream =
+                            (!sourceDesc->isInternal() || isCallTxAudioSource(sourceDesc)) ?
                                     mEngine->getStreamTypeForAttributes(sourceDesc->attributes()) :
                                     AUDIO_STREAM_PATCH;
                         patchBuilder.addSource(srcMixPortConfig);
@@ -4729,17 +4741,29 @@
                 // releaseOutput has already called closeOutput in case of direct output
                 return NO_ERROR;
             }
-            if (!outputDesc->isActive() && !sourceDesc->useSwBridge()) {
-                resetOutputDevice(outputDesc);
-            } else {
-                // Reuse patch handle if still valid / do not force rerouting if still routed
-                patchHandle = outputDesc->getPatchHandle();
-                setOutputDevices(outputDesc,
-                                 getNewOutputDevices(outputDesc, true /*fromCache*/),
-                                 patchHandle == AUDIO_PATCH_HANDLE_NONE, /*force*/
-                                 0,
-                                 patchHandle == AUDIO_PATCH_HANDLE_NONE ? nullptr : &patchHandle);
-            }
+            patchHandle = outputDesc->getPatchHandle();
+            // When a SW bridge is released, the mixer used by this bridge releases its patch
+            // on the AudioFlinger side, so the mixer audio patch must be recreated.
+            // Reuse the patch handle to force AudioFlinger to remove the initial mixer patch and
+            // update the HAL patch handle (prevents leaks).
+            // When using a HW bridge, force reconsidering the device only if an existing output
+            // is not being reused and there is no more activity on the output (this will force
+            // it to close).
+            bool force = sourceDesc->useSwBridge() ||
+                    (sourceDesc->canCloseOutput() && !outputDesc->isActive());
+            // APM pattern is to have always outputs opened / patch realized for reachable devices.
+            // Update device may result to NONE (empty), coupled with force, it releases the patch.
+            // Reconsider device only for cases:
+            //      1 / Active Output
+            //      2 / Inactive Output previously hosting HwBridge
+            //      3 / Inactive Output previously hosting SwBridge that can be closed.
+            bool updateDevice = outputDesc->isActive() || !sourceDesc->useSwBridge() ||
+                    sourceDesc->canCloseOutput();
+            setOutputDevices(outputDesc,
+                             updateDevice ? getNewOutputDevices(outputDesc, true /*fromCache*/) :
+                                            outputDesc->devices(),
+                             force,
+                             0,
+                             patchHandle == AUDIO_PATCH_HANDLE_NONE ? nullptr : &patchHandle);
         } else {
             return BAD_VALUE;
         }
@@ -6346,10 +6370,10 @@
     SortedVector<audio_io_handle_t> dstOutputs = getOutputsForDevices(newDevices, mOutputs);
 
     uint32_t maxLatency = 0;
-    bool invalidate = false;
+    std::vector<sp<SwAudioOutputDescriptor>> invalidatedOutputs;
     // take into account dynamic audio policies related changes: if a client is now associated
     // to a different policy mix than at creation time, invalidate corresponding stream
-    for (size_t i = 0; i < mPreviousOutputs.size() && !invalidate; i++) {
+    for (size_t i = 0; i < mPreviousOutputs.size(); i++) {
         const sp<SwAudioOutputDescriptor>& desc = mPreviousOutputs.valueAt(i);
         if (desc->isDuplicated()) {
             continue;
@@ -6365,16 +6389,15 @@
                 continue;
             }
             if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
-                invalidate = true;
-                if (desc->isStrategyActive(psId)) {
+                if (desc->isStrategyActive(psId) && maxLatency < desc->latency()) {
                     maxLatency = desc->latency();
                 }
-                break;
+                invalidatedOutputs.push_back(desc);
             }
         }
     }
 
-    if (srcOutputs != dstOutputs || invalidate) {
+    if (srcOutputs != dstOutputs || !invalidatedOutputs.empty()) {
         // get maximum latency of all source outputs to determine the minimum mute time guaranteeing
         // audio from invalidated tracks will be rendered when unmuting
         for (audio_io_handle_t srcOut : srcOutputs) {
@@ -6385,8 +6408,7 @@
                 maxLatency = desc->latency();
             }
 
-            if (invalidate) continue;
-
+            bool invalidate = false;
             for (auto client : desc->clientsList(false /*activeOnly*/)) {
                 if (desc->isDuplicated() || !desc->mProfile->isDirectOutput()) {
                     // a client on a non direct outputs has necessarily a linear PCM format
@@ -6414,21 +6436,14 @@
                     }
                 }
             }
-        }
-
-        ALOGV_IF(!(srcOutputs.isEmpty() || dstOutputs.isEmpty()),
-              "%s: strategy %d, moving from output %s to output %s", __func__, psId,
-              std::to_string(srcOutputs[0]).c_str(),
-              std::to_string(dstOutputs[0]).c_str());
-        // mute strategy while moving tracks from one output to another
-        for (audio_io_handle_t srcOut : srcOutputs) {
-            sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueFor(srcOut);
-            if (desc == nullptr) continue;
-
-            if (desc->isStrategyActive(psId)) {
-                setStrategyMute(psId, true, desc);
-                setStrategyMute(psId, false, desc, maxLatency * LATENCY_MUTE_FACTOR,
-                                newDevices.types());
+            // mute strategy while moving tracks from one output to another
+            if (invalidate) {
+                invalidatedOutputs.push_back(desc);
+                if (desc->isStrategyActive(psId)) {
+                    setStrategyMute(psId, true, desc);
+                    setStrategyMute(psId, false, desc, maxLatency * LATENCY_MUTE_FACTOR,
+                                    newDevices.types());
+                }
             }
             sp<SourceClientDescriptor> source = getSourceForAttributesOnOutput(srcOut, attr);
             if (source != nullptr && !isCallRxAudioSource(source) && !source->isInternal()) {
@@ -6436,19 +6451,21 @@
             }
         }
 
+        ALOGV_IF(!(srcOutputs.isEmpty() || dstOutputs.isEmpty()),
+              "%s: strategy %d, moving from output %s to output %s", __func__, psId,
+              std::to_string(srcOutputs[0]).c_str(),
+              std::to_string(dstOutputs[0]).c_str());
+
         // Move effects associated to this stream from previous output to new output
         if (followsSameRouting(attr, attributes_initializer(AUDIO_USAGE_MEDIA))) {
             selectOutputForMusicEffects();
         }
         // Move tracks associated to this stream (and linked) from previous output to new output
-        if (invalidate) {
+        if (!invalidatedOutputs.empty()) {
             for (auto stream :  mEngine->getStreamTypesForProductStrategy(psId)) {
                 mpClientInterface->invalidateStream(stream);
             }
-            for (audio_io_handle_t srcOut : srcOutputs) {
-                sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueFor(srcOut);
-                if (desc == nullptr) continue;
-
+            for (sp<SwAudioOutputDescriptor> desc : invalidatedOutputs) {
                 desc->setTracksInvalidatedStatusByStrategy(psId);
             }
         }
@@ -7341,7 +7358,8 @@
     // if sco and call follow same curves, bypass forceUseForComm
     if ((callVolSrc != btScoVolSrc) &&
             ((isVoiceVolSrc && isScoRequested) ||
-             (isBtScoVolSrc && !(isScoRequested || isHAUsed)))) {
+             (isBtScoVolSrc && !(isScoRequested || isHAUsed))) &&
+            !isSingleDeviceType(deviceTypes, AUDIO_DEVICE_OUT_TELEPHONY_TX)) {
         ALOGV("%s cannot set volume group %d volume when is%srequested for comm", __func__,
              volumeSource, isScoRequested ? " " : " not ");
         // Do not return an error here as AudioService will always set both voice call
@@ -7509,14 +7527,18 @@
     return is_state_in_call(state);
 }
 
-bool AudioPolicyManager::isCallAudioAccessible()
-{
+bool AudioPolicyManager::isCallAudioAccessible() const {
     audio_mode_t mode = mEngine->getPhoneState();
     return (mode == AUDIO_MODE_IN_CALL)
             || (mode == AUDIO_MODE_CALL_SCREEN)
             || (mode == AUDIO_MODE_CALL_REDIRECT);
 }
 
+bool AudioPolicyManager::isInCallOrScreening() const {
+    audio_mode_t mode = mEngine->getPhoneState();
+    return isStateInCall(mode) || mode == AUDIO_MODE_CALL_SCREEN;
+}
+
 void AudioPolicyManager::cleanUpForDevice(const sp<DeviceDescriptor>& deviceDesc)
 {
     for (ssize_t i = (ssize_t)mAudioSources.size() - 1; i >= 0; i--)  {
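
The checkOutputForAttributes() hunks above replace the single `invalidate` flag (which stopped at the first affected output) with an `invalidatedOutputs` vector so that every affected output is muted and marked. A minimal standalone sketch of that collect-then-process shape, with Output as a hypothetical stand-in for SwAudioOutputDescriptor:

// invalidated_outputs_sketch.cpp - illustrative only; build with any C++17 compiler.
#include <iostream>
#include <memory>
#include <vector>

struct Output {
    int latencyMs;
    bool strategyActive;
    bool clientMovedToOtherMix;  // stands in for "primary mix changed or lost"
};

int main() {
    const std::vector<std::shared_ptr<Output>> previousOutputs = {
        std::make_shared<Output>(Output{20, true, false}),
        std::make_shared<Output>(Output{40, true, true}),
        std::make_shared<Output>(Output{80, false, true}),
    };

    // Collect every affected output instead of breaking on the first one, and keep
    // track of the maximum latency of the active ones for the mute window.
    int maxLatencyMs = 0;
    std::vector<std::shared_ptr<Output>> invalidatedOutputs;
    for (const auto& desc : previousOutputs) {
        if (!desc->clientMovedToOtherMix) {
            continue;
        }
        if (desc->strategyActive && maxLatencyMs < desc->latencyMs) {
            maxLatencyMs = desc->latencyMs;
        }
        invalidatedOutputs.push_back(desc);
    }

    std::cout << invalidatedOutputs.size() << " outputs to invalidate, mute window based on "
              << maxLatencyMs << " ms\n";  // prints "2 outputs ... 40 ms"
    return 0;
}
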
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 2159257..8466d097 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -602,7 +602,9 @@
         // true if given state represents a device in a telephony or VoIP call
         virtual bool isStateInCall(int state) const;
         // true if playback to call TX or capture from call RX is possible
-        bool isCallAudioAccessible();
+        bool isCallAudioAccessible() const;
+        // true if device is in a telephony or VoIP call or call screening is active
+        bool isInCallOrScreening() const;
 
         // when a device is connected, checks if an open output can be routed
         // to this device. If none is open, tries to open one of the available outputs.
@@ -639,6 +641,10 @@
             return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
         }
 
+        bool isCallTxAudioSource(const sp<SourceClientDescriptor> &source) {
+            return mCallTxSourceClient != nullptr && source == mCallTxSourceClient;
+        }
+
         void connectTelephonyRxAudioSource();
 
         void disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc);
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 281785e..09b6f3b 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -1816,12 +1816,14 @@
 void AudioPolicyService::SensorPrivacyPolicy::registerSelf() {
     SensorPrivacyManager spm;
     mSensorPrivacyEnabled = spm.isSensorPrivacyEnabled();
+    (void)spm.addToggleSensorPrivacyListener(this);
     spm.addSensorPrivacyListener(this);
 }
 
 void AudioPolicyService::SensorPrivacyPolicy::unregisterSelf() {
     SensorPrivacyManager spm;
     spm.removeSensorPrivacyListener(this);
+    spm.removeToggleSensorPrivacyListener(this);
 }
 
 bool AudioPolicyService::SensorPrivacyPolicy::isSensorPrivacyEnabled() {
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 2fe7b9e..5db82f7 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -20,6 +20,7 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
+#include <algorithm>
 #include <inttypes.h>
 #include <limits.h>
 #include <stdint.h>
@@ -33,6 +34,7 @@
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/MediaMetricsItem.h>
+#include <media/QuaternionUtil.h>
 #include <media/ShmemCompat.h>
 #include <mediautils/SchedulingPolicyService.h>
 #include <mediautils/ServiceUtilities.h>
@@ -75,6 +77,34 @@
     return maxMask;
 }
 
+static std::vector<float> recordFromTranslationRotationVector(
+        const std::vector<float>& trVector) {
+    auto headToStageOpt = Pose3f::fromVector(trVector);
+    if (!headToStageOpt) return {};
+
+    const auto stageToHead = headToStageOpt.value().inverse();
+    const auto stageToHeadTranslation = stageToHead.translation();
+    constexpr float RAD_TO_DEGREE = 180.f / M_PI;
+    std::vector<float> record{
+        stageToHeadTranslation[0], stageToHeadTranslation[1], stageToHeadTranslation[2],
+        0.f, 0.f, 0.f};
+    media::quaternionToAngles(stageToHead.rotation(), &record[3], &record[4], &record[5]);
+    record[3] *= RAD_TO_DEGREE;
+    record[4] *= RAD_TO_DEGREE;
+    record[5] *= RAD_TO_DEGREE;
+    return record;
+}
+
+template<typename T>
+static constexpr const T& safe_clamp(const T& value, const T& low, const T& high) {
+    if constexpr (std::is_floating_point_v<T>) {
+        return value != value /* constexpr isnan */
+                ? low : std::clamp(value, low, high);
+    } else /* constexpr */ {
+        return std::clamp(value, low, high);
+    }
+}
+
 // ---------------------------------------------------------------------------
 
 class Spatializer::EngineCallbackHandler : public AHandler {
@@ -185,41 +215,6 @@
 };
 
 // ---------------------------------------------------------------------------
-
-// Convert recorded sensor data to string with level indentation.
-std::string Spatializer::HeadToStagePoseRecorder::toString(unsigned level) const {
-    std::string prefixSpace(level, ' ');
-    return mPoseRecordLog.dumpToString((prefixSpace + " ").c_str(), Spatializer::mMaxLocalLogLine);
-}
-
-// Compute sensor data, record into local log when it is time.
-void Spatializer::HeadToStagePoseRecorder::record(const std::vector<float>& headToStage) {
-    if (headToStage.size() != mPoseVectorSize) return;
-
-    if (mNumOfSampleSinceLastRecord++ == 0) {
-        mFirstSampleTimestamp = std::chrono::steady_clock::now();
-    }
-    // if it's time, do record and reset.
-    if (shouldRecordLog()) {
-        poseSumToAverage();
-        mPoseRecordLog.log(
-                "mean: %s, min: %s, max %s, calculated %d samples in %0.4f second(s)",
-                Spatializer::toString<double>(mPoseRadianSum, true /* radianToDegree */).c_str(),
-                Spatializer::toString<float>(mMinPoseAngle, true /* radianToDegree */).c_str(),
-                Spatializer::toString<float>(mMaxPoseAngle, true /* radianToDegree */).c_str(),
-                mNumOfSampleSinceLastRecord, mNumOfSecondsSinceLastRecord.count());
-        resetRecord();
-    }
-    // update stream average.
-    for (int i = 0; i < mPoseVectorSize; i++) {
-        mPoseRadianSum[i] += headToStage[i];
-        mMaxPoseAngle[i] = std::max(mMaxPoseAngle[i], headToStage[i]);
-        mMinPoseAngle[i] = std::min(mMinPoseAngle[i], headToStage[i]);
-    }
-    return;
-}
-
-// ---------------------------------------------------------------------------
 sp<Spatializer> Spatializer::create(SpatializerPolicyCallback *callback) {
     sp<Spatializer> spatializer;
 
@@ -590,7 +585,8 @@
     }
     std::lock_guard lock(mLock);
     if (mPoseController != nullptr) {
-        mLocalLog.log("%s with screenToStage %s", __func__, toString<float>(screenToStage).c_str());
+        mLocalLog.log("%s with screenToStage %s", __func__,
+                media::VectorRecorder::toString<float>(screenToStage).c_str());
         mPoseController->setScreenToStagePose(maybePose.value());
     }
     return Status::ok();
@@ -653,28 +649,48 @@
 
 Status Spatializer::setDisplayOrientation(float physicalToLogicalAngle) {
     ALOGV("%s physicalToLogicalAngle %f", __func__, physicalToLogicalAngle);
-    if (!mSupportsHeadTracking) {
-        return binderStatusFromStatusT(INVALID_OPERATION);
-    }
-    std::lock_guard lock(mLock);
-    mDisplayOrientation = physicalToLogicalAngle;
     mLocalLog.log("%s with %f", __func__, physicalToLogicalAngle);
+    const float angle = safe_clamp(physicalToLogicalAngle, 0.f, (float)(2. * M_PI));
+    // It is possible due to numerical inaccuracies to exceed the boundaries of 0 to 2 * M_PI.
+    ALOGI_IF(angle != physicalToLogicalAngle,
+            "%s: clamping %f to %f", __func__, physicalToLogicalAngle, angle);
+    std::lock_guard lock(mLock);
+    mDisplayOrientation = angle;
     if (mPoseController != nullptr) {
-        mPoseController->setDisplayOrientation(mDisplayOrientation);
+        // This turns on the rate-limiter.
+        mPoseController->setDisplayOrientation(angle);
     }
     if (mEngine != nullptr) {
         setEffectParameter_l(
-            SPATIALIZER_PARAM_DISPLAY_ORIENTATION, std::vector<float>{physicalToLogicalAngle});
+            SPATIALIZER_PARAM_DISPLAY_ORIENTATION, std::vector<float>{angle});
     }
     return Status::ok();
 }
 
 Status Spatializer::setHingeAngle(float hingeAngle) {
-    std::lock_guard lock(mLock);
     ALOGV("%s hingeAngle %f", __func__, hingeAngle);
+    mLocalLog.log("%s with %f", __func__, hingeAngle);
+    const float angle = safe_clamp(hingeAngle, 0.f, (float)(2. * M_PI));
+    // It is possible due to numerical inaccuracies to exceed the boundaries of 0 to 2 * M_PI.
+    ALOGI_IF(angle != hingeAngle,
+            "%s: clamping %f to %f", __func__, hingeAngle, angle);
+    std::lock_guard lock(mLock);
+    mHingeAngle = angle;
     if (mEngine != nullptr) {
-        mLocalLog.log("%s with %f", __func__, hingeAngle);
-        setEffectParameter_l(SPATIALIZER_PARAM_HINGE_ANGLE, std::vector<float>{hingeAngle});
+        setEffectParameter_l(SPATIALIZER_PARAM_HINGE_ANGLE, std::vector<float>{angle});
+    }
+    return Status::ok();
+}
+
+Status Spatializer::setFoldState(bool folded) {
+    ALOGV("%s foldState %d", __func__, (int)folded);
+    mLocalLog.log("%s with %d", __func__, (int)folded);
+    std::lock_guard lock(mLock);
+    mFoldedState = folded;
+    if (mEngine != nullptr) {
+        // We don't suppress multiple calls with the same folded state - that's
+        // handled by the caller.
+        setEffectParameter_l(SPATIALIZER_PARAM_FOLD_STATE, std::vector<uint8_t>{mFoldedState});
     }
     return Status::ok();
 }
@@ -771,8 +787,9 @@
         callback = mHeadTrackingCallback;
         if (mEngine != nullptr) {
             setEffectParameter_l(SPATIALIZER_PARAM_HEAD_TO_STAGE, headToStage);
-            mPoseRecorder.record(headToStage);
-            mPoseDurableRecorder.record(headToStage);
+            const auto record = recordFromTranslationRotationVector(headToStage);
+            mPoseRecorder.record(record);
+            mPoseDurableRecorder.record(record);
         }
     }
 
@@ -822,8 +839,7 @@
             }
         }
         callback = mHeadTrackingCallback;
-        mLocalLog.log("%s: %s, spatializerMode %s", __func__, media::toString(mode).c_str(),
-                      media::toString(spatializerMode).c_str());
+        mLocalLog.log("%s: updating mode to %s", __func__, media::toString(mode).c_str());
     }
     if (callback != nullptr) {
         callback->onHeadTrackingModeChanged(spatializerMode);
@@ -877,6 +893,14 @@
             checkSensorsState_l();
         }
         callback = mSpatializerCallback;
+
+        // Restore common effect state.
+        setEffectParameter_l(SPATIALIZER_PARAM_DISPLAY_ORIENTATION,
+                std::vector<float>{mDisplayOrientation});
+        setEffectParameter_l(SPATIALIZER_PARAM_FOLD_STATE,
+                std::vector<uint8_t>{mFoldedState});
+        setEffectParameter_l(SPATIALIZER_PARAM_HINGE_ANGLE,
+                std::vector<float>{mHingeAngle});
     }
 
     if (outputChanged && callback != nullptr) {
@@ -1048,8 +1072,7 @@
 }
 
 std::string Spatializer::toString(unsigned level) const {
-    std::string prefixSpace;
-    prefixSpace.append(level, ' ');
+    std::string prefixSpace(level, ' ');
     std::string ss = prefixSpace + "Spatializer:\n";
     bool needUnlock = false;
 
@@ -1105,14 +1128,15 @@
 
     // PostController dump.
     if (mPoseController != nullptr) {
-        ss += mPoseController->toString(level + 1);
-        ss.append(prefixSpace +
-                  "Sensor data format - [rx, ry, rz, vx, vy, vz] (units-degree, "
-                  "r-transform, v-angular velocity, x-pitch, y-roll, z-yaw):\n");
-        ss.append(prefixSpace + " PerMinuteHistory:\n");
-        ss += mPoseDurableRecorder.toString(level + 1);
-        ss.append(prefixSpace + " PerSecondHistory:\n");
-        ss += mPoseRecorder.toString(level + 1);
+        ss.append(mPoseController->toString(level + 1))
+            .append(prefixSpace)
+            .append("Pose (active stage-to-head) [tx, ty, tz : pitch, roll, yaw]:\n")
+            .append(prefixSpace)
+            .append(" PerMinuteHistory:\n")
+            .append(mPoseDurableRecorder.toString(level + 3))
+            .append(prefixSpace)
+            .append(" PerSecondHistory:\n")
+            .append(mPoseRecorder.toString(level + 3));
     } else {
         ss.append(prefixSpace).append("SpatializerPoseController not exist\n");
     }
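
setDisplayOrientation() and setHingeAngle() above now sanitize their input through safe_clamp(), whose floating-point branch routes NaN to the lower bound. A quick standalone check of that behavior; the helper is copied from the patch, while main() and its sample values are only an illustration:

// safe_clamp_sketch.cpp - illustrative only; build with any C++17 compiler.
#include <algorithm>
#include <cmath>
#include <iostream>
#include <type_traits>

template <typename T>
static constexpr const T& safe_clamp(const T& value, const T& low, const T& high) {
    if constexpr (std::is_floating_point_v<T>) {
        return value != value /* constexpr isnan */
                ? low : std::clamp(value, low, high);
    } else /* constexpr */ {
        return std::clamp(value, low, high);
    }
}

int main() {
    const float lo = 0.f;
    const float hi = 2.f * static_cast<float>(M_PI);

    std::cout << safe_clamp(7.0f, lo, hi) << "\n";            // clamped to ~6.2832
    std::cout << safe_clamp(-0.001f, lo, hi) << "\n";         // clamped to 0
    std::cout << safe_clamp(std::nanf(""), lo, hi) << "\n";   // NaN falls back to the lower bound
    std::cout << safe_clamp(-3, 0, 10) << "\n";               // integral path: plain std::clamp -> 0
    return 0;
}
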
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 0f6bafe..60030bd 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -27,6 +27,7 @@
 #include <audio_utils/SimpleLog.h>
 #include <math.h>
 #include <media/AudioEffect.h>
+#include <media/VectorRecorder.h>
 #include <media/audiohal/EffectHalInterface.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <system/audio_effects/effect_spatializer.h>
@@ -118,6 +119,7 @@
     binder::Status setScreenSensor(int sensorHandle) override;
     binder::Status setDisplayOrientation(float physicalToLogicalAngle) override;
     binder::Status setHingeAngle(float hingeAngle) override;
+    binder::Status setFoldState(bool folded) override;
     binder::Status getSupportedModes(std::vector<media::SpatializationMode>* modes) override;
     binder::Status registerHeadTrackingCallback(
         const sp<media::ISpatializerHeadTrackingCallback>& callback) override;
@@ -172,30 +174,6 @@
                 media::audio::common::toString(*result) : "unknown_latency_mode";
     }
 
-    /**
-     * Format head to stage vector to a string, [0.00, 0.00, 0.00, -1.29, -0.50, 15.27].
-     */
-    template <typename T>
-    static std::string toString(const std::vector<T>& vec, bool radianToDegree = false) {
-        if (vec.size() == 0) {
-            return "[]";
-        }
-
-        std::string ss = "[";
-        for (auto f = vec.begin(); f != vec.end(); ++f) {
-            if (f != vec.begin()) {
-                ss .append(", ");
-            }
-            if (radianToDegree) {
-                base::StringAppendF(&ss, "%0.2f", HeadToStagePoseRecorder::getDegreeWithRadian(*f));
-            } else {
-                base::StringAppendF(&ss, "%f", *f);
-            }
-        }
-        ss.append("]");
-        return ss;
-    };
-
     // If the Spatializer is not created, we send the status for metrics purposes.
     // OK:      Spatializer not expected to be created.
     // NO_INIT: Spatializer creation failed.
@@ -399,8 +377,13 @@
     int32_t mScreenSensor GUARDED_BY(mLock) = SpatializerPoseController::INVALID_SENSOR;
 
     /** Last display orientation received */
-    static constexpr float kDisplayOrientationInvalid = 1000;
-    float mDisplayOrientation GUARDED_BY(mLock) = kDisplayOrientationInvalid;
+    float mDisplayOrientation GUARDED_BY(mLock) = 0.f;  // aligned to natural up orientation.
+
+    /** Last folded state */
+    bool mFoldedState GUARDED_BY(mLock) = false;  // foldable: true means folded.
+
+    /** Last hinge angle */
+    float mHingeAngle GUARDED_BY(mLock) = 0.f;  // foldable: 0.f is closed, M_PI flat open.
 
     std::vector<media::SpatializationLevel> mLevels;
     std::vector<media::SpatializerHeadTrackingMode> mHeadTrackingModes;
@@ -427,92 +410,12 @@
      * @brief Calculate and record sensor data.
      * Dump to local log with max/average pose angle every mPoseRecordThreshold.
      */
-    class HeadToStagePoseRecorder {
-      public:
-        HeadToStagePoseRecorder(std::chrono::duration<double> threshold, int maxLogLine)
-            : mPoseRecordThreshold(threshold), mPoseRecordLog(maxLogLine) {
-            resetRecord();
-        }
-
-        /** Convert recorded sensor data to string with level indentation */
-        std::string toString(unsigned level) const;
-
-        /**
-         * @brief Calculate sensor data, record into local log when it is time.
-         *
-         * @param headToStage The vector from Pose3f::toVector().
-         */
-        void record(const std::vector<float>& headToStage);
-
-        static constexpr float getDegreeWithRadian(const float radian) {
-            float radianToDegreeRatio = (180 / PI);
-            return (radian * radianToDegreeRatio);
-        }
-
-      private:
-        static constexpr float PI = M_PI;
-        /**
-         * Pose recorder time threshold to record sensor data in local log.
-         * Sensor data will be recorded into log at least every mPoseRecordThreshold.
-         */
-        std::chrono::duration<double> mPoseRecordThreshold;
-        // Number of seconds pass since last record.
-        std::chrono::duration<double> mNumOfSecondsSinceLastRecord;
-        /**
-         * According to frameworks/av/media/libheadtracking/include/media/Pose.h
-         * "The vector will have exactly 6 elements, where the first three are a translation vector
-         * and the last three are a rotation vector."
-         */
-        static constexpr size_t mPoseVectorSize = 6;
-        /**
-         * Timestamp of last sensor data record in local log.
-         */
-        std::chrono::time_point<std::chrono::steady_clock> mFirstSampleTimestamp;
-        /**
-         * Number of sensor samples received since last record, sample rate is ~100Hz which produce
-         * ~6k samples/minute.
-         */
-        uint32_t mNumOfSampleSinceLastRecord = 0;
-        /* The sum of pose angle represented by radian since last dump, div
-         * mNumOfSampleSinceLastRecord to get arithmetic mean. Largest possible value: 2PI * 100Hz *
-         * mPoseRecordThreshold.
-         */
-        std::vector<double> mPoseRadianSum;
-        std::vector<float> mMaxPoseAngle;
-        std::vector<float> mMinPoseAngle;
-        // Local log for history sensor data.
-        SimpleLog mPoseRecordLog{mMaxLocalLogLine};
-
-        bool shouldRecordLog() {
-            mNumOfSecondsSinceLastRecord = std::chrono::duration_cast<std::chrono::seconds>(
-                    std::chrono::steady_clock::now() - mFirstSampleTimestamp);
-            return mNumOfSecondsSinceLastRecord >= mPoseRecordThreshold;
-        }
-
-        void resetRecord() {
-            mPoseRadianSum.assign(mPoseVectorSize, 0);
-            mMaxPoseAngle.assign(mPoseVectorSize, -PI);
-            mMinPoseAngle.assign(mPoseVectorSize, PI);
-            mNumOfSampleSinceLastRecord = 0;
-            mNumOfSecondsSinceLastRecord = std::chrono::seconds(0);
-        }
-
-        // Add each sample to sum and only calculate when record.
-        void poseSumToAverage() {
-            if (mNumOfSampleSinceLastRecord == 0) return;
-            for (auto& p : mPoseRadianSum) {
-                const float reciprocal = 1.f / mNumOfSampleSinceLastRecord;
-                p *= reciprocal;
-            }
-        }
-    };  // HeadToStagePoseRecorder
-
     // Record one log line per second (up to mMaxLocalLogLine) to capture most recent sensor data.
-    HeadToStagePoseRecorder mPoseRecorder GUARDED_BY(mLock) =
-            HeadToStagePoseRecorder(std::chrono::seconds(1), mMaxLocalLogLine);
+    media::VectorRecorder mPoseRecorder GUARDED_BY(mLock) {
+        6 /* vectorSize */, std::chrono::seconds(1), mMaxLocalLogLine, { 3 } /* delimiterIdx */};
     // Record one log line per minute (up to mMaxLocalLogLine) to capture durable sensor data.
-    HeadToStagePoseRecorder mPoseDurableRecorder GUARDED_BY(mLock) =
-            HeadToStagePoseRecorder(std::chrono::minutes(1), mMaxLocalLogLine);
+    media::VectorRecorder mPoseDurableRecorder GUARDED_BY(mLock) {
+        6 /* vectorSize */, std::chrono::minutes(1), mMaxLocalLogLine, { 3 } /* delimiterIdx */};
 };  // Spatializer
 
 }; // namespace android
diff --git a/services/audiopolicy/service/SpatializerPoseController.cpp b/services/audiopolicy/service/SpatializerPoseController.cpp
index 72dba3d..63f53b7 100644
--- a/services/audiopolicy/service/SpatializerPoseController.cpp
+++ b/services/audiopolicy/service/SpatializerPoseController.cpp
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 #include "SpatializerPoseController.h"
 #include <android-base/stringprintf.h>
 #include <chrono>
@@ -21,8 +22,10 @@
 
 #define LOG_TAG "SpatializerPoseController"
 //#define LOG_NDEBUG 0
+#include <cutils/properties.h>
 #include <sensor/Sensor.h>
 #include <media/MediaMetricsItem.h>
+#include <media/QuaternionUtil.h>
 #include <utils/Log.h>
 #include <utils/SystemClock.h>
 
@@ -45,11 +48,17 @@
 // This is how fast, in rad/s, we allow rotation angle to shift during rate-limiting.
 constexpr float kMaxRotationalVelocity = 0.8f;
 
-// This is how far into the future we predict the head pose, using linear extrapolation based on
-// twist (velocity). It should be set to a value that matches the characteristic durations of moving
-// one's head. The higher we set this, the more latency we are able to reduce, but setting this too
-// high will result in high prediction errors whenever the head accelerates (changes velocity).
-constexpr auto kPredictionDuration = 50ms;
+// This is how far into the future we predict the head pose.
+// The prediction duration should be based on the actual latency from
+// head-tracker to audio output, though setting the prediction duration too
+// high may result in higher prediction errors when the head accelerates or
+// decelerates (changes velocity).
+//
+// The head tracking predictor will make a best-effort attempt to achieve the
+// requested prediction duration.  If the duration is too far in the future based
+// on the current sensor variance, the predictor may internally restrict the
+// duration to what is achievable with reasonable confidence as the "best prediction".
+constexpr auto kPredictionDuration = 120ms;
 
 // After not getting a pose sample for this long, we would treat the measurement as stale.
 // The max connection interval is 50ms, and HT sensor event interval can differ depending on the
@@ -97,7 +106,15 @@
               .maxTranslationalVelocity = kMaxTranslationalVelocity / kTicksPerSecond,
               .maxRotationalVelocity = kMaxRotationalVelocity / kTicksPerSecond,
               .freshnessTimeout = Ticks(kFreshnessTimeout).count(),
-              .predictionDuration = Ticks(kPredictionDuration).count(),
+              .predictionDuration = []() -> float {
+                  const int duration_ms =
+                          property_get_int32("audio.spatializer.prediction_duration_ms", 0);
+                  if (duration_ms > 0) {
+                      return duration_ms * 1'000'000LL;
+                  } else {
+                      return Ticks(kPredictionDuration).count();
+                  }
+              }(),
               .autoRecenterWindowDuration = Ticks(kAutoRecenterWindowDuration).count(),
               .autoRecenterTranslationalThreshold = kAutoRecenterTranslationThreshold,
               .autoRecenterRotationalThreshold = kAutoRecenterRotationThreshold,
@@ -145,7 +162,14 @@
                   mShouldCalculate = false;
               }
           }
-      }) {}
+      }) {
+          const media::PosePredictorType posePredictorType =
+                  (media::PosePredictorType)
+                  property_get_int32("audio.spatializer.pose_predictor_type", -1);
+          if (isValidPosePredictorType(posePredictorType)) {
+              mProcessor->setPosePredictorType(posePredictorType);
+          }
+      }
 
 SpatializerPoseController::~SpatializerPoseController() {
     {
@@ -282,7 +306,36 @@
 void SpatializerPoseController::onPose(int64_t timestamp, int32_t sensor, const Pose3f& pose,
                                        const std::optional<Twist3f>& twist, bool isNewReference) {
     std::lock_guard lock(mMutex);
+    constexpr float NANOS_TO_MILLIS = 1e-6;
+    constexpr float RAD_TO_DEGREE = 180.f / M_PI;
+
+    const float delayMs = (elapsedRealtimeNano() - timestamp) * NANOS_TO_MILLIS; // CLOCK_BOOTTIME
+
     if (sensor == mHeadSensor) {
+        std::vector<float> pryprydt(8);  // pitch, roll, yaw, d_pitch, d_roll, d_yaw,
+                                         // discontinuity, timestamp_delay
+        media::quaternionToAngles(pose.rotation(), &pryprydt[0], &pryprydt[1], &pryprydt[2]);
+        if (twist) {
+            const auto rotationalVelocity = twist->rotationalVelocity();
+            // The rotational velocity is an intrinsic transform (i.e. based on the head
+            // coordinate system, not the world coordinate system).  It is a 3 element vector:
+            // axis (d theta / dt).
+            //
+            // We leave rotational velocity relative to the head coordinate system,
+            // as the initial head tracking sensor's world frame is arbitrary.
+            media::quaternionToAngles(media::rotationVectorToQuaternion(rotationalVelocity),
+                    &pryprydt[3], &pryprydt[4], &pryprydt[5]);
+        }
+        pryprydt[6] = isNewReference;
+        pryprydt[7] = delayMs;
+        for (size_t i = 0; i < 6; ++i) {
+            // pitch, roll, yaw in degrees, referenced to the world frame.
+            // d_pitch, d_roll, d_yaw rotational velocity in degrees/s, relative to the head frame.
+            pryprydt[i] *= RAD_TO_DEGREE;
+        }
+        mHeadSensorRecorder.record(pryprydt);
+        mHeadSensorDurableRecorder.record(pryprydt);
+
         mProcessor->setWorldToHeadPose(timestamp, pose,
                                        twist.value_or(Twist3f()) / kTicksPerSecond);
         if (isNewReference) {
@@ -290,6 +343,14 @@
         }
     }
     if (sensor == mScreenSensor) {
+        std::vector<float> pryt{ 0.f, 0.f, 0.f, delayMs}; // pitch, roll, yaw, timestamp_delay
+        media::quaternionToAngles(pose.rotation(), &pryt[0], &pryt[1], &pryt[2]);
+        for (size_t i = 0; i < 3; ++i) {
+            pryt[i] *= RAD_TO_DEGREE;
+        }
+        mScreenSensorRecorder.record(pryt);
+        mScreenSensorDurableRecorder.record(pryt);
+
         mProcessor->setWorldToScreenPose(timestamp, pose);
         if (isNewReference) {
             mProcessor->recenter(false, true);
@@ -298,8 +359,7 @@
 }
 
 std::string SpatializerPoseController::toString(unsigned level) const {
-    std::string prefixSpace;
-    prefixSpace.append(level, ' ');
+    std::string prefixSpace(level, ' ');
     std::string ss = prefixSpace + "SpatializerPoseController:\n";
     bool needUnlock = false;
 
@@ -315,14 +375,31 @@
     if (mHeadSensor == INVALID_SENSOR) {
         ss += "HeadSensor: INVALID\n";
     } else {
-        base::StringAppendF(&ss, "HeadSensor: 0x%08x\n", mHeadSensor);
+        base::StringAppendF(&ss, "HeadSensor: 0x%08x "
+            "(active world-to-head : head-relative velocity) "
+            "[ pitch, roll, yaw : d_pitch, d_roll, d_yaw : disc : delay ] "
+            "(degrees, degrees/s, bool, ms)\n", mHeadSensor);
+        ss.append(prefixSpace)
+            .append(" PerMinuteHistory:\n")
+            .append(mHeadSensorDurableRecorder.toString(level + 3))
+            .append(prefixSpace)
+            .append(" PerSecondHistory:\n")
+            .append(mHeadSensorRecorder.toString(level + 3));
     }
 
     ss += prefixSpace;
     if (mScreenSensor == INVALID_SENSOR) {
         ss += "ScreenSensor: INVALID\n";
     } else {
-        base::StringAppendF(&ss, "ScreenSensor: 0x%08x\n", mScreenSensor);
+        base::StringAppendF(&ss, "ScreenSensor: 0x%08x (active world-to-screen) "
+            "[ pitch, roll, yaw : delay ] "
+            "(degrees, ms)\n", mScreenSensor);
+        ss.append(prefixSpace)
+            .append(" PerMinuteHistory:\n")
+            .append(mScreenSensorDurableRecorder.toString(level + 3))
+            .append(prefixSpace)
+            .append(" PerSecondHistory:\n")
+            .append(mScreenSensorRecorder.toString(level + 3));
     }
 
     ss += prefixSpace;
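
The predictionDuration initializer above uses an immediately-invoked lambda so that a positive "audio.spatializer.prediction_duration_ms" system property overrides the 120 ms default, converted to nanoseconds. A standalone sketch of the same override-or-default pattern, with std::getenv and a hypothetical PREDICTION_DURATION_MS variable standing in for property_get_int32():

// prediction_duration_sketch.cpp - illustrative only; build with any C++17 compiler.
#include <chrono>
#include <cstdlib>
#include <iostream>

using namespace std::chrono_literals;

// Default matching the patch; a positive override wins.
constexpr auto kPredictionDuration = 120ms;

int main() {
    // Immediately-invoked lambda: use the override when present and positive,
    // otherwise fall back to the compiled-in default, expressed in nanoseconds.
    const float predictionDurationNs = []() -> float {
        const char* env = std::getenv("PREDICTION_DURATION_MS");  // hypothetical override knob
        const int durationMs = (env != nullptr) ? std::atoi(env) : 0;
        if (durationMs > 0) {
            return durationMs * 1'000'000.f;  // ms -> ns
        }
        return std::chrono::duration_cast<std::chrono::nanoseconds>(kPredictionDuration).count();
    }();

    std::cout << "prediction duration: " << predictionDurationNs << " ns\n";  // 1.2e+08 by default
    return 0;
}
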
diff --git a/services/audiopolicy/service/SpatializerPoseController.h b/services/audiopolicy/service/SpatializerPoseController.h
index 233f94c..9d78188 100644
--- a/services/audiopolicy/service/SpatializerPoseController.h
+++ b/services/audiopolicy/service/SpatializerPoseController.h
@@ -24,6 +24,7 @@
 
 #include <media/HeadTrackingProcessor.h>
 #include <media/SensorPoseProvider.h>
+#include <media/VectorRecorder.h>
 
 namespace android {
 
@@ -131,6 +132,20 @@
     bool mShouldExit = false;
     bool mCalculated = false;
 
+    media::VectorRecorder mHeadSensorRecorder{
+        8 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
+        { 3, 6, 7 } /* delimiterIdx */};
+    media::VectorRecorder mHeadSensorDurableRecorder{
+        8 /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
+        { 3, 6, 7 } /* delimiterIdx */};
+
+    media::VectorRecorder mScreenSensorRecorder{
+        4 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
+        { 3 } /* delimiterIdx */};
+    media::VectorRecorder mScreenSensorDurableRecorder{
+        4 /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
+        { 3 } /* delimiterIdx */};
+
     // It's important that mThread is the last variable in this class
     // since we starts mThread in initializer list
     std::thread mThread;
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 2388b79..8bfa588 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -203,6 +203,7 @@
     status_t res;
 
     std::vector<std::string> deviceIds;
+    std::unordered_map<std::string, std::set<std::string>> unavailPhysicalIds;
     {
         Mutex::Autolock l(mServiceLock);
 
@@ -233,7 +234,7 @@
             ALOGE("Failed to enumerate flash units: %s (%d)", strerror(-res), res);
         }
 
-        deviceIds = mCameraProviderManager->getCameraDeviceIds();
+        deviceIds = mCameraProviderManager->getCameraDeviceIds(&unavailPhysicalIds);
     }
 
 
@@ -242,6 +243,12 @@
         if (getCameraState(id8) == nullptr) {
             onDeviceStatusChanged(id8, CameraDeviceStatus::PRESENT);
         }
+        if (unavailPhysicalIds.count(cameraId) > 0) {
+            for (const auto& physicalId : unavailPhysicalIds[cameraId]) {
+                String8 physicalId8 = String8(physicalId.c_str());
+                onDeviceStatusChanged(id8, physicalId8, CameraDeviceStatus::NOT_PRESENT);
+            }
+        }
     }
 
     // Derive primary rear/front cameras, and filter their characteristics.
@@ -335,7 +342,9 @@
         int facing = -1;
         int orientation = 0;
         String8 cameraId8(cameraId.c_str());
-        getDeviceVersion(cameraId8, /*out*/&facing, /*out*/&orientation);
+        int portraitRotation;
+        getDeviceVersion(cameraId8, /*overrideToPortrait*/false, /*out*/&portraitRotation,
+                /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\" facing", __FUNCTION__, cameraId.c_str());
             return;
@@ -495,7 +504,7 @@
 
     if (state == nullptr) {
         ALOGE("%s: Physical camera id %s status change on a non-present ID %s",
-                __FUNCTION__, id.string(), physicalId.string());
+                __FUNCTION__, physicalId.string(), id.string());
         return;
     }
 
@@ -675,7 +684,7 @@
     return Status::ok();
 }
 
-Status CameraService::getCameraInfo(int cameraId,
+Status CameraService::getCameraInfo(int cameraId, bool overrideToPortrait,
         CameraInfo* cameraInfo) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
@@ -703,8 +712,9 @@
     }
 
     Status ret = Status::ok();
+    int portraitRotation;
     status_t err = mCameraProviderManager->getCameraInfo(
-            cameraIdStr.c_str(), cameraInfo);
+            cameraIdStr.c_str(), overrideToPortrait, &portraitRotation, cameraInfo);
     if (err != OK) {
         ret = STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                 "Error retrieving camera info from device %d: %s (%d)", cameraId,
@@ -739,7 +749,7 @@
 }
 
 Status CameraService::getCameraCharacteristics(const String16& cameraId,
-        int targetSdkVersion, CameraMetadata* cameraInfo) {
+        int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) {
     ATRACE_CALL();
     if (!cameraInfo) {
         ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
@@ -766,7 +776,7 @@
             SessionConfigurationUtils::targetPerfClassPrimaryCamera(mPerfClassPrimaryCameraIds,
                     cameraIdStr, targetSdkVersion);
     status_t res = mCameraProviderManager->getCameraCharacteristics(
-            cameraIdStr, overrideForPerfClass, cameraInfo);
+            cameraIdStr, overrideForPerfClass, cameraInfo, overrideToPortrait);
     if (res != OK) {
         if (res == NAME_NOT_FOUND) {
             return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to retrieve camera "
@@ -887,8 +897,8 @@
     BasicClient::BasicClient::sCameraService = nullptr;
 }
 
-std::pair<int, IPCTransport> CameraService::getDeviceVersion(const String8& cameraId, int* facing,
-        int* orientation) {
+std::pair<int, IPCTransport> CameraService::getDeviceVersion(const String8& cameraId,
+        bool overrideToPortrait, int* portraitRotation, int* facing, int* orientation) {
     ATRACE_CALL();
 
     int deviceVersion = 0;
@@ -907,7 +917,8 @@
 
     hardware::CameraInfo info;
     if (facing) {
-        res = mCameraProviderManager->getCameraInfo(cameraId.string(), &info);
+        res = mCameraProviderManager->getCameraInfo(cameraId.string(), overrideToPortrait,
+                portraitRotation, &info);
         if (res != OK) {
             return std::make_pair(-1, IPCTransport::INVALID);
         }
@@ -942,7 +953,8 @@
         const std::optional<String16>& featureId,  const String8& cameraId,
         int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
-        apiLevel effectiveApiLevel, bool overrideForPerfClass, /*out*/sp<BasicClient>* client) {
+        apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
+        /*out*/sp<BasicClient>* client) {
     // For HIDL devices
     if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
         // Create CameraClient based on device version reported by the HAL.
@@ -975,13 +987,16 @@
         sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
         *client = new Camera2Client(cameraService, tmp, packageName, featureId,
                 cameraId, api1CameraId, facing, sensorOrientation, clientPid, clientUid,
-                servicePid, overrideForPerfClass);
+                servicePid, overrideForPerfClass, overrideToPortrait);
+        ALOGI("%s: Camera1 API (legacy), override to portrait %d", __FUNCTION__,
+                overrideToPortrait);
     } else { // Camera2 API route
         sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
                 static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
         *client = new CameraDeviceClient(cameraService, tmp, packageName,
                 systemNativeClient, featureId, cameraId, facing, sensorOrientation,
-                clientPid, clientUid, servicePid, overrideForPerfClass);
+                clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait);
+        ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
     }
     return Status::ok();
 }
@@ -1071,7 +1086,7 @@
             sp<ICameraClient>{nullptr}, id, cameraId,
             internalPackageName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
-            /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*out*/ tmp)
+            /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true, /*out*/ tmp)
             ).isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().string());
     }
@@ -1587,6 +1602,7 @@
         int clientUid,
         int clientPid,
         int targetSdkVersion,
+        bool overrideToPortrait,
         /*out*/
         sp<ICamera>* device) {
 
@@ -1597,7 +1613,8 @@
     sp<Client> client = nullptr;
     ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId,
             clientPackageName,/*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
-            /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion, /*out*/client);
+            /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
+            overrideToPortrait, /*out*/client);
 
     if(!ret.isOk()) {
         logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageName),
@@ -1672,6 +1689,7 @@
         const String16& clientPackageName,
         const std::optional<String16>& clientFeatureId,
         int clientUid, int oomScoreOffset, int targetSdkVersion,
+        bool overrideToPortrait,
         /*out*/
         sp<hardware::camera2::ICameraDeviceUser>* device) {
 
@@ -1725,7 +1743,7 @@
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
             /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient,clientFeatureId,
             clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
-            targetSdkVersion, /*out*/client);
+            targetSdkVersion, overrideToPortrait, /*out*/client);
 
     if(!ret.isOk()) {
         logRejected(id, callingPid, String8(clientPackageNameAdj), ret.toString8());
@@ -1787,7 +1805,7 @@
         int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
         const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
         apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
-        /*out*/sp<CLIENT>& device) {
+        bool overrideToPortrait, /*out*/sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
 
     bool isNonSystemNdk = false;
@@ -1886,8 +1904,10 @@
         // give flashlight a chance to close devices if necessary.
         mFlashlight->prepareDeviceOpen(cameraId);
 
+        int portraitRotation;
         auto deviceVersionAndTransport =
-                getDeviceVersion(cameraId, /*out*/&facing, /*out*/&orientation);
+                getDeviceVersion(cameraId, overrideToPortrait, /*out*/&portraitRotation,
+                        /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\"  facing", __FUNCTION__, cameraId.string());
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
@@ -1901,7 +1921,7 @@
                 clientFeatureId, cameraId, api1CameraId, facing, orientation,
                 clientPid, clientUid, getpid(),
                 deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
-                /*out*/&tmp)).isOk()) {
+                overrideToPortrait, /*out*/&tmp)).isOk()) {
             return ret;
         }
         client = static_cast<CLIENT*>(tmp.get());
@@ -1961,8 +1981,25 @@
         // Set rotate-and-crop override behavior
         if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
             client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
+        } else if (overrideToPortrait && portraitRotation != 0) {
+            uint8_t rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
+            switch (portraitRotation) {
+                case 90:
+                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
+                    break;
+                case 180:
+                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_180;
+                    break;
+                case 270:
+                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_270;
+                    break;
+                default:
+                    ALOGE("Unexpected portrait rotation: %d", portraitRotation);
+                    break;
+            }
+            client->setRotateAndCropOverride(rotateAndCropMode);
         } else {
-          client->setRotateAndCropOverride(
+            client->setRotateAndCropOverride(
               CameraServiceProxyWrapper::getRotateAndCropOverride(
                   clientPackageName, facing, multiuser_get_user_id(clientUid)));
         }
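
The switch above is where the legacy-app portrait override takes effect: the provider-reported portraitRotation is pinned to a fixed rotate-and-crop mode instead of the usual per-app AUTO heuristic. A minimal standalone sketch of that mapping (the helper name and include path are illustrative, not part of the patch; the ANDROID_SCALER_ROTATE_AND_CROP_* values are the ones used in the hunk):

    #include <cstdint>
    #include <system/camera_metadata_tags.h>  // assumed include path for the ANDROID_SCALER_* enums

    // Sketch: mirrors the switch in connectHelper(). Falls back to AUTO for an
    // unexpected rotation so the caller can keep the existing behavior.
    static uint8_t portraitRotationToRotateAndCrop(int portraitRotation) {
        switch (portraitRotation) {
            case 90:  return ANDROID_SCALER_ROTATE_AND_CROP_90;
            case 180: return ANDROID_SCALER_ROTATE_AND_CROP_180;
            case 270: return ANDROID_SCALER_ROTATE_AND_CROP_270;
            default:  return ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
        }
    }
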
@@ -2466,6 +2503,11 @@
 
     ATRACE_CALL();
 
+    {
+        Mutex::Autolock lock(mServiceLock);
+        mDeviceState = newState;
+    }
+
     mCameraProviderManager->notifyDeviceStateChange(newState);
 
     return Status::ok();
@@ -2499,12 +2541,12 @@
     for (auto& current : clients) {
         if (current != nullptr) {
             const auto basicClient = current->getValue();
-            if (basicClient.get() != nullptr) {
-              basicClient->setRotateAndCropOverride(
-                  CameraServiceProxyWrapper::getRotateAndCropOverride(
-                      basicClient->getPackageName(),
-                      basicClient->getCameraFacing(),
-                      multiuser_get_user_id(basicClient->getClientUid())));
+            if (basicClient.get() != nullptr && !basicClient->getOverrideToPortrait()) {
+                basicClient->setRotateAndCropOverride(
+                        CameraServiceProxyWrapper::getRotateAndCropOverride(
+                                basicClient->getPackageName(),
+                                basicClient->getCameraFacing(),
+                                multiuser_get_user_id(basicClient->getClientUid())));
             }
         }
     }
@@ -2776,7 +2818,8 @@
             return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
 
-    auto deviceVersionAndTransport = getDeviceVersion(id);
+    int portraitRotation;
+    auto deviceVersionAndTransport = getDeviceVersion(id, false, &portraitRotation);
     if (deviceVersionAndTransport.first == -1) {
         String8 msg = String8::format("Unknown camera ID %s", id.string());
         ALOGE("%s: %s", __FUNCTION__, msg.string());
@@ -3261,13 +3304,13 @@
         const String8& cameraIdStr,
         int api1CameraId, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
-        int servicePid) :
+        int servicePid, bool overrideToPortrait) :
         CameraService::BasicClient(cameraService,
                 IInterface::asBinder(cameraClient),
                 clientPackageName, systemNativeClient, clientFeatureId,
                 cameraIdStr, cameraFacing, sensorOrientation,
                 clientPid, clientUid,
-                servicePid),
+                servicePid, overrideToPortrait),
         mCameraId(api1CameraId)
 {
     int callingPid = CameraThreadState::getCallingPid();
@@ -3297,7 +3340,7 @@
         const String16& clientPackageName, bool nativeClient,
         const std::optional<String16>& clientFeatureId, const String8& cameraIdStr,
         int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
-        int servicePid):
+        int servicePid, bool overrideToPortrait):
         mDestructionStarted(false),
         mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
         mClientPackageName(clientPackageName), mSystemNativeClient(nativeClient),
@@ -3305,6 +3348,7 @@
         mClientPid(clientPid), mClientUid(clientUid),
         mServicePid(servicePid),
         mDisconnected(false), mUidIsTrusted(false),
+        mOverrideToPortrait(overrideToPortrait),
         mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
         mRemoteBinder(remoteCallback),
         mOpsActive(false),
@@ -3670,8 +3714,7 @@
 // ----------------------------------------------------------------------------
 
 void CameraService::Client::notifyError(int32_t errorCode,
-        const CaptureResultExtras& resultExtras) {
-    (void) resultExtras;
+        [[maybe_unused]] const CaptureResultExtras& resultExtras) {
     if (mRemoteCallback != NULL) {
         int32_t api1ErrorCode = CAMERA_ERROR_RELEASED;
         if (errorCode == hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISABLED) {
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index f2d15ef..840e9b6 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -127,10 +127,10 @@
     // ICameraService
     virtual binder::Status     getNumberOfCameras(int32_t type, int32_t* numCameras);
 
-    virtual binder::Status     getCameraInfo(int cameraId,
-            hardware::CameraInfo* cameraInfo);
+    virtual binder::Status     getCameraInfo(int cameraId, bool overrideToPortrait,
+            hardware::CameraInfo* cameraInfo) override;
     virtual binder::Status     getCameraCharacteristics(const String16& cameraId,
-            int targetSdkVersion, CameraMetadata* cameraInfo);
+            int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) override;
     virtual binder::Status     getCameraVendorTagDescriptor(
             /*out*/
             hardware::camera2::params::VendorTagDescriptor* desc);
@@ -141,13 +141,14 @@
     virtual binder::Status     connect(const sp<hardware::ICameraClient>& cameraClient,
             int32_t cameraId, const String16& clientPackageName,
             int32_t clientUid, int clientPid, int targetSdkVersion,
+            bool overrideToPortrait,
             /*out*/
-            sp<hardware::ICamera>* device);
+            sp<hardware::ICamera>* device) override;
 
     virtual binder::Status     connectDevice(
             const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb, const String16& cameraId,
             const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
-            int32_t clientUid, int scoreOffset, int targetSdkVersion,
+            int32_t clientUid, int scoreOffset, int targetSdkVersion, bool overrideToPortrait,
             /*out*/
             sp<hardware::camera2::ICameraDeviceUser>* device);
 
@@ -243,8 +244,9 @@
 
     /////////////////////////////////////////////////////////////////////
     // CameraDeviceFactory functionality
-    std::pair<int, IPCTransport>    getDeviceVersion(const String8& cameraId, int* facing = nullptr,
-            int* orientation = nullptr);
+    std::pair<int, IPCTransport>    getDeviceVersion(const String8& cameraId,
+            bool overrideToPortrait, int* portraitRotation,
+            int* facing = nullptr, int* orientation = nullptr);
 
     /////////////////////////////////////////////////////////////////////
     // Methods to be used in CameraService class tests only
@@ -282,6 +284,10 @@
             return mRemoteBinder;
         }
 
+        bool getOverrideToPortrait() const {
+            return mOverrideToPortrait;
+        }
+
         // Disallows dumping over binder interface
         virtual status_t dump(int fd, const Vector<String16>& args);
         // Internal dump method to be called by CameraService
@@ -361,7 +367,8 @@
                 int sensorOrientation,
                 int clientPid,
                 uid_t clientUid,
-                int servicePid);
+                int servicePid,
+                bool overrideToPortrait);
 
         virtual ~BasicClient();
 
@@ -384,6 +391,7 @@
         const pid_t                     mServicePid;
         bool                            mDisconnected;
         bool                            mUidIsTrusted;
+        bool                            mOverrideToPortrait;
 
         mutable Mutex                   mAudioRestrictionLock;
         int32_t                         mAudioRestriction;
@@ -473,7 +481,8 @@
                 int sensorOrientation,
                 int clientPid,
                 uid_t clientUid,
-                int servicePid);
+                int servicePid,
+                bool overrideToPortrait);
         ~Client();
 
         // return our camera client
@@ -843,7 +852,7 @@
             int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
             const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
-            /*out*/sp<CLIENT>& device);
+            bool overrideToPortrait, /*out*/sp<CLIENT>& device);
 
     // Lock guarding camera service state
     Mutex               mServiceLock;
@@ -1258,7 +1267,7 @@
             const String8& cameraId, int api1CameraId, int facing, int sensorOrientation,
             int clientPid, uid_t clientUid, int servicePid,
             std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
-            bool overrideForPerfClass, /*out*/sp<BasicClient>* client);
+            bool overrideForPerfClass, bool overrideToPortrait, /*out*/sp<BasicClient>* client);
 
     status_t checkCameraAccess(const String16& opPackageName);
 
@@ -1355,6 +1364,9 @@
     // Guard mInjectionInternalCamId and mInjectionInitPending.
     Mutex mInjectionParametersLock;
 
+    // Track the folded/unfolded device state. 0 == UNFOLDED, 4 == FOLDED
+    int64_t mDeviceState;
+
     void updateTorchUidMapLocked(const String16& cameraId, int uid);
 };
 
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 20bf73d..8e3f609 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -61,11 +61,13 @@
         int clientPid,
         uid_t clientUid,
         int servicePid,
-        bool overrideForPerfClass):
+        bool overrideForPerfClass,
+        bool overrideToPortrait):
         Camera2ClientBase(cameraService, cameraClient, clientPackageName,
                 false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
                 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
-                clientUid, servicePid, overrideForPerfClass, /*legacyClient*/ true),
+                clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+                /*legacyClient*/ true),
         mParameters(api1CameraId, cameraFacing)
 {
     ATRACE_CALL();
@@ -1330,21 +1332,18 @@
             || l.mParameters.state == Parameters::VIDEO_SNAPSHOT);
 }
 
-void Camera2Client::releaseRecordingFrame(const sp<IMemory>& mem) {
-    (void)mem;
+void Camera2Client::releaseRecordingFrame([[maybe_unused]] const sp<IMemory>& mem) {
     ATRACE_CALL();
     ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
 }
 
-void Camera2Client::releaseRecordingFrameHandle(native_handle_t *handle) {
-    (void)handle;
+void Camera2Client::releaseRecordingFrameHandle([[maybe_unused]] native_handle_t *handle) {
     ATRACE_CALL();
     ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
 }
 
 void Camera2Client::releaseRecordingFrameHandleBatch(
-        const std::vector<native_handle_t*>& handles) {
-    (void)handles;
+        [[maybe_unused]] const std::vector<native_handle_t*>& handles) {
     ATRACE_CALL();
     ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
 }
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 8081efa..9c540a4 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -107,7 +107,8 @@
             int clientPid,
             uid_t clientUid,
             int servicePid,
-            bool overrideForPerfClass);
+            bool overrideForPerfClass,
+            bool overrideToPortrait);
 
     virtual ~Camera2Client();
 
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index 2daacd1..74423e5 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -59,6 +59,8 @@
         m3aState.aeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
         m3aState.afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
         m3aState.awbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
+
+        mLastFocalLength = l.mParameters.params.getFloat(CameraParameters::KEY_FOCAL_LENGTH);
     }
 }
 
@@ -92,9 +94,32 @@
         client->notifyRequestId(mCurrentRequestId);
     }
 
+    processLensState(frame.mMetadata, client);
+
     return FrameProcessorBase::processSingleFrame(frame, device);
 }
 
+void FrameProcessor::processLensState(const CameraMetadata &frame,
+        const sp<Camera2Client> &client) {
+    ATRACE_CALL();
+    camera_metadata_ro_entry_t entry;
+
+    entry = frame.find(ANDROID_LENS_FOCAL_LENGTH);
+    if (entry.count == 0) {
+        return;
+    }
+
+    if (fabs(entry.data.f[0] - mLastFocalLength) > 0.001f) {
+        SharedParameters::Lock l(client->getParameters());
+        l.mParameters.params.setFloat(
+                CameraParameters::KEY_FOCAL_LENGTH,
+                entry.data.f[0]);
+        l.mParameters.paramsFlattened = l.mParameters.params.flatten();
+
+        mLastFocalLength = entry.data.f[0];
+    }
+}
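
The new processLensState() path only pushes a focal-length update back into the API1 parameters when the value reported in the result metadata actually changes; a small standalone sketch of that change detection, using the same 0.001 threshold (helper name is illustrative, not part of the patch):

    #include <cmath>

    // Sketch: returns true when a newly reported focal length differs enough
    // from the cached value to be worth re-flattening the API1 parameters.
    // 0.001f matches the epsilon used in FrameProcessor::processLensState().
    static bool focalLengthChanged(float reported, float& lastFocalLength) {
        if (std::fabs(reported - lastFocalLength) <= 0.001f) {
            return false;            // jitter below the threshold: skip the update
        }
        lastFocalLength = reported;  // remember the value just published
        return true;
    }
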
+
 status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
         const sp<Camera2Client> &client) {
     status_t res = BAD_VALUE;
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.h b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
index bb985f6..6c8d221 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
@@ -57,6 +57,9 @@
     virtual bool processSingleFrame(CaptureResult &frame,
                                     const sp<FrameProducer> &device);
 
+    void processLensState(const CameraMetadata &frame,
+            const sp<Camera2Client> &client);
+
     status_t processFaceDetect(const CameraMetadata &frame,
             const sp<Camera2Client> &client);
 
@@ -110,6 +113,9 @@
     // Emit FaceDetection event to java if faces changed
     void callbackFaceDetection(const sp<Camera2Client>& client,
                                const camera_frame_metadata &metadata);
+
+    // Track most recent focal length sent by the camera device
+    float mLastFocalLength;
 };
 
 
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index b219cc8..202599b 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -61,12 +61,13 @@
         bool systemNativeClient,
         const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
-        int api1CameraId,
+        [[maybe_unused]] int api1CameraId,
         int cameraFacing,
         int sensorOrientation,
         int clientPid,
         uid_t clientUid,
-        int servicePid) :
+        int servicePid,
+        bool overrideToPortrait) :
     BasicClient(cameraService,
             IInterface::asBinder(remoteCallback),
             clientPackageName,
@@ -77,10 +78,9 @@
             sensorOrientation,
             clientPid,
             clientUid,
-            servicePid),
+            servicePid,
+            overrideToPortrait),
     mRemoteCallback(remoteCallback) {
-    // We don't need it for API2 clients, but Camera2ClientBase requires it.
-    (void) api1CameraId;
 }
 
 // Interface used by CameraService
@@ -96,10 +96,11 @@
         int clientPid,
         uid_t clientUid,
         int servicePid,
-        bool overrideForPerfClass) :
+        bool overrideForPerfClass,
+        bool overrideToPortrait) :
     Camera2ClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
                 clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing, sensorOrientation,
-                clientPid, clientUid, servicePid, overrideForPerfClass),
+                clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait),
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0),
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 45915ba..6bb64d6 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -58,7 +58,8 @@
             int sensorOrientation,
             int clientPid,
             uid_t clientUid,
-            int servicePid);
+            int servicePid,
+            bool overrideToPortrait);
 
     sp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
 };
@@ -187,7 +188,8 @@
             int clientPid,
             uid_t clientUid,
             int servicePid,
-            bool overrideForPerfClass);
+            bool overrideForPerfClass,
+            bool overrideToPortrait);
     virtual ~CameraDeviceClient();
 
     virtual status_t      initialize(sp<CameraProviderManager> manager,
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 2cb3397..acc805a 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -309,26 +309,20 @@
     finishCameraStreamingOps();
 }
 
-void CameraOfflineSessionClient::notifyAutoFocus(uint8_t newState, int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoFocus([[maybe_unused]] uint8_t newState,
+                [[maybe_unused]] int triggerId) {
     ALOGV("%s: Autofocus state now %d, last trigger %d",
           __FUNCTION__, newState, triggerId);
 }
 
-void CameraOfflineSessionClient::notifyAutoExposure(uint8_t newState, int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoExposure([[maybe_unused]] uint8_t newState,
+                [[maybe_unused]] int triggerId) {
     ALOGV("%s: Autoexposure state now %d, last trigger %d",
             __FUNCTION__, newState, triggerId);
 }
 
-void CameraOfflineSessionClient::notifyAutoWhitebalance(uint8_t newState, int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoWhitebalance([[maybe_unused]] uint8_t newState,
+                [[maybe_unused]] int triggerId) {
     ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", __FUNCTION__, newState,
             triggerId);
 }
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 9ea1093..8edb64a 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -56,7 +56,8 @@
                     IInterface::asBinder(remoteCallback),
                     // (v)ndk doesn't have offline session support
                     clientPackageName, /*overridePackageName*/false, clientFeatureId,
-                    cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid),
+                    cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid,
+                    /*overrideToPortrait*/false),
             mRemoteCallback(remoteCallback), mOfflineSession(session),
             mCompositeStreamMap(offlineCompositeStreamMap) {}
 
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 7a93cc7..2cc8e33 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -1161,11 +1161,13 @@
     inputFrame.fileFd = -1;
 
     // Fill in HEIC header
-    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
-    CameraBlob *blobHeader = (CameraBlob *)header;
     // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
-    blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
-    blobHeader->blobSizeBytes = fSize;
+    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
+    CameraBlob blobHeader = {
+        .blobId = static_cast<CameraBlobId>(0x00FE),
+        .blobSizeBytes = static_cast<int32_t>(fSize)
+    };
+    memcpy(header, &blobHeader, sizeof(CameraBlob));
 
     res = native_window_set_buffers_timestamp(mOutputSurface.get(), inputFrame.timestamp);
     if (res != OK) {
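
The HEIC blob header is now copied into place with memcpy from a locally built CameraBlob rather than written through a pointer cast into the output buffer, most likely to avoid a misaligned, type-punned store. A standalone sketch of the pattern, with a stand-in trailer struct since the real CameraBlob type comes from the camera HAL definitions:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Stand-in for the CameraBlob trailer; field layout here is illustrative only.
    struct BlobTrailer {
        int32_t blobId;
        int32_t blobSizeBytes;
    };

    // Sketch: append a trailer at the very end of a fixed-size blob buffer via
    // memcpy, never by casting the raw byte pointer to a struct pointer.
    static void writeBlobTrailer(uint8_t* dstBuffer, std::size_t maxBufferSize,
                                 int32_t blobId, int32_t payloadSize) {
        BlobTrailer trailer = { blobId, payloadSize };
        uint8_t* header = dstBuffer + maxBufferSize - sizeof(BlobTrailer);
        memcpy(header, &trailer, sizeof(BlobTrailer));
    }
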
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 7d98a0b..71d0f9e 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -60,10 +60,11 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
+        bool overrideToPortrait,
         bool legacyClient):
         TClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
                 clientFeatureId, cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
-                clientUid, servicePid),
+                clientUid, servicePid, overrideToPortrait),
         mSharedCameraCallbacks(remoteCallback),
         mDeviceActive(false), mApi1CameraId(api1CameraId)
 {
@@ -117,12 +118,12 @@
         case IPCTransport::HIDL:
             mDevice =
                     new HidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            mLegacyClient);
+                            TClientBase::mOverrideToPortrait, mLegacyClient);
             break;
         case IPCTransport::AIDL:
             mDevice =
                     new AidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            mLegacyClient);
+                            TClientBase::mOverrideToPortrait, mLegacyClient);
              break;
         default:
             ALOGE("%s Invalid transport for camera id %s", __FUNCTION__,
@@ -379,50 +380,38 @@
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyShutter(const CaptureResultExtras& resultExtras,
-                                                   nsecs_t timestamp) {
-    (void)resultExtras;
-    (void)timestamp;
-
+void Camera2ClientBase<TClientBase>::notifyShutter(
+                [[maybe_unused]] const CaptureResultExtras& resultExtras,
+                [[maybe_unused]] nsecs_t timestamp) {
     ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
             __FUNCTION__, resultExtras.requestId, timestamp);
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoFocus(uint8_t newState,
-                                                     int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoFocus([[maybe_unused]] uint8_t newState,
+                                                     [[maybe_unused]] int triggerId) {
     ALOGV("%s: Autofocus state now %d, last trigger %d",
           __FUNCTION__, newState, triggerId);
 
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoExposure(uint8_t newState,
-                                                        int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoExposure([[maybe_unused]] uint8_t newState,
+                                                        [[maybe_unused]] int triggerId) {
     ALOGV("%s: Autoexposure state now %d, last trigger %d",
             __FUNCTION__, newState, triggerId);
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoWhitebalance(uint8_t newState,
-                                                            int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoWhitebalance(
+                [[maybe_unused]] uint8_t newState,
+                [[maybe_unused]] int triggerId) {
     ALOGV("%s: Auto-whitebalance state now %d, last trigger %d",
             __FUNCTION__, newState, triggerId);
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyPrepared(int streamId) {
-    (void)streamId;
-
+void Camera2ClientBase<TClientBase>::notifyPrepared([[maybe_unused]] int streamId) {
     ALOGV("%s: Stream %d now prepared",
             __FUNCTION__, streamId);
 }
@@ -434,9 +423,8 @@
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyRepeatingRequestError(long lastFrameNumber) {
-    (void)lastFrameNumber;
-
+void Camera2ClientBase<TClientBase>::notifyRepeatingRequestError(
+            [[maybe_unused]] long lastFrameNumber) {
     ALOGV("%s: Repeating request was stopped. Last frame number is %ld",
             __FUNCTION__, lastFrameNumber);
 }
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index e51d25d..d2dcdb1 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -59,6 +59,7 @@
                       uid_t clientUid,
                       int servicePid,
                       bool overrideForPerfClass,
+                      bool overrideToPortrait,
                       bool legacyClient = false);
     virtual ~Camera2ClientBase();
 
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index cd23250..c72986d 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -197,12 +197,17 @@
     return std::make_pair(systemCameraCount, publicCameraCount);
 }
 
-std::vector<std::string> CameraProviderManager::getCameraDeviceIds() const {
+std::vector<std::string> CameraProviderManager::getCameraDeviceIds(std::unordered_map<
+            std::string, std::set<std::string>>* unavailablePhysicalIds) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     std::vector<std::string> deviceIds;
     for (auto& provider : mProviders) {
         for (auto& id : provider->mUniqueCameraIds) {
             deviceIds.push_back(id);
+            if (unavailablePhysicalIds != nullptr &&
+                    provider->mUnavailablePhysicalCameras.count(id) > 0) {
+                (*unavailablePhysicalIds)[id] = provider->mUnavailablePhysicalCameras.at(id);
+            }
         }
     }
     return deviceIds;
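
With the new out-parameter, one call now reports both the logical camera ids and which of their physical cameras are currently unavailable. A hypothetical caller might consume it as follows (helper name and logging are illustrative, not part of the patch):

    #include <set>
    #include <string>
    #include <unordered_map>
    #include <vector>

    // Sketch of a hypothetical caller. CameraProviderManager and the ALOGI
    // macro are assumed to come from the surrounding camera service sources.
    static void logUnavailablePhysicalCameras(const CameraProviderManager& manager) {
        std::unordered_map<std::string, std::set<std::string>> unavailable;
        std::vector<std::string> ids = manager.getCameraDeviceIds(&unavailable);
        for (const auto& id : ids) {
            auto it = unavailable.find(id);
            if (it == unavailable.end()) continue;
            for (const auto& physicalId : it->second) {
                ALOGI("Logical camera %s: physical camera %s is unavailable",
                        id.c_str(), physicalId.c_str());
            }
        }
    }
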
@@ -318,13 +323,13 @@
 }
 
 status_t CameraProviderManager::getCameraInfo(const std::string &id,
-        hardware::CameraInfo* info) const {
+        bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo == nullptr) return NAME_NOT_FOUND;
 
-    return deviceInfo->getCameraInfo(info);
+    return deviceInfo->getCameraInfo(overrideToPortrait, portraitRotation, info);
 }
 
 status_t CameraProviderManager::isSessionConfigurationSupported(const std::string& id,
@@ -356,9 +361,11 @@
 }
 
 status_t CameraProviderManager::getCameraCharacteristics(const std::string &id,
-        bool overrideForPerfClass, CameraMetadata* characteristics) const {
+        bool overrideForPerfClass, CameraMetadata* characteristics,
+        bool overrideToPortrait) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
-    return getCameraCharacteristicsLocked(id, overrideForPerfClass, characteristics);
+    return getCameraCharacteristicsLocked(id, overrideForPerfClass, characteristics,
+            overrideToPortrait);
 }
 
 status_t CameraProviderManager::getHighestSupportedVersion(const std::string &id,
@@ -843,9 +850,6 @@
 
 void CameraProviderManager::ProviderInfo::initializeProviderInfoCommon(
         const std::vector<std::string> &devices) {
-
-    sp<StatusListener> listener = mManager->getStatusListener();
-
     for (auto& device : devices) {
         std::string id;
         status_t res = addDevice(device, CameraDeviceStatus::PRESENT, &id);
@@ -860,38 +864,22 @@
             mProviderName.c_str(), mDevices.size());
 
     // Process cached status callbacks
-    std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus =
-            std::make_unique<std::vector<CameraStatusInfoT>>();
     {
         std::lock_guard<std::mutex> lock(mInitLock);
 
         for (auto& statusInfo : mCachedStatus) {
             std::string id, physicalId;
-            status_t res = OK;
             if (statusInfo.isPhysicalCameraStatus) {
-                res = physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
+                physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
                     statusInfo.cameraId, statusInfo.physicalCameraId, statusInfo.status);
             } else {
-                res = cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
-            }
-            if (res == OK) {
-                cachedStatus->emplace_back(statusInfo.isPhysicalCameraStatus,
-                        id.c_str(), physicalId.c_str(), statusInfo.status);
+                cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
             }
         }
         mCachedStatus.clear();
 
         mInitialized = true;
     }
-
-    // The cached status change callbacks cannot be fired directly from this
-    // function, due to same-thread deadlock trying to acquire mInterfaceMutex
-    // twice.
-    if (listener != nullptr) {
-        mInitialStatusCallbackFuture = std::async(std::launch::async,
-                &CameraProviderManager::ProviderInfo::notifyInitialStatusChange, this,
-                listener, std::move(cachedStatus));
-    }
 }
 
 CameraProviderManager::ProviderInfo::DeviceInfo* CameraProviderManager::findDeviceInfoLocked(
@@ -1870,13 +1858,12 @@
 CameraProviderManager::ProviderInfo::ProviderInfo(
         const std::string &providerName,
         const std::string &providerInstance,
-        CameraProviderManager *manager) :
+        [[maybe_unused]] CameraProviderManager *manager) :
         mProviderName(providerName),
         mProviderInstance(providerInstance),
         mProviderTagid(generateVendorTagId(providerName)),
         mUniqueDeviceCount(0),
         mManager(manager) {
-    (void) mManager;
 }
 
 const std::string& CameraProviderManager::ProviderInfo::getType() const {
@@ -1961,6 +1948,7 @@
     for (auto it = mDevices.begin(); it != mDevices.end(); it++) {
         if ((*it)->mId == id) {
             mUniqueCameraIds.erase(id);
+            mUnavailablePhysicalCameras.erase(id);
             if ((*it)->isAPI1Compatible()) {
                 mUniqueAPI1CompatibleCameraIds.erase(std::remove(
                     mUniqueAPI1CompatibleCameraIds.begin(),
@@ -2031,7 +2019,9 @@
         dprintf(fd, "    Has a flash unit: %s\n",
                 device->hasFlashUnit() ? "true" : "false");
         hardware::CameraInfo info;
-        status_t res = device->getCameraInfo(&info);
+        int portraitRotation;
+        status_t res = device->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
+                &info);
         if (res != OK) {
             dprintf(fd, "   <Error reading camera info: %s (%d)>\n",
                     strerror(-res), res);
@@ -2041,7 +2031,8 @@
             dprintf(fd, "    Orientation: %d\n", info.orientation);
         }
         CameraMetadata info2;
-        res = device->getCameraCharacteristics(true /*overrideForPerfClass*/, &info2);
+        res = device->getCameraCharacteristics(true /*overrideForPerfClass*/, &info2,
+                /*overrideToPortrait*/true);
         if (res == INVALID_OPERATION) {
             dprintf(fd, "  API2 not directly supported\n");
         } else if (res != OK) {
@@ -2228,6 +2219,15 @@
         return BAD_VALUE;
     }
 
+    if (mUnavailablePhysicalCameras.count(cameraId) == 0) {
+        mUnavailablePhysicalCameras.emplace(cameraId, std::set<std::string>{});
+    }
+    if (newStatus != CameraDeviceStatus::PRESENT) {
+        mUnavailablePhysicalCameras[cameraId].insert(physicalCameraDeviceName);
+    } else {
+        mUnavailablePhysicalCameras[cameraId].erase(physicalCameraDeviceName);
+    }
+
     *id = cameraId;
     *physicalId = physicalCameraDeviceName.c_str();
     return OK;
@@ -2286,20 +2286,6 @@
     }
 }
 
-void CameraProviderManager::ProviderInfo::notifyInitialStatusChange(
-        sp<StatusListener> listener,
-        std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus) {
-    for (auto& statusInfo : *cachedStatus) {
-        if (statusInfo.isPhysicalCameraStatus) {
-            listener->onDeviceStatusChanged(String8(statusInfo.cameraId.c_str()),
-                    String8(statusInfo.physicalCameraId.c_str()), statusInfo.status);
-        } else {
-            listener->onDeviceStatusChanged(
-                    String8(statusInfo.cameraId.c_str()), statusInfo.status);
-        }
-    }
-}
-
 CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& name,
         const metadata_vendor_id_t tagId, const std::string &id,
         uint16_t minorVersion,
@@ -2318,6 +2304,7 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraInfo(
+        bool overrideToPortrait, int *portraitRotation,
         hardware::CameraInfo *info) const {
     if (info == nullptr) return BAD_VALUE;
 
@@ -2348,6 +2335,17 @@
         return NAME_NOT_FOUND;
     }
 
+    if (overrideToPortrait && (info->orientation == 0 || info->orientation == 180)) {
+        *portraitRotation = 90;
+        if (info->facing == hardware::CAMERA_FACING_FRONT) {
+            info->orientation = (360 + info->orientation - 90) % 360;
+        } else {
+            info->orientation = (360 + info->orientation + 90) % 360;
+        }
+    } else {
+        *portraitRotation = 0;
+    }
+
     return OK;
 }
 bool CameraProviderManager::ProviderInfo::DeviceInfo3::isAPI1Compatible() const {
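
To make the remap above concrete: with overrideToPortrait set, a landscape-mounted back camera reporting orientation 0 is advertised as 90 and a front camera as 270, and portraitRotation is set to 90 so the service can compensate with the matching rotate-and-crop override. A small sketch with worked values (helper names are illustrative, not part of the patch):

    #include <cassert>

    // Sketch: the orientation remap from DeviceInfo3::getCameraInfo() when
    // overrideToPortrait is requested and the sensor is landscape-mounted
    // (orientation 0 or 180). Returns the adjusted orientation.
    static int portraitOrientation(bool facingFront, int orientation) {
        if (orientation != 0 && orientation != 180) return orientation;
        return facingFront ? (360 + orientation - 90) % 360
                           : (360 + orientation + 90) % 360;
    }

    // Worked examples matching the arithmetic above.
    static void portraitOrientationExamples() {
        assert(portraitOrientation(/*facingFront*/ false, 0)   == 90);
        assert(portraitOrientation(/*facingFront*/ true,  0)   == 270);
        assert(portraitOrientation(/*facingFront*/ false, 180) == 270);
        assert(portraitOrientation(/*facingFront*/ true,  180) == 90);
        assert(portraitOrientation(/*facingFront*/ false, 90)  == 90);  // left unchanged
    }
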
@@ -2373,7 +2371,7 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraCharacteristics(
-        bool overrideForPerfClass, CameraMetadata *characteristics) const {
+        bool overrideForPerfClass, CameraMetadata *characteristics, bool overrideToPortrait) {
     if (characteristics == nullptr) return BAD_VALUE;
 
     if (!overrideForPerfClass && mCameraCharNoPCOverride != nullptr) {
@@ -2382,6 +2380,35 @@
         *characteristics = mCameraCharacteristics;
     }
 
+    if (overrideToPortrait) {
+        const auto &lensFacingEntry = characteristics->find(ANDROID_LENS_FACING);
+        const auto &sensorOrientationEntry = characteristics->find(ANDROID_SENSOR_ORIENTATION);
+        if (lensFacingEntry.count > 0 && sensorOrientationEntry.count > 0) {
+            uint8_t lensFacing = lensFacingEntry.data.u8[0];
+            int32_t sensorOrientation = sensorOrientationEntry.data.i32[0];
+            int32_t newSensorOrientation = sensorOrientation;
+
+            if (sensorOrientation == 0 || sensorOrientation == 180) {
+                if (lensFacing == ANDROID_LENS_FACING_FRONT) {
+                    newSensorOrientation = (360 + sensorOrientation - 90) % 360;
+                } else if (lensFacing == ANDROID_LENS_FACING_BACK) {
+                    newSensorOrientation = (360 + sensorOrientation + 90) % 360;
+                }
+            }
+
+            if (newSensorOrientation != sensorOrientation) {
+                ALOGV("%s: Update ANDROID_SENSOR_ORIENTATION for lens facing %d "
+                        "from %d to %d", __FUNCTION__, lensFacing, sensorOrientation,
+                        newSensorOrientation);
+                characteristics->update(ANDROID_SENSOR_ORIENTATION, &newSensorOrientation, 1);
+            }
+        }
+
+        if (characteristics->exists(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS)) {
+            characteristics->erase(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS);
+        }
+    }
+
     return OK;
 }
 
@@ -2649,9 +2676,6 @@
 }
 
 CameraProviderManager::ProviderInfo::~ProviderInfo() {
-    if (mInitialStatusCallbackFuture.valid()) {
-        mInitialStatusCallbackFuture.wait();
-    }
     // Destruction of ProviderInfo is only supposed to happen when the respective
     // CameraProvider interface dies, so do not unregister callbacks.
 }
@@ -2714,10 +2738,12 @@
 }
 
 status_t CameraProviderManager::getCameraCharacteristicsLocked(const std::string &id,
-        bool overrideForPerfClass, CameraMetadata* characteristics) const {
+        bool overrideForPerfClass, CameraMetadata* characteristics,
+        bool overrideToPortrait) const {
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo != nullptr) {
-        return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics);
+        return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics,
+                overrideToPortrait);
     }
 
     // Find hidden physical camera characteristics
@@ -2752,7 +2778,9 @@
         combo.push_back(deviceId);
 
         hardware::CameraInfo info;
-        status_t res = deviceInfo->getCameraInfo(&info);
+        int portraitRotation;
+        status_t res = deviceInfo->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
+                &info);
         if (res != OK) {
             ALOGE("%s: Error reading camera info: %s (%d)", __FUNCTION__, strerror(-res), res);
             continue;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index d049aff..8d60afd 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -23,7 +23,6 @@
 #include <set>
 #include <string>
 #include <mutex>
-#include <future>
 
 #include <camera/camera2/ConcurrentCamera.h>
 #include <camera/CameraParameters2.h>
@@ -220,7 +219,14 @@
      */
     std::pair<int, int> getCameraCount() const;
 
-    std::vector<std::string> getCameraDeviceIds() const;
+    /**
+     * Upon the function return, if unavailablePhysicalIds is not nullptr, it
+     * will contain all of the unavailable physical camera Ids represented in
+     * the form of:
+     * {[logicalCamera, {physicalCamera1, physicalCamera2, ...}], ...}.
+     */
+    std::vector<std::string> getCameraDeviceIds(std::unordered_map<
+            std::string, std::set<std::string>>* unavailablePhysicalIds = nullptr) const;
 
     /**
      * Retrieve the number of API1 compatible cameras; these are internal and
@@ -251,14 +257,15 @@
      * Return the old camera API camera info
      */
     status_t getCameraInfo(const std::string &id,
-            hardware::CameraInfo* info) const;
+            bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const;
 
     /**
      * Return API2 camera characteristics - returns NAME_NOT_FOUND if a device ID does
      * not have a v3 or newer HAL version.
      */
     status_t getCameraCharacteristics(const std::string &id,
-            bool overrideForPerfClass, CameraMetadata* characteristics) const;
+            bool overrideForPerfClass, CameraMetadata* characteristics,
+            bool overrideToPortrait) const;
 
     status_t isConcurrentSessionConfigurationSupported(
             const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>
@@ -560,19 +567,20 @@
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
             virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
-            virtual status_t getCameraInfo(hardware::CameraInfo *info) const = 0;
+            virtual status_t getCameraInfo(bool overrideToPortrait,
+                    int *portraitRotation,
+                    hardware::CameraInfo *info) const = 0;
             virtual bool isAPI1Compatible() const = 0;
             virtual status_t dumpState(int fd) = 0;
-            virtual status_t getCameraCharacteristics(bool overrideForPerfClass,
-                    CameraMetadata *characteristics) const {
-                (void) overrideForPerfClass;
-                (void) characteristics;
+            virtual status_t getCameraCharacteristics(
+                    [[maybe_unused]] bool overrideForPerfClass,
+                    [[maybe_unused]] CameraMetadata *characteristics,
+                    [[maybe_unused]] bool overrideToPortrait) {
                 return INVALID_OPERATION;
             }
-            virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
-                    CameraMetadata *characteristics) const {
-                (void) physicalCameraId;
-                (void) characteristics;
+            virtual status_t getPhysicalCameraCharacteristics(
+                    [[maybe_unused]] const std::string& physicalCameraId,
+                    [[maybe_unused]] CameraMetadata *characteristics) const {
                 return INVALID_OPERATION;
             }
 
@@ -607,6 +615,7 @@
         };
         std::vector<std::unique_ptr<DeviceInfo>> mDevices;
         std::unordered_set<std::string> mUniqueCameraIds;
+        std::unordered_map<std::string, std::set<std::string>> mUnavailablePhysicalCameras;
         int mUniqueDeviceCount;
         std::vector<std::string> mUniqueAPI1CompatibleCameraIds;
         // The initial public camera IDs published by the camera provider.
@@ -622,12 +631,15 @@
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
             virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
-            virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
+            virtual status_t getCameraInfo(bool overrideToPortrait,
+                    int *portraitRotation,
+                    hardware::CameraInfo *info) const override;
             virtual bool isAPI1Compatible() const override;
             virtual status_t dumpState(int fd) = 0;
             virtual status_t getCameraCharacteristics(
                     bool overrideForPerfClass,
-                    CameraMetadata *characteristics) const override;
+                    CameraMetadata *characteristics,
+                    bool overrideToPortrait) override;
             virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
                     CameraMetadata *characteristics) const override;
             virtual status_t isSessionConfigurationSupported(
@@ -715,8 +727,6 @@
         std::vector<CameraStatusInfoT> mCachedStatus;
         // End of scope for mInitLock
 
-        std::future<void> mInitialStatusCallbackFuture;
-
         std::unique_ptr<ProviderInfo::DeviceInfo>
         virtual initializeDeviceInfo(
                 const std::string &name, const metadata_vendor_id_t tagId,
@@ -724,9 +734,6 @@
 
         virtual status_t reCacheConcurrentStreamingCameraIdsLocked() = 0;
 
-        void notifyInitialStatusChange(sp<StatusListener> listener,
-                std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus);
-
         std::vector<std::unordered_set<std::string>> mConcurrentCameraIdCombinations;
 
         // Parse provider instance name for type and id
@@ -830,7 +837,7 @@
         const hardware::camera::common::V1_0::TorchModeStatus&);
 
     status_t getCameraCharacteristicsLocked(const std::string &id, bool overrideForPerfClass,
-            CameraMetadata* characteristics) const;
+            CameraMetadata* characteristics, bool overrideToPortrait) const;
     void filterLogicalCameraIdsLocked(std::vector<std::string>& deviceIds) const;
 
     status_t getSystemCameraKindLocked(const std::string& id, SystemCameraKind *kind) const;
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index ef68f28..d05e235 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -293,7 +293,7 @@
             if (link != STATUS_OK) {
                 ALOGW("%s: Unable to link to provider '%s' death notifications",
                         __FUNCTION__, mProviderName.c_str());
-                mManager->removeProvider(mProviderName);
+                mManager->removeProvider(mProviderInstance);
                 return nullptr;
             }
 
@@ -759,7 +759,8 @@
         bool overrideForPerfClass =
                 SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                         perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
-        res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo);
+        res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
+                /*overrideToPortrait*/true);
         if (res != OK) {
             return res;
         }
@@ -767,7 +768,7 @@
                 [this](const String8 &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
                     mManager->getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
-                                                   &physicalDeviceInfo);
+                                                   &physicalDeviceInfo, /*overrideToPortrait*/true);
                     return physicalDeviceInfo;
                 };
         std::vector<std::string> physicalCameraIds;
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index d60565f..fec7f05 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -388,7 +388,7 @@
                   __FUNCTION__,
                   mProviderName.c_str(),
                   linked.description().c_str());
-              mManager->removeProvider(mProviderName);
+              mManager->removeProvider(mProviderInstance);
               return nullptr;
             } else if (!linked) {
               ALOGW("%s: Unable to link to provider '%s' death notifications",
@@ -442,8 +442,7 @@
 }
 
 void HidlProviderInfo::serviceDied(uint64_t cookie,
-        const wp<hidl::base::V1_0::IBase>& who) {
-    (void) who;
+        [[maybe_unused]] const wp<hidl::base::V1_0::IBase>& who) {
     ALOGI("Camera provider '%s' has died; removing it", mProviderInstance.c_str());
     if (cookie != mId) {
         ALOGW("%s: Unexpected serviceDied cookie %" PRIu64 ", expected %" PRIu32,
@@ -919,7 +918,8 @@
         bool overrideForPerfClass =
                 SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                         perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
-        res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo);
+        res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
+                /*overrideToPortrait*/true);
         if (res != OK) {
             return res;
         }
@@ -927,7 +927,7 @@
                 [this](const String8 &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
                     mManager->getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
-                                                   &physicalDeviceInfo);
+                            &physicalDeviceInfo, /*overrideToPortrait*/true);
                     return physicalDeviceInfo;
                 };
         std::vector<std::string> physicalCameraIds;
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
index a556200..2ac38d5 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
@@ -451,10 +451,9 @@
     return OK;
 }
 
-void Camera3BufferManager::dump(int fd, const Vector<String16>& args) const {
+void Camera3BufferManager::dump(int fd, [[maybe_unused]] const Vector<String16>& args) const {
     Mutex::Autolock l(mLock);
 
-    (void) args;
     String8 lines;
     lines.appendFormat("      Total stream sets: %zu\n", mStreamSetMap.size());
     for (size_t i = 0; i < mStreamSetMap.size(); i++) {
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 445b397..9a627f3 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -73,7 +73,8 @@
 
 namespace android {
 
-Camera3Device::Camera3Device(const String8 &id, bool overrideForPerfClass, bool legacyClient):
+Camera3Device::Camera3Device(const String8 &id, bool overrideForPerfClass, bool overrideToPortrait,
+        bool legacyClient):
         mId(id),
         mLegacyClient(legacyClient),
         mOperatingMode(NO_MODE),
@@ -94,7 +95,8 @@
         mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID),
         mLastTemplateId(-1),
         mNeedFixupMonochromeTags(false),
-        mOverrideForPerfClass(overrideForPerfClass)
+        mOverrideForPerfClass(overrideForPerfClass),
+        mOverrideToPortrait(overrideToPortrait)
 {
     ATRACE_CALL();
     ALOGV("%s: Created device for camera %s", __FUNCTION__, mId.string());
@@ -166,7 +168,7 @@
     /** Start up request queue thread */
     mRequestThread = createNewRequestThread(
             this, mStatusTracker, mInterface, sessionParamKeys,
-            mUseHalBufManager, mSupportCameraMute);
+            mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait);
     res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
     if (res != OK) {
         SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -495,9 +497,8 @@
     return BAD_VALUE;
 }
 
-status_t Camera3Device::dump(int fd, const Vector<String16> &args) {
+status_t Camera3Device::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
     ATRACE_CALL();
-    (void)args;
 
     // Try to lock, but continue in case of failure (to avoid blocking in
     // deadlocks)
@@ -2887,7 +2888,8 @@
         sp<StatusTracker> statusTracker,
         sp<HalInterface> interface, const Vector<int32_t>& sessionParamKeys,
         bool useHalBufManager,
-        bool supportCameraMute) :
+        bool supportCameraMute,
+        bool overrideToPortrait) :
         Thread(/*canCallJava*/false),
         mParent(parent),
         mStatusTracker(statusTracker),
@@ -2916,7 +2918,8 @@
         mSessionParamKeys(sessionParamKeys),
         mLatestSessionParams(sessionParamKeys.size()),
         mUseHalBufManager(useHalBufManager),
-        mSupportCameraMute(supportCameraMute){
+        mSupportCameraMute(supportCameraMute),
+        mOverrideToPortrait(overrideToPortrait) {
     mStatusId = statusTracker->addComponent("RequestThread");
 }
 
@@ -3581,9 +3584,9 @@
         mPrevTriggers = triggerCount;
 
         // Do not override rotate&crop for stream configurations that include
-        // SurfaceViews(HW_COMPOSER) output. The display rotation there will be
-        // compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
-        bool rotateAndCropChanged = mComposerOutput ? false :
+        // SurfaceViews(HW_COMPOSER) output, unless mOverrideToPortrait is set.
+        // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
+        bool rotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
             overrideAutoRotateAndCrop(captureRequest);
         bool testPatternChanged = overrideTestPattern(captureRequest);
 
@@ -4629,6 +4632,15 @@
         const sp<CaptureRequest> &request) {
     ATRACE_CALL();
 
+    if (mOverrideToPortrait) {
+        Mutex::Autolock l(mTriggerMutex);
+        uint8_t rotateAndCrop_u8 = mRotateAndCropOverride;
+        CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+        metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
+                &rotateAndCrop_u8, 1);
+        return true;
+    }
+
     if (request->mRotateAndCropAuto) {
         Mutex::Autolock l(mTriggerMutex);
         CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
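
Note: with the hunk above, the request thread stamps ANDROID_SCALER_ROTATE_AND_CROP with the current override value on every capture request whenever overrideToPortrait is enabled, bypassing the mRotateAndCropAuto check. A simplified, self-contained sketch of that decision logic (stand-in types, not the real Camera3Device::RequestThread):

#include <cstdint>
#include <map>

// Simplified stand-ins for the real request/metadata types.
using Metadata = std::map<uint32_t, uint8_t>;
constexpr uint32_t kRotateAndCropTag = 0x1234u;  // placeholder for ANDROID_SCALER_ROTATE_AND_CROP

// Returns true when the rotate-and-crop entry in the request settings was written.
bool overrideRotateAndCrop(Metadata& requestSettings, bool overrideToPortrait,
                           bool rotateAndCropAuto, uint8_t rotateAndCropOverride) {
    if (overrideToPortrait) {
        // Portrait compatibility mode: always stamp the current override value.
        requestSettings[kRotateAndCropTag] = rotateAndCropOverride;
        return true;
    }
    if (rotateAndCropAuto) {
        // Pre-existing path (simplified): only requests that asked for AUTO are rewritten.
        requestSettings[kRotateAndCropTag] = rotateAndCropOverride;
        return true;
    }
    return false;
}

int main() {
    Metadata settings;
    overrideRotateAndCrop(settings, /*overrideToPortrait*/true,
                          /*rotateAndCropAuto*/false, /*rotateAndCropOverride*/1);
    return 0;
}
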
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 78f3e25..1a50c02 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -82,7 +82,8 @@
   friend class AidlCamera3Device;
   public:
 
-    explicit Camera3Device(const String8& id, bool overrideForPerfClass, bool legacyClient = false);
+    explicit Camera3Device(const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+            bool legacyClient = false);
 
     virtual ~Camera3Device();
     // Delete and optionally close native handles and clear the input vector afterward
@@ -810,7 +811,8 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute);
+                bool supportCameraMute,
+                bool overrideToPortrait);
         ~RequestThread();
 
         void     setNotificationListener(wp<NotificationListener> listener);
@@ -1090,6 +1092,7 @@
 
         const bool         mUseHalBufManager;
         const bool         mSupportCameraMute;
+        const bool         mOverrideToPortrait;
     };
 
     virtual sp<RequestThread> createNewRequestThread(wp<Camera3Device> /*parent*/,
@@ -1097,7 +1100,8 @@
                 sp<HalInterface> /*interface*/,
                 const Vector<int32_t>& /*sessionParamKeys*/,
                 bool /*useHalBufManager*/,
-                bool /*supportCameraMute*/) = 0;
+                bool /*supportCameraMute*/,
+                bool /*overrideToPortrait*/) = 0;
 
     sp<RequestThread> mRequestThread;
 
@@ -1367,6 +1371,10 @@
     // performance class.
     bool mOverrideForPerfClass;
 
+    // Whether the camera framework overrides the device characteristics and the
+    // per-request rotate-and-crop setting to portrait orientation for app
+    // compatibility reasons.
+    bool mOverrideToPortrait;
+
     // The current minimum expected frame duration based on AE_TARGET_FPS_RANGE
     nsecs_t mMinExpectedDuration = 0;
     // Whether the camera device runs at fixed frame rate based on AE_MODE and
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
index 19afd69..8c0ac71 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -67,8 +67,7 @@
     return INVALID_OPERATION;
 }
 
-void Camera3FakeStream::dump(int fd, const Vector<String16> &args) const {
-    (void) args;
+void Camera3FakeStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
     String8 lines;
     lines.appendFormat("    Stream[%d]: Fake\n", mId);
     write(fd, lines.string(), lines.size());
@@ -82,9 +81,8 @@
     return OK;
 }
 
-status_t Camera3FakeStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
-    (void) buffer;
-    (void) fenceFd;
+status_t Camera3FakeStream::detachBuffer([[maybe_unused]] sp<GraphicBuffer>* buffer,
+                [[maybe_unused]] int* fenceFd) {
     // Do nothing
     return OK;
 }
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index f594f84..314e007 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -73,8 +73,7 @@
     return false;
 }
 
-void Camera3IOStreamBase::dump(int fd, const Vector<String16> &args) const {
-    (void) args;
+void Camera3IOStreamBase::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
     String8 lines;
 
     uint64_t consumerUsage = 0;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 9a3f7ed..631bb43 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -104,17 +104,14 @@
 
 status_t Camera3InputStream::returnBufferCheckedLocked(
             const camera_stream_buffer &buffer,
-            nsecs_t timestamp,
-            nsecs_t readoutTimestamp,
-            bool output,
+            [[maybe_unused]] nsecs_t timestamp,
+            [[maybe_unused]] nsecs_t readoutTimestamp,
+            [[maybe_unused]] bool output,
             int32_t /*transform*/,
             const std::vector<size_t>&,
             /*out*/
             sp<Fence> *releaseFenceOut) {
 
-    (void)timestamp;
-    (void)readoutTimestamp;
-    (void)output;
     ALOG_ASSERT(!output, "Expected output to be false");
 
     status_t res;
@@ -218,8 +215,7 @@
     return OK;
 }
 
-void Camera3InputStream::dump(int fd, const Vector<String16> &args) const {
-    (void) args;
+void Camera3InputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
     String8 lines;
     lines.appendFormat("    Stream[%d]: Input\n", mId);
     write(fd, lines.string(), lines.size());
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 396104c..3035aa5 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -393,13 +393,12 @@
             const camera_stream_buffer &buffer,
             nsecs_t timestamp,
             nsecs_t readoutTimestamp,
-            bool output,
+            [[maybe_unused]] bool output,
             int32_t transform,
             const std::vector<size_t>& surface_ids,
             /*out*/
             sp<Fence> *releaseFenceOut) {
 
-    (void)output;
     ALOG_ASSERT(output, "Expected output to be true");
 
     status_t res;
@@ -522,8 +521,7 @@
     return res;
 }
 
-void Camera3OutputStream::dump(int fd, const Vector<String16> &args) const {
-    (void) args;
+void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
     String8 lines;
     lines.appendFormat("    Stream[%d]: Output\n", mId);
     lines.appendFormat("      Consumer name: %s\n", mConsumerName.string());
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index e16982b..6569395 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -521,7 +521,7 @@
         if (result->partial_result != 0)
             request.resultExtras.partialResultCount = result->partial_result;
 
-        if ((result->result != nullptr) && !states.legacyClient) {
+        if ((result->result != nullptr) && !states.legacyClient && !states.overrideToPortrait) {
             camera_metadata_ro_entry entry;
             auto ret = find_camera_metadata_ro_entry(result->result,
                     ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID, &entry);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 8c71c2b..019c8a8 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -107,6 +107,7 @@
         bool legacyClient;
         nsecs_t& minFrameDuration;
         bool& isFixedFps;
+        bool overrideToPortrait;
     };
 
     void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 88be9ff..2c21e7e 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -949,9 +949,8 @@
     }
 }
 
-void Camera3Stream::dump(int fd, const Vector<String16> &args) const
+void Camera3Stream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const
 {
-    (void)args;
     mBufferLimitLatency.dump(fd,
             "      Latency histogram for wait on max_buffers");
 }
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
index b3cb178..83caa00 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
@@ -69,7 +69,9 @@
     }
 
     // Cache the frame to match readout time interval, for up to kMaxFrameWaitTime
-    nsecs_t expectedQueueTime = mLastCameraPresentTime + readoutInterval;
+    // Because the code between here and queueBuffer() takes time to execute, keep the
+    // expected presentation interval slightly shorter than readoutInterval.
+    nsecs_t expectedQueueTime = mLastCameraPresentTime + readoutInterval - kFrameAdjustThreshold;
     nsecs_t frameWaitTime = std::min(kMaxFrameWaitTime, expectedQueueTime - currentTime);
     if (frameWaitTime > 0 && mPendingBuffers.size() < 2) {
         mBufferCond.waitRelative(mLock, frameWaitTime);
@@ -78,9 +80,9 @@
         }
         currentTime = systemTime();
     }
-    ALOGV("%s: readoutInterval %" PRId64 ", queueInterval %" PRId64 ", waited for %" PRId64
+    ALOGV("%s: readoutInterval %" PRId64 ", waited for %" PRId64
             ", timestamp %" PRId64, __FUNCTION__, readoutInterval,
-            currentTime - mLastCameraPresentTime, frameWaitTime, buffer.timestamp);
+            mPendingBuffers.size() < 2 ? frameWaitTime : 0, buffer.timestamp);
     mPendingBuffers.pop();
     queueBufferToClientLocked(buffer, currentTime);
     return true;
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
index cb9690c..f46de3d 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
@@ -86,6 +86,7 @@
     static constexpr nsecs_t kWaitDuration = 5000000LL; // 5ms
     static constexpr nsecs_t kFrameIntervalThreshold = 80000000LL; // 80ms
     static constexpr nsecs_t kMaxFrameWaitTime = 10000000LL; // 10ms
+    static constexpr nsecs_t kFrameAdjustThreshold = 2000000LL; // 2ms
 };
 
 }; //namespace camera3
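
Note: a worked example of the adjusted queue-time arithmetic, using the constants from PreviewFrameSpacer.h (illustrative timestamps; not the service code):

#include <algorithm>
#include <cstdint>
#include <cstdio>

using nsecs_t = int64_t;

int main() {
    constexpr nsecs_t kMaxFrameWaitTime     = 10000000LL;  // 10ms
    constexpr nsecs_t kFrameAdjustThreshold = 2000000LL;   //  2ms

    nsecs_t lastPresentTime = 100000000LL;  // example: last frame queued at t = 100ms
    nsecs_t readoutInterval = 33333333LL;   // example: ~30fps sensor readout
    nsecs_t currentTime     = 110000000LL;  // example: now at t = 110ms

    // New behaviour: target slightly earlier than one full readout interval, so the time
    // spent between this computation and queueBuffer() does not push the frame late.
    nsecs_t expectedQueueTime = lastPresentTime + readoutInterval - kFrameAdjustThreshold;
    nsecs_t frameWaitTime = std::min(kMaxFrameWaitTime, expectedQueueTime - currentTime);

    std::printf("wait %lld ns before queueing\n", (long long) frameWaitTime);
    return 0;
}
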
diff --git a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
index a02e5f6..9cdd365 100644
--- a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
+++ b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
@@ -142,13 +142,13 @@
                    ch :                 // pillarbox or 1:1, full height
                    cw / mRotateAspect;  // letterbox, not full height
         switch (rotateMode) {
-            case ANDROID_SCALER_ROTATE_AND_CROP_90:
+            case ANDROID_SCALER_ROTATE_AND_CROP_270:
                 transformMat[1] = -rw / ch; // +y -> -x
                 transformMat[2] =  rh / cw; // +x -> +y
                 xShift = (cw + rw) / 2; // left edge of crop to right edge of rotated
                 yShift = (ch - rh) / 2; // top edge of crop to top edge of rotated
                 break;
-            case ANDROID_SCALER_ROTATE_AND_CROP_270:
+            case ANDROID_SCALER_ROTATE_AND_CROP_90:
                 transformMat[1] =  rw / ch; // +y -> +x
                 transformMat[2] = -rh / cw; // +x -> -y
                 xShift = (cw - rw) / 2; // left edge of crop to left edge of rotated
@@ -271,13 +271,13 @@
         rx = cx + (cw - rw) / 2;
         ry = cy + (ch - rh) / 2;
         switch (rotateMode) {
-            case ANDROID_SCALER_ROTATE_AND_CROP_90:
+            case ANDROID_SCALER_ROTATE_AND_CROP_270:
                 transformMat[1] =  ch / rw; // +y -> +x
                 transformMat[2] = -cw / rh; // +x -> -y
                 xShift = -(cw - rw) / 2; // left edge of rotated to left edge of cropped
                 yShift = ry - cy + ch;   // top edge of rotated to bottom edge of cropped
                 break;
-            case ANDROID_SCALER_ROTATE_AND_CROP_270:
+            case ANDROID_SCALER_ROTATE_AND_CROP_90:
                 transformMat[1] = -ch / rw; // +y -> -x
                 transformMat[2] =  cw / rh; // +x -> +y
                 xShift = (cw + rw) / 2; // left edge of rotated to left edge of cropped
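
Note: the two hunks above swap which transform the ROTATE_AND_CROP_90 and ROTATE_AND_CROP_270 cases select; the two cases differ only in the direction of the quarter-turn rotation (the crop scaling is shared). A generic, self-contained illustration of the two opposite quarter-turn rotations about the origin (not the mapper's actual coordinate handling, which also scales and shifts into and out of the crop region):

#include <cstdio>

struct Point { float x, y; };

// Opposite quarter-turn rotations of a point about the origin. The swapped cases above
// select between these two directions when mapping coordinates to or from the rotated frame.
Point rotatePlus90(Point p)  { return { -p.y,  p.x }; }   // (x, y) -> (-y,  x)
Point rotateMinus90(Point p) { return {  p.y, -p.x }; }   // (x, y) -> ( y, -x)

int main() {
    Point p { 1.0f, 0.0f };
    Point a = rotatePlus90(p);
    Point b = rotateMinus90(p);
    std::printf("+90: (%.1f, %.1f)  -90: (%.1f, %.1f)\n", a.x, a.y, b.x, b.y);
    return 0;
}
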
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index ec28d31..1e103f2 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -163,8 +163,9 @@
 }
 
 AidlCamera3Device::AidlCamera3Device(const String8& id, bool overrideForPerfClass,
-            bool legacyClient) : Camera3Device(id, overrideForPerfClass, legacyClient) {
-        mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
+        bool overrideToPortrait, bool legacyClient) :
+        Camera3Device(id, overrideForPerfClass, overrideToPortrait, legacyClient) {
+    mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
 }
 
 status_t AidlCamera3Device::initialize(sp<CameraProviderManager> manager,
@@ -193,7 +194,8 @@
       SET_ERR("Session iface returned is null");
       return INVALID_OPERATION;
     }
-    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo);
+    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo,
+            mOverrideToPortrait);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
@@ -207,7 +209,8 @@
         for (auto& physicalId : physicalCameraIds) {
             // Do not override characteristics for physical cameras
             res = manager->getCameraCharacteristics(
-                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId]);
+                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
+                    /*overrideToPortrait*/true);
             if (res != OK) {
                 SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
                         physicalId.c_str(), strerror(-res), res);
@@ -372,8 +375,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps},
-        mResultMetadataQueue
+        *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
+        mOverrideToPortrait}, mResultMetadataQueue
     };
 
     for (const auto& result : results) {
@@ -414,8 +417,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps},
-        mResultMetadataQueue
+        *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
+        mOverrideToPortrait}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
@@ -1408,9 +1411,10 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) :
+                bool supportCameraMute,
+                bool overrideToPortrait) :
           RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
-                  supportCameraMute) {}
+                  supportCameraMute, overrideToPortrait) {}
 
 status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
@@ -1579,9 +1583,10 @@
                 sp<Camera3Device::HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) {
+                bool supportCameraMute,
+                bool overrideToPortrait) {
     return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-            useHalBufManager, supportCameraMute);
+            useHalBufManager, supportCameraMute, overrideToPortrait);
 };
 
 sp<Camera3Device::Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index fd66661..630985f 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -39,7 +39,7 @@
     using AidlRequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
     class AidlCameraDeviceCallbacks;
     friend class AidlCameraDeviceCallbacks;
-    explicit AidlCamera3Device(const String8& id, bool overrideForPerfClass,
+    explicit AidlCamera3Device(const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
             bool legacyClient = false);
 
     virtual ~AidlCamera3Device() { }
@@ -174,7 +174,8 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute);
+                bool supportCameraMute,
+                bool overrideToPortrait);
 
         status_t switchToOffline(
                 const std::vector<int32_t>& streamsToKeep,
@@ -259,7 +260,8 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) override;
+                bool supportCameraMute,
+                bool overrideToPortrait) override;
 
     virtual sp<Camera3DeviceInjectionMethods>
             createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index affdda6..816f96b 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -124,8 +124,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
-      mResultMetadataQueue
+        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -170,8 +170,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
-      mResultMetadataQueue
+        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 9557692..44c60cf 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -162,7 +162,8 @@
         return res;
     }
 
-    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo);
+    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo,
+            mOverrideToPortrait);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
@@ -176,7 +177,8 @@
         for (auto& physicalId : physicalCameraIds) {
             // Do not override characteristics for physical cameras
             res = manager->getCameraCharacteristics(
-                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId]);
+                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
+                    /*overrideToPortrait*/true);
             if (res != OK) {
                 SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
                         physicalId.c_str(), strerror(-res), res);
@@ -363,7 +365,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps}, mResultMetadataQueue
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait},
+        mResultMetadataQueue
     };
 
     //HidlCaptureOutputStates hidlStates {
@@ -425,7 +428,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps}, mResultMetadataQueue
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait},
+        mResultMetadataQueue
     };
 
     for (const auto& result : results) {
@@ -472,7 +476,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps}, mResultMetadataQueue
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait},
+        mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
@@ -698,9 +703,10 @@
                 sp<Camera3Device::HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) {
+                bool supportCameraMute,
+                bool overrideToPortrait) {
         return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-                useHalBufManager, supportCameraMute);
+                useHalBufManager, supportCameraMute, overrideToPortrait);
 };
 
 sp<Camera3Device::Camera3DeviceInjectionMethods>
@@ -1693,9 +1699,10 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) :
+                bool supportCameraMute,
+                bool overrideToPortrait) :
           RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
-                  supportCameraMute) {}
+                  supportCameraMute, overrideToPortrait) {}
 
 status_t HidlCamera3Device::HidlRequestThread::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index d56ff53..72343bc 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -31,8 +31,9 @@
             public Camera3Device {
   public:
 
-   explicit HidlCamera3Device(const String8& id, bool overrideForPerfClass,
-          bool legacyClient = false) : Camera3Device(id, overrideForPerfClass, legacyClient) { }
+   explicit HidlCamera3Device(const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+          bool legacyClient = false) : Camera3Device(id, overrideForPerfClass, overrideToPortrait,
+          legacyClient) { }
 
     virtual ~HidlCamera3Device() {}
 
@@ -172,7 +173,8 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute);
+                bool supportCameraMute,
+                bool overrideToPortrait);
 
         status_t switchToOffline(
                 const std::vector<int32_t>& streamsToKeep,
@@ -219,7 +221,8 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) override;
+                bool supportCameraMute,
+                bool overrideToPortrait) override;
 
     virtual sp<Camera3DeviceInjectionMethods>
             createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index de01ac9..705408d 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -105,8 +105,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
-      mResultMetadataQueue
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -146,8 +146,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
-      mResultMetadataQueue
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -182,8 +182,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
-      mResultMetadataQueue
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 65a0300..259e8a5 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -65,7 +65,8 @@
     HStatus status = HStatus::NO_ERROR;
     binder::Status serviceRet =
         mAidlICameraService->getCameraCharacteristics(String16(cameraId.c_str()),
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraMetadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+                &cameraMetadata);
     HCameraMetadata hidlMetadata;
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
@@ -116,7 +117,8 @@
     binder::Status serviceRet = mAidlICameraService->connectDevice(
             callbacks, String16(cameraId.c_str()), String16(""), {},
             hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
-            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&deviceRemote);
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+            /*out*/&deviceRemote);
     HStatus status = HStatus::NO_ERROR;
     if (!serviceRet.isOk()) {
         ALOGE("%s: Unable to connect to camera device", __FUNCTION__);
diff --git a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
index d3377f4..ae4d5dd 100644
--- a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
@@ -31,47 +31,48 @@
 std::map<int, std::vector<camera_metadata_tag>> static_api_level_to_keys{
       {30, {
           ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES,
+          ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
           ANDROID_CONTROL_ZOOM_RATIO_RANGE,
           ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES,
-          ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
         } },
       {31, {
-          ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
           ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION,
-          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
-          ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED,
-          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
           ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_DEFAULT_SECURE_IMAGE_SIZE,
+          ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
           ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION,
+          ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_DEFAULT_SECURE_IMAGE_SIZE,
+          ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED,
+          ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
+          ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
           ANDROID_SENSOR_INFO_BINNING_FACTOR,
+          ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
         } },
       {32, {
           ANDROID_INFO_DEVICE_STATE_ORIENTATIONS,
         } },
       {33, {
-          ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
           ANDROID_AUTOMOTIVE_LENS_FACING,
           ANDROID_AUTOMOTIVE_LOCATION,
+          ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
+          ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
+          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
           ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE,
           ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
-          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
-          ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
+          ANDROID_SENSOR_READOUT_TIMESTAMP,
         } },
 };
 
@@ -81,9 +82,9 @@
  */
 std::map<int, std::vector<camera_metadata_tag>> dynamic_api_level_to_keys{
       {30, {
+          ANDROID_CONTROL_EXTENDED_SCENE_MODE,
           ANDROID_CONTROL_ZOOM_RATIO,
           ANDROID_SCALER_ROTATE_AND_CROP,
-          ANDROID_CONTROL_EXTENDED_SCENE_MODE,
         }  },
       {31, {
           ANDROID_SENSOR_PIXEL_MODE,
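
Note: these tables map an Android API level to the camera metadata tags introduced at that level. An illustrative sketch (not the actual CameraProviderManager helper) of how such a table can be consulted to collect every tag newer than a given vendor API level, e.g. to strip them from characteristics exposed to an older VNDK:

#include <cstdio>
#include <map>
#include <vector>

// Plain ints stand in for camera_metadata_tag values.
std::vector<int> tagsNewerThan(const std::map<int, std::vector<int>>& apiLevelToKeys,
                               int vndkVersion) {
    std::vector<int> result;
    for (const auto& [level, keys] : apiLevelToKeys) {
        if (level > vndkVersion) {
            result.insert(result.end(), keys.begin(), keys.end());
        }
    }
    return result;
}

int main() {
    std::map<int, std::vector<int>> table = { {30, {1, 2}}, {31, {3}}, {33, {4, 5}} };
    auto newer = tagsNewerThan(table, 31);  // collects the two level-33 tags
    std::printf("%zu tags introduced after API level 31\n", newer.size());
    return 0;
}
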
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 97d7bf4..09f8eb6 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -229,11 +229,11 @@
     mCameraService->getCameraVendorTagCache(&cache);
 
     CameraInfo cameraInfo;
-    mCameraService->getCameraInfo(cameraId, &cameraInfo);
+    mCameraService->getCameraInfo(cameraId, /*overrideToPortrait*/false, &cameraInfo);
 
     CameraMetadata metadata;
     mCameraService->getCameraCharacteristics(cameraIdStr,
-            /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &metadata);
 }
 
 void CameraFuzzer::invokeCameraSound() {
@@ -320,7 +320,8 @@
 
         rc = mCameraService->connect(this, cameraId, String16(),
                 android::CameraService::USE_CALLING_UID, android::CameraService::USE_CALLING_PID,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraDevice);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+                &cameraDevice);
         if (!rc.isOk()) {
             // camera not connected
             return;
@@ -534,7 +535,8 @@
         sp<hardware::camera2::ICameraDeviceUser> device;
         mCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &device);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+                &device);
         if (device == nullptr) {
             continue;
         }
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index e9f6979..1a6b2e0 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -102,23 +102,57 @@
     sp<device::V3_2::ICameraDevice> mDeviceInterface;
     hardware::hidl_vec<common::V1_0::VendorTagSection> mVendorTagSections;
 
+    // Whether to call a physical camera unavailable callback upon setCallback
+    bool mHasPhysicalCameraUnavailableCallback;
+    hardware::hidl_string mLogicalCameraId;
+    hardware::hidl_string mUnavailablePhysicalCameraId;
+
     TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
             const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection) :
         mDeviceNames(devices),
         mDeviceInterface(new TestDeviceInterface(devices)),
-        mVendorTagSections (vendorSection) {}
+        mVendorTagSections (vendorSection),
+        mHasPhysicalCameraUnavailableCallback(false) {}
 
     TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
             const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection,
             android::hardware::hidl_vec<uint8_t> chars) :
         mDeviceNames(devices),
         mDeviceInterface(new TestDeviceInterface(devices, chars)),
-        mVendorTagSections (vendorSection) {}
+        mVendorTagSections (vendorSection),
+        mHasPhysicalCameraUnavailableCallback(false) {}
+
+    TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
+            const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection,
+            android::hardware::hidl_vec<uint8_t> chars,
+            const hardware::hidl_string& logicalCameraId,
+            const hardware::hidl_string& unavailablePhysicalCameraId) :
+        mDeviceNames(devices),
+        mDeviceInterface(new TestDeviceInterface(devices, chars)),
+        mVendorTagSections (vendorSection),
+        mHasPhysicalCameraUnavailableCallback(true),
+        mLogicalCameraId(logicalCameraId),
+        mUnavailablePhysicalCameraId(unavailablePhysicalCameraId) {}
 
     virtual hardware::Return<Status> setCallback(
             const sp<provider::V2_4::ICameraProviderCallback>& callbacks) override {
         mCalledCounter[SET_CALLBACK]++;
         mCallbacks = callbacks;
+        if (mHasPhysicalCameraUnavailableCallback) {
+            auto cast26 = provider::V2_6::ICameraProviderCallback::castFrom(callbacks);
+            if (!cast26.isOk()) {
+                ADD_FAILURE() << "Failed to cast ICameraProviderCallback to V2_6";
+            } else {
+                sp<provider::V2_6::ICameraProviderCallback> callback26 = cast26;
+                if (callback26 == nullptr) {
+                    ADD_FAILURE() << "V2_6::ICameraProviderCallback is null after conversion";
+                } else {
+                    callback26->physicalCameraDeviceStatusChange(mLogicalCameraId,
+                            mUnavailablePhysicalCameraId,
+                            android::hardware::camera::common::V1_0::CameraDeviceStatus::NOT_PRESENT);
+                }
+            }
+        }
         return hardware::Return<Status>(Status::OK);
     }
 
@@ -151,9 +185,8 @@
     using getCameraDeviceInterface_V1_x_cb = std::function<void(Status status,
             const sp<device::V1_0::ICameraDevice>& device)>;
     virtual hardware::Return<void> getCameraDeviceInterface_V1_x(
-            const hardware::hidl_string& cameraDeviceName,
+            [[maybe_unused]] const hardware::hidl_string& cameraDeviceName,
             getCameraDeviceInterface_V1_x_cb _hidl_cb) override {
-        (void) cameraDeviceName;
         _hidl_cb(Status::OK, nullptr); //TODO: impl. of ver. 1.0 device interface
                                        //      otherwise enumeration will fail.
         return hardware::Void();
@@ -227,9 +260,8 @@
     virtual ~TestInteractionProxy() {}
 
     virtual bool registerForNotifications(
-            const std::string &serviceName,
+            [[maybe_unused]] const std::string &serviceName,
             const sp<hidl::manager::V1_0::IServiceNotification> &notification) override {
-        (void) serviceName;
         mManagerNotificationInterface = notification;
         return true;
     }
@@ -266,12 +298,16 @@
 };
 
 struct TestStatusListener : public CameraProviderManager::StatusListener {
+    int mPhysicalCameraStatusChangeCount = 0;
+
     ~TestStatusListener() {}
 
     void onDeviceStatusChanged(const String8 &,
             CameraDeviceStatus) override {}
     void onDeviceStatusChanged(const String8 &, const String8 &,
-            CameraDeviceStatus) override {}
+            CameraDeviceStatus) override {
+        mPhysicalCameraStatusChangeCount++;
+    }
     void onTorchStatusChanged(const String8 &,
             TorchModeStatus) override {}
     void onTorchStatusChanged(const String8 &,
@@ -634,3 +670,46 @@
     ASSERT_EQ(deviceCount, deviceNames.size()) <<
             "Unexpected amount of camera devices";
 }
+
+// Test that CameraProviderManager does not trigger
+// onDeviceStatusChanged(NOT_PRESENT) for a physical camera before initialize()
+// returns.
+TEST(CameraProviderManagerTest, PhysicalCameraAvailabilityCallbackRaceTest) {
+    std::vector<hardware::hidl_string> deviceNames;
+    deviceNames.push_back("device@3.2/test/0");
+    hardware::hidl_vec<common::V1_0::VendorTagSection> vendorSection;
+
+    sp<CameraProviderManager> providerManager = new CameraProviderManager();
+    sp<TestStatusListener> statusListener = new TestStatusListener();
+    TestInteractionProxy serviceProxy;
+
+    android::hardware::hidl_vec<uint8_t> chars;
+    CameraMetadata meta;
+    int32_t charKeys[] = { ANDROID_REQUEST_AVAILABLE_CAPABILITIES };
+    meta.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, charKeys,
+            sizeof(charKeys) / sizeof(charKeys[0]));
+    uint8_t capabilities[] = { ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA };
+    meta.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, capabilities,
+            sizeof(capabilities)/sizeof(capabilities[0]));
+    uint8_t physicalCameraIds[] = { '2', '\0', '3', '\0' };
+    meta.update(ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS, physicalCameraIds,
+            sizeof(physicalCameraIds)/sizeof(physicalCameraIds[0]));
+    camera_metadata_t* metaBuffer = const_cast<camera_metadata_t*>(meta.getAndLock());
+    chars.setToExternal(reinterpret_cast<uint8_t*>(metaBuffer),
+            get_camera_metadata_size(metaBuffer));
+
+    sp<TestICameraProvider> provider = new TestICameraProvider(deviceNames,
+            vendorSection, chars, "device@3.2/test/0", "2");
+    serviceProxy.setProvider(provider);
+
+    status_t res = providerManager->initialize(statusListener, &serviceProxy);
+    ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
+
+    ASSERT_EQ(statusListener->mPhysicalCameraStatusChangeCount, 0)
+            << "Unexpected physical camera status change callback upon provider init.";
+
+    std::unordered_map<std::string, std::set<std::string>> unavailablePhysicalIds;
+    auto cameraIds = providerManager->getCameraDeviceIds(&unavailablePhysicalIds);
+    ASSERT_TRUE(unavailablePhysicalIds.count("0") > 0 && unavailablePhysicalIds["0"].count("2") > 0)
+        << "Unavailable physical camera Ids not set properly.";
+}
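
Note: the new test exercises an initialization-order property: a provider may invoke physicalCameraDeviceStatusChange() synchronously from setCallback() while CameraProviderManager::initialize() is still running, and the manager must record the unavailable physical camera rather than forward the callback to its status listener before initialize() returns. A minimal sketch of that property with illustrative stand-in types (not the real provider manager classes):

#include <cassert>
#include <functional>

struct Listener { int physicalStatusChanges = 0; };

struct Manager {
    Listener* listener = nullptr;
    bool initialized = false;
    int recordedUnavailable = 0;

    void onPhysicalCameraStatusChange() {
        if (!initialized) { recordedUnavailable++; return; }  // record, do not notify yet
        listener->physicalStatusChanges++;
    }
    void initialize(Listener* l, const std::function<void()>& providerSetCallback) {
        listener = l;
        providerSetCallback();   // the provider may call back into us right here
        initialized = true;
    }
};

int main() {
    Manager m;
    Listener l;
    m.initialize(&l, [&m] { m.onPhysicalCameraStatusChange(); });
    assert(l.physicalStatusChanges == 0);  // no premature listener callback
    assert(m.recordedUnavailable == 1);    // but the unavailable state was recorded
    return 0;
}
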
diff --git a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
index 8331136..b367571 100644
--- a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
+++ b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
@@ -355,8 +355,6 @@
 #include "DistortionMapperTest_OpenCvData.h"
 
 TEST(DistortionMapperTest, CompareToOpenCV) {
-    status_t res;
-
     float bigDistortion[] = {0.1, -0.003, 0.004, 0.02, 0.01};
 
     // Expect to match within sqrt(2) radius pixels
@@ -370,7 +368,7 @@
     using namespace openCvData;
 
     DistortionMapperInfo *mapperInfo = m.getMapperInfo();
-    res = m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, mapperInfo, /*clamp*/false,
+    m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, mapperInfo, /*clamp*/false,
             /*simple*/false);
 
     for (size_t i = 0; i < rawCoords.size(); i+=2) {
diff --git a/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp b/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp
index 3c187cd..9f86526 100644
--- a/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp
+++ b/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp
@@ -195,6 +195,7 @@
 
     // Round-trip results can't be exact since we've gone from a large int range -> small int range
     // and back, leading to quantization. For 4/3 aspect ratio, no more than +-1 error expected
+
     e = result.find(ANDROID_CONTROL_AE_REGIONS);
     EXPECT_EQUAL_WITHIN_N(full_region, e.data.i32, 1, "Round-tripped AE region isn't right");
 
@@ -209,11 +210,11 @@
     EXPECT_EQUAL_WITHIN_N(full_face, e.data.i32, 1, "App-side face rectangle isn't right");
 
     auto full_landmarks = std::vector<int32_t> {
-        full_crop[0], full_crop[1] + full_crop[3],
         full_crop[0] + full_crop[2], full_crop[1],
-        full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4,
+        full_crop[0], full_crop[1] + full_crop[3],
+        full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4,
         full_crop[0] + full_crop[2]/2, full_crop[1] + full_crop[3]/2,
-        full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4
+        full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4
     };
     e = result.find(ANDROID_STATISTICS_FACE_LANDMARKS);
     EXPECT_EQUAL_WITHIN_N(full_landmarks, e.data.i32, 1, "App-side face landmarks aren't right");
@@ -286,7 +287,6 @@
 
     // Round-trip results can't be exact since we've gone from a large int range -> small int range
     // and back, leading to quantization. For 4/3 aspect ratio, no more than +-1 error expected
-
     e = result.find(ANDROID_CONTROL_AE_REGIONS);
     EXPECT_EQUAL_WITHIN_N(full_region, e.data.i32, 1, "Round-tripped AE region isn't right");
 
@@ -301,11 +301,11 @@
     EXPECT_EQUAL_WITHIN_N(full_face, e.data.i32, 1, "App-side face rectangle isn't right");
 
     auto full_landmarks = std::vector<int32_t> {
-        full_crop[0] + full_crop[2], full_crop[1],
         full_crop[0], full_crop[1] + full_crop[3],
-        full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4,
+        full_crop[0] + full_crop[2], full_crop[1],
+        full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4,
         full_crop[0] + full_crop[2]/2, full_crop[1] + full_crop[3]/2,
-        full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4
+        full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4
     };
     e = result.find(ANDROID_STATISTICS_FACE_LANDMARKS);
     EXPECT_EQUAL_WITHIN_N(full_landmarks, e.data.i32, 1, "App-side face landmarks aren't right");
diff --git a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
index ff7aafd..b3a1d18 100644
--- a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
+++ b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
@@ -160,11 +160,9 @@
             false/*hasZoomRatioRange*/, zoomRatioRange,
             usePreCorrectArray));
 
-    size_t index = 0;
     int32_t width = testActiveArraySize[2];
     int32_t height = testActiveArraySize[3];
     if (usePreCorrectArray) {
-        index = 1;
         width = testPreCorrActiveArraySize[2];
         height = testPreCorrActiveArraySize[3];
     }
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index 863fdbe..9f08eca 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -248,10 +248,10 @@
     if (!item->getInt32("frontend", &frontend)) return false;
 
     // Optional to be included
-    int64_t apex_version = -1;
-    item->getInt64("apex_version", &apex_version);
+    std::string version = "";
+    item->getString("version", &version);
     const int result = stats_write(stats::media_metrics::MEDIA_DRM_CREATED,
-                    scheme, uuid_lsb, uuid_msb, uid, frontend, apex_version);
+                    scheme, uuid_lsb, uuid_msb, uid, frontend, version.c_str());
 
     std::stringstream log;
     log << "result:" << result << " {"
@@ -262,7 +262,7 @@
             << " uuid_msb:" << uuid_msb
             << " uid:" << uid
             << " frontend:" << frontend
-            << " apex_version:" << apex_version
+            << " version:" << version
             << " }";
     statsdLog->log(stats::media_metrics::MEDIA_DRM_CREATED, log.str());
     return true;
@@ -287,10 +287,10 @@
     if (!item->getInt32("opened_security_level", &opened_security_level)) return false;
 
     // Optional to be included
-    int64_t apex_version = -1;
-    item->getInt64("apex_version", &apex_version);
+    std::string version = "";
+    item->getString("version", &version);
     const int result = stats_write(stats::media_metrics::MEDIA_DRM_SESSION_OPENED,
-                        scheme, uuid_lsb, uuid_msb, uid, frontend, apex_version,
+                        scheme, uuid_lsb, uuid_msb, uid, frontend, version.c_str(),
                         object_nonce.c_str(), requested_security_level,
                         opened_security_level);
 
@@ -303,7 +303,7 @@
             << " uuid_msb:" << uuid_msb
             << " uid:" << uid
             << " frontend:" << frontend
-            << " apex_version:" << apex_version
+            << " version:" << version
             << " object_nonce:" << object_nonce
             << " requested_security_level:" << requested_security_level
             << " opened_security_level:" << opened_security_level
@@ -334,8 +334,8 @@
     if (!item->getInt32("error_code", &error_code)) return false;
 
     // Optional to be included
-    int64_t apex_version = -1;
-    item->getInt64("apex_version", &apex_version);
+    std::string version = "";
+    item->getString("version", &version);
     std::string session_nonce = "";
     item->getString("session_nonce", &session_nonce);
 
@@ -347,7 +347,7 @@
     item->getInt32("error_context", &error_context);
 
     const int result = stats_write(stats::media_metrics::MEDIA_DRM_ERRORED, scheme, uuid_lsb,
-                        uuid_msb, uid, frontend, apex_version, object_nonce.c_str(),
+                        uuid_msb, uid, frontend, version.c_str(), object_nonce.c_str(),
                         session_nonce.c_str(), security_level, api, error_code, cdm_err,
                         oem_err, error_context);
 
@@ -360,7 +360,7 @@
             << " uuid_msb:" << uuid_msb
             << " uid:" << uid
             << " frontend:" << frontend
-            << " apex_version:" << apex_version
+            << " version:" << version
             << " object_nonce:" << object_nonce
             << " session_nonce:" << session_nonce
             << " security_level:" << security_level
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index 5d80744..2b8245e 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -16,6 +16,7 @@
         "aidl/android/media/MediaResourceSubType.aidl",
         "aidl/android/media/MediaResourceParcel.aidl",
         "aidl/android/media/MediaResourcePolicyParcel.aidl",
+        "aidl/android/media/ClientInfoParcel.aidl",
     ],
     path: "aidl",
 }
@@ -87,10 +88,15 @@
         "libbinder_ndk",
         "libutils",
         "liblog",
+        "libstats_media_metrics",
+        "libstatspull",
+        "libstatssocket",
+        "libprotobuf-cpp-lite",
     ],
 
     static_libs: [
         "resourceobserver_aidl_interface-V1-ndk",
+        "libplatformprotos",
     ],
 
     include_dirs: ["frameworks/av/include"],
@@ -101,4 +107,10 @@
     ],
 
     export_include_dirs: ["."],
+
+    export_shared_lib_headers: [
+        "libstats_media_metrics",
+        "libstatspull",
+        "libstatssocket",
+    ],
 }
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 4d18876..5582528 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -34,6 +34,7 @@
 #include <sys/stat.h>
 #include <sys/time.h>
 #include <unistd.h>
+#include <stats_media_metrics.h>
 
 #include "IMediaResourceMonitor.h"
 #include "ResourceManagerService.h"
@@ -42,6 +43,14 @@
 
 namespace android {
 
+using stats::media_metrics::stats_write;
+using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED;
+using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
+using stats::media_metrics::\
+    MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
+using stats::media_metrics::\
+    MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
+
 //static
 std::mutex ResourceManagerService::sCookieLock;
 //static
@@ -97,7 +106,8 @@
 
     service->overridePid(mPid, -1);
     // thiz is freed in the call below, so it must be last call referring thiz
-    service->removeResource(mPid, mClientId, false /*checkValid*/);
+    ClientInfoParcel clientInfo{.pid = mPid, .id = mClientId};
+    service->removeResource(clientInfo, false /*checkValid*/);
 }
 
 class OverrideProcessInfoDeathNotifier : public DeathNotifier {
@@ -183,6 +193,7 @@
 }
 
 static ResourceInfo& getResourceInfoForEdit(uid_t uid, int64_t clientId,
+                                            const std::string& name,
         const std::shared_ptr<IResourceManagerClient>& client, ResourceInfos& infos) {
     ssize_t index = infos.indexOfKey(clientId);
 
@@ -190,6 +201,7 @@
         ResourceInfo info;
         info.uid = uid;
         info.clientId = clientId;
+        info.name = name;
         info.client = client;
         info.cookie = 0;
         info.pendingRemoval = false;
@@ -262,7 +274,15 @@
 
     result.append("  Processes:\n");
     for (size_t i = 0; i < mapCopy.size(); ++i) {
-        snprintf(buffer, SIZE, "    Pid: %d\n", mapCopy.keyAt(i));
+        int pid = mapCopy.keyAt(i);
+        snprintf(buffer, SIZE, "    Pid: %d\n", pid);
+        result.append(buffer);
+        int priority = 0;
+        if (getPriority_l(pid, &priority)) {
+            snprintf(buffer, SIZE, "    Priority: %d\n", priority);
+        } else {
+            snprintf(buffer, SIZE, "    Priority: <unknown>\n");
+        }
         result.append(buffer);
 
         const ResourceInfos &infos = mapCopy.valueAt(i);
@@ -273,7 +293,7 @@
 
             std::string clientName = "<unknown client>";
             if (infos[j].client != nullptr) {
-                Status status = infos[j].client->getName(&clientName);
+                clientName = infos[j].name;
             }
             snprintf(buffer, SIZE, "        Name: %s\n", clientName.c_str());
             result.append(buffer);
@@ -343,7 +363,9 @@
     std::shared_ptr<ResourceManagerService> service =
             ::ndk::SharedRefBase::make<ResourceManagerService>();
     binder_status_t status =
-            AServiceManager_addService(service->asBinder().get(), getServiceName());
+            AServiceManager_addServiceWithFlags(
+                        service->asBinder().get(), getServiceName(),
+                        AServiceManager_AddServiceFlag::ADD_SERVICE_ALLOW_ISOLATED);
     if (status != STATUS_OK) {
         return;
     }
@@ -433,11 +455,15 @@
     }
 }
 
-Status ResourceManagerService::addResource(int32_t pid, int32_t uid, int64_t clientId,
+Status ResourceManagerService::addResource(const ClientInfoParcel& clientInfo,
         const std::shared_ptr<IResourceManagerClient>& client,
         const std::vector<MediaResourceParcel>& resources) {
-    String8 log = String8::format("addResource(pid %d, clientId %lld, resources %s)",
-            pid, (long long) clientId, getString(resources).string());
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+    int64_t clientId = clientInfo.id;
+    const std::string& name = clientInfo.name;
+    String8 log = String8::format("addResource(pid %d, uid %d clientId %lld, resources %s)",
+            pid, uid, (long long) clientId, getString(resources).string());
     mServiceLog->add(log);
 
     Mutex::Autolock lock(mLock);
@@ -450,7 +476,7 @@
         uid = callingUid;
     }
     ResourceInfos& infos = getResourceInfosForEdit(pid, mMap);
-    ResourceInfo& info = getResourceInfoForEdit(uid, clientId, client, infos);
+    ResourceInfo& info = getResourceInfoForEdit(uid, clientId, name, client, infos);
     ResourceList resourceAdded;
 
     for (size_t i = 0; i < resources.size(); ++i) {
@@ -489,13 +515,50 @@
         mObserverService->onResourceAdded(uid, pid, resourceAdded);
     }
     notifyResourceGranted(pid, resources);
+
+    // Increase the instance count of the resource associated with this client.
+    increaseResourceInstanceCount(clientId, name);
+
     return Status::ok();
 }
 
-Status ResourceManagerService::removeResource(int32_t pid, int64_t clientId,
+void ResourceManagerService::increaseResourceInstanceCount(int64_t clientId,
+                                                           const std::string& name) {
+    // Check whether this client has been looked into already.
+    if (mClientIdSet.find(clientId) == mClientIdSet.end()) {
+        mClientIdSet.insert(clientId);
+        // Update the resource instance count.
+        auto found = mConcurrentResourceCountMap.find(name);
+        if (found == mConcurrentResourceCountMap.end()) {
+            mConcurrentResourceCountMap[name] = 1;
+        } else {
+            found->second++;
+        }
+    }
+}
+
+void ResourceManagerService::decreaseResourceInstanceCount(int64_t clientId,
+                                                           const std::string& name) {
+    // Since this client has been removed, remove it from mClientIdSet
+    mClientIdSet.erase(clientId);
+    // Update the resource instance count also.
+    auto found = mConcurrentResourceCountMap.find(name);
+    if (found != mConcurrentResourceCountMap.end()) {
+        if (found->second == 1) {
+            mConcurrentResourceCountMap.erase(found);
+        } else {
+            found->second--;
+        }
+    }
+}
+
+Status ResourceManagerService::removeResource(const ClientInfoParcel& clientInfo,
         const std::vector<MediaResourceParcel>& resources) {
-    String8 log = String8::format("removeResource(pid %d, clientId %lld, resources %s)",
-            pid, (long long) clientId, getString(resources).string());
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+    int64_t clientId = clientInfo.id;
+    String8 log = String8::format("removeResource(pid %d, uid %d clientId %lld, resources %s)",
+            pid, uid, (long long) clientId, getString(resources).string());
     mServiceLog->add(log);
 
     Mutex::Autolock lock(mLock);
@@ -555,15 +618,17 @@
     return Status::ok();
 }
 
-Status ResourceManagerService::removeClient(int32_t pid, int64_t clientId) {
-    removeResource(pid, clientId, true /*checkValid*/);
+Status ResourceManagerService::removeClient(const ClientInfoParcel& clientInfo) {
+    removeResource(clientInfo, true /*checkValid*/);
     return Status::ok();
 }
 
-Status ResourceManagerService::removeResource(int pid, int64_t clientId, bool checkValid) {
-    String8 log = String8::format(
-            "removeResource(pid %d, clientId %lld)",
-            pid, (long long) clientId);
+Status ResourceManagerService::removeResource(const ClientInfoParcel& clientInfo, bool checkValid) {
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+    int64_t clientId = clientInfo.id;
+    String8 log = String8::format("removeResource(pid %d, uid %d clientId %lld)",
+            pid, uid, (long long) clientId);
     mServiceLog->add(log);
 
     Mutex::Autolock lock(mLock);
@@ -591,6 +656,10 @@
         onLastRemoved(it->second, info);
     }
 
+    // Since this client has been removed, decrease the corresponding
+    // resources instance count.
+    decreaseResourceInstanceCount(clientId, info.name);
+
     removeCookieAndUnlink_l(info.client, info.cookie);
 
     if (mObserverService != nullptr && !info.resources.empty()) {
@@ -601,25 +670,30 @@
     return Status::ok();
 }
 
-void ResourceManagerService::getClientForResource_l(int callingPid, const MediaResourceParcel *res,
+void ResourceManagerService::getClientForResource_l(int callingPid,
+        const MediaResourceParcel *res,
+        PidUidVector* idVector,
         Vector<std::shared_ptr<IResourceManagerClient>> *clients) {
     if (res == NULL) {
         return;
     }
     std::shared_ptr<IResourceManagerClient> client;
-    if (getLowestPriorityBiggestClient_l(callingPid, res->type, res->subType, &client)) {
+    if (getLowestPriorityBiggestClient_l(callingPid, res->type, res->subType, idVector, &client)) {
         clients->push_back(client);
     }
 }
 
-Status ResourceManagerService::reclaimResource(int32_t callingPid,
+Status ResourceManagerService::reclaimResource(const ClientInfoParcel& clientInfo,
         const std::vector<MediaResourceParcel>& resources, bool* _aidl_return) {
-    String8 log = String8::format("reclaimResource(callingPid %d, resources %s)",
-            callingPid, getString(resources).string());
+    int32_t callingPid = clientInfo.pid;
+    std::string clientName = clientInfo.name;
+    String8 log = String8::format("reclaimResource(callingPid %d, uid %d resources %s)",
+            callingPid, clientInfo.uid, getString(resources).string());
     mServiceLog->add(log);
     *_aidl_return = false;
 
     Vector<std::shared_ptr<IResourceManagerClient>> clients;
+    PidUidVector idVector;
     {
         Mutex::Autolock lock(mLock);
         if (!mProcessInfo->isPidTrusted(callingPid)) {
@@ -655,13 +729,13 @@
         if (secureCodec != NULL) {
             if (!mSupportsMultipleSecureCodecs) {
                 if (!getAllClients_l(callingPid, MediaResource::Type::kSecureCodec,
-                            secureCodec->subType, &clients)) {
+                            secureCodec->subType, &idVector, &clients)) {
                     return Status::ok();
                 }
             }
             if (!mSupportsSecureWithNonSecureCodec) {
                 if (!getAllClients_l(callingPid, MediaResource::Type::kNonSecureCodec,
-                            secureCodec->subType, &clients)) {
+                            secureCodec->subType, &idVector, &clients)) {
                     return Status::ok();
                 }
             }
@@ -669,13 +743,13 @@
         if (nonSecureCodec != NULL) {
             if (!mSupportsSecureWithNonSecureCodec) {
                 if (!getAllClients_l(callingPid, MediaResource::Type::kSecureCodec,
-                        nonSecureCodec->subType, &clients)) {
+                        nonSecureCodec->subType, &idVector, &clients)) {
                     return Status::ok();
                 }
             }
         }
         if (drmSession != NULL) {
-            getClientForResource_l(callingPid, drmSession, &clients);
+            getClientForResource_l(callingPid, drmSession, &idVector, &clients);
             if (clients.size() == 0) {
                 return Status::ok();
             }
@@ -683,32 +757,108 @@
 
         if (clients.size() == 0) {
             // if no secure/non-secure codec conflict, run second pass to handle other resources.
-            getClientForResource_l(callingPid, graphicMemory, &clients);
+            getClientForResource_l(callingPid, graphicMemory, &idVector, &clients);
         }
 
         if (clients.size() == 0) {
             // if we are here, run the third pass to free one codec with the same type.
-            getClientForResource_l(callingPid, secureCodec, &clients);
-            getClientForResource_l(callingPid, nonSecureCodec, &clients);
+            getClientForResource_l(callingPid, secureCodec, &idVector, &clients);
+            getClientForResource_l(callingPid, nonSecureCodec, &idVector, &clients);
         }
 
         if (clients.size() == 0) {
             // if we are here, run the fourth pass to free one codec with the different type.
             if (secureCodec != NULL) {
                 MediaResource temp(MediaResource::Type::kNonSecureCodec, secureCodec->subType, 1);
-                getClientForResource_l(callingPid, &temp, &clients);
+                getClientForResource_l(callingPid, &temp, &idVector, &clients);
             }
             if (nonSecureCodec != NULL) {
                 MediaResource temp(MediaResource::Type::kSecureCodec, nonSecureCodec->subType, 1);
-                getClientForResource_l(callingPid, &temp, &clients);
+                getClientForResource_l(callingPid, &temp, &idVector, &clients);
             }
         }
     }
 
     *_aidl_return = reclaimUnconditionallyFrom(clients);
+
+    // Log Reclaim Pushed Atom to statsd
+    pushReclaimAtom(clientInfo, clients, idVector, *_aidl_return);
+
     return Status::ok();
 }
 
+void ResourceManagerService::pushReclaimAtom(const ClientInfoParcel& clientInfo,
+                        const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
+                        const PidUidVector& idVector, bool reclaimed) {
+    // Construct the metrics for codec reclaim as a pushed atom.
+    // 1. Information about the requester.
+    //  - UID and the priority (oom score)
+    int32_t callingPid = clientInfo.pid;
+    int32_t requesterUid = clientInfo.uid;
+    std::string clientName = clientInfo.name;
+    int requesterPriority = -1;
+    getPriority_l(callingPid, &requesterPriority);
+
+    //  2. Information about the codec.
+    //  - Name of the codec requested
+    //  - Number of concurrent codecs running.
+    int32_t noOfConcurrentCodecs = 0;
+    auto found = mConcurrentResourceCountMap.find(clientName);
+    if (found != mConcurrentResourceCountMap.end()) {
+        noOfConcurrentCodecs = found->second;
+    }
+
+    // 3. Information about the Reclaim:
+    // - Status of reclaim request
+    // - How many codecs are reclaimed
+    // - For each codecs reclaimed, information of the process that it belonged to:
+    //    - UID and the Priority (oom score)
+    int32_t reclaimStatus = MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
+    if (!reclaimed) {
+      if (clients.size() == 0) {
+        // No clients to reclaim from
+        reclaimStatus =
+            MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
+      } else {
+        // Couldn't reclaim resources from the clients
+        reclaimStatus =
+            MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
+      }
+    }
+    int32_t noOfCodecsReclaimed = clients.size();
+    int32_t targetIndex = 1;
+    for (const auto& id : idVector) {
+        int32_t targetUid = id.second;
+        int targetPriority = -1;
+        getPriority_l(id.first, &targetPriority);
+        // Post the pushed atom
+        int result = stats_write(
+            MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
+            requesterUid,
+            requesterPriority,
+            clientName.c_str(),
+            noOfConcurrentCodecs,
+            reclaimStatus,
+            noOfCodecsReclaimed,
+            targetIndex,
+            targetUid,
+            targetPriority);
+        ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
+              "Requester[pid(%d): uid(%d): priority(%d)] "
+              "Codec: [%s] "
+              "No of concurrent codecs: %d "
+              "Reclaim Status: %d "
+              "No of codecs reclaimed: %d "
+              "Target[%d][pid(%d): uid(%d): priority(%d)] "
+              "Atom Size: %d",
+              __func__, callingPid, requesterUid, requesterPriority,
+              clientName.c_str(), noOfConcurrentCodecs,
+              reclaimStatus, noOfCodecsReclaimed,
+              targetIndex, id.first, targetUid, targetPriority, result);
+        targetIndex++;
+    }
+}
+
 bool ResourceManagerService::reclaimUnconditionallyFrom(
         const Vector<std::shared_ptr<IResourceManagerClient>> &clients) {
     if (clients.size() == 0) {
@@ -868,7 +1018,9 @@
     mProcessInfoOverrideMap.erase(pid);
 }
 
-Status ResourceManagerService::markClientForPendingRemoval(int32_t pid, int64_t clientId) {
+Status ResourceManagerService::markClientForPendingRemoval(const ClientInfoParcel& clientInfo) {
+    int32_t pid = clientInfo.pid;
+    int64_t clientId = clientInfo.id;
     String8 log = String8::format(
             "markClientForPendingRemoval(pid %d, clientId %lld)",
             pid, (long long) clientId);
@@ -926,7 +1078,8 @@
                                                            MediaResource::SubType::kVideoCodec,
                                                            MediaResource::SubType::kImageCodec}) {
                         std::shared_ptr<IResourceManagerClient> client;
-                        if (getBiggestClientPendingRemoval_l(pid, type, subType, &client)) {
+                        uid_t uid = 0;
+                        if (getBiggestClientPendingRemoval_l(pid, type, subType, uid, &client)) {
                             clients.add(client);
                             continue;
                         }
@@ -935,8 +1088,9 @@
                 // Non-codec resources are shared by audio, video and image codecs (no subtype).
                 default:
                     std::shared_ptr<IResourceManagerClient> client;
+                    uid_t uid = 0;
                     if (getBiggestClientPendingRemoval_l(pid, type,
-                            MediaResource::SubType::kUnspecifiedSubType, &client)) {
+                            MediaResource::SubType::kUnspecifiedSubType, uid, &client)) {
                         clients.add(client);
                     }
                     break;
@@ -963,8 +1117,12 @@
 }
 
 bool ResourceManagerService::getAllClients_l(int callingPid, MediaResource::Type type,
-        MediaResource::SubType subType, Vector<std::shared_ptr<IResourceManagerClient>> *clients) {
+        MediaResource::SubType subType,
+        PidUidVector* idVector,
+        Vector<std::shared_ptr<IResourceManagerClient>> *clients) {
     Vector<std::shared_ptr<IResourceManagerClient>> temp;
+    PidUidVector tempIdList;
+
     for (size_t i = 0; i < mMap.size(); ++i) {
         ResourceInfos &infos = mMap.editValueAt(i);
         for (size_t j = 0; j < infos.size(); ++j) {
@@ -977,6 +1135,7 @@
                     return false;
                 }
                 temp.push_back(infos[j].client);
+                tempIdList.emplace_back(mMap.keyAt(i), infos[j].uid);
             }
         }
     }
@@ -985,19 +1144,24 @@
         return true;
     }
     clients->appendVector(temp);
+    idVector->insert(std::end(*idVector), std::begin(tempIdList), std::end(tempIdList));
     return true;
 }
 
 bool ResourceManagerService::getLowestPriorityBiggestClient_l(int callingPid,
-        MediaResource::Type type, MediaResource::SubType subType,
+        MediaResource::Type type,
+        MediaResource::SubType subType,
+        PidUidVector* idVector,
         std::shared_ptr<IResourceManagerClient> *client) {
     int lowestPriorityPid;
     int lowestPriority;
     int callingPriority;
+    uid_t uid = 0;
 
     // Before looking into other processes, check if we have clients marked for
     // pending removal in the same process.
-    if (getBiggestClientPendingRemoval_l(callingPid, type, subType, client)) {
+    if (getBiggestClientPendingRemoval_l(callingPid, type, subType, uid, client)) {
+        idVector->emplace_back(callingPid, uid);
         return true;
     }
     if (!getPriority_l(callingPid, &callingPriority)) {
@@ -1014,9 +1178,11 @@
         return false;
     }
 
-    if (!getBiggestClient_l(lowestPriorityPid, type, subType, client)) {
+    if (!getBiggestClient_l(lowestPriorityPid, type, subType, uid, client)) {
         return false;
     }
+
+    idVector->emplace_back(lowestPriorityPid, uid);
     return true;
 }
 
@@ -1068,12 +1234,14 @@
 }
 
 bool ResourceManagerService::getBiggestClientPendingRemoval_l(int pid, MediaResource::Type type,
-        MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client) {
-    return getBiggestClient_l(pid, type, subType, client, true /* pendingRemovalOnly */);
+        MediaResource::SubType subType, uid_t& uid,
+        std::shared_ptr<IResourceManagerClient> *client) {
+    return getBiggestClient_l(pid, type, subType, uid, client, true /* pendingRemovalOnly */);
 }
 
 bool ResourceManagerService::getBiggestClient_l(int pid, MediaResource::Type type,
-        MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client,
+        MediaResource::SubType subType, uid_t& uid,
+        std::shared_ptr<IResourceManagerClient> *client,
         bool pendingRemovalOnly) {
     ssize_t index = mMap.indexOfKey(pid);
     if (index < 0) {
@@ -1096,6 +1264,7 @@
                 if (resource.value > largestValue) {
                     largestValue = resource.value;
                     clientTemp = infos[i].client;
+                    uid = infos[i].uid;
                 }
             }
         }
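
Taken together, the ResourceManagerService.cpp changes above do three things: thread a (pid, uid) list through the client-selection helpers, classify the reclaim outcome, and push one MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom per reclaim target. A condensed sketch of that control flow, with a hypothetical emitAtom() in place of the generated stats_write() and simplified status constants:

    #include <cstdint>
    #include <string>
    #include <sys/types.h>
    #include <utility>
    #include <vector>

    enum ReclaimStatus { kSuccess, kFailedNoClients, kFailedReclaimResources };
    using PidUidVector = std::vector<std::pair<int32_t, uid_t>>;

    // Hypothetical stand-in for stats::media_metrics::stats_write(MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED, ...).
    static void emitAtom(int32_t requesterUid, int requesterPriority, const std::string& codec,
                         int concurrent, ReclaimStatus status, int reclaimed,
                         int targetIndex, int32_t targetUid) {}

    void pushReclaimAtomSketch(int32_t requesterUid, int requesterPriority,
                               const std::string& codecName, int concurrentCodecs,
                               size_t numCandidates, const PidUidVector& targets, bool reclaimed) {
        // Classify the outcome: success, nothing to reclaim from, or the reclaim attempt failed.
        ReclaimStatus status = kSuccess;
        if (!reclaimed) {
            status = (numCandidates == 0) ? kFailedNoClients : kFailedReclaimResources;
        }
        int targetIndex = 1;
        for (const auto& [targetPid, targetUid] : targets) {
            // One atom per target, tagged with its 1-based position in the reclaim list;
            // the real code also resolves each target's priority (oom score) from its pid.
            (void)targetPid;
            emitAtom(requesterUid, requesterPriority, codecName, concurrentCodecs,
                     status, static_cast<int>(numCandidates), targetIndex++, targetUid);
        }
    }
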
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index c636a0f..0016a19 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -19,7 +19,9 @@
 #define ANDROID_MEDIA_RESOURCEMANAGERSERVICE_H
 
 #include <map>
+#include <set>
 #include <mutex>
+#include <string>
 
 #include <aidl/android/media/BnResourceManagerService.h>
 #include <arpa/inet.h>
@@ -43,20 +45,24 @@
 using ::aidl::android::media::BnResourceManagerService;
 using ::aidl::android::media::MediaResourceParcel;
 using ::aidl::android::media::MediaResourcePolicyParcel;
+using ::aidl::android::media::ClientInfoParcel;
 
 typedef std::map<std::tuple<
         MediaResource::Type, MediaResource::SubType, std::vector<uint8_t>>,
         MediaResourceParcel> ResourceList;
 
 struct ResourceInfo {
-    int64_t clientId;
     uid_t uid;
+    int64_t clientId;
+    std::string name;
     std::shared_ptr<IResourceManagerClient> client;
     uintptr_t cookie{0};
     ResourceList resources;
     bool pendingRemoval{false};
 };
 
+typedef std::vector<std::pair<int32_t, uid_t>> PidUidVector;
+
 // TODO: convert these to std::map
 typedef KeyedVector<int64_t, ResourceInfo> ResourceInfos;
 typedef KeyedVector<int, ResourceInfos> PidResourceInfosMap;
@@ -85,31 +91,32 @@
     // IResourceManagerService interface
     Status config(const std::vector<MediaResourcePolicyParcel>& policies) override;
 
-    Status addResource(int32_t pid, int32_t uid, int64_t clientId,
-            const std::shared_ptr<IResourceManagerClient>& client,
-            const std::vector<MediaResourceParcel>& resources) override;
+    Status addResource(const ClientInfoParcel& clientInfo,
+                       const std::shared_ptr<IResourceManagerClient>& client,
+                       const std::vector<MediaResourceParcel>& resources) override;
 
-    Status removeResource(int32_t pid, int64_t clientId,
-            const std::vector<MediaResourceParcel>& resources) override;
+    Status removeResource(const ClientInfoParcel& clientInfo,
+                          const std::vector<MediaResourceParcel>& resources) override;
 
-    Status removeClient(int32_t pid, int64_t clientId) override;
+    Status removeClient(const ClientInfoParcel& clientInfo) override;
 
     // Tries to reclaim resource from processes with lower priority than the calling process
     // according to the requested resources.
     // Returns true if any resource has been reclaimed, otherwise returns false.
-    Status reclaimResource(int32_t callingPid, const std::vector<MediaResourceParcel>& resources,
-            bool* _aidl_return) override;
+    Status reclaimResource(const ClientInfoParcel& clientInfo,
+                           const std::vector<MediaResourceParcel>& resources,
+                           bool* _aidl_return) override;
 
-    Status overridePid(int originalPid, int newPid) override;
+    Status overridePid(int32_t originalPid, int32_t newPid) override;
 
-    Status overrideProcessInfo(const std::shared_ptr<IResourceManagerClient>& client, int pid,
-            int procState, int oomScore) override;
+    Status overrideProcessInfo(const std::shared_ptr<IResourceManagerClient>& client,
+                               int32_t pid, int32_t procState, int32_t oomScore) override;
 
-    Status markClientForPendingRemoval(int32_t pid, int64_t clientId) override;
+    Status markClientForPendingRemoval(const ClientInfoParcel& clientInfo) override;
 
     Status reclaimResourcesFromClientsPendingRemoval(int32_t pid) override;
 
-    Status removeResource(int pid, int64_t clientId, bool checkValid);
+    Status removeResource(const ClientInfoParcel& clientInfo, bool checkValid);
 
 private:
     friend class ResourceManagerServiceTest;
@@ -124,13 +131,15 @@
     // Returns false if any client belongs to a process with higher priority than the
     // calling process. The clients will remain unchanged if returns false.
     bool getAllClients_l(int callingPid, MediaResource::Type type, MediaResource::SubType subType,
+            PidUidVector* idList,
             Vector<std::shared_ptr<IResourceManagerClient>> *clients);
 
     // Gets the client who owns specified resource type from lowest possible priority process.
     // Returns false if the calling process priority is not higher than the lowest process
     // priority. The client will remain unchanged if returns false.
     bool getLowestPriorityBiggestClient_l(int callingPid, MediaResource::Type type,
-            MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client);
+            MediaResource::SubType subType, PidUidVector* idList,
+            std::shared_ptr<IResourceManagerClient> *client);
 
     // Gets lowest priority process that has the specified resource type.
     // Returns false if failed. The output parameters will remain unchanged if failed.
@@ -141,17 +150,19 @@
     // Returns false with no change to client if there are no clients holding resources of this
     // type.
     bool getBiggestClient_l(int pid, MediaResource::Type type, MediaResource::SubType subType,
-            std::shared_ptr<IResourceManagerClient> *client,
+            uid_t& uid, std::shared_ptr<IResourceManagerClient> *client,
             bool pendingRemovalOnly = false);
     // Same method as above, but with pendingRemovalOnly as true.
     bool getBiggestClientPendingRemoval_l(int pid, MediaResource::Type type,
-            MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client);
+            MediaResource::SubType subType, uid_t& uid,
+            std::shared_ptr<IResourceManagerClient> *client);
 
     bool isCallingPriorityHigher_l(int callingPid, int pid);
 
     // A helper function basically calls getLowestPriorityBiggestClient_l and add
     // the result client to the given Vector.
     void getClientForResource_l(int callingPid, const MediaResourceParcel *res,
+            PidUidVector* idList,
             Vector<std::shared_ptr<IResourceManagerClient>> *clients);
 
     void onFirstAdded(const MediaResourceParcel& res, const ResourceInfo& clientInfo);
@@ -171,6 +182,15 @@
     void removeCookieAndUnlink_l(const std::shared_ptr<IResourceManagerClient>& client,
                                  uintptr_t cookie);
 
+    // To increase/decrease the number of instances of a given resource
+    // associated with a client.
+    void increaseResourceInstanceCount(int64_t clientId, const std::string& name);
+    void decreaseResourceInstanceCount(int64_t clientId, const std::string& name);
+
+    void pushReclaimAtom(const ClientInfoParcel& clientInfo,
+                         const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
+                         const PidUidVector& idList, bool reclaimed);
+
     mutable Mutex mLock;
     sp<ProcessInfoInterface> mProcessInfo;
     sp<SystemCallbackInterface> mSystemCB;
@@ -191,6 +211,11 @@
     static std::map<uintptr_t, sp<DeathNotifier> > sCookieToDeathNotifierMap
             GUARDED_BY(sCookieLock);
     std::shared_ptr<ResourceObserverService> mObserverService;
+
+    // List of active clients
+    std::set<int64_t> mClientIdSet;
+    // Map of resources (name) and number of concurrent instances
+    std::map<std::string, int> mConcurrentResourceCountMap;
 };
 
 // ----------------------------------------------------------------------------
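
The two new members at the end of the header (mClientIdSet and mConcurrentResourceCountMap) back the per-resource-name concurrency count reported in the reclaim atom; the client-id set makes the increment idempotent per client. Stripped of service context, the bookkeeping amounts to the following sketch (same logic as increase/decreaseResourceInstanceCount above; names here are illustrative):

    #include <cstdint>
    #include <map>
    #include <set>
    #include <string>

    struct ConcurrencyTracker {
        std::set<int64_t> clientIds;             // clients already counted
        std::map<std::string, int> countByName;  // resource name -> live instances

        void onClientAdded(int64_t clientId, const std::string& name) {
            // Count each client at most once, no matter how often it adds resources.
            if (clientIds.insert(clientId).second) {
                ++countByName[name];
            }
        }
        void onClientRemoved(int64_t clientId, const std::string& name) {
            clientIds.erase(clientId);
            auto it = countByName.find(name);
            if (it != countByName.end() && --it->second == 0) {
                countByName.erase(it);           // drop entries that reach zero
            }
        }
    };
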
diff --git a/services/mediaresourcemanager/ResourceObserverService.cpp b/services/mediaresourcemanager/ResourceObserverService.cpp
index 4e97406..ebe3903 100644
--- a/services/mediaresourcemanager/ResourceObserverService.cpp
+++ b/services/mediaresourcemanager/ResourceObserverService.cpp
@@ -100,8 +100,10 @@
 std::shared_ptr<ResourceObserverService> ResourceObserverService::instantiate() {
     std::shared_ptr<ResourceObserverService> observerService =
             ::ndk::SharedRefBase::make<ResourceObserverService>();
-    binder_status_t status = AServiceManager_addService(observerService->asBinder().get(),
-            ResourceObserverService::getServiceName());
+    binder_status_t status = AServiceManager_addServiceWithFlags(
+            observerService->asBinder().get(), ResourceObserverService::getServiceName(),
+            AServiceManager_AddServiceFlag::ADD_SERVICE_ALLOW_ISOLATED);

+
     if (status != STATUS_OK) {
         return nullptr;
     }
diff --git a/services/mediaresourcemanager/aidl/android/media/ClientInfoParcel.aidl b/services/mediaresourcemanager/aidl/android/media/ClientInfoParcel.aidl
new file mode 100644
index 0000000..eb4bc42
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/ClientInfoParcel.aidl
@@ -0,0 +1,44 @@
+/**
+ * Copyright (c) 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Information describing a client (typically a codec).
+ *
+ * {@hide}
+ */
+parcelable ClientInfoParcel {
+    /**
+     * The PID of the client process.
+     */
+    int pid = -1;
+
+    /**
+     * The UID of the client process.
+     */
+    int uid = -1;
+
+    /**
+     * The ID of the client.
+     */
+    long id = 0;
+
+    /**
+     * Name of the resource associated with the client.
+     */
+    @utf8InCpp String name;
+}
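
On the native side, the NDK AIDL backend generates aidl::android::media::ClientInfoParcel from this file; callers typically fill it with designated initializers, as the test and fuzzer updates later in this change do. A minimal sketch (header path per the usual NDK-backend layout; values are placeholders):

    #include <cstdint>
    #include <string>
    #include <sys/types.h>

    #include <aidl/android/media/ClientInfoParcel.h>

    using ::aidl::android::media::ClientInfoParcel;

    // Build the parcel that now replaces the separate pid/uid/clientId arguments.
    ClientInfoParcel makeClientInfo(pid_t pid, uid_t uid, int64_t clientId,
                                    const std::string& codecName) {
        return ClientInfoParcel{.pid = static_cast<int32_t>(pid),
                                .uid = static_cast<int32_t>(uid),
                                .id = clientId,
                                .name = codecName};
    }
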
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
index 7a0a50f..30ad41b 100644
--- a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
@@ -19,6 +19,7 @@
 import android.media.IResourceManagerClient;
 import android.media.MediaResourceParcel;
 import android.media.MediaResourcePolicyParcel;
+import android.media.ClientInfoParcel;
 
 /**
  * ResourceManagerService interface that keeps track of media resource
@@ -44,46 +45,40 @@
     /**
      * Add a client to a process with a list of resources.
      *
-     * @param pid pid of the client.
-     * @param uid uid of the client.
-     * @param clientId an identifier that uniquely identifies the client within the pid.
+     * @param clientInfo info of the calling client.
      * @param client interface for the ResourceManagerService to call the client.
      * @param resources an array of resources to be added.
      */
     void addResource(
-            int pid,
-            int uid,
-            long clientId,
+            in ClientInfoParcel clientInfo,
             IResourceManagerClient client,
             in MediaResourceParcel[] resources);
 
     /**
      * Remove the listed resources from a client.
      *
-     * @param pid pid from which the list of resources will be removed.
-     * @param clientId clientId within the pid from which the list of resources will be removed.
+     * @param clientInfo info of the calling client.
      * @param resources an array of resources to be removed from the client.
      */
-    void removeResource(int pid, long clientId, in MediaResourceParcel[] resources);
+    void removeResource(in ClientInfoParcel clientInfo, in MediaResourceParcel[] resources);
 
     /**
      * Remove all resources from a client.
      *
-     * @param pid pid from which the client's resources will be removed.
-     * @param clientId clientId within the pid that will be removed.
+     * @param clientInfo info of the calling client.
      */
-    void removeClient(int pid, long clientId);
+    void removeClient(in ClientInfoParcel clientInfo);
 
     /**
      * Tries to reclaim resource from processes with lower priority than the
      * calling process according to the requested resources.
      *
-     * @param callingPid pid of the calling process.
+     * @param clientInfo info of the calling client.
      * @param resources an array of resources to be reclaimed.
      *
      * @return true if the reclaim was successful and false otherwise.
      */
-    boolean reclaimResource(int callingPid, in MediaResourceParcel[] resources);
+    boolean reclaimResource(in ClientInfoParcel clientInfo, in MediaResourceParcel[] resources);
 
     /**
      * Override the pid of original calling process with the pid of the process
@@ -120,10 +115,9 @@
     /**
      * Mark a client for pending removal
      *
-     * @param pid pid from which the client's resources will be removed.
-     * @param clientId clientId within the pid that will be removed.
+     * @param clientInfo info of the calling client.
      */
-    void markClientForPendingRemoval(int pid, long clientId);
+    void markClientForPendingRemoval(in ClientInfoParcel clientInfo);
 
     /**
      * Reclaim resources from clients pending removal, if any.
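
For a client of the service, the net effect of this interface change is that every per-client call now takes a single ClientInfoParcel instead of loose pid/uid/clientId arguments. An illustrative caller-side sketch against the generated NDK interface (it assumes an already-connected service and a registered IResourceManagerClient, so it is not runnable on its own):

    #include <cstdint>
    #include <memory>
    #include <vector>

    #include <aidl/android/media/ClientInfoParcel.h>
    #include <aidl/android/media/IResourceManagerClient.h>
    #include <aidl/android/media/IResourceManagerService.h>
    #include <aidl/android/media/MediaResourceParcel.h>

    using ::aidl::android::media::ClientInfoParcel;
    using ::aidl::android::media::IResourceManagerClient;
    using ::aidl::android::media::IResourceManagerService;
    using ::aidl::android::media::MediaResourceParcel;

    void exerciseNewCalls(const std::shared_ptr<IResourceManagerService>& service,
                          const std::shared_ptr<IResourceManagerClient>& client,
                          const std::vector<MediaResourceParcel>& resources,
                          int32_t pid, int32_t uid, int64_t clientId) {
        ClientInfoParcel info{.pid = pid, .uid = uid, .id = clientId, .name = "codec"};
        service->addResource(info, client, resources);          // was addResource(pid, uid, clientId, ...)
        bool reclaimed = false;
        service->reclaimResource(info, resources, &reclaimed);  // was reclaimResource(callingPid, ...)
        service->markClientForPendingRemoval(info);             // was markClientForPendingRemoval(pid, clientId)
        service->removeClient(info);                            // was removeClient(pid, clientId)
    }
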
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index 81c85e5..1d7f14f 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -41,6 +41,9 @@
         "libbinder_ndk",
         "libmedia",
         "libutils",
+        "libstats_media_metrics",
+        "libstatspull",
+        "libstatssocket",
     ],
     fuzz_config: {
         cc: [
diff --git a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
index e4aaea0..5c2fef9 100644
--- a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
+++ b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
@@ -135,11 +135,15 @@
 };
 
 struct TestClient : public BnResourceManagerClient {
-    TestClient(int pid, const shared_ptr<ResourceManagerService>& service)
-        : mReclaimed(false), mPid(pid), mService(service) {}
+    TestClient(int pid, int uid, const shared_ptr<ResourceManagerService>& service)
+        : mReclaimed(false), mPid(pid), mUid(uid), mService(service) {}
 
     Status reclaimResource(bool* aidlReturn) override {
-        mService->removeClient(mPid, getId(ref<TestClient>()));
+        ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                    .uid = static_cast<int32_t>(mUid),
+                                    .id = getId(ref<TestClient>()),
+                                    .name = ""};
+        mService->removeClient(clientInfo);
         mReclaimed = true;
         *aidlReturn = true;
         return Status::ok();
@@ -155,6 +159,7 @@
    private:
     bool mReclaimed;
     int mPid;
+    int mUid;
     shared_ptr<ResourceManagerService> mService;
     DISALLOW_EVIL_CONSTRUCTORS(TestClient);
 };
@@ -176,9 +181,12 @@
     static void* addResource(void* arg) {
         resourceThreadArgs* tArgs = (resourceThreadArgs*)arg;
         if (tArgs) {
+            ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(tArgs->pid),
+                                        .uid = static_cast<int32_t>(tArgs->uid),
+                                        .id = tArgs->testClientId,
+                                        .name = ""};
             (tArgs->service)
-                ->addResource(tArgs->pid, tArgs->uid, tArgs->testClientId, tArgs->testClient,
-                              tArgs->mediaResource);
+                ->addResource(clientInfo, tArgs->testClient, tArgs->mediaResource);
         }
         return nullptr;
     }
@@ -187,10 +195,14 @@
         resourceThreadArgs* tArgs = (resourceThreadArgs*)arg;
         if (tArgs) {
             bool result;
-            (tArgs->service)->markClientForPendingRemoval(tArgs->pid, tArgs->testClientId);
-            (tArgs->service)->removeResource(tArgs->pid, tArgs->testClientId, tArgs->mediaResource);
-            (tArgs->service)->reclaimResource(tArgs->pid, tArgs->mediaResource, &result);
-            (tArgs->service)->removeClient(tArgs->pid, tArgs->testClientId);
+            ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(tArgs->pid),
+                                        .uid = static_cast<int32_t>(tArgs->uid),
+                                        .id = tArgs->testClientId,
+                                        .name = ""};
+            (tArgs->service)->markClientForPendingRemoval(clientInfo);
+            (tArgs->service)->removeResource(clientInfo, tArgs->mediaResource);
+            (tArgs->service)->reclaimResource(clientInfo, tArgs->mediaResource, &result);
+            (tArgs->service)->removeClient(clientInfo);
             (tArgs->service)->overridePid(tArgs->pid, tArgs->pid - 1);
         }
         return nullptr;
@@ -240,7 +252,8 @@
         uint64_t mediaResourceValue = mFuzzedDataProvider->ConsumeIntegral<uint64_t>();
         threadArgs[k].service = mService;
         shared_ptr<IResourceManagerClient> testClient =
-                ::ndk::SharedRefBase::make<TestClient>(threadArgs[k].pid, mService);
+                ::ndk::SharedRefBase::make<TestClient>(threadArgs[k].pid, threadArgs[k].uid,
+                                                       mService);
         threadArgs[k].testClient = testClient;
         threadArgs[k].testClientId = getId(testClient);
         mediaResource[k].push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
@@ -258,7 +271,7 @@
     // No resource was added with pid = 0
     int32_t pidZero = 0;
     shared_ptr<IResourceManagerClient> testClient =
-        ::ndk::SharedRefBase::make<TestClient>(pidZero, mService);
+        ::ndk::SharedRefBase::make<TestClient>(pidZero, 0, mService);
     int32_t mediaResourceType =
         mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(kMinResourceType, kMaxResourceType);
     int32_t mediaResourceSubType =
@@ -269,9 +282,13 @@
                                      static_cast<MedResSubType>(mediaResourceSubType),
                                      mediaResourceValue));
     bool result;
-    mService->reclaimResource(pidZero, mediaRes, &result);
-    mService->removeResource(pidZero, getId(testClient), mediaRes);
-    mService->removeClient(pidZero, getId(testClient));
+    ClientInfoParcel pidZeroClient{.pid = static_cast<int32_t>(pidZero),
+                                   .uid = static_cast<int32_t>(0),
+                                   .id = getId(testClient),
+                                   .name = ""};
+    mService->reclaimResource(pidZeroClient, mediaRes, &result);
+    mService->removeResource(pidZeroClient, mediaRes);
+    mService->removeClient(pidZeroClient);
 }
 
 void ResourceManagerServiceFuzzer::setServiceLog() {
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 618626f..60bb8c3 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -19,6 +19,9 @@
         "liblog",
         "libmedia",
         "libutils",
+        "libstats_media_metrics",
+        "libstatspull",
+        "libstatssocket",
     ],
     include_dirs: [
         "frameworks/av/include",
@@ -64,6 +67,9 @@
         "liblog",
         "libmedia",
         "libutils",
+        "libstats_media_metrics",
+        "libstatspull",
+        "libstatssocket",
     ],
     include_dirs: [
         "frameworks/av/include",
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
index 5bf44ce..8194e23 100644
--- a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -122,11 +122,15 @@
 
 
 struct TestClient : public BnResourceManagerClient {
-    TestClient(int pid, const std::shared_ptr<ResourceManagerService> &service)
-        : mPid(pid), mService(service) {}
+    TestClient(int pid, int uid, const std::shared_ptr<ResourceManagerService> &service)
+        : mPid(pid), mUid(uid), mService(service) {}
 
     Status reclaimResource(bool* _aidl_return) override {
-        mService->removeClient(mPid, getId(ref<TestClient>()));
+        ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                    .uid = static_cast<int32_t>(mUid),
+                                    .id = getId(ref<TestClient>()),
+                                    .name = "none"};
+        mService->removeClient(clientInfo);
         mWasReclaimResourceCalled = true;
         *_aidl_return = true;
         return Status::ok();
@@ -148,6 +152,7 @@
 private:
     bool mWasReclaimResourceCalled = false;
     int mPid;
+    int mUid;
     std::shared_ptr<ResourceManagerService> mService;
     DISALLOW_EVIL_CONSTRUCTORS(TestClient);
 };
@@ -196,13 +201,13 @@
         : mSystemCB(new TestSystemCallback()),
           mService(::ndk::SharedRefBase::make<ResourceManagerService>(
                   new TestProcessInfo, mSystemCB)),
-          mTestClient1(::ndk::SharedRefBase::make<TestClient>(kTestPid1, mService)),
-          mTestClient2(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)),
-          mTestClient3(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)) {
+          mTestClient1(::ndk::SharedRefBase::make<TestClient>(kTestPid1, kTestUid1, mService)),
+          mTestClient2(::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService)),
+          mTestClient3(::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService)) {
     }
 
-    std::shared_ptr<IResourceManagerClient> createTestClient(int pid) {
-        return ::ndk::SharedRefBase::make<TestClient>(pid, mService);
+    std::shared_ptr<IResourceManagerClient> createTestClient(int pid, int uid) {
+        return ::ndk::SharedRefBase::make<TestClient>(pid, uid, mService);
     }
 
     sp<TestSystemCallback> mSystemCB;
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index 8739c3b..41cccb8 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -98,24 +98,36 @@
         // kTestPid1 mTestClient1
         std::vector<MediaResourceParcel> resources1;
         resources1.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        mService->addResource(client1Info, mTestClient1, resources1);
         resources1.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 200));
         std::vector<MediaResourceParcel> resources11;
         resources11.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 200));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources11);
+        mService->addResource(client1Info, mTestClient1, resources11);
 
         // kTestPid2 mTestClient2
         std::vector<MediaResourceParcel> resources2;
         resources2.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
         resources2.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 300));
-        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources2);
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient2),
+                                     .name = "none"};
+        mService->addResource(client2Info, mTestClient2, resources2);
 
         // kTestPid2 mTestClient3
         std::vector<MediaResourceParcel> resources3;
-        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources3);
+        ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient3),
+                                     .name = "none"};
+        mService->addResource(client3Info, mTestClient3, resources3);
         resources3.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
         resources3.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 100));
-        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources3);
+        mService->addResource(client3Info, mTestClient3, resources3);
 
         const PidResourceInfosMap &map = mService->mMap;
         EXPECT_EQ(2u, map.size());
@@ -138,7 +150,11 @@
         std::vector<MediaResourceParcel> resources1;
         resources1.push_back(MediaResource(MediaResource::Type::kDrmSession, -100));
         resources1.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, -100));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        mService->addResource(client1Info, mTestClient1, resources1);
 
         // Expected result:
         // 1) the client should have been added;
@@ -155,11 +171,11 @@
         resources1.clear();
         resources1.push_back(MediaResource(MediaResource::Type::kDrmSession, INT64_MAX));
         resources1.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, INT64_MAX));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(client1Info, mTestClient1, resources1);
         resources1.clear();
         resources1.push_back(MediaResource(MediaResource::Type::kDrmSession, 10));
         resources1.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 10));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(client1Info, mTestClient1, resources1);
 
         // Expected result:
         // Both values should saturate to INT64_MAX
@@ -170,7 +186,7 @@
         resources1.clear();
         resources1.push_back(MediaResource(MediaResource::Type::kDrmSession, -10));
         resources1.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, -10));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(client1Info, mTestClient1, resources1);
 
         // Expected result:
         // 1) DrmSession resource should allow negative value addition, and value should drop accordingly
@@ -182,7 +198,7 @@
         resources1.clear();
         resources1.push_back(MediaResource(MediaResource::Type::kDrmSession, INT64_MIN));
         expected.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, INT64_MIN));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(client1Info, mTestClient1, resources1);
 
         // Expected result:
         // 1) DrmSession resource value should drop to 0, but the entry shouldn't be removed.
@@ -228,11 +244,15 @@
         // kTestPid1 mTestClient1
         std::vector<MediaResourceParcel> resources1;
         resources1.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        mService->addResource(client1Info, mTestClient1, resources1);
 
         std::vector<MediaResourceParcel> resources11;
         resources11.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 200));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources11);
+        mService->addResource(client1Info, mTestClient1, resources11);
 
         const PidResourceInfosMap &map = mService->mMap;
         EXPECT_EQ(1u, map.size());
@@ -243,7 +263,7 @@
 
         // test adding existing types to combine values
         resources1.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 100));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(client1Info, mTestClient1, resources1);
 
         std::vector<MediaResourceParcel> expected;
         expected.push_back(MediaResource(MediaResource::Type::kSecureCodec, 2));
@@ -253,7 +273,7 @@
         // test adding new types (including types that differs only in subType)
         resources11.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
         resources11.push_back(MediaResource(MediaResource::Type::kSecureCodec, MediaResource::SubType::kVideoCodec, 1));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources11);
+        mService->addResource(client1Info, mTestClient1, resources11);
 
         expected.clear();
         expected.push_back(MediaResource(MediaResource::Type::kSecureCodec, 2));
@@ -267,11 +287,15 @@
         // kTestPid1 mTestClient1
         std::vector<MediaResourceParcel> resources1;
         resources1.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        mService->addResource(client1Info, mTestClient1, resources1);
 
         std::vector<MediaResourceParcel> resources11;
         resources11.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 200));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources11);
+        mService->addResource(client1Info, mTestClient1, resources11);
 
         const PidResourceInfosMap &map = mService->mMap;
         EXPECT_EQ(1u, map.size());
@@ -282,7 +306,7 @@
 
         // test partial removal
         resources11[0].value = 100;
-        mService->removeResource(kTestPid1, getId(mTestClient1), resources11);
+        mService->removeResource(client1Info, resources11);
 
         std::vector<MediaResourceParcel> expected;
         expected.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
@@ -291,13 +315,13 @@
 
         // test removal request with negative value, should be ignored
         resources11[0].value = -10000;
-        mService->removeResource(kTestPid1, getId(mTestClient1), resources11);
+        mService->removeResource(client1Info, resources11);
 
         expectEqResourceInfo(infos1.valueFor(getId(mTestClient1)), kTestUid1, mTestClient1, expected);
 
         // test complete removal with overshoot value
         resources11[0].value = 1000;
-        mService->removeResource(kTestPid1, getId(mTestClient1), resources11);
+        mService->removeResource(client1Info, resources11);
 
         expected.clear();
         expected.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
@@ -317,19 +341,35 @@
             mService->mSupportsSecureWithNonSecureCodec = true;
 
             // priority too low to reclaim resource
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
+            ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                        .uid = static_cast<int32_t>(kTestUid1),
+                                        .id = 0,
+                                        .name = "none"};
+            CHECK_STATUS_FALSE(mService->reclaimResource(clientInfo, resources, &result));
 
             // override Low Priority Pid with High Priority Pid
             mService->overridePid(kLowPriorityPid, kHighPriorityPid);
-            CHECK_STATUS_TRUE(mService->reclaimResource(kLowPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(clientInfo, resources, &result));
 
             // restore Low Priority Pid
             mService->overridePid(kLowPriorityPid, -1);
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(clientInfo, resources, &result));
         }
     }
 
     void testMarkClientForPendingRemoval() {
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient2),
+                                     .name = "none"};
+        ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient3),
+                                     .name = "none"};
         {
             addResource();
             mService->mSupportsSecureWithNonSecureCodec = true;
@@ -338,24 +378,24 @@
             resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
 
             // Remove low priority clients
-            mService->removeClient(kTestPid1, getId(mTestClient1));
+            mService->removeClient(client1Info);
 
             // no lower priority client
-            CHECK_STATUS_FALSE(mService->reclaimResource(kTestPid2, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(client2Info, resources, &result));
             EXPECT_EQ(false, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_EQ(false, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
-            mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient2));
+            mService->markClientForPendingRemoval(client2Info);
 
             // client marked for pending removal from the same process got reclaimed
-            CHECK_STATUS_TRUE(mService->reclaimResource(kTestPid2, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(client2Info, resources, &result));
             EXPECT_EQ(false, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_EQ(true, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 3 which still left
-            mService->removeClient(kTestPid2, getId(mTestClient3));
+            mService->removeClient(client3Info);
         }
 
         {
@@ -365,30 +405,30 @@
             std::vector<MediaResourceParcel> resources;
             resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
 
-            mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient2));
+            mService->markClientForPendingRemoval(client2Info);
 
             // client marked for pending removal from the same process got reclaimed
             // first, even though there are lower priority process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kTestPid2, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(client2Info, resources, &result));
             EXPECT_EQ(false, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_EQ(true, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // lower priority client got reclaimed
-            CHECK_STATUS_TRUE(mService->reclaimResource(kTestPid2, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(client2Info, resources, &result));
             EXPECT_EQ(true, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_EQ(false, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 3 which is still left
-            mService->removeClient(kTestPid2, getId(mTestClient3));
+            mService->removeClient(client3Info);
         }
 
         {
             addResource();
             mService->mSupportsSecureWithNonSecureCodec = true;
 
-            mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient2));
+            mService->markClientForPendingRemoval(client2Info);
 
             // client marked for pending removal got reclaimed
             EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
@@ -402,7 +442,7 @@
             EXPECT_EQ(false, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
-            mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient3));
+            mService->markClientForPendingRemoval(client3Info);
 
             // client marked for pending removal got reclaimed
             EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
@@ -411,14 +451,18 @@
             EXPECT_EQ(true, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 1 which is still left
-            mService->removeClient(kTestPid1, getId(mTestClient1));
+            mService->removeClient(client1Info);
         }
     }
 
     void testRemoveClient() {
         addResource();
 
-        mService->removeClient(kTestPid2, getId(mTestClient2));
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient2),
+                                     .name = "none"};
+        mService->removeClient(client2Info);
 
         const PidResourceInfosMap &map = mService->mMap;
         EXPECT_EQ(2u, map.size());
@@ -437,11 +481,12 @@
         MediaResource::SubType subType = MediaResource::SubType::kUnspecifiedSubType;
 
         Vector<std::shared_ptr<IResourceManagerClient> > clients;
-        EXPECT_FALSE(mService->getAllClients_l(kLowPriorityPid, type, subType, &clients));
+        PidUidVector idList;
+        EXPECT_FALSE(mService->getAllClients_l(kLowPriorityPid, type, subType, &idList, &clients));
         // some higher priority process (e.g. kTestPid2) owns the resource, so getAllClients_l
         // will fail.
-        EXPECT_FALSE(mService->getAllClients_l(kMidPriorityPid, type, subType, &clients));
-        EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, type, subType, &clients));
+        EXPECT_FALSE(mService->getAllClients_l(kMidPriorityPid, type, subType, &idList, &clients));
+        EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, type, subType, &idList, &clients));
 
         EXPECT_EQ(2u, clients.size());
         // (OK to require ordering in clients[], as the pid map is sorted)
@@ -454,6 +499,19 @@
         resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
         resources.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 150));
 
+        ClientInfoParcel lowPriorityClient{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                           .uid = static_cast<int32_t>(kTestUid2),
+                                           .id = 0,
+                                           .name = "none"};
+        ClientInfoParcel midPriorityClient{.pid = static_cast<int32_t>(kMidPriorityPid),
+                                           .uid = static_cast<int32_t>(kTestUid2),
+                                           .id = 0,
+                                           .name = "none"};
+        ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                            .uid = static_cast<int32_t>(kTestUid2),
+                                            .id = 0,
+                                            .name = "none"};
+
         // ### secure codec can't coexist and secure codec can coexist with non-secure codec ###
         {
             addResource();
@@ -461,23 +519,23 @@
             mService->mSupportsSecureWithNonSecureCodec = true;
 
             // priority too low
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
-            CHECK_STATUS_FALSE(mService->reclaimResource(kMidPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(midPriorityClient, resources, &result));
 
             // reclaim all secure codecs
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim one largest graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
-            CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
         }
 
         // ### secure codecs can't coexist and secure codec can't coexist with non-secure codec ###
@@ -487,17 +545,17 @@
             mService->mSupportsSecureWithNonSecureCodec = false;
 
             // priority too low
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
-            CHECK_STATUS_FALSE(mService->reclaimResource(kMidPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(midPriorityClient, resources, &result));
 
             // reclaim all secure and non-secure codecs
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
-            CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
         }
 
 
@@ -508,29 +566,29 @@
             mService->mSupportsSecureWithNonSecureCodec = false;
 
             // priority too low
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
-            CHECK_STATUS_FALSE(mService->reclaimResource(kMidPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(midPriorityClient, resources, &result));
 
             // reclaim all non-secure codecs
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim one largest graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another largest graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
-            CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
         }
 
         // ### secure codecs can coexist and secure codec can coexist with non-secure codec ###
@@ -540,28 +598,28 @@
             mService->mSupportsSecureWithNonSecureCodec = true;
 
             // priority too low
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
 
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             // one largest graphic memory from lowest process got reclaimed
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
-            CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
         }
 
         // ### secure codecs can coexist and secure codec can coexist with non-secure codec ###
@@ -573,20 +631,20 @@
             std::vector<MediaResourceParcel> resources;
             resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
 
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             // secure codec from lowest process got reclaimed
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another secure codec from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // no more secure codec, non-secure codec will be reclaimed.
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
@@ -598,29 +656,42 @@
         resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
         resources.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 150));
 
+        ClientInfoParcel lowPriorityClient{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                           .uid = static_cast<int32_t>(kTestUid2),
+                                           .id = 0,
+                                           .name = "none"};
+        ClientInfoParcel midPriorityClient{.pid = static_cast<int32_t>(kMidPriorityPid),
+                                           .uid = static_cast<int32_t>(kTestUid2),
+                                           .id = 0,
+                                           .name = "none"};
+        ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                            .uid = static_cast<int32_t>(kTestUid2),
+                                            .id = 0,
+                                            .name = "none"};
+
         // ### secure codec can't coexist with non-secure codec ###
         {
             addResource();
             mService->mSupportsSecureWithNonSecureCodec = false;
 
             // priority too low
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
-            CHECK_STATUS_FALSE(mService->reclaimResource(kMidPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(midPriorityClient, resources, &result));
 
             // reclaim all secure codecs
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim one graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
-            CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
         }
 
 
@@ -630,28 +701,28 @@
             mService->mSupportsSecureWithNonSecureCodec = true;
 
             // priority too low
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
 
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             // one largest graphic memory from lowest process got reclaimed
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
-            CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
         }
 
         // ### secure codec can coexist with non-secure codec ###
@@ -662,20 +733,24 @@
             std::vector<MediaResourceParcel> resources;
             resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
 
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             // one non-secure codec from lowest process got reclaimed
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // no more non-secure codec, secure codec from lowest priority process will be reclaimed
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 3 which is still left
-            mService->removeClient(kTestPid2, getId(mTestClient3));
+            ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(kTestPid2),
+                                        .uid = static_cast<int32_t>(kTestUid2),
+                                        .id = getId(mTestClient3),
+                                        .name = "none"};
+            mService->removeClient(clientInfo);
         }
     }
 
@@ -683,15 +758,16 @@
         MediaResource::Type type = MediaResource::Type::kGraphicMemory;
         MediaResource::SubType subType = MediaResource::SubType::kUnspecifiedSubType;
         std::shared_ptr<IResourceManagerClient> client;
+        PidUidVector idList;
         EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, subType,
-                &client));
+                &idList, &client));
 
         addResource();
 
         EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kLowPriorityPid, type, subType,
-                &client));
+                &idList, &client));
         EXPECT_TRUE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, subType,
-                &client));
+                &idList, &client));
 
         // kTestPid1 is the lowest priority process with MediaResource::Type::kGraphicMemory.
         // mTestClient1 has the largest MediaResource::Type::kGraphicMemory within kTestPid1.
@@ -737,33 +813,41 @@
         // new client request should cause VIDEO_ON
         std::vector<MediaResourceParcel> resources1;
         resources1.push_back(MediaResource(MediaResource::Type::kBattery, MediaResource::SubType::kVideoCodec, 1));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        mService->addResource(client1Info, mTestClient1, resources1);
         EXPECT_EQ(2u, mSystemCB->eventCount());
         EXPECT_EQ(EventEntry({EventType::VIDEO_ON, kTestUid1}), mSystemCB->lastEvent());
 
         // each client should only cause 1 VIDEO_ON
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(client1Info, mTestClient1, resources1);
         EXPECT_EQ(2u, mSystemCB->eventCount());
 
         // new client request should cause VIDEO_ON
         std::vector<MediaResourceParcel> resources2;
         resources2.push_back(MediaResource(MediaResource::Type::kBattery, MediaResource::SubType::kVideoCodec, 2));
-        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources2);
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient2),
+                                     .name = "none"};
+        mService->addResource(client2Info, mTestClient2, resources2);
         EXPECT_EQ(3u, mSystemCB->eventCount());
         EXPECT_EQ(EventEntry({EventType::VIDEO_ON, kTestUid2}), mSystemCB->lastEvent());
 
         // partially remove mTestClient1's request, shouldn't be any VIDEO_OFF
-        mService->removeResource(kTestPid1, getId(mTestClient1), resources1);
+        mService->removeResource(client1Info, resources1);
         EXPECT_EQ(3u, mSystemCB->eventCount());
 
         // remove mTestClient1's request, should be VIDEO_OFF for kTestUid1
         // (use resource2 to test removing more instances than previously requested)
-        mService->removeResource(kTestPid1, getId(mTestClient1), resources2);
+        mService->removeResource(client1Info, resources2);
         EXPECT_EQ(4u, mSystemCB->eventCount());
         EXPECT_EQ(EventEntry({EventType::VIDEO_OFF, kTestUid1}), mSystemCB->lastEvent());
 
         // remove mTestClient2, should be VIDEO_OFF for kTestUid2
-        mService->removeClient(kTestPid2, getId(mTestClient2));
+        mService->removeClient(client2Info);
         EXPECT_EQ(5u, mSystemCB->eventCount());
         EXPECT_EQ(EventEntry({EventType::VIDEO_OFF, kTestUid2}), mSystemCB->lastEvent());
     }
@@ -776,32 +860,40 @@
         // new client request should cause CPUSET_ENABLE
         std::vector<MediaResourceParcel> resources1;
         resources1.push_back(MediaResource(MediaResource::Type::kCpuBoost, 1));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        mService->addResource(client1Info, mTestClient1, resources1);
         EXPECT_EQ(2u, mSystemCB->eventCount());
         EXPECT_EQ(EventType::CPUSET_ENABLE, mSystemCB->lastEventType());
 
         // each client should only cause 1 CPUSET_ENABLE
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(client1Info, mTestClient1, resources1);
         EXPECT_EQ(2u, mSystemCB->eventCount());
 
         // new client request should cause CPUSET_ENABLE
         std::vector<MediaResourceParcel> resources2;
         resources2.push_back(MediaResource(MediaResource::Type::kCpuBoost, 2));
-        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources2);
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient2),
+                                     .name = "none"};
+        mService->addResource(client2Info, mTestClient2, resources2);
         EXPECT_EQ(3u, mSystemCB->eventCount());
         EXPECT_EQ(EventType::CPUSET_ENABLE, mSystemCB->lastEventType());
 
         // remove mTestClient2 should not cause CPUSET_DISABLE, mTestClient1 still active
-        mService->removeClient(kTestPid2, getId(mTestClient2));
+        mService->removeClient(client2Info);
         EXPECT_EQ(3u, mSystemCB->eventCount());
 
         // remove 1 cpuboost from mTestClient1, should not be CPUSET_DISABLE (still 1 left)
-        mService->removeResource(kTestPid1, getId(mTestClient1), resources1);
+        mService->removeResource(client1Info, resources1);
         EXPECT_EQ(3u, mSystemCB->eventCount());
 
         // remove 2 cpuboost from mTestClient1, should be CPUSET_DISABLE
         // (use resource2 to test removing more than previously requested)
-        mService->removeResource(kTestPid1, getId(mTestClient1), resources2);
+        mService->removeResource(client1Info, resources2);
         EXPECT_EQ(4u, mSystemCB->eventCount());
         EXPECT_EQ(EventType::CPUSET_DISABLE, mSystemCB->lastEventType());
     }
@@ -814,22 +906,32 @@
         std::vector<MediaResourceParcel> audioImageResources;
         audioImageResources.push_back(createNonSecureAudioCodecResource());
         audioImageResources.push_back(createNonSecureImageCodecResource());
-        mService->addResource(kLowPriorityPid, kTestUid1, getId(audioImageTestClient),
-                audioImageTestClient, audioImageResources);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(audioImageTestClient),
+                                     .name = "none"};
+        mService->addResource(client1Info, audioImageTestClient, audioImageResources);
 
         // Fail to reclaim a video codec resource
         std::vector<MediaResourceParcel> reclaimResources;
         reclaimResources.push_back(createNonSecureVideoCodecResource());
-        CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+        ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                            .uid = static_cast<int32_t>(kTestUid2),
+                                            .id = 0,
+                                            .name = "none"};
+        CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
         // Now add a video codec resource
         std::vector<MediaResourceParcel> videoResources;
         videoResources.push_back(createNonSecureVideoCodecResource());
-        mService->addResource(kLowPriorityPid, kTestUid1, getId(videoTestClient), videoTestClient,
-                videoResources);
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(videoTestClient),
+                                     .name = "none"};
+        mService->addResource(client2Info, videoTestClient, videoResources);
 
         // Verify that the newly-created video codec resource can be reclaimed
-        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+        CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
         // Verify that the audio and image resources are untouched
         EXPECT_FALSE(toTestClient(audioImageTestClient)->checkIfReclaimedAndReset());
@@ -845,22 +947,32 @@
         std::vector<MediaResourceParcel> videoImageResources;
         videoImageResources.push_back(createNonSecureVideoCodecResource());
         videoImageResources.push_back(createNonSecureImageCodecResource());
-        mService->addResource(kLowPriorityPid, kTestUid1, getId(videoImageTestClient),
-                videoImageTestClient, videoImageResources);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(videoImageTestClient),
+                                     .name = "none"};
+        mService->addResource(client1Info, videoImageTestClient, videoImageResources);
 
         // Fail to reclaim an audio codec resource
         std::vector<MediaResourceParcel> reclaimResources;
         reclaimResources.push_back(createNonSecureAudioCodecResource());
-        CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+        ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                            .uid = static_cast<int32_t>(kTestUid2),
+                                            .id = 0,
+                                            .name = "none"};
+        CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
         // Now add an audio codec resource
         std::vector<MediaResourceParcel> audioResources;
         audioResources.push_back(createNonSecureAudioCodecResource());
-        mService->addResource(kLowPriorityPid, kTestUid2, getId(audioTestClient), audioTestClient,
-                audioResources);
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(audioTestClient),
+                                     .name = "none"};
+        mService->addResource(client2Info, audioTestClient, audioResources);
 
         // Verify that the newly-created audio codec resource can be reclaimed
-        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+        CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
         // Verify that the video and image resources are untouched
         EXPECT_FALSE(toTestClient(videoImageTestClient)->checkIfReclaimedAndReset());
@@ -876,22 +988,32 @@
         std::vector<MediaResourceParcel> videoAudioResources;
         videoAudioResources.push_back(createNonSecureVideoCodecResource());
         videoAudioResources.push_back(createNonSecureAudioCodecResource());
-        mService->addResource(kLowPriorityPid, kTestUid1, getId(videoAudioTestClient),
-                videoAudioTestClient, videoAudioResources);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(videoAudioTestClient),
+                                     .name = "none"};
+        mService->addResource(client1Info, videoAudioTestClient, videoAudioResources);
 
         // Fail to reclaim an image codec resource
         std::vector<MediaResourceParcel> reclaimResources;
         reclaimResources.push_back(createNonSecureImageCodecResource());
-        CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+        ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                            .uid = static_cast<int32_t>(kTestUid2),
+                                            .id = 0,
+                                            .name = "none"};
+        CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
         // Now add an image codec resource
         std::vector<MediaResourceParcel> imageResources;
         imageResources.push_back(createNonSecureImageCodecResource());
-        mService->addResource(kLowPriorityPid, kTestUid2, getId(imageTestClient), imageTestClient,
-                imageResources);
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(imageTestClient),
+                                     .name = "none"};
+        mService->addResource(client2Info, imageTestClient, imageResources);
 
         // Verify that the newly-created image codec resource can be reclaimed
-        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+        CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
         // Verify that the video and audio resources are untouched
         EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
@@ -901,20 +1023,27 @@
 
     void testReclaimResources_whenPartialResourceMatch_reclaims() {
         const int onlyUid = kTestUid1;
-        const auto onlyClient = createTestClient(kLowPriorityPid);
+        const auto onlyClient = createTestClient(kLowPriorityPid, onlyUid);
 
         std::vector<MediaResourceParcel> ownedResources;
         ownedResources.push_back(createNonSecureVideoCodecResource());
         ownedResources.push_back(createGraphicMemoryResource(100));
-        mService->addResource(kLowPriorityPid, onlyUid, getId(onlyClient), onlyClient,
-                ownedResources);
+        ClientInfoParcel onlyClientInfo{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                       .uid = static_cast<int32_t>(onlyUid),
+                                       .id = getId(onlyClient),
+                                       .name = "none"};
+        mService->addResource(onlyClientInfo, onlyClient, ownedResources);
 
         // Reclaim an image codec instead of the video codec that is owned, but also reclaim
         // graphics memory, which will trigger the reclaim.
         std::vector<MediaResourceParcel> reclaimResources;
         reclaimResources.push_back(createNonSecureImageCodecResource());
         reclaimResources.push_back(createGraphicMemoryResource(100));
-        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+        ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                            .uid = static_cast<int32_t>(kTestUid2),
+                                            .id = 0,
+                                            .name = "none"};
+        CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
         // Verify that the video codec resources (including the needed graphic memory) are reclaimed
         EXPECT_TRUE(toTestClient(onlyClient)->checkIfReclaimedAndReset());
@@ -926,200 +1055,278 @@
         const int onlyUid = kTestUid1;
 
         // secure video codec
-        const auto smallSecureVideoMarkedClient = createTestClient(onlyPid);
-        const auto largeSecureVideoMarkedClient = createTestClient(onlyPid);
-        const auto largestSecureVideoActiveClient = createTestClient(onlyPid);
+        const auto smallSecureVideoMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeSecureVideoMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestSecureVideoActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientA{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallSecureVideoMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientB{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeSecureVideoMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientC{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestSecureVideoActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createSecureVideoCodecResource(1));
-            mService->addResource(onlyPid, onlyUid, getId(smallSecureVideoMarkedClient),
-                    smallSecureVideoMarkedClient, resources);
+            mService->addResource(clientA, smallSecureVideoMarkedClient, resources);
             resources.clear();
             resources.push_back(createSecureVideoCodecResource(2));
-            mService->addResource(onlyPid, onlyUid, getId(largeSecureVideoMarkedClient),
-                    largeSecureVideoMarkedClient, resources);
+            mService->addResource(clientB, largeSecureVideoMarkedClient, resources);
             resources.clear();
             resources.push_back(createSecureVideoCodecResource(3));
-            mService->addResource(onlyPid, onlyUid, getId(largestSecureVideoActiveClient),
-                    largestSecureVideoActiveClient, resources);
+            mService->addResource(clientC, largestSecureVideoActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallSecureVideoMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeSecureVideoMarkedClient));
+        mService->markClientForPendingRemoval(clientA);
+        mService->markClientForPendingRemoval(clientB);
         // don't mark the largest client
 
         // non-secure video codec
-        const auto smallNonSecureVideoMarkedClient = createTestClient(onlyPid);
-        const auto largeNonSecureVideoMarkedClient = createTestClient(onlyPid);
-        const auto largestNonSecureVideoActiveClient = createTestClient(onlyPid);
+        const auto smallNonSecureVideoMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeNonSecureVideoMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestNonSecureVideoActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientD{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallNonSecureVideoMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientE{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeNonSecureVideoMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientF{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestNonSecureVideoActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createNonSecureVideoCodecResource(1));
-            mService->addResource(onlyPid, onlyUid, getId(smallNonSecureVideoMarkedClient),
-                    smallNonSecureVideoMarkedClient, resources);
+            mService->addResource(clientD, smallNonSecureVideoMarkedClient, resources);
             resources.clear();
             resources.push_back(createNonSecureVideoCodecResource(2));
-            mService->addResource(onlyPid, onlyUid, getId(largeNonSecureVideoMarkedClient),
-                    largeNonSecureVideoMarkedClient, resources);
+            mService->addResource(clientE, largeNonSecureVideoMarkedClient, resources);
             resources.clear();
             resources.push_back(createNonSecureVideoCodecResource(3));
-            mService->addResource(onlyPid, onlyUid, getId(largestNonSecureVideoActiveClient),
-                    largestNonSecureVideoActiveClient, resources);
+            mService->addResource(clientF, largestNonSecureVideoActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallNonSecureVideoMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeNonSecureVideoMarkedClient));
+        mService->markClientForPendingRemoval(clientD);
+        mService->markClientForPendingRemoval(clientE);
         // don't mark the largest client
 
         // secure audio codec
-        const auto smallSecureAudioMarkedClient = createTestClient(onlyPid);
-        const auto largeSecureAudioMarkedClient = createTestClient(onlyPid);
-        const auto largestSecureAudioActiveClient = createTestClient(onlyPid);
+        const auto smallSecureAudioMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeSecureAudioMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestSecureAudioActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientG{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallSecureAudioMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientH{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeSecureAudioMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientI{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestSecureVideoActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createSecureAudioCodecResource(1));
-            mService->addResource(onlyPid, onlyUid, getId(smallSecureAudioMarkedClient),
-                    smallSecureAudioMarkedClient, resources);
+            mService->addResource(clientG, smallSecureAudioMarkedClient, resources);
             resources.clear();
             resources.push_back(createSecureAudioCodecResource(2));
-            mService->addResource(onlyPid, onlyUid, getId(largeSecureAudioMarkedClient),
-                    largeSecureAudioMarkedClient, resources);
+            mService->addResource(clientH, largeSecureAudioMarkedClient, resources);
             resources.clear();
             resources.push_back(createSecureAudioCodecResource(3));
-            mService->addResource(onlyPid, onlyUid, getId(largestSecureVideoActiveClient),
-                    largestSecureVideoActiveClient, resources);
+            mService->addResource(clientI, largestSecureVideoActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallSecureAudioMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeSecureAudioMarkedClient));
+        mService->markClientForPendingRemoval(clientG);
+        mService->markClientForPendingRemoval(clientH);
         // don't mark the largest client
 
         // non-secure audio codec
-        const auto smallNonSecureAudioMarkedClient = createTestClient(onlyPid);
-        const auto largeNonSecureAudioMarkedClient = createTestClient(onlyPid);
-        const auto largestNonSecureAudioActiveClient = createTestClient(onlyPid);
+        const auto smallNonSecureAudioMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeNonSecureAudioMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestNonSecureAudioActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientJ{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallNonSecureAudioMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientK{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeNonSecureAudioMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientL{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestNonSecureAudioActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createNonSecureAudioCodecResource(1));
-            mService->addResource(onlyPid, onlyUid, getId(smallNonSecureAudioMarkedClient),
-                    smallNonSecureAudioMarkedClient, resources);
+            mService->addResource(clientJ, smallNonSecureAudioMarkedClient, resources);
             resources.clear();
             resources.push_back(createNonSecureAudioCodecResource(2));
-            mService->addResource(onlyPid, onlyUid, getId(largeNonSecureAudioMarkedClient),
-                    largeNonSecureAudioMarkedClient, resources);
+            mService->addResource(clientK, largeNonSecureAudioMarkedClient, resources);
             resources.clear();
             resources.push_back(createNonSecureAudioCodecResource(3));
-            mService->addResource(onlyPid, onlyUid, getId(largestNonSecureAudioActiveClient),
-                    largestNonSecureAudioActiveClient, resources);
+            mService->addResource(clientL, largestNonSecureAudioActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallNonSecureAudioMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeNonSecureAudioMarkedClient));
+        mService->markClientForPendingRemoval(clientJ);
+        mService->markClientForPendingRemoval(clientK);
         // don't mark the largest client
 
         // secure image codec
-        const auto smallSecureImageMarkedClient = createTestClient(onlyPid);
-        const auto largeSecureImageMarkedClient = createTestClient(onlyPid);
-        const auto largestSecureImageActiveClient = createTestClient(onlyPid);
+        const auto smallSecureImageMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeSecureImageMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestSecureImageActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientM{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallSecureImageMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientN{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeSecureImageMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientO{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestSecureImageActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createSecureImageCodecResource(1));
-            mService->addResource(onlyPid, onlyUid, getId(smallSecureImageMarkedClient),
-                    smallSecureImageMarkedClient, resources);
+            mService->addResource(clientM, smallSecureImageMarkedClient, resources);
             resources.clear();
             resources.push_back(createSecureImageCodecResource(2));
-            mService->addResource(onlyPid, onlyUid, getId(largeSecureImageMarkedClient),
-                    largeSecureImageMarkedClient, resources);
+            mService->addResource(clientN, largeSecureImageMarkedClient, resources);
             resources.clear();
             resources.push_back(createSecureImageCodecResource(3));
-            mService->addResource(onlyPid, onlyUid, getId(largestSecureImageActiveClient),
-                    largestSecureImageActiveClient, resources);
+            mService->addResource(clientO, largestSecureImageActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallSecureImageMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeSecureImageMarkedClient));
+        mService->markClientForPendingRemoval(clientM);
+        mService->markClientForPendingRemoval(clientN);
         // don't mark the largest client
 
         // non-secure image codec
-        const auto smallNonSecureImageMarkedClient = createTestClient(onlyPid);
-        const auto largeNonSecureImageMarkedClient = createTestClient(onlyPid);
-        const auto largestNonSecureImageActiveClient = createTestClient(onlyPid);
+        const auto smallNonSecureImageMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeNonSecureImageMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestNonSecureImageActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientP{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallNonSecureImageMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientQ{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeNonSecureImageMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientR{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestNonSecureImageActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createNonSecureImageCodecResource(1));
-            mService->addResource(onlyPid, onlyUid, getId(smallNonSecureImageMarkedClient),
-                    smallNonSecureImageMarkedClient, resources);
+            mService->addResource(clientP, smallNonSecureImageMarkedClient, resources);
             resources.clear();
             resources.push_back(createNonSecureImageCodecResource(2));
-            mService->addResource(onlyPid, onlyUid, getId(largeNonSecureImageMarkedClient),
-                    largeNonSecureImageMarkedClient, resources);
+            mService->addResource(clientQ, largeNonSecureImageMarkedClient, resources);
             resources.clear();
             resources.push_back(createNonSecureImageCodecResource(3));
-            mService->addResource(onlyPid, onlyUid, getId(largestNonSecureImageActiveClient),
-                    largestNonSecureImageActiveClient, resources);
+            mService->addResource(clientR, largestNonSecureImageActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallNonSecureImageMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeNonSecureImageMarkedClient));
+        mService->markClientForPendingRemoval(clientP);
+        mService->markClientForPendingRemoval(clientQ);
         // don't mark the largest client
 
         // graphic memory
-        const auto smallGraphicMemoryMarkedClient = createTestClient(onlyPid);
-        const auto largeGraphicMemoryMarkedClient = createTestClient(onlyPid);
-        const auto largestGraphicMemoryActiveClient = createTestClient(onlyPid);
+        const auto smallGraphicMemoryMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeGraphicMemoryMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestGraphicMemoryActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientS{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallGraphicMemoryMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientT{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeGraphicMemoryMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientU{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestGraphicMemoryActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createGraphicMemoryResource(100));
-            mService->addResource(onlyPid, onlyUid, getId(smallGraphicMemoryMarkedClient),
-                    smallGraphicMemoryMarkedClient, resources);
+            mService->addResource(clientS, smallGraphicMemoryMarkedClient, resources);
             resources.clear();
             resources.push_back(createGraphicMemoryResource(200));
-            mService->addResource(onlyPid, onlyUid, getId(largeGraphicMemoryMarkedClient),
-                    largeGraphicMemoryMarkedClient, resources);
+            mService->addResource(clientT, largeGraphicMemoryMarkedClient, resources);
             resources.clear();
             resources.push_back(createGraphicMemoryResource(300));
-            mService->addResource(onlyPid, onlyUid, getId(largestGraphicMemoryActiveClient),
-                    largestGraphicMemoryActiveClient, resources);
+            mService->addResource(clientU, largestGraphicMemoryActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallGraphicMemoryMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeGraphicMemoryMarkedClient));
+        mService->markClientForPendingRemoval(clientS);
+        mService->markClientForPendingRemoval(clientT);
         // don't mark the largest client
 
         // DRM session
-        const auto smallDrmSessionMarkedClient = createTestClient(onlyPid);
-        const auto largeDrmSessionMarkedClient = createTestClient(onlyPid);
-        const auto largestDrmSessionActiveClient = createTestClient(onlyPid);
+        const auto smallDrmSessionMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeDrmSessionMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestDrmSessionActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientV{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallDrmSessionMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientW{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeDrmSessionMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientX{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestDrmSessionActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createDrmSessionResource(1));
-            mService->addResource(onlyPid, onlyUid, getId(smallDrmSessionMarkedClient),
-                    smallDrmSessionMarkedClient, resources);
+            mService->addResource(clientV, smallDrmSessionMarkedClient, resources);
             resources.clear();
             resources.push_back(createDrmSessionResource(2));
-            mService->addResource(onlyPid, onlyUid, getId(largeDrmSessionMarkedClient),
-                    largeDrmSessionMarkedClient, resources);
+            mService->addResource(clientW, largeDrmSessionMarkedClient, resources);
             resources.clear();
             resources.push_back(createDrmSessionResource(3));
-            mService->addResource(onlyPid, onlyUid, getId(largestDrmSessionActiveClient),
-                    largestDrmSessionActiveClient, resources);
+            mService->addResource(clientX, largestDrmSessionActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallDrmSessionMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeDrmSessionMarkedClient));
+        mService->markClientForPendingRemoval(clientV);
+        mService->markClientForPendingRemoval(clientW);
         // don't mark the largest client
 
         // battery
-        const auto batteryMarkedClient = createTestClient(onlyPid);
+        const auto batteryMarkedClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientY{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(batteryMarkedClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createBatteryResource());
-            mService->addResource(onlyPid, onlyUid, getId(batteryMarkedClient),
-                    batteryMarkedClient, resources);
+            mService->addResource(clientY, batteryMarkedClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(batteryMarkedClient));
+        mService->markClientForPendingRemoval(clientY);
 
         // CPU boost
-        const auto cpuBoostMarkedClient = createTestClient(onlyPid);
+        const auto cpuBoostMarkedClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientZ{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(cpuBoostMarkedClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createCpuBoostResource());
-            mService->addResource(onlyPid, onlyUid, getId(cpuBoostMarkedClient),
-                    cpuBoostMarkedClient, resources);
+            mService->addResource(clientZ, cpuBoostMarkedClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(cpuBoostMarkedClient));
+        mService->markClientForPendingRemoval(clientZ);
 
         // now we expect that we only reclaim resources from the biggest marked client
         EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(onlyPid).isOk());
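
The hunks above move the ResourceManagerService tests from the flattened (pid, uid, clientId, client) argument list to a single ClientInfoParcel. A minimal sketch of the new call shape, reusing helpers already defined in ResourceManagerService_test.cpp (createTestClient, getId, createNonSecureImageCodecResource); this is illustrative only, not part of the patch:

    // Illustrative sketch, following the test's existing helpers.
    const auto client = createTestClient(onlyPid, onlyUid);
    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(onlyPid),
                                .uid = static_cast<int32_t>(onlyUid),
                                .id = getId(client),
                                .name = "none"};
    std::vector<MediaResourceParcel> resources;
    resources.push_back(createNonSecureImageCodecResource(1));
    // pid/uid/client id now travel inside the parcel instead of as separate arguments.
    mService->addResource(clientInfo, client, resources);
    mService->markClientForPendingRemoval(clientInfo);

The same parcel replaces the (pid, clientId) pair in markClientForPendingRemoval, removeResource and removeClient in the hunks that follow.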
diff --git a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
index 003569d..a0d728c 100644
--- a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
@@ -251,17 +251,31 @@
     observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
                    {MediaObservableType::kVideoNonSecureCodec, 1}};
 
+    ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                 .uid = static_cast<int32_t>(kTestUid1),
+                                 .id = getId(mTestClient1),
+                                 .name = "none"};
+
+    ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient2),
+                                 .name = "none"};
+
+    ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient3),
+                                 .name = "none"};
     std::vector<MediaResourceParcel> resources;
     // Add secure video codec.
     resources = {createSecureVideoCodecResource()};
-    mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+    mService->addResource(client1Info, mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
 
     // Add non-secure video codec.
     resources = {createNonSecureVideoCodecResource()};
-    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
+    mService->addResource(client2Info, mTestClient2, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
@@ -269,7 +283,7 @@
     // Add secure & non-secure video codecs.
     resources = {createSecureVideoCodecResource(),
                  createNonSecureVideoCodecResource()};
-    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    mService->addResource(client3Info, mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
@@ -277,7 +291,7 @@
     // Add additional audio codecs, should be ignored.
     resources.push_back(createSecureAudioCodecResource());
     resources.push_back(createNonSecureAudioCodecResource());
-    mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+    mService->addResource(client1Info, mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables3));
@@ -303,7 +317,11 @@
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 3}};
     observables3 = {{MediaObservableType::kVideoSecureCodec, 2},
                    {MediaObservableType::kVideoNonSecureCodec, 3}};
-    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient3),
+                                 .name = "none"};
+    mService->addResource(client3Info, mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
@@ -318,47 +336,61 @@
     observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
                    {MediaObservableType::kVideoNonSecureCodec, 1}};
 
+    ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                 .uid = static_cast<int32_t>(kTestUid1),
+                                 .id = getId(mTestClient1),
+                                 .name = "none"};
+
+    ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient2),
+                                 .name = "none"};
+
+    ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient3),
+                                 .name = "none"};
     std::vector<MediaResourceParcel> resources;
     // Add secure video codec to client1.
     resources = {createSecureVideoCodecResource()};
-    mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+    mService->addResource(client1Info, mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     // Remove secure video codec. observer 1&3 should receive updates.
-    mService->removeResource(kTestPid1, getId(mTestClient1), resources);
+    mService->removeResource(client1Info, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid1, kTestPid1, observables1));
     // Remove secure video codec again, should have no event.
-    mService->removeResource(kTestPid1, getId(mTestClient1), resources);
+    mService->removeResource(client1Info, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
     // Remove client1, should have no event.
-    mService->removeClient(kTestPid1, getId(mTestClient1));
+    mService->removeClient(client1Info);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
 
     // Add non-secure video codec to client2.
     resources = {createNonSecureVideoCodecResource()};
-    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
+    mService->addResource(client2Info, mTestClient2, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     // Remove client2, observer 2&3 should receive updates.
-    mService->removeClient(kTestPid2, getId(mTestClient2));
+    mService->removeClient(client2Info);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
     // Remove non-secure codec after client2 removed, should have no event.
-    mService->removeResource(kTestPid2, getId(mTestClient2), resources);
+    mService->removeResource(client2Info, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
     // Remove client2 again, should have no event.
-    mService->removeClient(kTestPid2, getId(mTestClient2));
+    mService->removeClient(client2Info);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
@@ -368,13 +400,13 @@
                  createNonSecureVideoCodecResource(),
                  createSecureAudioCodecResource(),
                  createNonSecureAudioCodecResource()};
-    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    mService->addResource(client3Info, mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
     // Remove one audio codec, should have no event.
     resources = {createSecureAudioCodecResource()};
-    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    mService->removeResource(client3Info, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
@@ -382,12 +414,12 @@
     // removal should be reported.
     resources = {createNonSecureAudioCodecResource(),
                  createSecureVideoCodecResource()};
-    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    mService->removeResource(client3Info, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
     // Remove client3 entirely. Non-secure video codec removal should be reported.
-    mService->removeClient(kTestPid2, getId(mTestClient3));
+    mService->removeClient(client3Info);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
@@ -410,7 +442,12 @@
                  createNonSecureVideoCodecResource(4),
                  createSecureAudioCodecResource(),
                  createNonSecureAudioCodecResource()};
-    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+
+    ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient3),
+                                 .name = "none"};
+    mService->addResource(client3Info, mTestClient3, resources);
     observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 4}};
     observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
@@ -424,7 +461,7 @@
                  createSecureVideoCodecResource(),
                  createSecureVideoCodecResource(),
                  createNonSecureVideoCodecResource(2)};
-    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    mService->removeResource(client3Info, resources);
     observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 2}};
     observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
@@ -433,7 +470,7 @@
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables3));
     // Remove client3 entirely. 2 non-secure video codecs removal should be reported.
-    mService->removeClient(kTestPid2, getId(mTestClient3));
+    mService->removeClient(client3Info);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
@@ -465,13 +502,27 @@
     // Add secure & non-secure video codecs.
     resources = {createSecureVideoCodecResource(),
                  createNonSecureVideoCodecResource()};
-    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                 .uid = static_cast<int32_t>(kTestUid1),
+                                 .id = getId(mTestClient1),
+                                 .name = "none"};
+
+    ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient2),
+                                 .name = "none"};
+
+    ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient3),
+                                 .name = "none"};
+    mService->addResource(client3Info, mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
 
     // Remove secure & non-secure video codecs.
-    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    mService->removeResource(client3Info, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
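
The ResourceObserverService tests apply the same migration on the removal side: removeResource and removeClient drop the separate (pid, clientId) arguments and take the ClientInfoParcel built next to each client. A minimal sketch, assuming a client2Info parcel constructed as in the hunks above:

    // Illustrative sketch; client2Info is the parcel built for mTestClient2 above.
    std::vector<MediaResourceParcel> resources = {createNonSecureVideoCodecResource()};
    mService->removeResource(client2Info, resources);  // per-resource removal
    mService->removeClient(client2Info);               // whole-client removal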
diff --git a/services/tuner/hidl/TunerHidlDvr.cpp b/services/tuner/hidl/TunerHidlDvr.cpp
index 1a619d5..3ea1eb1 100644
--- a/services/tuner/hidl/TunerHidlDvr.cpp
+++ b/services/tuner/hidl/TunerHidlDvr.cpp
@@ -72,7 +72,7 @@
 
     AidlMQDesc aidlMQDesc;
     unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(dvrMQDesc, &aidlMQDesc);
-    *_aidl_return = move(aidlMQDesc);
+    *_aidl_return = std::move(aidlMQDesc);
     return ::ndk::ScopedAStatus::ok();
 }
 
diff --git a/services/tuner/hidl/TunerHidlFilter.cpp b/services/tuner/hidl/TunerHidlFilter.cpp
index fe74a5c..c82732b 100644
--- a/services/tuner/hidl/TunerHidlFilter.cpp
+++ b/services/tuner/hidl/TunerHidlFilter.cpp
@@ -139,7 +139,7 @@
 
     AidlMQDesc aidlMQDesc;
     unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(filterMQDesc, &aidlMQDesc);
-    *_aidl_return = move(aidlMQDesc);
+    *_aidl_return = std::move(aidlMQDesc);
 
     return ::ndk::ScopedAStatus::ok();
 }
@@ -1084,8 +1084,8 @@
         }
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::media>(move(media));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::media>(std::move(media));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1101,8 +1101,8 @@
         section.dataLength = static_cast<int64_t>(sectionEvent.dataLength);
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::section>(move(section));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::section>(std::move(section));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1117,8 +1117,8 @@
         pes.mpuSequenceNumber = static_cast<int32_t>(pesEvent.mpuSequenceNumber);
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::pes>(move(pes));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::pes>(std::move(pes));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1167,8 +1167,8 @@
         }
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::tsRecord>(move(tsRecord));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::tsRecord>(std::move(tsRecord));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1194,8 +1194,8 @@
         }
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::mmtpRecord>(move(mmtpRecord));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::mmtpRecord>(std::move(mmtpRecord));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1213,8 +1213,8 @@
         download.dataLength = static_cast<int32_t>(downloadEvent.dataLength);
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::download>(move(download));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::download>(std::move(download));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1227,8 +1227,8 @@
         ipPayload.dataLength = static_cast<int32_t>(ipPayloadEvent.dataLength);
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::ipPayload>(move(ipPayload));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::ipPayload>(std::move(ipPayload));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1245,8 +1245,8 @@
         copy(descrData.begin(), descrData.end(), temi.descrData.begin());
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::temi>(move(temi));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::temi>(std::move(temi));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1268,15 +1268,15 @@
     }
 
     DemuxFilterEvent filterEvent;
-    filterEvent.set<DemuxFilterEvent::monitorEvent>(move(monitor));
-    res.push_back(move(filterEvent));
+    filterEvent.set<DemuxFilterEvent::monitorEvent>(std::move(monitor));
+    res.push_back(std::move(filterEvent));
 }
 
 void TunerHidlFilter::FilterCallback::getRestartEvent(
         const vector<HidlDemuxFilterEventExt::Event>& eventsExt, vector<DemuxFilterEvent>& res) {
     DemuxFilterEvent filterEvent;
     filterEvent.set<DemuxFilterEvent::startId>(static_cast<int32_t>(eventsExt[0].startId()));
-    res.push_back(move(filterEvent));
+    res.push_back(std::move(filterEvent));
 }
 
 }  // namespace tuner
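
The tuner changes are purely mechanical: the bare move calls are now written as std::move, which newer clang also expects (unqualified calls to std::move trigger -Wunqualified-std-cast-call). A minimal self-contained illustration of the pattern, with std::string standing in for the AIDL event types:

    #include <string>
    #include <utility>
    #include <vector>

    int main() {
        std::vector<std::string> res;
        std::string event = "example payload";
        // Qualify the cast explicitly; the move avoids copying into the vector
        // and leaves 'event' in a valid but unspecified state.
        res.push_back(std::move(event));
        return 0;
    }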