[automerger skipped] Merge "CCodecBufferChannel: process surface callback asynchronously" into android15-tests-dev am: 0b49e47aba -s ours

am skip reason: Merged-In Ia868ea2a74aa56b14f541e47b5d3d2390f266448 with SHA-1 d1146681fb is already in history

Original change: https://android-review.googlesource.com/c/platform/frameworks/av/+/3369053

Change-Id: Ie75b978845b618d9581b463e4d961cbc560898fa
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/Android.bp b/Android.bp
index 0c7ed6e..c716a06 100644
--- a/Android.bp
+++ b/Android.bp
@@ -135,6 +135,7 @@
 
 aidl_interface {
     name: "av-audio-types-aidl",
+    unstable: true,
     host_supported: true,
     vendor_available: true,
     double_loadable: true,
@@ -154,28 +155,4 @@
             sdk_version: "module_current",
         },
     },
-    versions_with_info: [
-        {
-            version: "1",
-            imports: ["android.hardware.audio.core-V2"],
-        },
-    ],
-    frozen: false,
-
-}
-
-latest_av_audio_types_aidl = "av-audio-types-aidl-V2"
-
-cc_defaults {
-    name: "latest_av_audio_types_aidl_ndk_shared",
-    shared_libs: [
-        latest_av_audio_types_aidl + "-ndk",
-    ],
-}
-
-cc_defaults {
-    name: "latest_av_audio_types_aidl_ndk_static",
-    static_libs: [
-        latest_av_audio_types_aidl + "-ndk",
-    ],
 }
diff --git a/aidl_api/av-audio-types-aidl/1/.hash b/aidl_api/av-audio-types-aidl/1/.hash
deleted file mode 100644
index 0002682..0000000
--- a/aidl_api/av-audio-types-aidl/1/.hash
+++ /dev/null
@@ -1 +0,0 @@
-ef1bc5ed9db445fbfc116cdec6e6ad081458ee40
diff --git a/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl
deleted file mode 100644
index a9aa2c1..0000000
--- a/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-///////////////////////////////////////////////////////////////////////////////
-// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
-///////////////////////////////////////////////////////////////////////////////
-
-// This file is a snapshot of an AIDL file. Do not edit it manually. There are
-// two cases:
-// 1). this is a frozen version file - do not edit this in any case.
-// 2). this is a 'current' file. If you make a backwards compatible change to
-//     the interface (from the latest frozen version), the build system will
-//     prompt you to update this file with `m <name>-update-api`.
-//
-// You must not make a backward incompatible change to any AIDL file built
-// with the aidl_interface module type with versions property set. The module
-// type is used to build AIDL files in a way that they can be used across
-// independently updatable components of the system. If a device is shipped
-// with such a backward incompatible change, it has a high risk of breaking
-// later when a module using the interface is updated, e.g., Mainline modules.
-
-package android.media.audio;
-/* @hide */
-interface IHalAdapterVendorExtension {
-  @utf8InCpp String[] parseVendorParameterIds(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeys);
-  void parseVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeysAndValues, out android.hardware.audio.core.VendorParameter[] syncParameters, out android.hardware.audio.core.VendorParameter[] asyncParameters);
-  android.hardware.audio.core.VendorParameter[] parseBluetoothA2dpReconfigureOffload(in @utf8InCpp String rawValue);
-  android.hardware.audio.core.VendorParameter[] parseBluetoothLeReconfigureOffload(in @utf8InCpp String rawValue);
-  @utf8InCpp String processVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in android.hardware.audio.core.VendorParameter[] parameters);
-  enum ParameterScope {
-    MODULE = 0,
-    STREAM = 1,
-  }
-}
diff --git a/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl
deleted file mode 100644
index a9aa2c1..0000000
--- a/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-///////////////////////////////////////////////////////////////////////////////
-// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
-///////////////////////////////////////////////////////////////////////////////
-
-// This file is a snapshot of an AIDL file. Do not edit it manually. There are
-// two cases:
-// 1). this is a frozen version file - do not edit this in any case.
-// 2). this is a 'current' file. If you make a backwards compatible change to
-//     the interface (from the latest frozen version), the build system will
-//     prompt you to update this file with `m <name>-update-api`.
-//
-// You must not make a backward incompatible change to any AIDL file built
-// with the aidl_interface module type with versions property set. The module
-// type is used to build AIDL files in a way that they can be used across
-// independently updatable components of the system. If a device is shipped
-// with such a backward incompatible change, it has a high risk of breaking
-// later when a module using the interface is updated, e.g., Mainline modules.
-
-package android.media.audio;
-/* @hide */
-interface IHalAdapterVendorExtension {
-  @utf8InCpp String[] parseVendorParameterIds(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeys);
-  void parseVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeysAndValues, out android.hardware.audio.core.VendorParameter[] syncParameters, out android.hardware.audio.core.VendorParameter[] asyncParameters);
-  android.hardware.audio.core.VendorParameter[] parseBluetoothA2dpReconfigureOffload(in @utf8InCpp String rawValue);
-  android.hardware.audio.core.VendorParameter[] parseBluetoothLeReconfigureOffload(in @utf8InCpp String rawValue);
-  @utf8InCpp String processVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in android.hardware.audio.core.VendorParameter[] parameters);
-  enum ParameterScope {
-    MODULE = 0,
-    STREAM = 1,
-  }
-}
diff --git a/apex/Android.bp b/apex/Android.bp
index b0d7c02..356bf03 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -173,6 +173,7 @@
         "mediaswcodec",
     ],
     native_shared_libs: [
+        "libapexcodecs",
         "libcodec2_hidl@1.0",
         "libcodec2_hidl@1.1",
         "libcodec2_hidl@1.2",
diff --git a/apex/ld.config.txt b/apex/ld.config.txt
index 4dc5fb1..c24d51f 100644
--- a/apex/ld.config.txt
+++ b/apex/ld.config.txt
@@ -33,7 +33,7 @@
 # TODO: replace the following when apex has a way to auto-generate this list
 # namespace.default.link.platform.shared_libs  = %LLNDK_LIBRARIES%
 # namespace.default.link.platform.shared_libs += %SANITIZER_RUNTIME_LIBRARIES%
-namespace.default.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libdl_android.so:libvulkan.so:libbinder_ndk.so
+namespace.default.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libdl_android.so:libvulkan.so:libbinder_ndk.so
 
 ###############################################################################
 # "platform" namespace
@@ -138,7 +138,7 @@
 # TODO: replace the following when apex has a way to auto-generate this list
 # namespace.sphal.link.platform.shared_libs  = %LLNDK_LIBRARIES%
 # namespace.sphal.link.platform.shared_libs += %SANITIZER_RUNTIME_LIBRARIES%
-namespace.sphal.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libvulkan.so:libbinder_ndk.so
+namespace.sphal.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libvulkan.so:libbinder_ndk.so
 
 # Add a link for libz.so which is llndk on devices where VNDK is not enforced.
 namespace.sphal.link.platform.shared_libs += libz.so
diff --git a/camera/Android.bp b/camera/Android.bp
index 9e1efae..b6241f4 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -54,7 +54,13 @@
 cc_aconfig_library {
     name: "camera_platform_flags_c_lib",
     aconfig_declarations: "camera_platform_flags",
+}
+
+cc_aconfig_library {
+    name: "camera_platform_flags_c_lib_for_test",
+    aconfig_declarations: "camera_platform_flags",
     host_supported: true,
+    mode: "test",
 }
 
 java_aconfig_library {
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 6862cb1..d0df90b 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -22,6 +22,11 @@
     default_applicable_licenses: ["frameworks_av_camera_license"],
 }
 
+vintf_fragment {
+    name: "manifest_android.frameworks.cameraservice.service.xml",
+    src: "manifest_android.frameworks.cameraservice.service.xml",
+}
+
 cc_binary {
     name: "cameraserver",
 
@@ -61,7 +66,7 @@
 
     init_rc: ["cameraserver.rc"],
 
-    vintf_fragments: [
+    vintf_fragment_modules: [
         "manifest_android.frameworks.cameraservice.service.xml",
     ],
 }
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index f36a743..0c80e7e 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -177,14 +177,11 @@
 
         sp<IServiceManager> sm = defaultServiceManager();
         sp<IBinder> binder;
-        do {
-            binder = sm->getService(toString16(kCameraServiceName));
-            if (binder != nullptr) {
-                break;
-            }
-            ALOGW("CameraService not published, waiting...");
-            usleep(kCameraServicePollDelay);
-        } while(true);
+        binder = sm->checkService(String16(kCameraServiceName));
+        if (binder == nullptr) {
+            ALOGE("%s: Could not get CameraService instance.", __FUNCTION__);
+            return nullptr;
+        }
         if (mDeathNotifier == nullptr) {
             mDeathNotifier = new DeathNotifier(this);
         }
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index d8bf6b1..f4124ef 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -99,7 +99,6 @@
 
   private:
     sp<hardware::ICameraService> mCameraService;
-    const int                    kCameraServicePollDelay = 500000; // 0.5s
     const char*                  kCameraServiceName      = "media.camera";
     Mutex                        mLock;
 
diff --git a/cmds/stagefright/AudioPlayer.cpp b/cmds/stagefright/AudioPlayer.cpp
index 6cddf47..54885ef 100644
--- a/cmds/stagefright/AudioPlayer.cpp
+++ b/cmds/stagefright/AudioPlayer.cpp
@@ -101,6 +101,10 @@
 
     CHECK(mFirstBuffer == NULL);
 
+    if (!mAudioPlayerWrapper) {
+        mAudioPlayerWrapper = sp<MediaPlayerBase::WeakWrapper<AudioPlayer>>::make(this);
+    }
+
     MediaSource::ReadOptions options;
     if (mSeeking) {
         options.setSeekTo(mSeekTimeUs);
@@ -203,7 +207,7 @@
                 mSampleRate, numChannels, channelMask, audioFormat,
                 DEFAULT_AUDIOSINK_BUFFERCOUNT,
                 &AudioPlayer::AudioSinkCallback,
-                this,
+                mAudioPlayerWrapper,
                 (audio_output_flags_t)flags,
                 useOffload() ? &offloadInfo : NULL);
 
@@ -430,10 +434,11 @@
 
 // static
 size_t AudioPlayer::AudioSinkCallback(
-        MediaPlayerBase::AudioSink * /* audioSink */,
-        void *buffer, size_t size, void *cookie,
+        const sp<MediaPlayerBase::AudioSink>& /* audioSink */,
+        void *buffer, size_t size, const wp<RefBase>& cookie,
         MediaPlayerBase::AudioSink::cb_event_t event) {
-    AudioPlayer *me = (AudioPlayer *)cookie;
+    const auto me = MediaPlayerBase::WeakWrapper<AudioPlayer>::promoteFromRefBase(cookie);
+    if (!me) return 0;
 
     switch(event) {
     case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
diff --git a/cmds/stagefright/AudioPlayer.h b/cmds/stagefright/AudioPlayer.h
index 608f54b..877ac13 100644
--- a/cmds/stagefright/AudioPlayer.h
+++ b/cmds/stagefright/AudioPlayer.h
@@ -29,7 +29,7 @@
 
 struct AwesomePlayer;
 
-class AudioPlayer : AudioTrack::IAudioTrackCallback {
+class AudioPlayer : public AudioTrack::IAudioTrackCallback {
 public:
     enum {
         REACHED_EOS,
@@ -97,14 +97,15 @@
     MediaBufferBase *mFirstBuffer;
 
     sp<MediaPlayerBase::AudioSink> mAudioSink;
+    sp<MediaPlayerBase::WeakWrapper<AudioPlayer>> mAudioPlayerWrapper;
 
     bool mPlaying;
     int64_t mStartPosUs;
     const uint32_t mCreateFlags;
 
     static size_t AudioSinkCallback(
-            MediaPlayerBase::AudioSink *audioSink,
-            void *data, size_t size, void *me,
+            const sp<MediaPlayerBase::AudioSink>& audioSink,
+            void *data, size_t size, const wp<RefBase>& me,
             MediaPlayerBase::AudioSink::cb_event_t event);
 
     size_t fillBuffer(void *data, size_t size);
diff --git a/drm/drmserver/Android.bp b/drm/drmserver/Android.bp
index 81c2003..cee44b9 100644
--- a/drm/drmserver/Android.bp
+++ b/drm/drmserver/Android.bp
@@ -78,9 +78,6 @@
         "libselinux",
         "libstagefright_foundation",
     ],
-    whole_static_libs: [
-        "libc++fs",
-    ],
 
     cflags: [
         "-Wall",
@@ -127,7 +124,6 @@
     ],
 
     static_libs: [
-        "libc++fs",
         "libmediautils",
         "liblog",
         "libdrmframeworkcommon",
diff --git a/drm/libdrmframework/plugins/passthru/Android.bp b/drm/libdrmframework/plugins/passthru/Android.bp
index 0a6cd47..6ac7188 100644
--- a/drm/libdrmframework/plugins/passthru/Android.bp
+++ b/drm/libdrmframework/plugins/passthru/Android.bp
@@ -45,9 +45,6 @@
         "libdl",
         "libdrmframeworkcommon",
     ],
-    whole_static_libs: [
-        "libc++fs",
-    ],
 
     local_include_dirs: ["include"],
 
diff --git a/drm/libmediadrm/DrmHalHidl.cpp b/drm/libmediadrm/DrmHalHidl.cpp
index c8c6e8e..33ea5ea 100644
--- a/drm/libmediadrm/DrmHalHidl.cpp
+++ b/drm/libmediadrm/DrmHalHidl.cpp
@@ -339,7 +339,7 @@
             DrmUtils::LOG2BI("makeDrmFactories: using default passthrough drm instance");
             factories.push_back(passthrough);
         } else {
-            DrmUtils::LOG2BE("Failed to find passthrough drm factories");
+            DrmUtils::LOG2BW("Failed to find passthrough drm factories");
         }
     }
     return factories;
diff --git a/drm/libmediadrmrkp/include/DrmRemotelyProvisionedComponent.h b/drm/libmediadrmrkp/include/DrmRemotelyProvisionedComponent.h
index f046785..97a8cc4 100644
--- a/drm/libmediadrmrkp/include/DrmRemotelyProvisionedComponent.h
+++ b/drm/libmediadrmrkp/include/DrmRemotelyProvisionedComponent.h
@@ -35,7 +35,8 @@
 class DrmRemotelyProvisionedComponent : public BnRemotelyProvisionedComponent {
   public:
     DrmRemotelyProvisionedComponent(std::shared_ptr<IDrmPlugin> drm, std::string drmVendor,
-                                    std::string drmDesc, std::vector<uint8_t> bcc);
+                                    std::string drmDesc, std::vector<uint8_t> bcc,
+                                    std::vector<uint8_t> bcc_signature);
     ScopedAStatus getHardwareInfo(RpcHardwareInfo* info) override;
 
     ScopedAStatus generateEcdsaP256KeyPair(bool testMode, MacedPublicKey* macedPublicKey,
@@ -60,6 +61,7 @@
     std::string mDrmVendor;
     std::string mDrmDesc;
     std::vector<uint8_t> mBcc;
+    std::vector<uint8_t> mBccSignature;
 };
 }  // namespace android::mediadrm
 
diff --git a/drm/libmediadrmrkp/src/DrmRemotelyProvisionedComponent.cpp b/drm/libmediadrmrkp/src/DrmRemotelyProvisionedComponent.cpp
index 440be79..65054b0 100644
--- a/drm/libmediadrmrkp/src/DrmRemotelyProvisionedComponent.cpp
+++ b/drm/libmediadrmrkp/src/DrmRemotelyProvisionedComponent.cpp
@@ -28,11 +28,13 @@
 DrmRemotelyProvisionedComponent::DrmRemotelyProvisionedComponent(std::shared_ptr<IDrmPlugin> drm,
                                                                  std::string drmVendor,
                                                                  std::string drmDesc,
-                                                                 std::vector<uint8_t> bcc)
+                                                                 std::vector<uint8_t> bcc,
+                                                                 std::vector<uint8_t> bcc_signature)
     : mDrm(std::move(drm)),
       mDrmVendor(std::move(drmVendor)),
       mDrmDesc(std::move(drmDesc)),
-      mBcc(std::move(bcc)) {}
+      mBcc(std::move(bcc)),
+      mBccSignature(std::move(bcc_signature)) {}
 
 ScopedAStatus DrmRemotelyProvisionedComponent::getHardwareInfo(RpcHardwareInfo* info) {
     info->versionNumber = 3;
@@ -107,7 +109,7 @@
     for (auto i : keyToProp) {
         auto key = i.first;
         auto prop = i.second;
-        const auto& val= deviceInfoMap.get(key);
+        const auto& val = deviceInfoMap.get(key);
         if (val == nullptr || val->asTstr()->value().empty()) {
             std::string propValue = android::base::GetProperty(prop, "");
             if (propValue.empty()) {
@@ -161,12 +163,16 @@
     }
 
     // assemble AuthenticatedRequest (definition in IRemotelyProvisionedComponent.aidl)
-    *out = cppbor::Array()
-                   .add(1 /* version */)
-                   .add(cppbor::Map() /* UdsCerts */)
-                   .add(cppbor::EncodedItem(mBcc))
-                   .add(cppbor::EncodedItem(std::move(deviceSignedCsrPayload)))
-                   .encode();
+    cppbor::Array request_array = cppbor::Array().add(1 /* version */);
+    if (!mBccSignature.empty()) {
+        request_array.add(cppbor::EncodedItem(mBccSignature) /* UdsCerts */);
+    } else {
+        request_array.add(cppbor::Map() /* empty UdsCerts */);
+    }
+    request_array.add(cppbor::EncodedItem(mBcc))
+            .add(cppbor::EncodedItem(std::move(deviceSignedCsrPayload)));
+    *out = request_array.encode();
+
     return ScopedAStatus::ok();
 }
 }  // namespace android::mediadrm
\ No newline at end of file
diff --git a/drm/libmediadrmrkp/src/DrmRkpAdapter.cpp b/drm/libmediadrmrkp/src/DrmRkpAdapter.cpp
index 515d157..750b51e 100644
--- a/drm/libmediadrmrkp/src/DrmRkpAdapter.cpp
+++ b/drm/libmediadrmrkp/src/DrmRkpAdapter.cpp
@@ -87,13 +87,21 @@
                           status.getDescription().c_str());
                     return;
                 }
-
+                std::vector<uint8_t> bcc_signature;
+                status =
+                        mDrm->getPropertyByteArray("bootCertificateChainSignature", &bcc_signature);
+                if (!status.isOk()) {
+                    ALOGW("mDrm->getPropertyByteArray(\"bootCertificateChainSignature\") failed. "
+                          "Detail: [%s].",
+                          status.getDescription().c_str());
+                    // bcc signature is optional, no need to return when it is unavailable.
+                }
                 std::string compName(instance);
                 auto comps = static_cast<
                         std::map<std::string, std::shared_ptr<IRemotelyProvisionedComponent>>*>(
                         context);
                 (*comps)[compName] = ::ndk::SharedRefBase::make<DrmRemotelyProvisionedComponent>(
-                        mDrm, drmVendor, drmDesc, bcc);
+                        mDrm, drmVendor, drmDesc, bcc, bcc_signature);
             });
     return comps;
 }
diff --git a/drm/mediadrm/plugins/clearkey/aidl/Android.bp b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
index 9a06bd2..f46409f 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
@@ -11,13 +11,13 @@
     name: "aidl_clearkey_service_defaults-use-shared-deps",
 
     shared_libs: [
+        "android.hardware.drm-V1-ndk",
         "libbase",
         "libbinder_ndk",
         "libcrypto",
         "liblog",
         "libprotobuf-cpp-lite",
         "libutils",
-        "android.hardware.drm-V1-ndk",
     ],
 
     static_libs: [
@@ -62,7 +62,11 @@
 
     relative_install_path: "hw",
 
-    cflags: ["-Wall", "-Werror", "-Wthread-safety"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wthread-safety",
+    ],
 
     include_dirs: ["frameworks/av/include"],
 
@@ -81,7 +85,7 @@
     ],
     srcs: ["Service.cpp"],
     init_rc: ["android.hardware.drm-service.clearkey.rc"],
-    vintf_fragments: ["android.hardware.drm-service.clearkey.xml"],
+    vintf_fragment_modules: ["android.hardware.drm-service.clearkey.xml_vintf"],
 }
 
 cc_binary {
@@ -93,7 +97,13 @@
     overrides: ["android.hardware.drm-service.clearkey"],
     srcs: ["ServiceLazy.cpp"],
     init_rc: ["android.hardware.drm-service-lazy.clearkey.rc"],
-    vintf_fragments: ["android.hardware.drm-service.clearkey.xml"],
+    vintf_fragment_modules: ["android.hardware.drm-service.clearkey.xml_vintf"],
+}
+
+vintf_fragment {
+    name: "android.hardware.drm-service.clearkey.xml_vintf",
+    src: "android.hardware.drm-service.clearkey.xml",
+    vendor: true,
 }
 
 cc_binary {
@@ -123,6 +133,7 @@
     required: [
         "com.android.hardware.drm.clearkey",
     ],
+    vendor: true,
 }
 
 cc_defaults {
@@ -137,18 +148,22 @@
 
     relative_install_path: "hw",
 
-    cflags: ["-Wall", "-Werror", "-Wthread-safety"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wthread-safety",
+    ],
 
     include_dirs: ["frameworks/av/include"],
 
     shared_libs: [
+        "android.hardware.drm-V1-ndk",
         "libbase",
         "libbinder_ndk",
         "libcrypto",
         "liblog",
         "libprotobuf-cpp-lite",
         "libutils",
-        "android.hardware.drm-V1-ndk",
     ],
 
     static_libs: [
@@ -192,7 +207,7 @@
     ],
     prebuilts: [
         "android.hardware.drm-service.clearkey.apex.rc",
-        "android.hardware.drm-service.clearkey.xml"
+        "android.hardware.drm-service.clearkey.xml",
     ],
     overrides: [
         "android.hardware.drm-service.clearkey",
@@ -233,11 +248,11 @@
     ],
     prebuilts: [
         "android.hardware.drm-service-lazy.clearkey.apex.rc",
-        "android.hardware.drm-service.clearkey.xml"
+        "android.hardware.drm-service.clearkey.xml",
     ],
     overrides: [
-        "android.hardware.drm-service.clearkey",
         "android.hardware.drm-service-lazy.clearkey",
+        "android.hardware.drm-service.clearkey",
         "com.android.hardware.drm.clearkey",
     ],
 }
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
index 31cb7c0..8a93132 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
@@ -37,7 +37,6 @@
 const int kSecureStopIdStart = 100;
 const std::string kOfflineLicense("\"type\":\"persistent-license\"");
 const std::string kStreaming("Streaming");
-const std::string kTemporaryLicense("\"type\":\"temporary\"");
 const std::string kTrue("True");
 
 const std::string kQueryKeyLicenseType("LicenseType");
diff --git a/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp b/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp
index ddbc594..cd129ac 100644
--- a/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp
+++ b/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp
@@ -27,10 +27,7 @@
 const std::string kKeyTypeTag("kty");
 const std::string kKeyTag("k");
 const std::string kKeyIdTag("kid");
-const std::string kMediaSessionType("type");
-const std::string kPersistentLicenseSession("persistent-license");
 const std::string kSymmetricKeyValue("oct");
-const std::string kTemporaryLicenseSession("temporary");
 }  // namespace
 
 namespace clearkeydrm {
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index 1a637ac..695cad6 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -45,6 +45,32 @@
             "file_patterns": ["(?i)drm|crypto"]
         }
     ],
+    "postsubmit": [
+        {
+            "name": "MctsMediaCodecTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.codec.cts.EncodeDecodeTest"
+                }
+            ]
+        },
+        {
+            "name": "MctsMediaCodecTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
+                }
+            ]
+        },
+        {
+            "name": "MctsMediaCodecTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.codec.cts.ExtractDecodeEditEncodeMuxTest"
+                }
+            ]
+        }
+    ],
     // Postsubmit tests for TV devices
     "tv-postsubmit": [
         {
diff --git a/media/aconfig/Android.bp b/media/aconfig/Android.bp
index 16beb28..1e5eafb 100644
--- a/media/aconfig/Android.bp
+++ b/media/aconfig/Android.bp
@@ -50,3 +50,22 @@
     ],
     aconfig_declarations: "aconfig_codec_fwk_flags",
 }
+
+aconfig_declarations {
+    name: "aconfig_media_swcodec_flags",
+    package: "android.media.swcodec.flags",
+    container: "com.android.media.swcodec",
+    srcs: ["swcodec_flags.aconfig"],
+}
+
+cc_aconfig_library {
+    name: "android.media.swcodec.flags-aconfig-cc",
+    aconfig_declarations: "aconfig_media_swcodec_flags",
+    min_sdk_version: "apex_inherit",
+    vendor_available: true,
+    double_loadable: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+}
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index d662585..96fb3e3 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -7,12 +7,48 @@
 
 flag {
   name: "aidl_hal_input_surface"
+  is_exported: true
   namespace: "codec_fwk"
   description: "Feature flags for enabling AIDL HAL InputSurface handling"
   bug: "201479783"
 }
 
 flag {
+  name: "apv_support"
+  is_exported: true
+  namespace: "codec_fwk"
+  description: "Feature flag for Android support for APV Content"
+  bug: "375464302"
+}
+
+flag {
+  name: "codec_availability"
+  namespace: "codec_fwk"
+  description: "Feature flag for codec availability HAL API support"
+  bug: "363282971"
+}
+
+flag {
+  name: "codec_buffer_state_cleanup"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for more buffer state cleanup in MediaCodec"
+  bug: "343502509"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
+  name: "dataspace_v0_partial"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for using V0 dataspace in some cases"
+  bug: "313827126"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
   name: "dynamic_color_aspects"
   is_exported: true
   namespace: "codec_fwk"
@@ -83,6 +119,21 @@
 }
 
 flag {
+  name: "num_input_slots"
+  namespace: "codec_fwk"
+  description: "Feature flag for exposing number of input slots"
+  bug: "159891571"
+}
+
+flag {
+  name: "p210_format_support"
+  is_exported: true
+  namespace: "codec_fwk"
+  description: "Feature flag for Android support for P210 YCbCr format"
+  bug: "368395888"
+}
+
+flag {
   name: "region_of_interest"
   is_exported: true
   namespace: "codec_fwk"
@@ -98,6 +149,23 @@
 }
 
 flag {
+  name: "rendering_depth_removal"
+  namespace: "codec_fwk"
+  description: "Feature flag for removing rendering depth"
+  bug: "275527219"
+}
+
+flag {
+  name: "secure_codecs_require_crypto"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for requiring setting crypto for secure codecs"
+  bug: "365162324"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
   name: "set_callback_stall"
   namespace: "codec_fwk"
   description: "Bugfix flag for setCallback stall"
@@ -128,8 +196,22 @@
 }
 
 flag {
+  name: "subsession_metrics"
+  namespace: "codec_fwk"
+  description: "Feature flag for subsession codec metrics"
+  bug: "363382811"
+}
+
+flag {
   name: "teamfood"
   namespace: "codec_fwk"
   description: "Feature flag to track teamfood population"
   bug: "328770262"
 }
+
+flag {
+  name: "thumbnail_block_model"
+  namespace: "codec_fwk"
+  description: "Feature flag for using block model decoder in thumbnail generation"
+  bug: "329521645"
+}
diff --git a/media/aconfig/swcodec_flags.aconfig b/media/aconfig/swcodec_flags.aconfig
new file mode 100644
index 0000000..a435a43
--- /dev/null
+++ b/media/aconfig/swcodec_flags.aconfig
@@ -0,0 +1,14 @@
+# Media SW Codec Flags.
+#
+# !!! Please add flags in alphabetical order. !!!
+package: "android.media.swcodec.flags"
+container: "com.android.media.swcodec"
+
+flag {
+  name: "apv_software_codec"
+  is_exported: true
+  is_fixed_read_only: true
+  namespace: "codec_fwk"
+  description: "Feature flag for APV Software C2 codec"
+  bug: "376770121"
+}
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index de8aca7..5f4a6a1 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -50,6 +50,23 @@
 }
 
 cc_aconfig_library {
+    name: "com.android.media.audioserver-aconfig-cc-ro",
+    aconfig_declarations: "com.android.media.audioserver-aconfig",
+    defaults: ["audio-aconfig-cc-defaults"],
+    double_loadable: true,
+    host_supported: true,
+    product_available: true,
+    vendor_available: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+    min_sdk_version: "29",
+    mode: "force-read-only",
+}
+
+cc_aconfig_library {
     name: "com.android.media.audio-aconfig-cc",
     aconfig_declarations: "com.android.media.audio-aconfig",
     defaults: ["audio-aconfig-cc-defaults"],
@@ -99,7 +116,11 @@
     package: "android.media.audio",
     container: "system",
     srcs: ["audio_framework.aconfig"],
-    visibility: ["//frameworks/base/api"],
+    visibility: [
+        "//frameworks/base/api",
+        "//frameworks/base/core/res",
+    ],
+    exportable: true,
 }
 
 aconfig_declarations {
@@ -132,6 +153,20 @@
 }
 
 java_aconfig_library {
+    name: "android.media.audio-aconfig-exported-java",
+    aconfig_declarations: "android.media.audio-aconfig",
+    defaults: ["framework-minus-apex-aconfig-java-defaults"],
+    min_sdk_version: "Tiramisu",
+    mode: "exported",
+    apex_available: [
+        "com.android.btservices",
+    ],
+    visibility: [
+        "//packages/modules/Bluetooth:__subpackages__",
+    ],
+}
+
+java_aconfig_library {
     name: "android.media.audiopolicy-aconfig-java",
     aconfig_declarations: "android.media.audiopolicy-aconfig",
     defaults: ["framework-minus-apex-aconfig-java-defaults"],
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index 0209e28..b155bac 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -23,6 +23,14 @@
 }
 
 flag {
+    name: "deprecate_stream_bt_sco"
+    namespace: "media_audio"
+    description: "Deprecate STREAM_BLUETOOTH_SCO"
+    is_exported: true
+    bug: "376756660"
+}
+
+flag {
     name: "feature_spatial_audio_headtracking_low_latency"
     is_exported: true
     namespace: "media_audio"
@@ -40,6 +48,14 @@
     bug: "316414750"
 }
 
+flag {
+    name: "dolby_ac4_level4_encoding_api"
+    namespace: "media_audio"
+    description: "Feature flag for Dolby AC-4 level 4 AudioFormat encoding."
+    is_fixed_read_only: true
+    bug: "266537650"
+}
+
 # TODO remove
 flag {
     name: "foreground_audio_control"
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 9eaddce..0682f65 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -739,6 +739,8 @@
             {// Note: not in the IANA registry.
              AUDIO_FORMAT_APTX_HD, make_AudioFormatDescription("audio/vnd.qcom.aptx.hd")},
             {AUDIO_FORMAT_AC4, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AC4)},
+            {AUDIO_FORMAT_AC4_L4, make_AudioFormatDescription(
+                    std::string(::android::MEDIA_MIMETYPE_AUDIO_AC4) + ";version=02.01.04")},
             {// Note: not in the IANA registry.
              AUDIO_FORMAT_LDAC, make_AudioFormatDescription("audio/vnd.sony.ldac")},
             {AUDIO_FORMAT_MAT,
@@ -2311,6 +2313,15 @@
     audio_port_config_device_ext legacy{};
     RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
                     aidl.device, &legacy.type, legacy.address));
+    const bool isInput = false;  // speaker_layout_channel_mask only represents output.
+    if (aidl.speakerLayout.has_value()) {
+        legacy.speaker_layout_channel_mask =
+                VALUE_OR_RETURN(aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                        aidl.speakerLayout.value(), isInput));
+    } else {
+        // Default to none when the field is null in the AIDL.
+        legacy.speaker_layout_channel_mask = AUDIO_CHANNEL_NONE;
+    }
     return legacy;
 }
 
@@ -2319,6 +2330,14 @@
     AudioPortDeviceExt aidl;
     aidl.device = VALUE_OR_RETURN(
             legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
+    const bool isInput = false;  // speaker_layout_channel_mask only represents output.
+    // The AIDL speakerLayout is nullable and if set, can only be a layoutMask.
+    if (audio_channel_mask_is_valid(legacy.speaker_layout_channel_mask) &&
+        audio_channel_mask_get_representation(legacy.speaker_layout_channel_mask) ==
+                AUDIO_CHANNEL_REPRESENTATION_POSITION) {
+        aidl.speakerLayout = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                legacy.speaker_layout_channel_mask, isInput));
+    }
     return aidl;
 }
 
diff --git a/media/audioaidlconversion/AidlConversionNdk.cpp b/media/audioaidlconversion/AidlConversionNdk.cpp
index 9b14a5e..5f7260d 100644
--- a/media/audioaidlconversion/AidlConversionNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionNdk.cpp
@@ -35,6 +35,7 @@
 
 using hardware::audio::common::PlaybackTrackMetadata;
 using hardware::audio::common::RecordTrackMetadata;
+using hardware::audio::common::SourceMetadata;
 using ::android::BAD_VALUE;
 using ::android::OK;
 
@@ -194,5 +195,17 @@
     return aidl;
 }
 
+// static
+ConversionResult<SourceMetadata>
+legacy2aidl_playback_track_metadata_v7_SourceMetadata(
+        const std::vector<playback_track_metadata_v7_t>& legacy) {
+    SourceMetadata aidl;
+    aidl.tracks = VALUE_OR_RETURN(
+            convertContainer<std::vector<PlaybackTrackMetadata>>(
+                    legacy,
+                    legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata));
+    return aidl;
+}
+
 }  // namespace android
 }  // aidl
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdk.h b/media/audioaidlconversion/include/media/AidlConversionNdk.h
index 813a728..b8a3110 100644
--- a/media/audioaidlconversion/include/media/AidlConversionNdk.h
+++ b/media/audioaidlconversion/include/media/AidlConversionNdk.h
@@ -28,6 +28,7 @@
 
 #include <aidl/android/hardware/audio/common/PlaybackTrackMetadata.h>
 #include <aidl/android/hardware/audio/common/RecordTrackMetadata.h>
+#include <aidl/android/hardware/audio/common/SourceMetadata.h>
 #include <aidl/android/media/audio/common/AudioConfig.h>
 #include <media/AidlConversionUtil.h>
 
@@ -56,5 +57,9 @@
 ConversionResult<hardware::audio::common::RecordTrackMetadata>
 legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(const record_track_metadata_v7& legacy);
 
+ConversionResult<hardware::audio::common::SourceMetadata>
+legacy2aidl_playback_track_metadata_v7_SourceMetadata(
+        const std::vector<playback_track_metadata_v7_t>& legacy);
+
 }  // namespace android
 }  // namespace aidl
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index ac2fcbe..47b48e3 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -20,13 +20,6 @@
         "-Werror",
     ],
 
-    header_libs: [
-        "audiopolicyservicelocal_headers",
-        "libaudiohal_headers",
-        "libmedia_headers",
-        "libmediametrics_headers",
-    ],
-
     defaults: [
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_media_audio_common_types_cpp_shared",
@@ -40,39 +33,10 @@
         "libaudioflinger",
         "libaudiopolicyservice",
         "libmedialogservice",
-        "libnbaio",
     ],
 
     shared_libs: [
-        "libaudioclient",
-        "libaudioprocessing",
-        "libbinder",
-        "libcutils",
-        "libhidlbase",
-        "liblog",
-        "libmedia",
-        "libmediautils",
-        "libnblog",
-        "libpowermanager",
-        "libutils",
-        "libvibrator",
-    ],
-
-    // TODO check if we still need all of these include directories
-    include_dirs: [
-        "external/sonic",
-        "frameworks/av/media/libaaudio/include",
-        "frameworks/av/media/libaaudio/src",
-        "frameworks/av/media/libaaudio/src/binding",
-        "frameworks/av/services/audioflinger",
-        "frameworks/av/services/audiopolicy",
-        "frameworks/av/services/audiopolicy/common/include",
-        "frameworks/av/services/audiopolicy/common/managerdefinitions/include",
-        "frameworks/av/services/audiopolicy/engine/interface",
-        "frameworks/av/services/audiopolicy/service",
-        "frameworks/av/services/medialog",
-        "frameworks/av/services/oboeservice", // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
-
+        "libhidlbase", // required for threadpool config.
     ],
 
     init_rc: ["audioserver.rc"],
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index 722b13a..93009c4 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -466,6 +466,7 @@
 
 aom_codec_err_t C2SoftAomEnc::setupCodecParameters() {
     aom_codec_err_t codec_return = AOM_CODEC_OK;
+    const int maxIntraBitratePct = mBitrateControlMode == AOM_CBR ? 300 : 450;
 
     codec_return = aom_codec_control(mCodecContext, AV1E_SET_TARGET_SEQ_LEVEL_IDX, mAV1EncLevel);
     if (codec_return != AOM_CODEC_OK) goto BailOut;
@@ -492,6 +493,10 @@
     codec_return = aom_codec_control(mCodecContext, AV1E_SET_AQ_MODE, 3);
     if (codec_return != AOM_CODEC_OK) goto BailOut;
 
+    codec_return = aom_codec_control(mCodecContext, AOME_SET_MAX_INTRA_BITRATE_PCT,
+                                     maxIntraBitratePct);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
     codec_return = aom_codec_control(mCodecContext, AV1E_SET_COEFF_COST_UPD_FREQ, 3);
     if (codec_return != AOM_CODEC_OK) goto BailOut;
 
diff --git a/media/codec2/components/apv/Android.bp b/media/codec2/components/apv/Android.bp
new file mode 100644
index 0000000..f565978
--- /dev/null
+++ b/media/codec2/components/apv/Android.bp
@@ -0,0 +1,58 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+    enabled: true,
+    name: "libcodec2_soft_apvenc",
+    defaults: [
+        "libcodec2_soft-defaults",
+        "libcodec2_soft_sanitize_signed-defaults",
+        "libcodec2_soft_sanitize_cfi-defaults",
+    ],
+
+    static_libs: [
+        "libopenapv",
+        "android.media.swcodec.flags-aconfig-cc",
+    ],
+
+    srcs: ["C2SoftApvEnc.cpp"],
+
+    cflags: [
+        "-DOAPV_STATIC_DEFINE",
+        "-Wno-unused-variable",
+        "-Wno-unused-parameter",
+        "-Wno-unused-function",
+        "-Wno-reorder-ctor",
+    ],
+}
+
+cc_library {
+    enabled: true,
+    name: "libcodec2_soft_apvdec",
+    defaults: [
+        "libcodec2_soft-defaults",
+        "libcodec2_soft_sanitize_signed-defaults",
+        "libcodec2_soft_sanitize_cfi-defaults",
+    ],
+
+    static_libs: [
+        "libopenapv",
+        "android.media.swcodec.flags-aconfig-cc",
+    ],
+
+    srcs: ["C2SoftApvDec.cpp"],
+
+    cflags: [
+        "-DOAPV_STATIC_DEFINE",
+        "-Wno-unused-variable",
+        "-Wno-unused-parameter",
+        "-Wno-unused-function",
+        "-Wno-reorder-ctor",
+    ],
+}
diff --git a/media/codec2/components/apv/C2SoftApvCommon.h b/media/codec2/components/apv/C2SoftApvCommon.h
new file mode 100644
index 0000000..9325f28
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvCommon.h
@@ -0,0 +1,169 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_APV_COMMON_H__
+#define ANDROID_C2_SOFT_APV_COMMON_H__
+
+typedef enum {
+    PIX_CHROMA_NA = 0xFFFFFFFF,
+    PIX_YUV_420P = 0x1,
+    PIX_YUV_422P = 0x2,
+    PIX_420_UV_INTL = 0x3,
+    PIX_YUV_422IBE = 0x4,
+    PIX_YUV_422ILE = 0x5,
+    PIX_YUV_444P = 0x6,
+    PIX_YUV_411P = 0x7,
+    PIX_GRAY = 0x8,
+    PIX_RGB_565 = 0x9,
+    PIX_RGB_24 = 0xa,
+    PIX_YUV_420SP_UV = 0xb,
+    PIX_YUV_420SP_VU = 0xc,
+    PIX_YUV_422SP_UV = 0xd,
+    PIX_YUV_422SP_VU = 0xe
+} PIX_COLOR_FORMAT_T;
+
+#define CLIP_VAL(n, min, max) (((n) > (max)) ? (max) : (((n) < (min)) ? (min) : (n)))
+#define ALIGN_VAL(val, align) ((((val) + (align) - 1) / (align)) * (align))
+
+static int atomic_inc(volatile int* pcnt) {
+    int ret;
+    ret = *pcnt;
+    ret++;
+    *pcnt = ret;
+    return ret;
+}
+
+static int atomic_dec(volatile int* pcnt) {
+    int ret;
+    ret = *pcnt;
+    ret--;
+    *pcnt = ret;
+    return ret;
+}
+
+/* Function to allocate memory for a picture buffer:
+   This function might need to be modified according to the O/S or CPU platform.
+*/
+static void* picbuf_alloc(int size) {
+    return malloc(size);
+}
+
+/* Function to free memory allocated for a picture buffer:
+   This function might need to be modified according to the O/S or CPU platform.
+*/
+static void picbuf_free(void* p) {
+    if (p) {
+        free(p);
+    }
+}
+
+static int imgb_addref(oapv_imgb_t* imgb) {
+    return atomic_inc(&imgb->refcnt);
+}
+
+static int imgb_getref(oapv_imgb_t* imgb) {
+    return imgb->refcnt;
+}
+
+static int imgb_release(oapv_imgb_t* imgb) {
+    int refcnt, i;
+    refcnt = atomic_dec(&imgb->refcnt);
+    if (refcnt == 0) {
+        for (i = 0; i < OAPV_MAX_CC; i++) {
+            if (imgb->baddr[i]) picbuf_free(imgb->baddr[i]);
+        }
+        free(imgb);
+    }
+    return refcnt;
+}
+
+static oapv_imgb_t* imgb_create(int w, int h, int cs) {
+    int i, bd;
+    oapv_imgb_t* imgb;
+
+    imgb = (oapv_imgb_t*)malloc(sizeof(oapv_imgb_t));
+    if (imgb == NULL) goto ERR;
+    memset(imgb, 0, sizeof(oapv_imgb_t));
+
+    bd = OAPV_CS_GET_BYTE_DEPTH(cs); /* byte unit */
+
+    imgb->w[0] = w;
+    imgb->h[0] = h;
+    switch (OAPV_CS_GET_FORMAT(cs)) {
+        case OAPV_CF_YCBCR400:
+            imgb->w[1] = imgb->w[2] = w;
+            imgb->h[1] = imgb->h[2] = h;
+            imgb->np = 1;
+            break;
+        case OAPV_CF_YCBCR420:
+            imgb->w[1] = imgb->w[2] = (w + 1) >> 1;
+            imgb->h[1] = imgb->h[2] = (h + 1) >> 1;
+            imgb->np = 3;
+            break;
+        case OAPV_CF_YCBCR422:
+            imgb->w[1] = imgb->w[2] = (w + 1) >> 1;
+            imgb->h[1] = imgb->h[2] = h;
+            imgb->np = 3;
+            break;
+        case OAPV_CF_YCBCR444:
+            imgb->w[1] = imgb->w[2] = w;
+            imgb->h[1] = imgb->h[2] = h;
+            imgb->np = 3;
+            break;
+        case OAPV_CF_YCBCR4444:
+            imgb->w[1] = imgb->w[2] = imgb->w[3] = w;
+            imgb->h[1] = imgb->h[2] = imgb->h[3] = h;
+            imgb->np = 4;
+            break;
+        case OAPV_CF_PLANAR2:
+            imgb->w[1] = w;
+            imgb->h[1] = h;
+            imgb->np = 2;
+            break;
+        default:
+            goto ERR;
+    }
+
+    for (i = 0; i < imgb->np; i++) {
+        // width and height need to be aligned to macroblock size
+        imgb->aw[i] = ALIGN_VAL(imgb->w[i], OAPV_MB_W);
+        imgb->s[i] = imgb->aw[i] * bd;
+        imgb->ah[i] = ALIGN_VAL(imgb->h[i], OAPV_MB_H);
+        imgb->e[i] = imgb->ah[i];
+
+        imgb->bsize[i] = imgb->s[i] * imgb->e[i];
+        imgb->a[i] = imgb->baddr[i] = picbuf_alloc(imgb->bsize[i]);
+        memset(imgb->a[i], 0, imgb->bsize[i]);
+    }
+    imgb->cs = cs;
+    imgb->addref = imgb_addref;
+    imgb->getref = imgb_getref;
+    imgb->release = imgb_release;
+
+    imgb->addref(imgb); /* increase reference count */
+    return imgb;
+
+ERR:
+    if (imgb) {
+        for (int i = 0; i < OAPV_MAX_CC; i++) {
+            if (imgb->a[i]) picbuf_free(imgb->a[i]);
+        }
+        free(imgb);
+    }
+    return NULL;
+}
+
+#endif  // ANDROID_C2_SOFT_APV_COMMON_H__
\ No newline at end of file
diff --git a/media/codec2/components/apv/C2SoftApvDec.cpp b/media/codec2/components/apv/C2SoftApvDec.cpp
new file mode 100644
index 0000000..77305ce
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvDec.cpp
@@ -0,0 +1,1349 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftApvDec"
+#include <log/log.h>
+
+#include <android_media_swcodec_flags.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+#include "C2SoftApvDec.h"
+
+#include <cutils/properties.h>
+
+const char* MEDIA_MIMETYPE_VIDEO_APV = "video/apv";
+
+#define MAX_NUM_FRMS (1)  // supports only 1-frame output
+#define FRM_IDX (0)       // supports only 1-frame output
+// check generic frame or not
+#define IS_NON_AUX_FRM(frm)                              \
+    (((frm)->pbu_type == OAPV_PBU_TYPE_PRIMARY_FRAME) || \
+     ((frm)->pbu_type == OAPV_PBU_TYPE_NON_PRIMARY_FRAME))
+// check auxiliary frame or not
+#define IS_AUX_FRM(frm) (!(IS_NON_AUX_FRM(frm)))
+#define OUTPUT_CSP_NATIVE (0)
+#define OUTPUT_CSP_P210 (1)
+
+namespace android {
+namespace {
+constexpr char COMPONENT_NAME[] = "c2.android.apv.decoder";
+constexpr uint32_t kDefaultOutputDelay = 8;
+constexpr uint32_t kMaxOutputDelay = 16;
+constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
+}  // namespace
+
+class C2SoftApvDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+  public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+        : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
+                                            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_APV) {
+        noPrivateBuffers();  // TODO: account for our buffers here.
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
+
+        addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                             .withConstValue(new C2ComponentAttributesSetting(
+                                     C2Component::ATTRIB_IS_TEMPORAL))
+                             .build());
+
+        addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+                             .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 4096),
+                                     C2F(mSize, height).inRange(2, 4096),
+                             })
+                             .withSetter(SizeSetter)
+                             .build());
+
+        addParameter(
+                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                        .withDefault(new C2StreamProfileLevelInfo::input(
+                                0u, C2Config::PROFILE_APV_422_10))
+                        .withFields(
+                                {C2F(mProfileLevel, profile).oneOf({C2Config::PROFILE_APV_422_10}),
+                                 C2F(mProfileLevel, level)
+                                         .oneOf({
+                                                C2Config::LEVEL_APV_1_BAND_0,
+                                                C2Config::LEVEL_APV_1_1_BAND_0,
+                                                C2Config::LEVEL_APV_2_BAND_0,
+                                                C2Config::LEVEL_APV_2_1_BAND_0,
+                                                C2Config::LEVEL_APV_3_BAND_0,
+                                                C2Config::LEVEL_APV_3_1_BAND_0,
+                                                C2Config::LEVEL_APV_4_BAND_0,
+                                                C2Config::LEVEL_APV_4_1_BAND_0,
+                                                C2Config::LEVEL_APV_5_BAND_0,
+                                                C2Config::LEVEL_APV_5_1_BAND_0,
+                                                C2Config::LEVEL_APV_6_BAND_0,
+                                                C2Config::LEVEL_APV_6_1_BAND_0,
+                                                C2Config::LEVEL_APV_7_BAND_0,
+                                                C2Config::LEVEL_APV_7_1_BAND_0,
+                                                C2Config::LEVEL_APV_1_BAND_1,
+                                                C2Config::LEVEL_APV_1_1_BAND_1,
+                                                C2Config::LEVEL_APV_2_BAND_1,
+                                                C2Config::LEVEL_APV_2_1_BAND_1,
+                                                C2Config::LEVEL_APV_3_BAND_1,
+                                                C2Config::LEVEL_APV_3_1_BAND_1,
+                                                C2Config::LEVEL_APV_4_BAND_1,
+                                                C2Config::LEVEL_APV_4_1_BAND_1,
+                                                C2Config::LEVEL_APV_5_BAND_1,
+                                                C2Config::LEVEL_APV_5_1_BAND_1,
+                                                C2Config::LEVEL_APV_6_BAND_1,
+                                                C2Config::LEVEL_APV_6_1_BAND_1,
+                                                C2Config::LEVEL_APV_7_BAND_1,
+                                                C2Config::LEVEL_APV_7_1_BAND_1,
+                                                C2Config::LEVEL_APV_1_BAND_2,
+                                                C2Config::LEVEL_APV_1_1_BAND_2,
+                                                C2Config::LEVEL_APV_2_BAND_2,
+                                                C2Config::LEVEL_APV_2_1_BAND_2,
+                                                C2Config::LEVEL_APV_3_BAND_2,
+                                                C2Config::LEVEL_APV_3_1_BAND_2,
+                                                C2Config::LEVEL_APV_4_BAND_2,
+                                                C2Config::LEVEL_APV_4_1_BAND_2,
+                                                C2Config::LEVEL_APV_5_BAND_2,
+                                                C2Config::LEVEL_APV_5_1_BAND_2,
+                                                C2Config::LEVEL_APV_6_BAND_2,
+                                                C2Config::LEVEL_APV_6_1_BAND_2,
+                                                C2Config::LEVEL_APV_7_BAND_2,
+                                                C2Config::LEVEL_APV_7_1_BAND_2,
+                                                C2Config::LEVEL_APV_1_BAND_3,
+                                                C2Config::LEVEL_APV_1_1_BAND_3,
+                                                C2Config::LEVEL_APV_2_BAND_3,
+                                                C2Config::LEVEL_APV_2_1_BAND_3,
+                                                C2Config::LEVEL_APV_3_BAND_3,
+                                                C2Config::LEVEL_APV_3_1_BAND_3,
+                                                C2Config::LEVEL_APV_4_BAND_3,
+                                                C2Config::LEVEL_APV_4_1_BAND_3,
+                                                C2Config::LEVEL_APV_5_BAND_3,
+                                                C2Config::LEVEL_APV_5_1_BAND_3,
+                                                C2Config::LEVEL_APV_6_BAND_3,
+                                                C2Config::LEVEL_APV_6_1_BAND_3,
+                                                C2Config::LEVEL_APV_7_BAND_3,
+                                                C2Config::LEVEL_APV_7_1_BAND_3,
+                                                 })})
+                        .withSetter(ProfileLevelSetter, mSize)
+                        .build());
+
+        mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoInput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoInput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoInputSetter)
+                             .build());
+
+        mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoOutput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoOutput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoOutputSetter)
+                             .build());
+
+        // default static info
+        C2HdrStaticMetadataStruct defaultStaticInfo{};
+        helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
+        addParameter(
+                DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
+                        .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
+                        .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
+                                     C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
+                                     C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
+                                     C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
+                        .withSetter(HdrStaticInfoSetter)
+                        .build());
+
+        addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+                             .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 4096, 2),
+                                     C2F(mSize, height).inRange(2, 4096, 2),
+                             })
+                             .withSetter(MaxPictureSizeSetter, mSize)
+                             .build());
+
+        addParameter(
+                DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+                        .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
+                        .withFields({
+                                C2F(mMaxInputSize, value).any(),
+                        })
+                        .calculatedAs(MaxInputSizeSetter, mMaxSize)
+                        .build());
+
+        C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
+        std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+                C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+        defaultColorInfo = C2StreamColorInfo::output::AllocShared(
+                {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        helper->addStructDescriptors<C2ChromaOffsetStruct>();
+        addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+                             .withConstValue(defaultColorInfo)
+                             .build());
+
+        addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsTuning::output(
+                                     0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mDefaultColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mDefaultColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mDefaultColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mDefaultColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(DefaultColorAspectsSetter)
+                             .build());
+
+        addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsInfo::input(
+                                     0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mCodedColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mCodedColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mCodedColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mCodedColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(CodedColorAspectsSetter)
+                             .build());
+
+        addParameter(
+                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                        .withDefault(new C2StreamColorAspectsInfo::output(
+                                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                        .withFields(
+                                {C2F(mColorAspects, range)
+                                         .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+                                 C2F(mColorAspects, primaries)
+                                         .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                  C2Color::PRIMARIES_OTHER),
+                                 C2F(mColorAspects, transfer)
+                                         .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                  C2Color::TRANSFER_OTHER),
+                                 C2F(mColorAspects, matrix)
+                                         .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                  C2Color::MATRIX_OTHER)})
+                        .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+                        .build());
+
+        // TODO: support more formats?
+        std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+        }
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)AHARDWAREBUFFER_FORMAT_YCbCr_P210)) {
+            pixelFormats.push_back(AHARDWAREBUFFER_FORMAT_YCbCr_P210);
+        }
+
+        // If the surface (implementation-defined) format isn't listed among the supported
+        // formats, there is no way to know when the color format has been configured for
+        // surface output. Listing it is necessary to be able to choose a 10-bit format
+        // while decoding 10-bit clips in surface mode.
+        pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+        addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+                             .withDefault(new C2StreamPixelFormatInfo::output(
+                                     0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                             .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+                             .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+                             .build());
+    }
+
+    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
+                          C2P<C2StreamPictureSizeInfo::output>& me) {
+        (void)mayBlock;
+        ALOGV("%s - %d x %d", __FUNCTION__, me.v.width, me.v.height);
+        C2R res = C2R::Ok();
+        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+            me.set().width = oldMe.v.width;
+        }
+        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+            me.set().height = oldMe.v.height;
+        }
+        return res;
+    }
+
+    static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
+                                    const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        ALOGV("%s - %d x %d", __FUNCTION__, me.v.width, me.v.height);
+        // TODO: get max width/height from the size's field helpers vs.
+        // hardcoding
+        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
+        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
+        return C2R::Ok();
+    }
+
+    static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
+                                  const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
+        (void)mayBlock;
+        ALOGV("%s", __FUNCTION__);
+        // assume compression ratio of 2, but enforce a floor
+        me.set().value =
+                c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
+                       kMinInputBufferSize);
+        return C2R::Ok();
+    }
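As a rough illustration of the sizing heuristic above (3072 bytes budgeted per 64x64 tile, clamped from below), here is a standalone sketch; the floor value and the resolution used are illustrative only and are not taken from the component:

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Standalone sketch of the MaxInputSizeSetter budget. kMinInputBufferSizeExample is an
// assumed placeholder, not the component's actual kMinInputBufferSize.
constexpr uint32_t kMinInputBufferSizeExample = 2 * 1024 * 1024;

uint32_t maxInputSizeFor(uint32_t width, uint32_t height) {
    uint32_t tiles = ((width + 63) / 64) * ((height + 63) / 64);
    return std::max(tiles * 3072u, kMinInputBufferSizeExample);
}

int main() {
    // 1920x1080 -> 30 x 17 = 510 tiles -> 1,566,720 bytes, so the 2 MiB floor wins here.
    std::printf("%u\n", maxInputSizeFor(1920, 1080));
    return 0;
}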
+
+    static C2R DefaultColorAspectsSetter(bool mayBlock,
+                                         C2P<C2StreamColorAspectsTuning::output>& me) {
+        (void)mayBlock;
+        ALOGV("%s - range: %u, primary: %u, transfer: %u, matrix: %u", __FUNCTION__, me.v.range,
+              me.v.primaries, me.v.transfer, me.v.matrix);
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
+        (void)mayBlock;
+        ALOGV("%s - range: %u, primaries: %u, transfer: %u, matrix: %u", __func__, me.v.range,
+              me.v.primaries, me.v.transfer, me.v.matrix);
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+                                  const C2P<C2StreamColorAspectsTuning::output>& def,
+                                  const C2P<C2StreamColorAspectsInfo::input>& coded) {
+        (void)mayBlock;
+        ALOGV("%s", __FUNCTION__);
+        // take default values for all unspecified fields, and coded values for specified ones
+        ALOGV("%s - coded range: %u, primaries: %u, transfer: %u, matrix: %u",
+             __func__, coded.v.range, coded.v.primaries, coded.v.transfer, coded.v.matrix);
+        me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+        me.set().primaries =
+                coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
+        me.set().transfer =
+                coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
+        me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+        ALOGV("%s - me.v.range = %u, me.v.primaries = %u, me.v.transfer = %u, me.v.matrix = %u",
+              __func__, me.v.range, me.v.primaries, me.v.transfer, me.v.matrix);
+        return C2R::Ok();
+    }
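The per-field merge rule implemented by ColorAspectsSetter above can be pictured with a tiny standalone sketch; the enum values below are stand-ins, not the real C2Color constants:

#include <cstdint>
#include <cstdio>

// "Coded value wins if specified, otherwise fall back to the default" -
// the same rule is applied to range, primaries, transfer and matrix.
enum : uint32_t { UNSPECIFIED = 0, BT709 = 1, BT2020 = 6 };  // illustrative values only

static uint32_t merge(uint32_t coded, uint32_t def) {
    return coded == UNSPECIFIED ? def : coded;
}

int main() {
    std::printf("%u\n", merge(UNSPECIFIED, BT709));  // bitstream silent -> default (1)
    std::printf("%u\n", merge(BT2020, BT709));       // bitstream specified -> coded (6)
    return 0;
}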
+
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
+                                  const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        ALOGV("%s", __FUNCTION__);
+        (void)size;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
+        ALOGV("%s - mDefaultColorAspects: %u", __FUNCTION__, mDefaultColorAspects->primaries);
+        return mDefaultColorAspects;
+    }
+
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
+        ALOGV("%s - mColorAspects: %u", __FUNCTION__, mColorAspects->primaries);
+        return mColorAspects;
+    }
+
+    static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
+        (void)mayBlock;
+        ALOGV("%s", __FUNCTION__);
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
+        (void)mayBlock;
+        ALOGV("%s", __FUNCTION__);
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    // unsafe getters
+    std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
+        return mPixelFormat;
+    }
+
+    static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
+        (void)mayBlock;
+        ALOGV("%s", __FUNCTION__);
+        if (me.v.mastering.red.x > 1) {
+            me.set().mastering.red.x = 1;
+        }
+        if (me.v.mastering.red.y > 1) {
+            me.set().mastering.red.y = 1;
+        }
+        if (me.v.mastering.green.x > 1) {
+            me.set().mastering.green.x = 1;
+        }
+        if (me.v.mastering.green.y > 1) {
+            me.set().mastering.green.y = 1;
+        }
+        if (me.v.mastering.blue.x > 1) {
+            me.set().mastering.blue.x = 1;
+        }
+        if (me.v.mastering.blue.y > 1) {
+            me.set().mastering.blue.y = 1;
+        }
+        if (me.v.mastering.white.x > 1) {
+            me.set().mastering.white.x = 1;
+        }
+        if (me.v.mastering.white.y > 1) {
+            me.set().mastering.white.y = 1;
+        }
+        if (me.v.mastering.maxLuminance > 65535.0) {
+            me.set().mastering.maxLuminance = 65535.0;
+        }
+        if (me.v.mastering.minLuminance > 6.5535) {
+            me.set().mastering.minLuminance = 6.5535;
+        }
+        if (me.v.maxCll > 65535.0) {
+            me.set().maxCll = 65535.0;
+        }
+        if (me.v.maxFall > 65535.0) {
+            me.set().maxFall = 65535.0;
+        }
+        return C2R::Ok();
+    }
+
+  private:
+    std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+    std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+    std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+    std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+    std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+    std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+    std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
+    std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
+    std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
+};
+
+static void ivd_aligned_free(void* ctxt, void* mem) {
+    (void)ctxt;
+    free(mem);
+}
+
+C2SoftApvDec::C2SoftApvDec(const char* name, c2_node_id_t id,
+                           const std::shared_ptr<IntfImpl>& intfImpl)
+    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+      mIntf(intfImpl),
+      mDecHandle(nullptr),
+      mOutBufferFlush(nullptr),
+      mIvColorformat(IV_YUV_420P),
+      mOutputDelay(kDefaultOutputDelay),
+      mHeaderDecoded(false),
+      mOutIndex(0u),
+      mHalPixelFormat(HAL_PIXEL_FORMAT_YV12),
+      mWidth(320),
+      mHeight(240),
+      mSignalledOutputEos(false),
+      mSignalledError(false) {
+    oapvdHandle = NULL;
+    oapvmHandle = NULL;
+    outputCsp = OUTPUT_CSP_NATIVE;
+}
+
+C2SoftApvDec::~C2SoftApvDec() {
+    onRelease();
+}
+
+c2_status_t C2SoftApvDec::onInit() {
+    ALOGV("%s", __FUNCTION__);
+    status_t err = initDecoder();
+    return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftApvDec::onStop() {
+    ALOGV("%s", __FUNCTION__);
+    if (OK != resetDecoder()) return C2_CORRUPTED;
+    resetPlugin();
+    return C2_OK;
+}
+
+void C2SoftApvDec::onReset() {
+    ALOGV("%s", __FUNCTION__);
+    (void)onStop();
+}
+
+status_t C2SoftApvDec::deleteDecoder() {
+    ALOGV("%s", __FUNCTION__);
+    if (oapvdHandle) {
+        oapvd_delete(oapvdHandle);
+        oapvdHandle = NULL;
+    }
+    if (oapvmHandle) {
+        oapvm_delete(oapvmHandle);
+        oapvmHandle = NULL;
+    }
+    for (int i = 0; i < ofrms.num_frms; i++) {
+        if (ofrms.frm[i].imgb != NULL) {
+            ofrms.frm[i].imgb->release(ofrms.frm[i].imgb);
+            ofrms.frm[i].imgb = NULL;
+        }
+    }
+    return OK;
+}
+
+void C2SoftApvDec::onRelease() {
+    ALOGV("%s", __FUNCTION__);
+    (void)deleteDecoder();
+    if (mOutBufferFlush) {
+        ivd_aligned_free(nullptr, mOutBufferFlush);
+        mOutBufferFlush = nullptr;
+    }
+    if (mOutBlock) {
+        mOutBlock.reset();
+    }
+}
+
+c2_status_t C2SoftApvDec::onFlush_sm() {
+    ALOGV("%s", __FUNCTION__);
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+    return C2_OK;
+}
+
+status_t C2SoftApvDec::createDecoder() {
+    ALOGV("%s", __FUNCTION__);
+    return OK;
+}
+
+status_t C2SoftApvDec::initDecoder() {
+    int ret;
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+
+    mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        mPixelFormatInfo = mIntf->getPixelFormat_l();
+        ALOGW("Hal pixel format = %d", mPixelFormatInfo->value);
+    }
+    memset(&cdesc, 0, sizeof(oapvd_cdesc_t));
+
+    cdesc.threads = 1;  // default
+    oapvdHandle = oapvd_create(&cdesc, &ret);
+    if (oapvdHandle == NULL) {
+        ALOGE("ERROR: cannot create APV decoder (err=%d)\n", ret);
+        return C2_NO_INIT;
+    }
+
+    memset(&ofrms, 0, sizeof(oapv_frms_t));
+
+    oapvmHandle = oapvm_create(&ret);
+    if (OAPV_FAILED(ret)) {
+        ALOGE("oapvm create failed");
+        oapvd_delete(oapvdHandle);
+        oapvdHandle = NULL;
+        return C2_NO_INIT;
+    }
+
+    ALOGV("oapvd init done");
+    return OK;
+}
+
+status_t C2SoftApvDec::setFlushMode() {
+    ALOGV("%s", __FUNCTION__);
+    return OK;
+}
+
+status_t C2SoftApvDec::resetDecoder() {
+    ALOGV("%s", __FUNCTION__);
+    return OK;
+}
+
+void C2SoftApvDec::resetPlugin() {
+    ALOGV("%s", __FUNCTION__);
+    mSignalledOutputEos = false;
+    if (mOutBlock) {
+        mOutBlock.reset();
+    }
+}
+
+void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+    uint32_t flags = 0;
+    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+        flags |= C2FrameData::FLAG_END_OF_STREAM;
+        ALOGV("signalling eos");
+    }
+    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->workletsProcessed = 1u;
+}
+
+void C2SoftApvDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                              const std::shared_ptr<C2GraphicBlock>& block) {
+    std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        buffer->setInfo(mIntf->getColorAspects_l());
+    }
+
+    class FillWork {
+      public:
+        FillWork(uint32_t flags, C2WorkOrdinalStruct ordinal,
+                 const std::shared_ptr<C2Buffer>& buffer)
+            : mFlags(flags), mOrdinal(ordinal), mBuffer(buffer) {}
+        ~FillWork() = default;
+
+        void operator()(const std::unique_ptr<C2Work>& work) {
+            work->worklets.front()->output.flags = (C2FrameData::flags_t)mFlags;
+            work->worklets.front()->output.buffers.clear();
+            work->worklets.front()->output.ordinal = mOrdinal;
+            work->workletsProcessed = 1u;
+            work->result = C2_OK;
+            if (mBuffer) {
+                work->worklets.front()->output.buffers.push_back(mBuffer);
+            }
+            ALOGV("timestamp = %lld, index = %lld, w/%s buffer", mOrdinal.timestamp.peekll(),
+                  mOrdinal.frameIndex.peekll(), mBuffer ? "" : "o");
+        }
+
+      private:
+        const uint32_t mFlags;
+        const C2WorkOrdinalStruct mOrdinal;
+        const std::shared_ptr<C2Buffer> mBuffer;
+    };
+
+    auto fillWork = [buffer](const std::unique_ptr<C2Work>& work) {
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.buffers.push_back(buffer);
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+    };
+
+    if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+        bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+        // TODO: Check if cloneAndSend can be avoided by tracking number of frames remaining
+        if (eos) {
+            if (buffer) {
+                mOutIndex = index;
+                C2WorkOrdinalStruct outOrdinal = work->input.ordinal;
+                cloneAndSend(mOutIndex, work,
+                             FillWork(C2FrameData::FLAG_INCOMPLETE, outOrdinal, buffer));
+                buffer.reset();
+            }
+        } else {
+            fillWork(work);
+        }
+    } else {
+        finish(index, fillWork);
+    }
+}
+
+static void copyBufferFromYUV420ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+                                       const uint8_t* srcY, const uint8_t* srcU,
+                                       const uint8_t* srcV, size_t srcYStride, size_t srcUStride,
+                                       size_t srcVStride, size_t dstYStride, size_t dstUStride,
+                                       size_t dstVStride, uint32_t width, uint32_t height) {
+    for (size_t i = 0; i < height; ++i) {
+        memcpy(dstY, srcY, width);
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        memcpy(dstU, srcU, width / 2);
+        memcpy(dstV, srcV, width / 2);
+        dstU += dstUStride;
+        srcU += srcUStride;
+        dstV += dstVStride;
+        srcV += srcVStride;
+    }
+}
+
+static void copyBufferP210(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
+            const uint16_t *srcUV, size_t srcYStride, size_t srcUVStride, size_t dstYStride,
+            size_t dstUVStride, size_t width, size_t height) {
+    for (size_t y = 0; y < height; ++y) {
+        memcpy(dstY, srcY, width * sizeof(uint16_t));
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    for (size_t y = 0; y < height; ++y) {
+        memcpy(dstUV, srcUV, width * sizeof(uint16_t));
+        srcUV += srcUVStride;
+        dstUV += dstUVStride;
+    }
+}
+
+static void copyBufferFromYUV422ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+                                       const uint8_t* srcY, const uint8_t* srcU,
+                                       const uint8_t* srcV, size_t srcYStride, size_t srcUStride,
+                                       size_t srcVStride, size_t dstYStride, size_t dstUStride,
+                                       size_t dstVStride, uint32_t width, uint32_t height) {
+    for (size_t i = 0; i < height; ++i) {
+        memcpy(dstY, srcY, width);
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        memcpy(dstU, srcU, width / 2);
+        memcpy(dstV, srcV, width / 2);
+        dstU += dstUStride;
+        srcU += srcUStride * 2;
+        dstV += dstVStride;
+        srcV += srcVStride * 2;
+    }
+}
+
+static void copyBufferFromYUV42010bitToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+                                            const uint16_t* srcU, const uint16_t* srcV,
+                                            size_t srcYStride, size_t srcUStride, size_t srcVStride,
+                                            size_t dstYStride, size_t dstUVStride, size_t width,
+                                            size_t height) {
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            dstY[x] = srcY[x] << 6;
+        }
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    for (size_t y = 0; y < height / 2; ++y) {
+        for (size_t x = 0; x < width / 2; ++x) {
+            dstUV[2 * x] = srcU[x] << 6;
+            dstUV[2 * x + 1] = srcV[x] << 6;
+        }
+        srcU += srcUStride;
+        srcV += srcVStride;
+        dstUV += dstUVStride;
+    }
+}
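The << 6 in the copy loops above MSB-aligns a 10-bit sample inside a 16-bit P010 word; a small self-contained sketch of that bit layout, independent of the decoder:

#include <cstdint>
#include <cstdio>

int main() {
    // P010 stores 10-bit samples in the upper bits of a 16-bit container.
    uint16_t sample10 = 0x3FF;        // maximum 10-bit value
    uint16_t p010 = sample10 << 6;    // 0xFFC0; the low 6 bits are zero padding
    uint16_t back = p010 >> 6;        // 0x3FF again
    std::printf("stored=0x%04X recovered=0x%04X\n", p010, back);
    return 0;
}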
+
+static void copyBufferFromYUV42210bitToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+                                            const uint16_t* srcU, const uint16_t* srcV,
+                                            size_t srcYStride, size_t srcUStride, size_t srcVStride,
+                                            size_t dstYStride, size_t dstUVStride, size_t width,
+                                            size_t height) {
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            dstY[x] = srcY[x] << 6;
+        }
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    for (size_t y = 0; y < height / 2; ++y) {
+        for (size_t x = 0; x < width / 2; ++x) {
+            dstUV[2 * x] = srcU[x] << 6;
+            dstUV[2 * x + 1] = srcV[x] << 6;
+        }
+        srcU += srcUStride * 2;
+        srcV += srcVStride * 2;
+        dstUV += dstUVStride;
+    }
+}
+
+static void copyBufferFromP210ToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+                                     const uint16_t* srcUV, size_t srcYStride, size_t srcUVStride,
+                                     size_t dstYStride, size_t dstUVStride, size_t width,
+                                     size_t height) {
+    for (size_t y = 0; y < height; ++y) {
+        memcpy(dstY, srcY, width * sizeof(uint16_t));
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    for (size_t y = 0; y < height / 2; ++y) {
+        memcpy(dstUV, srcUV, width * 2);
+        srcUV += srcUVStride * 2;
+        dstUV += dstUVStride;
+    }
+}
+
+static void copyBufferFromYUV42010bitToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+                                            const uint16_t* srcY, const uint16_t* srcU,
+                                            const uint16_t* srcV, size_t srcYStride,
+                                            size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                            size_t dstUStride, size_t dstVStride, uint32_t width,
+                                            uint32_t height) {
+    for (size_t i = 0; i < height; ++i) {
+        for (size_t j = 0; j < width; ++j) {
+            dstY[i * dstYStride + j] = (srcY[i * srcYStride + j] >> 2) & 0xFF;
+        }
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        for (size_t j = 0; j < width / 2; ++j) {
+            dstU[i * dstUStride + j] = (srcU[i * srcUStride + j] >> 2) & 0xFF;
+        }
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        for (size_t j = 0; j < width / 2; ++j) {
+            dstV[i * dstVStride + j] = (srcV[i * srcVStride + j] >> 2) & 0xFF;
+        }
+    }
+}
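The 8-bit output paths above simply drop the two least-significant bits of each 10-bit sample (no rounding or dithering); for instance:

#include <cstdint>
#include <cstdio>

int main() {
    // 10-bit to 8-bit by truncation, as in the >> 2 loops above.
    uint16_t sample10 = 0x2AB;                 // 683 in 10 bits
    uint8_t sample8 = (sample10 >> 2) & 0xFF;  // 170 (0xAA); the low 2 bits are discarded
    std::printf("0x%02X\n", sample8);
    return 0;
}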
+
+static void copyBufferFromYUV42210bitToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+                                            const uint16_t* srcY, const uint16_t* srcU,
+                                            const uint16_t* srcV, size_t srcYStride,
+                                            size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                            size_t dstUStride, size_t dstVStride, uint32_t width,
+                                            uint32_t height) {
+    for (size_t i = 0; i < height; ++i) {
+        for (size_t j = 0; j < width; ++j) {
+            dstY[i * dstYStride + j] = (srcY[i * srcYStride + j] >> 2) & 0xFF;
+        }
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        for (size_t j = 0; j < width / 2; ++j) {
+            dstU[i * dstUStride + j] = (srcU[i * srcUStride * 2 + j] >> 2) & 0xFF;
+        }
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        for (size_t j = 0; j < width / 2; ++j) {
+            dstV[i * dstVStride + j] = (srcV[i * srcVStride * 2 + j] >> 2) & 0xFF;
+        }
+    }
+}
+
+static void copyBufferFromP210ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+                                     const uint16_t* srcY, const uint16_t* srcUV, size_t srcYStride,
+                                     size_t srcUVStride, size_t dstYStride, size_t dstUStride,
+                                     size_t dstVStride, size_t width, size_t height) {
+    for (size_t i = 0; i < height; ++i) {
+        for (size_t j = 0; j < width; ++j) {
+            dstY[i * dstYStride + j] = (srcY[i * srcYStride + j] >> 8) & 0xFF;
+        }
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        for (size_t j = 0; j < width / 2; ++j) {
+            dstV[i * dstVStride + j] = (srcUV[i * srcUVStride * 2 + j * 2] >> 8) & 0xFF;
+            dstU[i * dstUStride + j] = (srcUV[i * srcUVStride * 2 + j * 2 + 1] >> 8) & 0xFF;
+        }
+    }
+}
+
+void C2SoftApvDec::process(const std::unique_ptr<C2Work>& work,
+                           const std::shared_ptr<C2BlockPool>& pool) {
+    // Initialize output work
+    work->result = C2_OK;
+    work->workletsProcessed = 0u;
+    work->worklets.front()->output.configUpdate.clear();
+    work->worklets.front()->output.flags = work->input.flags;
+    if (mSignalledError || mSignalledOutputEos) {
+        work->result = C2_BAD_VALUE;
+        return;
+    }
+
+    int ret = 0;
+    size_t inOffset = 0u;
+    size_t inSize = 0u;
+    C2ReadView rView = mDummyReadView;
+    if (!work->input.buffers.empty()) {
+        rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+        inSize = rView.capacity();
+        if (inSize && rView.error()) {
+            ALOGE("read view map failed %d", rView.error());
+            work->result = C2_CORRUPTED;
+            return;
+        }
+    }
+
+    bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
+    bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+
+    ALOGV("in buffer attr. size %zu timestamp %llu frameindex %d, flags %x", inSize,
+          work->input.ordinal.timestamp.peekull(), (int)work->input.ordinal.frameIndex.peeku(),
+          work->input.flags);
+
+    if (codecConfig) {
+        fillEmptyWork(work);
+        return;
+    }
+
+    if (inSize > 0) {
+        uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);
+        oapv_au_info_t aui;
+        oapv_bitb_t bitb;
+        bitb.addr = bitstream + 4;  // skip the 4-byte au size field
+        bitb.ssize = inSize - 4;
+
+        if (OAPV_FAILED(oapvd_info(bitb.addr, bitb.ssize, &aui))) {
+            ALOGE("cannot get information from bitstream");
+            return;
+        }
+
+        /* create decoding frame buffers */
+        ofrms.num_frms = aui.num_frms;
+        if (ofrms.num_frms <= 0) {
+            ALOGE("Parse error - no output frame(%d)", ofrms.num_frms);
+            fillEmptyWork(work);
+            return;
+        }
+        for (int i = 0; i < ofrms.num_frms; i++) {
+            oapv_frm_info_t* finfo = &aui.frm_info[FRM_IDX];
+            oapv_frm_t* frm = &ofrms.frm[i];
+
+            if (mWidth != finfo->w || mHeight != finfo->h) {
+                mWidth = finfo->w;
+                mHeight = finfo->h;
+            }
+
+            if (frm->imgb != NULL && (frm->imgb->w[0] != finfo->w || frm->imgb->h[0] != finfo->h)) {
+                frm->imgb->release(frm->imgb);
+                frm->imgb = NULL;
+            }
+
+            if (frm->imgb == NULL) {
+                if (outputCsp == OUTPUT_CSP_P210) {
+                    frm->imgb = imgb_create(finfo->w, finfo->h, OAPV_CS_P210);
+                } else {
+                    frm->imgb = imgb_create(finfo->w, finfo->h, finfo->cs);
+                }
+                if (frm->imgb == NULL) {
+                    ALOGE("cannot allocate image buffer (w:%d, h:%d, cs:%d)", finfo->w, finfo->h,
+                          finfo->cs);
+                    fillEmptyWork(work);
+                    return;
+                }
+            }
+        }
+
+        oapvd_stat_t stat;
+        ret = oapvd_decode(oapvdHandle, &bitb, &ofrms, oapvmHandle, &stat);
+        if (bitb.ssize != stat.read) {
+            ALOGW("decode done, input size: %d, processed size: %d", bitb.ssize, stat.read);
+        }
+
+        if (OAPV_FAILED(ret)) {
+            ALOGE("failed to decode bitstream\n");
+            fillEmptyWork(work);
+            return;
+        }
+
+        status_t err = outputBuffer(pool, work);
+        if (err == NOT_ENOUGH_DATA) {
+            if (inSize > 0) {
+                ALOGV("Maybe non-display frame at %lld.", work->input.ordinal.frameIndex.peekll());
+                // send the work back with empty buffer.
+                inSize = 0;
+            }
+        } else if (err != OK) {
+            ALOGD("Error while getting the output frame out");
+            // work->result would be already filled; do fillEmptyWork() below to
+            // send the work back.
+            inSize = 0;
+        }
+    }
+
+    if (eos) {
+        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+        mSignalledOutputEos = true;
+    } else if (!inSize) {
+        fillEmptyWork(work);
+    }
+}
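process() above assumes each input buffer carries a 4-byte access-unit size field, which it skips before handing the payload to oapvd_info()/oapvd_decode(). A hedged sketch of how a caller might frame such a buffer; the big-endian packing is an assumption for illustration only, since the component never interprets the field itself:

#include <cstdint>
#include <cstring>
#include <vector>

// Prefix a raw APV access unit with a 4-byte size field, matching the 4 bytes
// that the decoder's process() skips. Byte order here is assumed, for illustration.
static std::vector<uint8_t> frameAccessUnit(const uint8_t* au, uint32_t auSize) {
    std::vector<uint8_t> out(4 + auSize);
    out[0] = static_cast<uint8_t>((auSize >> 24) & 0xFF);
    out[1] = static_cast<uint8_t>((auSize >> 16) & 0xFF);
    out[2] = static_cast<uint8_t>((auSize >> 8) & 0xFF);
    out[3] = static_cast<uint8_t>(auSize & 0xFF);
    std::memcpy(out.data() + 4, au, auSize);
    return out;
}

int main() {
    const uint8_t au[] = {0x01, 0x02, 0x03};
    std::vector<uint8_t> buf = frameAccessUnit(au, sizeof(au));
    return buf.size() == 7 ? 0 : 1;
}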
+
+void C2SoftApvDec::getVuiParams(VuiColorAspects* buffer) {
+    VuiColorAspects vuiColorAspects;
+    vuiColorAspects.primaries = buffer->primaries;
+    vuiColorAspects.transfer = buffer->transfer;
+    vuiColorAspects.coeffs = buffer->coeffs;
+    vuiColorAspects.fullRange = buffer->fullRange;
+
+    // convert vui aspects to C2 values if changed
+    if (!(vuiColorAspects == mBitstreamColorAspects)) {
+        mBitstreamColorAspects = vuiColorAspects;
+        ColorAspects sfAspects;
+        C2StreamColorAspectsInfo::input codedAspects = { 0u };
+        ColorUtils::convertIsoColorAspectsToCodecAspects(
+                vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+                vuiColorAspects.fullRange, sfAspects);
+        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+        }
+        ALOGV("colorAspects: primaries:%d, transfer:%d, coeffs:%d, fullRange:%d",
+                codedAspects.primaries, codedAspects.transfer, codedAspects.matrix,
+                codedAspects.range);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+    }
+}
+
+status_t C2SoftApvDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                                    const std::unique_ptr<C2Work>& work) {
+    if (!(work && pool)) return BAD_VALUE;
+
+    oapv_imgb_t* imgbOutput = nullptr;
+    std::shared_ptr<C2GraphicBlock> block;
+
+    if (ofrms.num_frms > 0) {
+        for (int i = 0; i < ofrms.num_frms; i++) {
+            oapv_frm_t* frm = &ofrms.frm[i];
+            if (frm->pbu_type == OAPV_PBU_TYPE_PRIMARY_FRAME) {
+                imgbOutput = frm->imgb;
+                break;
+            }
+        }
+        if (imgbOutput == nullptr) {
+            ALOGW("No OAPV primary frame");
+            return NOT_ENOUGH_DATA;
+        }
+    } else {
+        ALOGW("No output frames");
+        return NOT_ENOUGH_DATA;
+    }
+    bool isMonochrome = OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CS_YCBCR400;
+
+    // TODO: use bitstream color aspect after vui parsing
+    VuiColorAspects colorAspect;
+    colorAspect.primaries = 2;
+    colorAspect.transfer = 2;
+    colorAspect.coeffs = 2;
+    colorAspect.fullRange = 1;
+    getVuiParams(&colorAspect);
+
+    uint32_t format = HAL_PIXEL_FORMAT_YV12;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
+    if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10 &&
+        mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+        IntfImpl::Lock lock = mIntf->lock();
+        codedColorAspects = mIntf->getColorAspects_l();
+
+        bool allowRGBA1010102 = false;
+        if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+            codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+            codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+            allowRGBA1010102 = true;
+        }
+        format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+    }
+
+    if (mHalPixelFormat != format) {
+        C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
+        } else {
+            ALOGE("Config update pixelFormat failed");
+            mSignalledError = true;
+            work->workletsProcessed = 1u;
+            work->result = C2_CORRUPTED;
+            return UNKNOWN_ERROR;
+        }
+        mHalPixelFormat = format;
+    }
+    ALOGV("mHalPixelFormat: %u, format: %d", mHalPixelFormat, format);
+
+    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+    // Note: aligning the height only to a multiple of 2 does not work here; both
+    // dimensions are aligned to 16.
+    c2_status_t err =
+            pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 16), format, usage, &block);
+
+    if (err != C2_OK) {
+        ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+        work->result = err;
+        return UNKNOWN_ERROR;
+    }
+
+    C2GraphicView wView = block->map().get();
+    if (wView.error()) {
+        ALOGE("graphic view map failed %d", wView.error());
+        work->result = C2_CORRUPTED;
+        return UNKNOWN_ERROR;
+    }
+
+    ALOGV("provided (%dx%d) required (%dx%d)", block->width(), block->height(), mWidth, mHeight);
+
+    uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
+    uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
+
+    C2PlanarLayout layout = wView.layout();
+    size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+    size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+    if(format == AHARDWAREBUFFER_FORMAT_YCbCr_P210) {
+        if(OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
+            const uint16_t *srcY = (const uint16_t *)imgbOutput->a[0];
+            const uint16_t *srcU = (const uint16_t *)imgbOutput->a[1];
+            const uint16_t *srcV = (const uint16_t *)imgbOutput->a[2];
+            size_t srcYStride = imgbOutput->s[0] / 2;
+            size_t srcUStride = imgbOutput->s[1] / 2;
+            size_t srcVStride = imgbOutput->s[2] / 2;
+            dstYStride /= 2;
+            dstUStride /= 2;
+            dstVStride /= 2;
+            ALOGV("OAPV_CS_P210 buffer");
+            copyBufferP210((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU,
+                            srcYStride, srcUStride, dstYStride, dstUStride, mWidth, mHeight);
+        } else {
+            ALOGE("Unsupported conversion from bd:%d, format: %d(%s), to format: %d(%s)",
+                OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs),
+                OAPV_CS_GET_FORMAT(imgbOutput->cs),
+                OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420 ?
+                    "YUV420" : (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422 ?
+                                 "YUV422" : "UNKNOWN"),
+                format,
+                format == HAL_PIXEL_FORMAT_YCBCR_P010 ?
+                    "P010" : (format == HAL_PIXEL_FORMAT_YCBCR_420_888 ?
+                         "YUV420" : (format == HAL_PIXEL_FORMAT_YV12 ? "YV12" : "UNKNOWN"))
+                );
+        }
+    } else if(format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+        if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
+            const uint16_t* srcY = (const uint16_t*)imgbOutput->a[0];
+            const uint16_t* srcU = (const uint16_t*)imgbOutput->a[1];
+            const uint16_t* srcV = (const uint16_t*)imgbOutput->a[2];
+            size_t srcYStride = imgbOutput->s[0] / 2;
+            size_t srcUStride = imgbOutput->s[1] / 2;
+            size_t srcVStride = imgbOutput->s[2] / 2;
+            dstYStride /= 2;
+            dstUStride /= 2;
+            dstVStride /= 2;
+            if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420) {
+                ALOGV("OAPV_CS_YUV420 10bit to P010");
+                copyBufferFromYUV42010bitToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV,
+                                                srcYStride, srcUStride, srcVStride, dstYStride,
+                                                dstUStride, mWidth, mHeight);
+            } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422) {
+                ALOGV("OAPV_CS_YUV422 10bit to P010");
+                copyBufferFromYUV42210bitToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV,
+                                                srcYStride, srcUStride, srcVStride, dstYStride,
+                                                dstUStride, mWidth, mHeight);
+            } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_PLANAR2) {
+                ALOGV("OAPV_CS_P210 to P010");
+                copyBufferFromP210ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcYStride,
+                                         srcUStride, dstYStride, dstUStride, mWidth, mHeight);
+            } else {
+                ALOGE("Unsupported conversion format: %d", OAPV_CS_GET_FORMAT(imgbOutput->cs));
+            }
+        } else {
+            ALOGE("Unsupported conversion from bd:%d, format: %d(%s), to format: %d(%s)",
+                  OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs), OAPV_CS_GET_FORMAT(imgbOutput->cs),
+                  OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420
+                          ? "YUV420"
+                          : (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422 ? "YUV422"
+                                                                                    : "UNKNOWN"),
+                  format,
+                  format == HAL_PIXEL_FORMAT_YCBCR_P010
+                          ? "P010"
+                          : (format == HAL_PIXEL_FORMAT_YCBCR_420_888
+                                     ? "YUV420"
+                                     : (format == HAL_PIXEL_FORMAT_YV12 ? "YV12" : "UNKNOWN")));
+        }
+    } else {  // HAL_PIXEL_FORMAT_YV12
+        if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
+            const uint16_t* srcY = (const uint16_t*)imgbOutput->a[0];
+            const uint16_t* srcV = (const uint16_t*)imgbOutput->a[1];
+            const uint16_t* srcU = (const uint16_t*)imgbOutput->a[2];
+            size_t srcYStride = imgbOutput->s[0] / 2;
+            size_t srcVStride = imgbOutput->s[1] / 2;
+            size_t srcUStride = imgbOutput->s[2] / 2;
+            if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420) {
+                ALOGV("OAPV_CS_YUV420 10bit to YV12");
+                copyBufferFromYUV42010bitToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+                                                srcUStride, srcVStride, dstYStride, dstUStride,
+                                                dstVStride, mWidth, mHeight);
+            } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422) {
+                ALOGV("OAPV_CS_YUV422 10bit to YV12");
+                copyBufferFromYUV42210bitToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+                                                srcUStride, srcVStride, dstYStride, dstUStride,
+                                                dstVStride, mWidth, mHeight);
+            } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_PLANAR2) {
+                ALOGV("OAPV_CS_P210 to YV12");
+                copyBufferFromP210ToYV12(dstY, dstU, dstV, srcY, srcV, srcYStride, srcVStride,
+                                         dstYStride, dstUStride, dstVStride, mWidth, mHeight);
+            } else {
+                ALOGE("Unsupported conversion format: %d", OAPV_CS_GET_FORMAT(imgbOutput->cs));
+            }
+        } else if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 8) {
+            const uint8_t* srcY = (const uint8_t*)imgbOutput->a[0];
+            const uint8_t* srcV = (const uint8_t*)imgbOutput->a[1];
+            const uint8_t* srcU = (const uint8_t*)imgbOutput->a[2];
+            size_t srcYStride = imgbOutput->s[0];
+            size_t srcVStride = imgbOutput->s[1];
+            size_t srcUStride = imgbOutput->s[2];
+            if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420) {
+                ALOGV("OAPV_CS_YUV420 to YV12");
+                copyBufferFromYUV420ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+                                           srcUStride, srcVStride, dstYStride, dstUStride,
+                                           dstVStride, mWidth, mHeight);
+            } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422) {
+                ALOGV("OAPV_CS_YUV422 to YV12");
+                copyBufferFromYUV422ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+                                           srcUStride, srcVStride, dstYStride, dstUStride,
+                                           dstVStride, mWidth, mHeight);
+            } else {
+                ALOGE("Unsupported conversion format: %d", OAPV_CS_GET_FORMAT(imgbOutput->cs));
+            }
+        } else {
+            ALOGE("Unsupported conversion from bd:%d, format: %d(%s), to format: %d(%s)",
+                  OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs), OAPV_CS_GET_FORMAT(imgbOutput->cs),
+                  OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420
+                          ? "YUV420"
+                          : (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422 ? "YUV422"
+                                                                                    : "UNKNOWN"),
+                  format,
+                  format == HAL_PIXEL_FORMAT_YCBCR_P010
+                          ? "P010"
+                          : (format == HAL_PIXEL_FORMAT_YCBCR_420_888
+                                     ? "YUV420"
+                                     : (format == HAL_PIXEL_FORMAT_YV12 ? "YV12" : "UNKNOWN")));
+        }
+    }
+
+    finishWork(work->input.ordinal.frameIndex.peekll(), work, std::move(block));
+    return OK;
+}
+
+c2_status_t C2SoftApvDec::drainInternal(uint32_t drainMode,
+                                        const std::shared_ptr<C2BlockPool>& pool,
+                                        const std::unique_ptr<C2Work>& work) {
+    if (drainMode == NO_DRAIN) {
+        ALOGW("drain with NO_DRAIN: no-op");
+        return C2_OK;
+    }
+    if (drainMode == DRAIN_CHAIN) {
+        ALOGW("DRAIN_CHAIN not supported");
+        return C2_OMITTED;
+    }
+
+    if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
+        fillEmptyWork(work);
+    }
+    return C2_OK;
+}
+
+c2_status_t C2SoftApvDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+    return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftApvDecFactory : public C2ComponentFactory {
+  public:
+    C2SoftApvDecFactory()
+        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+                  GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+    virtual c2_status_t createComponent(c2_node_id_t id,
+                                        std::shared_ptr<C2Component>* const component,
+                                        std::function<void(C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(
+                new C2SoftApvDec(COMPONENT_NAME, id,
+                                 std::make_shared<C2SoftApvDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            std::function<void(C2ComponentInterface*)> deleter) override {
+        *interface = std::shared_ptr<C2ComponentInterface>(
+                new SimpleInterface<C2SoftApvDec::IntfImpl>(
+                        COMPONENT_NAME, id, std::make_shared<C2SoftApvDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual ~C2SoftApvDecFactory() override = default;
+
+  private:
+    std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+}  // namespace android
+
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+    if (!android::media::swcodec::flags::apv_software_codec()) {
+        ALOGV("APV SW Codec is not enabled");
+        return nullptr;
+    }
+    return new ::android::C2SoftApvDecFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+        ::C2ComponentFactory* factory) {
+    delete factory;
+}
diff --git a/media/codec2/components/apv/C2SoftApvDec.h b/media/codec2/components/apv/C2SoftApvDec.h
new file mode 100644
index 0000000..05afdb2
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvDec.h
@@ -0,0 +1,156 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_APV_DEC_H_
+#define ANDROID_C2_SOFT_APV_DEC_H_
+
+#include <media/stagefright/foundation/ColorUtils.h>
+
+#include <SimpleC2Component.h>
+#include <inttypes.h>
+#include <atomic>
+
+#include "oapv.h"
+#include <C2SoftApvCommon.h>
+
+typedef unsigned int UWORD32;
+
+typedef enum {
+    IV_CHROMA_NA = 0xFFFFFFFF,
+    IV_YUV_420P = 0x1,
+    IV_YUV_422P = 0x2,
+    IV_420_UV_INTL = 0x3,
+    IV_YUV_422IBE = 0x4,
+    IV_YUV_422ILE = 0x5,
+    IV_YUV_444P = 0x6,
+    IV_YUV_411P = 0x7,
+    IV_GRAY = 0x8,
+    IV_RGB_565 = 0x9,
+    IV_RGB_24 = 0xa,
+    IV_YUV_420SP_UV = 0xb,
+    IV_YUV_420SP_VU = 0xc,
+    IV_YUV_422SP_UV = 0xd,
+    IV_YUV_422SP_VU = 0xe
+
+} IV_COLOR_FORMAT_T;
+
+typedef struct {
+    /**
+     * u4_size of the structure
+     */
+    UWORD32 u4_size;
+
+    /**
+     * Pointer to the API function pointer table of the codec
+     */
+    void* pv_fxns;
+
+    /**
+     * Pointer to the handle of the codec
+     */
+    void* pv_codec_handle;
+} iv_obj_t;
+
+namespace android {
+
+struct C2SoftApvDec final : public SimpleC2Component {
+    class IntfImpl;
+
+    C2SoftApvDec(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    virtual ~C2SoftApvDec();
+
+    // From SimpleC2Component
+    c2_status_t onInit() override;
+    c2_status_t onStop() override;
+    void onReset() override;
+    void onRelease() override;
+    c2_status_t onFlush_sm() override;
+    void process(const std::unique_ptr<C2Work>& work,
+                 const std::shared_ptr<C2BlockPool>& pool) override;
+    c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override;
+
+  private:
+    status_t createDecoder();
+    status_t initDecoder();
+    bool isConfigured() const;
+    void drainDecoder();
+    status_t setFlushMode();
+    status_t resetDecoder();
+    void resetPlugin();
+    status_t deleteDecoder();
+    void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                    const std::shared_ptr<C2GraphicBlock>& block);
+    void drainRingBuffer(const std::unique_ptr<C2Work>& work,
+                         const std::shared_ptr<C2BlockPool>& pool, bool eos);
+    c2_status_t drainInternal(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool,
+                              const std::unique_ptr<C2Work>& work);
+
+    status_t outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                          const std::unique_ptr<C2Work>& work);
+
+    std::shared_ptr<IntfImpl> mIntf;
+    iv_obj_t* mDecHandle;
+    uint8_t* mOutBufferFlush;
+    IV_COLOR_FORMAT_T mIvColorformat;
+    uint32_t mOutputDelay;
+    bool mHeaderDecoded;
+    std::atomic_uint64_t mOutIndex;
+    std::shared_ptr<C2GraphicBlock> mOutBlock;
+
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
+    std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+    uint32_t mHalPixelFormat;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    bool mSignalledOutputEos;
+    bool mSignalledError;
+    // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
+    // converting them to C2 values for each frame
+    struct VuiColorAspects {
+        uint8_t primaries;
+        uint8_t transfer;
+        uint8_t coeffs;
+        uint8_t fullRange;
+
+        // default color aspects
+        VuiColorAspects()
+            : primaries(C2Color::PRIMARIES_UNSPECIFIED),
+            transfer(C2Color::TRANSFER_UNSPECIFIED),
+            coeffs(C2Color::MATRIX_UNSPECIFIED),
+            fullRange(C2Color::RANGE_UNSPECIFIED) { }
+
+        bool operator==(const VuiColorAspects &o) {
+            return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
+                && fullRange == o.fullRange;
+        }
+    } mBitstreamColorAspects;
+
+    oapvd_t oapvdHandle;
+    oapvm_t oapvmHandle;
+    oapvd_cdesc_t cdesc;
+    oapv_frms_t ofrms;
+
+    int outputCsp;
+
+    void getVuiParams(VuiColorAspects* buffer);
+
+    C2_DO_NOT_COPY(C2SoftApvDec);
+};
+
+}  // namespace android
+
+#endif
diff --git a/media/codec2/components/apv/C2SoftApvEnc.cpp b/media/codec2/components/apv/C2SoftApvEnc.cpp
new file mode 100644
index 0000000..9c5e0b2
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvEnc.cpp
@@ -0,0 +1,1230 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftApvEnc"
+#include <log/log.h>
+
+#include <android_media_swcodec_flags.h>
+
+#include <media/hardware/VideoAPI.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/AUtils.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+#include <media/stagefright/foundation/ABitReader.h>
+#include <util/C2InterfaceHelper.h>
+#include <cmath>
+#include "C2SoftApvEnc.h"
+
+namespace android {
+
+namespace {
+
+constexpr char COMPONENT_NAME[] = "c2.android.apv.encoder";
+constexpr uint32_t kMinOutBufferSize = 524288;
+constexpr uint32_t kMaxBitstreamBufSize = 16 * 1024 * 1024;
+constexpr int32_t kApvQpMin = 0;
+constexpr int32_t kApvQpMax = 51;
+constexpr int32_t kApvDefaultQP = 32;
+
+#define PROFILE_APV_DEFAULT 0
+#define LEVEL_APV_DEFAULT 0
+#define MAX_NUM_FRMS (1)  // supports only 1-frame input
+
+}  // namespace
+
+class C2SoftApvEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
+  public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+        : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_ENCODER,
+                                            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_APV) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noTimeStretch();
+        setDerivedInstance(this);
+
+        addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                             .withConstValue(new C2ComponentAttributesSetting(
+                                     C2Component::ATTRIB_IS_TEMPORAL))
+                             .build());
+
+        addParameter(DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
+                             .withConstValue(new C2StreamUsageTuning::input(
+                                     0u, (uint64_t)C2MemoryUsage::CPU_READ))
+                             .build());
+
+        // matches size limits in codec library
+        addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+                             .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 4096, 2),
+                                     C2F(mSize, height).inRange(2, 4096, 2),
+                             })
+                             .withSetter(SizeSetter)
+                             .build());
+
+        // matches limits in codec library
+        addParameter(DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+                             .withDefault(new C2StreamBitrateModeTuning::output(
+                                     0u, C2Config::BITRATE_VARIABLE))
+                             .withFields({C2F(mBitrateMode, value)
+                                                  .oneOf({C2Config::BITRATE_CONST,
+                                                          C2Config::BITRATE_VARIABLE,
+                                                          C2Config::BITRATE_IGNORE})})
+                             .withSetter(Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+                             .build());
+
+        addParameter(DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+                             .withDefault(new C2StreamBitrateInfo::output(0u, 512000))
+                             .withFields({C2F(mBitrate, value).inRange(512000, 240000000)})
+                             .withSetter(BitrateSetter)
+                             .build());
+
+        addParameter(DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
+                             .withDefault(new C2StreamFrameRateInfo::output(0u, 15.))
+                             .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+                             .withSetter(Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+                             .build());
+
+        addParameter(DefineParam(mQuality, C2_PARAMKEY_QUALITY)
+                             .withDefault(new C2StreamQualityTuning::output(0u, 40))
+                             .withFields({C2F(mQuality, value).inRange(0, 100)})
+                             .withSetter(Setter<decltype(*mQuality)>::NonStrictValueWithNoDeps)
+                             .build());
+
+        addParameter(
+                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                        .withDefault(new C2StreamProfileLevelInfo::output(
+                                0u, C2Config::PROFILE_APV_422_10, LEVEL_APV_1_BAND_0))
+                        .withFields({
+                                C2F(mProfileLevel, profile).oneOf({C2Config::PROFILE_APV_422_10}),
+                                C2F(mProfileLevel, level)
+                                        .oneOf({
+                                                C2Config::LEVEL_APV_1_BAND_0,
+                                                C2Config::LEVEL_APV_1_1_BAND_0,
+                                                C2Config::LEVEL_APV_2_BAND_0,
+                                                C2Config::LEVEL_APV_2_1_BAND_0,
+                                                C2Config::LEVEL_APV_3_BAND_0,
+                                                C2Config::LEVEL_APV_3_1_BAND_0,
+                                                C2Config::LEVEL_APV_4_BAND_0,
+                                                C2Config::LEVEL_APV_4_1_BAND_0,
+                                                C2Config::LEVEL_APV_5_BAND_0,
+                                                C2Config::LEVEL_APV_5_1_BAND_0,
+                                                C2Config::LEVEL_APV_6_BAND_0,
+                                                C2Config::LEVEL_APV_6_1_BAND_0,
+                                                C2Config::LEVEL_APV_7_BAND_0,
+                                                C2Config::LEVEL_APV_7_1_BAND_0,
+                                                C2Config::LEVEL_APV_1_BAND_1,
+                                                C2Config::LEVEL_APV_1_1_BAND_1,
+                                                C2Config::LEVEL_APV_2_BAND_1,
+                                                C2Config::LEVEL_APV_2_1_BAND_1,
+                                                C2Config::LEVEL_APV_3_BAND_1,
+                                                C2Config::LEVEL_APV_3_1_BAND_1,
+                                                C2Config::LEVEL_APV_4_BAND_1,
+                                                C2Config::LEVEL_APV_4_1_BAND_1,
+                                                C2Config::LEVEL_APV_5_BAND_1,
+                                                C2Config::LEVEL_APV_5_1_BAND_1,
+                                                C2Config::LEVEL_APV_6_BAND_1,
+                                                C2Config::LEVEL_APV_6_1_BAND_1,
+                                                C2Config::LEVEL_APV_7_BAND_1,
+                                                C2Config::LEVEL_APV_7_1_BAND_1,
+                                                C2Config::LEVEL_APV_1_BAND_2,
+                                                C2Config::LEVEL_APV_1_1_BAND_2,
+                                                C2Config::LEVEL_APV_2_BAND_2,
+                                                C2Config::LEVEL_APV_2_1_BAND_2,
+                                                C2Config::LEVEL_APV_3_BAND_2,
+                                                C2Config::LEVEL_APV_3_1_BAND_2,
+                                                C2Config::LEVEL_APV_4_BAND_2,
+                                                C2Config::LEVEL_APV_4_1_BAND_2,
+                                                C2Config::LEVEL_APV_5_BAND_2,
+                                                C2Config::LEVEL_APV_5_1_BAND_2,
+                                                C2Config::LEVEL_APV_6_BAND_2,
+                                                C2Config::LEVEL_APV_6_1_BAND_2,
+                                                C2Config::LEVEL_APV_7_BAND_2,
+                                                C2Config::LEVEL_APV_7_1_BAND_2,
+                                                C2Config::LEVEL_APV_1_BAND_3,
+                                                C2Config::LEVEL_APV_1_1_BAND_3,
+                                                C2Config::LEVEL_APV_2_BAND_3,
+                                                C2Config::LEVEL_APV_2_1_BAND_3,
+                                                C2Config::LEVEL_APV_3_BAND_3,
+                                                C2Config::LEVEL_APV_3_1_BAND_3,
+                                                C2Config::LEVEL_APV_4_BAND_3,
+                                                C2Config::LEVEL_APV_4_1_BAND_3,
+                                                C2Config::LEVEL_APV_5_BAND_3,
+                                                C2Config::LEVEL_APV_5_1_BAND_3,
+                                                C2Config::LEVEL_APV_6_BAND_3,
+                                                C2Config::LEVEL_APV_6_1_BAND_3,
+                                                C2Config::LEVEL_APV_7_BAND_3,
+                                                C2Config::LEVEL_APV_7_1_BAND_3,
+                                        }),
+                        })
+                        .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
+                        .build());
+
+        addParameter(DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsInfo::input(
+                                     0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(ColorAspectsSetter)
+                             .build());
+
+        addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsInfo::output(
+                                     0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mCodedColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mCodedColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mCodedColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mCodedColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(CodedColorAspectsSetter, mColorAspects)
+                             .build());
+        std::vector<uint32_t> pixelFormats = {
+            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+        };
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+        }
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)AHARDWAREBUFFER_FORMAT_YCbCr_P210)) {
+            pixelFormats.push_back(AHARDWAREBUFFER_FORMAT_YCbCr_P210);
+        }
+        addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+                             .withDefault(new C2StreamPixelFormatInfo::input(
+                                     0u, HAL_PIXEL_FORMAT_YCBCR_P010))
+                             .withFields({C2F(mPixelFormat, value).oneOf({pixelFormats})})
+                             .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+                             .build());
+    }
+
+    static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me) {
+        (void)mayBlock;
+        C2R res = C2R::Ok();
+        if (me.v.value < 1000000) {
+            me.set().value = 1000000;
+        }
+        return res;
+    }
+
+    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input>& oldMe,
+                          C2P<C2StreamPictureSizeInfo::input>& me) {
+        (void)mayBlock;
+        C2R res = C2R::Ok();
+        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+            me.set().width = oldMe.v.width;
+        }
+        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+            me.set().height = oldMe.v.height;
+        }
+        return res;
+    }
+
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::output>& me,
+                                  const C2P<C2StreamPictureSizeInfo::input>& size,
+                                  const C2P<C2StreamFrameRateInfo::output>& frameRate,
+                                  const C2P<C2StreamBitrateInfo::output>& bitrate) {
+        (void)mayBlock;
+        if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+            me.set().profile = C2Config::PROFILE_APV_422_10;
+        }
+        if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+            me.set().level = LEVEL_APV_1_BAND_0;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+                                       const C2P<C2StreamColorAspectsInfo::input>& coded) {
+        (void)mayBlock;
+        me.set().range = coded.v.range;
+        me.set().primaries = coded.v.primaries;
+        me.set().transfer = coded.v.transfer;
+        me.set().matrix = coded.v.matrix;
+        return C2R::Ok();
+    }
+
+    uint32_t getProfile_l() const {
+        int32_t profile = PROFILE_UNUSED;
+
+        switch (mProfileLevel->profile) {
+            case C2Config::PROFILE_APV_422_10:
+                profile = 33;
+                break;
+            case C2Config::PROFILE_APV_422_12:
+                profile = 44;
+                break;
+            case C2Config::PROFILE_APV_444_10:
+                profile = 55;
+                break;
+            case C2Config::PROFILE_APV_444_12:
+                profile = 66;
+                break;
+            case C2Config::PROFILE_APV_4444_10:
+                profile = 77;
+                break;
+            case C2Config::PROFILE_APV_4444_12:
+                profile = 88;
+                break;
+            case C2Config::PROFILE_APV_400_10:
+                profile = 99;
+                break;
+            default:
+                ALOGD("Unrecognized profile: %x", mProfileLevel->profile);
+        }
+        return profile;
+    }
+
+    uint32_t getLevel_l() const {
+        int32_t level = LEVEL_UNUSED;
+
+        // TODO: Add Band settings
+        switch (mProfileLevel->level) {
+            case C2Config::LEVEL_APV_1_BAND_0:
+                level = 10;
+                break;
+            case C2Config::LEVEL_APV_1_1_BAND_0:
+                level = 11;
+                break;
+            case C2Config::LEVEL_APV_2_BAND_0:
+                level = 20;
+                break;
+            case C2Config::LEVEL_APV_2_1_BAND_0:
+                level = 21;
+                break;
+            case C2Config::LEVEL_APV_3_BAND_0:
+                level = 30;
+                break;
+            case C2Config::LEVEL_APV_3_1_BAND_0:
+                level = 31;
+                break;
+            case C2Config::LEVEL_APV_4_BAND_0:
+                level = 40;
+                break;
+            case C2Config::LEVEL_APV_4_1_BAND_0:
+                level = 41;
+                break;
+            case C2Config::LEVEL_APV_5_BAND_0:
+                level = 50;
+                break;
+            case C2Config::LEVEL_APV_5_1_BAND_0:
+                level = 51;
+                break;
+            case C2Config::LEVEL_APV_6_BAND_0:
+                level = 60;
+                break;
+            case C2Config::LEVEL_APV_6_1_BAND_0:
+                level = 61;
+                break;
+            case C2Config::LEVEL_APV_7_BAND_0:
+                level = 70;
+                break;
+            case C2Config::LEVEL_APV_7_1_BAND_0:
+                level = 71;
+                break;
+            default:
+                ALOGD("Unrecognized level: %x", mProfileLevel->level);
+        }
+        // Convert to APV level_idc according to APV spec
+        return level * 3;
+    }
+
+    int32_t getBitrateMode_l() const {
+        int32_t bitrateMode = C2Config::BITRATE_CONST;
+
+        switch (mBitrateMode->value) {
+            case C2Config::BITRATE_CONST:
+                bitrateMode = OAPV_RC_CQP;
+                break;
+            case C2Config::BITRATE_VARIABLE:
+                bitrateMode = OAPV_RC_ABR;
+                break;
+            case C2Config::BITRATE_IGNORE:
+                bitrateMode = 0;
+                break;
+            default:
+                ALOGE("Unrecognized bitrate mode: %x", mBitrateMode->value);
+        }
+        return bitrateMode;
+    }
+
+    std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
+    std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
+    std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
+    std::shared_ptr<C2StreamQualityTuning::output> getQuality_l() const { return mQuality; }
+    std::shared_ptr<C2StreamColorAspectsInfo::input> getColorAspects_l() const {
+        return mColorAspects;
+    }
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
+        return mCodedColorAspects;
+    }
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+        return mPictureQuantization;
+    }
+    std::shared_ptr<C2StreamProfileLevelInfo::output> getProfileLevel_l() const {
+        return mProfileLevel;
+    }
+    std::shared_ptr<C2StreamPixelFormatInfo::input> getPixelFormat_l() const {
+        return mPixelFormat;
+    }
+
+  private:
+    std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+    std::shared_ptr<C2StreamUsageTuning::input> mUsage;
+    std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+    std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+    std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
+    std::shared_ptr<C2StreamQualityTuning::output> mQuality;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
+    std::shared_ptr<C2StreamColorInfo::input> mColorFormat;
+    std::shared_ptr<C2StreamPixelFormatInfo::input> mPixelFormat;
+};
+
+C2SoftApvEnc::C2SoftApvEnc(const char* name, c2_node_id_t id,
+                           const std::shared_ptr<IntfImpl>& intfImpl)
+    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+      mIntf(intfImpl),
+      mColorFormat(OAPV_CF_PLANAR2),
+      mStarted(false),
+      mSignalledEos(false),
+      mSignalledError(false),
+      mOutBlock(nullptr) {
+    reset();
+}
+
+C2SoftApvEnc::~C2SoftApvEnc() {
+    onRelease();
+}
+
+c2_status_t C2SoftApvEnc::onInit() {
+    return C2_OK;
+}
+
+c2_status_t C2SoftApvEnc::onStop() {
+    return C2_OK;
+}
+
+void C2SoftApvEnc::onReset() {
+    releaseEncoder();
+    reset();
+}
+
+void C2SoftApvEnc::onRelease() {
+    releaseEncoder();
+}
+
+c2_status_t C2SoftApvEnc::onFlush_sm() {
+    return C2_OK;
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+    uint32_t flags = 0;
+    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+        flags |= C2FrameData::FLAG_END_OF_STREAM;
+        ALOGV("Signalling EOS");
+    }
+    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->workletsProcessed = 1u;
+}
+
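+// Map the 0-100 quality scale linearly onto the APV QP range:
+// quality 100 -> kApvQpMin, quality 0 -> kApvQpMax.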
+int32_t C2SoftApvEnc::getQpFromQuality(int32_t quality) {
+    int32_t qp = ((kApvQpMin - kApvQpMax) * quality / 100) + kApvQpMax;
+    qp = std::min(qp, (int)kApvQpMax);
+    qp = std::max(qp, (int)kApvQpMin);
+    return qp;
+}
+
+c2_status_t C2SoftApvEnc::reset() {
+    ALOGV("reset");
+    mInitEncoder = false;
+    mStarted = false;
+    mSignalledEos = false;
+    mSignalledError = false;
+    mBitDepth = 10;
+    mMaxFrames = MAX_NUM_FRMS;
+    mReceivedFrames = 0;
+    mReceivedFirstFrame = false;
+    mColorFormat = OAPV_CF_PLANAR2;
+    memset(&mInputFrames, 0, sizeof(mInputFrames));
+    memset(&mReconFrames, 0, sizeof(mReconFrames));
+    return C2_OK;
+}
+
+c2_status_t C2SoftApvEnc::releaseEncoder() {
+    for (int32_t i = 0; i < MAX_NUM_FRMS; i++) {
+        if (mInputFrames.frm[i].imgb != nullptr) {
+            imgb_release(mInputFrames.frm[i].imgb);
+            mInputFrames.frm[i].imgb = nullptr;
+        }
+    }
+
+    if (mBitstreamBuf) {
+        std::free(mBitstreamBuf);
+        mBitstreamBuf = nullptr;
+    }
+    return C2_OK;
+}
+
+c2_status_t C2SoftApvEnc::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+    return drainInternal(drainMode, pool, nullptr);
+}
+
+void C2SoftApvEnc::showEncoderParams(oapve_cdesc_t* cdsc) {
+    std::string title = "APV encoder params:";
+    ALOGD("%s width = %d, height = %d", title.c_str(), cdsc->param[0].w, cdsc->param[0].h);
+    ALOGD("%s FrameRate = %f", title.c_str(),
+          (double)cdsc->param[0].fps_num / cdsc->param[0].fps_den);
+    ALOGD("%s BitRate = %d Kbps", title.c_str(), cdsc->param[0].bitrate);
+    ALOGD("%s QP = %d", title.c_str(), cdsc->param[0].qp);
+    ALOGD("%s profile_idc = %d, level_idc = %d, band_idc = %d", title.c_str(),
+          cdsc->param[0].profile_idc, cdsc->param[0].level_idc / 3, cdsc->param[0].band_idc);
+    ALOGD("%s Bitrate Mode: %d", title.c_str(), cdsc->param[0].rc_type);
+    ALOGD("%s mColorAspects primaries: %d, transfer: %d, matrix: %d, range: %d", title.c_str(),
+          mColorAspects->primaries, mColorAspects->transfer, mColorAspects->matrix,
+          mColorAspects->range);
+    ALOGD("%s mCodedColorAspects primaries: %d, transfer: %d, matrix: %d, range: %d", title.c_str(),
+          mCodedColorAspects->primaries, mCodedColorAspects->transfer, mCodedColorAspects->matrix,
+          mCodedColorAspects->range);
+    ALOGD("%s Input color format: %s", title.c_str(),
+          mColorFormat == OAPV_CF_YCBCR422 ? "YUV422P10LE" : "P210");
+    ALOGD("%s max_num_frms: %d", title.c_str(), cdsc->max_num_frms);
+}
+
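+// Snapshot the configured interface parameters, fill the OpenAPV codec descriptor,
+// create the encoder and metadata handles, and allocate the input frame and
+// bitstream buffers. Calling this again after a successful init is a no-op.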
+c2_status_t C2SoftApvEnc::initEncoder() {
+    if (mInitEncoder) {
+        return C2_OK;
+    }
+    ALOGV("initEncoder");
+
+    mSize = mIntf->getSize_l();
+    mFrameRate = mIntf->getFrameRate_l();
+    mBitrate = mIntf->getBitrate_l();
+    mQuality = mIntf->getQuality_l();
+    mColorAspects = mIntf->getColorAspects_l();
+    mCodedColorAspects = mIntf->getCodedColorAspects_l();
+    mProfileLevel = mIntf->getProfileLevel_l();
+    mPixelFormat = mIntf->getPixelFormat_l();
+
+    mCodecDesc = std::make_unique<oapve_cdesc_t>();
+    if (mCodecDesc == nullptr) {
+        ALOGE("Allocate ctx failed");
+        return C2_NO_INIT;
+    }
+    mCodecDesc->max_bs_buf_size = kMaxBitstreamBufSize;
+    mCodecDesc->max_num_frms = MAX_NUM_FRMS;
+    // TODO: Bound parameters to CPU count
+    mCodecDesc->threads = 4;
+
+    int32_t ret = C2_OK;
+    /* set params */
+    for (int32_t i = 0; i < mMaxFrames; i++) {
+        oapve_param_t* param = &mCodecDesc->param[i];
+        ret = oapve_param_default(param);
+        if (OAPV_FAILED(ret)) {
+            ALOGE("cannot set default parameter");
+            return C2_NO_INIT;
+        }
+        setParams(*param);
+    }
+
+    showEncoderParams(mCodecDesc.get());
+
+    /* create encoder */
+    mEncoderId = oapve_create(mCodecDesc.get(), NULL);
+    if (mEncoderId == NULL) {
+        ALOGE("cannot create APV encoder");
+        return C2_CORRUPTED;
+    }
+
+    /* create metadata */
+    mMetaId = oapvm_create(&ret);
+    if (mMetaId == NULL) {
+        ALOGE("cannot create APV encoder");
+        return C2_NO_MEMORY;
+    }
+
+    /* create image buffers */
+    for (int32_t i = 0; i < mMaxFrames; i++) {
+        if (mBitDepth == 10) {
+            mInputFrames.frm[i].imgb = imgb_create(mCodecDesc->param[0].w, mCodecDesc->param[0].h,
+                                                  OAPV_CS_SET(mColorFormat, mBitDepth, 0));
+            mReconFrames.frm[i].imgb = nullptr;
+        } else {
+            mInputFrames.frm[i].imgb = imgb_create(mCodecDesc->param[0].w, mCodecDesc->param[0].h,
+                                                  OAPV_CS_SET(mColorFormat, 10, 0));
+            mReconFrames.frm[i].imgb = nullptr;
+        }
+    }
+
+    /* allocate bitstream buffer */
+    mBitstreamBuf = new unsigned char[kMaxBitstreamBufSize];
+    if (mBitstreamBuf == nullptr) {
+        ALOGE("cannot allocate bitstream buffer, size= %d", kMaxBitstreamBufSize);
+        return C2_NO_MEMORY;
+    }
+
+    mStarted = true;
+    mInitEncoder = true;
+    return C2_OK;
+}
+
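+// Copy the configured size, frame rate, bitrate, rate-control mode, QP, profile
+// and level into the OpenAPV per-frame parameter block.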
+void C2SoftApvEnc::setParams(oapve_param_t& param) {
+    param.w = mSize->width;
+    param.h = mSize->height;
+    param.fps_num = (int)(mFrameRate->value * 100);
+    param.fps_den = 100;
+    param.bitrate = mBitrate->value / 1000;
+    param.rc_type = mIntf->getBitrateMode_l();
+
+    int ApvQP = kApvDefaultQP;
+    if (param.rc_type == OAPV_RC_CQP) {
+        ApvQP = getQpFromQuality(mQuality->value);
+        ALOGI("Bitrate mode is CQ, so QP value is derived from Quality. Quality is %d, QP is %d",
+              mQuality->value, ApvQP);
+    }
+    param.qp = ApvQP;
+    param.band_idc = 0;  // TODO: Get from the Level setting
+    param.profile_idc = mIntf->getProfile_l();
+    C2Config::level_t level = decisionApvLevel(
+            param.w, param.h, (int)(param.fps_num / param.fps_den), param.bitrate, param.band_idc);
+    if (mProfileLevel->level != level) {
+        mProfileLevel->level = level;
+        ALOGI("Need to update level to %d", mIntf->getLevel_l());
+    }
+    param.level_idc = mIntf->getLevel_l();
+}
+
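+// Validate the input dimensions and convert the input graphic view (RGBA1010102,
+// P010 or NV12) into the encoder's input frame in the configured color format.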
+c2_status_t C2SoftApvEnc::setEncodeArgs(oapv_frms_t* inputFrames, const C2GraphicView* const input,
+                                        uint64_t workIndex) {
+    if (input->width() < mSize->width || input->height() < mSize->height) {
+        /* Expect width height to be configured */
+        ALOGW("unexpected Capacity Aspect %d(%d) x %d(%d)", input->width(), mSize->width,
+              input->height(), mSize->height);
+        return C2_BAD_VALUE;
+    }
+    const C2PlanarLayout& layout = input->layout();
+    uint8_t* yPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t* uPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_U]);
+    uint8_t* vPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_V]);
+    int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+    int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+    uint32_t width = mSize->width;
+    uint32_t height = mSize->height;
+
+    /* width and height must be even */
+    if (width & 1u || height & 1u) {
+        ALOGW("height(%u) and width(%u) must both be even", height, width);
+        return C2_BAD_VALUE;
+    }
+
+    /* Set num frames */
+    inputFrames->num_frms = MAX_NUM_FRMS;
+    inputFrames->frm[mReceivedFrames].group_id = 1;
+    inputFrames->frm[mReceivedFrames].pbu_type = OAPV_PBU_TYPE_PRIMARY_FRAME;
+
+    switch (layout.type) {
+        case C2PlanarLayout::TYPE_RGB:
+            ALOGE("Not supported RGB color format");
+            return C2_BAD_VALUE;
+        case C2PlanarLayout::TYPE_RGBA: {
+            [[fallthrough]];
+        }
+        case C2PlanarLayout::TYPE_YUVA: {
+            ALOGV("Convert from ABGR2101010 to P210");
+            uint16_t *dstY, *dstU, *dstV;
+            dstY = (uint16_t*)inputFrames->frm[0].imgb->a[0];
+            dstU = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+            dstV = (uint16_t*)inputFrames->frm[0].imgb->a[2];
+            convertRGBA1010102ToYUV420Planar16(dstY, dstU, dstV, (uint32_t*)(input->data()[0]),
+                                                layout.planes[layout.PLANE_Y].rowInc / 4, width,
+                                                height, mColorAspects->matrix,
+                                                mColorAspects->range);
+            break;
+        }
+        case C2PlanarLayout::TYPE_YUV: {
+            if (IsP010(*input)) {
+                if (mColorFormat == OAPV_CF_YCBCR422) {
+                    ColorConvertP010ToYUV422P10le(input, inputFrames->frm[0].imgb);
+                } else if (mColorFormat == OAPV_CF_PLANAR2) {
+                    uint16_t *srcY  = (uint16_t*)(input->data()[0]);
+                    uint16_t *srcUV = (uint16_t*)(input->data()[1]);
+                    uint16_t *dstY  = (uint16_t*)inputFrames->frm[0].imgb->a[0];
+                    uint16_t *dstUV = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+                    convertP010ToP210(dstY, dstUV, srcY, srcUV,
+                                      input->width(), input->width(), input->width(),
+                                      input->height());
+                } else {
+                    ALOGE("Not supported color format. %d", mColorFormat);
+                    return C2_BAD_VALUE;
+                }
+            } else if (IsNV12(*input)) {
+                uint8_t  *srcY  = (uint8_t*)input->data()[0];
+                uint8_t  *srcUV = (uint8_t*)input->data()[1];
+                uint16_t *dstY  = (uint16_t*)inputFrames->frm[0].imgb->a[0];
+                uint16_t *dstUV = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+                convertSemiPlanar8ToP210(dstY, dstUV, srcY, srcUV,
+                                         input->width(), input->width(), input->width(),
+                                         input->width(), input->width(), input->height(),
+                                         CONV_FORMAT_I420);
+            } else if (IsYUV420(*input)) {
+                return C2_BAD_VALUE;
+            } else if (IsI420(*input)) {
+                return C2_BAD_VALUE;
+            } else {
+                ALOGE("Not supported color format. %d", mColorFormat);
+                return C2_BAD_VALUE;
+            }
+            break;
+        }
+
+        default:
+            ALOGE("Unrecognized plane type: %d", layout.type);
+            return C2_BAD_VALUE;
+    }
+
+    return C2_OK;
+}
+
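+// Pick the lowest APV level in the requested band whose luma sample-rate and
+// bitrate limits cover the configured stream.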
+C2Config::level_t C2SoftApvEnc::decisionApvLevel(int32_t width, int32_t height, int32_t fps,
+                                                 int32_t bitrate, int32_t band) {
+    C2Config::level_t level = C2Config::LEVEL_APV_1_BAND_0;
+
+    struct LevelLimits {
+        C2Config::level_t level;
+        uint64_t samplesPerSec;
+        uint32_t bitratesOfBand;
+    };
+
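+    // Limit tables per band: {level, max luma samples per second, max bitrate in kbps}.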
+    constexpr LevelLimits kLimitsBand0[] = {
+            {LEVEL_APV_1_BAND_0, 3'041'280, 7'000},
+            {LEVEL_APV_1_1_BAND_0, 6'082'560, 14'000},
+            {LEVEL_APV_2_BAND_0, 15'667'200, 36'000},
+            {LEVEL_APV_2_1_BAND_0, 31'334'400, 71'000},
+            {LEVEL_APV_3_BAND_0, 66'846'720, 101'000},
+            {LEVEL_APV_3_1_BAND_0, 133'693'440, 201'000},
+            {LEVEL_APV_4_BAND_0, 265'420'800, 401'000},
+            {LEVEL_APV_4_1_BAND_0, 530'841'600, 780'000},
+            {LEVEL_APV_5_BAND_0, 1'061'683'200, 1'560'000},
+            {LEVEL_APV_5_1_BAND_0, 2'123'366'400, 3'324'000},
+            {LEVEL_APV_6_BAND_0, 4'777'574'400, 6'648'000},
+            {LEVEL_APV_6_1_BAND_0, 8'493'465'600, 13'296'000},
+            {LEVEL_APV_7_BAND_0, 16'986'931'200, 26'592'000},
+            {LEVEL_APV_7_1_BAND_0, 33'973'862'400, 53'184'000},
+    };
+
+    constexpr LevelLimits kLimitsBand1[] = {
+            {LEVEL_APV_1_BAND_1, 3'041'280, 11'000},
+            {LEVEL_APV_1_1_BAND_1, 6'082'560, 21'000},
+            {LEVEL_APV_2_BAND_1, 15'667'200, 53'000},
+            {LEVEL_APV_2_1_BAND_1, 31'334'400, 106'000},
+            {LEVEL_APV_3_BAND_1, 66'846'720, 151'000},
+            {LEVEL_APV_3_1_BAND_1, 133'693'440, 301'000},
+            {LEVEL_APV_4_BAND_1, 265'420'800, 602'000},
+            {LEVEL_APV_4_1_BAND_1, 530'841'600, 1'170'000},
+            {LEVEL_APV_5_BAND_1, 1'061'683'200, 2'340'000},
+            {LEVEL_APV_5_1_BAND_1, 2'123'366'400, 4'986'000},
+            {LEVEL_APV_6_BAND_1, 4'777'574'400, 9'972'000},
+            {LEVEL_APV_6_1_BAND_1, 8'493'465'600, 19'944'000},
+            {LEVEL_APV_7_BAND_1, 16'986'931'200, 39'888'000},
+            {LEVEL_APV_7_1_BAND_1, 33'973'862'400, 79'776'000},
+    };
+
+    constexpr LevelLimits kLimitsBand2[] = {
+            {LEVEL_APV_1_BAND_2, 3'041'280, 14'000},
+            {LEVEL_APV_1_1_BAND_2, 6'082'560, 28'000},
+            {LEVEL_APV_2_BAND_2, 15'667'200, 71'000},
+            {LEVEL_APV_2_1_BAND_2, 31'334'400, 141'000},
+            {LEVEL_APV_3_BAND_2, 66'846'720, 201'000},
+            {LEVEL_APV_3_1_BAND_2, 133'693'440, 401'000},
+            {LEVEL_APV_4_BAND_2, 265'420'800, 780'000},
+            {LEVEL_APV_4_1_BAND_2, 530'841'600, 1'560'000},
+            {LEVEL_APV_5_BAND_2, 1'061'683'200, 3'324'000},
+            {LEVEL_APV_5_1_BAND_2, 2'123'366'400, 6'648'000},
+            {LEVEL_APV_6_BAND_2, 4'777'574'400, 13'296'000},
+            {LEVEL_APV_6_1_BAND_2, 8'493'465'600, 26'592'000},
+            {LEVEL_APV_7_BAND_2, 16'986'931'200, 53'184'000},
+            {LEVEL_APV_7_1_BAND_2, 33'973'862'400, 106'368'000},
+    };
+
+    constexpr LevelLimits kLimitsBand3[] = {
+            {LEVEL_APV_1_BAND_3, 3'041'280, 21'000},
+            {LEVEL_APV_1_1_BAND_3, 6'082'560, 42'000},
+            {LEVEL_APV_2_BAND_3, 15'667'200, 106'000},
+            {LEVEL_APV_2_1_BAND_3, 31'334'400, 212'000},
+            {LEVEL_APV_3_BAND_3, 66'846'720, 301'000},
+            {LEVEL_APV_3_1_BAND_3, 133'693'440, 602'000},
+            {LEVEL_APV_4_BAND_3, 265'420'800, 1'170'000},
+            {LEVEL_APV_4_1_BAND_3, 530'841'600, 2'340'000},
+            {LEVEL_APV_5_BAND_3, 1'061'683'200, 4'986'000},
+            {LEVEL_APV_5_1_BAND_3, 2'123'366'400, 9'972'000},
+            {LEVEL_APV_6_BAND_3, 4'777'574'400, 19'944'000},
+            {LEVEL_APV_6_1_BAND_3, 8'493'465'600, 39'888'000},
+            {LEVEL_APV_7_BAND_3, 16'986'931'200, 79'776'000},
+            {LEVEL_APV_7_1_BAND_3, 33'973'862'400, 159'552'000},
+    };
+
+    uint64_t samplesPerSec = (uint64_t)width * height * fps;
+    if (band == 0) {
+        for (const LevelLimits& limit : kLimitsBand0) {
+            if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
+                level = limit.level;
+                break;
+            }
+        }
+    } else if (band == 1) {
+        for (const LevelLimits& limit : kLimitsBand1) {
+            if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
+                level = limit.level;
+                break;
+            }
+        }
+    } else if (band == 2) {
+        for (const LevelLimits& limit : kLimitsBand2) {
+            if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
+                level = limit.level;
+                break;
+            }
+        }
+    } else if (band == 3) {
+        for (const LevelLimits& limit : kLimitsBand3) {
+            if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
+                level = limit.level;
+                break;
+            }
+        }
+    } else {
+        ALOGE("Invalid band_idc on calculte level");
+    }
+
+    return level;
+}
+
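+// Convert semi-planar P010 (4:2:0, 10 bits stored in the high bits of each 16-bit
+// sample) to planar YUV422P10LE: repack the luma samples into the low bits,
+// deinterleave the chroma plane and duplicate each chroma row to go from 4:2:0 to 4:2:2.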
+void C2SoftApvEnc::ColorConvertP010ToYUV422P10le(const C2GraphicView* const input,
+                                                 oapv_imgb_t* imgb) {
+    uint32_t width = input->width();
+    uint32_t height = input->height();
+
+    uint8_t* yPlane = (uint8_t*)input->data()[0];
+    auto* uvPlane = (uint8_t*)input->data()[1];
+    uint32_t stride[3];
+    stride[0] = width * 2;
+    stride[1] = stride[2] = width;
+
+    uint8_t *dst, *src;
+    uint16_t tmp;
+    for (int32_t y = 0; y < height; ++y) {
+        src = yPlane + y * stride[0];
+        dst = (uint8_t*)imgb->a[0] + y * stride[0];
+        for (int32_t x = 0; x < stride[0]; x += 2) {
+            tmp = (src[x + 1] << 2) | (src[x] >> 6);
+            dst[x] = tmp & 0xFF;
+            dst[x + 1] = tmp >> 8;
+        }
+    }
+
+    uint8_t *dst_u, *dst_v;
+    for (int32_t y = 0; y < height / 2; ++y) {
+        src = uvPlane + y * stride[1] * 2;
+        dst_u = (uint8_t*)imgb->a[1] + (y * 2) * stride[1];
+        dst_v = (uint8_t*)imgb->a[2] + (y * 2) * stride[2];
+        for (int32_t x = 0; x < stride[1] * 2; x += 4) {
+            tmp = (src[x + 1] << 2) | (src[x] >> 6);  // cb
+            dst_u[x / 2] = tmp & 0xFF;
+            dst_u[x / 2 + 1] = tmp >> 8;
+            dst_u[x / 2 + stride[1]] = dst_u[x / 2];
+            dst_u[x / 2 + stride[1] + 1] = dst_u[x / 2 + 1];
+
+            tmp = (src[x + 3] << 2) | (src[x + 2] >> 6);  // cr
+            dst_v[x / 2] = tmp & 0xFF;
+            dst_v[x / 2 + 1] = tmp >> 8;
+            dst_v[x / 2 + stride[2]] = dst_v[x / 2];
+            dst_v[x / 2 + stride[2] + 1] = dst_v[x / 2 + 1];
+        }
+    }
+}
+
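+// Copy the encoded access unit into a linear output block, emit codec-specific
+// data with the first frame, and complete the work item. Every APV frame is
+// reported as a sync frame.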
+void C2SoftApvEnc::finishWork(uint64_t workIndex, const std::unique_ptr<C2Work>& work,
+                              const std::shared_ptr<C2BlockPool>& pool, oapv_bitb_t* bitb,
+                              oapve_stat_t* stat) {
+    std::shared_ptr<C2LinearBlock> block;
+    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+    c2_status_t status = pool->fetchLinearBlock(stat->write, usage, &block);
+    if (C2_OK != status) {
+        ALOGE("fetchLinearBlock for Output failed with status 0x%x", status);
+        mSignalledError = true;
+        work->result = status;
+        work->workletsProcessed = 1u;
+        return;
+    }
+
+    C2WriteView wView = block->map().get();
+    if (C2_OK != wView.error()) {
+        ALOGE("write view map failed with status 0x%x", wView.error());
+        mSignalledError = true;
+        work->result = wView.error();
+        work->workletsProcessed = 1u;
+        return;
+    }
+    if (!mReceivedFirstFrame) {
+        createCsdData(work, bitb, stat->write);
+        mReceivedFirstFrame = true;
+    }
+
+    memcpy(wView.data(), bitb->addr, stat->write);
+    std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block, 0, stat->write);
+
+    /* All frames are SYNC FRAME */
+    buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(0u /* stream id */,
+                                                                          C2Config::SYNC_FRAME));
+
+    auto fillWork = [buffer](const std::unique_ptr<C2Work>& work) {
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.buffers.push_back(buffer);
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+    };
+    if (work && c2_cntr64_t(workIndex) == work->input.ordinal.frameIndex) {
+        fillWork(work);
+        if (mSignalledEos) {
+            work->worklets.front()->output.flags = C2FrameData::FLAG_END_OF_STREAM;
+        }
+    } else {
+        finish(workIndex, fillWork);
+    }
+}
+
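+// Parse the pbu_header and frame_info fields of the first encoded access unit and
+// repack them into the codec-specific data blob delivered as a config update.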
+void C2SoftApvEnc::createCsdData(const std::unique_ptr<C2Work>& work,
+                                 oapv_bitb_t* bitb,
+                                 uint32_t encodedSize) {
+    if (encodedSize < 31) {
+        ALOGE("the first frame size is too small, so no csd data will be created.");
+        return;
+    }
+    ABitReader reader((uint8_t*)bitb->addr, encodedSize);
+
+    uint8_t number_of_configuration_entry = 0;
+    uint8_t pbu_type = 0;
+    uint8_t number_of_frame_info = 0;
+    bool color_description_present_flag = false;
+    bool capture_time_distance_ignored = false;
+    uint8_t profile_idc = 0;
+    uint8_t level_idc = 0;
+    uint8_t band_idc = 0;
+    uint32_t frame_width_minus1 = 0;
+    uint32_t frame_height_minus1 = 0;
+    uint8_t chroma_format_idc = 0;
+    uint8_t bit_depth_minus8 = 0;
+    uint8_t capture_time_distance = 0;
+    uint8_t color_primaries = 0;
+    uint8_t transfer_characteristics = 0;
+    uint8_t matrix_coefficients = 0;
+
+    /* pbu_header() */
+    reader.skipBits(32);           // pbu_size
+    reader.skipBits(32);           // currReadSize
+    pbu_type = reader.getBits(8);  // pbu_type
+    reader.skipBits(16);           // group_id
+    reader.skipBits(8);            // reserved_zero_8bits
+
+    /* frame info() */
+    profile_idc = reader.getBits(8);            // profile_idc
+    level_idc = reader.getBits(8);              // level_idc
+    band_idc = reader.getBits(3);               // band_idc
+    reader.skipBits(5);                         // reserved_zero_5bits
+    frame_width_minus1 = reader.getBits(32);    // width
+    frame_height_minus1 = reader.getBits(32);   // height
+    chroma_format_idc = reader.getBits(4);      // chroma_format_idc
+    bit_depth_minus8 = reader.getBits(4);       // bit_depth
+    capture_time_distance = reader.getBits(8);  // capture_time_distance
+    reader.skipBits(8);                         // reserved_zero_8bits
+
+    /* frame header() */
+    reader.skipBits(8);  // reserved_zero_8bit
+    color_description_present_flag = reader.getBits(1);  // color_description_present_flag
+    if (color_description_present_flag) {
+        color_primaries = reader.getBits(8);           // color_primaries
+        transfer_characteristics = reader.getBits(8);  // transfer_characteristics
+        matrix_coefficients = reader.getBits(8);       // matrix_coefficients
+    }
+
+    number_of_configuration_entry = 1;  // Assumed to be 1 for real-time encoding on the device.
+    number_of_frame_info = 1;           // Assumed to be 1 for real-time encoding on the device.
+
+    std::vector<uint8_t> csdData;
+    csdData.push_back((uint8_t)0x1);
+    csdData.push_back(number_of_configuration_entry);
+
+    for (uint8_t i = 0; i < number_of_configuration_entry; i++) {
+        csdData.push_back(pbu_type);
+        csdData.push_back(number_of_frame_info);
+        for (uint8_t j = 0; j < number_of_frame_info; j++) {
+            csdData.push_back((uint8_t)((color_description_present_flag << 1) |
+                                      capture_time_distance_ignored));
+            csdData.push_back(profile_idc);
+            csdData.push_back(level_idc);
+            csdData.push_back(band_idc);
+            csdData.push_back((uint8_t)((frame_width_minus1 >> 24) & 0xff));
+            csdData.push_back((uint8_t)((frame_width_minus1 >> 16) & 0xff));
+            csdData.push_back((uint8_t)((frame_width_minus1 >> 8) & 0xff));
+            csdData.push_back((uint8_t)(frame_width_minus1 & 0xff));
+            csdData.push_back((uint8_t)((frame_height_minus1 >> 24) & 0xff));
+            csdData.push_back((uint8_t)((frame_height_minus1 >> 16) & 0xff));
+            csdData.push_back((uint8_t)((frame_height_minus1 >> 8) & 0xff));
+            csdData.push_back((uint8_t)(frame_height_minus1 & 0xff));
+            csdData.push_back((uint8_t)(((chroma_format_idc << 4) & 0xf0) |
+                                      (bit_depth_minus8 & 0xf)));
+            csdData.push_back((uint8_t)(capture_time_distance));
+            if (color_description_present_flag) {
+                csdData.push_back(color_primaries);
+                csdData.push_back(transfer_characteristics);
+                csdData.push_back(matrix_coefficients);
+            }
+        }
+    }
+
+    std::unique_ptr<C2StreamInitDataInfo::output> csd =
+        C2StreamInitDataInfo::output::AllocUnique(csdData.size(), 0u);
+    if (!csd) {
+        ALOGE("CSD allocation failed");
+        mSignalledError = true;
+        work->result = C2_NO_MEMORY;
+        work->workletsProcessed = 1u;
+        return;
+    }
+
+    memcpy(csd->m.value, csdData.data(), csdData.size());
+    work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+}
+
+c2_status_t C2SoftApvEnc::drainInternal(uint32_t drainMode,
+                                        const std::shared_ptr<C2BlockPool>& pool,
+                                        const std::unique_ptr<C2Work>& work) {
+    if (work) {
+        fillEmptyWork(work);
+    }
+    return C2_OK;
+}
+
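+// Encode one work item: initialize the encoder on first use, convert the input
+// buffer into the OpenAPV input frame, run oapve_encode() and hand the resulting
+// bitstream to finishWork().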
+void C2SoftApvEnc::process(const std::unique_ptr<C2Work>& work,
+                           const std::shared_ptr<C2BlockPool>& pool) {
+    c2_status_t error;
+    work->result = C2_OK;
+    work->workletsProcessed = 0u;
+    work->worklets.front()->output.flags = work->input.flags;
+
+    nsecs_t timeDelay = 0;
+    uint64_t workIndex = work->input.ordinal.frameIndex.peekull();
+
+    mSignalledEos = false;
+    mOutBlock = nullptr;
+
+    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+        ALOGV("Got FLAG_END_OF_STREAM");
+        mSignalledEos = true;
+    }
+
+    /* Initialize encoder if not already initialized */
+    if (initEncoder() != C2_OK) {
+        ALOGE("Failed to initialize encoder");
+        mSignalledError = true;
+        work->result = C2_CORRUPTED;
+        work->workletsProcessed = 1u;
+        ALOGE("[%s] Failed to make Codec context", __func__);
+        return;
+    }
+    if (mSignalledError) {
+        ALOGE("[%s] Received signalled error", __func__);
+        return;
+    }
+
+    if (mSignalledEos) {
+        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+        return;
+    }
+
+    if (work->input.buffers.empty()) {
+        return;
+    }
+
+    std::shared_ptr<C2GraphicView> view;
+    std::shared_ptr<C2Buffer> inputBuffer = nullptr;
+    if (!work->input.buffers.empty()) {
+        inputBuffer = work->input.buffers[0];
+        view = std::make_shared<C2GraphicView>(
+                inputBuffer->data().graphicBlocks().front().map().get());
+        if (view->error() != C2_OK) {
+            ALOGE("graphic view map err = %d", view->error());
+            work->workletsProcessed = 1u;
+            return;
+        }
+    }
+    if (!inputBuffer) {
+        fillEmptyWork(work);
+        return;
+    }
+
+    oapve_stat_t stat;
+    auto outBufferSize =
+            mCodecDesc->param[mReceivedFrames].w * mCodecDesc->param[mReceivedFrames].h * 4;
+    if (!mOutBlock) {
+        C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+        c2_status_t err = pool->fetchLinearBlock(outBufferSize, usage, &mOutBlock);
+        if (err != C2_OK) {
+            work->result = err;
+            work->workletsProcessed = 1u;
+            ALOGE("fetchLinearBlock has failed. err = %d", err);
+            return;
+        }
+    }
+
+    C2WriteView wView = mOutBlock->map().get();
+    if (wView.error() != C2_OK) {
+        work->result = wView.error();
+        work->workletsProcessed = 1u;
+        return;
+    }
+
+    error = setEncodeArgs(&mInputFrames, view.get(), workIndex);
+    if (error != C2_OK) {
+        mSignalledError = true;
+        work->result = error;
+        work->workletsProcessed = 1u;
+        return;
+    }
+
+    if (++mReceivedFrames < mMaxFrames) {
+        return;
+    }
+    mReceivedFrames = 0;
+
+    std::shared_ptr<oapv_bitb_t> bits = std::make_shared<oapv_bitb_t>();
+    std::memset(mBitstreamBuf, 0, kMaxBitstreamBufSize);
+    bits->addr = mBitstreamBuf;
+    bits->bsize = kMaxBitstreamBufSize;
+    bits->err = C2_OK;
+
+    if (mInputFrames.frm[0].imgb) {
+        int32_t status =
+                oapve_encode(mEncoderId, &mInputFrames, mMetaId, bits.get(), &stat, &mReconFrames);
+        if (status != C2_OK) {
+            mSignalledError = true;
+            work->result = C2_CORRUPTED;
+            work->workletsProcessed = 1u;
+            return;
+        }
+    } else if (!mSignalledEos) {
+        // No frame was queued for encoding, so there is nothing to pass to finishWork().
+        fillEmptyWork(work);
+        return;
+    }
+    finishWork(workIndex, work, pool, bits.get(), &stat);
+}
+
+class C2SoftApvEncFactory : public C2ComponentFactory {
+  public:
+    C2SoftApvEncFactory()
+        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+                  GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+    virtual c2_status_t createComponent(c2_node_id_t id,
+                                        std::shared_ptr<C2Component>* const component,
+                                        std::function<void(C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(
+                new C2SoftApvEnc(COMPONENT_NAME, id,
+                                 std::make_shared<C2SoftApvEnc::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    c2_status_t createInterface(c2_node_id_t id,
+                                std::shared_ptr<C2ComponentInterface>* const interface,
+                                std::function<void(C2ComponentInterface*)> deleter) override {
+        *interface = std::shared_ptr<C2ComponentInterface>(
+                new SimpleInterface<C2SoftApvEnc::IntfImpl>(
+                        COMPONENT_NAME, id, std::make_shared<C2SoftApvEnc::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    ~C2SoftApvEncFactory() override = default;
+
+  private:
+    std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+}  // namespace android
+
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+    if (!android::media::swcodec::flags::apv_software_codec()) {
+        ALOGV("APV SW Codec is not enabled");
+        return nullptr;
+    }
+    return new ::android::C2SoftApvEncFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+        ::C2ComponentFactory* factory) {
+    delete factory;
+}
diff --git a/media/codec2/components/apv/C2SoftApvEnc.h b/media/codec2/components/apv/C2SoftApvEnc.h
new file mode 100644
index 0000000..fc4ad7d
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvEnc.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_APV_ENC_H_
+#define ANDROID_C2_SOFT_APV_ENC_H_
+
+#include <SimpleC2Component.h>
+#include <utils/Vector.h>
+#include <map>
+#include "oapv.h"
+
+#include <C2SoftApvCommon.h>
+
+namespace android {
+
+#define CODEC_MAX_CORES 4
+
+#define APV_QP_MIN 1
+#define APV_QP_MAX 51
+
+struct C2SoftApvEnc final : public SimpleC2Component {
+    class IntfImpl;
+
+    C2SoftApvEnc(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    virtual ~C2SoftApvEnc();
+
+    // From SimpleC2Component
+    c2_status_t onInit() override;
+    c2_status_t onStop() override;
+    void onReset() override;
+    void onRelease() override;
+    c2_status_t onFlush_sm() override;
+    void process(const std::unique_ptr<C2Work>& work,
+                 const std::shared_ptr<C2BlockPool>& pool) override;
+    c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override;
+
+  private:
+    c2_status_t reset();
+    c2_status_t initEncoder();
+    c2_status_t releaseEncoder();
+    c2_status_t setEncodeArgs(oapv_frms_t* imgb_inp, const C2GraphicView* const input,
+                              uint64_t workIndex);
+    void finishWork(uint64_t workIndex, const std::unique_ptr<C2Work>& work,
+                    const std::shared_ptr<C2BlockPool>& pool, oapv_bitb_t* bitb,
+                    oapve_stat_t* stat);
+    c2_status_t drainInternal(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool,
+                              const std::unique_ptr<C2Work>& work);
+    void setParams(oapve_param_t& param);
+    int32_t getQpFromQuality(int quality);
+    C2Config::level_t decisionApvLevel(int32_t width, int32_t height, int32_t fps, int32_t bitrate,
+                                       int32_t band);
+
+    void showEncoderParams(oapve_cdesc_t* cdsc);
+
+    void ColorConvertP010ToYUV422P10le(const C2GraphicView* const input, oapv_imgb_t* imgb);
+
+    void createCsdData(const std::unique_ptr<C2Work>& work, oapv_bitb_t* bitb,
+                       uint32_t encodedSize);
+
+    std::shared_ptr<IntfImpl> mIntf;
+    int32_t mBitDepth;
+    int32_t mColorFormat;
+
+    bool mStarted;
+    bool mSignalledEos;
+    bool mSignalledError;
+
+    std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+    std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+    std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+    std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
+    std::shared_ptr<C2StreamQualityTuning::output> mQuality;
+    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
+    std::shared_ptr<C2LinearBlock> mOutBlock;
+    std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
+    std::shared_ptr<C2StreamPixelFormatInfo::input> mPixelFormat;
+
+    std::map<const void*, std::shared_ptr<C2Buffer>> mBuffers;
+    MemoryBlockPool mConversionBuffers;
+    std::map<const void*, MemoryBlock> mConversionBuffersInUse;
+
+    bool mInitEncoder;
+    int32_t mMaxFrames;
+    int32_t mReceivedFrames;
+    std::unique_ptr<oapve_cdesc_t> mCodecDesc;
+    oapv_frms_t mInputFrames;
+    oapv_frms_t mReconFrames;
+    oapve_t mEncoderId;
+    oapvm_t mMetaId;
+    uint8_t* mBitstreamBuf = nullptr;
+    bool mReceivedFirstFrame = false;
+    C2_DO_NOT_COPY(C2SoftApvEnc);
+};
+}  // namespace android
+
+#endif  // ANDROID_C2_SOFT_APV_ENC_H_
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index aec6523..a03f24f 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -463,6 +463,19 @@
     }
 }
 
+void convertP010ToP210(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY, const uint16_t *srcUV,
+                       size_t srcUVStride, size_t dstUVStride, size_t width, size_t height) {
+    std::memcpy(dstY, srcY, width * height * sizeof(uint16_t));
+
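+    // 4:2:0 -> 4:2:2: each source chroma row feeds two destination chroma rows,
+    // so the same UV row is copied to both the even and the odd output row below.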
+    int32_t offsetTop, offsetBot;
+    for (size_t y = 0; y < (height + 1) / 2; ++y) {
+        offsetTop = (y * 2) * dstUVStride;
+        offsetBot = (y * 2 + 1) * dstUVStride;
+        std::memcpy(dstUV + offsetTop, srcUV + (y * srcUVStride), srcUVStride * sizeof(uint16_t));
+        std::memcpy(dstUV + offsetBot, srcUV + (y * srcUVStride), srcUVStride * sizeof(uint16_t));
+    }
+}
+
 static const int16_t bt709Matrix_10bit[2][3][3] = {
     { { 218, 732, 74 }, { -117, -395, 512 }, { 512, -465, -47 } }, /* RANGE_FULL */
     { { 186, 627, 63 }, { -103, -345, 448 }, { 448, -407, -41 } }, /* RANGE_LIMITED */
@@ -517,6 +530,45 @@
     }
 }
 
+void convertRGBA1010102ToP210(uint16_t* dstY, uint16_t* dstUV, const uint32_t* srcRGBA,
+                              size_t srcRGBStride, size_t width, size_t height,
+                              C2Color::matrix_t colorMatrix, C2Color::range_t colorRange) {
+    uint16_t r, g, b;
+    int32_t i32Y, i32U, i32V;
+    uint16_t zeroLvl =  colorRange == C2Color::RANGE_FULL ? 0 : 64;
+    uint16_t maxLvlLuma =  colorRange == C2Color::RANGE_FULL ? 1023 : 940;
+    uint16_t maxLvlChroma =  colorRange == C2Color::RANGE_FULL ? 1023 : 960;
+    // set default range as limited
+    if (colorRange != C2Color::RANGE_FULL) {
+        colorRange = C2Color::RANGE_LIMITED;
+    }
+    const int16_t(*weights)[3] = (colorMatrix == C2Color::MATRIX_BT709)
+                                         ? bt709Matrix_10bit[colorRange - 1]
+                                         : bt2020Matrix_10bit[colorRange - 1];
+
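+    // RGBA1010102 packs R in bits 0-9, G in bits 10-19 and B in bits 20-29; each
+    // converted 10-bit Y/Cb/Cr sample is left-justified in 16 bits (<< 6) as P210
+    // expects, with Cb/Cr interleaved at half the horizontal luma resolution.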
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            b = (srcRGBA[x]  >> 20) & 0x3FF;
+            g = (srcRGBA[x]  >> 10) & 0x3FF;
+            r = srcRGBA[x] & 0x3FF;
+
+            i32Y = ((r * weights[0][0] + g * weights[0][1] + b * weights[0][2] + 512) >> 10) +
+                   zeroLvl;
+            dstY[x] = (CLIP3(zeroLvl, i32Y, maxLvlLuma) << 6) & 0xFFC0;
+            if (x % 2 == 0) {
+                i32U = ((r * weights[1][0] + g * weights[1][1] + b * weights[1][2] + 512) >> 10) +
+                       512;
+                i32V = ((r * weights[2][0] + g * weights[2][1] + b * weights[2][2] + 512) >> 10) +
+                       512;
+                dstUV[x] = (CLIP3(zeroLvl, i32U, maxLvlChroma) << 6) & 0xFFC0;
+                dstUV[x + 1] = (CLIP3(zeroLvl, i32V, maxLvlChroma) << 6) & 0xFFC0;
+            }
+        }
+        srcRGBA += srcRGBStride;
+        dstY += width;
+        // P210 carries one interleaved UV row per luma row, so advance dstUV as well.
+        dstUV += width;
+    }
+}
+
 void convertPlanar16ToY410OrRGBA1010102(uint8_t* dst, const uint16_t* srcY, const uint16_t* srcU,
                                         const uint16_t* srcV, size_t srcYStride, size_t srcUStride,
                                         size_t srcVStride, size_t dstStride, size_t width,
@@ -631,6 +683,36 @@
                                    isMonochrome);
     }
 }
+
+void convertSemiPlanar8ToP210(uint16_t *dstY, uint16_t *dstUV,
+                              const uint8_t *srcY, const uint8_t *srcUV,
+                              size_t srcYStride, size_t srcUVStride,
+                              size_t dstYStride, size_t dstUVStride,
+                              uint32_t width, uint32_t height,
+                              CONV_FORMAT_T format) {
+  if (format != CONV_FORMAT_I420) {
+    ALOGE("No support for semi-planar8 to P210. format is %d", format);
+    return;
+  }
+
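+  // Scale each 8-bit sample to 10 bits with rounding (x * 1023 / 255 + 0.5) and
+  // left-justify it in 16 bits (<< 6); the 4:2:0 chroma rows below are duplicated
+  // vertically to produce the 4:2:2 chroma layout of P210.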
+  for (int32_t y = 0; y < height; ++y) {
+    for (int32_t x = 0; x < width; ++x) {
+      dstY[x] = ((uint16_t)((double)srcY[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+    }
+    dstY += dstYStride;
+    srcY += srcYStride;
+  }
+
+  for (int32_t y = 0; y < height / 2; ++y) {
+    for (int32_t x = 0; x < width; ++x) {
+      dstUV[x] = dstUV[dstUVStride + x] =
+          ((uint16_t)((double)srcUV[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+    }
+    srcUV += srcUVStride;
+    dstUV += dstUVStride << 1;
+  }
+}
+
 std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
     std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
     mQueue.pop_front();
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index b28c47e..4306e55 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -68,10 +68,19 @@
                                  size_t dstUStride, size_t dstVStride, size_t width,
                                  size_t height, bool isMonochrome = false);
 
+void convertP010ToP210(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
+                       const uint16_t *srcUV, size_t srcUVStride, size_t dstUVStride,
+                       size_t width, size_t height);
+
 void convertRGBA1010102ToYUV420Planar16(uint16_t* dstY, uint16_t* dstU, uint16_t* dstV,
                                         const uint32_t* srcRGBA, size_t srcRGBStride, size_t width,
                                         size_t height, C2Color::matrix_t colorMatrix,
                                         C2Color::range_t colorRange);
+
+void convertRGBA1010102ToP210(uint16_t* dstY, uint16_t* dstUV, const uint32_t* srcRGBA,
+                              size_t srcRGBStride, size_t width, size_t height,
+                              C2Color::matrix_t colorMatrix, C2Color::range_t colorRange);
+
 void convertPlanar16ToY410OrRGBA1010102(uint8_t* dst, const uint16_t* srcY, const uint16_t* srcU,
                                         const uint16_t* srcV, size_t srcYStride, size_t srcUStride,
                                         size_t srcVStride, size_t dstStride, size_t width,
@@ -96,6 +105,12 @@
                           size_t srcUStride, size_t srcVStride, size_t dstYStride,
                           size_t dstUStride, size_t dstVStride, uint32_t width, uint32_t height,
                           bool isMonochrome, CONV_FORMAT_T format);
+void convertSemiPlanar8ToP210(uint16_t *dstY, uint16_t *dstUV,
+                              const uint8_t *srcY, const uint8_t *srcUV,
+                              size_t srcYStride, size_t srcUVStride,
+                              size_t dstYStride, size_t dstUVStride,
+                              uint32_t width, uint32_t height,
+                              CONV_FORMAT_T format);
 
 class SimpleC2Component
         : public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.cpp b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
index 149c6ee..aed5e68 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.cpp
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
@@ -114,7 +114,9 @@
 c2_status_t C2SoftMP3::onStop() {
     // Make sure that the next buffer output does not still
     // depend on fragments from the last one decoded.
-    pvmp3_InitDecoder(mConfig, mDecoderBuf);
+    if (mDecoderBuf) {
+        pvmp3_InitDecoder(mConfig, mDecoderBuf);
+    }
     mSignalledError = false;
     mIsFirst = true;
     mSignalledOutputEos = false;
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 562dcf5..52920c2 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -743,6 +743,25 @@
 }
 
 status_t C2SoftMpeg2Dec::deleteDecoder() {
+    // API call to IV_CMD_RETRIEVE_MEMREC not only retrieves the memory records
+    // but also joins active threads and destroys conditional thread variables and
+    // mutex locks for each thread.
+    iv_retrieve_mem_rec_ip_t s_retrieve_mem_ip;
+    iv_retrieve_mem_rec_op_t s_retrieve_mem_op;
+
+    s_retrieve_mem_ip.pv_mem_rec_location = (iv_mem_rec_t *)mMemRecords;
+    s_retrieve_mem_ip.e_cmd = IV_CMD_RETRIEVE_MEMREC;
+    s_retrieve_mem_ip.u4_size = sizeof(iv_retrieve_mem_rec_ip_t);
+    s_retrieve_mem_op.u4_size = sizeof(iv_retrieve_mem_rec_op_t);
+
+    IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+                                                    &s_retrieve_mem_ip,
+                                                    &s_retrieve_mem_op);
+    if (IV_SUCCESS != status) {
+        ALOGE("error in %s: 0x%x", __func__, s_retrieve_mem_op.u4_error_code);
+        return UNKNOWN_ERROR;
+    }
+
     if (mMemRecords) {
         iv_mem_rec_t *ps_mem_rec = mMemRecords;
 
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index e6782a9..fa5ce77 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -70,6 +70,7 @@
     enum platform_level_t : uint32_t;       ///< platform level
     enum prepend_header_mode_t : uint32_t;  ///< prepend header operational modes
     enum profile_t : uint32_t;              ///< coding profile
+    enum resource_kind_t : uint32_t;        ///< resource kinds
     enum scaling_method_t : uint32_t;       ///< scaling methods
     enum scan_order_t : uint32_t;           ///< scan orders
     enum secure_mode_t : uint32_t;          ///< secure/protected modes
@@ -101,6 +102,7 @@
     kParamIndexMasteringDisplayColorVolume,
     kParamIndexChromaOffset,
     kParamIndexGopLayer,
+    kParamIndexSystemResource,
 
     /* =================================== parameter indices =================================== */
 
@@ -167,6 +169,10 @@
     /* Region of Interest Encoding parameters */
     kParamIndexQpOffsetMapBuffer, // info-buffer, used to signal qp-offset map for a frame
 
+    /* resource capacity and resources excluded */
+    kParamIndexResourcesCapacity,
+    kParamIndexResourcesExcluded,
+
     // deprecated
     kParamIndexDelayRequest = kParamIndexDelay | C2Param::CoreIndex::IS_REQUEST_FLAG,
 
@@ -430,6 +436,7 @@
     _C2_PL_AV1_BASE  = 0x9000,
     _C2_PL_VP8_BASE  = 0xA000,
     _C2_PL_MPEGH_BASE = 0xB000,     // MPEG-H 3D Audio
+    _C2_PL_APV_BASE = 0xC000,     // APV
 
     C2_PROFILE_LEVEL_VENDOR_START = 0x70000000,
 };
@@ -597,6 +604,15 @@
     PROFILE_MPEGH_HIGH,                         ///< MPEG-H High
     PROFILE_MPEGH_LC,                           ///< MPEG-H Low-complexity
     PROFILE_MPEGH_BASELINE,                     ///< MPEG-H Baseline
+
+    // Advanced Professional Video Codec (APV)
+    PROFILE_APV_422_10 = _C2_PL_APV_BASE,       ///< APV 422-10 Profile
+    PROFILE_APV_422_12,                         ///< APV 422-12 Profile
+    PROFILE_APV_444_10,                         ///< APV 444-10 Profile
+    PROFILE_APV_444_12,                         ///< APV 444-12 Profile
+    PROFILE_APV_4444_10,                        ///< APV 4444-10 Profile
+    PROFILE_APV_4444_12,                        ///< APV 4444-12 Profile
+    PROFILE_APV_400_10,                         ///< APV 400-10 Profile
 };
 
 enum C2Config::level_t : uint32_t {
@@ -752,6 +768,68 @@
     LEVEL_MPEGH_3,                              ///< MPEG-H L3
     LEVEL_MPEGH_4,                              ///< MPEG-H L4
     LEVEL_MPEGH_5,                              ///< MPEG-H L5
+
+    // Advanced Professional Video Codec (APV) levels/bands
+    LEVEL_APV_1_BAND_0 = _C2_PL_APV_BASE,            ///< APV L 1, BAND 0
+    LEVEL_APV_1_1_BAND_0,                            ///< APV L 1.1, BAND 0
+    LEVEL_APV_2_BAND_0,                              ///< APV L 2, BAND 0
+    LEVEL_APV_2_1_BAND_0,                            ///< APV L 2.1, BAND 0
+    LEVEL_APV_3_BAND_0,                              ///< APV L 3, BAND 0
+    LEVEL_APV_3_1_BAND_0,                            ///< APV L 3.1, BAND 0
+    LEVEL_APV_4_BAND_0,                              ///< APV L 4, BAND 0
+    LEVEL_APV_4_1_BAND_0,                            ///< APV L 4.1, BAND 0
+    LEVEL_APV_5_BAND_0,                              ///< APV L 5, BAND 0
+    LEVEL_APV_5_1_BAND_0,                            ///< APV L 5.1, BAND 0
+    LEVEL_APV_6_BAND_0,                              ///< APV L 6, BAND 0
+    LEVEL_APV_6_1_BAND_0,                            ///< APV L 6.1, BAND 0
+    LEVEL_APV_7_BAND_0,                              ///< APV L 7, BAND 0
+    LEVEL_APV_7_1_BAND_0,                            ///< APV L 7.1, BAND 0
+
+    LEVEL_APV_1_BAND_1 = _C2_PL_APV_BASE + 0x100,    ///< APV L 1, BAND 1
+    LEVEL_APV_1_1_BAND_1,                            ///< APV L 1.1, BAND 1
+    LEVEL_APV_2_BAND_1,                              ///< APV L 2, BAND 1
+    LEVEL_APV_2_1_BAND_1,                            ///< APV L 2.1, BAND 1
+    LEVEL_APV_3_BAND_1,                              ///< APV L 3, BAND 1
+    LEVEL_APV_3_1_BAND_1,                            ///< APV L 3.1, BAND 1
+    LEVEL_APV_4_BAND_1,                              ///< APV L 4, BAND 1
+    LEVEL_APV_4_1_BAND_1,                            ///< APV L 4.1, BAND 1
+    LEVEL_APV_5_BAND_1,                              ///< APV L 5, BAND 1
+    LEVEL_APV_5_1_BAND_1,                            ///< APV L 5.1, BAND 1
+    LEVEL_APV_6_BAND_1,                              ///< APV L 6, BAND 1
+    LEVEL_APV_6_1_BAND_1,                            ///< APV L 6.1, BAND 1
+    LEVEL_APV_7_BAND_1,                              ///< APV L 7, BAND 1
+    LEVEL_APV_7_1_BAND_1,                            ///< APV L 7.1, BAND 1
+
+    LEVEL_APV_1_BAND_2 = _C2_PL_APV_BASE + 0x200,    ///< APV L 1, BAND 2
+    LEVEL_APV_1_1_BAND_2,                            ///< APV L 1.1, BAND 2
+    LEVEL_APV_2_BAND_2,                              ///< APV L 2, BAND 2
+    LEVEL_APV_2_1_BAND_2,                            ///< APV L 2.1, BAND 2
+    LEVEL_APV_3_BAND_2,                              ///< APV L 3, BAND 2
+    LEVEL_APV_3_1_BAND_2,                            ///< APV L 3.1, BAND 2
+    LEVEL_APV_4_BAND_2,                              ///< APV L 4, BAND 2
+    LEVEL_APV_4_1_BAND_2,                            ///< APV L 4.1, BAND 2
+    LEVEL_APV_5_BAND_2,                              ///< APV L 5, BAND 2
+    LEVEL_APV_5_1_BAND_2,                            ///< APV L 5.1, BAND 2
+    LEVEL_APV_6_BAND_2,                              ///< APV L 6, BAND 2
+    LEVEL_APV_6_1_BAND_2,                            ///< APV L 6.1, BAND 2
+    LEVEL_APV_7_BAND_2,                              ///< APV L 7, BAND 2
+    LEVEL_APV_7_1_BAND_2,                            ///< APV L 7.1, BAND 2
+
+    LEVEL_APV_1_BAND_3 = _C2_PL_APV_BASE + 0x300,    ///< APV L 1, BAND 3
+    LEVEL_APV_1_1_BAND_3,                            ///< APV L 1.1, BAND 3
+    LEVEL_APV_2_BAND_3,                              ///< APV L 2, BAND 3
+    LEVEL_APV_2_1_BAND_3,                            ///< APV L 2.1, BAND 3
+    LEVEL_APV_3_BAND_3,                              ///< APV L 3, BAND 3
+    LEVEL_APV_3_1_BAND_3,                            ///< APV L 3.1, BAND 3
+    LEVEL_APV_4_BAND_3,                              ///< APV L 4, BAND 3
+    LEVEL_APV_4_1_BAND_3,                            ///< APV L 4.1, BAND 3
+    LEVEL_APV_5_BAND_3,                              ///< APV L 5, BAND 3
+    LEVEL_APV_5_1_BAND_3,                            ///< APV L 5.1, BAND 3
+    LEVEL_APV_6_BAND_3,                              ///< APV L 6, BAND 3
+    LEVEL_APV_6_1_BAND_3,                            ///< APV L 6.1, BAND 3
+    LEVEL_APV_7_BAND_3,                              ///< APV L 7, BAND 3
+    LEVEL_APV_7_1_BAND_3,                            ///< APV L 7.1, BAND 3
+
 };
 
 struct C2ProfileLevelStruct {
@@ -1185,21 +1263,114 @@
 /* ----------------------------------------- resources ----------------------------------------- */
 
 /**
- * Resources needed and resources reserved for current configuration.
- *
- * Resources are tracked as a vector of positive numbers. Available resources are defined by
- * the vendor.
- *
- * By default, no resources are reserved for a component. If resource reservation is successful,
- * the component shall be able to use those resources exclusively. If however, the component is
- * not using all of the reserved resources, those may be shared with other components.
- *
- * TODO: define some of the resources.
+ * Resource kind: whether the specified amount is a constant requirement of the
+ * component instance (CONST) or scales with processing (PER_FRAME,
+ * PER_INPUT_BLOCK, PER_OUTPUT_BLOCK).
  */
-typedef C2GlobalParam<C2Tuning, C2Uint64Array, kParamIndexResourcesNeeded> C2ResourcesNeededTuning;
-typedef C2GlobalParam<C2Tuning, C2Uint64Array, kParamIndexResourcesReserved>
-        C2ResourcesReservedTuning;
+C2ENUM(C2Config::resource_kind_t, uint32_t,
+    CONST,
+    PER_FRAME,
+    PER_INPUT_BLOCK,
+    PER_OUTPUT_BLOCK
+)
+
+/**
+ * Definition of a system resource use.
+ *
+ * [PROPOSED]
+ *
+ * System resources are defined by the default component store.
+ * They represent any physical or abstract entities of limited availability
+ * that are required for a component instance to execute and process work.
+ *
+ * Each defined resource has an id.
+ * The use of a resource is specified by the amount and the kind (e.g. whether the amount
+ * of resources is required for each frame processed, or whether they are required
+ * regardless of the processing rate (const amount)).
+ *
+ * Note: implementations can shadow this structure with their own custom resource
+ * structure where a uint32_t based enum is used for id.
+ * This can be used to provide a name for each resource, via parameter descriptors.
+ */
+
+struct C2SystemResourceStruct {
+    C2SystemResourceStruct(uint32_t id_,
+                           C2Config::resource_kind_t kind_,
+                           uint64_t amount_)
+        : id(id_), kind(kind_), amount(amount_) { }
+    uint32_t id;
+    C2Config::resource_kind_t kind;
+    uint64_t amount;
+
+    DEFINE_AND_DESCRIBE_C2STRUCT(SystemResource)
+    C2FIELD(id, "id")
+    C2FIELD(kind, "kind")
+    C2FIELD(amount, "amount")
+};
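+
+/*
+ * Illustrative use (kVendorPipelineId is a hypothetical, vendor-defined resource id):
+ * a component instance that occupies one hardware pipeline slot for its lifetime,
+ * independent of frame rate, could report
+ *     C2SystemResourceStruct(kVendorPipelineId, C2Config::CONST, 1);
+ */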
+
+/**
+ * Total system resource capacity.
+ *
+ * [PROPOSED]
+ *
+ * This setting is implemented by the default component store.
+ * The total resource capacity is specified as the maximum amount for each resource ID
+ * that is supported by the device hardware or firmware.
+ * As such, the kind must be CONST for each element.
+ */
+typedef C2GlobalParam<C2Tuning,
+                      C2SimpleArrayStruct<C2SystemResourceStruct>,
+                      kParamIndexResourcesCapacity> C2ResourcesCapacityTuning;
+constexpr char C2_PARAMKEY_RESOURCES_CAPACITY[] = "resources.capacity";
+
+/**
+ * Excluded system resources.
+ *
+ * [PROPOSED]
+ *
+ * This setting is implemented by the default component store.
+ * Some system resources may be used by components and not tracked by the Codec 2.0 API.
+ * This is communicated by this tuning.
+ * Excluded resources are the total resources that are used by non-Codec 2.0 components.
+ * They are specified as the excluded amount for each resource ID that is used by
+ * a non-Codec 2.0 component. As such, the kind must be CONST for each element.
+ *
+ * The platform can calculate the available resources as the total capacity minus
+ * the excluded resources minus the sum of the needed resources of each component
+ * (see the worked example below).
+ */
+typedef C2GlobalParam<C2Tuning,
+                      C2SimpleArrayStruct<C2SystemResourceStruct>,
+                      kParamIndexResourcesExcluded> C2ResourcesExcludedTuning;
+constexpr char C2_PARAMKEY_RESOURCES_EXCLUDED[] = "resources.excluded";
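+
+/*
+ * Worked example with purely illustrative numbers: for a resource ID with a
+ * capacity of 16 and an excluded amount of 4, two running components that each
+ * need 3 (CONST) leave 16 - 4 - (3 + 3) = 6 units of that resource available.
+ */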
+
+/**
+ * System resources needed for the current configuration.
+ *
+ * [PROPOSED]
+ *
+ * Resources are tracked as a list of individual resource use specifications.
+ * The resource kind can be CONST, PER_FRAME, PER_INPUT_BLOCK or PER_OUTPUT_BLOCK.
+ */
+typedef C2GlobalParam<C2Tuning,
+                      C2SimpleArrayStruct<C2SystemResourceStruct>,
+                      kParamIndexResourcesNeeded> C2ResourcesNeededTuning;
 constexpr char C2_PARAMKEY_RESOURCES_NEEDED[] = "resources.needed";
+
+/**
+ * System resources reserved for this component.
+ *
+ * [FUTURE]
+ *
+ * This allows the platform to set aside system resources for the component.
+ * Since this is a static resource reservation, kind must be CONST for each element.
+ * This resource reservation only considers CONST and PER_FRAME use.
+ *
+ * By default, no resources are reserved for a component.
+ * If resource reservation is successful, the component shall be able to use those
+ * resources exclusively. If however, the component is not using all of the
+ * reserved resources, those may be shared with other components.
+ */
+typedef C2GlobalParam<C2Tuning,
+                      C2SimpleArrayStruct<C2SystemResourceStruct>,
+                      kParamIndexResourcesReserved> C2ResourcesReservedTuning;
 constexpr char C2_PARAMKEY_RESOURCES_RESERVED[] = "resources.reserved";
 
 /**
diff --git a/media/codec2/hal/aidl/ComponentStore.cpp b/media/codec2/hal/aidl/ComponentStore.cpp
index ea4d045..de9332b 100644
--- a/media/codec2/hal/aidl/ComponentStore.cpp
+++ b/media/codec2/hal/aidl/ComponentStore.cpp
@@ -153,6 +153,13 @@
         mParamReflectors.push_back(paramReflector);
     }
 #endif
+    // MultiAccessUnit reflector helper is allocated once per store.
+    // All components in this store can reuse this reflector helper.
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        std::shared_ptr<C2ReflectorHelper> helper = std::make_shared<C2ReflectorHelper>();
+        mParamReflectors.push_back(helper);
+        mMultiAccessUnitReflector = helper;
+    }
 
     // Retrieve supported parameters from store
     using namespace std::placeholders;
@@ -212,12 +219,20 @@
     if (c2interface == nullptr) {
         return nullptr;
     }
+    // Framework support for Large audio frame feature depends on:
+    // 1. All feature flags enabled on platform
+    // 2. The capability of the implementation to use the same input buffer
+    //    for different C2Work (C2Config::api_feature_t::API_SAME_INPUT_BUFFER)
+    // 3. Implementation does not inherently support C2LargeFrame::output::PARAM_TYPE param.
     if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
         c2_status_t err = C2_OK;
         C2ComponentDomainSetting domain;
         std::vector<std::unique_ptr<C2Param>> heapParams;
-        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
-        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+        C2ApiFeaturesSetting features = (C2Config::api_feature_t)0;
+        err = c2interface->query_vb({&domain, &features}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK
+                && (domain.value == C2Component::DOMAIN_AUDIO)
+                && ((features.value & C2Config::api_feature_t::API_SAME_INPUT_BUFFER) != 0)) {
             std::vector<std::shared_ptr<C2ParamDescriptor>> params;
             bool isComponentSupportsLargeAudioFrame = false;
             c2interface->querySupportedParams_nb(&params);
@@ -234,7 +249,7 @@
                 // thus we pass the component's param reflector, which is mParamReflectors[0].
                 multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
                         c2interface,
-                        std::static_pointer_cast<C2ReflectorHelper>(mParamReflectors[0]));
+                        mMultiAccessUnitReflector);
             }
         }
     }
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
index b2158a6..bb4c596 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
@@ -52,6 +52,13 @@
 using ::aidl::android::hardware::media::bufferpool2::IClientManager;
 
 struct ComponentStore : public BnComponentStore {
+    /**
+     * Constructor for ComponentStore.
+     *
+     * IMPORTANT: SetPreferredCodec2ComponentStore() is called in the constructor.
+     * Be careful about the order of SetPreferredCodec2ComponentStore() and
+     * ComponentStore() in the code.
+     */
     ComponentStore(const std::shared_ptr<C2ComponentStore>& store);
     virtual ~ComponentStore();
 
@@ -120,6 +127,9 @@
     std::shared_ptr<C2ComponentStore> mStore;
     std::vector<std::shared_ptr<C2ParamReflector>> mParamReflectors;
 
+    // Reflector helper for MultiAccessUnitHelper
+    std::shared_ptr<C2ReflectorHelper> mMultiAccessUnitReflector;
+
     std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
     std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
     std::set<C2String> mLoadedInterfaces;
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 8d9e76e..bdfc409 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -34,7 +34,7 @@
 
 c2_status_t retrieveAHardwareBufferId(const C2ConstGraphicBlock &blk, uint64_t *bid) {
     std::shared_ptr<const _C2BlockPoolData> bpData = _C2BlockFactory::GetGraphicBlockPoolData(blk);
-    if (bpData->getType() != _C2BlockPoolData::TYPE_AHWBUFFER) {
+    if (!bpData || bpData->getType() != _C2BlockPoolData::TYPE_AHWBUFFER) {
         return C2_BAD_VALUE;
     }
     if (__builtin_available(android __ANDROID_API_T__, *)) {
@@ -824,6 +824,10 @@
     std::shared_ptr<BufferCache> cache;
     int slotId;
     sp<Fence> rFence;
+    if (mStopped.load() == true) {
+        ALOGE("cannot deallocate due to being stopped");
+        return C2_BAD_STATE;
+    }
     c2_status_t res = requestDeallocate(bid, fence, &completed, &updateDequeue,
                                         &cache, &slotId, &rFence);
     if (res != C2_OK) {
@@ -900,7 +904,10 @@
         cache->unblockSlot(buffer->mSlot);
         if (oldBuffer) {
             // migrated, register the new buffer to the cache.
-            cache->mBuffers.emplace(buffer->mSlot, buffer);
+            auto ret = cache->mBuffers.emplace(buffer->mSlot, buffer);
+            if (!ret.second) {
+                ret.first->second = buffer;
+            }
         }
     }
     mDeallocating.erase(origBid);
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index a137dbb..6348e42 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -21,7 +21,9 @@
 #include <utils/Trace.h>
 
 #include <codec2/aidl/GraphicBufferAllocator.h>
+#include <codec2/common/HalSelection.h>
 #include <codec2/hidl/client.h>
+
 #include <C2Debug.h>
 #include <C2BufferPriv.h>
 #include <C2Config.h> // for C2StreamUsageTuning
@@ -1832,9 +1834,7 @@
 
 std::shared_ptr<Codec2Client::InputSurface> Codec2Client::CreateInputSurface(
         char const* serviceName) {
-    int32_t inputSurfaceSetting = ::android::base::GetIntProperty(
-            "debug.stagefright.c2inputsurface", int32_t(0));
-    if (inputSurfaceSetting <= 0) {
+    if (!IsCodec2AidlInputSurfaceSelected()) {
         return nullptr;
     }
     size_t index = GetServiceNames().size();
@@ -2391,7 +2391,12 @@
                        "GraphicBufferAllocator was not created.";
             return C2_CORRUPTED;
         }
+        // Note: Consumer usage is configured before the HAL allocator (gba) is set up.
+        // This is the same ordering as in the HIDL path.
+        uint64_t consumerUsage = configConsumerUsage(surface);
         bool ret = gba->configure(surface, generation, maxDequeueCount);
+        ALOGD("setOutputSurface -- generation=%u consumer usage=%#llx",
+              generation, (long long)consumerUsage);
         return ret ? C2_OK : C2_CORRUPTED;
     }
     uint64_t bqId = 0;
@@ -2419,41 +2424,9 @@
                                       mHidlBase1_2 ? &syncObj : nullptr);
     }
 
-    // set consumer bits
-    // TODO: should this get incorporated into setOutputSurface method so that consumer bits
-    // can be set atomically?
-    uint64_t consumerUsage = kDefaultConsumerUsage;
-    {
-        if (surface) {
-            uint64_t usage = 0;
-            status_t err = surface->getConsumerUsage(&usage);
-            if (err != NO_ERROR) {
-                ALOGD("setOutputSurface -- failed to get consumer usage bits (%d/%s). ignoring",
-                        err, asString(err));
-            } else {
-                // Note: we are adding the default usage because components must support
-                // producing output frames that can be displayed an all output surfaces.
-
-                // TODO: do not set usage for tunneled scenario. It is unclear if consumer usage
-                // is meaningful in a tunneled scenario; on one hand output buffers exist, but
-                // they do not exist inside of C2 scope. Any buffer usage shall be communicated
-                // through the sideband channel.
-
-                consumerUsage = usage | kDefaultConsumerUsage;
-            }
-        }
-
-        C2StreamUsageTuning::output outputUsage{
-                0u, C2AndroidMemoryUsage::FromGrallocUsage(consumerUsage).expected};
-        std::vector<std::unique_ptr<C2SettingResult>> failures;
-        c2_status_t err = config({&outputUsage}, C2_MAY_BLOCK, &failures);
-        if (err != C2_OK) {
-            ALOGD("setOutputSurface -- failed to set consumer usage (%d/%s)",
-                    err, asString(err));
-        }
-    }
+    uint64_t consumerUsage = configConsumerUsage(surface);
     ALOGD("setOutputSurface -- generation=%u consumer usage=%#llx%s",
-            generation, (long long)consumerUsage, syncObj ? " sync" : "");
+          generation, (long long)consumerUsage, syncObj ? " sync" : "");
 
     Return<c2_hidl::Status> transStatus = syncObj ?
             mHidlBase1_2->setOutputSurfaceWithSyncObj(
@@ -2495,6 +2468,44 @@
     return mOutputBufferQueue->outputBuffer(block, input, output);
 }
 
+uint64_t Codec2Client::Component::configConsumerUsage(
+        const sp<IGraphicBufferProducer>& surface) {
+    // set consumer bits
+    // TODO: should this get incorporated into setOutputSurface method so that consumer bits
+    // can be set atomically?
+    uint64_t consumerUsage = kDefaultConsumerUsage;
+    {
+        if (surface) {
+            uint64_t usage = 0;
+            status_t err = surface->getConsumerUsage(&usage);
+            if (err != NO_ERROR) {
+                ALOGD("setOutputSurface -- failed to get consumer usage bits (%d/%s). ignoring",
+                        err, asString(err));
+            } else {
+                // Note: we are adding the default usage because components must support
+                // producing output frames that can be displayed on all output surfaces.
+
+                // TODO: do not set usage for tunneled scenario. It is unclear if consumer usage
+                // is meaningful in a tunneled scenario; on one hand output buffers exist, but
+                // they do not exist inside of C2 scope. Any buffer usage shall be communicated
+                // through the sideband channel.
+
+                consumerUsage = usage | kDefaultConsumerUsage;
+            }
+        }
+
+        C2StreamUsageTuning::output outputUsage{
+                0u, C2AndroidMemoryUsage::FromGrallocUsage(consumerUsage).expected};
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = config({&outputUsage}, C2_MAY_BLOCK, &failures);
+        if (err != C2_OK) {
+            ALOGD("setOutputSurface -- failed to set consumer usage (%d/%s)",
+                    err, asString(err));
+        }
+    }
+    return consumerUsage;
+}
+
 void Codec2Client::Component::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
     if (mAidlBase) {
         // TODO b/311348680
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index 413e92e..7923f04 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -467,6 +467,9 @@
             const QueueBufferInput& input,
             QueueBufferOutput* output);
 
+    // configure consumer usage.
+    uint64_t configConsumerUsage(const sp<IGraphicBufferProducer>& surface);
+
     // Retrieve frame event history from the output surface.
     void pollForRenderedFrames(FrameEventHistoryDelta* delta);
 
diff --git a/media/codec2/hal/client/include/codec2/hidl/output.h b/media/codec2/hal/client/include/codec2/hidl/output.h
index ddb9855..108f0a6 100644
--- a/media/codec2/hal/client/include/codec2/hidl/output.h
+++ b/media/codec2/hal/client/include/codec2/hidl/output.h
@@ -96,6 +96,7 @@
     uint64_t mBqId;
     int32_t mMaxDequeueBufferCount;
     std::shared_ptr<int> mOwner;
+    std::shared_ptr<int> mConsumerAttachCount;
     // To migrate existing buffers
     sp<GraphicBuffer> mBuffers[BufferQueueDefs::NUM_BUFFER_SLOTS]; // find a better way
     std::weak_ptr<_C2BlockPoolData> mPoolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
diff --git a/media/codec2/hal/client/output.cpp b/media/codec2/hal/client/output.cpp
index 54d78a0..2eb381b 100644
--- a/media/codec2/hal/client/output.cpp
+++ b/media/codec2/hal/client/output.cpp
@@ -261,6 +261,7 @@
         mGeneration = generation;
         mBqId = bqId;
         mOwner = std::make_shared<int>(0);
+        mConsumerAttachCount = std::make_shared<int>(0);
         mMaxDequeueBufferCount = maxDequeueBufferCount;
         if (igbp == nullptr) {
             return false;
@@ -522,6 +523,7 @@
     std::shared_ptr<C2SurfaceSyncMemory> syncMem;
     sp<IGraphicBufferProducer> outputIgbp;
     uint32_t outputGeneration = 0;
+    std::shared_ptr<int> consumerAttachCount;
     {
         std::unique_lock<std::mutex> l(mMutex);
         if (mStopped) {
@@ -529,6 +531,7 @@
         }
         outputIgbp = mIgbp;
         outputGeneration = mGeneration;
+        consumerAttachCount = mConsumerAttachCount;
         syncMem = mSyncMem;
     }
 
@@ -536,15 +539,42 @@
         auto syncVar = syncMem ? syncMem->mem() : nullptr;
         if (syncVar) {
             syncVar->lock();
-            syncVar->notifyQueuedLocked();
+            if (consumerAttachCount && *consumerAttachCount > 0) {
+                (*consumerAttachCount)--;
+            } else {
+                syncVar->notifyQueuedLocked();
+            }
             syncVar->unlock();
         }
     }
 }
 
 void OutputBufferQueue::onBufferAttached(uint32_t generation) {
-    // TODO
-    (void) generation;
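+    // Count buffers attached by the consumer for the current generation so that
+    // onBufferQueued() can skip one notifyQueuedLocked() per attached buffer
+    // instead of crediting the sync-memory queued count for it.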
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem;
+    sp<IGraphicBufferProducer> outputIgbp;
+    uint32_t outputGeneration = 0;
+    std::shared_ptr<int> consumerAttachCount;
+    {
+        std::unique_lock<std::mutex> l(mMutex);
+        if (mStopped) {
+            return;
+        }
+        outputIgbp = mIgbp;
+        outputGeneration = mGeneration;
+        consumerAttachCount = mConsumerAttachCount;
+        syncMem = mSyncMem;
+    }
+
+    if (outputIgbp && generation == outputGeneration) {
+        auto syncVar = syncMem ? syncMem->mem() : nullptr;
+        if (syncVar) {
+            syncVar->lock();
+            if (consumerAttachCount) {
+                (*consumerAttachCount)++;
+            }
+            syncVar->unlock();
+        }
+    }
 }
 
 void OutputBufferQueue::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
diff --git a/media/codec2/hal/common/Android.bp b/media/codec2/hal/common/Android.bp
index 0638363..886391b 100644
--- a/media/codec2/hal/common/Android.bp
+++ b/media/codec2/hal/common/Android.bp
@@ -56,13 +56,17 @@
         "libaconfig_storage_read_api_cc",
     ],
 
-    static_libs: ["aconfig_mediacodec_flags_c_lib"],
+    static_libs: [
+        "aconfig_mediacodec_flags_c_lib",
+        "android.media.codec-aconfig-cc",
+    ],
 }
 
 cc_defaults {
     name: "libcodec2_hal_selection",
     static_libs: [
         "aconfig_mediacodec_flags_c_lib",
+        "android.media.codec-aconfig-cc",
         "libcodec2_hal_selection_static",
     ],
     shared_libs: [
diff --git a/media/codec2/hal/common/HalSelection.cpp b/media/codec2/hal/common/HalSelection.cpp
index d3ea181..5bf4fbe 100644
--- a/media/codec2/hal/common/HalSelection.cpp
+++ b/media/codec2/hal/common/HalSelection.cpp
@@ -21,6 +21,7 @@
 // NOTE: due to dependency from mainline modules cannot use libsysprop
 // #include <android/sysprop/MediaProperties.sysprop.h>
 #include <android-base/properties.h>
+#include <android_media_codec.h>
 #include <com_android_media_codec_flags.h>
 
 #include <codec2/common/HalSelection.h>
@@ -66,4 +67,20 @@
     return false;
 }
 
+bool IsCodec2AidlInputSurfaceSelected() {
+    if (!IsCodec2AidlHalSelected()) {
+        return false;
+    }
+
+    int32_t inputSurfaceSetting = ::android::base::GetIntProperty(
+            "debug.stagefright.c2inputsurface", int32_t(0));
+    if (inputSurfaceSetting <= 0) {
+        return false;
+    }
+    if (!android::media::codec::provider_->aidl_hal_input_surface()) {
+        return false;
+    }
+    return true;
+}
+
 }  // namespace android
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
index 55bf7a9..b287b91 100644
--- a/media/codec2/hal/common/MultiAccessUnitHelper.cpp
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -327,10 +327,10 @@
                 newWork->worklets.front()->component = inWork->worklets.front()->component;
                 std::vector<std::unique_ptr<C2Tuning>> tunings;
                 for (std::unique_ptr<C2Tuning>& tuning : inWork->worklets.front()->tunings) {
-                    tunings.push_back(std::move(
+                    tunings.push_back(
                             std::unique_ptr<C2Tuning>(
                                     static_cast<C2Tuning*>(
-                                            C2Param::Copy(*(tuning.get())).release()))));
+                                            C2Param::Copy(*(tuning.get())).release())));
                 }
                 newWork->worklets.front()->tunings = std::move(tunings);
             }
@@ -344,7 +344,7 @@
                     << inputOrdinal.frameIndex.peekull()
                     << ") -> newFrameIndex " << newFrameIdx
                     <<" : input ts " << inputOrdinal.timestamp.peekull();
-            sliceWork.push_back(std::move(cloneInputWork(w, w->input.flags)));
+            sliceWork.push_back(cloneInputWork(w, w->input.flags));
             if (!w->input.buffers.empty() && w->input.buffers.front() != nullptr) {
                 sliceWork.back()->input.buffers = std::move(w->input.buffers);
             }
@@ -853,4 +853,4 @@
     mLargeWork.reset();
 }
 
-}  // namespace android
\ No newline at end of file
+}  // namespace android
diff --git a/media/codec2/hal/common/include/codec2/common/BqPoolInvalidateHelper.h b/media/codec2/hal/common/include/codec2/common/BqPoolInvalidateHelper.h
new file mode 100644
index 0000000..859f703
--- /dev/null
+++ b/media/codec2/hal/common/include/codec2/common/BqPoolInvalidateHelper.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <C2BqBufferPriv.h>
+#include <C2PlatformSupport.h>
+
+namespace android {
+
+// filter fn from component's blockpool container to bqpool container
+static inline bool BqPoolFilterFn(
+        std::pair<const uint64_t, std::shared_ptr<C2BlockPool>> pool) {
+    return (pool.second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE);
+}
+
+// convert fn from component's blockpool container to bqpool container
+static inline std::shared_ptr<C2BufferQueueBlockPool> BqPoolConvertFn(
+        std::pair<const uint64_t, std::shared_ptr<C2BlockPool>> pool) {
+    return std::static_pointer_cast<C2BufferQueueBlockPool>(pool.second);
+}
+
+// This is similar to std::transform except that it takes a \pred functor parameter.
+// Only the elements for which \pred returns \true are transformed and added to the
+// dest container. (For portability std::ranges are not used.)
+template <class InputIt, class OutputIt, class Pred, class Fct>
+void transform_if(InputIt first, InputIt last, OutputIt dest, Pred pred, Fct transform)
+{
+   while (first != last) {
+      if (pred(*first)) {
+         *dest++ = transform(*first);
+      }
+      ++first;
+   }
+}
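+
+// Usage sketch, mirroring how Component.cpp collects its bufferqueue-backed pools:
+//   std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
+//   transform_if(mBlockPools.begin(), mBlockPools.end(),
+//                std::back_inserter(bqPools), BqPoolFilterFn, BqPoolConvertFn);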
+
+}  // namespace android
diff --git a/media/codec2/hal/common/include/codec2/common/HalSelection.h b/media/codec2/hal/common/include/codec2/common/HalSelection.h
index 7c77515..bf0c7c5 100644
--- a/media/codec2/hal/common/include/codec2/common/HalSelection.h
+++ b/media/codec2/hal/common/include/codec2/common/HalSelection.h
@@ -22,6 +22,9 @@
 // Returns true iff AIDL c2 HAL is selected for the system
 bool IsCodec2AidlHalSelected();
 
+// Returns true iff AIDL c2 InputSurface interface is selected for the system
+bool IsCodec2AidlInputSurfaceSelected();
+
 }  // namespace android
 
 #endif  // CODEC2_HAL_SELECTION_H
diff --git a/media/codec2/hal/hidl/1.0/utils/Component.cpp b/media/codec2/hal/hidl/1.0/utils/Component.cpp
index 62f0e25..162a80e 100644
--- a/media/codec2/hal/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/Component.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "Codec2-Component"
 #include <android-base/logging.h>
 
+#include <codec2/common/BqPoolInvalidateHelper.h>
 #include <codec2/hidl/1.0/Component.h>
 #include <codec2/hidl/1.0/ComponentStore.h>
 #include <codec2/hidl/1.0/InputBufferManager.h>
@@ -30,6 +31,7 @@
 #include <utils/Timers.h>
 
 #include <C2BqBufferPriv.h>
+#include <C2BqPoolInvalidator.h>
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
@@ -270,16 +272,17 @@
 }
 
 void Component::onDeathReceived() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
     {
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mClientDied = true;
-        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
-            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
-                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
-                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
-                bqPool->invalidate();
-            }
-        }
+        transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                BqPoolFilterFn, BqPoolConvertFn);
+    }
+    if (!bqPools.empty()) {
+        std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem =
+                std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        bqInvalidateItem->invalidate();
     }
     release();
 }
@@ -549,7 +552,26 @@
 }
 
 Return<Status> Component::release() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        if (!mClientDied) {
+            transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                     BqPoolFilterFn, BqPoolConvertFn);
+        }
+    }
+    std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem;
+    if (!bqPools.empty()) {
+        // Handle the rare case where the client process dies just after release() is called.
+        bqInvalidateItem = std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        C2BqPoolInvalidator::getInstance().queue(bqInvalidateItem);
+    }
     Status status = static_cast<Status>(mComponent->release());
+    if (bqInvalidateItem) {
+        // If release is not blocked,
+        // skip invalidation and finish ASAP.
+        bqInvalidateItem->skip();
+    }
     {
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
@@ -637,6 +659,18 @@
 }
 
 Component::~Component() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                     BqPoolFilterFn, BqPoolConvertFn);
+    }
+    if (!bqPools.empty()) {
+        LOG(ERROR) << "blockpools are not cleared yet at dtor";
+        std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem =
+                std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        C2BqPoolInvalidator::getInstance().queue(bqInvalidateItem);
+    }
     InputBufferManager::unregisterFrameData(mListener);
     mStore->reportComponentDeath(this);
 }
diff --git a/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
index 1ba1889..108ba06 100644
--- a/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
@@ -149,6 +149,14 @@
     }
 #endif
 
+    // MultiAccessUnit reflector helper is allocated once per store.
+    // All components in this store can reuse this reflector helper.
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        std::shared_ptr<C2ReflectorHelper> helper = std::make_shared<C2ReflectorHelper>();
+        mParamReflectors.push_back(helper);
+        mMultiAccessUnitReflector = helper;
+    }
+
     // Retrieve supported parameters from store
     using namespace std::placeholders;
     mInit = mConfigurable->init(mParameterCache);
@@ -207,12 +215,20 @@
     if (c2interface == nullptr) {
         return nullptr;
     }
+    // Framework support for Large audio frame feature depends on:
+    // 1. All feature flags enabled on platform
+    // 2. The capability of the implementation to use the same input buffer
+    //    for different C2Work (C2Config::api_feature_t::API_SAME_INPUT_BUFFER)
+    // 3. Implementation does not inherently support C2LargeFrame::output::PARAM_TYPE param.
     if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
         c2_status_t err = C2_OK;
         C2ComponentDomainSetting domain;
         std::vector<std::unique_ptr<C2Param>> heapParams;
-        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
-        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+        C2ApiFeaturesSetting features = (C2Config::api_feature_t)0;
+        err = c2interface->query_vb({&domain, &features}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK
+                && (domain.value == C2Component::DOMAIN_AUDIO)
+                && ((features.value & C2Config::api_feature_t::API_SAME_INPUT_BUFFER) != 0)) {
             std::vector<std::shared_ptr<C2ParamDescriptor>> params;
             bool isComponentSupportsLargeAudioFrame = false;
             c2interface->querySupportedParams_nb(&params);
@@ -225,7 +241,7 @@
             if (!isComponentSupportsLargeAudioFrame) {
                 multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
                         c2interface,
-                        std::static_pointer_cast<C2ReflectorHelper>(mParamReflectors[0]));
+                        mMultiAccessUnitReflector);
             }
         }
     }
diff --git a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
index 44b8ec1..028238b 100644
--- a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
@@ -55,6 +55,13 @@
 using ::android::sp;
 
 struct ComponentStore : public IComponentStore {
+    /**
+     * Constructor for ComponentStore.
+     *
+     * IMPORTANT: SetPreferredCodec2ComponentStore() is called in the constructor.
+     * Be careful about the order of SetPreferredCodec2ComponentStore() and
+     * ComponentStore() in the code.
+     */
     ComponentStore(const std::shared_ptr<C2ComponentStore>& store);
     virtual ~ComponentStore();
 
@@ -124,6 +131,9 @@
     std::shared_ptr<C2ComponentStore> mStore;
     std::vector<std::shared_ptr<C2ParamReflector>> mParamReflectors;
 
+    // Reflector helper for MultiAccessUnitHelper
+    std::shared_ptr<C2ReflectorHelper> mMultiAccessUnitReflector;
+
     std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
     std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
     std::set<C2String> mLoadedInterfaces;
diff --git a/media/codec2/hal/hidl/1.1/utils/Component.cpp b/media/codec2/hal/hidl/1.1/utils/Component.cpp
index 7f2c4dd..1c2a49a 100644
--- a/media/codec2/hal/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/Component.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "Codec2-Component@1.1"
 #include <android-base/logging.h>
 
+#include <codec2/common/BqPoolInvalidateHelper.h>
 #include <codec2/hidl/1.1/Component.h>
 #include <codec2/hidl/1.1/ComponentStore.h>
 #include <codec2/hidl/1.1/InputBufferManager.h>
@@ -32,6 +33,7 @@
 #include <codec2/common/MultiAccessUnitHelper.h>
 
 #include <C2BqBufferPriv.h>
+#include <C2BqPoolInvalidator.h>
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
@@ -274,16 +276,17 @@
 }
 
 void Component::onDeathReceived() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
     {
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mClientDied = true;
-        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
-            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
-                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
-                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
-                bqPool->invalidate();
-            }
-        }
+        transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                BqPoolFilterFn, BqPoolConvertFn);
+    }
+    if (!bqPools.empty()) {
+        std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem =
+                std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        bqInvalidateItem->invalidate();
     }
     release();
 }
@@ -555,7 +558,26 @@
 }
 
 Return<Status> Component::release() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        if (!mClientDied) {
+            transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                    BqPoolFilterFn, BqPoolConvertFn);
+        }
+    }
+    std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem;
+    if (!bqPools.empty()) {
+        // Handle the rare case where the client process dies just after release() is called.
+        bqInvalidateItem = std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        C2BqPoolInvalidator::getInstance().queue(bqInvalidateItem);
+    }
     Status status = static_cast<Status>(mComponent->release());
+    if (bqInvalidateItem) {
+        // If release is not blocked,
+        // skip invalidation and finish ASAP.
+        bqInvalidateItem->skip();
+    }
     {
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
@@ -649,6 +671,18 @@
 }
 
 Component::~Component() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                BqPoolFilterFn, BqPoolConvertFn);
+    }
+    if (!bqPools.empty()) {
+        LOG(ERROR) << "blockpools are not cleared yet at dtor";
+        std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem =
+                std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        C2BqPoolInvalidator::getInstance().queue(bqInvalidateItem);
+    }
     InputBufferManager::unregisterFrameData(mListener);
     mStore->reportComponentDeath(this);
 }
diff --git a/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
index 1b86958..84f5d26 100644
--- a/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
@@ -149,6 +149,14 @@
     }
 #endif
 
+    // MultiAccessUnit reflector helper is allocated once per store.
+    // All components in this store can reuse this reflector helper.
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        std::shared_ptr<C2ReflectorHelper> helper = std::make_shared<C2ReflectorHelper>();
+        mParamReflectors.push_back(helper);
+        mMultiAccessUnitReflector = helper;
+    }
+
     // Retrieve supported parameters from store
     using namespace std::placeholders;
     mInit = mConfigurable->init(mParameterCache);
@@ -207,12 +215,20 @@
     if (c2interface == nullptr) {
         return nullptr;
     }
+    // Framework support for Large audio frame feature depends on:
+    // 1. All feature flags enabled on platform
+    // 2. The capability of the implementation to use the same input buffer
+    //    for different C2Work (C2Config::api_feature_t::API_SAME_INPUT_BUFFER)
+    // 3. Implementation does not inherently support C2LargeFrame::output::PARAM_TYPE param.
     if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
         c2_status_t err = C2_OK;
         C2ComponentDomainSetting domain;
         std::vector<std::unique_ptr<C2Param>> heapParams;
-        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
-        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+        C2ApiFeaturesSetting features = (C2Config::api_feature_t)0;
+        err = c2interface->query_vb({&domain, &features}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK
+                && (domain.value == C2Component::DOMAIN_AUDIO)
+                && ((features.value & C2Config::api_feature_t::API_SAME_INPUT_BUFFER) != 0)) {
             std::vector<std::shared_ptr<C2ParamDescriptor>> params;
             bool isComponentSupportsLargeAudioFrame = false;
             c2interface->querySupportedParams_nb(&params);
@@ -222,11 +238,10 @@
                     break;
                 }
             }
-
             if (!isComponentSupportsLargeAudioFrame) {
                 multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
                         c2interface,
-                        std::static_pointer_cast<C2ReflectorHelper>(mParamReflectors[0]));
+                        mMultiAccessUnitReflector);
             }
         }
     }
diff --git a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
index 52d2945..b023115 100644
--- a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
@@ -56,6 +56,13 @@
 using ::android::sp;
 
 struct ComponentStore : public IComponentStore {
+    /**
+     * Constructor for ComponentStore.
+     *
+     * IMPORTANT: SetPreferredCodec2ComponentStore() is called inside this constructor.
+     * Be careful about the relative order of SetPreferredCodec2ComponentStore() and
+     * ComponentStore() calls in client code.
+     */
     ComponentStore(const std::shared_ptr<C2ComponentStore>& store);
     virtual ~ComponentStore();
 
@@ -132,6 +139,9 @@
     std::shared_ptr<C2ComponentStore> mStore;
     std::vector<std::shared_ptr<C2ParamReflector>> mParamReflectors;
 
+    // Reflector helper for MultiAccessUnitHelper
+    std::shared_ptr<C2ReflectorHelper> mMultiAccessUnitReflector;
+
     std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
     std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
     std::set<C2String> mLoadedInterfaces;
diff --git a/media/codec2/hal/hidl/1.2/utils/Component.cpp b/media/codec2/hal/hidl/1.2/utils/Component.cpp
index 7b0aa9b..a15febe 100644
--- a/media/codec2/hal/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/Component.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "Codec2-Component@1.2"
 #include <android-base/logging.h>
 
+#include <codec2/common/BqPoolInvalidateHelper.h>
 #include <codec2/hidl/1.2/Component.h>
 #include <codec2/hidl/1.2/ComponentStore.h>
 #include <codec2/hidl/1.2/InputBufferManager.h>
@@ -30,6 +31,7 @@
 #include <utils/Timers.h>
 
 #include <C2BqBufferPriv.h>
+#include <C2BqPoolInvalidator.h>
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
@@ -272,16 +274,17 @@
 }
 
 void Component::onDeathReceived() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
     {
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mClientDied = true;
-        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
-            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
-                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
-                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
-                bqPool->invalidate();
-            }
-        }
+        transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                BqPoolFilterFn, BqPoolConvertFn);
+    }
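+    // Client process died: invalidate the collected bufferqueue block pools right away
+    // instead of queueing them to the C2BqPoolInvalidator.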
+    if (!bqPools.empty()) {
+        std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem =
+                std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        bqInvalidateItem->invalidate();
     }
     release();
 }
@@ -551,7 +554,26 @@
 }
 
 Return<Status> Component::release() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        if (!mClientDied) {
+            transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                    BqPoolFilterFn, BqPoolConvertFn);
+        }
+    }
+    std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem;
+    if (!bqPools.empty()) {
+        // Handle the rare case where the client process dies just after release() is called.
+        bqInvalidateItem = std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        C2BqPoolInvalidator::getInstance().queue(bqInvalidateItem);
+    }
     Status status = static_cast<Status>(mComponent->release());
+    if (bqInvalidateItem) {
+        // release() completed without blocking, so skip the queued invalidation
+        // and let the invalidator finish ASAP.
+        bqInvalidateItem->skip();
+    }
     {
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
@@ -676,6 +698,18 @@
 }
 
 Component::~Component() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                BqPoolFilterFn, BqPoolConvertFn);
+    }
+    if (!bqPools.empty()) {
+        LOG(ERROR) << "blockpools are not cleared yet at dtor";
+        std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem =
+                std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        C2BqPoolInvalidator::getInstance().queue(bqInvalidateItem);
+    }
     InputBufferManager::unregisterFrameData(mListener);
     mStore->reportComponentDeath(this);
 }
diff --git a/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
index 2e0386f..5585be8 100644
--- a/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
@@ -149,6 +149,14 @@
     }
 #endif
 
+    // MultiAccessUnit reflector helper is allocated once per store.
+    // All components in this store can reuse this reflector helper.
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        std::shared_ptr<C2ReflectorHelper> helper = std::make_shared<C2ReflectorHelper>();
+        mParamReflectors.push_back(helper);
+        mMultiAccessUnitReflector = helper;
+    }
+
     // Retrieve supported parameters from store
     using namespace std::placeholders;
     mInit = mConfigurable->init(mParameterCache);
@@ -207,12 +215,20 @@
     if (c2interface == nullptr) {
         return nullptr;
     }
+    // Framework support for the large audio frame feature depends on:
+    // 1. All feature flags being enabled on the platform
+    // 2. The implementation being able to use the same input buffer
+    //    for different C2Work items (C2Config::api_feature_t::API_SAME_INPUT_BUFFER)
+    // 3. The implementation not already supporting the C2LargeFrame::output::PARAM_TYPE param.
     if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
         c2_status_t err = C2_OK;
         C2ComponentDomainSetting domain;
         std::vector<std::unique_ptr<C2Param>> heapParams;
-        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
-        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+        C2ApiFeaturesSetting features = (C2Config::api_feature_t)0;
+        err = c2interface->query_vb({&domain, &features}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK
+                && (domain.value == C2Component::DOMAIN_AUDIO)
+                && ((features.value & C2Config::api_feature_t::API_SAME_INPUT_BUFFER) != 0)) {
             std::vector<std::shared_ptr<C2ParamDescriptor>> params;
             bool isComponentSupportsLargeAudioFrame = false;
             c2interface->querySupportedParams_nb(&params);
@@ -225,7 +241,7 @@
             if (!isComponentSupportsLargeAudioFrame) {
                 multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
                         c2interface,
-                        std::static_pointer_cast<C2ReflectorHelper>(mParamReflectors[0]));
+                        mMultiAccessUnitReflector);
             }
         }
     }
diff --git a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
index 1b209e2..a7e043b 100644
--- a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
@@ -56,6 +56,13 @@
 using ::android::sp;
 
 struct ComponentStore : public IComponentStore {
+    /**
+     * Constructor for ComponentStore.
+     *
+     * IMPORTANT: SetPreferredCodec2ComponentStore() is called inside this constructor.
+     * Be careful about the relative order of SetPreferredCodec2ComponentStore() and
+     * ComponentStore() calls in client code.
+     */
     ComponentStore(const std::shared_ptr<C2ComponentStore>& store);
     virtual ~ComponentStore();
 
@@ -139,6 +146,9 @@
     std::shared_ptr<C2ComponentStore> mStore;
     std::vector<std::shared_ptr<C2ParamReflector>> mParamReflectors;
 
+    // Reflector helper for MultiAccessUnitHelper
+    std::shared_ptr<C2ReflectorHelper> mMultiAccessUnitReflector;
+
     std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
     std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
     std::set<C2String> mLoadedInterfaces;
diff --git a/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp b/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
index 47412b7..34872f0 100644
--- a/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
+++ b/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
@@ -856,21 +856,31 @@
     C2String getName() const override { return "android.sample.filter-plugin-store"; }
     c2_status_t createComponent(
             C2String name, std::shared_ptr<C2Component>* const component) override {
-        if (mFactories.count(name) == 0) {
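+        // Look up the factory whose traits name matches the requested component name.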
+        auto it = std::find_if(
+                mFactories.begin(), mFactories.end(),
+                [&name](const std::unique_ptr<ComponentFactory> &factory) {
+                    return name == factory->getTraits()->name;
+                });
+        if (it == mFactories.end()) {
             return C2_BAD_VALUE;
         }
-        return mFactories.at(name)->createComponent(++mNodeId, component);
+        return (*it)->createComponent(++mNodeId, component);
     }
     c2_status_t createInterface(
             C2String name, std::shared_ptr<C2ComponentInterface>* const interface) override {
-        if (mFactories.count(name) == 0) {
+        auto it = std::find_if(
+                mFactories.begin(), mFactories.end(),
+                [&name](const std::unique_ptr<ComponentFactory> &factory) {
+                    return name == factory->getTraits()->name;
+                });
+        if (it == mFactories.end()) {
             return C2_BAD_VALUE;
         }
-        return mFactories.at(name)->createInterface(++mNodeId, interface);
+        return (*it)->createInterface(++mNodeId, interface);
     }
     std::vector<std::shared_ptr<const C2Component::Traits>> listComponents() override {
         std::vector<std::shared_ptr<const C2Component::Traits>> ret;
-        for (const auto &[name, factory] : mFactories) {
+        for (const auto &factory : mFactories) {
             ret.push_back(factory->getTraits());
         }
         return ret;
@@ -951,20 +961,18 @@
 
     template <class T>
     static void AddFactory(
-            std::map<C2String, std::unique_ptr<ComponentFactory>> *factories,
+            std::vector<std::unique_ptr<ComponentFactory>> *factories,
             const std::shared_ptr<C2ReflectorHelper> &reflector) {
         std::shared_ptr<C2ComponentInterface> intf{new typename T::Interface(0, reflector)};
         std::shared_ptr<C2Component::Traits> traits(new (std::nothrow) C2Component::Traits);
         CHECK(C2InterfaceUtils::FillTraitsFromInterface(traits.get(), intf))
                 << "Failed to fill traits from interface";
-        factories->emplace(
-                traits->name,
-                new ComponentFactoryImpl<T>(traits, reflector));
+        factories->emplace_back(new ComponentFactoryImpl<T>(traits, reflector));
     }
 
-    static std::map<C2String, std::unique_ptr<ComponentFactory>> CreateFactories(
+    static std::vector<std::unique_ptr<ComponentFactory>> CreateFactories(
             const std::shared_ptr<C2ReflectorHelper> &reflector) {
-        std::map<C2String, std::unique_ptr<ComponentFactory>> factories;
+        std::vector<std::unique_ptr<ComponentFactory>> factories;
         AddFactory<SampleToneMappingFilter>(&factories, reflector);
         return factories;
     }
@@ -977,7 +985,7 @@
         }
     } mIntf;
 
-    const std::map<C2String, std::unique_ptr<ComponentFactory>> mFactories;
+    const std::vector<std::unique_ptr<ComponentFactory>> mFactories;
 
     std::atomic_int32_t mNodeId{0};
 };
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 3c8c1b7..cc5d10c 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -16,6 +16,10 @@
 cc_library_shared {
     name: "libsfplugin_ccodec",
 
+    defaults: [
+        "android.hardware.graphics.common-ndk_shared",
+    ],
+
     export_include_dirs: ["include"],
 
     srcs: [
@@ -46,6 +50,7 @@
     ],
 
     static_libs: [
+        "libPlatformProperties",
         "libSurfaceFlingerProperties",
         "aconfig_mediacodec_flags_c_lib",
         "android.media.codec-aconfig-cc",
@@ -56,7 +61,6 @@
         "android.hardware.drm@1.0",
         "android.hardware.media.c2@1.0",
         "android.hardware.media.omx@1.0",
-        "android.hardware.graphics.common-V5-ndk",
         "graphicbuffersource-aidl-ndk",
         "libbase",
         "libbinder",
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 64d8f07..72b5a61 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -96,11 +96,14 @@
 
 public:
     static sp<CCodecWatchdog> getInstance() {
-        static sp<CCodecWatchdog> instance(new CCodecWatchdog);
-        static std::once_flag flag;
-        // Call Init() only once.
-        std::call_once(flag, Init, instance);
-        return instance;
+        static sp<CCodecWatchdog> sInstance = [] {
+            sp<CCodecWatchdog> instance = new CCodecWatchdog;
+            // hold an extra strong ref so the instance is never destructed
+            instance->incStrong((void *)CCodecWatchdog::getInstance);
+            instance->init();
+            return instance;
+        }();
+        return sInstance;
     }
 
     ~CCodecWatchdog() = default;
@@ -146,11 +149,11 @@
 private:
     CCodecWatchdog() : mLooper(new ALooper) {}
 
-    static void Init(const sp<CCodecWatchdog> &thiz) {
-        ALOGV("Init");
-        thiz->mLooper->setName("CCodecWatchdog");
-        thiz->mLooper->registerHandler(thiz);
-        thiz->mLooper->start();
+    void init() {
+        ALOGV("init");
+        mLooper->setName("CCodecWatchdog");
+        mLooper->registerHandler(this);
+        mLooper->start();
     }
 
     sp<ALooper> mLooper;
@@ -509,9 +512,14 @@
         uint64_t usage = mConfig.mUsage;
         (void)(*node)->setConsumerUsage((int64_t)usage);
 
+        // AIDL does not define legacy dataspaces; convert to a V0 dataspace when the flag is enabled.
+        android_dataspace_t dataspace = mDataSpace;
+        if (android::media::codec::provider_->dataspace_v0_partial()) {
+            ColorUtils::convertDataSpaceToV0(dataspace);
+        }
         return fromAidlStatus(mSource->configure(
                 (*node), static_cast<::aidl::android::hardware::graphics::common::Dataspace>(
-                        mDataSpace)));
+                        dataspace)));
     }
 
     void disconnect() override {
@@ -2673,7 +2681,7 @@
                         mChannel->setInfoBuffer(std::make_shared<C2InfoBuffer>(info));
                     }
                 } else {
-                    ALOGE("Ignoring param key %s as buffer size %d is less than expected "
+                    ALOGE("Ignoring param key %s as buffer size %zu is less than expected "
                           "buffer size %d",
                           PARAMETER_KEY_QP_OFFSET_MAP, mapSize, expectedMapSize);
                 }
@@ -2869,6 +2877,7 @@
 
             // handle configuration changes in work done
             std::shared_ptr<const C2StreamInitDataInfo::output> initData;
+            sp<AMessage> inputFormat = nullptr;
             sp<AMessage> outputFormat = nullptr;
             {
                 Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
@@ -2956,10 +2965,12 @@
                             initData->m.value, initData->flexCount(), config->mCodingMediaType,
                             config->mOutputFormat);
                 }
+                inputFormat = config->mInputFormat;
                 outputFormat = config->mOutputFormat;
             }
             mChannel->onWorkDone(
-                    std::move(work), outputFormat, initData ? initData.get() : nullptr);
+                    std::move(work), inputFormat, outputFormat,
+                    initData ? initData.get() : nullptr);
             // log metrics to MediaCodec
             if (mMetrics->countEntries() == 0) {
                 Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 45353a0..d67a876 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -38,6 +38,7 @@
 
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/drm/1.0/types.h>
+#include <android/sysprop/MediaProperties.sysprop.h>
 #include <android-base/parseint.h>
 #include <android-base/properties.h>
 #include <android-base/no_destructor.h>
@@ -311,8 +312,18 @@
         Mutexed<BlockPools>::Locked pools(mBlockPools);
         pools->outputPoolId = C2BlockPool::BASIC_LINEAR;
     }
-    std::string value = GetServerConfigurableFlag("media_native", "ccodec_rendering_depth", "3");
-    android::base::ParseInt(value, &mRenderingDepth);
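+    // With rendering_depth_removal enabled, use a rendering depth of 0 on devices whose vendor
+    // API level is newer than 202404 or that opt in via sysprop; otherwise keep using the
+    // server-configurable flag (default 3).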
+    if (android::media::codec::provider_->rendering_depth_removal()) {
+        constexpr int kAndroidApi202404 = 202404;
+        int vendorVersion = ::android::base::GetIntProperty("ro.vendor.api_level", -1);
+        using ::android::sysprop::MediaProperties::codec2_remove_rendering_depth;
+        if (vendorVersion > kAndroidApi202404 || codec2_remove_rendering_depth().value_or(false)) {
+            mRenderingDepth = 0;
+        }
+    } else {
+        std::string value = GetServerConfigurableFlag(
+                "media_native", "ccodec_rendering_depth", "3");
+        android::base::ParseInt(value, &mRenderingDepth);
+    }
     mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor + mRenderingDepth;
 }
 
@@ -324,7 +335,7 @@
 
 void CCodecBufferChannel::setComponent(
         const std::shared_ptr<Codec2Client::Component> &component) {
-    mComponent = component;
+    std::atomic_store(&mComponent, component);
     mComponentName = component->getName() + StringPrintf("#%d", int(uintptr_t(component.get()) % 997));
     mName = mComponentName.c_str();
 }
@@ -340,7 +351,7 @@
     inputSurface->numProcessingBuffersBalance = 0;
     inputSurface->surface = surface;
     mHasInputSurface = true;
-    return inputSurface->surface->connect(mComponent);
+    return inputSurface->surface->connect(std::atomic_load(&mComponent));
 }
 
 status_t CCodecBufferChannel::signalEndOfInputStream() {
@@ -536,7 +547,7 @@
                         now);
             }
         }
-        err = mComponent->queue(&items);
+        err = std::atomic_load(&mComponent)->queue(&items);
     }
     if (err != C2_OK) {
         Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
@@ -1446,7 +1457,7 @@
     qbi.setSurfaceDamage(Region::INVALID_REGION); // we don't have dirty regions
     qbi.getFrameTimestamps = true; // we need to know when a frame is rendered
     IGraphicBufferProducer::QueueBufferOutput qbo;
-    status_t result = mComponent->queueToOutputSurface(block, qbi, &qbo);
+    status_t result = std::atomic_load(&mComponent)->queueToOutputSurface(block, qbi, &qbo);
     if (result != OK) {
         ALOGI("[%s] queueBuffer failed: %d", mName, result);
         if (result == NO_INIT) {
@@ -1585,7 +1596,7 @@
 
 void CCodecBufferChannel::pollForRenderedBuffers() {
     FrameEventHistoryDelta delta;
-    mComponent->pollForRenderedFrames(&delta);
+    std::atomic_load(&mComponent)->pollForRenderedFrames(&delta);
     processRenderedFrames(delta);
 }
 
@@ -1594,7 +1605,7 @@
     // knowing the internal state of CCodec/CCodecBufferChannel,
     // prevent mComponent from being destroyed by holding the shared reference
     // during this interface being executed.
-    std::shared_ptr<Codec2Client::Component> comp = mComponent;
+    std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
     if (comp) {
       SurfaceCallbackHandler::GetInstance().post(
                 SurfaceCallbackHandler::ON_BUFFER_RELEASED, comp, generation);
@@ -1606,7 +1617,7 @@
     // knowing the internal state of CCodec/CCodecBufferChannel,
     // prevent mComponent from being destroyed by holding the shared reference
     // during this interface being executed.
-    std::shared_ptr<Codec2Client::Component> comp = mComponent;
+    std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
     if (comp) {
       SurfaceCallbackHandler::GetInstance().post(
                 SurfaceCallbackHandler::ON_BUFFER_ATTACHED, comp, generation);
@@ -1680,7 +1691,7 @@
     C2ActualPipelineDelayTuning pipelineDelay(0);
     C2SecureModeTuning secureMode(C2Config::SM_UNPROTECTED);
 
-    c2_status_t err = mComponent->query(
+    c2_status_t err = std::atomic_load(&mComponent)->query(
             {
                 &iStreamFormat,
                 &oStreamFormat,
@@ -1711,7 +1722,7 @@
     size_t numOutputSlots = outputDelayValue + kSmoothnessFactor;
 
     // TODO: get this from input format
-    bool secure = mComponent->getName().find(".secure") != std::string::npos;
+    bool secure = std::atomic_load(&mComponent)->getName().find(".secure") != std::string::npos;
 
     // secure mode is a static parameter (shall not change in the executing state)
     mSendEncryptedInfoBuffer = secureMode.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED;
@@ -1757,7 +1768,7 @@
                 channelCount.invalidate();
                 pcmEncoding.invalidate();
             }
-            err = mComponent->query(stackParams,
+            err = std::atomic_load(&mComponent)->query(stackParams,
                                     { C2PortAllocatorsTuning::input::PARAM_TYPE },
                                     C2_DONT_BLOCK,
                                     &params);
@@ -1825,6 +1836,9 @@
                     channelCount.value,
                     pcmEncoding ? pcmEncoding.value : C2Config::PCM_16);
         }
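+        // Advertise the number of input slots to the client when input buffers are not bound
+        // to the codec.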
+        if (!buffersBoundToCodec) {
+            inputFormat->setInt32(KEY_NUM_SLOTS, numInputSlots);
+        }
         bool conforming = (apiFeatures & API_SAME_INPUT_BUFFER);
         // For encrypted content, framework decrypts source buffer (ashmem) into
         // C2Buffers. Thus non-conforming codecs can process these.
@@ -1918,7 +1932,7 @@
             // query C2PortAllocatorsTuning::output from component, or use default allocator if
             // unsuccessful.
             std::vector<std::unique_ptr<C2Param>> params;
-            err = mComponent->query({ },
+            err = std::atomic_load(&mComponent)->query({ },
                                     { C2PortAllocatorsTuning::output::PARAM_TYPE },
                                     C2_DONT_BLOCK,
                                     &params);
@@ -1946,7 +1960,7 @@
             // if unsuccessful.
             if (outputSurface) {
                 params.clear();
-                err = mComponent->query({ },
+                err = std::atomic_load(&mComponent)->query({ },
                                         { C2PortSurfaceAllocatorTuning::output::PARAM_TYPE },
                                         C2_DONT_BLOCK,
                                         &params);
@@ -1977,7 +1991,7 @@
             }
 
             if ((poolMask >> pools->outputAllocatorId) & 1) {
-                err = mComponent->createBlockPool(
+                err = std::atomic_load(&mComponent)->createBlockPool(
                         pools->outputAllocatorId, &pools->outputPoolId, &pools->outputPoolIntf);
                 ALOGI("[%s] Created output block pool with allocatorID %u => poolID %llu - %s",
                         mName, pools->outputAllocatorId,
@@ -1998,7 +2012,8 @@
                     C2PortBlockPoolsTuning::output::AllocUnique({ pools->outputPoolId });
 
             std::vector<std::unique_ptr<C2SettingResult>> failures;
-            err = mComponent->config({ poolIdsTuning.get() }, C2_MAY_BLOCK, &failures);
+            err = std::atomic_load(&mComponent)->config(
+                    { poolIdsTuning.get() }, C2_MAY_BLOCK, &failures);
             ALOGD("[%s] Configured output block pool ids %llu => %s",
                     mName, (unsigned long long)poolIdsTuning->m.values[0], asString(err));
             outputPoolId_ = pools->outputPoolId;
@@ -2006,7 +2021,7 @@
 
         if (prevOutputPoolId != C2BlockPool::BASIC_LINEAR
                 && prevOutputPoolId != C2BlockPool::BASIC_GRAPHIC) {
-            c2_status_t err = mComponent->destroyBlockPool(prevOutputPoolId);
+            c2_status_t err = std::atomic_load(&mComponent)->destroyBlockPool(prevOutputPoolId);
             if (err != C2_OK) {
                 ALOGW("Failed to clean up previous block pool %llu - %s (%d)\n",
                         (unsigned long long) prevOutputPoolId, asString(err), err);
@@ -2038,7 +2053,7 @@
 
         // Try to set output surface to created block pool if given.
         if (outputSurface) {
-            mComponent->setOutputSurface(
+            std::atomic_load(&mComponent)->setOutputSurface(
                     outputPoolId_,
                     outputSurface,
                     outputGeneration,
@@ -2047,7 +2062,7 @@
             // configure CPU read consumer usage
             C2StreamUsageTuning::output outputUsage{0u, C2MemoryUsage::CPU_READ};
             std::vector<std::unique_ptr<C2SettingResult>> failures;
-            err = mComponent->config({ &outputUsage }, C2_MAY_BLOCK, &failures);
+            err = std::atomic_load(&mComponent)->config({ &outputUsage }, C2_MAY_BLOCK, &failures);
             // do not print error message for now as most components may not yet
             // support this setting
             ALOGD_IF(err != C2_BAD_INDEX, "[%s] Configured output usage [%#llx]",
@@ -2159,9 +2174,18 @@
 
 status_t CCodecBufferChannel::requestInitialInputBuffers(
         std::map<size_t, sp<MediaCodecBuffer>> &&clientInputBuffers) {
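+    // With codec_buffer_state_cleanup enabled, skip queueing initial input buffers unless the
+    // channel is running.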
+    std::optional<QueueGuard> guard;
+    if (android::media::codec::provider_->codec_buffer_state_cleanup()) {
+        guard.emplace(mSync);
+        if (!guard->isRunning()) {
+            ALOGD("[%s] skip requestInitialInputBuffers when not running", mName);
+            return OK;
+        }
+    }
     C2StreamBufferTypeSetting::output oStreamFormat(0u);
     C2PrependHeaderModeSetting prepend(PREPEND_HEADER_TO_NONE);
-    c2_status_t err = mComponent->query({ &oStreamFormat, &prepend }, {}, C2_DONT_BLOCK, nullptr);
+    c2_status_t err = std::atomic_load(&mComponent)->query(
+            { &oStreamFormat, &prepend }, {}, C2_DONT_BLOCK, nullptr);
     if (err != C2_OK && err != C2_BAD_INDEX) {
         return UNKNOWN_ERROR;
     }
@@ -2179,7 +2203,7 @@
                         now);
             }
         }
-        err = mComponent->queue(&flushedConfigs);
+        err = std::atomic_load(&mComponent)->queue(&flushedConfigs);
         if (err != C2_OK) {
             ALOGW("[%s] Error while queueing a flushed config", mName);
             return UNKNOWN_ERROR;
@@ -2230,7 +2254,8 @@
             Mutexed<BlockPools>::Locked pools(mBlockPools);
             outputPoolId = pools->outputPoolId;
         }
-        if (mComponent) mComponent->stopUsingOutputSurface(outputPoolId);
+        std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
+        if (comp) comp->stopUsingOutputSurface(outputPoolId);
 
         if (pushBlankBuffer) {
             sp<ANativeWindow> anw = static_cast<ANativeWindow *>(surface.get());
@@ -2264,7 +2289,8 @@
 
 void CCodecBufferChannel::release() {
     mInfoBuffers.clear();
-    mComponent.reset();
+    std::shared_ptr<Codec2Client::Component> nullComp;
+    std::atomic_store(&mComponent, nullComp);
     mInputAllocator.reset();
     mOutputSurface.lock()->surface.clear();
     {
@@ -2333,9 +2359,11 @@
 }
 
 void CCodecBufferChannel::onWorkDone(
-        std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
+        std::unique_ptr<C2Work> work,
+        const sp<AMessage> &inputFormat,
+        const sp<AMessage> &outputFormat,
         const C2StreamInitDataInfo::output *initData) {
-    if (handleWork(std::move(work), outputFormat, initData)) {
+    if (handleWork(std::move(work), inputFormat, outputFormat, initData)) {
         feedInputBufferIfAvailable();
     }
 }
@@ -2365,6 +2393,7 @@
 
 bool CCodecBufferChannel::handleWork(
         std::unique_ptr<C2Work> work,
+        const sp<AMessage> &inputFormat,
         const sp<AMessage> &outputFormat,
         const C2StreamInitDataInfo::output *initData) {
     {
@@ -2538,6 +2567,9 @@
         } else {
             input->numSlots = newNumSlots;
         }
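+        // Keep the advertised input slot count in sync if it was published in the input format.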
+        if (inputFormat->contains(KEY_NUM_SLOTS)) {
+            inputFormat->setInt32(KEY_NUM_SLOTS, input->numSlots);
+        }
     }
     size_t numOutputSlots = 0;
     uint32_t reorderDepth = 0;
@@ -2586,7 +2618,7 @@
             }
         }
         if (maxDequeueCount > 0) {
-            mComponent->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
+            std::atomic_load(&mComponent)->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
         }
     }
 
@@ -2753,8 +2785,8 @@
                                 bufferMetadata->m.values[nMeta];
                         flag = convertFlags(bufferMetadataStruct.flags, false);
                         accessUnitInfos.emplace_back(flag,
-                                static_cast<size_t>(bufferMetadataStruct.size),
-                                static_cast<size_t>(bufferMetadataStruct.timestamp));
+                                bufferMetadataStruct.size,
+                                bufferMetadataStruct.timestamp);
                     }
                     sp<WrapperObject<std::vector<AccessUnitInfo>>> obj{
                         new WrapperObject<std::vector<AccessUnitInfo>>{accessUnitInfos}};
@@ -2834,7 +2866,7 @@
     }
 
     if (outputPoolIntf) {
-        if (mComponent->setOutputSurface(
+        if (std::atomic_load(&mComponent)->setOutputSurface(
                 outputPoolId,
                 producer,
                 generation,
@@ -2948,7 +2980,6 @@
         work->input.infoBuffers.emplace_back(*buffer);
         work->worklets.emplace_back(new C2Worklet);
         items.push_back(std::move(work));
-        c2_status_t err = mComponent->queue(&items);
     }
 }
 
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 4d296fd..6493b87 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -193,12 +193,15 @@
     /**
      * Notify input client about work done.
      *
-     * @param workItems   finished work item.
+     * @param work          finished work item.
+     * @param inputFormat  input format
      * @param outputFormat new output format if it has changed, otherwise nullptr
-     * @param initData    new init data (CSD) if it has changed, otherwise nullptr
+     * @param initData     new init data (CSD) if it has changed, otherwise nullptr
      */
     void onWorkDone(
-            std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
+            std::unique_ptr<C2Work> work,
+            const sp<AMessage> &inputFormat,
+            const sp<AMessage> &outputFormat,
             const C2StreamInitDataInfo::output *initData);
 
     /**
@@ -311,7 +314,9 @@
                                       std::shared_ptr<C2LinearBlock> encryptedBlock = nullptr,
                                       size_t blockSize = 0);
     bool handleWork(
-            std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
+            std::unique_ptr<C2Work> work,
+            const sp<AMessage> &inputFormat,
+            const sp<AMessage> &outputFormat,
             const C2StreamInitDataInfo::output *initData);
     void sendOutputBuffers();
     void ensureDecryptDestination(size_t size);
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 36725ec..897a696 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -1896,7 +1896,8 @@
                 std::vector<C2QpOffsetRectStruct> c2QpOffsetRects;
                 char mutableStrQpOffsetRects[strlen(qpOffsetRects.c_str()) + 1];
                 strcpy(mutableStrQpOffsetRects, qpOffsetRects.c_str());
-                char* box = strtok(mutableStrQpOffsetRects, ";");
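+                // Use the reentrant strtok_r so tokenizing does not clobber global state.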
+                char* savePtr;
+                char* box = strtok_r(mutableStrQpOffsetRects, ";", &savePtr);
                 while (box != nullptr) {
                     int top, left, bottom, right, offset;
                     if (sscanf(box, "%d,%d-%d,%d=%d", &top, &left, &bottom, &right, &offset) == 5) {
@@ -1906,7 +1907,7 @@
                         bottom = c2_min(bottom, height);
                         if (right > left && bottom > top) {
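+                            // Note: C2Rect::at() returns a repositioned copy, so assign its
+                            // result back for the offset to take effect.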
                             C2Rect rect(right - left, bottom - top);
-                            rect.at(left, top);
+                            rect = rect.at(left, top);
                             c2QpOffsetRects.push_back(C2QpOffsetRectStruct(rect, offset));
                         } else {
                             ALOGE("Rects configuration %s is not valid.", box);
@@ -1914,7 +1915,7 @@
                     } else {
                         ALOGE("Rects configuration %s doesn't follow the string pattern.", box);
                     }
-                    box = strtok(nullptr, ";");
+                    box = strtok_r(nullptr, ";", &savePtr);
                 }
                 if (c2QpOffsetRects.size() != 0) {
                     const std::unique_ptr<C2StreamQpOffsetRects::output> regions =
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 2550dcf..7d4e8ab 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -20,6 +20,8 @@
 #include <utils/Log.h>
 #include <utils/Trace.h>
 
+#include <android_media_codec.h>
+
 #include <aidl/android/hardware/graphics/common/Cta861_3.h>
 #include <aidl/android/hardware/graphics/common/Smpte2086.h>
 #include <android-base/no_destructor.h>
@@ -33,6 +35,7 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <mediadrm/ICrypto.h>
 #include <nativebase/nativebase.h>
 #include <ui/GraphicBufferMapper.h>
@@ -43,6 +46,7 @@
 #include <C2Debug.h>
 
 #include "Codec2Buffer.h"
+#include "Codec2BufferUtils.h"
 
 namespace android {
 
@@ -215,482 +219,6 @@
     mBufferRef.reset();
 }
 
-// GraphicView2MediaImageConverter
-
-namespace {
-
-class GraphicView2MediaImageConverter {
-public:
-    /**
-     * Creates a C2GraphicView <=> MediaImage converter
-     *
-     * \param view C2GraphicView object
-     * \param format buffer format
-     * \param copy whether the converter is used for copy or not
-     */
-    GraphicView2MediaImageConverter(
-            const C2GraphicView &view, const sp<AMessage> &format, bool copy)
-        : mInitCheck(NO_INIT),
-          mView(view),
-          mWidth(view.width()),
-          mHeight(view.height()),
-          mAllocatedDepth(0),
-          mBackBufferSize(0),
-          mMediaImage(new ABuffer(sizeof(MediaImage2))) {
-        ATRACE_CALL();
-        if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
-            mClientColorFormat = COLOR_FormatYUV420Flexible;
-        }
-        if (!format->findInt32("android._color-format", &mComponentColorFormat)) {
-            mComponentColorFormat = COLOR_FormatYUV420Flexible;
-        }
-        if (view.error() != C2_OK) {
-            ALOGD("Converter: view.error() = %d", view.error());
-            mInitCheck = BAD_VALUE;
-            return;
-        }
-        MediaImage2 *mediaImage = (MediaImage2 *)mMediaImage->base();
-        const C2PlanarLayout &layout = view.layout();
-        if (layout.numPlanes == 0) {
-            ALOGD("Converter: 0 planes");
-            mInitCheck = BAD_VALUE;
-            return;
-        }
-        memset(mediaImage, 0, sizeof(*mediaImage));
-        mAllocatedDepth = layout.planes[0].allocatedDepth;
-        uint32_t bitDepth = layout.planes[0].bitDepth;
-
-        // align width and height to support subsampling cleanly
-        uint32_t stride = align(view.crop().width, 2) * divUp(layout.planes[0].allocatedDepth, 8u);
-        uint32_t vStride = align(view.crop().height, 2);
-
-        bool tryWrapping = !copy;
-
-        switch (layout.type) {
-            case C2PlanarLayout::TYPE_YUV: {
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
-                if (layout.numPlanes != 3) {
-                    ALOGD("Converter: %d planes for YUV layout", layout.numPlanes);
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                std::optional<int> clientBitDepth = {};
-                switch (mClientColorFormat) {
-                    case COLOR_FormatYUVP010:
-                        clientBitDepth = 10;
-                        break;
-                    case COLOR_FormatYUV411PackedPlanar:
-                    case COLOR_FormatYUV411Planar:
-                    case COLOR_FormatYUV420Flexible:
-                    case COLOR_FormatYUV420PackedPlanar:
-                    case COLOR_FormatYUV420PackedSemiPlanar:
-                    case COLOR_FormatYUV420Planar:
-                    case COLOR_FormatYUV420SemiPlanar:
-                    case COLOR_FormatYUV422Flexible:
-                    case COLOR_FormatYUV422PackedPlanar:
-                    case COLOR_FormatYUV422PackedSemiPlanar:
-                    case COLOR_FormatYUV422Planar:
-                    case COLOR_FormatYUV422SemiPlanar:
-                    case COLOR_FormatYUV444Flexible:
-                    case COLOR_FormatYUV444Interleaved:
-                        clientBitDepth = 8;
-                        break;
-                    default:
-                        // no-op; used with optional
-                        break;
-
-                }
-                // conversion fails if client bit-depth and the component bit-depth differs
-                if ((clientBitDepth) && (bitDepth != clientBitDepth.value())) {
-                    ALOGD("Bit depth of client: %d and component: %d differs",
-                        *clientBitDepth, bitDepth);
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                C2PlaneInfo yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
-                C2PlaneInfo uPlane = layout.planes[C2PlanarLayout::PLANE_U];
-                C2PlaneInfo vPlane = layout.planes[C2PlanarLayout::PLANE_V];
-                if (yPlane.channel != C2PlaneInfo::CHANNEL_Y
-                        || uPlane.channel != C2PlaneInfo::CHANNEL_CB
-                        || vPlane.channel != C2PlaneInfo::CHANNEL_CR) {
-                    ALOGD("Converter: not YUV layout");
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                bool yuv420888 = yPlane.rowSampling == 1 && yPlane.colSampling == 1
-                        && uPlane.rowSampling == 2 && uPlane.colSampling == 2
-                        && vPlane.rowSampling == 2 && vPlane.colSampling == 2;
-                if (yuv420888) {
-                    for (uint32_t i = 0; i < 3; ++i) {
-                        const C2PlaneInfo &plane = layout.planes[i];
-                        if (plane.allocatedDepth != 8 || plane.bitDepth != 8) {
-                            yuv420888 = false;
-                            break;
-                        }
-                    }
-                    yuv420888 = yuv420888 && yPlane.colInc == 1 && uPlane.rowInc == vPlane.rowInc;
-                }
-                int32_t copyFormat = mClientColorFormat;
-                if (yuv420888 && mClientColorFormat == COLOR_FormatYUV420Flexible) {
-                    if (uPlane.colInc == 2 && vPlane.colInc == 2
-                            && yPlane.rowInc == uPlane.rowInc) {
-                        copyFormat = COLOR_FormatYUV420PackedSemiPlanar;
-                    } else if (uPlane.colInc == 1 && vPlane.colInc == 1
-                            && yPlane.rowInc == uPlane.rowInc * 2) {
-                        copyFormat = COLOR_FormatYUV420PackedPlanar;
-                    }
-                }
-                ALOGV("client_fmt=0x%x y:{colInc=%d rowInc=%d} u:{colInc=%d rowInc=%d} "
-                        "v:{colInc=%d rowInc=%d}",
-                        mClientColorFormat,
-                        yPlane.colInc, yPlane.rowInc,
-                        uPlane.colInc, uPlane.rowInc,
-                        vPlane.colInc, vPlane.rowInc);
-                switch (copyFormat) {
-                    case COLOR_FormatYUV420Flexible:
-                    case COLOR_FormatYUV420Planar:
-                    case COLOR_FormatYUV420PackedPlanar:
-                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
-                        mediaImage->mPlane[mediaImage->Y].mColInc = 1;
-                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
-                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
-
-                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
-                        mediaImage->mPlane[mediaImage->U].mColInc = 1;
-                        mediaImage->mPlane[mediaImage->U].mRowInc = stride / 2;
-                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
-
-                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 5 / 4;
-                        mediaImage->mPlane[mediaImage->V].mColInc = 1;
-                        mediaImage->mPlane[mediaImage->V].mRowInc = stride / 2;
-                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
-
-                        if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
-                            tryWrapping = yuv420888 && uPlane.colInc == 1 && vPlane.colInc == 1
-                                    && yPlane.rowInc == uPlane.rowInc * 2
-                                    && view.data()[0] < view.data()[1]
-                                    && view.data()[1] < view.data()[2];
-                        }
-                        break;
-
-                    case COLOR_FormatYUV420SemiPlanar:
-                    case COLOR_FormatYUV420PackedSemiPlanar:
-                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
-                        mediaImage->mPlane[mediaImage->Y].mColInc = 1;
-                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
-                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
-
-                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
-                        mediaImage->mPlane[mediaImage->U].mColInc = 2;
-                        mediaImage->mPlane[mediaImage->U].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
-
-                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 1;
-                        mediaImage->mPlane[mediaImage->V].mColInc = 2;
-                        mediaImage->mPlane[mediaImage->V].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
-
-                        if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
-                            tryWrapping = yuv420888 && uPlane.colInc == 2 && vPlane.colInc == 2
-                                    && yPlane.rowInc == uPlane.rowInc
-                                    && view.data()[0] < view.data()[1]
-                                    && view.data()[1] < view.data()[2];
-                        }
-                        break;
-
-                    case COLOR_FormatYUVP010:
-                        // stride is in bytes
-                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
-                        mediaImage->mPlane[mediaImage->Y].mColInc = 2;
-                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
-                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
-
-                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
-                        mediaImage->mPlane[mediaImage->U].mColInc = 4;
-                        mediaImage->mPlane[mediaImage->U].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
-
-                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 2;
-                        mediaImage->mPlane[mediaImage->V].mColInc = 4;
-                        mediaImage->mPlane[mediaImage->V].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
-                        if (tryWrapping) {
-                            tryWrapping = yPlane.allocatedDepth == 16
-                                    && uPlane.allocatedDepth == 16
-                                    && vPlane.allocatedDepth == 16
-                                    && yPlane.bitDepth == 10
-                                    && uPlane.bitDepth == 10
-                                    && vPlane.bitDepth == 10
-                                    && yPlane.rightShift == 6
-                                    && uPlane.rightShift == 6
-                                    && vPlane.rightShift == 6
-                                    && yPlane.rowSampling == 1 && yPlane.colSampling == 1
-                                    && uPlane.rowSampling == 2 && uPlane.colSampling == 2
-                                    && vPlane.rowSampling == 2 && vPlane.colSampling == 2
-                                    && yPlane.colInc == 2
-                                    && uPlane.colInc == 4
-                                    && vPlane.colInc == 4
-                                    && yPlane.rowInc == uPlane.rowInc
-                                    && yPlane.rowInc == vPlane.rowInc;
-                        }
-                        break;
-
-                    default: {
-                        // default to fully planar format --- this will be overridden if wrapping
-                        // TODO: keep interleaved format
-                        int32_t colInc = divUp(mAllocatedDepth, 8u);
-                        int32_t rowInc = stride * colInc / yPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
-                        mediaImage->mPlane[mediaImage->Y].mColInc = colInc;
-                        mediaImage->mPlane[mediaImage->Y].mRowInc = rowInc;
-                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = yPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = yPlane.rowSampling;
-                        int32_t offset = rowInc * vStride / yPlane.rowSampling;
-
-                        rowInc = stride * colInc / uPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->U].mOffset = offset;
-                        mediaImage->mPlane[mediaImage->U].mColInc = colInc;
-                        mediaImage->mPlane[mediaImage->U].mRowInc = rowInc;
-                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = uPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = uPlane.rowSampling;
-                        offset += rowInc * vStride / uPlane.rowSampling;
-
-                        rowInc = stride * colInc / vPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->V].mOffset = offset;
-                        mediaImage->mPlane[mediaImage->V].mColInc = colInc;
-                        mediaImage->mPlane[mediaImage->V].mRowInc = rowInc;
-                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = vPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = vPlane.rowSampling;
-                        break;
-                    }
-                }
-                break;
-            }
-
-            case C2PlanarLayout::TYPE_YUVA:
-                ALOGD("Converter: unrecognized color format "
-                        "(client %d component %d) for YUVA layout",
-                        mClientColorFormat, mComponentColorFormat);
-                mInitCheck = NO_INIT;
-                return;
-            case C2PlanarLayout::TYPE_RGB:
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGB;
-                // TODO: support MediaImage layout
-                switch (mClientColorFormat) {
-                    case COLOR_FormatSurface:
-                    case COLOR_FormatRGBFlexible:
-                    case COLOR_Format24bitBGR888:
-                    case COLOR_Format24bitRGB888:
-                        ALOGD("Converter: accept color format "
-                                "(client %d component %d) for RGB layout",
-                                mClientColorFormat, mComponentColorFormat);
-                        break;
-                    default:
-                        ALOGD("Converter: unrecognized color format "
-                                "(client %d component %d) for RGB layout",
-                                mClientColorFormat, mComponentColorFormat);
-                        mInitCheck = BAD_VALUE;
-                        return;
-                }
-                if (layout.numPlanes != 3) {
-                    ALOGD("Converter: %d planes for RGB layout", layout.numPlanes);
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                break;
-            case C2PlanarLayout::TYPE_RGBA:
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGBA;
-                // TODO: support MediaImage layout
-                switch (mClientColorFormat) {
-                    case COLOR_FormatSurface:
-                    case COLOR_FormatRGBAFlexible:
-                    case COLOR_Format32bitABGR8888:
-                    case COLOR_Format32bitARGB8888:
-                    case COLOR_Format32bitBGRA8888:
-                        ALOGD("Converter: accept color format "
-                                "(client %d component %d) for RGBA layout",
-                                mClientColorFormat, mComponentColorFormat);
-                        break;
-                    default:
-                        ALOGD("Converter: unrecognized color format "
-                                "(client %d component %d) for RGBA layout",
-                                mClientColorFormat, mComponentColorFormat);
-                        mInitCheck = BAD_VALUE;
-                        return;
-                }
-                if (layout.numPlanes != 4) {
-                    ALOGD("Converter: %d planes for RGBA layout", layout.numPlanes);
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                break;
-            default:
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
-                if (layout.numPlanes == 1) {
-                    const C2PlaneInfo &plane = layout.planes[0];
-                    if (plane.colInc < 0 || plane.rowInc < 0) {
-                        // Copy-only if we have negative colInc/rowInc
-                        tryWrapping = false;
-                    }
-                    mediaImage->mPlane[0].mOffset = 0;
-                    mediaImage->mPlane[0].mColInc = std::abs(plane.colInc);
-                    mediaImage->mPlane[0].mRowInc = std::abs(plane.rowInc);
-                    mediaImage->mPlane[0].mHorizSubsampling = plane.colSampling;
-                    mediaImage->mPlane[0].mVertSubsampling = plane.rowSampling;
-                } else {
-                    ALOGD("Converter: unrecognized layout: color format (client %d component %d)",
-                            mClientColorFormat, mComponentColorFormat);
-                    mInitCheck = NO_INIT;
-                    return;
-                }
-                break;
-        }
-        if (tryWrapping) {
-            // try to map directly. check if the planes are near one another
-            const uint8_t *minPtr = mView.data()[0];
-            const uint8_t *maxPtr = mView.data()[0];
-            int32_t planeSize = 0;
-            for (uint32_t i = 0; i < layout.numPlanes; ++i) {
-                const C2PlaneInfo &plane = layout.planes[i];
-                int64_t planeStride = std::abs(plane.rowInc / plane.colInc);
-                ssize_t minOffset = plane.minOffset(
-                        mWidth / plane.colSampling, mHeight / plane.rowSampling);
-                ssize_t maxOffset = plane.maxOffset(
-                        mWidth / plane.colSampling, mHeight / plane.rowSampling);
-                if (minPtr > mView.data()[i] + minOffset) {
-                    minPtr = mView.data()[i] + minOffset;
-                }
-                if (maxPtr < mView.data()[i] + maxOffset) {
-                    maxPtr = mView.data()[i] + maxOffset;
-                }
-                planeSize += planeStride * divUp(mAllocatedDepth, 8u)
-                        * align(mHeight, 64) / plane.rowSampling;
-            }
-
-            if (minPtr == mView.data()[0] && (maxPtr - minPtr) <= planeSize) {
-                // FIXME: this is risky as reading/writing data out of bound results
-                //        in an undefined behavior, but gralloc does assume a
-                //        contiguous mapping
-                for (uint32_t i = 0; i < layout.numPlanes; ++i) {
-                    const C2PlaneInfo &plane = layout.planes[i];
-                    mediaImage->mPlane[i].mOffset = mView.data()[i] - minPtr;
-                    mediaImage->mPlane[i].mColInc = plane.colInc;
-                    mediaImage->mPlane[i].mRowInc = plane.rowInc;
-                    mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
-                    mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
-                }
-                mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr), maxPtr - minPtr);
-                ALOGV("Converter: wrapped (capacity=%zu)", mWrapped->capacity());
-            }
-        }
-        mediaImage->mNumPlanes = layout.numPlanes;
-        mediaImage->mWidth = view.crop().width;
-        mediaImage->mHeight = view.crop().height;
-        mediaImage->mBitDepth = bitDepth;
-        mediaImage->mBitDepthAllocated = mAllocatedDepth;
-
-        uint32_t bufferSize = 0;
-        for (uint32_t i = 0; i < layout.numPlanes; ++i) {
-            const C2PlaneInfo &plane = layout.planes[i];
-            if (plane.allocatedDepth < plane.bitDepth
-                    || plane.rightShift != plane.allocatedDepth - plane.bitDepth) {
-                ALOGD("rightShift value of %u unsupported", plane.rightShift);
-                mInitCheck = BAD_VALUE;
-                return;
-            }
-            if (plane.allocatedDepth > 8 && plane.endianness != C2PlaneInfo::NATIVE) {
-                ALOGD("endianness value of %u unsupported", plane.endianness);
-                mInitCheck = BAD_VALUE;
-                return;
-            }
-            if (plane.allocatedDepth != mAllocatedDepth || plane.bitDepth != bitDepth) {
-                ALOGD("different allocatedDepth/bitDepth per plane unsupported");
-                mInitCheck = BAD_VALUE;
-                return;
-            }
-            // stride is in bytes
-            bufferSize += stride * vStride / plane.rowSampling / plane.colSampling;
-        }
-
-        mBackBufferSize = bufferSize;
-        mInitCheck = OK;
-    }
-
-    status_t initCheck() const { return mInitCheck; }
-
-    uint32_t backBufferSize() const { return mBackBufferSize; }
-
-    /**
-     * Wrap C2GraphicView using a MediaImage2. Note that if not wrapped, the content is not mapped
-     * in this function --- the caller should use CopyGraphicView2MediaImage() function to copy the
-     * data into a backing buffer explicitly.
-     *
-     * \return media buffer. This is null if wrapping failed.
-     */
-    sp<ABuffer> wrap() const {
-        if (mBackBuffer == nullptr) {
-            return mWrapped;
-        }
-        return nullptr;
-    }
-
-    bool setBackBuffer(const sp<ABuffer> &backBuffer) {
-        if (backBuffer == nullptr) {
-            return false;
-        }
-        if (backBuffer->capacity() < mBackBufferSize) {
-            return false;
-        }
-        backBuffer->setRange(0, mBackBufferSize);
-        mBackBuffer = backBuffer;
-        return true;
-    }
-
-    /**
-     * Copy C2GraphicView to MediaImage2.
-     */
-    status_t copyToMediaImage() {
-        ATRACE_CALL();
-        if (mInitCheck != OK) {
-            return mInitCheck;
-        }
-        return ImageCopy(mBackBuffer->base(), getMediaImage(), mView);
-    }
-
-    const sp<ABuffer> &imageData() const { return mMediaImage; }
-
-private:
-    status_t mInitCheck;
-
-    const C2GraphicView mView;
-    uint32_t mWidth;
-    uint32_t mHeight;
-    int32_t mClientColorFormat;  ///< SDK color format for MediaImage
-    int32_t mComponentColorFormat;  ///< SDK color format from component
-    sp<ABuffer> mWrapped;  ///< wrapped buffer (if we can map C2Buffer to an ABuffer)
-    uint32_t mAllocatedDepth;
-    uint32_t mBackBufferSize;
-    sp<ABuffer> mMediaImage;
-    std::function<sp<ABuffer>(size_t)> mAlloc;
-
-    sp<ABuffer> mBackBuffer;    ///< backing buffer if we have to copy C2Buffer <=> ABuffer
-
-    MediaImage2 *getMediaImage() {
-        return (MediaImage2 *)mMediaImage->base();
-    }
-};
-
-}  // namespace
-
 // GraphicBlockBuffer
 
 // static
@@ -1210,6 +738,10 @@
         // Gralloc4 not supported; nothing to do
         return err;
     }
+    // Use V0 dataspaces for Gralloc4+
+    if (android::media::codec::provider_->dataspace_v0_partial()) {
+        ColorUtils::convertDataSpaceToV0(dataSpace);
+    }
     status_t status = mapper.setDataspace(buffer.get(), static_cast<ui::Dataspace>(dataSpace));
     if (status != OK) {
        err = C2_CORRUPTED;
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index 5e96921..8c5e909 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -44,28 +44,6 @@
 }  // namespace drm
 }  // namespace hardware
 
-/**
- * Copies a graphic view into a media image.
- *
- * \param imgBase base of MediaImage
- * \param img MediaImage data
- * \param view graphic view
- *
- * \return OK on success
- */
-status_t ImageCopy(uint8_t *imgBase, const MediaImage2 *img, const C2GraphicView &view);
-
-/**
- * Copies a media image into a graphic view.
- *
- * \param view graphic view
- * \param imgBase base of MediaImage
- * \param img MediaImage data
- *
- * \return OK on success
- */
-status_t ImageCopy(C2GraphicView &view, const uint8_t *imgBase, const MediaImage2 *img);
-
 class Codec2Buffer : public MediaCodecBuffer {
 public:
     using MediaCodecBuffer::MediaCodecBuffer;
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 692f700..0f5cdd6 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -16,6 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "Codec2InfoBuilder"
+
+#include <cstdlib>
+
 #include <log/log.h>
 
 #include <strings.h>
@@ -508,6 +511,10 @@
                 && !hasPrefix(v.first, "domain-")
                 && !hasPrefix(v.first, "variant-")) {
             writer->addGlobalSetting(v.first.c_str(), v.second.c_str());
+            if (v.first == "max-concurrent-instances") {
+                MediaCodecInfoWriter::SetMaxSupportedInstances(
+                        (int32_t)strtol(v.second.c_str(), NULL, 10));
+            }
         }
     }
 
@@ -740,6 +747,7 @@
                     pixelFormatMap[HAL_PIXEL_FORMAT_YCBCR_P010]    = COLOR_FormatYUVP010;
                     pixelFormatMap[HAL_PIXEL_FORMAT_RGBA_1010102]  = COLOR_Format32bitABGR2101010;
                     pixelFormatMap[HAL_PIXEL_FORMAT_RGBA_FP16]     = COLOR_Format64bitABGRFloat;
+                    pixelFormatMap[AHARDWAREBUFFER_FORMAT_YCbCr_P210]    = COLOR_FormatYUVP210;
 
                     std::shared_ptr<C2StoreFlexiblePixelFormatDescriptorsInfo> pixelFormatInfo;
                     std::vector<std::unique_ptr<C2Param>> heapParams;
@@ -796,6 +804,7 @@
                     }
                 }
             }
+            codecInfo->createCodecCaps();
         }
     }
     return OK;
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 75e9bbc..574f1b9 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -27,7 +27,10 @@
 
 #include <android/hardware_buffer.h>
 #include <media/hardware/HardwareAPI.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/MediaCodecConstants.h>
 
 #include <C2Debug.h>
 
@@ -787,4 +790,438 @@
     return MemoryBlockPool().fetch(size);
 }
 
+GraphicView2MediaImageConverter::GraphicView2MediaImageConverter(
+        const C2GraphicView &view, const sp<AMessage> &format, bool copy)
+    : mInitCheck(NO_INIT),
+        mView(view),
+        mWidth(view.width()),
+        mHeight(view.height()),
+        mAllocatedDepth(0),
+        mBackBufferSize(0),
+        mMediaImage(new ABuffer(sizeof(MediaImage2))) {
+    ATRACE_CALL();
+    if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
+        mClientColorFormat = COLOR_FormatYUV420Flexible;
+    }
+    if (!format->findInt32("android._color-format", &mComponentColorFormat)) {
+        mComponentColorFormat = COLOR_FormatYUV420Flexible;
+    }
+    if (view.error() != C2_OK) {
+        ALOGD("Converter: view.error() = %d", view.error());
+        mInitCheck = BAD_VALUE;
+        return;
+    }
+    MediaImage2 *mediaImage = (MediaImage2 *)mMediaImage->base();
+    const C2PlanarLayout &layout = view.layout();
+    if (layout.numPlanes == 0) {
+        ALOGD("Converter: 0 planes");
+        mInitCheck = BAD_VALUE;
+        return;
+    }
+    memset(mediaImage, 0, sizeof(*mediaImage));
+    mAllocatedDepth = layout.planes[0].allocatedDepth;
+    uint32_t bitDepth = layout.planes[0].bitDepth;
+
+    // align width and height to support subsampling cleanly
+    uint32_t stride = align(view.crop().width, 2) * divUp(layout.planes[0].allocatedDepth, 8u);
+    uint32_t vStride = align(view.crop().height, 2);
+
+    bool tryWrapping = !copy;
+
+    switch (layout.type) {
+        case C2PlanarLayout::TYPE_YUV: {
+            mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
+            if (layout.numPlanes != 3) {
+                ALOGD("Converter: %d planes for YUV layout", layout.numPlanes);
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            std::optional<int> clientBitDepth = {};
+            switch (mClientColorFormat) {
+                case COLOR_FormatYUVP010:
+                    clientBitDepth = 10;
+                    break;
+                case COLOR_FormatYUV411PackedPlanar:
+                case COLOR_FormatYUV411Planar:
+                case COLOR_FormatYUV420Flexible:
+                case COLOR_FormatYUV420PackedPlanar:
+                case COLOR_FormatYUV420PackedSemiPlanar:
+                case COLOR_FormatYUV420Planar:
+                case COLOR_FormatYUV420SemiPlanar:
+                case COLOR_FormatYUV422Flexible:
+                case COLOR_FormatYUV422PackedPlanar:
+                case COLOR_FormatYUV422PackedSemiPlanar:
+                case COLOR_FormatYUV422Planar:
+                case COLOR_FormatYUV422SemiPlanar:
+                case COLOR_FormatYUV444Flexible:
+                case COLOR_FormatYUV444Interleaved:
+                    clientBitDepth = 8;
+                    break;
+                default:
+                    // no-op; used with optional
+                    break;
+
+            }
+            // conversion fails if the client bit depth and the component bit depth differ
+            if ((clientBitDepth) && (bitDepth != clientBitDepth.value())) {
+                ALOGD("Bit depth of client: %d and component: %d differs",
+                    *clientBitDepth, bitDepth);
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            C2PlaneInfo yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
+            C2PlaneInfo uPlane = layout.planes[C2PlanarLayout::PLANE_U];
+            C2PlaneInfo vPlane = layout.planes[C2PlanarLayout::PLANE_V];
+            if (yPlane.channel != C2PlaneInfo::CHANNEL_Y
+                    || uPlane.channel != C2PlaneInfo::CHANNEL_CB
+                    || vPlane.channel != C2PlaneInfo::CHANNEL_CR) {
+                ALOGD("Converter: not YUV layout");
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            bool yuv420888 = yPlane.rowSampling == 1 && yPlane.colSampling == 1
+                    && uPlane.rowSampling == 2 && uPlane.colSampling == 2
+                    && vPlane.rowSampling == 2 && vPlane.colSampling == 2;
+            if (yuv420888) {
+                for (uint32_t i = 0; i < 3; ++i) {
+                    const C2PlaneInfo &plane = layout.planes[i];
+                    if (plane.allocatedDepth != 8 || plane.bitDepth != 8) {
+                        yuv420888 = false;
+                        break;
+                    }
+                }
+                yuv420888 = yuv420888 && yPlane.colInc == 1 && uPlane.rowInc == vPlane.rowInc;
+            }
+            int32_t copyFormat = mClientColorFormat;
+            if (yuv420888 && mClientColorFormat == COLOR_FormatYUV420Flexible) {
+                if (uPlane.colInc == 2 && vPlane.colInc == 2
+                        && yPlane.rowInc == uPlane.rowInc) {
+                    copyFormat = COLOR_FormatYUV420PackedSemiPlanar;
+                } else if (uPlane.colInc == 1 && vPlane.colInc == 1
+                        && yPlane.rowInc == uPlane.rowInc * 2) {
+                    copyFormat = COLOR_FormatYUV420PackedPlanar;
+                }
+            }
+            ALOGV("client_fmt=0x%x y:{colInc=%d rowInc=%d} u:{colInc=%d rowInc=%d} "
+                    "v:{colInc=%d rowInc=%d}",
+                    mClientColorFormat,
+                    yPlane.colInc, yPlane.rowInc,
+                    uPlane.colInc, uPlane.rowInc,
+                    vPlane.colInc, vPlane.rowInc);
+            switch (copyFormat) {
+                case COLOR_FormatYUV420Flexible:
+                case COLOR_FormatYUV420Planar:
+                case COLOR_FormatYUV420PackedPlanar:
+                    mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+                    mediaImage->mPlane[mediaImage->Y].mColInc = 1;
+                    mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
+                    mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
+
+                    mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
+                    mediaImage->mPlane[mediaImage->U].mColInc = 1;
+                    mediaImage->mPlane[mediaImage->U].mRowInc = stride / 2;
+                    mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
+
+                    mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 5 / 4;
+                    mediaImage->mPlane[mediaImage->V].mColInc = 1;
+                    mediaImage->mPlane[mediaImage->V].mRowInc = stride / 2;
+                    mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+
+                    if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
+                        tryWrapping = yuv420888 && uPlane.colInc == 1 && vPlane.colInc == 1
+                                && yPlane.rowInc == uPlane.rowInc * 2
+                                && view.data()[0] < view.data()[1]
+                                && view.data()[1] < view.data()[2];
+                    }
+                    break;
+
+                case COLOR_FormatYUV420SemiPlanar:
+                case COLOR_FormatYUV420PackedSemiPlanar:
+                    mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+                    mediaImage->mPlane[mediaImage->Y].mColInc = 1;
+                    mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
+                    mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
+
+                    mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
+                    mediaImage->mPlane[mediaImage->U].mColInc = 2;
+                    mediaImage->mPlane[mediaImage->U].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
+
+                    mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 1;
+                    mediaImage->mPlane[mediaImage->V].mColInc = 2;
+                    mediaImage->mPlane[mediaImage->V].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+
+                    if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
+                        tryWrapping = yuv420888 && uPlane.colInc == 2 && vPlane.colInc == 2
+                                && yPlane.rowInc == uPlane.rowInc
+                                && view.data()[0] < view.data()[1]
+                                && view.data()[1] < view.data()[2];
+                    }
+                    break;
+
+                case COLOR_FormatYUVP010:
+                    // stride is in bytes
+                    mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+                    mediaImage->mPlane[mediaImage->Y].mColInc = 2;
+                    mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
+                    mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
+
+                    mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
+                    mediaImage->mPlane[mediaImage->U].mColInc = 4;
+                    mediaImage->mPlane[mediaImage->U].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
+
+                    mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 2;
+                    mediaImage->mPlane[mediaImage->V].mColInc = 4;
+                    mediaImage->mPlane[mediaImage->V].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+                    if (tryWrapping) {
+                        tryWrapping = yPlane.allocatedDepth == 16
+                                && uPlane.allocatedDepth == 16
+                                && vPlane.allocatedDepth == 16
+                                && yPlane.bitDepth == 10
+                                && uPlane.bitDepth == 10
+                                && vPlane.bitDepth == 10
+                                && yPlane.rightShift == 6
+                                && uPlane.rightShift == 6
+                                && vPlane.rightShift == 6
+                                && yPlane.rowSampling == 1 && yPlane.colSampling == 1
+                                && uPlane.rowSampling == 2 && uPlane.colSampling == 2
+                                && vPlane.rowSampling == 2 && vPlane.colSampling == 2
+                                && yPlane.colInc == 2
+                                && uPlane.colInc == 4
+                                && vPlane.colInc == 4
+                                && yPlane.rowInc == uPlane.rowInc
+                                && yPlane.rowInc == vPlane.rowInc;
+                    }
+                    break;
+
+                default: {
+                    // default to fully planar format --- this will be overridden if wrapping
+                    // TODO: keep interleaved format
+                    int32_t colInc = divUp(mAllocatedDepth, 8u);
+                    int32_t rowInc = stride * colInc / yPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+                    mediaImage->mPlane[mediaImage->Y].mColInc = colInc;
+                    mediaImage->mPlane[mediaImage->Y].mRowInc = rowInc;
+                    mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = yPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->Y].mVertSubsampling = yPlane.rowSampling;
+                    int32_t offset = rowInc * vStride / yPlane.rowSampling;
+
+                    rowInc = stride * colInc / uPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->U].mOffset = offset;
+                    mediaImage->mPlane[mediaImage->U].mColInc = colInc;
+                    mediaImage->mPlane[mediaImage->U].mRowInc = rowInc;
+                    mediaImage->mPlane[mediaImage->U].mHorizSubsampling = uPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->U].mVertSubsampling = uPlane.rowSampling;
+                    offset += rowInc * vStride / uPlane.rowSampling;
+
+                    rowInc = stride * colInc / vPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->V].mOffset = offset;
+                    mediaImage->mPlane[mediaImage->V].mColInc = colInc;
+                    mediaImage->mPlane[mediaImage->V].mRowInc = rowInc;
+                    mediaImage->mPlane[mediaImage->V].mHorizSubsampling = vPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->V].mVertSubsampling = vPlane.rowSampling;
+                    break;
+                }
+            }
+            break;
+        }
+
+        case C2PlanarLayout::TYPE_YUVA:
+            ALOGD("Converter: unrecognized color format "
+                    "(client %d component %d) for YUVA layout",
+                    mClientColorFormat, mComponentColorFormat);
+            mInitCheck = NO_INIT;
+            return;
+        case C2PlanarLayout::TYPE_RGB:
+            mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGB;
+            // TODO: support MediaImage layout
+            switch (mClientColorFormat) {
+                case COLOR_FormatSurface:
+                case COLOR_FormatRGBFlexible:
+                case COLOR_Format24bitBGR888:
+                case COLOR_Format24bitRGB888:
+                    ALOGD("Converter: accept color format "
+                            "(client %d component %d) for RGB layout",
+                            mClientColorFormat, mComponentColorFormat);
+                    break;
+                default:
+                    ALOGD("Converter: unrecognized color format "
+                            "(client %d component %d) for RGB layout",
+                            mClientColorFormat, mComponentColorFormat);
+                    mInitCheck = BAD_VALUE;
+                    return;
+            }
+            if (layout.numPlanes != 3) {
+                ALOGD("Converter: %d planes for RGB layout", layout.numPlanes);
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            break;
+        case C2PlanarLayout::TYPE_RGBA:
+            mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGBA;
+            // TODO: support MediaImage layout
+            switch (mClientColorFormat) {
+                case COLOR_FormatSurface:
+                case COLOR_FormatRGBAFlexible:
+                case COLOR_Format32bitABGR8888:
+                case COLOR_Format32bitARGB8888:
+                case COLOR_Format32bitBGRA8888:
+                    ALOGD("Converter: accept color format "
+                            "(client %d component %d) for RGBA layout",
+                            mClientColorFormat, mComponentColorFormat);
+                    break;
+                default:
+                    ALOGD("Converter: unrecognized color format "
+                            "(client %d component %d) for RGBA layout",
+                            mClientColorFormat, mComponentColorFormat);
+                    mInitCheck = BAD_VALUE;
+                    return;
+            }
+            if (layout.numPlanes != 4) {
+                ALOGD("Converter: %d planes for RGBA layout", layout.numPlanes);
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            break;
+        default:
+            mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
+            if (layout.numPlanes == 1) {
+                const C2PlaneInfo &plane = layout.planes[0];
+                if (plane.colInc < 0 || plane.rowInc < 0) {
+                    // Copy-only if we have negative colInc/rowInc
+                    tryWrapping = false;
+                }
+                mediaImage->mPlane[0].mOffset = 0;
+                mediaImage->mPlane[0].mColInc = std::abs(plane.colInc);
+                mediaImage->mPlane[0].mRowInc = std::abs(plane.rowInc);
+                mediaImage->mPlane[0].mHorizSubsampling = plane.colSampling;
+                mediaImage->mPlane[0].mVertSubsampling = plane.rowSampling;
+            } else {
+                ALOGD("Converter: unrecognized layout: color format (client %d component %d)",
+                        mClientColorFormat, mComponentColorFormat);
+                mInitCheck = NO_INIT;
+                return;
+            }
+            break;
+    }
+    if (tryWrapping) {
+        // try to map directly. check if the planes are near one another
+        const uint8_t *minPtr = mView.data()[0];
+        const uint8_t *maxPtr = mView.data()[0];
+        int32_t planeSize = 0;
+        for (uint32_t i = 0; i < layout.numPlanes; ++i) {
+            const C2PlaneInfo &plane = layout.planes[i];
+            int64_t planeStride = std::abs(plane.rowInc / plane.colInc);
+            ssize_t minOffset = plane.minOffset(
+                    mWidth / plane.colSampling, mHeight / plane.rowSampling);
+            ssize_t maxOffset = plane.maxOffset(
+                    mWidth / plane.colSampling, mHeight / plane.rowSampling);
+            if (minPtr > mView.data()[i] + minOffset) {
+                minPtr = mView.data()[i] + minOffset;
+            }
+            if (maxPtr < mView.data()[i] + maxOffset) {
+                maxPtr = mView.data()[i] + maxOffset;
+            }
+            planeSize += planeStride * divUp(mAllocatedDepth, 8u)
+                    * align(mHeight, 64) / plane.rowSampling;
+        }
+
+        if (minPtr == mView.data()[0] && (maxPtr - minPtr) <= planeSize) {
+            // FIXME: this is risky as reading/writing data out of bounds results
+            //        in undefined behavior, but gralloc does assume a
+            //        contiguous mapping
+            for (uint32_t i = 0; i < layout.numPlanes; ++i) {
+                const C2PlaneInfo &plane = layout.planes[i];
+                mediaImage->mPlane[i].mOffset = mView.data()[i] - minPtr;
+                mediaImage->mPlane[i].mColInc = plane.colInc;
+                mediaImage->mPlane[i].mRowInc = plane.rowInc;
+                mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
+                mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
+            }
+            mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr), maxPtr - minPtr);
+            ALOGV("Converter: wrapped (capacity=%zu)", mWrapped->capacity());
+        }
+    }
+    mediaImage->mNumPlanes = layout.numPlanes;
+    mediaImage->mWidth = view.crop().width;
+    mediaImage->mHeight = view.crop().height;
+    mediaImage->mBitDepth = bitDepth;
+    mediaImage->mBitDepthAllocated = mAllocatedDepth;
+
+    uint32_t bufferSize = 0;
+    for (uint32_t i = 0; i < layout.numPlanes; ++i) {
+        const C2PlaneInfo &plane = layout.planes[i];
+        if (plane.allocatedDepth < plane.bitDepth
+                || plane.rightShift != plane.allocatedDepth - plane.bitDepth) {
+            ALOGD("rightShift value of %u unsupported", plane.rightShift);
+            mInitCheck = BAD_VALUE;
+            return;
+        }
+        if (plane.allocatedDepth > 8 && plane.endianness != C2PlaneInfo::NATIVE) {
+            ALOGD("endianness value of %u unsupported", plane.endianness);
+            mInitCheck = BAD_VALUE;
+            return;
+        }
+        if (plane.allocatedDepth != mAllocatedDepth || plane.bitDepth != bitDepth) {
+            ALOGD("different allocatedDepth/bitDepth per plane unsupported");
+            mInitCheck = BAD_VALUE;
+            return;
+        }
+        // stride is in bytes
+        bufferSize += stride * vStride / plane.rowSampling / plane.colSampling;
+    }
+
+    mBackBufferSize = bufferSize;
+    mInitCheck = OK;
+}
+
+status_t GraphicView2MediaImageConverter::initCheck() const { return mInitCheck; }
+
+uint32_t GraphicView2MediaImageConverter::backBufferSize() const { return mBackBufferSize; }
+
+sp<ABuffer> GraphicView2MediaImageConverter::wrap() const {
+    if (mBackBuffer == nullptr) {
+        return mWrapped;
+    }
+    return nullptr;
+}
+
+bool GraphicView2MediaImageConverter::setBackBuffer(const sp<ABuffer> &backBuffer) {
+    if (backBuffer == nullptr) {
+        return false;
+    }
+    if (backBuffer->capacity() < mBackBufferSize) {
+        return false;
+    }
+    backBuffer->setRange(0, mBackBufferSize);
+    mBackBuffer = backBuffer;
+    return true;
+}
+
+status_t GraphicView2MediaImageConverter::copyToMediaImage() {
+    ATRACE_CALL();
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+    return ImageCopy(mBackBuffer->base(), getMediaImage(), mView);
+}
+
+const sp<ABuffer> &GraphicView2MediaImageConverter::imageData() const { return mMediaImage; }
+
+MediaImage2 *GraphicView2MediaImageConverter::getMediaImage() {
+    return (MediaImage2 *)mMediaImage->base();
+}
+
 }  // namespace android
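
[Editor's note] The tryWrapping path in the constructor above decides whether the graphic view can be exposed zero-copy: it finds the lowest and highest byte any plane touches and wraps a single ABuffer over that span only when the span starts at the first plane and fits in the computed plane size. A minimal, self-contained sketch of that contiguity test follows (simplified types and a hypothetical helper name; not part of the patch):

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    struct PlaneSpan {
        const uint8_t *base;   // mapped base pointer of the plane
        ptrdiff_t minOffset;   // lowest byte offset the plane touches
        ptrdiff_t maxOffset;   // highest byte offset the plane touches
    };

    // Returns true if every plane lies inside one contiguous region that starts at the
    // first plane's lowest address and is no larger than maxSpan bytes.
    static bool planesAreContiguous(const std::vector<PlaneSpan> &planes, size_t maxSpan) {
        if (planes.empty()) return false;
        const uint8_t *minPtr = planes[0].base + planes[0].minOffset;
        const uint8_t *maxPtr = planes[0].base + planes[0].maxOffset;
        for (const PlaneSpan &p : planes) {
            minPtr = std::min(minPtr, p.base + p.minOffset);
            maxPtr = std::max(maxPtr, p.base + p.maxOffset);
        }
        // As in the converter, require the overall minimum to coincide with plane 0 so
        // that per-plane offsets stay non-negative relative to the wrapped buffer.
        return minPtr == planes[0].base + planes[0].minOffset &&
                static_cast<size_t>(maxPtr - minPtr) <= maxSpan;
    }
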
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index 6b0ba7f..8daf3d8 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -22,6 +22,7 @@
 #include <C2ParamDef.h>
 
 #include <media/hardware/VideoAPI.h>
+#include <utils/StrongPointer.h>
 #include <utils/Errors.h>
 
 namespace android {
@@ -194,6 +195,61 @@
     std::shared_ptr<Impl> mImpl;
 };
 
+struct ABuffer;
+struct AMessage;
+
+class GraphicView2MediaImageConverter {
+public:
+    /**
+     * Creates a C2GraphicView <=> MediaImage converter
+     *
+     * \param view C2GraphicView object
+     * \param format buffer format
+     * \param copy whether the converter is used for copying only (if true, wrapping is not attempted)
+     */
+    GraphicView2MediaImageConverter(
+            const C2GraphicView &view, const sp<AMessage> &format, bool copy);
+
+    status_t initCheck() const;
+
+    uint32_t backBufferSize() const;
+
+    /**
+     * Wrap C2GraphicView using a MediaImage2. Note that if not wrapped, the content is not mapped
+     * in this function --- the caller should use copyToMediaImage() to copy the
+     * data into a backing buffer explicitly.
+     *
+     * \return media buffer. This is null if wrapping failed.
+     */
+    sp<ABuffer> wrap() const;
+
+    bool setBackBuffer(const sp<ABuffer> &backBuffer);
+
+    /**
+     * Copy C2GraphicView to MediaImage2.
+     */
+    status_t copyToMediaImage();
+
+    const sp<ABuffer> &imageData() const;
+
+private:
+    status_t mInitCheck;
+
+    const C2GraphicView mView;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    int32_t mClientColorFormat;  ///< SDK color format for MediaImage
+    int32_t mComponentColorFormat;  ///< SDK color format from component
+    sp<ABuffer> mWrapped;  ///< wrapped buffer (if we can map C2Buffer to an ABuffer)
+    uint32_t mAllocatedDepth;
+    uint32_t mBackBufferSize;
+    sp<ABuffer> mMediaImage;
+
+    sp<ABuffer> mBackBuffer;    ///< backing buffer if we have to copy C2Buffer <=> ABuffer
+
+    MediaImage2 *getMediaImage();
+};
+
 } // namespace android
 
 #endif  // CODEC2_BUFFER_UTILS_H_
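
[Editor's note] For reference, an illustrative caller of the class declared above (the helper name is hypothetical and not part of the patch): it first tries the zero-copy wrap() path and, failing that, allocates a backing buffer and copies.

    #include <media/stagefright/foundation/ABuffer.h>
    #include <media/stagefright/foundation/AMessage.h>

    namespace android {

    // Hypothetical helper; assumes Codec2BufferUtils.h is included so the declarations
    // above are visible, and that the view is already mapped.
    status_t convertOrCopy(const C2GraphicView &view, const sp<AMessage> &format) {
        GraphicView2MediaImageConverter converter(view, format, /* copy */ false);
        if (converter.initCheck() != OK) {
            return converter.initCheck();
        }
        if (converter.wrap() != nullptr) {
            return OK;  // zero-copy: the view is exposed directly through its MediaImage2
        }
        // Copy path: hand the converter a backing buffer of at least backBufferSize()
        // bytes, then copy the mapped view into it.
        sp<ABuffer> backing = new ABuffer(converter.backBufferSize());
        if (!converter.setBackBuffer(backing)) {
            return NO_MEMORY;
        }
        return converter.copyToMediaImage();
    }

    }  // namespace android
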
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index 7a33af4..aa87e97 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -64,6 +64,13 @@
     return kVendorApiLevel >= __ANDROID_API_T__;
 }
 
+static bool isP210Allowed() {
+    static const int32_t kVendorApiLevel =
+        base::GetIntProperty<int32_t>("ro.vendor.api_level", 0);
+
+    return kVendorApiLevel > __ANDROID_API_V__;
+}
+
 bool isHalPixelFormatSupported(AHardwareBuffer_Format format) {
     // HAL_PIXEL_FORMAT_YCBCR_P010 requirement was added in T VSR, although it could have been
     // supported prior to this.
@@ -76,6 +83,12 @@
         return false;
     }
 
+    // P210 is not available before Android B
+    if (format == (AHardwareBuffer_Format)AHARDWAREBUFFER_FORMAT_YCbCr_P210 &&
+            !isP210Allowed()) {
+        return false;
+    }
+
     // Default scenario --- the consumer is display or GPU
     const AHardwareBuffer_Desc consumableForDisplayOrGpu = {
             .width = 320,
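
[Editor's note] A hypothetical caller of the gate added above (sketch only, not part of the patch): P210 is attempted only when isHalPixelFormatSupported() accepts it, since that check also enforces the vendor API level requirement from isP210Allowed(). The P210 constant is assumed to come from the same headers the patch relies on.

    #include <android/hardware_buffer.h>

    // Hypothetical helper; assumes Codec2CommonUtils.h is included for
    // isHalPixelFormatSupported().
    static AHardwareBuffer_Format pickTenBitYuvFormat() {
        if (isHalPixelFormatSupported(
                (AHardwareBuffer_Format)AHARDWAREBUFFER_FORMAT_YCbCr_P210)) {
            return (AHardwareBuffer_Format)AHARDWAREBUFFER_FORMAT_YCbCr_P210;
        }
        // Fall back to P010, whose support requirement dates back to the T VSR.
        return (AHardwareBuffer_Format)AHARDWAREBUFFER_FORMAT_YCbCr_P010;
    }
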
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 9297520..3841831 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -436,6 +436,86 @@
     { C2Config::hdr_format_t::HDR10_PLUS, AV1ProfileMain10HDR10Plus },
 };
 
+// APV
+ALookup<C2Config::profile_t, int32_t> sApvProfiles = {
+    { C2Config::PROFILE_APV_422_10, APVProfile422_10 },
+    { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10 },
+    { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10Plus },
+};
+
+ALookup<C2Config::profile_t, int32_t> sApvHdrProfiles = {
+    { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10 },
+};
+
+ALookup<C2Config::profile_t, int32_t> sApvHdr10PlusProfiles = {
+    { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10Plus },
+};
+
+ALookup<C2Config::level_t, int32_t> sApvLevels = {
+    { C2Config::LEVEL_APV_1_BAND_0, APVLevel1Band0 },
+    { C2Config::LEVEL_APV_1_BAND_1, APVLevel1Band1 },
+    { C2Config::LEVEL_APV_1_BAND_2, APVLevel1Band2 },
+    { C2Config::LEVEL_APV_1_BAND_3, APVLevel1Band3 },
+    { C2Config::LEVEL_APV_1_1_BAND_0, APVLevel11Band0 },
+    { C2Config::LEVEL_APV_1_1_BAND_1, APVLevel11Band1 },
+    { C2Config::LEVEL_APV_1_1_BAND_2, APVLevel11Band2 },
+    { C2Config::LEVEL_APV_1_1_BAND_3, APVLevel11Band3 },
+    { C2Config::LEVEL_APV_2_BAND_0, APVLevel2Band0 },
+    { C2Config::LEVEL_APV_2_BAND_1, APVLevel2Band1 },
+    { C2Config::LEVEL_APV_2_BAND_2, APVLevel2Band2 },
+    { C2Config::LEVEL_APV_2_BAND_3, APVLevel2Band3 },
+    { C2Config::LEVEL_APV_2_1_BAND_0, APVLevel21Band0 },
+    { C2Config::LEVEL_APV_2_1_BAND_1, APVLevel21Band1 },
+    { C2Config::LEVEL_APV_2_1_BAND_2, APVLevel21Band2 },
+    { C2Config::LEVEL_APV_2_1_BAND_3, APVLevel21Band3 },
+    { C2Config::LEVEL_APV_3_BAND_0, APVLevel3Band0 },
+    { C2Config::LEVEL_APV_3_BAND_1, APVLevel3Band1 },
+    { C2Config::LEVEL_APV_3_BAND_2, APVLevel3Band2 },
+    { C2Config::LEVEL_APV_3_BAND_3, APVLevel3Band3 },
+    { C2Config::LEVEL_APV_3_1_BAND_0, APVLevel31Band0 },
+    { C2Config::LEVEL_APV_3_1_BAND_1, APVLevel31Band1 },
+    { C2Config::LEVEL_APV_3_1_BAND_2, APVLevel31Band2 },
+    { C2Config::LEVEL_APV_3_1_BAND_3, APVLevel31Band3 },
+    { C2Config::LEVEL_APV_4_BAND_0, APVLevel4Band0 },
+    { C2Config::LEVEL_APV_4_BAND_1, APVLevel4Band1 },
+    { C2Config::LEVEL_APV_4_BAND_2, APVLevel4Band2 },
+    { C2Config::LEVEL_APV_4_BAND_3, APVLevel4Band3 },
+    { C2Config::LEVEL_APV_4_1_BAND_0, APVLevel41Band0 },
+    { C2Config::LEVEL_APV_4_1_BAND_1, APVLevel41Band1 },
+    { C2Config::LEVEL_APV_4_1_BAND_2, APVLevel41Band2 },
+    { C2Config::LEVEL_APV_4_1_BAND_3, APVLevel41Band3 },
+    { C2Config::LEVEL_APV_5_BAND_0, APVLevel5Band0 },
+    { C2Config::LEVEL_APV_5_BAND_1, APVLevel5Band1 },
+    { C2Config::LEVEL_APV_5_BAND_2, APVLevel5Band2 },
+    { C2Config::LEVEL_APV_5_BAND_3, APVLevel5Band3 },
+    { C2Config::LEVEL_APV_5_1_BAND_0, APVLevel51Band0 },
+    { C2Config::LEVEL_APV_5_1_BAND_1, APVLevel51Band1 },
+    { C2Config::LEVEL_APV_5_1_BAND_2, APVLevel51Band2 },
+    { C2Config::LEVEL_APV_5_1_BAND_3, APVLevel51Band3 },
+    { C2Config::LEVEL_APV_6_BAND_0, APVLevel6Band0 },
+    { C2Config::LEVEL_APV_6_BAND_1, APVLevel6Band1 },
+    { C2Config::LEVEL_APV_6_BAND_2, APVLevel6Band2 },
+    { C2Config::LEVEL_APV_6_BAND_3, APVLevel6Band3 },
+    { C2Config::LEVEL_APV_6_1_BAND_0, APVLevel61Band0 },
+    { C2Config::LEVEL_APV_6_1_BAND_1, APVLevel61Band1 },
+    { C2Config::LEVEL_APV_6_1_BAND_2, APVLevel61Band2 },
+    { C2Config::LEVEL_APV_6_1_BAND_3, APVLevel61Band3 },
+    { C2Config::LEVEL_APV_7_BAND_0, APVLevel7Band0 },
+    { C2Config::LEVEL_APV_7_BAND_1, APVLevel7Band1 },
+    { C2Config::LEVEL_APV_7_BAND_2, APVLevel7Band2 },
+    { C2Config::LEVEL_APV_7_BAND_3, APVLevel7Band3 },
+    { C2Config::LEVEL_APV_7_1_BAND_0, APVLevel71Band0 },
+    { C2Config::LEVEL_APV_7_1_BAND_1, APVLevel71Band1 },
+    { C2Config::LEVEL_APV_7_1_BAND_2, APVLevel71Band2 },
+    { C2Config::LEVEL_APV_7_1_BAND_3, APVLevel71Band3 },
+};
+
+ALookup<C2Config::hdr_format_t, int32_t> sApvHdrFormats = {
+    { C2Config::hdr_format_t::HLG, APVProfile422_10 },
+    { C2Config::hdr_format_t::HDR10, APVProfile422_10HDR10 },
+    { C2Config::hdr_format_t::HDR10_PLUS, APVProfile422_10HDR10Plus },
+};
+
 // HAL_PIXEL_FORMAT_* -> COLOR_Format*
 ALookup<uint32_t, int32_t> sPixelFormats = {
     { HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, COLOR_FormatSurface },
@@ -720,6 +800,37 @@
     int32_t mBitDepth;
 };
 
+// APV
+struct ApvProfileLevelMapper : ProfileLevelMapperHelper {
+    ApvProfileLevelMapper(bool isHdr = false, bool isHdr10Plus = false) :
+        ProfileLevelMapperHelper(),
+        mIsHdr(isHdr), mIsHdr10Plus(isHdr10Plus) {}
+
+    virtual bool simpleMap(C2Config::level_t from, int32_t *to) {
+        return sApvLevels.map(from, to);
+    }
+    virtual bool simpleMap(int32_t from, C2Config::level_t *to) {
+        return sApvLevels.map(from, to);
+    }
+    virtual bool simpleMap(C2Config::profile_t from, int32_t *to) {
+        return mIsHdr10Plus ? sApvHdr10PlusProfiles.map(from, to) :
+                     mIsHdr ? sApvHdrProfiles.map(from, to) :
+                              sApvProfiles.map(from, to);
+    }
+    virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
+        return mIsHdr10Plus ? sApvHdr10PlusProfiles.map(from, to) :
+                     mIsHdr ? sApvHdrProfiles.map(from, to) :
+                              sApvProfiles.map(from, to);
+    }
+    virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+        return sApvHdrFormats.map(from, to);
+    }
+
+private:
+    bool mIsHdr;
+    bool mIsHdr10Plus;
+};
+
 } // namespace
 
 // the default mapper is used for media types that do not support HDR
@@ -753,6 +864,8 @@
         return std::make_shared<Vp9ProfileLevelMapper>();
     } else if (mediaType == MIMETYPE_VIDEO_AV1) {
         return std::make_shared<Av1ProfileLevelMapper>();
+    } else if (mediaType == MIMETYPE_VIDEO_APV) {
+        return std::make_shared<ApvProfileLevelMapper>();
     }
     return nullptr;
 }
@@ -767,6 +880,8 @@
         return std::make_shared<Vp9ProfileLevelMapper>(true, isHdr10Plus);
     } else if (mediaType == MIMETYPE_VIDEO_AV1) {
         return std::make_shared<Av1ProfileLevelMapper>(true, isHdr10Plus);
+    } else if (mediaType == MIMETYPE_VIDEO_APV) {
+        return std::make_shared<ApvProfileLevelMapper>(true, isHdr10Plus);
     }
     return nullptr;
 }
@@ -779,6 +894,8 @@
         return GetProfileLevelMapper(mediaType);
     } else if (mediaType == MIMETYPE_VIDEO_AV1 && bitDepth == 10) {
         return std::make_shared<Av1ProfileLevelMapper>(false, false, bitDepth);
+    } else if (mediaType == MIMETYPE_VIDEO_APV) {
+        return std::make_shared<ApvProfileLevelMapper>();
     }
     return nullptr;
 }
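
[Editor's note] To illustrate how the new tables are consumed (sketch only; the demo function is hypothetical and file-local like the mapper itself): the APV mapper translates between framework profile/level constants and C2 enums through sApvProfiles and sApvLevels.

    // Hypothetical demo inside this translation unit.
    static void demoApvMapping() {
        ApvProfileLevelMapper mapper;  // non-HDR variant
        int32_t sdkProfile = 0;
        if (mapper.simpleMap(C2Config::PROFILE_APV_422_10, &sdkProfile)) {
            // sdkProfile is expected to be APVProfile422_10 (first match in sApvProfiles)
        }
        C2Config::level_t c2Level = C2Config::LEVEL_UNUSED;
        if (mapper.simpleMap(APVLevel1Band0, &c2Level)) {
            // c2Level is now C2Config::LEVEL_APV_1_BAND_0
        }
    }
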
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index dc06ee6..9d1cbff 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -53,7 +53,7 @@
     ],
 
     defaults: [
-	"aconfig_lib_cc_static_link.defaults",
+        "aconfig_lib_cc_static_link.defaults",
         "libcodec2_hal_selection",
     ],
 
@@ -68,6 +68,7 @@
         "C2PlatformStorePluginLoader.cpp",
         "C2Store.cpp",
         "platform/C2BqBuffer.cpp",
+        "platform/C2BqPoolInvalidator.cpp",
         "platform/C2SurfaceSyncObj.cpp",
         "platform/C2IgbaBuffer.cpp",
         "types.cpp",
diff --git a/media/codec2/vndk/C2Buffer.cpp b/media/codec2/vndk/C2Buffer.cpp
index 7b9b80d..bff953d 100644
--- a/media/codec2/vndk/C2Buffer.cpp
+++ b/media/codec2/vndk/C2Buffer.cpp
@@ -1311,8 +1311,7 @@
                 for (size_t planeIx = 0; planeIx < mLayout.numPlanes; ++planeIx) {
                     const uint32_t colSampling = mLayout.planes[planeIx].colSampling;
                     const uint32_t rowSampling = mLayout.planes[planeIx].rowSampling;
-                    if (crop.left % colSampling || crop.right() % colSampling
-                            || crop.top % rowSampling || crop.bottom() % rowSampling) {
+                    if (crop.left % colSampling || crop.top % rowSampling) {
                         // cannot calculate data pointer
                         mImpl->getAllocation()->unmap(mData, crop, nullptr);
                         memset(&mLayout, 0, sizeof(mLayout));
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index 0987da2..6ec9d6b 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -1206,7 +1206,8 @@
     emplace("libcodec2_soft_vp8enc.so");
     emplace("libcodec2_soft_vp9dec.so");
     emplace("libcodec2_soft_vp9enc.so");
-
+    emplace("libcodec2_soft_apvenc.so");
+    emplace("libcodec2_soft_apvdec.so");
 }
 
 // For testing only
diff --git a/media/codec2/vndk/include/C2BqPoolInvalidator.h b/media/codec2/vndk/include/C2BqPoolInvalidator.h
new file mode 100644
index 0000000..612d023
--- /dev/null
+++ b/media/codec2/vndk/include/C2BqPoolInvalidator.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/no_destructor.h>
+#include <media/stagefright/foundation/ABase.h>
+
+#include <condition_variable>
+#include <deque>
+#include <list>
+#include <memory>
+#include <thread>
+
+class C2BufferQueueBlockPool;
+
+namespace android {
+
+/**
+ * Container class used to invalidate C2BufferQueueBlockPool(s) and their resources
+ * when the client process dies abruptly.
+ */
+class C2BqPoolInvalidateItem {
+public:
+
+    /**
+     * Invalidates the contained C2BufferQueueBlockPool(s) and their resources.
+     */
+    void invalidate();
+
+    /**
+     * Skips a scheduled invalidate() if it has not run yet.
+     */
+    void skip();
+
+    /**
+     * Returns whether invalidate() is still required.
+     */
+    bool needsInvalidate();
+
+    C2BqPoolInvalidateItem(std::list<std::shared_ptr<C2BufferQueueBlockPool>> &&pools);
+
+    ~C2BqPoolInvalidateItem() = default;
+private:
+
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>>  mPools;
+    bool mNeedsInvalidate;
+    std::mutex mLock;
+
+    DISALLOW_EVIL_CONSTRUCTORS(C2BqPoolInvalidateItem);
+};
+
+/**
+ * Asynchronous C2BufferQueueBlockPool invalidator.
+ *
+ * This holds C2BqPoolInvalidateItem objects and calls invalidate() on them from a
+ * separate thread asynchronously.
+ */
+class C2BqPoolInvalidator {
+public:
+    /**
+     * This gets the singleton instance of the class.
+     */
+    static C2BqPoolInvalidator &getInstance();
+
+    /**
+     * Queues an invalidation item. The item will be invalidated from a separate
+     * thread after a certain delay.
+     */
+    void queue(std::shared_ptr<C2BqPoolInvalidateItem> &item);
+
+    ~C2BqPoolInvalidator();
+private:
+
+    C2BqPoolInvalidator();
+
+    void run();
+
+    std::thread mThread;
+    bool mDone;
+
+    std::mutex mMutex;
+    std::condition_variable mCv;
+
+    std::deque<std::pair<int64_t, std::shared_ptr<C2BqPoolInvalidateItem>>> mItems;
+
+    friend class ::android::base::NoDestructor<C2BqPoolInvalidator>;
+
+    DISALLOW_EVIL_CONSTRUCTORS(C2BqPoolInvalidator);
+};
+
+}  // namespace android
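
[Editor's note] An illustrative use of the new classes (hypothetical caller, not part of the patch): when a codec client dies, its buffer-queue block pools are wrapped in an item and queued on the singleton invalidator, which invalidates them asynchronously after a short delay.

    #include <C2BqPoolInvalidator.h>

    #include <list>
    #include <memory>

    // Hypothetical helper on the service side that owns the block pools.
    void scheduleBqPoolInvalidation(
            std::list<std::shared_ptr<C2BufferQueueBlockPool>> pools) {
        using namespace android;
        std::shared_ptr<C2BqPoolInvalidateItem> item =
                std::make_shared<C2BqPoolInvalidateItem>(std::move(pools));
        C2BqPoolInvalidator::getInstance().queue(item);
        // If the pools are torn down cleanly before the delay expires, item->skip()
        // cancels the pending invalidation.
    }
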
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 665f9fc..17dfe9c 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -997,7 +997,7 @@
         return -1;
     }
 
-    if (toUsage != graphicBuffer->getUsage()) {
+    if ((toUsage & graphicBuffer->getUsage()) != toUsage) {
         sp<GraphicBuffer> newBuffer = new GraphicBuffer(
             graphicBuffer->handle, GraphicBuffer::CLONE_HANDLE,
             graphicBuffer->width, graphicBuffer->height, graphicBuffer->format,
diff --git a/media/codec2/vndk/platform/C2BqPoolInvalidator.cpp b/media/codec2/vndk/platform/C2BqPoolInvalidator.cpp
new file mode 100644
index 0000000..2666cd3
--- /dev/null
+++ b/media/codec2/vndk/platform/C2BqPoolInvalidator.cpp
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2BqPoolInvalidator"
+#include <utils/Log.h>
+#include <utils/SystemClock.h>
+
+#include <C2BqBufferPriv.h>
+#include <C2BqPoolInvalidator.h>
+
+namespace android {
+
+namespace {
+    static constexpr int64_t kBqPoolInvalidateDelayMs = 1000;
+} // anonymous namespace
+
+C2BqPoolInvalidateItem::C2BqPoolInvalidateItem(
+        std::list<std::shared_ptr<C2BufferQueueBlockPool>> &&pools) : mPools(std::move(pools)) {
+    if (!mPools.empty()) {
+        mNeedsInvalidate = true;
+    } else {
+        mNeedsInvalidate = false;
+    }
+}
+
+void C2BqPoolInvalidateItem::invalidate() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> pools;
+    {
+        std::unique_lock<std::mutex> l(mLock);
+        if (!mNeedsInvalidate) {
+            return;
+        }
+        pools = std::move(mPools);
+        mNeedsInvalidate = false;
+    }
+    for(auto it = pools.begin(); it != pools.end(); ++it) {
+        (*it)->invalidate();
+    }
+}
+
+void C2BqPoolInvalidateItem::skip() {
+    std::unique_lock<std::mutex> l(mLock);
+    mNeedsInvalidate = false;
+    mPools.clear();
+}
+
+bool C2BqPoolInvalidateItem::needsInvalidate() {
+    std::unique_lock<std::mutex> l(mLock);
+    return mNeedsInvalidate;
+}
+
+C2BqPoolInvalidator &C2BqPoolInvalidator::getInstance() {
+    static android::base::NoDestructor<C2BqPoolInvalidator> sInvalidator;
+    return *sInvalidator;
+}
+
+C2BqPoolInvalidator::C2BqPoolInvalidator() : mDone(false) {
+    mThread = std::thread(&C2BqPoolInvalidator::run, this);
+}
+
+C2BqPoolInvalidator::~C2BqPoolInvalidator() {
+    {
+        std::unique_lock<std::mutex> l(mMutex);
+        mDone = true;
+        mCv.notify_one();
+    }
+    if (mThread.joinable()) {
+        mThread.join();
+    }
+}
+
+void C2BqPoolInvalidator::queue(std::shared_ptr<C2BqPoolInvalidateItem> &item) {
+    std::unique_lock<std::mutex> l(mMutex);
+    std::pair<int64_t, std::shared_ptr<C2BqPoolInvalidateItem>> p =
+            std::make_pair(::android::elapsedRealtime() + kBqPoolInvalidateDelayMs, item);
+    mItems.push_back(p);
+    mCv.notify_one();
+}
+
+void C2BqPoolInvalidator::run() {
+    while(true) {
+        int64_t nowMs = ::android::elapsedRealtime();
+        std::unique_lock<std::mutex> l(mMutex);
+        if (mDone) {
+            break;
+        }
+        std::list<std::shared_ptr<C2BqPoolInvalidateItem>> items;
+        while (!mItems.empty()) {
+            if (mItems.front().first <= nowMs) {
+                items.push_back(mItems.front().second);
+                mItems.pop_front();
+            } else {
+                break;
+            }
+        }
+        if (items.empty()) {
+            if (mItems.empty()) {
+                mCv.wait(l);
+            } else {
+                int64_t nextMs = mItems.front().first;
+                if (nextMs > nowMs) {
+                    mCv.wait_for(l, std::chrono::milliseconds(nextMs - nowMs));
+                }
+            }
+        } else {
+            l.unlock();
+            int invalidated = 0;
+            for (auto it = items.begin(); it != items.end(); ++it, ++invalidated) {
+                (*it)->invalidate();
+            }
+            ALOGD("invalidated %d bqpool items", invalidated);
+        }
+    }
+}
+
+}  // namespace android
diff --git a/media/janitors/media_solutions_OWNERS b/media/janitors/media_solutions_OWNERS
index 17bc7dd..004fa30 100644
--- a/media/janitors/media_solutions_OWNERS
+++ b/media/janitors/media_solutions_OWNERS
@@ -1,22 +1,24 @@
 # Bug component: 1344
 # go/android-fwk-media-solutions for info on areas of ownership.
 
-# MediaRouter and native mirroring only:
-adadukin@google.com
-aquilescanta@google.com
-bishoygendy@google.com
-ivanbuper@google.com
-
-# MediaMuxer, MediaRecorder, and seamless transcoding only:
 andrewlewis@google.com
-claincly@google.com
-
-# Everything in go/android-fwk-media-solutions not covered above:
 bachinger@google.com
-christosts@google.com
+claincly@google.com
+dancho@google.com
 ibaker@google.com
+ivanbuper@google.com
 jbibik@google.com
 michaelkatz@google.com
 rohks@google.com
+sheenachhabra@google.com
+simakova@google.com
 tianyifeng@google.com
 tonihei@google.com
+
+# MediaRouter and native mirroring only:
+aquilescanta@google.com
+
+# Emergency rollbacks and fixes outside LON timezone
+jmtrivi@google.com # US-MTV
+lajos@google.com # US-MTV
+scottnien@google.com # TW-NTC
diff --git a/media/libaaudio/Android.bp b/media/libaaudio/Android.bp
index 4b417a7..add28e0 100644
--- a/media/libaaudio/Android.bp
+++ b/media/libaaudio/Android.bp
@@ -36,9 +36,6 @@
     symbol_file: "src/libaaudio.map.txt",
     first_version: "26",
     unversioned_until: "current",
-    export_header_libs: [
-        "libAAudio_headers",
-    ],
 }
 
 cc_library_headers {
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index ba231c1..67c6715 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -58,14 +58,13 @@
         "libaudioclient",
         "libaudioutils",
         "libbase_ndk",
-        "libcgrouprc",
-        "libcgrouprc_format",
         "libcutils",
         "libjsoncpp",
         "liblog",
         "libmedia_helper",
         "libmediametrics",
         "libprocessgroup",
+        "libprocessgroup_util",
         "mediametricsservice-aidl-cpp",
         "shared-file-region-aidl-cpp",
     ],
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 5785537..6dfb327 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -158,6 +158,7 @@
         "framework-permission-aidl-cpp",
         "libbinder",
         "libmediametrics",
+        "libmediautils",
         "spatializer-aidl-cpp",
     ],
 
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index f729e1b..8c330d4 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -79,6 +79,17 @@
     return NO_ERROR;
 }
 
+status_t AudioRecord::logIfErrorAndReturnStatus(status_t status, const std::string& errorMessage,
+                                                const std::string& func) {
+    if (status != NO_ERROR) {
+        if (!func.empty()) mMediaMetrics.markError(status, func.c_str());
+        ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
+        reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
+    }
+    mStatus = status;
+    return mStatus;
+}
+
 // ---------------------------------------------------------------------------
 
 void AudioRecord::MediaMetrics::gather(const AudioRecord *record)
@@ -246,13 +257,28 @@
     if (pid == -1 || (callingPid != myPid)) {
         adjPid = callingPid;
     }
-    mClientAttributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(adjPid));
-
+    auto clientAttributionSourcePid = legacy2aidl_pid_t_int32_t(adjPid);
+    if (!clientAttributionSourcePid.ok()) {
+        return logIfErrorAndReturnStatus(BAD_VALUE,
+                                         StringPrintf("%s: received invalid client attribution "
+                                                      "source pid, pid: %d, sessionId: %d",
+                                                      __func__, pid, sessionId),
+                                         __func__);
+    }
+    mClientAttributionSource.pid = clientAttributionSourcePid.value();
     uid_t adjUid = uid;
     if (uid == -1 || (callingPid != myPid)) {
         adjUid = IPCThreadState::self()->getCallingUid();
     }
-    mClientAttributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(adjUid));
+    auto clientAttributionSourceUid = legacy2aidl_uid_t_int32_t(adjUid);
+    if (!clientAttributionSourceUid.ok()) {
+        return logIfErrorAndReturnStatus(BAD_VALUE,
+                                         StringPrintf("%s: received invalid client attribution "
+                                                      "source uid, pid: %d, session id: %d",
+                                                      __func__, pid, sessionId),
+                                         __func__);
+    }
+    mClientAttributionSource.uid = clientAttributionSourceUid.value();
 
     mTracker.reset(new RecordingActivityTracker());
 
@@ -261,7 +287,6 @@
     mSelectedMicFieldDimension = microphoneFieldDimension;
     mMaxSharedAudioHistoryMs = maxSharedAudioHistoryMs;
 
-    std::string errorMessage;
     // Copy the state variables early so they are available for error reporting.
     if (pAttributes == nullptr) {
         mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
@@ -304,38 +329,48 @@
         break;
     case TRANSFER_CALLBACK:
         if (callback == nullptr) {
-            errorMessage = StringPrintf(
-                    "%s: Transfer type TRANSFER_CALLBACK but callback == nullptr", __func__);
-            status = BAD_VALUE;
-            goto error;
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s: Transfer type TRANSFER_CALLBACK but callback == nullptr, "
+                                 "pid: %d, session id: %d",
+                                 __func__, pid, sessionId),
+                    __func__);
         }
         break;
     case TRANSFER_OBTAIN:
     case TRANSFER_SYNC:
         break;
     default:
-        errorMessage = StringPrintf("%s: Invalid transfer type %d", __func__, mTransfer);
-        status = BAD_VALUE;
-        goto error;
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE,
+                StringPrintf("%s: Invalid transfer type %d, pid: %d, session id: %d", __func__,
+                             mTransfer, pid, sessionId),
+                __func__);
     }
 
     // invariant that mAudioRecord != 0 is true only after set() returns successfully
     if (mAudioRecord != 0) {
-        errorMessage = StringPrintf("%s: Track already in use", __func__);
-        status = INVALID_OPERATION;
-        goto error;
+        return logIfErrorAndReturnStatus(
+                INVALID_OPERATION,
+                StringPrintf("%s: Track already in use, pid: %d, session id: %d", __func__, pid,
+                             sessionId),
+                __func__);
     }
 
     if (!audio_is_valid_format(mFormat)) {
-        errorMessage = StringPrintf("%s: Format %#x is not valid", __func__, mFormat);
-        status = BAD_VALUE;
-        goto error;
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE,
+                StringPrintf("%s: Format %#x is not valid, pid: %d, session id: %d", __func__,
+                             mFormat, pid, sessionId),
+                __func__);
     }
 
     if (!audio_is_input_channel(mChannelMask)) {
-        errorMessage = StringPrintf("%s: Invalid channel mask %#x", __func__, mChannelMask);
-        status = BAD_VALUE;
-        goto error;
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE,
+                StringPrintf("%s: Invalid channel mask %#x, pid: %d, session id: %d", __func__,
+                             mChannelMask, pid, sessionId),
+                __func__);
     }
 
     mChannelCount = audio_channel_count_from_in_mask(mChannelMask);
@@ -369,7 +404,8 @@
             mAudioRecordThread.clear();
         }
         // bypass error message to avoid logging twice (createRecord_l logs the error).
-        goto exit;
+        mStatus = status;
+        return mStatus;
     }
 
     // TODO: add audio hardware input latency here
@@ -385,15 +421,7 @@
     mFramesRead = 0;
     mFramesReadServerOffset = 0;
 
-error:
-    if (status != NO_ERROR) {
-        mMediaMetrics.markError(status, __FUNCTION__);
-        ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
-        reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
-    }
-exit:
-    mStatus = status;
-    return status;
+    return logIfErrorAndReturnStatus(status, "", __func__);
 }
 
 // -------------------------------------------------------------------------
@@ -783,12 +811,10 @@
     status_t status;
     static const int32_t kMaxCreateAttempts = 3;
     int32_t remainingAttempts = kMaxCreateAttempts;
-    std::string errorMessage;
 
     if (audioFlinger == 0) {
-        errorMessage = StringPrintf("%s(%d): Could not get audioflinger", __func__, mPortId);
-        status = NO_INIT;
-        goto exit;
+        return logIfErrorAndReturnStatus(
+                NO_INIT, StringPrintf("%s(%d): Could not get audioflinger", __func__, mPortId), "");
     }
 
     // mFlags (not mOrigFlags) is modified depending on whether fast request is accepted.
@@ -846,16 +872,34 @@
 
     do {
         media::CreateRecordResponse response;
-        status = audioFlinger->createRecord(VALUE_OR_FATAL(input.toAidl()), response);
-        output = VALUE_OR_FATAL(IAudioFlinger::CreateRecordOutput::fromAidl(response));
+        auto aidlInput = input.toAidl();
+        if (!aidlInput.ok()) {
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s(%d): Could not create record due to invalid input", __func__,
+                                 mPortId),
+                    "");
+        }
+        status = audioFlinger->createRecord(aidlInput.value(), response);
+
+        auto recordOutput = IAudioFlinger::CreateRecordOutput::fromAidl(response);
+        if (!recordOutput.ok()) {
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s(%d): Could not create record output due to invalid response",
+                                 __func__, mPortId),
+                    "");
+        }
+        output = recordOutput.value();
         if (status == NO_ERROR) {
             break;
         }
         if (status != FAILED_TRANSACTION || --remainingAttempts <= 0) {
-            errorMessage = StringPrintf(
-                    "%s(%d): AudioFlinger could not create record track, status: %d",
-                    __func__, mPortId, status);
-            goto exit;
+            return logIfErrorAndReturnStatus(
+                    status,
+                    StringPrintf("%s(%d): AudioFlinger could not create record track, status: %d",
+                                 __func__, mPortId, status),
+                    "");
         }
         // FAILED_TRANSACTION happens under very specific conditions causing a state mismatch
         // between audio policy manager and audio flinger during the input stream open sequence
@@ -890,9 +934,9 @@
     mHalFormat = output.halConfig.format;
 
     if (output.cblk == 0) {
-        errorMessage = StringPrintf("%s(%d): Could not get control block", __func__, mPortId);
-        status = NO_INIT;
-        goto exit;
+        return logIfErrorAndReturnStatus(
+                NO_INIT, StringPrintf("%s(%d): Could not get control block", __func__, mPortId),
+                "");
     }
     // TODO: Using unsecurePointer() has some associated security pitfalls
     //       (see declaration for details).
@@ -900,10 +944,9 @@
     //       issue (e.g. by copying).
     iMemPointer = output.cblk ->unsecurePointer();
     if (iMemPointer == NULL) {
-        errorMessage = StringPrintf(
-                "%s(%d): Could not get control block pointer", __func__, mPortId);
-        status = NO_INIT;
-        goto exit;
+        return logIfErrorAndReturnStatus(
+                NO_INIT,
+                StringPrintf("%s(%d): Could not get control block pointer", __func__, mPortId), "");
     }
     cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
 
@@ -920,10 +963,9 @@
         //       issue (e.g. by copying).
         buffers = output.buffers->unsecurePointer();
         if (buffers == NULL) {
-            errorMessage = StringPrintf(
-                    "%s(%d): Could not get buffer pointer", __func__, mPortId);
-            status = NO_INIT;
-            goto exit;
+            return logIfErrorAndReturnStatus(
+                    NO_INIT,
+                    StringPrintf("%s(%d): Could not get buffer pointer", __func__, mPortId), "");
         }
     }
 
@@ -1015,15 +1057,8 @@
         .set(AMEDIAMETRICS_PROP_SELECTEDMICFIELDDIRECTION, (double)mSelectedMicFieldDimension)
         .record();
 
-exit:
-    if (status != NO_ERROR) {
-        ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
-        reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
-    }
-
-    mStatus = status;
     // sp<IAudioTrack> track destructor will cause releaseOutput() to be called by AudioFlinger
-    return status;
+    return logIfErrorAndReturnStatus(status, "", "");
 }
 
 // Report error associated with the event and some configuration details.
@@ -1106,11 +1141,10 @@
             // start of lock scope
             AutoMutex lock(mLock);
 
-            uint32_t newSequence = mSequence;
             // did previous obtainBuffer() fail due to media server death or voluntary invalidation?
             if (status == DEAD_OBJECT) {
                 // re-create track, unless someone else has already done so
-                if (newSequence == oldSequence) {
+                if (mSequence == oldSequence) {
                     if (!audio_is_linear_pcm(mFormat)) {
                         // If compressed capture, don't attempt to restore the track.
                         // Return a DEAD_OBJECT error and let the caller recreate.
@@ -1126,7 +1160,7 @@
                     }
                 }
             }
-            oldSequence = newSequence;
+            oldSequence = mSequence;
 
             // Keep the extra references
             proxy = mProxy;
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index e2386ca..ecf7436 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -188,6 +188,14 @@
     return result.value_or(false);
 }
 
+status_t AudioTrack::logIfErrorAndReturnStatus(status_t status, const std::string& errorMessage) {
+    if (status != NO_ERROR) {
+        ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
+        reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
+    }
+    mStatus = status;
+    return mStatus;
+}
 // ---------------------------------------------------------------------------
 
 void AudioTrack::MediaMetrics::gather(const AudioTrack *track)
@@ -365,6 +373,10 @@
                 mSessionId, IPCThreadState::self()->getCallingPid(), clientPid);
         AudioSystem::releaseAudioSessionId(mSessionId, clientPid);
     }
+
+    if (mOutput != AUDIO_IO_HANDLE_NONE) {
+        AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
+    }
 }
 
 void AudioTrack::stopAndJoinCallbacks() {
@@ -413,9 +425,16 @@
     uint32_t channelCount;
     pid_t callingPid;
     pid_t myPid;
-    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
-    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
-    std::string errorMessage;
+    auto uid = aidl2legacy_int32_t_uid_t(attributionSource.uid);
+    auto pid = aidl2legacy_int32_t_pid_t(attributionSource.pid);
+    if (!uid.ok()) {
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE, StringPrintf("%s: received invalid attribution source uid", __func__));
+    }
+    if (!pid.ok()) {
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE, StringPrintf("%s: received invalid attribution source pid", __func__));
+    }
     // Note mPortId is not valid until the track is created, so omit mPortId in ALOG for set.
     ALOGV("%s(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
           "flags %#x, notificationFrames %d, sessionId %d, transferType %d, uid %d, pid %d",
@@ -486,34 +505,33 @@
     case TRANSFER_CALLBACK:
     case TRANSFER_SYNC_NOTIF_CALLBACK:
         if (callback == nullptr || sharedBuffer != 0) {
-            errorMessage = StringPrintf(
-                    "%s: Transfer type %s but callback == nullptr || sharedBuffer != 0",
-                    convertTransferToText(transferType), __func__);
-            status = BAD_VALUE;
-            goto error;
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf(
+                            "%s: Transfer type %s but callback == nullptr || sharedBuffer != 0",
+                            convertTransferToText(transferType), __func__));
         }
         break;
     case TRANSFER_OBTAIN:
     case TRANSFER_SYNC:
         if (sharedBuffer != 0) {
-            errorMessage = StringPrintf(
-                    "%s: Transfer type TRANSFER_OBTAIN but sharedBuffer != 0", __func__);
-            status = BAD_VALUE;
-            goto error;
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s: Transfer type TRANSFER_OBTAIN but sharedBuffer != 0",
+                                 __func__));
         }
         break;
     case TRANSFER_SHARED:
         if (sharedBuffer == 0) {
-            errorMessage = StringPrintf(
-                    "%s: Transfer type TRANSFER_SHARED but sharedBuffer == 0", __func__);
-            status = BAD_VALUE;
-            goto error;
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s: Transfer type TRANSFER_SHARED but sharedBuffer == 0",
+                                 __func__));
         }
         break;
     default:
-        errorMessage = StringPrintf("%s: Invalid transfer type %d", __func__, transferType);
-        status = BAD_VALUE;
-        goto error;
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE, StringPrintf("%s: Invalid transfer type %d", __func__, transferType));
     }
     mSharedBuffer = sharedBuffer;
     mTransfer = transferType;
@@ -524,9 +542,8 @@
 
     // invariant that mAudioTrack != 0 is true only after set() returns successfully
     if (mAudioTrack != 0) {
-        errorMessage = StringPrintf("%s: Track already in use", __func__);
-        status = INVALID_OPERATION;
-        goto error;
+        return logIfErrorAndReturnStatus(INVALID_OPERATION,
+                                         StringPrintf("%s: Track already in use", __func__));
     }
 
     // handle default values first.
@@ -535,9 +552,8 @@
     }
     if (pAttributes == NULL) {
         if (uint32_t(streamType) >= AUDIO_STREAM_PUBLIC_CNT) {
-            errorMessage = StringPrintf("%s: Invalid stream type %d", __func__, streamType);
-            status = BAD_VALUE;
-            goto error;
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE, StringPrintf("%s: Invalid stream type %d", __func__, streamType));
         }
         mOriginalStreamType = streamType;
     } else {
@@ -546,15 +562,13 @@
 
     // validate parameters
     if (!audio_is_valid_format(format)) {
-        errorMessage = StringPrintf("%s: Invalid format %#x", __func__, format);
-        status = BAD_VALUE;
-        goto error;
+        return logIfErrorAndReturnStatus(BAD_VALUE,
+                                         StringPrintf("%s: Invalid format %#x", __func__, format));
     }
 
     if (!audio_is_output_channel(channelMask)) {
-        errorMessage = StringPrintf("%s: Invalid channel mask %#x",  __func__, channelMask);
-        status = BAD_VALUE;
-        goto error;
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE, StringPrintf("%s: Invalid channel mask %#x", __func__, channelMask));
     }
     channelCount = audio_channel_count_from_out_mask(channelMask);
     mChannelCount = channelCount;
@@ -569,10 +583,9 @@
 
     // sampling rate must be specified for direct outputs
     if (sampleRate == 0 && (mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) {
-        errorMessage = StringPrintf(
-                "%s: sample rate must be specified for direct outputs", __func__);
-        status = BAD_VALUE;
-        goto error;
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE,
+                StringPrintf("%s: sample rate must be specified for direct outputs", __func__));
     }
     // 1.0 <= mMaxRequiredSpeed <= AUDIO_TIMESTRETCH_SPEED_MAX
     mMaxRequiredSpeed = min(max(maxRequiredSpeed, 1.0f), AUDIO_TIMESTRETCH_SPEED_MAX);
@@ -600,17 +613,16 @@
         mNotificationsPerBufferReq = 0;
     } else {
         if (!(mFlags & AUDIO_OUTPUT_FLAG_FAST)) {
-            errorMessage = StringPrintf(
-                    "%s: notificationFrames=%d not permitted for non-fast track",
-                    __func__, notificationFrames);
-            status = BAD_VALUE;
-            goto error;
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s: notificationFrames=%d not permitted for non-fast track",
+                                 __func__, notificationFrames));
         }
         if (frameCount > 0) {
-            ALOGE("%s(): notificationFrames=%d not permitted with non-zero frameCount=%zu",
-                    __func__, notificationFrames, frameCount);
-            status = BAD_VALUE;
-            goto error;
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE, StringPrintf("%s(): notificationFrames=%d not permitted "
+                                            "with non-zero frameCount=%zu",
+                                            __func__, notificationFrames, frameCount));
         }
         mNotificationFramesReq = 0;
         const uint32_t minNotificationsPerBuffer = 1;
@@ -627,12 +639,24 @@
     mClientAttributionSource = AttributionSourceState(attributionSource);
     callingPid = IPCThreadState::self()->getCallingPid();
     myPid = getpid();
-    if (uid == -1 || (callingPid != myPid)) {
-        mClientAttributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(
-            IPCThreadState::self()->getCallingUid()));
+    if (uid.value() == -1 || (callingPid != myPid)) {
+        auto clientAttributionSourceUid =
+                legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid());
+        if (!clientAttributionSourceUid.ok()) {
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s: received invalid client attribution source uid", __func__));
+        }
+        mClientAttributionSource.uid = clientAttributionSourceUid.value();
     }
-    if (pid == (pid_t)-1 || (callingPid != myPid)) {
-        mClientAttributionSource.pid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(callingPid));
+    if (pid.value() == (pid_t)-1 || (callingPid != myPid)) {
+        auto clientAttributionSourcePid = legacy2aidl_uid_t_int32_t(callingPid);
+        if (!clientAttributionSourcePid.ok()) {
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s: received invalid client attribution source pid", __func__));
+        }
+        mClientAttributionSource.pid = clientAttributionSourcePid.value();
     }
     mAuxEffectId = 0;
     mCallback = callback;
@@ -655,7 +679,8 @@
             mAudioTrackThread.clear();
         }
         // We do not goto error to prevent double-logging errors.
-        goto exit;
+        mStatus = status;
+        return mStatus;
     }
 
     mLoopCount = 0;
@@ -670,7 +695,7 @@
     mReleased = 0;
     mStartNs = 0;
     mStartFromZeroUs = 0;
-    AudioSystem::acquireAudioSessionId(mSessionId, pid, uid);
+    AudioSystem::acquireAudioSessionId(mSessionId, pid.value(), uid.value());
     mSequence = 1;
     mObservedSequence = mSequence;
     mInUnderrun = false;
@@ -688,15 +713,7 @@
     mFramesWrittenAtRestore = -1; // -1 is a unique initializer.
     mVolumeHandler = new media::VolumeHandler();
 
-error:
-    if (status != NO_ERROR) {
-        ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
-        reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
-    }
-    // fall through
-exit:
-    mStatus = status;
-    return status;
+    return logIfErrorAndReturnStatus(status, "");
 }
 
 
@@ -723,8 +740,22 @@
         audio_port_handle_t selectedDeviceId)
 {
     AttributionSourceState attributionSource;
-    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(uid));
-    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(pid));
+    auto attributionSourceUid = legacy2aidl_uid_t_int32_t(uid);
+    if (!attributionSourceUid.ok()) {
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE,
+                StringPrintf("%s: received invalid attribution source uid, uid: %d, session id: %d",
+                             __func__, uid, sessionId));
+    }
+    attributionSource.uid = attributionSourceUid.value();
+    auto attributionSourcePid = legacy2aidl_pid_t_int32_t(pid);
+    if (!attributionSourcePid.ok()) {
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE,
+                StringPrintf("%s: received invalid attribution source pid, pid: %d, sessionId: %d",
+                             __func__, pid, sessionId));
+    }
+    attributionSource.pid = attributionSourcePid.value();
     attributionSource.token = sp<BBinder>::make();
     if (callback) {
         mLegacyCallbackWrapper = sp<LegacyCallbackWrapper>::make(callback, user);
@@ -1792,15 +1823,11 @@
 status_t AudioTrack::createTrack_l()
 {
     status_t status;
-    bool callbackAdded = false;
-    std::string errorMessage;
 
     const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
     if (audioFlinger == 0) {
-        errorMessage = StringPrintf("%s(%d): Could not get audioflinger",
-                __func__, mPortId);
-        status = DEAD_OBJECT;
-        goto exit;
+        return logIfErrorAndReturnStatus(
+                DEAD_OBJECT, StringPrintf("%s(%d): Could not get audioflinger", __func__, mPortId));
     }
 
     {
@@ -1868,21 +1895,31 @@
     input.audioTrackCallback = mAudioTrackCallback;
 
     media::CreateTrackResponse response;
-    status = audioFlinger->createTrack(VALUE_OR_FATAL(input.toAidl()), response);
+    auto aidlInput = input.toAidl();
+    if (!aidlInput.ok()) {
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE, StringPrintf("%s(%d): Could not create track due to invalid input",
+                                        __func__, mPortId));
+    }
+    status = audioFlinger->createTrack(aidlInput.value(), response);
 
     IAudioFlinger::CreateTrackOutput output{};
     if (status == NO_ERROR) {
-        output = VALUE_OR_FATAL(IAudioFlinger::CreateTrackOutput::fromAidl(response));
+        auto trackOutput = IAudioFlinger::CreateTrackOutput::fromAidl(response);
+        if (!trackOutput.ok()) {
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s(%d): Could not create track output due to invalid response",
+                                 __func__, mPortId));
+        }
+        output = trackOutput.value();
     }
 
     if (status != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
-        errorMessage = StringPrintf(
-                "%s(%d): AudioFlinger could not create track, status: %d output %d",
-                __func__, mPortId, status, output.outputId);
-        if (status == NO_ERROR) {
-            status = INVALID_OPERATION; // device not ready
-        }
-        goto exit;
+        return logIfErrorAndReturnStatus(
+                status == NO_ERROR ? INVALID_OPERATION : status,  // device not ready
+                StringPrintf("%s(%d): AudioFlinger could not create track, status: %d output %d",
+                             __func__, mPortId, status, output.outputId));
     }
     ALOG_ASSERT(output.audioTrack != 0);
 
@@ -1912,22 +1949,22 @@
     // FIXME compare to AudioRecord
     std::optional<media::SharedFileRegion> sfr;
     output.audioTrack->getCblk(&sfr);
-    sp<IMemory> iMem = VALUE_OR_FATAL(aidl2legacy_NullableSharedFileRegion_IMemory(sfr));
-    if (iMem == 0) {
-        errorMessage = StringPrintf("%s(%d): Could not get control block", __func__, mPortId);
-        status = FAILED_TRANSACTION;
-        goto exit;
+    auto iMemory = aidl2legacy_NullableSharedFileRegion_IMemory(sfr);
+    if (!iMemory.ok() || iMemory.value() == 0) {
+        return logIfErrorAndReturnStatus(
+                FAILED_TRANSACTION,
+                StringPrintf("%s(%d): Could not get control block", __func__, mPortId));
     }
+    sp<IMemory> iMem = iMemory.value();
     // TODO: Using unsecurePointer() has some associated security pitfalls
     //       (see declaration for details).
     //       Either document why it is safe in this case or address the
     //       issue (e.g. by copying).
     void *iMemPointer = iMem->unsecurePointer();
     if (iMemPointer == NULL) {
-        errorMessage = StringPrintf(
-                "%s(%d): Could not get control block pointer", __func__, mPortId);
-        status = FAILED_TRANSACTION;
-        goto exit;
+        return logIfErrorAndReturnStatus(
+                FAILED_TRANSACTION,
+                StringPrintf("%s(%d): Could not get control block pointer", __func__, mPortId));
     }
     // invariant that mAudioTrack != 0 is true only after set() returns successfully
     if (mAudioTrack != 0) {
@@ -1962,7 +1999,6 @@
             AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
         }
         AudioSystem::addAudioDeviceCallback(this, output.outputId, output.portId);
-        callbackAdded = true;
     }
 
     mPortId = output.portId;
@@ -1987,11 +2023,9 @@
         //       issue (e.g. by copying).
         buffers = mSharedBuffer->unsecurePointer();
         if (buffers == NULL) {
-            errorMessage = StringPrintf(
-                    "%s(%d): Could not get buffer pointer", __func__, mPortId);
-            ALOGE("%s", errorMessage.c_str());
-            status = FAILED_TRANSACTION;
-            goto exit;
+            return logIfErrorAndReturnStatus(
+                    FAILED_TRANSACTION,
+                    StringPrintf("%s(%d): Could not get buffer pointer", __func__, mPortId));
         }
     }
 
@@ -2088,19 +2122,8 @@
 
     }
 
-exit:
-    if (status != NO_ERROR) {
-        if (callbackAdded) {
-            // note: mOutput is always valid is callbackAdded is true
-            AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
-        }
-        ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
-        reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
-    }
-    mStatus = status;
-
     // sp<IAudioTrack> track destructor will cause releaseOutput() to be called by AudioFlinger
-    return status;
+    return logIfErrorAndReturnStatus(status, "");
 }
 
 void AudioTrack::reportError(status_t status, const char *event, const char *message) const
@@ -2186,11 +2209,10 @@
         {   // start of lock scope
             AutoMutex lock(mLock);
 
-            uint32_t newSequence = mSequence;
             // did previous obtainBuffer() fail due to media server death or voluntary invalidation?
             if (status == DEAD_OBJECT) {
                 // re-create track, unless someone else has already done so
-                if (newSequence == oldSequence) {
+                if (mSequence == oldSequence) {
                     status = restoreTrack_l("obtainBuffer");
                     if (status != NO_ERROR) {
                         buffer.mFrameCount = 0;
@@ -2200,7 +2222,7 @@
                     }
                 }
             }
-            oldSequence = newSequence;
+            oldSequence = mSequence;
 
             if (status == NOT_ENOUGH_DATA) {
                 restartIfDisabled();
@@ -3189,7 +3211,12 @@
         media::AudioTimestampInternal ts;
         mAudioTrack->getTimestamp(&ts, &status);
         if (status == OK) {
-            timestamp = VALUE_OR_FATAL(aidl2legacy_AudioTimestampInternal_AudioTimestamp(ts));
+            auto legacyTs = aidl2legacy_AudioTimestampInternal_AudioTimestamp(ts);
+            if (!legacyTs.ok()) {
+                return logIfErrorAndReturnStatus(
+                        BAD_VALUE, StringPrintf("%s: received invalid audio timestamp", __func__));
+            }
+            timestamp = legacyTs.value();
         }
     } else {
         // read timestamp from shared memory
diff --git a/media/libaudioclient/TEST_MAPPING b/media/libaudioclient/TEST_MAPPING
index 68dba34..29b876c 100644
--- a/media/libaudioclient/TEST_MAPPING
+++ b/media/libaudioclient/TEST_MAPPING
@@ -47,12 +47,7 @@
       "name": "audioeffect_analysis"
     },
     {
-      "name": "CtsVirtualDevicesTestCases",
-      "options" : [
-        {
-          "include-filter": "android.virtualdevice.cts.VirtualAudioTest"
-        }
-      ]
+      "name": "CtsVirtualDevicesAudioTestCases"
     }
   ]
 }
diff --git a/media/libaudioclient/TrackPlayerBase.cpp b/media/libaudioclient/TrackPlayerBase.cpp
index 4fc1c44..bc38251 100644
--- a/media/libaudioclient/TrackPlayerBase.cpp
+++ b/media/libaudioclient/TrackPlayerBase.cpp
@@ -38,12 +38,12 @@
                            player_type_t playerType, audio_usage_t usage,
                            audio_session_t sessionId) {
     PlayerBase::init(playerType, usage, sessionId);
-    mAudioTrack = pat;
-    if (mAudioTrack != 0) {
+    mAudioTrack.store(pat);
+    if (pat != 0) {
         mCallbackHandle = callback;
         mSelfAudioDeviceCallback = new SelfAudioDeviceCallback(*this);
-        mAudioTrack->addAudioDeviceCallback(mSelfAudioDeviceCallback);
-        mAudioTrack->setPlayerIId(mPIId); // set in PlayerBase::init().
+        pat->addAudioDeviceCallback(mSelfAudioDeviceCallback);
+        pat->setPlayerIId(mPIId);  // set in PlayerBase::init().
     }
 }
 
@@ -65,12 +65,15 @@
 }
 
 void TrackPlayerBase::doDestroy() {
-    if (mAudioTrack != 0) {
-        mAudioTrack->stop();
-        mAudioTrack->removeAudioDeviceCallback(mSelfAudioDeviceCallback);
+    sp<AudioTrack> audioTrack = getAudioTrack();
+
+    // Note that there may still be another reference in post-unlock phase of SetPlayState
+    clearAudioTrack();
+
+    if (audioTrack != 0) {
+        audioTrack->stop();
+        audioTrack->removeAudioDeviceCallback(mSelfAudioDeviceCallback);
         mSelfAudioDeviceCallback.clear();
-        // Note that there may still be another reference in post-unlock phase of SetPlayState
-        mAudioTrack.clear();
     }
 }
 
@@ -87,16 +90,16 @@
 // Implementation of IPlayer
 status_t TrackPlayerBase::playerStart() {
     status_t status = NO_INIT;
-    if (mAudioTrack != 0) {
-        status = mAudioTrack->start();
+    if (sp<AudioTrack> audioTrack = getAudioTrack(); audioTrack != 0) {
+        status = audioTrack->start();
     }
     return status;
 }
 
 status_t TrackPlayerBase::playerPause() {
     status_t status = NO_INIT;
-    if (mAudioTrack != 0) {
-        mAudioTrack->pause();
+    if (sp<AudioTrack> audioTrack = getAudioTrack(); audioTrack != 0) {
+        audioTrack->pause();
         status = NO_ERROR;
     }
     return status;
@@ -105,8 +108,8 @@
 
 status_t TrackPlayerBase::playerStop() {
     status_t status = NO_INIT;
-    if (mAudioTrack != 0) {
-        mAudioTrack->stop();
+    if (sp<AudioTrack> audioTrack = getAudioTrack(); audioTrack != 0) {
+        audioTrack->stop();
         status = NO_ERROR;
     }
     return status;
@@ -118,10 +121,10 @@
 
 status_t TrackPlayerBase::doSetVolume() {
     status_t status = NO_INIT;
-    if (mAudioTrack != 0) {
+    if (sp<AudioTrack> audioTrack = getAudioTrack(); audioTrack != 0) {
         float tl = mPlayerVolumeL * mPanMultiplierL * mVolumeMultiplierL;
         float tr = mPlayerVolumeR * mPanMultiplierR * mVolumeMultiplierR;
-        mAudioTrack->setVolume(tl, tr);
+        audioTrack->setVolume(tl, tr);
         status = NO_ERROR;
     }
     return status;
@@ -140,10 +143,9 @@
     if (s != OK) {
         return binderStatusFromStatusT(s);
     }
-
-    if (mAudioTrack != 0) {
+    if (sp<AudioTrack> audioTrack = getAudioTrack(); audioTrack != 0) {
         ALOGD("TrackPlayerBase::applyVolumeShaper() from IPlayer");
-        VolumeShaper::Status status = mAudioTrack->applyVolumeShaper(spConfiguration, spOperation);
+        VolumeShaper::Status status = audioTrack->applyVolumeShaper(spConfiguration, spOperation);
         if (status < 0) { // a non-negative value is the volume shaper id.
             ALOGE("TrackPlayerBase::applyVolumeShaper() failed with status %d", status);
         }
diff --git a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
index ddda8bb..73610a8 100644
--- a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
@@ -19,6 +19,7 @@
 import android.media.AudioPortFw;
 import android.media.audio.common.AudioConfig;
 import android.media.audio.common.AudioConfigBase;
+import android.media.audio.common.AudioAttributes;
 
 /**
  * {@hide}
@@ -32,4 +33,5 @@
     AudioPortFw device;
     /** Bitmask, indexed by AudioOutputFlag. */
     int flags;
+    AudioAttributes attributes;
 }
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index 11955c9..14e528f 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -24,57 +24,23 @@
         "android.hardware.audio.common@7.0-enums",
         "audiopermissioncontroller",
         "libaudiomockhal",
-        "libcgrouprc",
-        "libcgrouprc_format",
         "libfakeservicemanager",
         "libjsoncpp",
-        "liblog",
-        "libmedia_helper",
         "libmediametricsservice",
         "libprocessgroup",
+        "libprocessgroup_util",
         "shared-file-region-aidl-cpp",
     ],
     shared_libs: [
         "android.hardware.audio.common-util",
-        "audioclient-types-aidl-cpp",
-        "audioflinger-aidl-cpp",
-        "audiopolicy-aidl-cpp",
-        "audiopolicy-types-aidl-cpp",
-        "av-types-aidl-cpp",
-        "capture_state_listener-aidl-cpp",
-        "effect-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "libactivitymanager_aidl",
-        "libaudioclient",
-        "libaudioclient_aidl_conversion",
         "libaudioflinger",
-        "libaudiofoundation",
-        "libaudiohal",
-        "libaudiomanager",
-        "libaudiopolicy",
-        "libaudiopolicymanagerdefault",
         "libaudiopolicyservice",
-        "libaudioprocessing",
-        "libaudioutils",
         "libdl",
-        "libheadtracking",
-        "libmediametrics",
-        "libmediautils",
-        "libnbaio",
-        "libnblog",
-        "libpowermanager",
-        "libvibrator",
         "libvndksupport",
-        "libxml2",
         "mediametricsservice-aidl-cpp",
-        "packagemanager_aidl-cpp",
     ],
     header_libs: [
-        "libaudioflinger_headers",
-        "libaudiofoundation_headers",
-        "libaudiohal_headers",
         "libaudiopolicymanager_interface_headers",
-        "libbinder_headers",
         "libmedia_headers",
     ],
     fuzz_config: {
@@ -99,6 +65,8 @@
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_effect_ndk_shared",
         "libaudioclient_aidl_fuzzer_defaults",
+        "libaudioflinger_dependencies",
+        "libaudiopolicyservice_dependencies",
         "service_fuzzer_defaults",
     ],
 }
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-0 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..c1e1de5
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-1 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..8e49acd
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-2 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..a8ffcae
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-3 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..7c25f6e
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index a95c700..65ada70 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -37,8 +37,6 @@
         "effect-aidl-cpp",
         "libaudioclient",
         "libbase",
-        "libcgrouprc",
-        "libcgrouprc_format",
         "libcutils",
         "libjsoncpp",
         "liblog",
@@ -46,6 +44,7 @@
         "libmediametrics",
         "libmediametricsservice",
         "libprocessgroup",
+        "libprocessgroup_util",
         "shared-file-region-aidl-cpp",
     ],
     shared_libs: [
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index d4479ef..25d91d3 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -138,6 +138,12 @@
                                       audio_format_t format,
                                       audio_channel_mask_t channelMask);
 
+    /* Checks for erroneous status, marks error in MediaMetrics, logs the error message.
+     * Updates and returns mStatus.
+     */
+    status_t logIfErrorAndReturnStatus(status_t status, const std::string& errorMessage,
+                                       const std::string& func);
+
     /* How data is transferred from AudioRecord
      */
     enum transfer_type {
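
The logIfErrorAndReturnStatus() helper declared above (and mirrored in AudioTrack.h further down) replaces the goto error / goto exit bookkeeping removed throughout this patch: every failure path now logs and records its status in a single call and returns immediately. Below is a standalone sketch of that pattern with simplified types and fprintf standing in for ALOGE/reportError; the names and values are illustrative, not the AOSP code.

#include <cstdio>
#include <string>

using status_t = int;
constexpr status_t NO_ERROR = 0;
constexpr status_t BAD_VALUE = -22;

struct RecorderSketch {
    status_t mStatus = NO_ERROR;

    // Logs the message on error, then updates and returns mStatus.
    status_t logIfErrorAndReturnStatus(status_t status, const std::string& errorMessage) {
        if (status != NO_ERROR && !errorMessage.empty()) {
            std::fprintf(stderr, "%s\n", errorMessage.c_str());  // stand-in for ALOGE/reportError
        }
        mStatus = status;
        return mStatus;
    }

    status_t set(int format) {
        if (format < 0) {
            // Early return replaces the old "errorMessage = ...; status = ...; goto error;" chain.
            return logIfErrorAndReturnStatus(BAD_VALUE, "set: invalid format");
        }
        // ... remaining initialization ...
        return logIfErrorAndReturnStatus(NO_ERROR, "");
    }
};

int main() {
    RecorderSketch recorder;
    return recorder.set(-1) == BAD_VALUE ? 0 : 1;
}
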
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 3a001a4..de97863 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -233,8 +233,7 @@
      * FIXME This API assumes a route, and so should be deprecated.
      */
 
-    static status_t getMinFrameCount(size_t* frameCount,
-                                     audio_stream_type_t streamType,
+    static status_t getMinFrameCount(size_t* frameCount, audio_stream_type_t streamType,
                                      uint32_t sampleRate);
 
     /* Check if direct playback is possible for the given audio configuration and attributes.
@@ -243,6 +242,11 @@
     static bool isDirectOutputSupported(const audio_config_base_t& config,
                                         const audio_attributes_t& attributes);
 
+    /* Checks for erroneous status, logs the error message.
+     * Updates and returns mStatus.
+     */
+    status_t logIfErrorAndReturnStatus(status_t status, const std::string& errorMessage);
+
     /* How data is transferred to AudioTrack
      */
     enum transfer_type {
diff --git a/media/libaudioclient/include/media/TrackPlayerBase.h b/media/libaudioclient/include/media/TrackPlayerBase.h
index fe88116..8df9ff8 100644
--- a/media/libaudioclient/include/media/TrackPlayerBase.h
+++ b/media/libaudioclient/include/media/TrackPlayerBase.h
@@ -19,6 +19,7 @@
 
 #include <media/AudioTrack.h>
 #include <media/PlayerBase.h>
+#include <mediautils/Synchronization.h>
 
 namespace android {
 
@@ -37,10 +38,11 @@
             const media::VolumeShaperConfiguration& configuration,
             const media::VolumeShaperOperation& operation);
 
-    //FIXME move to protected field, so far made public to minimize changes to AudioTrack logic
-    sp<AudioTrack> mAudioTrack;
+    sp<AudioTrack> getAudioTrack() { return mAudioTrack.load(); }
 
-            void setPlayerVolume(float vl, float vr);
+    void clearAudioTrack() { mAudioTrack.store(nullptr); }
+
+    void setPlayerVolume(float vl, float vr);
 
 protected:
 
@@ -68,6 +70,7 @@
     float mPlayerVolumeL, mPlayerVolumeR;
     sp<AudioTrack::IAudioTrackCallback> mCallbackHandle;
     sp<SelfAudioDeviceCallback> mSelfAudioDeviceCallback;
+    mediautils::atomic_sp<AudioTrack> mAudioTrack;
 };
 
 } // namespace android
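
TrackPlayerBase now keeps the track in a mediautils::atomic_sp behind getAudioTrack()/clearAudioTrack() instead of exposing a public sp<AudioTrack> member, so every caller first takes a snapshot of the strong reference and then operates on that local copy (as doDestroy() and playerStart() do above). Below is a standalone approximation of this snapshot-then-use pattern, using std::shared_ptr plus a mutex in place of atomic_sp; all names are illustrative.

#include <memory>
#include <mutex>

struct FakeTrack {
    void stop() {}
};

class PlayerSketch {
public:
    // load(): take a snapshot of the strong reference under the lock.
    std::shared_ptr<FakeTrack> getTrack() {
        std::lock_guard<std::mutex> lock(mLock);
        return mTrack;
    }
    // store(nullptr): drop the member reference; outstanding snapshots stay valid.
    void clearTrack() {
        std::lock_guard<std::mutex> lock(mLock);
        mTrack.reset();
    }
    // store(): publish a new track.
    void setTrack(std::shared_ptr<FakeTrack> track) {
        std::lock_guard<std::mutex> lock(mLock);
        mTrack = std::move(track);
    }
    // Mirrors doDestroy(): snapshot first, clear the member, then use the snapshot
    // without holding the lock.
    void destroy() {
        std::shared_ptr<FakeTrack> track = getTrack();
        clearTrack();
        if (track) track->stop();
    }

private:
    std::mutex mLock;
    std::shared_ptr<FakeTrack> mTrack;
};

int main() {
    PlayerSketch player;
    player.setTrack(std::make_shared<FakeTrack>());
    player.destroy();
    return 0;
}
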
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index ddf14a3..3941280 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -107,7 +107,6 @@
         "framework-permission-aidl-cpp",
         "libaudioutils",
         "libbase",
-        "libcgrouprc",
         "libdl",
         "libmedia",
         "libmedia_helper",
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index 7f55e48..aa6cb0d 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -670,6 +670,25 @@
                                          AudioEncapsulationMetadataType::FRAMEWORK_TUNER,
                                          AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR));
 
+TEST(AudioPortDeviceExt_speakerLayoutRoundTripTest, Aidl2Legacy2Aidl_layoutMask) {
+    AudioPortDeviceExt initial{};
+    initial.speakerLayout = make_ACL_Stereo();
+    auto conv = aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+
+TEST(AudioPortDeviceExt_speakerLayoutRoundTripTest, Aidl2Legacy2Aidl_null) {
+    const AudioPortDeviceExt initial{};  // speakerLayout is null
+    auto conv = aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+
 class AudioGainModeRoundTripTest : public testing::TestWithParam<AudioGainMode> {};
 TEST_P(AudioGainModeRoundTripTest, Aidl2Legacy2Aidl) {
     const auto initial = GetParam();
diff --git a/media/libaudioclient/tests/audioeffect_tests.cpp b/media/libaudioclient/tests/audioeffect_tests.cpp
index 59d0c6a..bedeff9 100644
--- a/media/libaudioclient/tests/audioeffect_tests.cpp
+++ b/media/libaudioclient/tests/audioeffect_tests.cpp
@@ -70,8 +70,8 @@
     return effect;
 }
 
-status_t isEffectExistsOnAudioSession(const effect_uuid_t* type, const effect_uuid_t* uuid,
-                                      int priority, audio_session_t sessionId) {
+status_t createAndInitCheckEffect(const effect_uuid_t* type, const effect_uuid_t* uuid,
+                                  int priority, audio_session_t sessionId) {
     sp<AudioEffect> effect = createEffect(type, uuid, priority, sessionId);
     return effect->initCheck();
 }
@@ -272,10 +272,9 @@
     EXPECT_FALSE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
                                          capture->getAudioRecordHandle()))
             << "Effect should not have been default on record. " << type;
-    EXPECT_EQ(NO_ERROR,
-              isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
-                                           kDefaultInputEffectPriority - 1,
-                                           capture->getAudioRecordHandle()->getSessionId()))
+    EXPECT_EQ(NO_ERROR, createAndInitCheckEffect(selectedEffectType, selectedEffectUuid,
+                                                 kDefaultInputEffectPriority - 1,
+                                                 capture->getAudioRecordHandle()->getSessionId()))
             << "Effect should not have been added. " << type;
     EXPECT_EQ(OK, capture->audioProcess());
     EXPECT_EQ(OK, capture->stop());
@@ -296,9 +295,9 @@
                                         capture->getAudioRecordHandle()))
             << "Effect should have been default on record. " << type;
     EXPECT_EQ(ALREADY_EXISTS,
-              isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
-                                           kDefaultInputEffectPriority - 1,
-                                           capture->getAudioRecordHandle()->getSessionId()))
+              createAndInitCheckEffect(selectedEffectType, selectedEffectUuid,
+                                       kDefaultInputEffectPriority - 1,
+                                       capture->getAudioRecordHandle()->getSessionId()))
             << "Effect should have been added. " << type;
     EXPECT_EQ(OK, capture->audioProcess());
     EXPECT_EQ(OK, capture->stop());
@@ -313,10 +312,9 @@
     EXPECT_FALSE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
                                          capture->getAudioRecordHandle()))
             << "Effect should not have been default on record. " << type;
-    EXPECT_EQ(NO_ERROR,
-              isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
-                                           kDefaultInputEffectPriority - 1,
-                                           capture->getAudioRecordHandle()->getSessionId()))
+    EXPECT_EQ(NO_ERROR, createAndInitCheckEffect(selectedEffectType, selectedEffectUuid,
+                                                 kDefaultInputEffectPriority - 1,
+                                                 capture->getAudioRecordHandle()->getSessionId()))
             << "Effect should not have been added. " << type;
     EXPECT_EQ(OK, capture->audioProcess());
     EXPECT_EQ(OK, capture->stop());
@@ -421,8 +419,8 @@
     EXPECT_EQ(NO_ERROR, playback->create());
     EXPECT_EQ(NO_ERROR, playback->start());
     EXPECT_EQ(compatCheck ? NO_ERROR : NO_INIT,
-              isEffectExistsOnAudioSession(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
-                                           playback->getAudioTrackHandle()->getSessionId()))
+              createAndInitCheckEffect(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
+                                       playback->getAudioTrackHandle()->getSessionId()))
             << "Effect should not have been added. " << mTypeStr;
     EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
     playback->stop();
@@ -445,8 +443,8 @@
     EXPECT_EQ(NO_ERROR, playback->start());
     // If effect chosen is not compatible with the session, then effect won't be applied
     EXPECT_EQ(compatCheck ? ALREADY_EXISTS : NO_INIT,
-              isEffectExistsOnAudioSession(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
-                                           playback->getAudioTrackHandle()->getSessionId()))
+              createAndInitCheckEffect(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
+                                       playback->getAudioTrackHandle()->getSessionId()))
             << "Effect should have been added. " << mTypeStr;
     EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
     if (mSelectFastMode) {
@@ -467,8 +465,8 @@
     EXPECT_EQ(NO_ERROR, playback->create());
     EXPECT_EQ(NO_ERROR, playback->start());
     EXPECT_EQ(compatCheck ? NO_ERROR : NO_INIT,
-              isEffectExistsOnAudioSession(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
-                                           playback->getAudioTrackHandle()->getSessionId()))
+              createAndInitCheckEffect(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
+                                       playback->getAudioTrackHandle()->getSessionId()))
             << "Effect should not have been added. " << mTypeStr;
     EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
     playback->stop();
@@ -502,8 +500,8 @@
     EXPECT_EQ(NO_ERROR, playback->create());
     EXPECT_EQ(NO_ERROR, playback->start());
 
-    EXPECT_EQ(ALREADY_EXISTS, isEffectExistsOnAudioSession(
-                                      &mType, &mUuid, kDefaultOutputEffectPriority - 1, sessionId))
+    EXPECT_EQ(ALREADY_EXISTS,
+              createAndInitCheckEffect(&mType, &mUuid, kDefaultOutputEffectPriority - 1, sessionId))
             << "Effect should have been added. " << mTypeStr;
     if (mSelectFastMode) {
         EXPECT_EQ(mIsFastCompatibleEffect ? AUDIO_OUTPUT_FLAG_FAST : 0,
@@ -556,8 +554,8 @@
     ASSERT_EQ(NO_ERROR, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"));
     EXPECT_EQ(NO_ERROR, playback->create());
     EXPECT_EQ(NO_ERROR, playback->start());
-    EXPECT_TRUE(isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
-                                             kDefaultOutputEffectPriority - 1, sessionId))
+    ASSERT_EQ(ALREADY_EXISTS, createAndInitCheckEffect(selectedEffectType, selectedEffectUuid,
+                                                       kDefaultOutputEffectPriority - 1, sessionId))
             << "Effect should have been added. " << type;
     EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
     playback->stop();
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index 8151d39..a3ab9d2 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -86,7 +86,18 @@
     }
 }
 
-TEST(AudioTrackTest, DefaultRoutingTest) {
+class AudioTrackTest
+        : public ::testing::TestWithParam<int> {
+
+public:
+    AudioTrackTest()
+            : mSampleRate(GetParam()){};
+
+    const uint32_t mSampleRate;
+
+};
+
+TEST_P(AudioTrackTest, DefaultRoutingTest) {
     audio_port_v7 port;
     if (OK != getPortByAttributes(AUDIO_PORT_ROLE_SOURCE, AUDIO_PORT_TYPE_DEVICE,
                                   AUDIO_DEVICE_IN_REMOTE_SUBMIX, "0", port)) {
@@ -95,7 +106,8 @@
 
     // create record instance
     sp<AudioCapture> capture = sp<AudioCapture>::make(
-            AUDIO_SOURCE_REMOTE_SUBMIX, 48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO);
+            AUDIO_SOURCE_REMOTE_SUBMIX, mSampleRate, AUDIO_FORMAT_PCM_16_BIT,
+            AUDIO_CHANNEL_IN_STEREO);
     ASSERT_NE(nullptr, capture);
     ASSERT_EQ(OK, capture->create()) << "record creation failed";
     sp<OnAudioDeviceUpdateNotifier> cbCapture = sp<OnAudioDeviceUpdateNotifier>::make();
@@ -103,7 +115,7 @@
 
     // create playback instance
     sp<AudioPlayback> playback = sp<AudioPlayback>::make(
-            48000 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            mSampleRate, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
             AUDIO_OUTPUT_FLAG_NONE, AUDIO_SESSION_NONE);
     ASSERT_NE(nullptr, playback);
     ASSERT_EQ(OK, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
@@ -133,6 +145,12 @@
     playback->stop();
 }
 
+INSTANTIATE_TEST_SUITE_P(
+        AudioTrackParameterizedTest,
+        AudioTrackTest,
+        ::testing::Values(44100, 48000)
+);
+
 class AudioRoutingTest : public ::testing::Test {
   public:
     void SetUp() override {
diff --git a/media/libaudioclient/tests/trackplayerbase_tests.cpp b/media/libaudioclient/tests/trackplayerbase_tests.cpp
index 7317bf0..a4dba9b 100644
--- a/media/libaudioclient/tests/trackplayerbase_tests.cpp
+++ b/media/libaudioclient/tests/trackplayerbase_tests.cpp
@@ -54,7 +54,7 @@
         mPlayer = new TrackPlayer();
         mPlayer->init(track.get(), mPlayer, PLAYER_TYPE_AAUDIO, AUDIO_USAGE_MEDIA,
                       AUDIO_SESSION_NONE);
-        sp<AudioTrack> playerTrack = mPlayer->mAudioTrack;
+        sp<AudioTrack> playerTrack = mPlayer->getAudioTrack();
         ASSERT_EQ(playerTrack->initCheck(), NO_ERROR);
 
         mBufferSize = mFrameCount * playerTrack->frameSize();
@@ -74,7 +74,7 @@
 
     void playBuffer() {
         bool blocking = true;
-        ssize_t nbytes = mPlayer->mAudioTrack->write(mBuffer.data(), mBufferSize, blocking);
+        ssize_t nbytes = mPlayer->getAudioTrack()->write(mBuffer.data(), mBufferSize, blocking);
         EXPECT_EQ(nbytes, mBufferSize) << "Did not write all data in blocking mode";
     }
 
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index 639c7aa..74a64bf 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -23,7 +23,6 @@
     ],
 
     required: [
-        "libaudiohal@5.0",
         "libaudiohal@6.0",
         "libaudiohal@7.0",
         "libaudiohal@7.1",
@@ -45,6 +44,8 @@
         "liberror_headers",
         "libmediautils_headers",
     ],
+
+    export_include_dirs: ["include"],
 }
 
 cc_library_shared {
diff --git a/media/libaudiohal/FactoryHal.cpp b/media/libaudiohal/FactoryHal.cpp
index 15cb297..2c30693 100644
--- a/media/libaudiohal/FactoryHal.cpp
+++ b/media/libaudiohal/FactoryHal.cpp
@@ -50,12 +50,11 @@
  * This list need to keep sync with AudioHalVersionInfo.VERSIONS in
  * media/java/android/media/AudioHalVersionInfo.java.
  */
-static const std::array<AudioHalVersionInfo, 5> sAudioHALVersions = {
+static const std::array<AudioHalVersionInfo, 4> sAudioHALVersions = {
     AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, 1, 0),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 0),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 6, 0),
-    AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 5, 0),
 };
 
 static const std::map<AudioHalVersionInfo::Type, InterfaceName> sDevicesHALInterfaces = {
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index 1a6b949..0dd0f74 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -83,32 +83,6 @@
 }
 
 cc_library_shared {
-    name: "libaudiohal@5.0",
-    defaults: [
-        "libaudiohal_default",
-        "libaudiohal_hidl_default",
-    ],
-    srcs: [
-        ":audio_core_hal_client_sources",
-        ":audio_effect_hidl_hal_client_sources",
-        "EffectsFactoryHalEntry.cpp",
-    ],
-    shared_libs: [
-        "android.hardware.audio.common@5.0",
-        "android.hardware.audio.common@5.0-util",
-        "android.hardware.audio.effect@5.0",
-        "android.hardware.audio.effect@5.0-util",
-        "android.hardware.audio@5.0",
-        "android.hardware.audio@5.0-util",
-    ],
-    cflags: [
-        "-DMAJOR_VERSION=5",
-        "-DMINOR_VERSION=0",
-        "-include common/all-versions/VersionMacro.h",
-    ],
-}
-
-cc_library_shared {
     name: "libaudiohal@6.0",
     defaults: [
         "libaudiohal_default",
@@ -227,11 +201,11 @@
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_effect_ndk_shared",
         "latest_android_media_audio_common_types_ndk_shared",
-        "latest_av_audio_types_aidl_ndk_shared",
     ],
     shared_libs: [
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
+        "av-audio-types-aidl-ndk",
         "libaudio_aidl_conversion_common_cpp",
         "libaudio_aidl_conversion_common_ndk",
         "libaudio_aidl_conversion_common_ndk_cpp",
diff --git a/media/libaudiohal/impl/ConversionHelperAidl.h b/media/libaudiohal/impl/ConversionHelperAidl.h
index 0fadd9c..fe00fb2 100644
--- a/media/libaudiohal/impl/ConversionHelperAidl.h
+++ b/media/libaudiohal/impl/ConversionHelperAidl.h
@@ -32,6 +32,28 @@
 
 namespace android {
 
+/*
+ * Helper macros that prefix log messages with the instance name and the
+ * function name. Classes should provide a getInstanceName() API to use
+ * these macros.
+ *
+ * Usage:
+ *  AUGMENT_LOG(D);
+ *  AUGMENT_LOG(I, "hello!");
+ *  AUGMENT_LOG(W, "value: %d", value);
+ *
+ *  AUGMENT_LOG_IF(D, value < 0, "negative");
+ *  AUGMENT_LOG_IF(E, value < 0, "bad value: %d", value);
+ */
+
+#define AUGMENT_LOG(level, ...)                                                  \
+    ALOG##level("[%s] %s" __VA_OPT__(": " __android_second(0, __VA_ARGS__, "")), \
+                getInstanceName().c_str(), __func__ __VA_OPT__(__android_rest(__VA_ARGS__)))
+
+#define AUGMENT_LOG_IF(level, cond, ...)                                                    \
+    ALOG##level##_IF(cond, "[%s] %s" __VA_OPT__(": " __android_second(0, __VA_ARGS__, "")), \
+                     getInstanceName().c_str(), __func__ __VA_OPT__(__android_rest(__VA_ARGS__)))
+
 class Args {
   public:
     explicit Args(const Vector<String16>& args)
@@ -49,13 +71,15 @@
 
 class ConversionHelperAidl {
   protected:
-    ConversionHelperAidl(std::string_view className) : mClassName(className) {}
+    ConversionHelperAidl(std::string_view className, std::string_view instanceName)
+        : mClassName(className), mInstanceName(instanceName) {}
 
-    const std::string& getClassName() const {
-        return mClassName;
-    }
+    const std::string& getClassName() const { return mClassName; }
+
+    const std::string& getInstanceName() const { return mInstanceName; }
 
     const std::string mClassName;
+    const std::string mInstanceName;
 };
 
 // 'action' must accept a value of type 'T' and return 'status_t'.
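
For reference, a minimal usage sketch of the AUGMENT_LOG/AUGMENT_LOG_IF helpers introduced above: a ConversionHelperAidl subclass supplies the instance name through the new two-argument constructor and logs through the macros, which prefix each message with "[instance] function". This only builds inside an AOSP tree where ConversionHelperAidl.h and log/log.h are available; SketchHal and its values are illustrative names, not part of the patch.

#define LOG_TAG "SketchHal"
#include <log/log.h>

#include "ConversionHelperAidl.h"

namespace android {

class SketchHal : public ConversionHelperAidl {
  public:
    // Class name plus instance name, as DeviceHalAidl does above.
    SketchHal() : ConversionHelperAidl("SketchHal", "primary") {}

    void setVolume(float volume) {
        // Logs e.g. "[primary] setVolume: volume 0.50"
        AUGMENT_LOG(D, "volume %.2f", volume);
        // Only logged when the condition holds.
        AUGMENT_LOG_IF(W, volume > 1.f, "volume %.2f out of range", volume);
    }
};

}  // namespace android
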
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 024589b..629cd7c 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -22,15 +22,20 @@
 #include <aidl/android/hardware/audio/core/BnStreamCallback.h>
 #include <aidl/android/hardware/audio/core/BnStreamOutEventCallback.h>
 #include <aidl/android/hardware/audio/core/StreamDescriptor.h>
+#include <android/binder_ibinder_platform.h>
 #include <error/expected_utils.h>
 #include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdk.h>
 #include <media/AidlConversionNdkCpp.h>
 #include <media/AidlConversionUtil.h>
 #include <mediautils/TimeCheck.h>
 #include <system/audio.h>
+#include <system/thread_defs.h>
+
 #include <Utils.h>
 #include <utils/Log.h>
 
+#include "AidlUtils.h"
 #include "DeviceHalAidl.h"
 #include "EffectHalAidl.h"
 #include "StreamHalAidl.h"
@@ -60,6 +65,8 @@
 using aidl::android::hardware::audio::common::isBitPositionFlagSet;
 using aidl::android::hardware::audio::common::kDumpFromAudioServerArgument;
 using aidl::android::hardware::audio::common::RecordTrackMetadata;
+using aidl::android::hardware::audio::common::PlaybackTrackMetadata;
+using aidl::android::hardware::audio::common::SourceMetadata;
 using aidl::android::hardware::audio::core::sounddose::ISoundDose;
 using aidl::android::hardware::audio::core::AudioPatch;
 using aidl::android::hardware::audio::core::AudioRoute;
@@ -71,6 +78,18 @@
 using aidl::android::hardware::audio::core::ModuleDebug;
 using aidl::android::hardware::audio::core::VendorParameter;
 
+#define RETURN_IF_MODULE_NOT_INIT(retVal)         \
+    if (mModule == nullptr) {                     \
+        AUGMENT_LOG(E, "module not initialized"); \
+        return retVal;                            \
+    }
+
+#define RETURN_IF_TELEPHONY_NOT_INIT(retVal)         \
+    if (mTelephony == nullptr) {                     \
+        AUGMENT_LOG(E, "telephony not initialized"); \
+        return retVal;                               \
+    }
+
 namespace android {
 
 namespace {
@@ -103,15 +122,16 @@
 
 DeviceHalAidl::DeviceHalAidl(const std::string& instance, const std::shared_ptr<IModule>& module,
                              const std::shared_ptr<IHalAdapterVendorExtension>& vext)
-        : ConversionHelperAidl("DeviceHalAidl"),
-          mInstance(instance), mModule(module), mVendorExt(vext),
-          mTelephony(retrieveSubInterface<ITelephony>(module, &IModule::getTelephony)),
-          mBluetooth(retrieveSubInterface<IBluetooth>(module, &IModule::getBluetooth)),
-          mBluetoothA2dp(retrieveSubInterface<IBluetoothA2dp>(module, &IModule::getBluetoothA2dp)),
-          mBluetoothLe(retrieveSubInterface<IBluetoothLe>(module, &IModule::getBluetoothLe)),
-          mSoundDose(retrieveSubInterface<ISoundDose>(module, &IModule::getSoundDose)),
-          mMapper(instance, module), mMapperAccessor(mMapper, mLock) {
-}
+    : ConversionHelperAidl("DeviceHalAidl", instance),
+      mModule(module),
+      mVendorExt(vext),
+      mTelephony(retrieveSubInterface<ITelephony>(module, &IModule::getTelephony)),
+      mBluetooth(retrieveSubInterface<IBluetooth>(module, &IModule::getBluetooth)),
+      mBluetoothA2dp(retrieveSubInterface<IBluetoothA2dp>(module, &IModule::getBluetoothA2dp)),
+      mBluetoothLe(retrieveSubInterface<IBluetoothLe>(module, &IModule::getBluetoothLe)),
+      mSoundDose(retrieveSubInterface<ISoundDose>(module, &IModule::getSoundDose)),
+      mMapper(instance, module),
+      mMapperAccessor(mMapper, mLock) {}
 
 status_t DeviceHalAidl::getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) {
     std::lock_guard l(mLock);
@@ -124,11 +144,13 @@
 }
 
 status_t DeviceHalAidl::getSupportedModes(std::vector<media::audio::common::AudioMode> *modes) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
-    if (mTelephony == nullptr) return INVALID_OPERATION;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+    RETURN_IF_TELEPHONY_NOT_INIT(INVALID_OPERATION);
+
     if (modes == nullptr) {
+        AUGMENT_LOG(E, "uninitialized modes");
         return BAD_VALUE;
     }
     std::vector<AudioMode> aidlModes;
@@ -146,48 +168,53 @@
 }
 
 status_t DeviceHalAidl::initCheck() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     std::lock_guard l(mLock);
     return mMapper.initialize();
 }
 
 status_t DeviceHalAidl::setVoiceVolume(float volume) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "volume %f", volume);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
-    if (mTelephony == nullptr) return INVALID_OPERATION;
-    ITelephony::TelecomConfig inConfig{ .voiceVolume = Float{volume} }, outConfig;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+    RETURN_IF_TELEPHONY_NOT_INIT(INVALID_OPERATION);
+
+    ITelephony::TelecomConfig inConfig{.voiceVolume = Float{volume}}, outConfig;
     RETURN_STATUS_IF_ERROR(
             statusTFromBinderStatus(mTelephony->setTelecomConfig(inConfig, &outConfig)));
-    ALOGW_IF(outConfig.voiceVolume.has_value() && volume != outConfig.voiceVolume.value().value,
-            "%s: the resulting voice volume %f is not the same as requested %f",
-            __func__, outConfig.voiceVolume.value().value, volume);
+    AUGMENT_LOG_IF(
+            W, outConfig.voiceVolume.has_value() && volume != outConfig.voiceVolume.value().value,
+            "the resulting voice volume %f is not the same as requested %f",
+            outConfig.voiceVolume.value().value, volume);
     return OK;
 }
 
 status_t DeviceHalAidl::setMasterVolume(float volume) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "volume %f", volume);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     return statusTFromBinderStatus(mModule->setMasterVolume(volume));
 }
 
 status_t DeviceHalAidl::getMasterVolume(float *volume) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (volume == nullptr) {
+        AUGMENT_LOG(E, "uninitialized volumes");
         return BAD_VALUE;
     }
     return statusTFromBinderStatus(mModule->getMasterVolume(volume));
 }
 
 status_t DeviceHalAidl::setMode(audio_mode_t mode) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "mode %d", mode);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     AudioMode audioMode = VALUE_OR_FATAL(::aidl::android::legacy2aidl_audio_mode_t_AudioMode(mode));
     if (mTelephony != nullptr) {
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mTelephony->switchAudioMode(audioMode)));
@@ -196,90 +223,99 @@
 }
 
 status_t DeviceHalAidl::setMicMute(bool state) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "mute %d", state);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     return statusTFromBinderStatus(mModule->setMicMute(state));
 }
 
 status_t DeviceHalAidl::getMicMute(bool *state) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (state == nullptr) {
+        AUGMENT_LOG(E, "uninitialized mute state");
         return BAD_VALUE;
     }
     return statusTFromBinderStatus(mModule->getMicMute(state));
 }
 
 status_t DeviceHalAidl::setMasterMute(bool state) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "mute %d", state);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     return statusTFromBinderStatus(mModule->setMasterMute(state));
 }
 
 status_t DeviceHalAidl::getMasterMute(bool *state) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (state == nullptr) {
+        AUGMENT_LOG(E, "uninitialized mute state");
         return BAD_VALUE;
     }
     return statusTFromBinderStatus(mModule->getMasterMute(state));
 }
 
 status_t DeviceHalAidl::setParameters(const String8& kvPairs) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     AudioParameter parameters(kvPairs);
-    ALOGD("%s: parameters: \"%s\"", __func__, parameters.toString().c_str());
+    AUGMENT_LOG(D, "parameters: \"%s\"", parameters.toString().c_str());
 
     if (status_t status = filterAndUpdateBtA2dpParameters(parameters); status != OK) {
-        ALOGW("%s: filtering or updating BT A2DP parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndUpdateBtA2dpParameters failed: %d", status);
     }
     if (status_t status = filterAndUpdateBtHfpParameters(parameters); status != OK) {
-        ALOGW("%s: filtering or updating BT HFP parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndUpdateBtHfpParameters failed: %d", status);
     }
     if (status_t status = filterAndUpdateBtLeParameters(parameters); status != OK) {
-        ALOGW("%s: filtering or updating BT LE parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndUpdateBtLeParameters failed: %d", status);
     }
     if (status_t status = filterAndUpdateBtScoParameters(parameters); status != OK) {
-        ALOGW("%s: filtering or updating BT SCO parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndUpdateBtScoParameters failed: %d", status);
     }
     if (status_t status = filterAndUpdateScreenParameters(parameters); status != OK) {
-        ALOGW("%s: filtering or updating screen parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndUpdateScreenParameters failed: %d", status);
     }
     if (status_t status = filterAndUpdateTelephonyParameters(parameters); status != OK) {
-        ALOGW("%s: filtering or updating telephony parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndUpdateTelephonyParameters failed: %d", status);
     }
     return parseAndSetVendorParameters(mVendorExt, mModule, parameters);
 }
 
 status_t DeviceHalAidl::getParameters(const String8& keys, String8 *values) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "keys: \"%s\"", keys.c_str());
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (values == nullptr) {
+        AUGMENT_LOG(E, "invalid values");
         return BAD_VALUE;
     }
     AudioParameter parameterKeys(keys), result;
     if (status_t status = filterAndRetrieveBtA2dpParameters(parameterKeys, &result); status != OK) {
-        ALOGW("%s: filtering or retrieving BT A2DP parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndRetrieveBtA2dpParameters failed: %d", status);
     }
     if (status_t status = filterAndRetrieveBtLeParameters(parameterKeys, &result); status != OK) {
-        ALOGW("%s: filtering or retrieving BT LE parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndRetrieveBtLeParameters failed: %d", status);
     }
     *values = result.toString();
     return parseAndGetVendorParameters(mVendorExt, mModule, parameterKeys, values);
 }
 
 status_t DeviceHalAidl::getInputBufferSize(struct audio_config* config, size_t* size) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (config == nullptr || size == nullptr) {
+        AUGMENT_LOG(E, "invalid config or size");
         return BAD_VALUE;
     }
     constexpr bool isInput = true;
@@ -427,11 +463,14 @@
         audio_io_handle_t handle, audio_devices_t devices,
         audio_output_flags_t flags, struct audio_config* config,
         const char* address,
-        sp<StreamOutHalInterface>* outStream) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+        sp<StreamOutHalInterface>* outStream,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata) {
+    AUGMENT_LOG(D, "handle: %d devices %0x flags %0x", handle, devices, flags);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (outStream == nullptr || config == nullptr) {
+        AUGMENT_LOG(E, "invalid outStream or config");
         return BAD_VALUE;
     }
     constexpr bool isInput = false;
@@ -443,9 +482,12 @@
             ::aidl::android::legacy2aidl_audio_device_AudioDevice(devices, address));
     int32_t aidlOutputFlags = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+    SourceMetadata aidlMetadata = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_playback_track_metadata_v7_SourceMetadata(sourceMetadata));
     AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::output>(aidlOutputFlags);
     AudioPortConfig mixPortConfig;
     AudioPatch aidlPatch;
+
     Hal2AidlMapper::Cleanups cleanups(mMapperAccessor);
     {
         std::lock_guard l(mLock);
@@ -465,8 +507,15 @@
     std::shared_ptr<OutputStreamCallbackAidl> streamCb;
     if (isOffload) {
         streamCb = ndk::SharedRefBase::make<OutputStreamCallbackAidl>(this);
+        ndk::SpAIBinder binder = streamCb->asBinder();
+        AIBinder_setMinSchedulerPolicy(binder.get(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+        AIBinder_setInheritRt(binder.get(), true);
     }
     auto eventCb = ndk::SharedRefBase::make<OutputStreamEventCallbackAidl>(this);
+    ndk::SpAIBinder binder = eventCb->asBinder();
+    AIBinder_setMinSchedulerPolicy(binder.get(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+    AIBinder_setInheritRt(binder.get(), true);
+
     if (isOffload || isHwAvSync) {
         args.offloadInfo = aidlConfig.offloadInfo;
     }
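
The openOutputStream hunk above raises the scheduling treatment of the offload and event callback binders before they are handed to the HAL, so callbacks delivered from the HAL side keep audio priority. The helper below is a hypothetical convenience wrapper around the same two NDK platform calls used in the diff (AIBinder_setMinSchedulerPolicy and AIBinder_setInheritRt); it is a sketch, not part of the change.

    #include <android/binder_auto_utils.h>
    #include <android/binder_ibinder_platform.h>
    #include <sched.h>
    #include <system/thread_defs.h>

    // Hypothetical helper: give a callback binder the same scheduling treatment as
    // the offload and event callbacks in DeviceHalAidl::openOutputStream.
    static void applyAudioCallbackPriority(const ndk::SpAIBinder& binder) {
        AIBinder_setMinSchedulerPolicy(binder.get(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
        AIBinder_setInheritRt(binder.get(), true);
    }
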
@@ -475,12 +524,13 @@
     }
     args.bufferSizeFrames = aidlConfig.frameCount;
     args.eventCallback = eventCb;
+    args.sourceMetadata = aidlMetadata;
     ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamReturn ret;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openOutputStream(args, &ret)));
-    StreamContextAidl context(ret.desc, isOffload);
+    StreamContextAidl context(ret.desc, isOffload, aidlHandle);
     if (!context.isValid()) {
-        ALOGE("%s: Failed to created a valid stream context from the descriptor: %s",
-                __func__, ret.desc.toString().c_str());
+        AUGMENT_LOG(E, "Failed to created a valid stream context from the descriptor: %s",
+                    ret.desc.toString().c_str());
         return NO_INIT;
     }
     auto stream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
@@ -488,8 +538,11 @@
     *outStream = stream;
     /* StreamOutHalInterface* */ void* cbCookie = (*outStream).get();
     {
-        std::lock_guard l(mLock);
+        std::lock_guard l(mCallbacksLock);
         mCallbacks.emplace(cbCookie, Callbacks{});
+    }
+    {
+        std::lock_guard l(mLock);
         mMapper.addStream(*outStream, mixPortConfig.id, aidlPatch.id);
     }
     if (streamCb) {
@@ -509,10 +562,11 @@
         const char* address, audio_source_t source,
         audio_devices_t outputDevice, const char* outputDeviceAddress,
         sp<StreamInHalInterface>* inStream) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "handle: %d devices %0x flags %0x", handle, devices, flags);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (inStream == nullptr || config == nullptr) {
+        AUGMENT_LOG(E, "invalid inStream or config");
         return BAD_VALUE;
     }
     constexpr bool isInput = true;
@@ -552,10 +606,10 @@
     args.bufferSizeFrames = aidlConfig.frameCount;
     ::aidl::android::hardware::audio::core::IModule::OpenInputStreamReturn ret;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openInputStream(args, &ret)));
-    StreamContextAidl context(ret.desc, false /*isAsynchronous*/);
+    StreamContextAidl context(ret.desc, false /*isAsynchronous*/, aidlHandle);
     if (!context.isValid()) {
-        ALOGE("%s: Failed to created a valid stream context from the descriptor: %s",
-                __func__, ret.desc.toString().c_str());
+        AUGMENT_LOG(E, "Failed to created a valid stream context from the descriptor: %s",
+                    ret.desc.toString().c_str());
         return NO_INIT;
     }
     *inStream = sp<StreamInHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
@@ -569,7 +623,10 @@
 }
 
 status_t DeviceHalAidl::supportsAudioPatches(bool* supportsPatches) {
+    AUGMENT_LOG(V);
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (supportsPatches == nullptr) {
+        AUGMENT_LOG(E, "uninitialized supportsPatches");
         return BAD_VALUE;
     }
     *supportsPatches = true;
@@ -581,13 +638,20 @@
                                          unsigned int num_sinks,
                                          const struct audio_port_config* sinks,
                                          audio_patch_handle_t* patch) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "sources: %d sinks %d", num_sources, num_sinks);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
-    if (num_sinks > AUDIO_PATCH_PORTS_MAX || num_sources > AUDIO_PATCH_PORTS_MAX ||
-        sources == nullptr || sinks == nullptr || patch == nullptr) {
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+    if (num_sinks > AUDIO_PATCH_PORTS_MAX || num_sources > AUDIO_PATCH_PORTS_MAX) {
+        AUGMENT_LOG(E, "invalid sources %d or sinks %d ", num_sources, num_sinks);
         return BAD_VALUE;
     }
+
+    if (sources == nullptr || sinks == nullptr || patch == nullptr) {
+        AUGMENT_LOG(E, "uninitialized sources %d or sinks %d or patches %d", (sources == nullptr),
+                    (sinks == nullptr), (patch == nullptr));
+        return BAD_VALUE;
+    }
+
     // When the patch handle (*patch) is AUDIO_PATCH_HANDLE_NONE, it means
     // the framework wants to create a new patch. The handle has to be generated
     // by the HAL. Since handles generated this way can only be unique within
@@ -649,9 +713,10 @@
 }
 
 status_t DeviceHalAidl::releaseAudioPatch(audio_patch_handle_t patch) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "patch: %d", patch);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     static_assert(AUDIO_PATCH_HANDLE_NONE == 0);
     if (patch == AUDIO_PATCH_HANDLE_NONE) {
         return BAD_VALUE;
@@ -674,7 +739,10 @@
 }
 
 status_t DeviceHalAidl::getAudioPort(struct audio_port* port) {
+    AUGMENT_LOG(V);
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (port == nullptr) {
+        AUGMENT_LOG(E, "port not initialized");
         return BAD_VALUE;
     }
     audio_port_v7 portV7;
@@ -684,10 +752,12 @@
 }
 
 status_t DeviceHalAidl::getAudioPort(struct audio_port_v7 *port) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (port == nullptr) {
+        AUGMENT_LOG(E, "port not initialized");
         return BAD_VALUE;
     }
     bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(port->role, port->type)) ==
@@ -695,8 +765,7 @@
     auto aidlPort = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
     if (aidlPort.ext.getTag() != AudioPortExt::device) {
-        ALOGE("%s: provided port is not a device port (module %s): %s",
-                __func__, mInstance.c_str(), aidlPort.toString().c_str());
+        AUGMENT_LOG(E, "provided port is not a device port %s", aidlPort.toString().c_str());
         return BAD_VALUE;
     }
     const auto& matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
@@ -715,11 +784,13 @@
 
 status_t DeviceHalAidl::getAudioMixPort(const struct audio_port_v7 *devicePort,
                                         struct audio_port_v7 *mixPort) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
-    if (devicePort == nullptr || mixPort == nullptr ||
-            devicePort->type != AUDIO_PORT_TYPE_DEVICE || mixPort->type != AUDIO_PORT_TYPE_MIX) {
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+
+    if (devicePort == nullptr || mixPort == nullptr || devicePort->type != AUDIO_PORT_TYPE_DEVICE ||
+        mixPort->type != AUDIO_PORT_TYPE_MIX) {
+        AUGMENT_LOG(E, "invalid device or mix port");
         return BAD_VALUE;
     }
     const int32_t aidlHandle = VALUE_OR_RETURN_STATUS(
@@ -737,10 +808,12 @@
 }
 
 status_t DeviceHalAidl::setAudioPortConfig(const struct audio_port_config* config) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (config == nullptr) {
+        AUGMENT_LOG(E, "config not initialized");
         return BAD_VALUE;
     }
     bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
@@ -754,9 +827,10 @@
 }
 
 MicrophoneInfoProvider::Info const* DeviceHalAidl::getMicrophoneInfo() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (!mModule) return {};
+    RETURN_IF_MODULE_NOT_INIT({});
     std::lock_guard l(mLock);
     if (mMicrophones.status == Microphones::Status::UNKNOWN) {
         TIME_CHECK();
@@ -768,7 +842,7 @@
         } else if (status == INVALID_OPERATION) {
             mMicrophones.status = Microphones::Status::NOT_SUPPORTED;
         } else {
-            ALOGE("%s: Unexpected status from 'IModule.getMicrophones': %d", __func__, status);
+            AUGMENT_LOG(E, "Unexpected status from HAL: %d", status);
             return {};
         }
     }
@@ -780,10 +854,12 @@
 
 status_t DeviceHalAidl::getMicrophones(
         std::vector<audio_microphone_characteristic_t>* microphones) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (microphones == nullptr) {
+        AUGMENT_LOG(E, "microphones not initialized");
         return BAD_VALUE;
     }
     auto staticInfo = getMicrophoneInfo();
@@ -802,10 +878,12 @@
 
 status_t DeviceHalAidl::addDeviceEffect(
         const struct audio_port_config *device, sp<EffectHalInterface> effect) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (device == nullptr || effect == nullptr) {
+        AUGMENT_LOG(E, "device or effect not initialized");
         return BAD_VALUE;
     }
     bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
@@ -814,8 +892,8 @@
             ::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
                     *device, isInput, 0));
     if (requestedPortConfig.ext.getTag() != AudioPortExt::Tag::device) {
-        ALOGE("%s: provided port config is not a device port config: %s",
-                __func__, requestedPortConfig.toString().c_str());
+        AUGMENT_LOG(E, "provided port config is not a device port config: %s",
+                    requestedPortConfig.toString().c_str());
         return BAD_VALUE;
     }
     AudioPortConfig devicePortConfig;
@@ -833,10 +911,11 @@
 }
 status_t DeviceHalAidl::removeDeviceEffect(
         const struct audio_port_config *device, sp<EffectHalInterface> effect) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (device == nullptr || effect == nullptr) {
+        AUGMENT_LOG(E, "device or effect not initialized");
         return BAD_VALUE;
     }
     bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
@@ -845,8 +924,8 @@
             ::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
                     *device, isInput, 0));
     if (requestedPortConfig.ext.getTag() != AudioPortExt::Tag::device) {
-        ALOGE("%s: provided port config is not a device port config: %s",
-                __func__, requestedPortConfig.toString().c_str());
+        AUGMENT_LOG(E, "provided port config is not a device port config: %s",
+                    requestedPortConfig.toString().c_str());
         return BAD_VALUE;
     }
     AudioPortConfig devicePortConfig;
@@ -864,11 +943,13 @@
 status_t DeviceHalAidl::getMmapPolicyInfos(
         media::audio::common::AudioMMapPolicyType policyType,
         std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
-    AudioMMapPolicyType mmapPolicyType = VALUE_OR_RETURN_STATUS(
-            cpp2ndk_AudioMMapPolicyType(policyType));
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+
+    AudioMMapPolicyType mmapPolicyType =
+            VALUE_OR_RETURN_STATUS(cpp2ndk_AudioMMapPolicyType(policyType));
 
     std::vector<AudioMMapPolicyInfo> mmapPolicyInfos;
 
@@ -884,9 +965,10 @@
 }
 
 int32_t DeviceHalAidl::getAAudioMixerBurstCount() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     int32_t mixerBurstCount = 0;
     if (mModule->getAAudioMixerBurstCount(&mixerBurstCount).isOk()) {
         return mixerBurstCount;
@@ -895,9 +977,10 @@
 }
 
 int32_t DeviceHalAidl::getAAudioHardwareBurstMinUsec() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     int32_t hardwareBurstMinUsec = 0;
     if (mModule->getAAudioHardwareBurstMinUsec(&hardwareBurstMinUsec).isOk()) {
         return hardwareBurstMinUsec;
@@ -906,9 +989,10 @@
 }
 
 error::Result<audio_hw_sync_t> DeviceHalAidl::getHwAvSync() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     int32_t aidlHwAvSync;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->generateHwAvSyncId(&aidlHwAvSync)));
     return VALUE_OR_RETURN_STATUS(
@@ -924,55 +1008,59 @@
 }
 
 status_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (supports == nullptr) {
         return BAD_VALUE;
     }
     return statusTFromBinderStatus(mModule->supportsVariableLatency(supports));
 }
 
-status_t DeviceHalAidl::getSoundDoseInterface(const std::string& module,
-                                              ::ndk::SpAIBinder* soundDoseBinder)  {
+status_t DeviceHalAidl::getSoundDoseInterface([[maybe_unused]] const std::string& module,
+                                              ::ndk::SpAIBinder* soundDoseBinder) {
+    AUGMENT_LOG(V);
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+
     if (soundDoseBinder == nullptr) {
         return BAD_VALUE;
     }
     if (mSoundDose == nullptr) {
-        ALOGE("%s failed to retrieve the sound dose interface for module %s",
-                __func__, module.c_str());
+        AUGMENT_LOG(E, "failed to retrieve the sound dose interface");
         return BAD_VALUE;
     }
 
     if (mSoundDose == nullptr) {
-        ALOGE("%s failed to return the sound dose interface for module %s: not implemented",
-                  __func__,
-                  module.c_str());
+        AUGMENT_LOG(E, "failed to return the sound dose interface not implemented");
         return NO_INIT;
     }
 
     *soundDoseBinder = mSoundDose->asBinder();
-    ALOGI("%s using audio AIDL HAL sound dose interface", __func__);
+    AUGMENT_LOG(I, "using audio AIDL HAL sound dose interface");
     return OK;
 }
 
 status_t DeviceHalAidl::prepareToDisconnectExternalDevice(const struct audio_port_v7* port) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(V);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (port == nullptr) {
+        AUGMENT_LOG(E, "port not initialized");
         return BAD_VALUE;
     }
-    const bool isInput = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::portDirection(port->role, port->type)) ==
-                    ::aidl::android::AudioPortDirection::INPUT;
+    const bool isInput =
+            VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(port->role, port->type)) ==
+            ::aidl::android::AudioPortDirection::INPUT;
     AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
     if (aidlPort.ext.getTag() != AudioPortExt::device) {
-        ALOGE("%s: provided port is not a device port (module %s): %s",
-              __func__, mInstance.c_str(), aidlPort.toString().c_str());
+        AUGMENT_LOG(E, "provided port is not a device port: %s", aidlPort.toString().c_str());
         return BAD_VALUE;
     }
+
+    AUGMENT_LOG(D, "device %s", aidlPort.toString().c_str());
+
     status_t status = NO_ERROR;
     {
         std::lock_guard l(mLock);
@@ -993,10 +1081,11 @@
 }
 
 status_t DeviceHalAidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(V);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (port == nullptr) {
+        AUGMENT_LOG(E, "port not initialized");
         return BAD_VALUE;
     }
     if (!connected) {
@@ -1015,17 +1104,18 @@
     AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
     if (aidlPort.ext.getTag() != AudioPortExt::device) {
-        ALOGE("%s: provided port is not a device port (module %s): %s",
-                __func__, mInstance.c_str(), aidlPort.toString().c_str());
+        AUGMENT_LOG(E, "provided port is not a device port: %s", aidlPort.toString().c_str());
         return BAD_VALUE;
     }
+    AUGMENT_LOG(D, "connected %d port: %s", connected, aidlPort.toString().c_str());
     std::lock_guard l(mLock);
     return mMapper.setDevicePortConnectedState(aidlPort, connected);
 }
 
 status_t DeviceHalAidl::setSimulateDeviceConnections(bool enabled) {
+    AUGMENT_LOG(V);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     {
         std::lock_guard l(mLock);
         mMapper.resetUnusedPatchesAndPortConfigs();
@@ -1034,9 +1124,9 @@
     status_t status = statusTFromBinderStatus(mModule->setModuleDebug(debug));
     // This is important to log as it affects HAL behavior.
     if (status == OK) {
-        ALOGI("%s: set enabled: %d", __func__, enabled);
+        AUGMENT_LOG(I, "set enabled: %d", enabled);
     } else {
-        ALOGW("%s: set enabled to %d failed: %d", __func__, enabled, status);
+        AUGMENT_LOG(W, "set enabled to %d failed: %d", enabled, status);
     }
     return status;
 }
@@ -1051,7 +1141,7 @@
                             mBluetoothA2dp->supportsOffloadReconfiguration(&supports)));
             result->addInt(key, supports ? 1 : 0);
         } else {
-            ALOGI("%s: no IBluetoothA2dp on %s", __func__, mInstance.c_str());
+            AUGMENT_LOG(I, "no IBluetoothA2dp");
             result->addInt(key, 0);
         }
     }
@@ -1068,7 +1158,7 @@
                             mBluetoothLe->supportsOffloadReconfiguration(&supports)));
             result->addInt(key, supports ? 1 : 0);
         } else {
-            ALOGI("%s: no mBluetoothLe on %s", __func__, mInstance.c_str());
+            AUGMENT_LOG(I, "no mBluetoothLe");
             result->addInt(key, 0);
         }
     }
@@ -1079,29 +1169,29 @@
     std::optional<bool> a2dpEnabled;
     std::optional<std::vector<VendorParameter>> reconfigureOffload;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyBtA2dpSuspended),
-                    [&a2dpEnabled](const String8& trueOrFalse) {
-                        if (trueOrFalse == AudioParameter::valueTrue) {
-                            a2dpEnabled = false;  // 'suspended' == true
-                            return OK;
-                        } else if (trueOrFalse == AudioParameter::valueFalse) {
-                            a2dpEnabled = true;  // 'suspended' == false
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyBtA2dpSuspended, trueOrFalse.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtA2dpSuspended),
+            [&a2dpEnabled, this](const String8& trueOrFalse) {
+                if (trueOrFalse == AudioParameter::valueTrue) {
+                    a2dpEnabled = false;  // 'suspended' == true
+                    return OK;
+                } else if (trueOrFalse == AudioParameter::valueFalse) {
+                    a2dpEnabled = true;  // 'suspended' == false
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyBtA2dpSuspended, trueOrFalse.c_str());
+                return BAD_VALUE;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyReconfigA2dp),
-                    [&](const String8& value) -> status_t {
-                        std::vector<VendorParameter> result;
-                        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-                                mVendorExt->parseBluetoothA2dpReconfigureOffload(
-                                        std::string(value.c_str()), &result)));
-                        reconfigureOffload = std::move(result);
-                        return OK;
-                    }));
+            parameters, String8(AudioParameter::keyReconfigA2dp),
+            [&](const String8& value) -> status_t {
+                std::vector<VendorParameter> result;
+                RETURN_STATUS_IF_ERROR(
+                        statusTFromBinderStatus(mVendorExt->parseBluetoothA2dpReconfigureOffload(
+                                std::string(value.c_str()), &result)));
+                reconfigureOffload = std::move(result);
+                return OK;
+            }));
     if (mBluetoothA2dp != nullptr && a2dpEnabled.has_value()) {
         return statusTFromBinderStatus(mBluetoothA2dp->setEnabled(a2dpEnabled.value()));
     }
@@ -1115,34 +1205,33 @@
 status_t DeviceHalAidl::filterAndUpdateBtHfpParameters(AudioParameter &parameters) {
     IBluetooth::HfpConfig hfpConfig;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyBtHfpEnable),
-                    [&hfpConfig](const String8& trueOrFalse) {
-                        if (trueOrFalse == AudioParameter::valueTrue) {
-                            hfpConfig.isEnabled = Boolean{ .value = true };
-                            return OK;
-                        } else if (trueOrFalse == AudioParameter::valueFalse) {
-                            hfpConfig.isEnabled = Boolean{ .value = false };
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyBtHfpEnable, trueOrFalse.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtHfpEnable),
+            [&hfpConfig, this](const String8& trueOrFalse) {
+                if (trueOrFalse == AudioParameter::valueTrue) {
+                    hfpConfig.isEnabled = Boolean{.value = true};
+                    return OK;
+                } else if (trueOrFalse == AudioParameter::valueFalse) {
+                    hfpConfig.isEnabled = Boolean{.value = false};
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyBtHfpEnable, trueOrFalse.c_str());
+                return BAD_VALUE;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
-                    parameters, String8(AudioParameter::keyBtHfpSamplingRate),
-                    [&hfpConfig](int sampleRate) {
-                        return sampleRate > 0 ?
-                                hfpConfig.sampleRate = Int{ .value = sampleRate }, OK : BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtHfpSamplingRate),
+            [&hfpConfig](int sampleRate) {
+                return sampleRate > 0 ? hfpConfig.sampleRate = Int{.value = sampleRate},
+                                        OK : BAD_VALUE;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
-                    parameters, String8(AudioParameter::keyBtHfpVolume),
-                    [&hfpConfig](int volume0to15) {
-                        if (volume0to15 >= 0 && volume0to15 <= 15) {
-                            hfpConfig.volume = Float{ .value = volume0to15 / 15.0f };
-                            return OK;
-                        }
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtHfpVolume), [&hfpConfig](int volume0to15) {
+                if (volume0to15 >= 0 && volume0to15 <= 15) {
+                    hfpConfig.volume = Float{.value = volume0to15 / 15.0f};
+                    return OK;
+                }
+                return BAD_VALUE;
+            }));
     if (mBluetooth != nullptr && hfpConfig != IBluetooth::HfpConfig{}) {
         IBluetooth::HfpConfig newHfpConfig;
         return statusTFromBinderStatus(mBluetooth->setHfpConfig(hfpConfig, &newHfpConfig));
@@ -1154,39 +1243,39 @@
     std::optional<bool> leEnabled;
     std::optional<std::vector<VendorParameter>> reconfigureOffload;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyBtLeSuspended),
-                    [&leEnabled](const String8& trueOrFalse) {
-                        if (trueOrFalse == AudioParameter::valueTrue) {
-                            leEnabled = false;  // 'suspended' == true
-                            return OK;
-                        } else if (trueOrFalse == AudioParameter::valueFalse) {
-                            leEnabled = true;  // 'suspended' == false
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyBtLeSuspended, trueOrFalse.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtLeSuspended),
+            [&leEnabled, this](const String8& trueOrFalse) {
+                if (trueOrFalse == AudioParameter::valueTrue) {
+                    leEnabled = false;  // 'suspended' == true
+                    return OK;
+                } else if (trueOrFalse == AudioParameter::valueFalse) {
+                    leEnabled = true;  // 'suspended' == false
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyBtLeSuspended, trueOrFalse.c_str());
+                return BAD_VALUE;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyReconfigLe),
-                    [&](const String8& value) -> status_t {
-                        if (mVendorExt != nullptr) {
-                            std::vector<VendorParameter> result;
-                            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-                                    mVendorExt->parseBluetoothLeReconfigureOffload(
-                                            std::string(value.c_str()), &result)));
-                            reconfigureOffload = std::move(result);
-                        } else {
-                            reconfigureOffload = std::vector<VendorParameter>();
-                        }
-                        return OK;
-                    }));
+            parameters, String8(AudioParameter::keyReconfigLe),
+            [&](const String8& value) -> status_t {
+                if (mVendorExt != nullptr) {
+                    std::vector<VendorParameter> result;
+                    RETURN_STATUS_IF_ERROR(
+                            statusTFromBinderStatus(mVendorExt->parseBluetoothLeReconfigureOffload(
+                                    std::string(value.c_str()), &result)));
+                    reconfigureOffload = std::move(result);
+                } else {
+                    reconfigureOffload = std::vector<VendorParameter>();
+                }
+                return OK;
+            }));
     if (mBluetoothLe != nullptr && leEnabled.has_value()) {
         return statusTFromBinderStatus(mBluetoothLe->setEnabled(leEnabled.value()));
     }
     if (mBluetoothLe != nullptr && reconfigureOffload.has_value()) {
-        return statusTFromBinderStatus(mBluetoothLe->reconfigureOffload(
-                        reconfigureOffload.value()));
+        return statusTFromBinderStatus(
+                mBluetoothLe->reconfigureOffload(reconfigureOffload.value()));
     }
     return OK;
 }
@@ -1194,53 +1283,53 @@
 status_t DeviceHalAidl::filterAndUpdateBtScoParameters(AudioParameter &parameters) {
     IBluetooth::ScoConfig scoConfig;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyBtSco),
-                    [&scoConfig](const String8& onOrOff) {
-                        if (onOrOff == AudioParameter::valueOn) {
-                            scoConfig.isEnabled = Boolean{ .value = true };
-                            return OK;
-                        } else if (onOrOff == AudioParameter::valueOff) {
-                            scoConfig.isEnabled = Boolean{ .value = false };
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyBtSco, onOrOff.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtSco),
+            [&scoConfig, this](const String8& onOrOff) {
+                if (onOrOff == AudioParameter::valueOn) {
+                    scoConfig.isEnabled = Boolean{.value = true};
+                    return OK;
+                } else if (onOrOff == AudioParameter::valueOff) {
+                    scoConfig.isEnabled = Boolean{.value = false};
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyBtSco, onOrOff.c_str());
+                return BAD_VALUE;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyBtScoHeadsetName),
-                    [&scoConfig](const String8& name) {
-                        scoConfig.debugName = name;
-                        return OK;
-                    }));
+            parameters, String8(AudioParameter::keyBtScoHeadsetName),
+            [&scoConfig](const String8& name) {
+                scoConfig.debugName = name;
+                return OK;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyBtNrec),
-                    [&scoConfig](const String8& onOrOff) {
-                        if (onOrOff == AudioParameter::valueOn) {
-                            scoConfig.isNrecEnabled = Boolean{ .value = true };
-                            return OK;
-                        } else if (onOrOff == AudioParameter::valueOff) {
-                            scoConfig.isNrecEnabled = Boolean{ .value = false };
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyBtNrec, onOrOff.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtNrec),
+            [&scoConfig, this](const String8& onOrOff) {
+                if (onOrOff == AudioParameter::valueOn) {
+                    scoConfig.isNrecEnabled = Boolean{.value = true};
+                    return OK;
+                } else if (onOrOff == AudioParameter::valueOff) {
+                    scoConfig.isNrecEnabled = Boolean{.value = false};
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyBtNrec, onOrOff.c_str());
+                return BAD_VALUE;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyBtScoWb),
-                    [&scoConfig](const String8& onOrOff) {
-                        if (onOrOff == AudioParameter::valueOn) {
-                            scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO_WB;
-                            return OK;
-                        } else if (onOrOff == AudioParameter::valueOff) {
-                            scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO;
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyBtScoWb, onOrOff.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtScoWb),
+            [&scoConfig, this](const String8& onOrOff) {
+                if (onOrOff == AudioParameter::valueOn) {
+                    scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO_WB;
+                    return OK;
+                } else if (onOrOff == AudioParameter::valueOff) {
+                    scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO;
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyBtScoWb, onOrOff.c_str());
+                return BAD_VALUE;
+            }));
     if (mBluetooth != nullptr && scoConfig != IBluetooth::ScoConfig{}) {
         IBluetooth::ScoConfig newScoConfig;
         return statusTFromBinderStatus(mBluetooth->setScoConfig(scoConfig, &newScoConfig));
@@ -1250,34 +1339,41 @@
 
 status_t DeviceHalAidl::filterAndUpdateScreenParameters(AudioParameter &parameters) {
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyScreenState),
-                    [&](const String8& onOrOff) -> status_t {
-                        std::optional<bool> isTurnedOn;
-                        if (onOrOff == AudioParameter::valueOn) {
-                            isTurnedOn = true;
-                        } else if (onOrOff == AudioParameter::valueOff) {
-                            isTurnedOn = false;
-                        }
-                        if (!isTurnedOn.has_value()) {
-                            ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                    AudioParameter::keyScreenState, onOrOff.c_str());
-                            return BAD_VALUE;
-                        }
-                        return statusTFromBinderStatus(
-                                mModule->updateScreenState(isTurnedOn.value()));
-                    }));
+            parameters, String8(AudioParameter::keyScreenState),
+            [&, this](const String8& onOrOff) -> status_t {
+                std::optional<bool> isTurnedOn;
+                if (onOrOff == AudioParameter::valueOn) {
+                    isTurnedOn = true;
+                } else if (onOrOff == AudioParameter::valueOff) {
+                    isTurnedOn = false;
+                }
+                if (!isTurnedOn.has_value()) {
+                    AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyScreenState, onOrOff.c_str());
+                    return BAD_VALUE;
+                }
+                return statusTFromBinderStatus(mModule->updateScreenState(isTurnedOn.value()));
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
-                    parameters, String8(AudioParameter::keyScreenRotation),
-            [&](int rotationDegrees) -> status_t {
+            parameters, String8(AudioParameter::keyScreenRotation),
+            [&, this](int rotationDegrees) -> status_t {
                 IModule::ScreenRotation rotation;
                 switch (rotationDegrees) {
-                    case 0: rotation = IModule::ScreenRotation::DEG_0; break;
-                    case 90: rotation = IModule::ScreenRotation::DEG_90; break;
-                    case 180: rotation = IModule::ScreenRotation::DEG_180; break;
-                    case 270: rotation = IModule::ScreenRotation::DEG_270; break;
+                    case 0:
+                        rotation = IModule::ScreenRotation::DEG_0;
+                        break;
+                    case 90:
+                        rotation = IModule::ScreenRotation::DEG_90;
+                        break;
+                    case 180:
+                        rotation = IModule::ScreenRotation::DEG_180;
+                        break;
+                    case 270:
+                        rotation = IModule::ScreenRotation::DEG_270;
+                        break;
                     default:
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value %d",
-                                AudioParameter::keyScreenRotation, rotationDegrees);
+                        AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value %d",
+                                    AudioParameter::keyScreenRotation, rotationDegrees);
                         return BAD_VALUE;
                 }
                 return statusTFromBinderStatus(mModule->updateScreenRotation(rotation));
@@ -1289,49 +1385,48 @@
     using TtyMode = ITelephony::TelecomConfig::TtyMode;
     ITelephony::TelecomConfig telConfig;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyTtyMode),
-                    [&telConfig](const String8& mode) {
-                        if (mode == AudioParameter::valueTtyModeOff) {
-                            telConfig.ttyMode = TtyMode::OFF;
-                            return OK;
-                        } else if (mode == AudioParameter::valueTtyModeFull) {
-                            telConfig.ttyMode = TtyMode::FULL;
-                            return OK;
-                        } else if (mode == AudioParameter::valueTtyModeHco) {
-                            telConfig.ttyMode = TtyMode::HCO;
-                            return OK;
-                        } else if (mode == AudioParameter::valueTtyModeVco) {
-                            telConfig.ttyMode = TtyMode::VCO;
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyTtyMode, mode.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyTtyMode),
+            [&telConfig, this](const String8& mode) {
+                if (mode == AudioParameter::valueTtyModeOff) {
+                    telConfig.ttyMode = TtyMode::OFF;
+                    return OK;
+                } else if (mode == AudioParameter::valueTtyModeFull) {
+                    telConfig.ttyMode = TtyMode::FULL;
+                    return OK;
+                } else if (mode == AudioParameter::valueTtyModeHco) {
+                    telConfig.ttyMode = TtyMode::HCO;
+                    return OK;
+                } else if (mode == AudioParameter::valueTtyModeVco) {
+                    telConfig.ttyMode = TtyMode::VCO;
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyTtyMode, mode.c_str());
+                return BAD_VALUE;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyHacSetting),
-                    [&telConfig](const String8& onOrOff) {
-                        if (onOrOff == AudioParameter::valueHacOn) {
-                            telConfig.isHacEnabled = Boolean{ .value = true };
-                            return OK;
-                        } else if (onOrOff == AudioParameter::valueHacOff) {
-                            telConfig.isHacEnabled = Boolean{ .value = false };
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyHacSetting, onOrOff.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyHacSetting),
+            [&telConfig, this](const String8& onOrOff) {
+                if (onOrOff == AudioParameter::valueHacOn) {
+                    telConfig.isHacEnabled = Boolean{.value = true};
+                    return OK;
+                } else if (onOrOff == AudioParameter::valueHacOff) {
+                    telConfig.isHacEnabled = Boolean{.value = false};
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyHacSetting, onOrOff.c_str());
+                return BAD_VALUE;
+            }));
     if (mTelephony != nullptr && telConfig != ITelephony::TelecomConfig{}) {
         ITelephony::TelecomConfig newTelConfig;
-        return statusTFromBinderStatus(
-                mTelephony->setTelecomConfig(telConfig, &newTelConfig));
+        return statusTFromBinderStatus(mTelephony->setTelecomConfig(telConfig, &newTelConfig));
     }
     return OK;
 }
 
 void DeviceHalAidl::clearCallbacks(void* cookie) {
-    std::lock_guard l(mLock);
+    std::lock_guard l(mCallbacksLock);
     mCallbacks.erase(cookie);
 }
 
@@ -1364,18 +1459,21 @@
     setCallbackImpl(cookie, &Callbacks::latency, cb);
 }
 
-template<class C>
+template <class C>
 sp<C> DeviceHalAidl::getCallbackImpl(void* cookie, wp<C> DeviceHalAidl::Callbacks::* field) {
-    std::lock_guard l(mLock);
-    if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
-        return ((it->second).*field).promote();
+    wp<C> result;
+    {
+        std::lock_guard l(mCallbacksLock);
+        if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
+            result = (it->second).*field;
+        }
     }
-    return nullptr;
+    return result.promote();
 }
 template<class C>
 void DeviceHalAidl::setCallbackImpl(
         void* cookie, wp<C> DeviceHalAidl::Callbacks::* field, const sp<C>& cb) {
-    std::lock_guard l(mLock);
+    std::lock_guard l(mCallbacksLock);
     if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
         (it->second).*field = cb;
     }
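
getCallbackImpl now takes the new mCallbacksLock only long enough to copy the weak reference out of the map, and promotes it after the lock is released, so the lock is never held across a call that could reenter the registry. A self-contained sketch of the same technique with standard library types (std::weak_ptr standing in for wp<>, and purely illustrative names):

    #include <map>
    #include <memory>
    #include <mutex>

    struct Callback {};

    class CallbackRegistry {
      public:
        void set(void* cookie, const std::shared_ptr<Callback>& cb) {
            std::lock_guard l(mLock);
            mCallbacks[cookie] = cb;
        }
        std::shared_ptr<Callback> get(void* cookie) {
            std::weak_ptr<Callback> result;
            {
                std::lock_guard l(mLock);  // held only while touching the map
                if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
                    result = it->second;
                }
            }
            // Promotion (and any later call on the callback) happens outside the
            // lock, so a reentrant caller cannot deadlock on mLock.
            return result.lock();
        }

      private:
        std::mutex mLock;
        std::map<void*, std::weak_ptr<Callback>> mCallbacks;
    };
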
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index bcd495d..6ae6402 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -119,7 +119,9 @@
     // by releasing all references to the returned object.
     status_t openOutputStream(audio_io_handle_t handle, audio_devices_t devices,
                               audio_output_flags_t flags, struct audio_config* config,
-                              const char* address, sp<StreamOutHalInterface>* outStream) override;
+                              const char* address, sp<StreamOutHalInterface>* outStream,
+                              const std::vector<playback_track_metadata_v7_t>&
+                                                               sourceMetadata = {}) override;
 
     // Creates and opens the audio hardware input stream. The stream is closed
     // by releasing all references to the returned object.
@@ -233,7 +235,6 @@
     // MicrophoneInfoProvider implementation
     MicrophoneInfoProvider::Info const* getMicrophoneInfo() override;
 
-    const std::string mInstance;
     const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
     const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
     const std::shared_ptr<::aidl::android::hardware::audio::core::ITelephony> mTelephony;
@@ -242,8 +243,11 @@
     const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothLe> mBluetoothLe;
     const std::shared_ptr<::aidl::android::hardware::audio::core::sounddose::ISoundDose> mSoundDose;
 
+    std::mutex mCallbacksLock;
+    // Use 'mCallbacksLock' only to implement exclusive access to 'mCallbacks'. Never hold it
+    // while making any calls.
+    std::map<void*, Callbacks> mCallbacks GUARDED_BY(mCallbacksLock);
     std::mutex mLock;
-    std::map<void*, Callbacks> mCallbacks GUARDED_BY(mLock);
     std::set<audio_port_handle_t> mDeviceDisconnectionNotified GUARDED_BY(mLock);
     Hal2AidlMapper mMapper GUARDED_BY(mLock);
     LockedAccessor<Hal2AidlMapper> mMapperAccessor;
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index ea4258c..263ef96 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -259,7 +259,8 @@
         audio_output_flags_t flags,
         struct audio_config *config,
         const char *address,
-        sp<StreamOutHalInterface> *outStream) {
+        sp<StreamOutHalInterface> *outStream,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata) {
     TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     DeviceAddress hidlDevice;
@@ -273,6 +274,16 @@
         return status;
     }
 
+#if MAJOR_VERSION == 4
+    ::android::hardware::audio::CORE_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#else
+    ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#endif
+
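+    // Convert the framework source metadata into the HIDL SourceMetadata representation
+    // matching this HAL version before opening the stream.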
+    RETURN_STATUS_IF_ERROR(CoreUtils::sourceMetadataFromHalV7(
+            sourceMetadata, true /*ignoreNonVendorTags*/, &hidlMetadata));
+
 #if !(MAJOR_VERSION == 7 && MINOR_VERSION == 1)
     //TODO: b/193496180 use spatializer flag at audio HAL when available
     if ((flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0) {
@@ -294,7 +305,7 @@
 #endif
             handle, hidlDevice, hidlConfig, hidlFlags,
 #if MAJOR_VERSION >= 4
-            {} /* metadata */,
+            hidlMetadata /* metadata */,
 #endif
             [&](Result r, const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& result,
                     const AudioConfig& suggestedConfig) {
@@ -608,7 +619,14 @@
             result != NO_ERROR) {
         return result;
     }
-    return processReturn("setConnectedState", mDevice->setConnectedState(hidlAddress, connected));
+    Return<Result> ret = mDevice->setConnectedState(hidlAddress, connected);
+    if (ret.isOk() || ret == Result::NOT_SUPPORTED) {
+        // The framework is only interested in errors occurring due to connection state handling,
+        // so it can decide whether retrying is needed. If the HAL does not support this operation,
+        // it's not an error.
+        return NO_ERROR;
+    }
+    return processReturn("setConnectedState", ret);
 }
 
 error::Result<audio_hw_sync_t> DeviceHalHidl::getHwAvSync() {
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 1362dab..5f3e08c 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -73,7 +73,9 @@
     // by releasing all references to the returned object.
     status_t openOutputStream(audio_io_handle_t handle, audio_devices_t devices,
                               audio_output_flags_t flags, struct audio_config* config,
-                              const char* address, sp<StreamOutHalInterface>* outStream) override;
+                              const char* address, sp<StreamOutHalInterface>* outStream,
+                              const std::vector<playback_track_metadata_v7_t>&
+                                                                sourceMetadata = {}) override;
 
     // Creates and opens the audio hardware input stream. The stream is closed
     // by releasing all references to the returned object.
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
index a13903b..f719d97 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -532,5 +532,13 @@
                            AudioChannelLayout::LAYOUT_HAPTIC_AB /* mask */);
 }
 
+size_t EffectConversionHelperAidl::getInputChannelCount() const {
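+    // Number of channels in the effect's configured input; used by EffectHalAidl to convert
+    // between buffer frames and FMQ float samples.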
+    return getChannelCount(mCommon.input.base.channelMask);
+}
+
+size_t EffectConversionHelperAidl::getOutputChannelCount() const {
+    return getChannelCount(mCommon.output.base.channelMask);
+}
+
 }  // namespace effect
 }  // namespace android
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.h b/media/libaudiohal/impl/EffectConversionHelperAidl.h
index 50b47a9..e9e9fc2 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.h
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.h
@@ -51,6 +51,8 @@
 
     size_t getAudioChannelCount() const;
     size_t getHapticChannelCount() const;
+    size_t getInputChannelCount() const;
+    size_t getOutputChannelCount() const;
 
     uint8_t mOutputAccessMode = EFFECT_BUFFER_ACCESS_WRITE;
 
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index ea4dbf6..9fdde49 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -75,7 +75,14 @@
       mEffect(effect),
       mSessionId(sessionId),
       mIoId(ioId),
-      mIsProxyEffect(isProxyEffect) {
+      mIsProxyEffect(isProxyEffect),
+      mHalVersion([factory]() {
+          int version = 0;
+          // use factory HAL version because effect can be an EffectProxy instance
+          return factory->getInterfaceVersion(&version).isOk() ? version : 0;
+      }()),
+      mEventFlagDataMqNotEmpty(mHalVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty
+                                                                      : kEventFlagNotEmpty) {
     assert(mFactory != nullptr);
     assert(mEffect != nullptr);
     createAidlConversion(effect, sessionId, ioId, desc);
@@ -159,6 +166,7 @@
         mConversion = std::make_unique<android::effect::AidlConversionVendorExtension>(
                 effect, sessionId, ioId, desc, mIsProxyEffect);
     }
+    mEffectName = mConversion->getDescriptor().common.name;
     return OK;
 }
 
@@ -174,100 +182,153 @@
 
 // write to input FMQ here, wait for statusMQ STATUS_OK, and read from output FMQ
 status_t EffectHalAidl::process() {
-    const std::string effectName = mConversion->getDescriptor().common.name;
     State state = State::INIT;
     if (mConversion->isBypassing() || !mEffect->getState(&state).isOk() ||
         state != State::PROCESSING) {
-        ALOGI("%s skipping %s process because it's %s", __func__, effectName.c_str(),
+        ALOGI("%s skipping process because it's %s", mEffectName.c_str(),
               mConversion->isBypassing()
                       ? "bypassing"
                       : aidl::android::hardware::audio::effect::toString(state).c_str());
         return -ENODATA;
     }
 
-    // check if the DataMq needs any update, timeout at 1ns to avoid being blocked
-    auto efGroup = mConversion->getEventFlagGroup();
+    const std::shared_ptr<android::hardware::EventFlag> efGroup = mConversion->getEventFlagGroup();
     if (!efGroup) {
-        ALOGE("%s invalid efGroup", __func__);
+        ALOGE("%s invalid efGroup", mEffectName.c_str());
         return INVALID_OPERATION;
     }
 
-    // use IFactory HAL version because IEffect can be an EffectProxy instance
-    static const int halVersion = [&]() {
-        int version = 0;
-        return mFactory->getInterfaceVersion(&version).isOk() ? version : 0;
-    }();
+    // reopen if halVersion >= kReopenSupportedVersion and receive kEventFlagDataMqUpdate
+    RETURN_STATUS_IF_ERROR(maybeReopen(efGroup));
+    const size_t samplesWritten = writeToHalInputFmqAndSignal(efGroup);
+    if (0 == samplesWritten) {
+        return INVALID_OPERATION;
+    }
 
-    if (uint32_t efState = 0; halVersion >= kReopenSupportedVersion &&
-                              ::android::OK == efGroup->wait(kEventFlagDataMqUpdate, &efState,
+    RETURN_STATUS_IF_ERROR(waitHalStatusFmq(samplesWritten));
+    RETURN_STATUS_IF_ERROR(readFromHalOutputFmq(samplesWritten));
+    return OK;
+}
+
+status_t EffectHalAidl::maybeReopen(
+        const std::shared_ptr<android::hardware::EventFlag>& efGroup) const {
+    if (mHalVersion < kReopenSupportedVersion) {
+        return OK;
+    }
+
+    // check if the DataMq needs any update, timeout at 1ns to avoid being blocked
+    if (uint32_t efState = 0; ::android::OK == efGroup->wait(kEventFlagDataMqUpdate, &efState,
                                                              1 /* ns */, true /* retry */) &&
                               efState & kEventFlagDataMqUpdate) {
-        ALOGD("%s %s V%d receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str(),
-              halVersion);
-
-        mConversion->reopen();
+        ALOGD("%s V%d receive dataMQUpdate eventFlag from HAL", mEffectName.c_str(), mHalVersion);
+        return mConversion->reopen();
     }
-    auto statusQ = mConversion->getStatusMQ();
-    auto inputQ = mConversion->getInputMQ();
-    auto outputQ = mConversion->getOutputMQ();
-    if (!statusQ || !statusQ->isValid() || !inputQ || !inputQ->isValid() || !outputQ ||
-        !outputQ->isValid()) {
-        ALOGE("%s invalid FMQ [Status %d I %d O %d]", __func__, statusQ ? statusQ->isValid() : 0,
-              inputQ ? inputQ->isValid() : 0, outputQ ? outputQ->isValid() : 0);
-        return INVALID_OPERATION;
+    return OK;
+}
+
+size_t EffectHalAidl::writeToHalInputFmqAndSignal(
+        const std::shared_ptr<android::hardware::EventFlag>& efGroup) const {
+    const auto inputQ = mConversion->getInputMQ();
+    if (!inputQ || !inputQ->isValid()) {
+        ALOGE("%s invalid input FMQ", mEffectName.c_str());
+        return 0;
     }
 
-    size_t available = inputQ->availableToWrite();
-    const size_t floatsToWrite = std::min(available, mInBuffer->getSize() / sizeof(float));
-    if (floatsToWrite == 0) {
-        ALOGE("%s not able to write, floats in buffer %zu, space in FMQ %zu", __func__,
-              mInBuffer->getSize() / sizeof(float), available);
-        return INVALID_OPERATION;
-    }
-    if (!mInBuffer->audioBuffer() ||
-        !inputQ->write((float*)mInBuffer->audioBuffer()->f32, floatsToWrite)) {
-        ALOGE("%s failed to write %zu floats from audiobuffer %p to inputQ [avail %zu]", __func__,
-              floatsToWrite, mInBuffer->audioBuffer(), inputQ->availableToWrite());
-        return INVALID_OPERATION;
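+    // The input FMQ carries float samples: the write size is the buffer frame count times the
+    // effect's input channel count, capped by the space available in the FMQ.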
+    const size_t fmqSpaceSamples = inputQ->availableToWrite();
+    const size_t samplesInBuffer =
+            mInBuffer->audioBuffer()->frameCount * mConversion->getInputChannelCount();
+    const size_t samplesToWrite = std::min(fmqSpaceSamples, samplesInBuffer);
+    if (samplesToWrite == 0) {
+        ALOGE("%s not able to write, samplesInBuffer %zu, fmqSpaceSamples %zu", mEffectName.c_str(),
+              samplesInBuffer, fmqSpaceSamples);
+        return 0;
     }
 
-    // for V2 audio effect HAL, expect different EventFlag to avoid bit conflict with FMQ_NOT_EMPTY
-    efGroup->wake(halVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty
-                                                        : kEventFlagNotEmpty);
+    const float* const inputRawBuffer = static_cast<const float*>(mInBuffer->audioBuffer()->f32);
+    if (!inputQ->write(inputRawBuffer, samplesToWrite)) {
+        ALOGE("%s failed to write %zu samples to inputQ [avail %zu]", mEffectName.c_str(),
+              samplesToWrite, inputQ->availableToWrite());
+        return 0;
+    }
+
+    efGroup->wake(mEventFlagDataMqNotEmpty);
+    return samplesToWrite;
+}
+
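+// For the HapticGenerator effect, the HAL output contains audio samples followed by haptic
+// samples: the audio portion is copied or accumulated into the output buffer, while the
+// haptic portion is appended at the end of the input buffer.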
+void EffectHalAidl::writeHapticGeneratorData(size_t totalSamples, float* const outputRawBuffer,
+                                             float* const fmqOutputBuffer) const {
+    const auto audioChNum = mConversion->getAudioChannelCount();
+    const auto audioSamples =
+            totalSamples * audioChNum / (audioChNum + mConversion->getHapticChannelCount());
+
+    static constexpr float kHalFloatSampleLimit = 2.0f;
+    // for HapticGenerator, the input data buffer will be updated
+    float* const inputRawBuffer = static_cast<float*>(mInBuffer->audioBuffer()->f32);
+    // accumulate or copy input to output, haptic samples remains all zero
+    if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+        accumulate_float(outputRawBuffer, inputRawBuffer, audioSamples);
+    } else {
+        memcpy_to_float_from_float_with_clamping(outputRawBuffer, inputRawBuffer, audioSamples,
+                                                 kHalFloatSampleLimit);
+    }
+    // append the haptic sample at the end of input audio samples
+    memcpy_to_float_from_float_with_clamping(inputRawBuffer + audioSamples,
+                                             fmqOutputBuffer + audioSamples,
+                                             totalSamples - audioSamples, kHalFloatSampleLimit);
+}
+
+status_t EffectHalAidl::waitHalStatusFmq(size_t samplesWritten) const {
+    const auto statusQ = mConversion->getStatusMQ();
+    if (const bool statusValid = statusQ && statusQ->isValid(); !statusValid) {
+        ALOGE("%s statusFMQ %s", mEffectName.c_str(), statusValid ? "valid" : "invalid");
+        return INVALID_OPERATION;
+    }
 
     IEffect::Status retStatus{};
     if (!statusQ->readBlocking(&retStatus, 1)) {
-        ALOGE("%s %s V%d read status from status FMQ failed", __func__, effectName.c_str(),
-              halVersion);
+        ALOGE("%s V%d read status from status FMQ failed", mEffectName.c_str(), mHalVersion);
         return INVALID_OPERATION;
     }
-    if (retStatus.status != OK || (size_t)retStatus.fmqConsumed != floatsToWrite ||
+    if (retStatus.status != OK || (size_t)retStatus.fmqConsumed != samplesWritten ||
         retStatus.fmqProduced == 0) {
-        ALOGE("%s read status failed: %s, consumed %d (of %zu) produced %d", __func__,
-              retStatus.toString().c_str(), retStatus.fmqConsumed, floatsToWrite,
-              retStatus.fmqProduced);
+        ALOGE("%s read status failed: %s, FMQ consumed %d (of %zu) produced %d",
+              mEffectName.c_str(), retStatus.toString().c_str(), retStatus.fmqConsumed,
+              samplesWritten, retStatus.fmqProduced);
         return INVALID_OPERATION;
     }
 
-    available = outputQ->availableToRead();
-    const size_t floatsToRead = std::min(available, mOutBuffer->getSize() / sizeof(float));
-    if (floatsToRead == 0) {
-        ALOGE("%s not able to read, buffer space %zu, floats in FMQ %zu", __func__,
-              mOutBuffer->getSize() / sizeof(float), available);
+    return OK;
+}
+
+status_t EffectHalAidl::readFromHalOutputFmq(size_t samplesWritten) const {
+    const auto outputQ = mConversion->getOutputMQ();
+    if (const bool outputValid = outputQ && outputQ->isValid(); !outputValid) {
+        ALOGE("%s outputFMQ %s", mEffectName.c_str(), outputValid ? "valid" : "invalid");
         return INVALID_OPERATION;
     }
 
-    float *outputRawBuffer = mOutBuffer->audioBuffer()->f32;
+    const size_t fmqProducedSamples = outputQ->availableToRead();
+    const size_t bufferSpaceSamples =
+            mOutBuffer->audioBuffer()->frameCount * mConversion->getOutputChannelCount();
+    const size_t samplesToRead = std::min(fmqProducedSamples, bufferSpaceSamples);
+    if (samplesToRead == 0) {
+        ALOGE("%s unable to read, bufferSpace %zu, fmqProduced %zu samplesWritten %zu",
+              mEffectName.c_str(), bufferSpaceSamples, fmqProducedSamples, samplesWritten);
+        return INVALID_OPERATION;
+    }
+
+    float* const outputRawBuffer = static_cast<float*>(mOutBuffer->audioBuffer()->f32);
+    float* fmqOutputBuffer = outputRawBuffer;
     std::vector<float> tempBuffer;
     // keep original data in the output buffer for accumulate mode or HapticGenerator effect
     if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE || mIsHapticGenerator) {
-        tempBuffer.resize(floatsToRead);
-        outputRawBuffer = tempBuffer.data();
+        tempBuffer.resize(samplesToRead, 0);
+        fmqOutputBuffer = tempBuffer.data();
     }
     // always read floating point data for AIDL
-    if (!outputQ->read(outputRawBuffer, floatsToRead)) {
-        ALOGE("%s failed to read %zu from outputQ to audioBuffer %p", __func__, floatsToRead,
-              mOutBuffer->audioBuffer());
+    if (!outputQ->read(fmqOutputBuffer, samplesToRead)) {
+        ALOGE("%s failed to read %zu from outputQ to audioBuffer %p", mEffectName.c_str(),
+              samplesToRead, fmqOutputBuffer);
         return INVALID_OPERATION;
     }
 
@@ -276,26 +337,10 @@
     // offset as input buffer, here we skip the audio samples in output FMQ and append haptic
     // samples to the end of input buffer
     if (mIsHapticGenerator) {
-        static constexpr float kHalFloatSampleLimit = 2.0f;
-        assert(floatsToRead == floatsToWrite);
-        const auto audioChNum = mConversion->getAudioChannelCount();
-        const auto audioSamples =
-                floatsToWrite * audioChNum / (audioChNum + mConversion->getHapticChannelCount());
-        // accumulate or copy input to output, haptic samples remains all zero
-        if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
-            accumulate_float(mOutBuffer->audioBuffer()->f32, mInBuffer->audioBuffer()->f32,
-                             audioSamples);
-        } else {
-            memcpy_to_float_from_float_with_clamping(mOutBuffer->audioBuffer()->f32,
-                                                     mInBuffer->audioBuffer()->f32, audioSamples,
-                                                     kHalFloatSampleLimit);
-        }
-        // append the haptic sample at the end of input audio samples
-        memcpy_to_float_from_float_with_clamping(mInBuffer->audioBuffer()->f32 + audioSamples,
-                                                 outputRawBuffer + audioSamples,
-                                                 floatsToRead - audioSamples, kHalFloatSampleLimit);
+        assert(samplesToRead == samplesWritten);
+        writeHapticGeneratorData(samplesToRead, outputRawBuffer, fmqOutputBuffer);
     } else if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
-        accumulate_float(mOutBuffer->audioBuffer()->f32, outputRawBuffer, floatsToRead);
+        accumulate_float(outputRawBuffer, fmqOutputBuffer, samplesToRead);
     }
 
     return OK;
diff --git a/media/libaudiohal/impl/EffectHalAidl.h b/media/libaudiohal/impl/EffectHalAidl.h
index 4f7de7c..a775337 100644
--- a/media/libaudiohal/impl/EffectHalAidl.h
+++ b/media/libaudiohal/impl/EffectHalAidl.h
@@ -73,7 +73,12 @@
     const int32_t mSessionId;
     const int32_t mIoId;
     const bool mIsProxyEffect;
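+    // Interface version of the effects factory HAL; queried from IFactory because the
+    // wrapped effect may be an EffectProxy instance.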
+    const int32_t mHalVersion;
+    // Audio effect HAL v2+ uses kEventFlagDataMqNotEmpty instead of kEventFlagNotEmpty to
+    // avoid a bit conflict with FMQ_NOT_EMPTY
+    const uint32_t mEventFlagDataMqNotEmpty;
     bool mIsHapticGenerator = false;
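+    // Effect name from the descriptor, cached for use as the logging prefix.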
+    std::string mEffectName;
 
     std::unique_ptr<EffectConversionHelperAidl> mConversion;
 
@@ -93,6 +98,14 @@
     bool setEffectReverse(bool reverse);
     bool needUpdateReturnParam(uint32_t cmdCode);
 
+    status_t maybeReopen(const std::shared_ptr<android::hardware::EventFlag>& efGroup) const;
+    void writeHapticGeneratorData(size_t totalSamples, float* const outputRawBuffer,
+                                  float* const fmqOutputBuffer) const;
+    size_t writeToHalInputFmqAndSignal(
+            const std::shared_ptr<android::hardware::EventFlag>& efGroup) const;
+    status_t waitHalStatusFmq(size_t samplesWritten) const;
+    status_t readFromHalOutputFmq(size_t samplesWritten) const;
+
     // The destructor automatically releases the effect.
     virtual ~EffectHalAidl();
 };
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 740bf60..2753906 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -42,6 +42,8 @@
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::IFactory;
 using ::aidl::android::hardware::audio::effect::Processing;
+using ::aidl::android::media::audio::common::AudioDevice;
+using ::aidl::android::media::audio::common::AudioDeviceAddress;
 using ::aidl::android::media::audio::common::AudioSource;
 using ::aidl::android::media::audio::common::AudioStreamType;
 using ::aidl::android::media::audio::common::AudioUuid;
@@ -281,7 +283,8 @@
 
     auto getConfigProcessingWithAidlProcessing =
             [&](const auto& aidlProcess, std::vector<effectsConfig::InputStream>& preprocess,
-                std::vector<effectsConfig::OutputStream>& postprocess) {
+                std::vector<effectsConfig::OutputStream>& postprocess,
+                std::vector<effectsConfig::DeviceEffects>& deviceprocess) {
                 if (aidlProcess.type.getTag() == Processing::Type::streamType) {
                     AudioStreamType aidlType =
                             aidlProcess.type.template get<Processing::Type::streamType>();
@@ -313,6 +316,25 @@
                     effectsConfig::InputStream stream = {.type = type.value(),
                                                          .effects = std::move(effects)};
                     preprocess.emplace_back(stream);
+                } else if (aidlProcess.type.getTag() == Processing::Type::device) {
+                    AudioDevice aidlDevice =
+                            aidlProcess.type.template get<Processing::Type::device>();
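+                    // Map an AIDL device-type processing entry onto the legacy effectsConfig
+                    // device effects list (device type + address).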
+                    std::vector<std::shared_ptr<const effectsConfig::Effect>> effects;
+                    std::transform(aidlProcess.ids.begin(), aidlProcess.ids.end(),
+                                   std::back_inserter(effects), getConfigEffectWithDescriptor);
+                    audio_devices_t type;
+                    char address[AUDIO_DEVICE_MAX_ADDRESS_LEN];
+                    status_t status = ::aidl::android::aidl2legacy_AudioDevice_audio_device(
+                            aidlDevice, &type, address);
+                    if (status != NO_ERROR) {
+                        ALOGE("%s device effect has invalid device type / address", __func__);
+                        return;
+                    }
+                    effectsConfig::DeviceEffects device = {
+                            {.type = type, .effects = std::move(effects)},
+                            .address = address,
+                    };
+                    deviceprocess.emplace_back(device);
                 }
             };
 
@@ -320,17 +342,21 @@
             [&]() -> std::shared_ptr<const effectsConfig::Processings> {
                 std::vector<effectsConfig::InputStream> preprocess;
                 std::vector<effectsConfig::OutputStream> postprocess;
+                std::vector<effectsConfig::DeviceEffects> deviceprocess;
                 for (const auto& processing : mAidlProcessings) {
-                    getConfigProcessingWithAidlProcessing(processing, preprocess, postprocess);
+                    getConfigProcessingWithAidlProcessing(processing, preprocess, postprocess,
+                                                          deviceprocess);
                 }
 
-                if (0 == preprocess.size() && 0 == postprocess.size()) {
+                if (0 == preprocess.size() && 0 == postprocess.size() &&
+                    0 == deviceprocess.size()) {
                     return nullptr;
                 }
 
                 return std::make_shared<const effectsConfig::Processings>(
                         effectsConfig::Processings({.preprocess = std::move(preprocess),
-                                                    .postprocess = std::move(postprocess)}));
+                                                    .postprocess = std::move(postprocess),
+                                                    .deviceprocess = std::move(deviceprocess)}));
             }());
 
     return processings;
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index a01ac4b..0cdf0f2 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -25,6 +25,7 @@
 #include <Utils.h>
 #include <utils/Log.h>
 
+#include "AidlUtils.h"
 #include "Hal2AidlMapper.h"
 
 using aidl::android::aidl_utils::statusTFromBinderStatus;
@@ -99,8 +100,7 @@
 }  // namespace
 
 Hal2AidlMapper::Hal2AidlMapper(const std::string& instance, const std::shared_ptr<IModule>& module)
-        : mInstance(instance), mModule(module) {
-}
+    : ConversionHelperAidl("Hal2AidlMapper", instance), mModule(module) {}
 
 void Hal2AidlMapper::addStream(
         const sp<StreamHalInterface>& stream, int32_t mixPortConfigId, int32_t patchId) {
@@ -137,9 +137,9 @@
     // 'sinks' will not be updated because 'setAudioPatch' only needs IDs. Here we log
     // the source arguments, where only the audio configuration and device specifications
     // are relevant.
-    ALOGD("%s: patch ID: %d, [disregard IDs] sources: %s, sinks: %s",
-            __func__, *patchId, ::android::internal::ToString(sources).c_str(),
-            ::android::internal::ToString(sinks).c_str());
+    AUGMENT_LOG(D, "patch ID: %d, [disregard IDs] sources: %s, sinks: %s", *patchId,
+                ::android::internal::ToString(sources).c_str(),
+                ::android::internal::ToString(sinks).c_str());
     auto fillPortConfigs = [&](
             const std::vector<AudioPortConfig>& configs,
             const std::set<int32_t>& destinationPortIds,
@@ -152,18 +152,20 @@
                     // See b/315528763. Despite that the framework knows the actual format of
                     // the mix port, it still uses the original format. Luckily, there is
                     // the I/O handle which can be used to find the mix port.
-                    ALOGI("fillPortConfigs: retrying to find a mix port config with default "
-                            "configuration");
+                    AUGMENT_LOG(I,
+                                "fillPortConfigs: retrying to find a mix port config with"
+                                " default configuration");
                     if (auto it = findPortConfig(std::nullopt, s.flags,
                                     s.ext.get<AudioPortExt::mix>().handle);
                             it != mPortConfigs.end()) {
                         portConfig = it->second;
                     } else {
-                        const std::string flags = s.flags.has_value() ?
-                                s.flags->toString() : "<unspecified>";
-                        ALOGE("fillPortConfigs: existing port config for flags %s, handle %d "
-                                "not found in module %s", flags.c_str(),
-                                s.ext.get<AudioPortExt::mix>().handle, mInstance.c_str());
+                        const std::string flags =
+                                s.flags.has_value() ? s.flags->toString() : "<unspecified>";
+                        AUGMENT_LOG(E,
+                                    "fillPortConfigs: existing port config for flags %s, "
+                                    "handle %d not found",
+                                    flags.c_str(), s.ext.get<AudioPortExt::mix>().handle);
                         return BAD_VALUE;
                     }
                 } else {
@@ -171,8 +173,8 @@
                 }
             }
             LOG_ALWAYS_FATAL_IF(portConfig.id == 0,
-                    "fillPortConfigs: initial config: %s, port config: %s",
-                    s.toString().c_str(), portConfig.toString().c_str());
+                                "fillPortConfigs: initial config: %s, port config: %s",
+                                s.toString().c_str(), portConfig.toString().c_str());
             ids->push_back(portConfig.id);
             if (portIds != nullptr) {
                 portIds->insert(portConfig.portId);
@@ -218,8 +220,8 @@
         if (!created) {
             requestedPatch.id = patch.id;
             if (patch != requestedPatch) {
-                ALOGI("%s: Updating transient patch. Current: %s, new: %s",
-                        __func__, patch.toString().c_str(), requestedPatch.toString().c_str());
+                AUGMENT_LOG(I, "Updating transient patch. Current: %s, new: %s",
+                            patch.toString().c_str(), requestedPatch.toString().c_str());
                 // Since matching may be done by mix port only, update the patch if the device port
                 // config has changed.
                 patch = requestedPatch;
@@ -252,7 +254,7 @@
     int32_t id = result->id;
     if (requestedPortConfig.id != 0 && requestedPortConfig.id != id) {
         LOG_ALWAYS_FATAL("%s: requested port config id %d changed to %d", __func__,
-                requestedPortConfig.id, id);
+                         requestedPortConfig.id, id);
     }
 
     auto [_, inserted] = mPortConfigs.insert_or_assign(id, *result);
@@ -272,8 +274,8 @@
         RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(suggestedOrAppliedPortConfig,
                         &appliedPortConfig, created));
         if (appliedPortConfig.id == 0) {
-            ALOGE("%s: module %s did not apply suggested config %s", __func__,
-                    mInstance.c_str(), suggestedOrAppliedPortConfig.toString().c_str());
+            AUGMENT_LOG(E, "did not apply suggested config %s",
+                        suggestedOrAppliedPortConfig.toString().c_str());
             return NO_INIT;
         }
         *result = appliedPortConfig;
@@ -289,7 +291,7 @@
     if (mDisconnectedPortReplacement.first == portId) {
         const auto& port = mDisconnectedPortReplacement.second;
         mPorts.insert(std::make_pair(port.id, port));
-        ALOGD("%s: disconnected port replacement: %s", __func__, port.toString().c_str());
+        AUGMENT_LOG(D, "disconnected port replacement: %s", port.toString().c_str());
         mDisconnectedPortReplacement = std::pair<int32_t, AudioPort>();
     }
     updateDynamicMixPorts();
@@ -331,8 +333,7 @@
     if (auto portConfigIt = findPortConfig(device); portConfigIt == mPortConfigs.end()) {
         auto portsIt = findPort(device);
         if (portsIt == mPorts.end()) {
-            ALOGE("%s: device port for device %s is not found in the module %s",
-                    __func__, device.toString().c_str(), mInstance.c_str());
+            AUGMENT_LOG(E, "device port for device %s is not found", device.toString().c_str());
             return BAD_VALUE;
         }
         AudioPortConfig requestedPortConfig;
@@ -367,16 +368,21 @@
         const AudioConfig& config, const std::optional<AudioIoFlags>& flags, int32_t ioHandle,
         AudioSource source, const std::set<int32_t>& destinationPortIds,
         AudioPortConfig* portConfig, bool* created) {
-    // These flags get removed one by one in this order when retrying port finding.
-    static const std::vector<AudioInputFlags> kOptionalInputFlags{
-        AudioInputFlags::FAST, AudioInputFlags::RAW, AudioInputFlags::VOIP_TX };
     if (auto portConfigIt = findPortConfig(config, flags, ioHandle);
             portConfigIt == mPortConfigs.end() && flags.has_value()) {
-        auto optionalInputFlagsIt = kOptionalInputFlags.begin();
+        // These input flags get removed one by one in this order when retrying port finding.
+        std::vector<AudioInputFlags> optionalInputFlags {
+            AudioInputFlags::FAST, AudioInputFlags::RAW, AudioInputFlags::VOIP_TX };
+        // For remote submix input, retry with direct input flag removed as the remote submix
+        // input is not expected to manipulate the contents of the audio stream.
+        if (mRemoteSubmixIn.has_value()) {
+            optionalInputFlags.push_back(AudioInputFlags::DIRECT);
+        }
+        auto optionalInputFlagsIt = optionalInputFlags.begin();
         AudioIoFlags matchFlags = flags.value();
         auto portsIt = findPort(config, matchFlags, destinationPortIds);
         while (portsIt == mPorts.end() && matchFlags.getTag() == AudioIoFlags::Tag::input
-                && optionalInputFlagsIt != kOptionalInputFlags.end()) {
+                && optionalInputFlagsIt != optionalInputFlags.end()) {
             if (!isBitPositionFlagSet(
                             matchFlags.get<AudioIoFlags::Tag::input>(), *optionalInputFlagsIt)) {
                 ++optionalInputFlagsIt;
@@ -385,15 +391,45 @@
             matchFlags.set<AudioIoFlags::Tag::input>(matchFlags.get<AudioIoFlags::Tag::input>() &
                     ~makeBitPositionFlagMask(*optionalInputFlagsIt++));
             portsIt = findPort(config, matchFlags, destinationPortIds);
-            ALOGI("%s: mix port for config %s, flags %s was not found in the module %s, "
-                    "retried with flags %s", __func__, config.toString().c_str(),
-                    flags.value().toString().c_str(), mInstance.c_str(),
-                    matchFlags.toString().c_str());
+            AUGMENT_LOG(I,
+                        "mix port for config %s, flags %s was not found, "
+                        "retried with flags %s",
+                        config.toString().c_str(), flags.value().toString().c_str(),
+                        matchFlags.toString().c_str());
         }
+        // These output flags get removed one by one in this order when retrying port finding.
+        std::vector<AudioOutputFlags> optionalOutputFlags { };
+        // For remote submix output, retry with these output flags removed one by one:
+        // 1. DIRECT: remote submix outputs are expected not to manipulate the contents of the
+        //            audio stream.
+        // 2. IEC958_NONAUDIO: remote submix outputs are not connected to ALSA and do not require
+        //                     non audio signalling.
+        if (mRemoteSubmixOut.has_value()) {
+            optionalOutputFlags.push_back(AudioOutputFlags::DIRECT);
+            optionalOutputFlags.push_back(AudioOutputFlags::IEC958_NONAUDIO);
+        }
+        auto optionalOutputFlagsIt = optionalOutputFlags.begin();
+        matchFlags = flags.value();
+        while (portsIt == mPorts.end() && matchFlags.getTag() == AudioIoFlags::Tag::output
+                && optionalOutputFlagsIt != optionalOutputFlags.end()) {
+            if (!isBitPositionFlagSet(
+                            matchFlags.get<AudioIoFlags::Tag::output>(),*optionalOutputFlagsIt)) {
+                ++optionalOutputFlagsIt;
+                continue;
+            }
+            matchFlags.set<AudioIoFlags::Tag::output>(matchFlags.get<AudioIoFlags::Tag::output>() &
+                    ~makeBitPositionFlagMask(*optionalOutputFlagsIt++));
+            portsIt = findPort(config, matchFlags, destinationPortIds);
+            AUGMENT_LOG(I,
+                        "mix port for config %s, flags %s was not found, "
+                        "retried with flags %s",
+                        config.toString().c_str(), flags.value().toString().c_str(),
+                        matchFlags.toString().c_str());
+        }
+
         if (portsIt == mPorts.end()) {
-            ALOGE("%s: mix port for config %s, flags %s is not found in the module %s",
-                    __func__, config.toString().c_str(), matchFlags.toString().c_str(),
-                    mInstance.c_str());
+            AUGMENT_LOG(E, "mix port for config %s, flags %s is not found",
+                        config.toString().c_str(), matchFlags.toString().c_str());
             return BAD_VALUE;
         }
         AudioPortConfig requestedPortConfig;
@@ -408,9 +444,10 @@
         }
         return createOrUpdatePortConfig(requestedPortConfig, portConfig, created);
     } else if (portConfigIt == mPortConfigs.end() && !flags.has_value()) {
-        ALOGW("%s: mix port config for %s, handle %d not found in the module %s, "
-                "and was not created as flags are not specified",
-                __func__, config.toString().c_str(), ioHandle, mInstance.c_str());
+        AUGMENT_LOG(W,
+                    "mix port config for %s, handle %d not found "
+                    "and was not created as flags are not specified",
+                    config.toString().c_str(), ioHandle);
         return BAD_VALUE;
     } else {
         AudioPortConfig requestedPortConfig = portConfigIt->second;
@@ -440,8 +477,8 @@
         if (const auto& p = requestedPortConfig;
                 !p.sampleRate.has_value() || !p.channelMask.has_value() ||
                 !p.format.has_value()) {
-            ALOGW("%s: provided mix port config is not fully specified: %s",
-                    __func__, p.toString().c_str());
+            AUGMENT_LOG(W, "provided mix port config is not fully specified: %s",
+                        p.toString().c_str());
             return BAD_VALUE;
         }
         AudioConfig config;
@@ -470,14 +507,13 @@
                     requestedPortConfig.ext.get<Tag::device>().device, configPtr, gainConfigPtr,
                     portConfig, created);
         } else {
-            ALOGD("%s: device port config does not have audio or gain config specified", __func__);
+            AUGMENT_LOG(D, "device port config does not have audio or gain config specified");
             return findOrCreateDevicePortConfig(
                     requestedPortConfig.ext.get<Tag::device>().device, nullptr /*config*/,
                     nullptr /*gainConfig*/, portConfig, created);
         }
     }
-    ALOGW("%s: unsupported audio port config: %s",
-            __func__, requestedPortConfig.toString().c_str());
+    AUGMENT_LOG(W, "unsupported audio port config: %s", requestedPortConfig.toString().c_str());
     return BAD_VALUE;
 }
 
@@ -486,8 +522,7 @@
         *portConfig = it->second;
         return OK;
     }
-    ALOGE("%s: could not find a device port config for device %s",
-            __func__, device.toString().c_str());
+    AUGMENT_LOG(E, "could not find a device port config for device %s", device.toString().c_str());
     return BAD_VALUE;
 }
 
@@ -593,9 +628,10 @@
             }
             optionalFlags |= makeBitPositionFlagMask(*optionalOutputFlagsIt++);
             result = std::find_if(mPorts.begin(), mPorts.end(), matcher);
-            ALOGI("%s: port for config %s, flags %s was not found in the module %s, "
-                  "retried with excluding optional flags %#x", __func__, config.toString().c_str(),
-                    flags.toString().c_str(), mInstance.c_str(), optionalFlags);
+            AUGMENT_LOG(I,
+                        "port for config %s, flags %s was not found, "
+                        "retried with excluding optional flags %#x",
+                        config.toString().c_str(), flags.toString().c_str(), optionalFlags);
         }
     }
     return result;
@@ -629,7 +665,7 @@
 status_t Hal2AidlMapper::getAudioMixPort(int32_t ioHandle, AudioPort* port) {
     auto it = findPortConfig(std::nullopt /*config*/, std::nullopt /*flags*/, ioHandle);
     if (it == mPortConfigs.end()) {
-        ALOGE("%s, cannot find mix port config for handle %u", __func__, ioHandle);
+        AUGMENT_LOG(E, "cannot find mix port config for handle %u", ioHandle);
         return BAD_VALUE;
     }
     return updateAudioPort(it->second.portId, port);
@@ -638,21 +674,18 @@
 status_t Hal2AidlMapper::getAudioPortCached(
         const ::aidl::android::media::audio::common::AudioDevice& device,
         ::aidl::android::media::audio::common::AudioPort* port) {
-
     if (auto portsIt = findPort(device); portsIt != mPorts.end()) {
         *port = portsIt->second;
         return OK;
     }
-    ALOGE("%s: device port for device %s is not found in the module %s",
-            __func__, device.toString().c_str(), mInstance.c_str());
+    AUGMENT_LOG(E, "device port for device %s is not found", device.toString().c_str());
     return BAD_VALUE;
 }
 
 status_t Hal2AidlMapper::initialize() {
     std::vector<AudioPort> ports;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->getAudioPorts(&ports)));
-    ALOGW_IF(ports.empty(), "%s: module %s returned an empty list of audio ports",
-            __func__, mInstance.c_str());
+    AUGMENT_LOG_IF(W, ports.empty(), "returned an empty list of audio ports");
     mDefaultInputPortId = mDefaultOutputPortId = -1;
     const int defaultDeviceFlag = 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE;
     for (auto it = ports.begin(); it != ports.end(); ) {
@@ -685,8 +718,9 @@
         }
     }
     if (mRemoteSubmixIn.has_value() != mRemoteSubmixOut.has_value()) {
-        ALOGE("%s: The configuration only has input or output remote submix device, must have both",
-                __func__);
+        AUGMENT_LOG(E,
+                    "The configuration only has input or output remote submix device, "
+                    "must have both");
         mRemoteSubmixIn.reset();
         mRemoteSubmixOut.reset();
     }
@@ -694,7 +728,7 @@
         AudioPort connectedRSubmixIn = *mRemoteSubmixIn;
         connectedRSubmixIn.ext.get<AudioPortExt::Tag::device>().device.address =
                 AUDIO_REMOTE_SUBMIX_DEVICE_ADDRESS;
-        ALOGD("%s: connecting remote submix input", __func__);
+        AUGMENT_LOG(D, "connecting remote submix input");
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
                                 connectedRSubmixIn, &connectedRSubmixIn)));
         // The template port for the remote submix input couldn't be "default" because it is not
@@ -711,7 +745,7 @@
         AudioPort tempConnectedRSubmixOut = *mRemoteSubmixOut;
         tempConnectedRSubmixOut.ext.get<AudioPortExt::Tag::device>().device.address =
                 AUDIO_REMOTE_SUBMIX_DEVICE_ADDRESS;
-        ALOGD("%s: temporarily connecting and disconnecting remote submix output", __func__);
+        AUGMENT_LOG(D, "temporarily connecting and disconnecting remote submix output");
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
                                 tempConnectedRSubmixOut, &tempConnectedRSubmixOut)));
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->disconnectExternalDevice(
@@ -720,8 +754,8 @@
         ports.push_back(std::move(tempConnectedRSubmixOut));
     }
 
-    ALOGI("%s: module %s default port ids: input %d, output %d",
-            __func__, mInstance.c_str(), mDefaultInputPortId, mDefaultOutputPortId);
+    AUGMENT_LOG(I, "default port ids: input %d, output %d", mDefaultInputPortId,
+                mDefaultOutputPortId);
     std::transform(ports.begin(), ports.end(), std::inserter(mPorts, mPorts.end()),
             [](const auto& p) { return std::make_pair(p.id, p); });
     RETURN_STATUS_IF_ERROR(updateRoutes());
@@ -774,10 +808,10 @@
         int32_t ioHandle, const AudioDevice& device, const AudioIoFlags& flags,
         AudioSource source, Cleanups* cleanups, AudioConfig* config,
         AudioPortConfig* mixPortConfig, AudioPatch* patch) {
-    ALOGD("%p %s: handle %d, device %s, flags %s, source %s, config %s, mix port config %s",
-            this, __func__, ioHandle, device.toString().c_str(),
-            flags.toString().c_str(), toString(source).c_str(),
-            config->toString().c_str(), mixPortConfig->toString().c_str());
+    AUGMENT_LOG(D, "handle %d, device %s, flags %s, source %s, config %s, mixport config %s",
+                ioHandle, device.toString().c_str(), flags.toString().c_str(),
+                toString(source).c_str(), config->toString().c_str(),
+                mixPortConfig->toString().c_str());
     resetUnusedPatchesAndPortConfigs();
     const AudioConfig initialConfig = *config;
     // Find / create AudioPortConfigs for the device port and the mix port,
@@ -793,15 +827,16 @@
     status_t status = prepareToOpenStreamHelper(ioHandle, devicePortConfig.portId,
             devicePortConfig.id, flags, source, initialConfig, cleanups, config,
             mixPortConfig, patch);
-    if (status != OK) {
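+    // Skip the device port config fallback for non-PCM configs when the module exposes
+    // remote submix ports.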
+    if (status != OK && !(mRemoteSubmixOut.has_value() &&
+                initialConfig.base.format.type != AudioFormatType::PCM)) {
         // If using the client-provided config did not work out for establishing a mix port config
         // or patching, try with the device port config. Note that in general device port config and
         // mix port config are not required to be the same, however they must match if the HAL
         // module can't perform audio stream conversions.
         AudioConfig deviceConfig = initialConfig;
         if (setConfigFromPortConfig(&deviceConfig, devicePortConfig)->base != initialConfig.base) {
-            ALOGD("%s: retrying with device port config: %s", __func__,
-                    devicePortConfig.toString().c_str());
+            AUGMENT_LOG(D, "retrying with device port config: %s",
+                        devicePortConfig.toString().c_str());
             status = prepareToOpenStreamHelper(ioHandle, devicePortConfig.portId,
                     devicePortConfig.id, flags, source, initialConfig, cleanups,
                     &deviceConfig, mixPortConfig, patch);
@@ -845,8 +880,8 @@
         retryWithSuggestedConfig = true;
     }
     if (mixPortConfig->id == 0 && retryWithSuggestedConfig) {
-        ALOGD("%s: retrying to find/create a mix port config using config %s", __func__,
-                config->toString().c_str());
+        AUGMENT_LOG(D, "retrying to find/create a mix port config using config %s",
+                    config->toString().c_str());
         RETURN_STATUS_IF_ERROR(findOrCreateMixPortConfig(*config, flags, ioHandle, source,
                         std::set<int32_t>{devicePortId}, mixPortConfig, &created));
         if (created) {
@@ -855,8 +890,8 @@
         setConfigFromPortConfig(config, *mixPortConfig);
     }
     if (mixPortConfig->id == 0) {
-        ALOGD("%p %s: returning suggested config for the stream: %s", this, __func__,
-                config->toString().c_str());
+        AUGMENT_LOG(D, "returning suggested config for the stream: %s",
+                    config->toString().c_str());
         return OK;
     }
     if (isInput) {
@@ -894,9 +929,10 @@
 // Note: does not reset port configs.
 status_t Hal2AidlMapper::releaseAudioPatch(Patches::iterator it) {
     const int32_t patchId = it->first;
+    AUGMENT_LOG(D, "patchId %d", patchId);
     if (ndk::ScopedAStatus status = mModule->resetAudioPatch(patchId); !status.isOk()) {
-        ALOGE("%s: error while resetting patch %d: %s",
-                __func__, patchId, status.getDescription().c_str());
+        AUGMENT_LOG(E, "error while resetting patch %d: %s", patchId,
+                    status.getDescription().c_str());
         return statusTFromBinderStatus(status);
     }
     mPatches.erase(it);
@@ -915,7 +951,7 @@
         if (auto it = mPatches.find(patchId); it != mPatches.end()) {
             releaseAudioPatch(it);
         } else {
-            ALOGE("%s: patch id %d not found", __func__, patchId);
+            AUGMENT_LOG(E, "patch id %d not found", patchId);
             result = BAD_VALUE;
         }
     }
@@ -925,16 +961,17 @@
 
 void Hal2AidlMapper::resetPortConfig(int32_t portConfigId) {
     if (auto it = mPortConfigs.find(portConfigId); it != mPortConfigs.end()) {
+        AUGMENT_LOG(D, "%s", it->second.toString().c_str());
         if (ndk::ScopedAStatus status = mModule->resetAudioPortConfig(portConfigId);
                 !status.isOk()) {
-            ALOGE("%s: error while resetting port config %d: %s",
-                    __func__, portConfigId, status.getDescription().c_str());
+            AUGMENT_LOG(E, "error while resetting port config %d: %s", portConfigId,
+                        status.getDescription().c_str());
             return;
         }
         mPortConfigs.erase(it);
         return;
     }
-    ALOGE("%s: port config id %d not found", __func__, portConfigId);
+    AUGMENT_LOG(E, "port config id %d not found", portConfigId);
 }
 
 void Hal2AidlMapper::resetUnusedPatchesAndPortConfigs() {
@@ -979,6 +1016,8 @@
 }
 
 status_t Hal2AidlMapper::setDevicePortConnectedState(const AudioPort& devicePort, bool connected) {
+    AUGMENT_LOG(D, "state %s, device %s", (connected ? "connected" : "disconnected"),
+                devicePort.toString().c_str());
     resetUnusedPatchesAndPortConfigs();
     if (connected) {
         AudioDevice matchDevice = devicePort.ext.get<AudioPortExt::device>().device;
@@ -1009,8 +1048,7 @@
                 // port not found in every one of them.
                 return BAD_VALUE;
             } else {
-                ALOGD("%s: device port for device %s found in the module %s",
-                        __func__, matchDevice.toString().c_str(), mInstance.c_str());
+                AUGMENT_LOG(D, "device port for device %s found", matchDevice.toString().c_str());
             }
             templatePort = portsIt->second;
         }
@@ -1021,10 +1059,9 @@
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
                                 connectedPort, &connectedPort)));
         const auto [it, inserted] = mPorts.insert(std::make_pair(connectedPort.id, connectedPort));
-        LOG_ALWAYS_FATAL_IF(!inserted,
-                "%s: module %s, duplicate port ID received from HAL: %s, existing port: %s",
-                __func__, mInstance.c_str(), connectedPort.toString().c_str(),
-                it->second.toString().c_str());
+        LOG_ALWAYS_FATAL_IF(
+                !inserted, "%s duplicate port ID received from HAL: %s, existing port: %s",
+                __func__, connectedPort.toString().c_str(), it->second.toString().c_str());
         mConnectedPorts.insert(connectedPort.id);
         if (erasePortAfterConnectionIt != mPorts.end()) {
             mPorts.erase(erasePortAfterConnectionIt);
@@ -1037,8 +1074,7 @@
             // port not found in every one of them.
             return BAD_VALUE;
         } else {
-            ALOGD("%s: device port for device %s found in the module %s",
-                    __func__, matchDevice.toString().c_str(), mInstance.c_str());
+            AUGMENT_LOG(D, "device port for device %s found", matchDevice.toString().c_str());
         }
 
         // Disconnection of remote submix out with address "0" is a special case. We need to replace
@@ -1094,8 +1130,8 @@
             }
             portIt->second = *port;
         } else {
-            ALOGW("%s, port(%d) returned successfully from the HAL but not it is not cached",
-                  __func__, portId);
+            AUGMENT_LOG(W, "port(%d) returned successfully from the HAL but not it is not cached",
+                        portId);
         }
     }
     return status;
@@ -1104,8 +1140,7 @@
 status_t Hal2AidlMapper::updateRoutes() {
     RETURN_STATUS_IF_ERROR(
             statusTFromBinderStatus(mModule->getAudioRoutes(&mRoutes)));
-    ALOGW_IF(mRoutes.empty(), "%s: module %s returned an empty list of audio routes",
-            __func__, mInstance.c_str());
+    AUGMENT_LOG_IF(W, mRoutes.empty(), "returned an empty list of audio routes");
     if (mRemoteSubmixIn.has_value()) {
         // Remove mentions of the template remote submix input from routes.
         int32_t rSubmixInId = mRemoteSubmixIn->id;
@@ -1146,7 +1181,7 @@
             updateAudioPort(portId, &it->second);
         } else {
             // This must not happen
-            ALOGE("%s, cannot find port for id=%d", __func__, portId);
+            AUGMENT_LOG(E, "cannot find port for id=%d", portId);
         }
     }
 }
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.h b/media/libaudiohal/impl/Hal2AidlMapper.h
index 710b43e..2548752 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.h
+++ b/media/libaudiohal/impl/Hal2AidlMapper.h
@@ -26,6 +26,7 @@
 #include <media/AidlConversionUtil.h>
 
 #include "Cleanups.h"
+#include "ConversionHelperAidl.h"
 
 namespace android {
 
@@ -41,7 +42,7 @@
 // but still consider some of the outputs to be valid (for example, in 'open{Input|Output}Stream'),
 // 'Hal2AidlMapper' follows the Binder convention. It means that if a method returns an error,
 // the outputs may not be initialized at all and should not be considered by the caller.
-class Hal2AidlMapper {
+class Hal2AidlMapper : public ConversionHelperAidl {
   public:
     using Cleanups = Cleanups<Hal2AidlMapper>;
 
@@ -135,7 +136,6 @@
 
     enum PatchMatch { MATCH_SOURCES, MATCH_SINKS, MATCH_BOTH };
 
-    const std::string mInstance;
     const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
 
     bool audioDeviceMatches(const ::aidl::android::media::audio::common::AudioDevice& device,
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 030ee2b..e138cea 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -32,6 +32,7 @@
 #include <Utils.h>
 #include <utils/Log.h>
 
+#include "AidlUtils.h"
 #include "DeviceHalAidl.h"
 #include "EffectHalAidl.h"
 #include "StreamHalAidl.h"
@@ -58,6 +59,16 @@
 template<HalCommand::Tag cmd, typename T> HalCommand makeHalCommand(T data) {
     return HalCommand::make<cmd>(data);
 }
+
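+// Returns an FMQ error handler that calls mediautils::TimeCheck::signalAudioHals() and then
+// aborts, logging the queue name and the reported error message.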
+template <typename MQTypeError>
+auto fmqErrorHandler(const char* mqName) {
+    return [m = std::string(mqName)](MQTypeError fmqError, std::string&& errorMessage) {
+        mediautils::TimeCheck::signalAudioHals();
+        LOG_ALWAYS_FATAL_IF(fmqError != MQTypeError::NONE, "%s: %s",
+                m.c_str(), errorMessage.c_str());
+    };
+}
+
 }  // namespace
 
 // static
@@ -74,12 +85,12 @@
     return streamCommon;
 }
 
-StreamHalAidl::StreamHalAidl(
-        std::string_view className, bool isInput, const audio_config& config,
-        int32_t nominalLatency, StreamContextAidl&& context,
-        const std::shared_ptr<IStreamCommon>& stream,
-        const std::shared_ptr<IHalAdapterVendorExtension>& vext)
-        : ConversionHelperAidl(className),
+StreamHalAidl::StreamHalAidl(std::string_view className, bool isInput, const audio_config& config,
+                             int32_t nominalLatency, StreamContextAidl&& context,
+                             const std::shared_ptr<IStreamCommon>& stream,
+                             const std::shared_ptr<IHalAdapterVendorExtension>& vext)
+    : ConversionHelperAidl(className, std::string(isInput ? "in" : "out") + "|ioHandle:" +
+            std::to_string(context.getIoHandle())),
           mIsInput(isInput),
           mConfig(configToBase(config)),
           mContext(std::move(context)),
@@ -90,7 +101,7 @@
                            mContext.getBufferDurationMs(mConfig.sample_rate))
                   * NANOS_PER_MILLISECOND)
 {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     {
         std::lock_guard l(mLock);
         mLastReply.latencyMs = nominalLatency;
@@ -102,18 +113,29 @@
             StreamHalAidl::getAudioProperties(&config) == NO_ERROR) {
         mStreamPowerLog.init(config.sample_rate, config.channel_mask, config.format);
     }
+
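+    // Install fatal error handlers on the command, reply and (if present) data queues so
+    // that any FMQ error is surfaced immediately.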
+    if (mStream != nullptr) {
+        mContext.getCommandMQ()->setErrorHandler(
+                fmqErrorHandler<StreamContextAidl::CommandMQ::Error>("CommandMQ"));
+        mContext.getReplyMQ()->setErrorHandler(
+                fmqErrorHandler<StreamContextAidl::ReplyMQ::Error>("ReplyMQ"));
+        if (mContext.getDataMQ() != nullptr) {
+            mContext.getDataMQ()->setErrorHandler(
+                    fmqErrorHandler<StreamContextAidl::DataMQ::Error>("DataMQ"));
+        }
+    }
 }
 
 StreamHalAidl::~StreamHalAidl() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     if (mStream != nullptr) {
         ndk::ScopedAStatus status = mStream->close();
-        ALOGE_IF(!status.isOk(), "%s: status %s", __func__, status.getDescription().c_str());
+        AUGMENT_LOG_IF(E, !status.isOk(), "status %s", status.getDescription().c_str());
     }
 }
 
 status_t StreamHalAidl::getBufferSize(size_t *size) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     if (size == nullptr) {
         return BAD_VALUE;
     }
@@ -122,11 +144,12 @@
         return NO_INIT;
     }
     *size = mContext.getBufferSizeBytes();
+    AUGMENT_LOG(I, "size: %zu", *size);
     return OK;
 }
 
 status_t StreamHalAidl::getAudioProperties(audio_config_base_t *configBase) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     if (configBase == nullptr) {
         return BAD_VALUE;
     }
@@ -136,10 +159,11 @@
 }
 
 status_t StreamHalAidl::setParameters(const String8& kvPairs) {
+    AUGMENT_LOG(V);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     AudioParameter parameters(kvPairs);
-    ALOGD("%s: parameters: %s", __func__, parameters.toString().c_str());
+    AUGMENT_LOG(D, "parameters: %s", parameters.toString().c_str());
 
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
                     parameters, String8(AudioParameter::keyStreamHwAvSync),
@@ -150,6 +174,7 @@
 }
 
 status_t StreamHalAidl::getParameters(const String8& keys __unused, String8 *values) {
+    AUGMENT_LOG(V);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (values == nullptr) {
@@ -161,7 +186,7 @@
 }
 
 status_t StreamHalAidl::getFrameSize(size_t *size) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     if (size == nullptr) {
         return BAD_VALUE;
     }
@@ -173,7 +198,7 @@
 }
 
 status_t StreamHalAidl::addEffect(sp<EffectHalInterface> effect) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (effect == nullptr) {
@@ -184,7 +209,7 @@
 }
 
 status_t StreamHalAidl::removeEffect(sp<EffectHalInterface> effect) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (effect == nullptr) {
@@ -195,7 +220,7 @@
 }
 
 status_t StreamHalAidl::standby() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     const auto state = getState();
@@ -207,9 +232,11 @@
             RETURN_STATUS_IF_ERROR(pause(&reply));
             if (reply.state != StreamDescriptor::State::PAUSED &&
                     reply.state != StreamDescriptor::State::DRAIN_PAUSED &&
-                    reply.state != StreamDescriptor::State::TRANSFER_PAUSED) {
-                ALOGE("%s: unexpected stream state: %s (expected PAUSED)",
-                        __func__, toString(reply.state).c_str());
+                    reply.state != StreamDescriptor::State::TRANSFER_PAUSED &&
+                    (state != StreamDescriptor::State::DRAINING ||
+                        reply.state != StreamDescriptor::State::IDLE)) {
+                AUGMENT_LOG(E, "unexpected stream state: %s (expected PAUSED)",
+                            toString(reply.state).c_str());
                 return INVALID_OPERATION;
             }
             FALLTHROUGH_INTENDED;
@@ -219,8 +246,8 @@
             if (mIsInput) return flush();
             RETURN_STATUS_IF_ERROR(flush(&reply));
             if (reply.state != StreamDescriptor::State::IDLE) {
-                ALOGE("%s: unexpected stream state: %s (expected IDLE)",
-                        __func__, toString(reply.state).c_str());
+                AUGMENT_LOG(E, "unexpected stream state: %s (expected IDLE)",
+                            toString(reply.state).c_str());
                 return INVALID_OPERATION;
             }
             FALLTHROUGH_INTENDED;
@@ -228,22 +255,22 @@
             RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::standby>(),
                             &reply, true /*safeFromNonWorkerThread*/));
             if (reply.state != StreamDescriptor::State::STANDBY) {
-                ALOGE("%s: unexpected stream state: %s (expected STANDBY)",
-                        __func__, toString(reply.state).c_str());
+                AUGMENT_LOG(E, "unexpected stream state: %s (expected STANDBY)",
+                            toString(reply.state).c_str());
                 return INVALID_OPERATION;
             }
             FALLTHROUGH_INTENDED;
         case StreamDescriptor::State::STANDBY:
             return OK;
         default:
-            ALOGE("%s: not supported from %s stream state %s",
-                    __func__, mIsInput ? "input" : "output", toString(state).c_str());
+            AUGMENT_LOG(E, "not supported from %s stream state %s", mIsInput ? "input" : "output",
+                        toString(state).c_str());
             return INVALID_OPERATION;
     }
 }
 
 status_t StreamHalAidl::dump(int fd, const Vector<String16>& args) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     Vector<String16> newArgs = args;
@@ -254,7 +281,7 @@
 }
 
 status_t StreamHalAidl::start() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (!mContext.isMmapped()) {
@@ -267,8 +294,8 @@
             RETURN_STATUS_IF_ERROR(
                     sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
             if (reply.state != StreamDescriptor::State::IDLE) {
-                ALOGE("%s: unexpected stream state: %s (expected IDLE)",
-                        __func__, toString(reply.state).c_str());
+                AUGMENT_LOG(E, "unexpected stream state: %s (expected IDLE)",
+                            toString(reply.state).c_str());
                 return INVALID_OPERATION;
             }
             FALLTHROUGH_INTENDED;
@@ -276,8 +303,8 @@
             RETURN_STATUS_IF_ERROR(
                     sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true));
             if (reply.state != StreamDescriptor::State::ACTIVE) {
-                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
-                        __func__, toString(reply.state).c_str());
+                AUGMENT_LOG(E, "unexpected stream state: %s (expected ACTIVE)",
+                            toString(reply.state).c_str());
                 return INVALID_OPERATION;
             }
             FALLTHROUGH_INTENDED;
@@ -287,20 +314,20 @@
             RETURN_STATUS_IF_ERROR(
                     sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
             if (reply.state != StreamDescriptor::State::ACTIVE) {
-                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
-                        __func__, toString(reply.state).c_str());
+                AUGMENT_LOG(E, "unexpected stream state: %s (expected ACTIVE)",
+                            toString(reply.state).c_str());
                 return INVALID_OPERATION;
             }
             return OK;
         default:
-            ALOGE("%s: not supported from %s stream state %s",
-                    __func__, mIsInput ? "input" : "output", toString(reply.state).c_str());
+            AUGMENT_LOG(E, "not supported from %s stream state %s", mIsInput ? "input" : "output",
+                        toString(reply.state).c_str());
             return INVALID_OPERATION;
     }
 }
 
 status_t StreamHalAidl::stop() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (!mContext.isMmapped()) {
@@ -317,57 +344,68 @@
         return flush();
     } else if (state != StreamDescriptor::State::IDLE &&
             state != StreamDescriptor::State::STANDBY) {
-        ALOGE("%s: not supported from %s stream state %s",
-                __func__, mIsInput ? "input" : "output", toString(state).c_str());
+        AUGMENT_LOG(E, "not supported from %s stream state %s", mIsInput ? "input" : "output",
+                    toString(state).c_str());
         return INVALID_OPERATION;
     }
     return OK;
 }
 
 status_t StreamHalAidl::getLatency(uint32_t *latency) {
-    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(V);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
     RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
     *latency = std::clamp(std::max<int32_t>(0, reply.latencyMs), 1, 3000);
-    ALOGW_IF(reply.latencyMs != static_cast<int32_t>(*latency),
-             "Suspicious latency value reported by HAL: %d, clamped to %u", reply.latencyMs,
-             *latency);
+    AUGMENT_LOG_IF(W, reply.latencyMs != static_cast<int32_t>(*latency),
+                   "Suspicious latency value reported by HAL: %d, clamped to %u", reply.latencyMs,
+                   *latency);
     return OK;
 }
 
 status_t StreamHalAidl::getObservablePosition(int64_t* frames, int64_t* timestamp,
         StatePositions* statePositions) {
-    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(V);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
     RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions));
-    *frames = std::max<int64_t>(0, reply.observable.frames);
-    *timestamp = std::max<int64_t>(0, reply.observable.timeNs);
+    if (reply.observable.frames == StreamDescriptor::Position::UNKNOWN ||
+        reply.observable.timeNs == StreamDescriptor::Position::UNKNOWN) {
+        return INVALID_OPERATION;
+    }
+    *frames = reply.observable.frames;
+    *timestamp = reply.observable.timeNs;
     return OK;
 }
 
 status_t StreamHalAidl::getHardwarePosition(int64_t *frames, int64_t *timestamp) {
-    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(V);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
     RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
-    *frames = std::max<int64_t>(0, reply.hardware.frames);
-    *timestamp = std::max<int64_t>(0, reply.hardware.timeNs);
+    if (reply.hardware.frames == StreamDescriptor::Position::UNKNOWN ||
+        reply.hardware.timeNs == StreamDescriptor::Position::UNKNOWN) {
+        return INVALID_OPERATION;
+    }
+    *frames = reply.hardware.frames;
+    *timestamp = reply.hardware.timeNs;
     return OK;
 }
 
 status_t StreamHalAidl::getXruns(int32_t *frames) {
-    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(V);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
     RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
-    *frames = std::max<int32_t>(0, reply.xrunFrames);
+    if (reply.xrunFrames == StreamDescriptor::Position::UNKNOWN) {
+        return INVALID_OPERATION;
+    }
+    *frames = reply.xrunFrames;
     return OK;
 }
 
 status_t StreamHalAidl::transfer(void *buffer, size_t bytes, size_t *transferred) {
-    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(V);
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (!mStream || mContext.getDataMQ() == nullptr) return NO_INIT;
     mWorkerTid.store(gettid(), std::memory_order_release);
@@ -379,22 +417,19 @@
         StreamDescriptor::Reply reply;
         RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply));
         if (reply.state != StreamDescriptor::State::IDLE) {
-            ALOGE("%s: failed to get the stream out of standby, actual state: %s",
-                    __func__, toString(reply.state).c_str());
+            AUGMENT_LOG(E, "failed to get the stream out of standby, actual state: %s",
+                        toString(reply.state).c_str());
             return INVALID_OPERATION;
         }
     }
-    StreamContextAidl::DataMQ::Error fmqError = StreamContextAidl::DataMQ::Error::NONE;
-    std::string fmqErrorMsg;
     if (!mIsInput) {
-        bytes = std::min(bytes,
-                mContext.getDataMQ()->availableToWrite(&fmqError, &fmqErrorMsg));
+        bytes = std::min(bytes, mContext.getDataMQ()->availableToWrite());
     }
     StreamDescriptor::Command burst =
             StreamDescriptor::Command::make<StreamDescriptor::Command::Tag::burst>(bytes);
     if (!mIsInput) {
         if (!mContext.getDataMQ()->write(static_cast<const int8_t*>(buffer), bytes)) {
-            ALOGE("%s: failed to write %zu bytes to data MQ", __func__, bytes);
+            AUGMENT_LOG(E, "failed to write %zu bytes to data MQ", bytes);
             return NOT_ENOUGH_DATA;
         }
     }
@@ -405,28 +440,54 @@
         LOG_ALWAYS_FATAL_IF(*transferred > bytes,
                 "%s: HAL module read %zu bytes, which exceeds requested count %zu",
                 __func__, *transferred, bytes);
-        if (auto toRead = mContext.getDataMQ()->availableToRead(&fmqError, &fmqErrorMsg);
+        if (auto toRead = mContext.getDataMQ()->availableToRead();
                 toRead != 0 && !mContext.getDataMQ()->read(static_cast<int8_t*>(buffer), toRead)) {
-            ALOGE("%s: failed to read %zu bytes to data MQ", __func__, toRead);
+            AUGMENT_LOG(E, "failed to read %zu bytes to data MQ", toRead);
             return NOT_ENOUGH_DATA;
         }
     }
-    LOG_ALWAYS_FATAL_IF(fmqError != StreamContextAidl::DataMQ::Error::NONE,
-            "%s", fmqErrorMsg.c_str());
     mStreamPowerLog.log(buffer, *transferred);
     return OK;
 }
 
 status_t StreamHalAidl::pause(StreamDescriptor::Reply* reply) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return sendCommand(makeHalCommand<HalCommand::Tag::pause>(), reply,
-            true /*safeFromNonWorkerThread*/);  // The workers stops its I/O activity first.
+
+    if (const auto state = getState(); isInPlayOrRecordState(state)) {
+        StreamDescriptor::Reply localReply{};
+        StreamDescriptor::Reply* innerReply = reply ?: &localReply;
+        auto status = sendCommand(
+                makeHalCommand<HalCommand::Tag::pause>(), innerReply,
+                true /*safeFromNonWorkerThread*/);  // The worker stops its I/O activity first.
+        if (status == STATUS_INVALID_OPERATION &&
+                !isInPlayOrRecordState(innerReply->state)) {
+            /**
+             * In transient states like DRAINING, the HAL may change its
+             * StreamDescriptor::State on its own and fall out of sync with the client.
+             * The client can then send a command that is unexpected for the HAL's current
+             * state, and the HAL returns a failure. Such a failure is expected, and the
+             * client handles it gracefully. Examples where the HAL changes its state:
+             * 1) DRAINING -> IDLE (on an empty buffer)
+             * 2) DRAINING -> IDLE (on IStreamCallback::onDrainReady)
+             **/
+            AUGMENT_LOG(D,
+                        "HAL failed to handle the 'pause' command, but the stream is already in"
+                        " one of the PAUSED states, current state: %s",
+                        toString(state).c_str());
+            return OK;
+        }
+        return status;
+    } else {
+        AUGMENT_LOG(D, "already stream in one of the PAUSED kind of states, current state: %s",
+                toString(state).c_str());
+        return OK;
+    }
 }
 
 status_t StreamHalAidl::resume(StreamDescriptor::Reply* reply) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (mIsInput) {
@@ -440,30 +501,26 @@
             RETURN_STATUS_IF_ERROR(
                     sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply));
             if (innerReply->state != StreamDescriptor::State::ACTIVE) {
-                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
-                        __func__, toString(innerReply->state).c_str());
+                AUGMENT_LOG(E, "unexpected stream state: %s (expected ACTIVE)",
+                            toString(innerReply->state).c_str());
                 return INVALID_OPERATION;
             }
             return OK;
-        } else if (state == StreamDescriptor::State::PAUSED ||
-                   state == StreamDescriptor::State::TRANSFER_PAUSED ||
-                   state == StreamDescriptor::State::DRAIN_PAUSED) {
+        } else if (isInPausedState(state)) {
             return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
-        } else if (state == StreamDescriptor::State::ACTIVE ||
-                   state == StreamDescriptor::State::TRANSFERRING ||
-                   state == StreamDescriptor::State::DRAINING) {
-            ALOGD("%s: already in stream state: %s", __func__, toString(state).c_str());
+        } else if (isInPlayOrRecordState(state)) {
+            AUGMENT_LOG(D, "already in stream state: %s", toString(state).c_str());
             return OK;
         } else {
-            ALOGE("%s: unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
-                        __func__, toString(state).c_str());
+            AUGMENT_LOG(E, "unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
+                        toString(state).c_str());
             return INVALID_OPERATION;
         }
     }
 }
 
 status_t StreamHalAidl::drain(bool earlyNotify, StreamDescriptor::Reply* reply) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     return sendCommand(makeHalCommand<HalCommand::Tag::drain>(
@@ -474,33 +531,58 @@
 }
 
 status_t StreamHalAidl::flush(StreamDescriptor::Reply* reply) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return sendCommand(makeHalCommand<HalCommand::Tag::flush>(), reply,
-            true /*safeFromNonWorkerThread*/);  // The workers stops its I/O activity first.
+
+    if (const auto state = getState(); isInPausedState(state)) {
+        return sendCommand(
+                makeHalCommand<HalCommand::Tag::flush>(), reply,
+                true /*safeFromNonWorkerThread*/);  // The worker stops its I/O activity first.
+    } else if (isInPlayOrRecordState(state)) {
+        AUGMENT_LOG(E, "found stream in non-flushable state: %s", toString(state).c_str());
+        return INVALID_OPERATION;
+    } else {
+        AUGMENT_LOG(D, "already stream in one of the flushable state: current state: %s",
+                    toString(state).c_str());
+        return OK;
+    }
 }
 
 status_t StreamHalAidl::exit() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     return statusTFromBinderStatus(mStream->prepareToClose());
 }
 
 void StreamHalAidl::onAsyncTransferReady() {
-    if (auto state = getState(); state == StreamDescriptor::State::TRANSFERRING) {
+    StreamDescriptor::State state;
+    {
+        // Use 'mCommandReplyLock' to ensure that 'sendCommand' has finished updating the state
+        // after the reply from the 'burst' command.
+        std::lock_guard l(mCommandReplyLock);
+        state = getState();
+    }
+    if (state == StreamDescriptor::State::TRANSFERRING) {
         // Retrieve the current state together with position counters unconditionally
         // to ensure that the state on our side gets updated.
         sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
                 nullptr, true /*safeFromNonWorkerThread */);
     } else {
-        ALOGW("%s: unexpected onTransferReady in the state %s", __func__, toString(state).c_str());
+        AUGMENT_LOG(W, "unexpected onTransferReady in the state %s", toString(state).c_str());
     }
 }
 
 void StreamHalAidl::onAsyncDrainReady() {
-    if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
+    StreamDescriptor::State state;
+    {
+        // Use 'mCommandReplyLock' to ensure that 'sendCommand' has finished updating the state
+        // after the reply from the 'drain' command.
+        std::lock_guard l(mCommandReplyLock);
+        state = getState();
+    }
+    if (state == StreamDescriptor::State::DRAINING) {
         // Retrieve the current state together with position counters unconditionally
         // to ensure that the state on our side gets updated.
         sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), nullptr,
@@ -508,21 +590,23 @@
         // For compatibility with HIDL behavior, apply a "soft" position reset
         // after receiving the "drain ready" callback.
         std::lock_guard l(mLock);
-        mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
+        if (mLastReply.observable.frames != StreamDescriptor::Position::UNKNOWN) {
+            mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
+        }
     } else {
-        ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
+        AUGMENT_LOG(W, "unexpected onDrainReady in the state %s", toString(state).c_str());
     }
 }
 
 void StreamHalAidl::onAsyncError() {
     std::lock_guard l(mLock);
-    ALOGW("%s: received in the state %s", __func__, toString(mLastReply.state).c_str());
+    AUGMENT_LOG(W, "received in the state %s", toString(mLastReply.state).c_str());
     mLastReply.state = StreamDescriptor::State::ERROR;
 }
 
 status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
                                          struct audio_mmap_buffer_info *info) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (!mContext.isMmapped()) {
@@ -582,15 +666,14 @@
     {
         std::lock_guard l(mCommandReplyLock);
         if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
-            ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
+            AUGMENT_LOG(E, "failed to write command %s to MQ", command.toString().c_str());
             return NOT_ENOUGH_DATA;
         }
         if (reply == nullptr) {
             reply = &localReply;
         }
         if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
-            ALOGE("%s: failed to read from reply MQ, command %s",
-                    __func__, command.toString().c_str());
+            AUGMENT_LOG(E, "failed to read from reply MQ, command %s", command.toString().c_str());
             return NOT_ENOUGH_DATA;
         }
         {
@@ -602,7 +685,8 @@
             }
             mLastReply = *reply;
             mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
-            if (!mIsInput && reply->status == STATUS_OK) {
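+            // Only update the position snapshots below when the HAL reports a known
+            // observable position.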
+            if (!mIsInput && reply->status == STATUS_OK &&
+                    reply->observable.frames != StreamDescriptor::Position::UNKNOWN) {
                 if (command.getTag() == StreamDescriptor::Command::standby &&
                         reply->state == StreamDescriptor::State::STANDBY) {
                     mStatePositions.framesAtStandby = reply->observable.frames;
@@ -627,8 +711,8 @@
         case STATUS_INVALID_OPERATION: return INVALID_OPERATION;
         case STATUS_NOT_ENOUGH_DATA: return NOT_ENOUGH_DATA;
         default:
-            ALOGE("%s: unexpected status %d returned for command %s",
-                    __func__, reply->status, command.toString().c_str());
+            AUGMENT_LOG(E, "unexpected status %d returned for command %s", reply->status,
+                        command.toString().c_str());
             return INVALID_OPERATION;
     }
 }
@@ -693,10 +777,10 @@
     if (!mStream) return NO_INIT;
 
     AudioParameter parameters(kvPairs);
-    ALOGD("%s: parameters: \"%s\"", __func__, parameters.toString().c_str());
+    AUGMENT_LOG(D, "parameters: \"%s\"", parameters.toString().c_str());
 
     if (status_t status = filterAndUpdateOffloadMetadata(parameters); status != OK) {
-        ALOGW("%s: filtering or updating offload metadata failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filtering or updating offload metadata failed: %d", status);
     }
 
     return StreamHalAidl::setParameters(parameters.toString());
@@ -707,6 +791,7 @@
 }
 
 status_t StreamOutHalAidl::setVolume(float left, float right) {
+    AUGMENT_LOG(V, "left %f right %f", left, right);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     size_t channelCount = audio_channel_count_from_out_mask(mConfig.channel_mask);
@@ -760,11 +845,11 @@
 }
 
 status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
-    ALOGD("%p %s", this, __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (!mContext.isAsynchronous()) {
-        ALOGE("%s: the callback is intended for asynchronous streams only", __func__);
+        AUGMENT_LOG(E, "the callback is intended for asynchronous streams only");
         return INVALID_OPERATION;
     }
     mClientCallback = callback;
@@ -802,9 +887,9 @@
 status_t StreamOutHalAidl::drain(bool earlyNotify) {
     if (!mStream) return NO_INIT;
 
-    if(const auto state = getState(); state == StreamDescriptor::State::IDLE) {
-        ALOGD("%p %s stream already in IDLE state", this, __func__);
-        if(mContext.isAsynchronous()) onDrainReady();
+    if (const auto state = getState(); isInDrainedState(state)) {
+        AUGMENT_LOG(D, "stream already in %s state", toString(state).c_str());
+        if (mContext.isAsynchronous()) onDrainReady();
         return OK;
     }
 
@@ -836,7 +921,7 @@
 }
 
 status_t StreamOutHalAidl::presentationComplete() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     return OK;
 }
 
@@ -1027,10 +1112,10 @@
         updateMetadata = true;
     }
     if (updateMetadata) {
-        ALOGD("%s set offload metadata %s", __func__, mOffloadMetadata.toString().c_str());
+        AUGMENT_LOG(D, "set offload metadata %s", mOffloadMetadata.toString().c_str());
         if (status_t status = statusTFromBinderStatus(
                         mStream->updateOffloadMetadata(mOffloadMetadata)); status != OK) {
-            ALOGE("%s: updateOffloadMetadata failed %d", __func__, status);
+            AUGMENT_LOG(E, "updateOffloadMetadata failed %d", status);
             return status;
         }
     }
@@ -1117,7 +1202,7 @@
             // Note: info.portId is not filled because it's a bit of framework info.
             result.push_back(std::move(info));
         } else {
-            ALOGE("%s: no static info for active microphone with id '%s'", __func__, d.id.c_str());
+            AUGMENT_LOG(E, "no static info for active microphone with id '%s'", d.id.c_str());
         }
     }
     *microphones = std::move(result);
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index 0587640..a1cdac4 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -53,7 +53,7 @@
 
     StreamContextAidl(
             ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor,
-            bool isAsynchronous)
+            bool isAsynchronous, int ioHandle)
         : mFrameSizeBytes(descriptor.frameSizeBytes),
           mCommandMQ(new CommandMQ(descriptor.command)),
           mReplyMQ(new ReplyMQ(descriptor.reply)),
@@ -61,7 +61,8 @@
           mDataMQ(maybeCreateDataMQ(descriptor)),
           mIsAsynchronous(isAsynchronous),
           mIsMmapped(isMmapped(descriptor)),
-          mMmapBufferDescriptor(maybeGetMmapBuffer(descriptor)) {}
+          mMmapBufferDescriptor(maybeGetMmapBuffer(descriptor)),
+          mIoHandle(ioHandle) {}
     StreamContextAidl(StreamContextAidl&& other) :
             mFrameSizeBytes(other.mFrameSizeBytes),
             mCommandMQ(std::move(other.mCommandMQ)),
@@ -70,7 +71,8 @@
             mDataMQ(std::move(other.mDataMQ)),
             mIsAsynchronous(other.mIsAsynchronous),
             mIsMmapped(other.mIsMmapped),
-            mMmapBufferDescriptor(std::move(other.mMmapBufferDescriptor)) {}
+            mMmapBufferDescriptor(std::move(other.mMmapBufferDescriptor)),
+            mIoHandle(other.mIoHandle) {}
     StreamContextAidl& operator=(StreamContextAidl&& other) {
         mFrameSizeBytes = other.mFrameSizeBytes;
         mCommandMQ = std::move(other.mCommandMQ);
@@ -80,6 +82,7 @@
         mIsAsynchronous = other.mIsAsynchronous;
         mIsMmapped = other.mIsMmapped;
         mMmapBufferDescriptor = std::move(other.mMmapBufferDescriptor);
+        mIoHandle = other.mIoHandle;
         return *this;
     }
     bool isValid() const {
@@ -105,7 +108,9 @@
     bool isAsynchronous() const { return mIsAsynchronous; }
     bool isMmapped() const { return mIsMmapped; }
     const MmapBufferDescriptor& getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
-    size_t getMmapBurstSize() const { return mMmapBufferDescriptor.burstSizeFrames;}
+    size_t getMmapBurstSize() const { return mMmapBufferDescriptor.burstSizeFrames; }
+    int getIoHandle() const { return mIoHandle; }
+
   private:
     static std::unique_ptr<DataMQ> maybeCreateDataMQ(
             const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
@@ -137,6 +142,7 @@
     bool mIsAsynchronous;
     bool mIsMmapped;
     MmapBufferDescriptor mMmapBufferDescriptor;
+    int mIoHandle;
 };
 
 class StreamHalAidl : public virtual StreamHalInterface, public ConversionHelperAidl {
@@ -220,6 +226,41 @@
         return mLastReply.state;
     }
 
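+    // Helpers that group StreamDescriptor::State values into drained, playing/recording,
+    // and paused categories, used when deciding how to handle pause, resume, flush,
+    // and drain requests.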
+    bool isInDrainedState(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::State state) {
+        if (state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::IDLE ||
+            state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::STANDBY) {
+            // drain equivalent states
+            return true;
+        }
+        return false;
+    }
+
+    bool isInPlayOrRecordState(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::State state) {
+        if (state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::ACTIVE ||
+            state ==
+                    ::aidl::android::hardware::audio::core::StreamDescriptor::State::TRANSFERRING ||
+            state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::DRAINING) {
+            // play or record equivalent states
+            return true;
+        }
+        return false;
+    }
+
+    bool isInPausedState(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::State& state) {
+        if (state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::PAUSED ||
+            state ==
+                    ::aidl::android::hardware::audio::core::StreamDescriptor::State::DRAIN_PAUSED ||
+            state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::
+                             TRANSFER_PAUSED) {
+            // pause equivalent states
+            return true;
+        }
+        return false;
+    }
+
     status_t getLatency(uint32_t *latency);
 
     // Always returns non-negative values.
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 7f6c1fb..3f16526 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -90,7 +90,8 @@
             audio_output_flags_t flags,
             struct audio_config *config,
             const char *address,
-            sp<StreamOutHalInterface> *outStream) = 0;
+            sp<StreamOutHalInterface> *outStream,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata = {}) = 0;
 
     // Creates and opens the audio hardware input stream. The stream is closed
     // by releasing all references to the returned object.
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 50b748e..0f5334f 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -18,6 +18,7 @@
 #include <memory>
 #include <mutex>
 #include <string>
+#include <thread>
 #include <vector>
 
 #define LOG_TAG "CoreAudioHalAidlTest"
@@ -28,6 +29,7 @@
 #include <StreamHalAidl.h>
 #include <aidl/android/hardware/audio/core/BnModule.h>
 #include <aidl/android/hardware/audio/core/BnStreamCommon.h>
+#include <aidl/android/hardware/audio/core/BnStreamOut.h>
 #include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>
 #include <aidl/android/media/audio/common/AudioGainMode.h>
 #include <aidl/android/media/audio/common/Int.h>
@@ -64,13 +66,13 @@
     const std::vector<VendorParameter>& getSyncParameters() const { return mSyncParameters; }
 
   protected:
-    ndk::ScopedAStatus getVendorParametersImpl(const std::vector<std::string>& in_parameterIds) {
+    ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds) {
         mGetParameterIds.insert(mGetParameterIds.end(), in_parameterIds.begin(),
                                 in_parameterIds.end());
         return ndk::ScopedAStatus::ok();
     }
-    ndk::ScopedAStatus setVendorParametersImpl(const std::vector<VendorParameter>& in_parameters,
-                                               bool async) {
+    ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
+                                           bool async) {
         if (async) {
             mAsyncParameters.insert(mAsyncParameters.end(), in_parameters.begin(),
                                     in_parameters.end());
@@ -187,6 +189,11 @@
     speakerOutDevice.profiles = standardPcmAudioProfiles;
     c.ports.push_back(speakerOutDevice);
 
+    AudioPort primaryOutMix =
+            createPort(c.nextPortId++, "primary output", 0, false, createPortMixExt(1, 1));
+    primaryOutMix.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(primaryOutMix);
+
     AudioPort btOutDevice =
             createPort(c.nextPortId++, "BT A2DP Out", 0, false,
                        createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
@@ -200,11 +207,141 @@
     c.ports.push_back(btOutMix);
 
     c.routes.push_back(createRoute({micInDevice, micInBackDevice}, primaryInMix));
+    c.routes.push_back(createRoute({primaryOutMix}, speakerOutDevice));
     c.routes.push_back(createRoute({btOutMix}, btOutDevice));
 
     return c;
 }
 
+class StreamCommonMock : public ::aidl::android::hardware::audio::core::BnStreamCommon,
+                         public VendorParameterMock {
+    ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus prepareToClose() override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus updateHwAvSyncId(int32_t) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds,
+                                           std::vector<VendorParameter>*) override {
+        return VendorParameterMock::getVendorParameters(in_parameterIds);
+    }
+    ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
+                                           bool async) override {
+        return VendorParameterMock::setVendorParameters(in_parameters, async);
+    }
+    ndk::ScopedAStatus addEffect(
+            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus removeEffect(
+            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+};
+
+class StreamContext {
+  public:
+    using Descriptor = ::aidl::android::hardware::audio::core::StreamDescriptor;
+    typedef ::android::AidlMessageQueue<
+            Descriptor::Command, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
+            CommandMQ;
+    typedef ::android::AidlMessageQueue<
+            Descriptor::Reply, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
+            ReplyMQ;
+    typedef ::android::AidlMessageQueue<
+            int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
+            DataMQ;
+
+    StreamContext() = default;
+    StreamContext(std::unique_ptr<CommandMQ> commandMQ, std::unique_ptr<ReplyMQ> replyMQ,
+                  std::unique_ptr<DataMQ> dataMQ)
+        : mCommandMQ(std::move(commandMQ)),
+          mReplyMQ(std::move(replyMQ)),
+          mDataMQ(std::move(dataMQ)) {}
+    void fillDescriptor(Descriptor* desc) {
+        if (mCommandMQ) {
+            desc->command = mCommandMQ->dupeDesc();
+        }
+        if (mReplyMQ) {
+            desc->reply = mReplyMQ->dupeDesc();
+        }
+        if (mDataMQ) {
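+            // Arbitrary test geometry: 2 bytes per frame and 48 frames, matching the
+            // 96-byte data MQ created below.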
+            desc->frameSizeBytes = 2;
+            desc->bufferSizeFrames = 48;
+            desc->audio.set<Descriptor::AudioBuffer::Tag::fmq>(mDataMQ->dupeDesc());
+        }
+    }
+
+  private:
+    std::unique_ptr<CommandMQ> mCommandMQ =
+            std::make_unique<CommandMQ>(1, true /*configureEventFlagWord*/);
+    std::unique_ptr<ReplyMQ> mReplyMQ =
+            std::make_unique<ReplyMQ>(1, true /*configureEventFlagWord*/);
+    std::unique_ptr<DataMQ> mDataMQ = std::make_unique<DataMQ>(96);
+};
+
+class StreamOutMock : public ::aidl::android::hardware::audio::core::BnStreamOut {
+  public:
+    explicit StreamOutMock(StreamContext&& ctx) : mContext(std::move(ctx)) {}
+
+  private:
+    ndk::ScopedAStatus getStreamCommon(
+            std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon>* _aidl_return)
+            override {
+        if (!mCommon) {
+            mCommon = ndk::SharedRefBase::make<StreamCommonMock>();
+        }
+        *_aidl_return = mCommon;
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus updateMetadata(
+            const ::aidl::android::hardware::audio::common::SourceMetadata&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus updateOffloadMetadata(
+            const ::aidl::android::hardware::audio::common::AudioOffloadMetadata&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getHwVolume(std::vector<float>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setHwVolume(const std::vector<float>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioDescriptionMixLevel(float*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setAudioDescriptionMixLevel(float) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getDualMonoMode(
+            ::aidl::android::media::audio::common::AudioDualMonoMode*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setDualMonoMode(
+            ::aidl::android::media::audio::common::AudioDualMonoMode) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getRecommendedLatencyModes(
+            std::vector<::aidl::android::media::audio::common::AudioLatencyMode>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setLatencyMode(
+            ::aidl::android::media::audio::common::AudioLatencyMode) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getPlaybackRateParameters(
+            ::aidl::android::media::audio::common::AudioPlaybackRate*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setPlaybackRateParameters(
+            const ::aidl::android::media::audio::common::AudioPlaybackRate&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus selectPresentation(int32_t, int32_t) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    StreamContext mContext;
+    std::shared_ptr<StreamCommonMock> mCommon;
+};
+
 class ModuleMock : public ::aidl::android::hardware::audio::core::BnModule,
                    public VendorParameterMock {
   public:
@@ -339,7 +476,10 @@
         return ndk::ScopedAStatus::ok();
     }
     ndk::ScopedAStatus openOutputStream(const OpenOutputStreamArguments&,
-                                        OpenOutputStreamReturn*) override {
+                                        OpenOutputStreamReturn* _aidl_return) override {
+        StreamContext context;
+        context.fillDescriptor(&_aidl_return->desc);
+        _aidl_return->stream = ndk::SharedRefBase::make<StreamOutMock>(std::move(context));
         return ndk::ScopedAStatus::ok();
     }
     ndk::ScopedAStatus getSupportedPlaybackRateFactors(SupportedPlaybackRateFactors*) override {
@@ -351,6 +491,7 @@
         if (requested.id == 0) {
             *patch = requested;
             patch->id = mConfig.nextPatchId++;
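+            // Report a fixed nominal latency for the created patch; the value is arbitrary
+            // for the mock.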
+            patch->latenciesMs.push_back(100);
             mConfig.patches.push_back(*patch);
             ALOGD("%s: returning %s", __func__, patch->toString().c_str());
         } else {
@@ -437,11 +578,11 @@
     ndk::ScopedAStatus generateHwAvSyncId(int32_t*) override { return ndk::ScopedAStatus::ok(); }
     ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds,
                                            std::vector<VendorParameter>*) override {
-        return getVendorParametersImpl(in_parameterIds);
+        return VendorParameterMock::getVendorParameters(in_parameterIds);
     }
     ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
                                            bool async) override {
-        return setVendorParametersImpl(in_parameters, async);
+        return VendorParameterMock::setVendorParameters(in_parameters, async);
     }
     ndk::ScopedAStatus addDeviceEffect(
             int32_t,
@@ -474,29 +615,6 @@
     ScreenRotation mScreenRotation = ScreenRotation::DEG_0;
 };
 
-class StreamCommonMock : public ::aidl::android::hardware::audio::core::BnStreamCommon,
-                         public VendorParameterMock {
-    ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
-    ndk::ScopedAStatus prepareToClose() override { return ndk::ScopedAStatus::ok(); }
-    ndk::ScopedAStatus updateHwAvSyncId(int32_t) override { return ndk::ScopedAStatus::ok(); }
-    ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds,
-                                           std::vector<VendorParameter>*) override {
-        return getVendorParametersImpl(in_parameterIds);
-    }
-    ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
-                                           bool async) override {
-        return setVendorParametersImpl(in_parameters, async);
-    }
-    ndk::ScopedAStatus addEffect(
-            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
-        return ndk::ScopedAStatus::ok();
-    }
-    ndk::ScopedAStatus removeEffect(
-            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
-        return ndk::ScopedAStatus::ok();
-    }
-};
-
 VendorParameter makeVendorParameter(const std::string& id, int value) {
     VendorParameter result{.id = id};
     // Note: in real life, a parcelable type defined by vendor must be used,
@@ -708,7 +826,7 @@
 class DeviceHalAidlTest : public testing::Test {
   public:
     void SetUp() override {
-        mModule = ndk::SharedRefBase::make<ModuleMock>();
+        mModule = ndk::SharedRefBase::make<ModuleMock>(getTestConfiguration());
         mDevice = sp<DeviceHalAidl>::make("test", mModule, nullptr /*vext*/);
     }
     void TearDown() override {
@@ -750,6 +868,46 @@
     EXPECT_EQ(ScreenRotation::DEG_0, mModule->getScreenRotation());
 }
 
+// See http://b/357487484#comment6
+TEST_F(DeviceHalAidlTest, StreamReleaseOnMapperCleanup) {
+    ASSERT_EQ(OK, mDevice->initCheck());
+    // Since the test is in effect probabilistic, try multiple times.
+    for (int i = 0; i < 100; ++i) {
+        sp<StreamOutHalInterface> stream1;
+        struct audio_config config = AUDIO_CONFIG_INITIALIZER;
+        config.sample_rate = 48000;
+        config.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+        config.format = AUDIO_FORMAT_PCM_16_BIT;
+        ASSERT_EQ(OK, mDevice->openOutputStream(42 /*handle*/, AUDIO_DEVICE_OUT_SPEAKER,
+                                                AUDIO_OUTPUT_FLAG_NONE, &config, "" /*address*/,
+                                                &stream1));
+        ASSERT_EQ(1, stream1->getStrongCount());
+        std::atomic<bool> stopReleaser = false;
+        // Try to catch the moment when Hal2AidlMapper promotes its wp<StreamHalInterface> to sp<>
+        // in Hal2AidlMapper::resetUnusedPatchesAndPortConfigs, and release our reference at that
+        // moment so that Hal2AidlMapper becomes the sole owner via a temporary sp<> and the
+        // stream is destroyed while DeviceHalAidl::mLock is held.
+        std::thread releaser([&stream1, &stopReleaser]() {
+            while (!stopReleaser) {
+                if (stream1->getStrongCount() > 1) {
+                    stream1.clear();
+                    break;
+                }
+                std::this_thread::yield();
+            }
+        });
+        sp<StreamOutHalInterface> stream2;
+        // Opening another stream triggers a call to
+        // Hal2AidlMapper::resetUnusedPatchesAndPortConfigs.  It must not cause a deadlock of the
+        // test (main) thread.
+        ASSERT_EQ(OK, mDevice->openOutputStream(43 /*handle*/, AUDIO_DEVICE_OUT_SPEAKER,
+                                                AUDIO_OUTPUT_FLAG_NONE, &config, "" /*address*/,
+                                                &stream2));
+        stopReleaser = true;
+        releaser.join();
+    }
+}
+
 class DeviceHalAidlVendorParametersTest : public testing::Test {
   public:
     void SetUp() override {
@@ -830,9 +988,9 @@
         mVendorExt = ndk::SharedRefBase::make<TestHalAdapterVendorExtension>();
         struct audio_config config = AUDIO_CONFIG_INITIALIZER;
         ::aidl::android::hardware::audio::core::StreamDescriptor descriptor;
+        StreamContextAidl context(descriptor, false /*isAsynchronous*/, 0);
         mStream = sp<StreamHalAidl>::make("test", false /*isInput*/, config, 0 /*nominalLatency*/,
-                                          StreamContextAidl(descriptor, false /*isAsynchronous*/),
-                                          mStreamCommon, mVendorExt);
+                                          std::move(context), mStreamCommon, mVendorExt);
     }
     void TearDown() override {
         mStream.clear();
diff --git a/media/libaudioprocessing/AudioMixerOps.h b/media/libaudioprocessing/AudioMixerOps.h
index ab6a8b6..8f60d29 100644
--- a/media/libaudioprocessing/AudioMixerOps.h
+++ b/media/libaudioprocessing/AudioMixerOps.h
@@ -347,6 +347,7 @@
         [6] = AUDIO_CHANNEL_OUT_5POINT1,
         [7] = AUDIO_CHANNEL_OUT_6POINT1,
         [8] = AUDIO_CHANNEL_OUT_7POINT1,
+        [10] = AUDIO_CHANNEL_OUT_5POINT1POINT4,
         [12] = AUDIO_CHANNEL_OUT_7POINT1POINT4,
         [14] = AUDIO_CHANNEL_OUT_9POINT1POINT4,
         [16] = AUDIO_CHANNEL_OUT_9POINT1POINT6,
diff --git a/media/libaudioprocessing/tests/mixerops_tests.cpp b/media/libaudioprocessing/tests/mixerops_tests.cpp
index 2500ba9..235129f 100644
--- a/media/libaudioprocessing/tests/mixerops_tests.cpp
+++ b/media/libaudioprocessing/tests/mixerops_tests.cpp
@@ -154,6 +154,9 @@
 TEST(mixerops, stereovolume_8) {
     MixerOpsBasicTest<MIXTYPE_MULTI_STEREOVOL, 8>::testStereoVolume();
 }
+TEST(mixerops, stereovolume_10) {
+    MixerOpsBasicTest<MIXTYPE_MULTI_STEREOVOL, 10>::testStereoVolume();
+}
 TEST(mixerops, stereovolume_12) {
     if constexpr (FCC_LIMIT >= 12) { // NOTE: FCC_LIMIT is an enum, so can't #if
         MixerOpsBasicTest<MIXTYPE_MULTI_STEREOVOL, 12>::testStereoVolume();
diff --git a/media/libeffects/downmix/aidl/DownmixContext.cpp b/media/libeffects/downmix/aidl/DownmixContext.cpp
index 593e16f..3a55361 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.cpp
+++ b/media/libeffects/downmix/aidl/DownmixContext.cpp
@@ -100,11 +100,6 @@
     return RetCode::SUCCESS;
 }
 
-void DownmixContext::reset() {
-    disable();
-    resetBuffer();
-}
-
 IEffect::Status DownmixContext::downmixProcess(float* in, float* out, int samples) {
     IEffect::Status status = {EX_ILLEGAL_ARGUMENT, 0, 0};
 
diff --git a/media/libeffects/downmix/aidl/DownmixContext.h b/media/libeffects/downmix/aidl/DownmixContext.h
index a381d7f..1be1508 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.h
+++ b/media/libeffects/downmix/aidl/DownmixContext.h
@@ -32,9 +32,8 @@
   public:
     DownmixContext(int statusDepth, const Parameter::Common& common);
     ~DownmixContext();
-    RetCode enable();
-    RetCode disable();
-    void reset();
+    RetCode enable() override;
+    RetCode disable() override;
 
     RetCode setDmType(Downmix::Type type) {
         mType = type;
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
index 883d41d..10c7c4f 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.cpp
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -71,26 +71,6 @@
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus DownmixImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->reset();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 ndk::ScopedAStatus DownmixImpl::setParameterSpecific(const Parameter::Specific& specific) {
     RETURN_IF(Parameter::Specific::downmix != specific.getTag(), EX_ILLEGAL_ARGUMENT,
               "EffectNotSupported");
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.h b/media/libeffects/downmix/aidl/EffectDownmix.h
index b7d621a..cea6d1b 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.h
+++ b/media/libeffects/downmix/aidl/EffectDownmix.h
@@ -31,7 +31,6 @@
     DownmixImpl() = default;
     ~DownmixImpl() { cleanUp(); }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index 836e034..8324473 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -244,27 +244,6 @@
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus DynamicsProcessingImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            return ndk::ScopedAStatus::ok();
-        case CommandId::STOP:
-            mContext->disable();
-            return ndk::ScopedAStatus::ok();
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            return ndk::ScopedAStatus::ok();
-        default:
-            // Need this default handling for vendor extendable CommandId::VENDOR_COMMAND_*
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-}
-
 bool DynamicsProcessingImpl::isParamInRange(const Parameter::Specific& specific) {
     auto& dp = specific.get<Parameter::Specific::dynamicsProcessing>();
     return DynamicsProcessingRanges::isParamInRange(dp, kRanges);
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
index e850ba4..b34cdcf 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
@@ -36,7 +36,6 @@
     ndk::ScopedAStatus open(const Parameter::Common& common,
                             const std::optional<Parameter::Specific>& specific,
                             OpenEffectReturn* ret) override;
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index ada301b..dd14ac2 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -48,10 +48,12 @@
     return RetCode::SUCCESS;
 }
 
-void DynamicsProcessingContext::reset() {
+RetCode DynamicsProcessingContext::reset() {
     if (mDpFreq != nullptr) {
-        mDpFreq.reset();
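+        // Reset the engine's internal state instead of releasing the engine instance.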
+        mDpFreq->reset();
     }
+    mEngineInited = false;
+    return RetCode::SUCCESS;
 }
 
 RetCode DynamicsProcessingContext::setCommon(const Parameter::Common& common) {
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
index ce657db..15c6811 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
@@ -37,9 +37,9 @@
   public:
     DynamicsProcessingContext(int statusDepth, const Parameter::Common& common);
     ~DynamicsProcessingContext() = default;
-    RetCode enable();
-    RetCode disable();
-    void reset();
+    RetCode enable() override;
+    RetCode disable() override;
+    RetCode reset() override;
 
     // override EffectContext::setCommon to update mChannelCount
     RetCode setCommon(const Parameter::Common& common) override;
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
index b803ee4..55e07fb 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
@@ -70,26 +70,6 @@
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus HapticGeneratorImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->reset();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 ndk::ScopedAStatus HapticGeneratorImpl::setParameterSpecific(const Parameter::Specific& specific) {
     RETURN_IF(Parameter::Specific::hapticGenerator != specific.getTag(), EX_ILLEGAL_ARGUMENT,
               "EffectNotSupported");
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
index a775f06..8bae024 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
@@ -30,7 +30,6 @@
     HapticGeneratorImpl() = default;
     ~HapticGeneratorImpl() { cleanUp(); }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index 9b2f443..e040fec 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -71,7 +71,7 @@
     return RetCode::SUCCESS;
 }
 
-void HapticGeneratorContext::reset() {
+RetCode HapticGeneratorContext::reset() {
     for (auto& filter : mProcessorsRecord.filters) {
         filter->clear();
     }
@@ -81,6 +81,7 @@
     for (auto& distortion : mProcessorsRecord.distortions) {
         distortion->clear();
     }
+    return RetCode::SUCCESS;
 }
 
 RetCode HapticGeneratorContext::setHgHapticScales(
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
index 8a736e3..cf38e47 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
@@ -65,9 +65,9 @@
   public:
     HapticGeneratorContext(int statusDepth, const Parameter::Common& common);
     ~HapticGeneratorContext();
-    RetCode enable();
-    RetCode disable();
-    void reset();
+    RetCode enable() override;
+    RetCode disable() override;
+    RetCode reset() override;
 
     RetCode setHgHapticScales(const std::vector<HapticGenerator::HapticScale>& hapticScales);
     std::vector<HapticGenerator::HapticScale> getHgHapticScales() const;
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
index f89606e..592fd60 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
@@ -70,27 +70,6 @@
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus LoudnessEnhancerImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 ndk::ScopedAStatus LoudnessEnhancerImpl::setParameterSpecific(const Parameter::Specific& specific) {
     RETURN_IF(Parameter::Specific::loudnessEnhancer != specific.getTag(), EX_ILLEGAL_ARGUMENT,
               "EffectNotSupported");
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
index 98bdc6b..1e050f3 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
@@ -30,7 +30,6 @@
     LoudnessEnhancerImpl() = default;
     ~LoudnessEnhancerImpl() { cleanUp(); }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
index d8bcfc0..ac8b14a 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
@@ -43,17 +43,13 @@
     return RetCode::SUCCESS;
 }
 
-void LoudnessEnhancerContext::reset() {
-    float targetAmp = pow(10, mGain / 2000.0f);  // mB to linear amplification
+RetCode LoudnessEnhancerContext::setLeGain(int gainMb) {
+    float targetAmp = pow(10, gainMb / 2000.0f);  // mB to linear amplification
     if (mCompressor != nullptr) {
         // Get samplingRate from input
         mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
     }
-}
-
-RetCode LoudnessEnhancerContext::setLeGain(int gainMb) {
     mGain = gainMb;
-    reset();  // apply parameter update
     return RetCode::SUCCESS;
 }
 
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
index 192b212..67ccd24 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
@@ -34,9 +34,8 @@
     LoudnessEnhancerContext(int statusDepth, const Parameter::Common& common);
     ~LoudnessEnhancerContext() = default;
 
-    RetCode enable();
-    RetCode disable();
-    void reset();
+    RetCode enable() override;
+    RetCode disable() override;
 
     RetCode setLeGain(int gainMb);
     int getLeGain() const { return mGain; }
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index 044c8dd..e5ab40d 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -35,9 +35,9 @@
     void deInit();
     lvm::BundleEffectType getBundleType() const { return mType; }
 
-    RetCode enable();
+    RetCode enable() override;
     RetCode enableOperatingMode();
-    RetCode disable();
+    RetCode disable() override;
     RetCode disableOperatingMode();
 
     bool isDeviceSupportedBassBoost(
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
index 70c276d..2a81673 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
@@ -428,27 +428,6 @@
     return RetCode::SUCCESS;
 }
 
-ndk::ScopedAStatus EffectBundleAidl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 // Processing method running in EffectWorker thread.
 IEffect::Status EffectBundleAidl::effectProcessImpl(float* in, float* out, int sampleToProcess) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
index 429e941..479579b 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
@@ -49,8 +49,6 @@
     IEffect::Status effectProcessImpl(float* in, float* out, int samples)
             REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
-
     std::string getEffectName() override { return *mEffectName; }
 
   private:
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
index 4d369b1..201c659 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
@@ -361,27 +361,6 @@
     return RetCode::SUCCESS;
 }
 
-ndk::ScopedAStatus EffectReverb::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 // Processing method running in EffectWorker thread.
 IEffect::Status EffectReverb::effectProcessImpl(float* in, float* out, int sampleToProcess) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
index e0771a1..4acac1d 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
@@ -42,8 +42,6 @@
     IEffect::Status effectProcessImpl(float* in, float* out, int samples)
             REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
-
     std::string getEffectName() override { return *mEffectName; }
 
   private:
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
index 44391f2..f55eac5 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
@@ -51,8 +51,8 @@
     RetCode init();
     void deInit();
 
-    RetCode enable();
-    RetCode disable();
+    RetCode enable() override;
+    RetCode disable() override;
 
     bool isAuxiliary();
     bool isPreset();
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index 44b7d97..d791fab 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -55,8 +55,8 @@
     defaults: ["libaudiopreprocessing-defaults"],
     relative_install_path: "soundfx",
     srcs: ["PreProcessing.cpp"],
-    header_libs: [
-        "libwebrtc_absl_headers",
+    static_libs: [
+        "libabsl",
     ],
 }
 
@@ -77,6 +77,7 @@
         "libutils",
     ],
     static_libs: [
+        "libabsl",
         "webrtc_audio_processing",
     ],
     header_libs: [
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
index 87d267b..4bc34e7 100644
--- a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
@@ -417,27 +417,6 @@
     return RetCode::SUCCESS;
 }
 
-ndk::ScopedAStatus EffectPreProcessing::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 // Processing method running in EffectWorker thread.
 IEffect::Status EffectPreProcessing::effectProcessImpl(float* in, float* out, int sampleToProcess) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.h b/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
index 9ce5597..31f5737 100644
--- a/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
@@ -43,8 +43,6 @@
     IEffect::Status effectProcessImpl(float* in, float* out, int samples)
             REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
-
     std::string getEffectName() override { return *mEffectName; }
 
   private:
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.h b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
index 11a2bea..1b9b77b 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingContext.h
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
@@ -45,8 +45,8 @@
 
     PreProcessingEffectType getPreProcessingType() const { return mType; }
 
-    RetCode enable();
-    RetCode disable();
+    RetCode enable() override;
+    RetCode disable() override;
 
     RetCode setCommon(const Parameter::Common& common) override;
     void updateConfigs(const Parameter::Common& common);
diff --git a/media/libeffects/visualizer/aidl/Visualizer.cpp b/media/libeffects/visualizer/aidl/Visualizer.cpp
index 9b493d4..f4b9b25 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.cpp
+++ b/media/libeffects/visualizer/aidl/Visualizer.cpp
@@ -85,27 +85,6 @@
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus VisualizerImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 ndk::ScopedAStatus VisualizerImpl::setParameterSpecific(const Parameter::Specific& specific) {
     RETURN_IF(Parameter::Specific::visualizer != specific.getTag(), EX_ILLEGAL_ARGUMENT,
               "EffectNotSupported");
@@ -222,6 +201,7 @@
 RetCode VisualizerImpl::releaseContext() {
     if (mContext) {
         mContext->disable();
+        mContext->reset();
         mContext->resetBuffer();
     }
     return RetCode::SUCCESS;
diff --git a/media/libeffects/visualizer/aidl/Visualizer.h b/media/libeffects/visualizer/aidl/Visualizer.h
index 3180972..f25b78d 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.h
+++ b/media/libeffects/visualizer/aidl/Visualizer.h
@@ -32,7 +32,6 @@
     VisualizerImpl() = default;
     ~VisualizerImpl() { cleanUp(); }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.cpp b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
index 1e08674..a368e52 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.cpp
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
@@ -57,7 +57,7 @@
 #endif
     mChannelCount = channelCount;
     mCommon = common;
-    std::fill(mCaptureBuf.begin(), mCaptureBuf.end(), 0x80);
+    reset();
     return RetCode::SUCCESS;
 }
 
@@ -77,8 +77,9 @@
     return RetCode::SUCCESS;
 }
 
-void VisualizerContext::reset() {
+RetCode VisualizerContext::reset() {
     std::fill(mCaptureBuf.begin(), mCaptureBuf.end(), 0x80);
+    return RetCode::SUCCESS;
 }
 
 RetCode VisualizerContext::setCaptureSamples(int samples) {
@@ -109,7 +110,6 @@
     mDownstreamLatency = latency;
     return RetCode::SUCCESS;
 }
-
 int VisualizerContext::getDownstreamLatency() {
     return mDownstreamLatency;
 }
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.h b/media/libeffects/visualizer/aidl/VisualizerContext.h
index 9715e20..d4abbd3 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.h
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.h
@@ -36,10 +36,10 @@
 
     RetCode initParams(const Parameter::Common& common);
 
-    RetCode enable();
-    RetCode disable();
+    RetCode enable() override;
+    RetCode disable() override;
     // keep all parameters and reset buffer.
-    void reset();
+    RetCode reset() override;
 
     RetCode setCaptureSamples(int32_t captureSize);
     int32_t getCaptureSamples();
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index d4b4b6c..ee6ad00 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -255,6 +255,47 @@
 }
 
 cc_library_shared {
+    name: "libmedia_codeclist_capabilities",
+
+    srcs: [
+        "AudioCapabilities.cpp",
+        "CodecCapabilities.cpp",
+        "EncoderCapabilities.cpp",
+        "VideoCapabilities.cpp",
+        "CodecCapabilitiesUtils.cpp",
+    ],
+
+    local_include_dirs: [
+        "include",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
+
+cc_library_shared {
     name: "libmedia_codeclist",
 
     srcs: [
@@ -270,6 +311,7 @@
         "android.hardware.media.omx@1.0",
         "libbinder",
         "liblog",
+        "libmedia_codeclist_capabilities",
         "libstagefright_foundation",
         "libutils",
     ],
diff --git a/media/libmedia/AudioCapabilities.cpp b/media/libmedia/AudioCapabilities.cpp
new file mode 100644
index 0000000..dbbe9e8
--- /dev/null
+++ b/media/libmedia/AudioCapabilities.cpp
@@ -0,0 +1,399 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AudioCapabilities"
+
+#include <android-base/strings.h>
+#include <android-base/properties.h>
+
+#include <media/AudioCapabilities.h>
+#include <media/CodecCapabilities.h>
+#include <media/stagefright/MediaCodecConstants.h>
+
+namespace android {
+
+const Range<int32_t>& AudioCapabilities::getBitrateRange() const {
+    return mBitrateRange;
+}
+
+const std::vector<int32_t>& AudioCapabilities::getSupportedSampleRates() const {
+    return mSampleRates;
+}
+
+const std::vector<Range<int32_t>>&
+        AudioCapabilities::getSupportedSampleRateRanges() const {
+    return mSampleRateRanges;
+}
+
+int32_t AudioCapabilities::getMaxInputChannelCount() const {
+    int32_t overallMax = 0;
+    for (int i = mInputChannelRanges.size() - 1; i >= 0; i--) {
+        int32_t lmax = mInputChannelRanges[i].upper();
+        if (lmax > overallMax) {
+            overallMax = lmax;
+        }
+    }
+    return overallMax;
+}
+
+int32_t AudioCapabilities::getMinInputChannelCount() const {
+    int32_t overallMin = MAX_INPUT_CHANNEL_COUNT;
+    for (int i = mInputChannelRanges.size() - 1; i >= 0; i--) {
+        int32_t lmin = mInputChannelRanges[i].lower();
+        if (lmin < overallMin) {
+            overallMin = lmin;
+        }
+    }
+    return overallMin;
+}
+
+const std::vector<Range<int32_t>>&
+        AudioCapabilities::getInputChannelCountRanges() const {
+    return mInputChannelRanges;
+}
+
+// static
+std::shared_ptr<AudioCapabilities> AudioCapabilities::Create(std::string mediaType,
+        std::vector<ProfileLevel> profLevs, const sp<AMessage> &format) {
+    std::shared_ptr<AudioCapabilities> caps(new AudioCapabilities());
+    caps->init(mediaType, profLevs, format);
+    return caps;
+}
+
+void AudioCapabilities::init(std::string mediaType, std::vector<ProfileLevel> profLevs,
+        const sp<AMessage> &format) {
+    mMediaType = mediaType;
+    mProfileLevels = profLevs;
+    mError = 0;
+
+    initWithPlatformLimits();
+    applyLevelLimits();
+    parseFromInfo(format);
+}
+
+void AudioCapabilities::initWithPlatformLimits() {
+    mBitrateRange = Range<int32_t>(0, INT32_MAX);
+    mInputChannelRanges.push_back(Range<int32_t>(1, MAX_INPUT_CHANNEL_COUNT));
+
+    const int32_t minSampleRate = base::GetIntProperty("ro.mediacodec.min_sample_rate", 7350);
+    const int32_t maxSampleRate = base::GetIntProperty("ro.mediacodec.max_sample_rate", 192000);
+    mSampleRateRanges.push_back(Range<int32_t>(minSampleRate, maxSampleRate));
+}
+
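+// An absent optional means "don't care"; a present value must fall inside at least
+// one of the corresponding supported ranges.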
+bool AudioCapabilities::supports(std::optional<int32_t> sampleRate,
+        std::optional<int32_t> inputChannels) {
+    // channels and sample rates are checked orthogonally
+    if (inputChannels
+            && !std::any_of(mInputChannelRanges.begin(), mInputChannelRanges.end(),
+            [inputChannels](const Range<int32_t> &a) {
+                    return a.contains(inputChannels.value()); })) {
+        return false;
+    }
+    if (sampleRate
+            && !std::any_of(mSampleRateRanges.begin(), mSampleRateRanges.end(),
+            [sampleRate](const Range<int32_t> &a) { return a.contains(sampleRate.value()); })) {
+        return false;
+    }
+    return true;
+}
+
+bool AudioCapabilities::isSampleRateSupported(int32_t sampleRate) {
+    return supports(std::make_optional<int32_t>(sampleRate), std::nullopt);
+}
+
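+// Restrict the supported rates to the given discrete values (keeping only rates that
+// are already supported), then rebuild the discrete sample-rate list.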
+void AudioCapabilities::limitSampleRates(std::vector<int32_t> rates) {
+    std::vector<Range<int32_t>> sampleRateRanges;
+    std::sort(rates.begin(), rates.end());
+    for (int32_t rate : rates) {
+        if (supports(std::make_optional<int32_t>(rate), std::nullopt /* channels */)) {
+            sampleRateRanges.push_back(Range<int32_t>(rate, rate));
+        }
+    }
+    mSampleRateRanges = intersectSortedDistinctRanges(mSampleRateRanges, sampleRateRanges);
+    createDiscreteSampleRates();
+}
+
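+// Rebuild mSampleRates from the range list; callers ensure each range holds a single
+// discrete rate, so the lower bound is the rate itself.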
+void AudioCapabilities::createDiscreteSampleRates() {
+    mSampleRates.clear();
+    for (int i = 0; i < mSampleRateRanges.size(); i++) {
+        mSampleRates.push_back(mSampleRateRanges[i].lower());
+    }
+}
+
+void AudioCapabilities::limitSampleRates(std::vector<Range<int32_t>> rateRanges) {
+    sortDistinctRanges(&rateRanges);
+    mSampleRateRanges = intersectSortedDistinctRanges(mSampleRateRanges, rateRanges);
+    // check if all values are discrete
+    for (Range<int32_t> range: mSampleRateRanges) {
+        if (range.lower() != range.upper()) {
+            mSampleRates.clear();
+            return;
+        }
+    }
+    createDiscreteSampleRates();
+}
+
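+// Apply per-media-type defaults: known sample rates, bitrate ranges and channel limits,
+// consulting the advertised profiles for the DTS variants.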
+void AudioCapabilities::applyLevelLimits() {
+    std::vector<int32_t> sampleRates;
+    std::optional<Range<int32_t>> sampleRateRange;
+    std::optional<Range<int32_t>> bitRates;
+    int32_t maxChannels = MAX_INPUT_CHANNEL_COUNT;
+
+    // const char *mediaType = mMediaType.c_str();
+    if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_MPEG)) {
+        sampleRates = {
+                8000, 11025, 12000,
+                16000, 22050, 24000,
+                32000, 44100, 48000 };
+        bitRates = Range<int32_t>(8000, 320000);
+        maxChannels = 2;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AMR_NB)) {
+        sampleRates = { 8000 };
+        bitRates = Range<int32_t>(4750, 12200);
+        maxChannels = 1;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AMR_WB)) {
+        sampleRates = { 16000 };
+        bitRates = Range<int32_t>(6600, 23850);
+        maxChannels = 1;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AAC)) {
+        sampleRates = {
+                7350, 8000,
+                11025, 12000, 16000,
+                22050, 24000, 32000,
+                44100, 48000, 64000,
+                88200, 96000 };
+        bitRates = Range<int32_t>(8000, 510000);
+        maxChannels = 48;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_VORBIS)) {
+        bitRates = Range<int32_t>(32000, 500000);
+        sampleRateRange = Range<int32_t>(8000, 192000);
+        maxChannels = 255;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_OPUS)) {
+        bitRates = Range<int32_t>(6000, 510000);
+        sampleRates = { 8000, 12000, 16000, 24000, 48000 };
+        maxChannels = 255;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_RAW)) {
+        sampleRateRange = Range<int32_t>(1, 192000);
+        bitRates = Range<int32_t>(1, 10000000);
+        maxChannels = MAX_NUM_CHANNELS;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_FLAC)) {
+        sampleRateRange = Range<int32_t>(1, 655350);
+        // lossless codec, so bitrate is ignored
+        maxChannels = 255;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_G711_ALAW)
+            || base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_G711_MLAW)) {
+        sampleRates = { 8000 };
+        bitRates = Range<int32_t>(64000, 64000);
+        // platform allows multiple channels for this format
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_MSGSM)) {
+        sampleRates = { 8000 };
+        bitRates = Range<int32_t>(13000, 13000);
+        maxChannels = 1;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AC3)) {
+        maxChannels = 6;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_EAC3)) {
+        maxChannels = 16;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_EAC3_JOC)) {
+        sampleRates = { 48000 };
+        bitRates = Range<int32_t>(32000, 6144000);
+        maxChannels = 16;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AC4)) {
+        sampleRates = { 44100, 48000, 96000, 192000 };
+        bitRates = Range<int32_t>(16000, 2688000);
+        maxChannels = 24;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS)) {
+        sampleRates = { 44100, 48000 };
+        bitRates = Range<int32_t>(96000, 1524000);
+        maxChannels = 6;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS_HD)) {
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            switch (profileLevel.mProfile) {
+                case DTS_HDProfileLBR:
+                    sampleRates = { 22050, 24000, 44100, 48000 };
+                    bitRates = Range<int32_t>(32000, 768000);
+                    break;
+                case DTS_HDProfileHRA:
+                case DTS_HDProfileMA:
+                    sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+                    bitRates = Range<int32_t>(96000, 24500000);
+                    break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile,
+                            mMediaType.c_str());
+                    mError |= ERROR_CAPABILITIES_UNRECOGNIZED;
+                    sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+                    bitRates = Range<int32_t>(96000, 24500000);
+            }
+        }
+        maxChannels = 8;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS_UHD)) {
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            switch (profileLevel.mProfile) {
+                case DTS_UHDProfileP2:
+                    sampleRates = { 48000 };
+                    bitRates = Range<int32_t>(96000, 768000);
+                    maxChannels = 10;
+                    break;
+                case DTS_UHDProfileP1:
+                    sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+                    bitRates = Range<int32_t>(96000, 24500000);
+                    maxChannels = 32;
+                    break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile,
+                            mMediaType.c_str());
+                    mError |= ERROR_CAPABILITIES_UNRECOGNIZED;
+                    sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+                    bitRates = Range<int32_t>(96000, 24500000);
+                    maxChannels = 32;
+            }
+        }
+    } else {
+        ALOGW("Unsupported mediaType %s", mMediaType.c_str());
+        mError |= ERROR_CAPABILITIES_UNSUPPORTED;
+    }
+
+    // restrict ranges
+    if (!sampleRates.empty()) {
+        limitSampleRates(sampleRates);
+    } else if (sampleRateRange) {
+        std::vector<Range<int32_t>> rateRanges = { sampleRateRange.value() };
+        limitSampleRates(rateRanges);
+    }
+
+    Range<int32_t> channelRange = Range<int32_t>(1, maxChannels);
+    std::vector<Range<int32_t>> inputChannels = { channelRange };
+    applyLimits(inputChannels, bitRates);
+}
+
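+// Clamp the requested channel ranges to [1, MAX_INPUT_CHANNEL_COUNT], intersect them with
+// the current ranges, and narrow the bitrate range when one is provided.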
+void AudioCapabilities::applyLimits(
+        const std::vector<Range<int32_t>> &inputChannels,
+        const std::optional<Range<int32_t>> &bitRates) {
+    // clamp & make a local copy
+    std::vector<Range<int32_t>> inputChannelsCopy(inputChannels.size());
+    for (int i = 0; i < inputChannels.size(); i++) {
+        int32_t lower = inputChannels[i].clamp(1);
+        int32_t upper = inputChannels[i].clamp(MAX_INPUT_CHANNEL_COUNT);
+        inputChannelsCopy[i] = Range<int32_t>(lower, upper);
+    }
+
+    // sort, intersect with existing, & save channel list
+    sortDistinctRanges(&inputChannelsCopy);
+    mInputChannelRanges = intersectSortedDistinctRanges(inputChannelsCopy, mInputChannelRanges);
+
+    if (bitRates) {
+        mBitrateRange = mBitrateRange.intersect(bitRates.value());
+    }
+}
+
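+// Refine the limits with the codec's own capability info: "sample-rate-ranges",
+// "channel-ranges"/"channel-range"/"max-channel-count" and "bitrate-range".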
+void AudioCapabilities::parseFromInfo(const sp<AMessage> &format) {
+    int32_t maxInputChannels = MAX_INPUT_CHANNEL_COUNT;
+    std::vector<Range<int32_t>> channels = { Range<int32_t>(1, maxInputChannels) };
+    std::optional<Range<int32_t>> bitRates = POSITIVE_INT32;
+
+    AString rateAString;
+    if (format->findString("sample-rate-ranges", &rateAString)) {
+        std::vector<std::string> rateStrings = base::Split(std::string(rateAString.c_str()), ",");
+        std::vector<Range<int32_t>> rateRanges;
+        for (std::string rateString : rateStrings) {
+            std::optional<Range<int32_t>> rateRange = Range<int32_t>::Parse(rateString);
+            if (!rateRange) {
+                continue;
+            }
+            rateRanges.push_back(rateRange.value());
+        }
+        limitSampleRates(rateRanges);
+    }
+
+    // we will prefer channel-ranges over max-channel-count
+    AString valueStr;
+    if (format->findString("channel-ranges", &valueStr)) {
+        std::vector<std::string> channelStrings = base::Split(std::string(valueStr.c_str()), ",");
+        std::vector<Range<int32_t>> channelRanges;
+        for (std::string channelString : channelStrings) {
+            std::optional<Range<int32_t>> channelRange = Range<int32_t>::Parse(channelString);
+            if (!channelRange) {
+                continue;
+            }
+            channelRanges.push_back(channelRange.value());
+        }
+        channels = channelRanges;
+    } else if (format->findString("channel-range", &valueStr)) {
+        std::optional<Range<int32_t>> oneRange
+                = Range<int32_t>::Parse(std::string(valueStr.c_str()));
+        if (oneRange) {
+            channels = { oneRange.value() };
+        }
+    } else if (format->findString("max-channel-count", &valueStr)) {
+        maxInputChannels = std::atoi(valueStr.c_str());
+        if (maxInputChannels == 0) {
+            channels = { Range<int32_t>(0, 0) };
+        } else {
+            channels = { Range<int32_t>(1, maxInputChannels) };
+        }
+    } else if ((mError & ERROR_CAPABILITIES_UNSUPPORTED) != 0) {
+        maxInputChannels = 0;
+        channels = { Range<int32_t>(0, 0) };
+    }
+
+    if (format->findString("bitrate-range", &valueStr)) {
+        std::optional<Range<int32_t>> parsedBitrate = Range<int32_t>::Parse(valueStr.c_str());
+        if (parsedBitrate) {
+            bitRates = bitRates.value().intersect(parsedBitrate.value());
+        }
+    }
+
+    applyLimits(channels, bitRates);
+}
+
+void AudioCapabilities::getDefaultFormat(sp<AMessage> &format) {
+    // report settings that have only a single choice
+    if (mBitrateRange.lower() == mBitrateRange.upper()) {
+        format->setInt32(KEY_BIT_RATE, mBitrateRange.lower());
+    }
+    if (getMaxInputChannelCount() == 1) {
+        // mono-only format
+        format->setInt32(KEY_CHANNEL_COUNT, 1);
+    }
+    if (!mSampleRates.empty() && mSampleRates.size() == 1) {
+        format->setInt32(KEY_SAMPLE_RATE, mSampleRates[0]);
+    }
+}
+
+bool AudioCapabilities::supportsFormat(const sp<AMessage> &format) {
+    int32_t sampleRateValue;
+    std::optional<int32_t> sampleRate = format->findInt32(KEY_SAMPLE_RATE, &sampleRateValue)
+            ? std::make_optional<int32_t>(sampleRateValue) : std::nullopt;
+    int32_t channelsValue;
+    std::optional<int32_t> channels = format->findInt32(KEY_CHANNEL_COUNT, &channelsValue)
+            ? std::make_optional<int32_t>(channelsValue) : std::nullopt;
+
+    if (!supports(sampleRate, channels)) {
+        return false;
+    }
+
+    if (!CodecCapabilities::SupportsBitrate(mBitrateRange, format)) {
+        return false;
+    }
+
+    // nothing to do for:
+    // KEY_CHANNEL_MASK: codecs don't get this
+    // KEY_IS_ADTS:      required feature for all AAC decoders
+    return true;
+}
+
+}  // namespace android
\ No newline at end of file
diff --git a/media/libmedia/CodecCapabilities.cpp b/media/libmedia/CodecCapabilities.cpp
new file mode 100644
index 0000000..407d376
--- /dev/null
+++ b/media/libmedia/CodecCapabilities.cpp
@@ -0,0 +1,442 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecCapabilities"
+
+#include <android-base/strings.h>
+#include <utils/Log.h>
+#include <media/CodecCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+static const int32_t HEVCHighTierLevels =
+        HEVCHighTierLevel1 | HEVCHighTierLevel2 | HEVCHighTierLevel21 | HEVCHighTierLevel3 |
+        HEVCHighTierLevel31 | HEVCHighTierLevel4 | HEVCHighTierLevel41 | HEVCHighTierLevel5 |
+        HEVCHighTierLevel51 | HEVCHighTierLevel52 | HEVCHighTierLevel6 | HEVCHighTierLevel61 |
+        HEVCHighTierLevel62;
+
+static const int32_t DEFAULT_MAX_SUPPORTED_INSTANCES = 32;
+static const int32_t MAX_SUPPORTED_INSTANCES_LIMIT = 256;
+
+// must not contain KEY_PROFILE
+static const std::set<std::pair<std::string, AMessage::Type>> AUDIO_LEVEL_CRITICAL_FORMAT_KEYS = {
+    // We don't set level-specific limits for audio codecs today. Key candidates would
+    // be sample rate, bit rate or channel count.
+    // MediaFormat.KEY_SAMPLE_RATE,
+    // MediaFormat.KEY_CHANNEL_COUNT,
+    // MediaFormat.KEY_BIT_RATE,
+    { KEY_MIME, AMessage::kTypeString }
+};
+
+// CodecCapabilities Features
+static const std::vector<Feature> DECODER_FEATURES = {
+    Feature(FEATURE_AdaptivePlayback, (1 << 0), true),
+    Feature(FEATURE_SecurePlayback,   (1 << 1), false),
+    Feature(FEATURE_TunneledPlayback, (1 << 2), false),
+    Feature(FEATURE_PartialFrame,     (1 << 3), false),
+    Feature(FEATURE_FrameParsing,     (1 << 4), false),
+    Feature(FEATURE_MultipleFrames,   (1 << 5), false),
+    Feature(FEATURE_DynamicTimestamp, (1 << 6), false),
+    Feature(FEATURE_LowLatency,       (1 << 7), true),
+    // feature to exclude codec from REGULAR codec list
+    Feature(FEATURE_SpecialCodec,     (1 << 30), false, true),
+};
+static const std::vector<Feature> ENCODER_FEATURES = {
+    Feature(FEATURE_IntraRefresh, (1 << 0), false),
+    Feature(FEATURE_MultipleFrames, (1 << 1), false),
+    Feature(FEATURE_DynamicTimestamp, (1 << 2), false),
+    Feature(FEATURE_QpBounds, (1 << 3), false),
+    Feature(FEATURE_EncodingStatistics, (1 << 4), false),
+    Feature(FEATURE_HdrEditing, (1 << 5), false),
+    // feature to exclude codec from REGULAR codec list
+    Feature(FEATURE_SpecialCodec,     (1 << 30), false, true),
+};
+
+// must not contain KEY_PROFILE
+static const std::set<std::pair<std::string, AMessage::Type>> VIDEO_LEVEL_CRITICAL_FORMAT_KEYS = {
+    { KEY_WIDTH, AMessage::kTypeInt32 },
+    { KEY_HEIGHT, AMessage::kTypeInt32 },
+    { KEY_FRAME_RATE, AMessage::kTypeInt32 },
+    { KEY_BIT_RATE, AMessage::kTypeInt32 },
+    { KEY_MIME, AMessage::kTypeString }
+};
+
+bool CodecCapabilities::SupportsBitrate(Range<int32_t> bitrateRange,
+        const sp<AMessage> &format) {
+    // consider max bitrate over average bitrate for support
+    int32_t maxBitrate = 0;
+    format->findInt32(KEY_MAX_BIT_RATE, &maxBitrate);
+    int32_t bitrate = 0;
+    format->findInt32(KEY_BIT_RATE, &bitrate);
+
+    if (bitrate == 0) {
+        bitrate = maxBitrate;
+    } else if (maxBitrate != 0) {
+        bitrate = std::max(bitrate, maxBitrate);
+    }
+
+    if (bitrate > 0) {
+        return bitrateRange.contains(bitrate);
+    }
+
+    return true;
+}
+
+bool CodecCapabilities::isFeatureSupported(const std::string &name) const {
+    return mFeaturesSupported.contains(name);
+}
+
+bool CodecCapabilities::isFeatureRequired(const std::string &name) const {
+    return mFeaturesRequired.contains(name);
+}
+
+std::vector<std::string> CodecCapabilities::validFeatures() const {
+    std::vector<std::string> res;
+    for (const Feature& feature : getValidFeatures()) {
+        if (!feature.mInternal) {
+            res.push_back(feature.mName);
+        }
+    }
+    return res;
+}
+
+std::vector<Feature> CodecCapabilities::getValidFeatures() const {
+    if (isEncoder()) {
+        return ENCODER_FEATURES;
+    } else {
+        return DECODER_FEATURES;
+    }
+}
+
+bool CodecCapabilities::isRegular() const {
+    // regular codecs only require default features
+    std::vector<Feature> features = getValidFeatures();
+    return std::all_of(features.begin(), features.end(),
+            [this](Feature feat){ return (feat.mDefault || !isFeatureRequired(feat.mName)); });
+}
+
+bool CodecCapabilities::isFormatSupported(const sp<AMessage> &format) const {
+    AString mediaType;
+    format->findString(KEY_MIME, &mediaType);
+    // mediaType must match if present
+    if (!base::EqualsIgnoreCase(mMediaType, mediaType.c_str())) {
+        return false;
+    }
+
+    // check feature support
+    for (Feature feat: getValidFeatures()) {
+        if (feat.mInternal) {
+            continue;
+        }
+
+        int32_t yesNo;
+        std::string key = KEY_FEATURE_;
+        key = key + feat.mName;
+        if (!format->findInt32(key.c_str(), &yesNo)) {  // skip features the format does not mention
+            continue;
+        }
+        if ((yesNo == 1 && !isFeatureSupported(feat.mName)) ||
+                (yesNo == 0 && isFeatureRequired(feat.mName))) {
+            return false;
+        }
+    }
+
+    int32_t profile;
+    if (format->findInt32(KEY_PROFILE, &profile)) {
+        int32_t level = -1;
+        format->findInt32(KEY_LEVEL, &level);
+        if (!supportsProfileLevel(profile, level)) {
+            return false;
+        }
+
+        // If we recognize this profile, check that this format is supported by the
+        // highest level supported by the codec for that profile. (Ignore specified
+        // level beyond the above profile/level check as level is only used as a
+        // guidance. E.g. AVC Level 1 CIF format is supported if codec supports level 1.1
+        // even though max size for Level 1 is QCIF. However, MPEG2 Simple Profile
+        // 1080p format is not supported even if codec supports Main Profile Level High,
+        // as Simple Profile does not support 1080p.
+        int32_t maxLevel = 0;
+        for (ProfileLevel pl : mProfileLevels) {
+            if (pl.mProfile == profile && pl.mLevel > maxLevel) {
+                // H.263 levels are not completely ordered:
+                // Level45 support only implies Level10 support
+                if (!base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_H263)
+                        || pl.mLevel != H263Level45
+                        || maxLevel == H263Level10) {
+                    maxLevel = pl.mLevel;
+                }
+            }
+        }
+        std::shared_ptr<CodecCapabilities> levelCaps
+                = CreateFromProfileLevel(mMediaType, profile, maxLevel);
+        // We must remove the profile from this format otherwise levelCaps.isFormatSupported
+        // will get into this same condition and loop forever. Furthermore, since levelCaps
+        // does not contain features and bitrate specific keys, keep only keys relevant for
+        // a level check.
+        sp<AMessage> levelCriticalFormat = new AMessage;
+
+        // critical keys will always contain KEY_MIME, but should also contain others to be
+        // meaningful
+        if ((isVideo() || isAudio()) && levelCaps != nullptr) {
+            const std::set<std::pair<std::string, AMessage::Type>> criticalKeys =
+                isVideo() ? VIDEO_LEVEL_CRITICAL_FORMAT_KEYS : AUDIO_LEVEL_CRITICAL_FORMAT_KEYS;
+            for (std::pair<std::string, AMessage::Type> key : criticalKeys) {
+                if (format->contains(key.first.c_str())) {
+                    // AMessage::ItemData value = format->findItem(key.c_str());
+                    // levelCriticalFormat->setItem(key.c_str(), value);
+                    switch (key.second) {
+                        case AMessage::kTypeInt32: {
+                            int32_t value;
+                            format->findInt32(key.first.c_str(), &value);
+                            levelCriticalFormat->setInt32(key.first.c_str(), value);
+                            break;
+                        }
+                        case AMessage::kTypeString: {
+                            AString value;
+                            format->findString(key.first.c_str(), &value);
+                            levelCriticalFormat->setString(key.first.c_str(), value);
+                            break;
+                        }
+                        default:
+                            ALOGE("Unsupported type");
+                    }
+                }
+            }
+            if (!levelCaps->isFormatSupported(levelCriticalFormat)) {
+                return false;
+            }
+        }
+    }
+    if (mAudioCaps && !mAudioCaps->supportsFormat(format)) {
+        return false;
+    }
+    if (mVideoCaps && !mVideoCaps->supportsFormat(format)) {
+        return false;
+    }
+    if (mEncoderCaps && !mEncoderCaps->supportsFormat(format)) {
+        return false;
+    }
+    return true;
+}
+
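+// Levels are ignored for AAC and the DTS family; H.263, MPEG-4 and HEVC (tiers) need
+// special handling because their levels are not totally ordered.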
+bool CodecCapabilities::supportsProfileLevel(int32_t profile, int32_t level) const {
+    for (ProfileLevel pl: mProfileLevels) {
+        if (pl.mProfile != profile) {
+            continue;
+        }
+
+        // No specific level requested
+        if (level == -1) {
+            return true;
+        }
+
+        // AAC doesn't use levels
+        if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AAC)) {
+            return true;
+        }
+
+        // DTS doesn't use levels
+        if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS)
+                || base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS_HD)
+                || base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS_UHD)) {
+            return true;
+        }
+
+        // H.263 levels are not completely ordered:
+        // Level45 support only implies Level10 support
+        if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_H263)) {
+            if (pl.mLevel != level && pl.mLevel == H263Level45
+                    && level > H263Level10) {
+                continue;
+            }
+        }
+
+        // MPEG4 levels are not completely ordered:
+        // Level1 support only implies Level0 (and not Level0b) support
+        if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_MPEG4)) {
+            if (pl.mLevel != level && pl.mLevel == MPEG4Level1
+                    && level > MPEG4Level0) {
+                continue;
+            }
+        }
+
+        // HEVC levels incorporate both tiers and levels. Verify tier support.
+        if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_HEVC)) {
+            bool supportsHighTier =
+                (pl.mLevel & HEVCHighTierLevels) != 0;
+            bool checkingHighTier = (level & HEVCHighTierLevels) != 0;
+            // high tier levels are only supported by other high tier levels
+            if (checkingHighTier && !supportsHighTier) {
+                continue;
+            }
+        }
+
+        if (pl.mLevel >= level) {
+            // if we recognize the listed profile/level, we must also recognize the
+            // profile/level arguments.
+            if (CreateFromProfileLevel(mMediaType, profile, pl.mLevel) != nullptr) {
+                return CreateFromProfileLevel(mMediaType, profile, level) != nullptr;
+            }
+            return true;
+        }
+    }
+    return false;
+}
+
+sp<AMessage> CodecCapabilities::getDefaultFormat() const {
+    return mDefaultFormat;
+}
+
+const std::string& CodecCapabilities::getMediaType() {
+    return mMediaType;
+}
+
+const std::vector<ProfileLevel>& CodecCapabilities::getProfileLevels() {
+    return mProfileLevels;
+}
+
+std::vector<uint32_t> CodecCapabilities::getColorFormats() const {
+    return mColorFormats;
+}
+
+int32_t CodecCapabilities::getMaxSupportedInstances() const {
+    return mMaxSupportedInstances;
+}
+
+bool CodecCapabilities::isAudio() const {
+    return mAudioCaps != nullptr;
+}
+
+std::shared_ptr<AudioCapabilities>
+        CodecCapabilities::getAudioCapabilities() const {
+    return mAudioCaps;
+}
+
+bool CodecCapabilities::isEncoder() const {
+    return mEncoderCaps != nullptr;
+}
+
+std::shared_ptr<EncoderCapabilities>
+        CodecCapabilities::getEncoderCapabilities() const {
+    return mEncoderCaps;
+}
+
+bool CodecCapabilities::isVideo() const {
+    return mVideoCaps != nullptr;
+}
+
+std::shared_ptr<VideoCapabilities> CodecCapabilities::getVideoCapabilities() const {
+    return mVideoCaps;
+}
+
+// static
+std::shared_ptr<CodecCapabilities> CodecCapabilities::CreateFromProfileLevel(
+        std::string mediaType, int32_t profile, int32_t level, int32_t maxConcurrentInstances) {
+    ProfileLevel pl;
+    pl.mProfile = profile;
+    pl.mLevel = level;
+    sp<AMessage> defaultFormat = new AMessage;
+    defaultFormat->setString(KEY_MIME, mediaType.c_str());
+
+    std::vector<ProfileLevel> pls;
+    pls.push_back(pl);
+    std::vector<uint32_t> colFmts;
+    sp<AMessage> capabilitiesInfo = new AMessage;
+    std::shared_ptr<CodecCapabilities> ret(new CodecCapabilities());
+    ret->init(pls, colFmts, true /* encoder */, defaultFormat, capabilitiesInfo,
+            maxConcurrentInstances);
+    if (ret->getErrors() != 0) {
+        return nullptr;
+    }
+    return ret;
+}
+
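+// Build the per-domain capabilities (audio/video/encoder), the default format, the
+// instance limit and the supported/required feature sets from the advertised info.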
+void CodecCapabilities::init(std::vector<ProfileLevel> profLevs, std::vector<uint32_t> colFmts,
+        bool encoder, sp<AMessage> &defaultFormat, sp<AMessage> &capabilitiesInfo,
+        int32_t maxConcurrentInstances) {
+    mColorFormats = colFmts;
+    mDefaultFormat = defaultFormat;
+    mCapabilitiesInfo = capabilitiesInfo;
+
+    AString mediaTypeAStr;
+    mDefaultFormat->findString(KEY_MIME, &mediaTypeAStr);
+    mMediaType = mediaTypeAStr.c_str();
+
+    /* VP9 introduced profiles around 2016, so some VP9 codecs may not advertise any
+       supported profiles. Determine the level for them using the info they provide. */
+    if (profLevs.size() == 0 && mMediaType == MIMETYPE_VIDEO_VP9) {
+        ProfileLevel profLev;
+        profLev.mProfile = VP9Profile0;
+        profLev.mLevel = VideoCapabilities::EquivalentVP9Level(capabilitiesInfo);
+        profLevs.push_back(profLev);
+    }
+    mProfileLevels = profLevs;
+
+    if (mediaTypeAStr.startsWithIgnoreCase("audio/")) {
+        mAudioCaps = AudioCapabilities::Create(mMediaType, profLevs, capabilitiesInfo);
+        mAudioCaps->getDefaultFormat(mDefaultFormat);
+    } else if (mediaTypeAStr.startsWithIgnoreCase("video/")
+            || mediaTypeAStr.equalsIgnoreCase(MIMETYPE_IMAGE_ANDROID_HEIC)) {
+        mVideoCaps = VideoCapabilities::Create(mMediaType, profLevs, capabilitiesInfo);
+    }
+
+    if (encoder) {
+        mEncoderCaps = EncoderCapabilities::Create(mMediaType, profLevs, capabilitiesInfo);
+        mEncoderCaps->getDefaultFormat(mDefaultFormat);
+    }
+
+    mMaxSupportedInstances = maxConcurrentInstances > 0
+            ? maxConcurrentInstances : DEFAULT_MAX_SUPPORTED_INSTANCES;
+
+    int32_t maxInstances = mMaxSupportedInstances;
+    capabilitiesInfo->findInt32("max-concurrent-instances", &maxInstances);
+    mMaxSupportedInstances =
+            Range(1, MAX_SUPPORTED_INSTANCES_LIMIT).clamp(maxInstances);
+
+    mFeaturesRequired.clear();
+    mFeaturesSupported.clear();
+    for (Feature feat: getValidFeatures()) {
+        std::string key = KEY_FEATURE_;
+        key = key + feat.mName;
+        int yesNo = -1;
+        if (!capabilitiesInfo->findInt32(key.c_str(), &yesNo)) {
+            continue;
+        }
+        if (yesNo > 0) {
+            mFeaturesRequired.insert(feat.mName);
+        }
+        mFeaturesSupported.insert(feat.mName);
+        if (!feat.mInternal) {
+            mDefaultFormat->setInt32(key.c_str(), 1);
+        }
+    }
+}
+
+int32_t CodecCapabilities::getErrors() const {
+    if (mAudioCaps) {
+        return mAudioCaps->mError;
+    } else if (mVideoCaps) {
+        return mVideoCaps->mError;
+    }
+    return 0;
+}
+
+}  // namespace android
\ No newline at end of file
diff --git a/media/libmedia/CodecCapabilitiesUtils.cpp b/media/libmedia/CodecCapabilitiesUtils.cpp
new file mode 100644
index 0000000..01bb24e
--- /dev/null
+++ b/media/libmedia/CodecCapabilitiesUtils.cpp
@@ -0,0 +1,161 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecCapabilitiesUtils"
+
+#include <android-base/properties.h>
+#include <utils/Log.h>
+
+#include <algorithm>
+#include <cmath>
+#include <cstdlib>
+#include <string>
+#include <vector>
+
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
+
+namespace android {
+
+// VideoSize
+
+VideoSize::VideoSize(int32_t width, int32_t height) : mWidth(width), mHeight(height) {}
+
+VideoSize::VideoSize() : mWidth(0), mHeight(0) {}
+
+int32_t VideoSize::getWidth() const { return mWidth; }
+
+int32_t VideoSize::getHeight() const { return mHeight; }
+
+bool VideoSize::equals(VideoSize other) const {
+    return mWidth == other.mWidth && mHeight == other.mHeight;
+}
+
+bool VideoSize::empty() const {
+    return mWidth <= 0 || mHeight <= 0;
+}
+
+std::string VideoSize::toString() const {
+    return std::to_string(mWidth) + "x" + std::to_string(mHeight);
+}
+
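+// Accepts "<width>x<height>" or "<width>*<height>"; returns std::nullopt for anything else.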
+std::optional<VideoSize> VideoSize::ParseSize(std::string str) {
+    if (str.empty()) {
+        return std::nullopt;
+    }
+
+    std::regex regex("([0-9]+)([*x])([0-9]+)");
+    std::smatch match;
+    if (std::regex_match(str, match, regex)) {
+        long int w = strtol(match[1].str().c_str(), NULL, 10);
+        long int h = strtol(match[3].str().c_str(), NULL, 10);
+        return std::make_optional(VideoSize(w, h));
+    } else {
+        ALOGW("could not parse size %s", str.c_str());
+        return std::nullopt;
+    }
+}
+
+std::optional<std::pair<VideoSize, VideoSize>> VideoSize::ParseSizeRange(const std::string str) {
+    size_t ix = str.find_first_of('-');
+    if (ix != std::string::npos) {
+        std::optional<VideoSize> lowerOpt = VideoSize::ParseSize(str.substr(0, ix));
+        std::optional<VideoSize> upperOpt = VideoSize::ParseSize(str.substr(ix + 1));
+        if (!lowerOpt || !upperOpt) {
+            return std::nullopt;
+        }
+        return std::make_optional(
+                std::pair<VideoSize, VideoSize>(lowerOpt.value(), upperOpt.value()));
+    } else {
+        std::optional<VideoSize> opt = VideoSize::ParseSize(str);
+        if (!opt) {
+            return std::nullopt;
+        }
+        return std::make_optional(std::pair<VideoSize, VideoSize>(opt.value(), opt.value()));
+    }
+}
+
+Range<int32_t> VideoSize::GetAllowedDimensionRange() {
+#ifdef __LP64__
+    return Range<int32_t>(1, 32768);
+#else
+    int32_t value = base::GetIntProperty("media.resolution.limit.32bit", (int32_t)4096);
+    return Range<int32_t>(1, value);
+#endif
+}
+
+// Rational
+
+std::optional<Rational> Rational::Parse(std::string str) {
+    if (str.compare("NaN") == 0) {
+        return std::make_optional(NaN);
+    } else if (str.compare("Infinity") == 0) {
+        return std::make_optional(POSITIVE_INFINITY);
+    } else if (str.compare("-Infinity") == 0) {
+        return std::make_optional(NEGATIVE_INFINITY);
+    }
+
+    std::regex regex("([0-9]+)([:/])([0-9]+)");
+    std::smatch match;
+    if (std::regex_match(str, match, regex)) {
+        long int numerator = strtol(match[1].str().c_str(), NULL, 10);
+        long int denominator = strtol(match[3].str().c_str(), NULL, 10);
+        return std::make_optional(Rational(numerator, denominator));
+    } else {
+        ALOGW("could not parse string: %s to Rational", str.c_str());
+        return std::nullopt;
+    }
+}
+
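+// Scale numerator and denominator by num/den after reducing the factor by its gcd.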
+Rational Rational::scale(int32_t num, int32_t den) {
+    int32_t common = std::gcd(num, den);
+    num /= common;
+    den /= common;
+    return Rational(
+            (int32_t)(mNumerator * (double)num),     // saturate to int
+            (int32_t)(mDenominator * (double)den));  // saturate to int
+}
+
+Range<Rational> Rational::ScaleRange(Range<Rational> range, int32_t num, int32_t den) {
+    if (num == den) {
+        return range;
+    }
+    return Range(
+            range.lower().scale(num, den),
+            range.upper().scale(num, den));
+}
+
+std::optional<Range<Rational>> Rational::ParseRange(const std::string str) {
+    size_t ix = str.find_first_of('-');
+    if (ix != std::string::npos) {
+        std::optional<Rational> lower = Parse(str.substr(0, ix));
+        std::optional<Rational> upper = Parse(str.substr(ix + 1));
+        if (!lower || !upper) {
+            return std::nullopt;
+        }
+        return std::make_optional<Range<Rational>>(lower.value(), upper.value());
+    } else {
+        std::optional<Rational> value = Parse(str);
+        if (!value) {
+            return std::nullopt;
+        }
+        return std::make_optional<Range<Rational>>(value.value(), value.value());
+    }
+}
+
+}  // namespace android
\ No newline at end of file
diff --git a/media/libmedia/EncoderCapabilities.cpp b/media/libmedia/EncoderCapabilities.cpp
new file mode 100644
index 0000000..a840220
--- /dev/null
+++ b/media/libmedia/EncoderCapabilities.cpp
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "EncoderCapabilities"
+
+#include <android-base/strings.h>
+
+#include <media/CodecCapabilities.h>
+#include <media/EncoderCapabilities.h>
+#include <media/stagefright/MediaCodecConstants.h>
+
+namespace android {
+
+const Range<int>& EncoderCapabilities::getQualityRange() {
+    return mQualityRange;
+}
+
+const Range<int>& EncoderCapabilities::getComplexityRange() {
+    return mComplexityRange;
+}
+
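+// Maps a bitrate-mode name (case-insensitive) to its integer constant; unrecognized names
+// map to 0.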
+// static
+int EncoderCapabilities::ParseBitrateMode(std::string mode) {
+    for (Feature feat: sBitrateModes) {
+        if (base::EqualsIgnoreCase(feat.mName, mode)) {
+            return feat.mValue;
+        }
+    }
+    return 0;
+}
+
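+// mBitControl is a bitmask indexed by bitrate-mode constant; a mode is supported when its
+// bit is set.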
+bool EncoderCapabilities::isBitrateModeSupported(int mode) {
+    for (Feature feat : sBitrateModes) {
+        if (mode == feat.mValue) {
+            return (mBitControl & (1 << mode)) != 0;
+        }
+    }
+    return false;
+}
+
+// static
+std::shared_ptr<EncoderCapabilities> EncoderCapabilities::Create(std::string mediaType,
+        std::vector<ProfileLevel> profLevs, const sp<AMessage> &format) {
+    std::shared_ptr<EncoderCapabilities> caps(new EncoderCapabilities());
+    caps->init(mediaType, profLevs, format);
+    return caps;
+}
+
+void EncoderCapabilities::init(std::string mediaType, std::vector<ProfileLevel> profLevs,
+        const sp<AMessage> &format) {
+    // no support for complexity or quality yet
+    mMediaType = mediaType;
+    mProfileLevels = profLevs;
+    mComplexityRange = Range(0, 0);
+    mQualityRange = Range(0, 0);
+    mBitControl = (1 << BITRATE_MODE_VBR);
+
+    applyLevelLimits();
+    parseFromInfo(format);
+}
+
+void EncoderCapabilities::applyLevelLimits() {
+    if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_FLAC)) {
+        mComplexityRange = Range(0, 8);
+        mBitControl = (1 << BITRATE_MODE_CQ);
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AMR_NB)
+            || base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AMR_WB)
+            || base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_G711_ALAW)
+            || base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_G711_MLAW)
+            || base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_MSGSM)) {
+        mBitControl = (1 << BITRATE_MODE_CBR);
+    }
+}
+
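+// Reads the encoder keys published in the codec info: "complexity-range", "quality-range",
+// "feature-bitrate-modes" (comma-separated), "complexity-default", "quality-default" and
+// "quality-scale".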
+void EncoderCapabilities::parseFromInfo(const sp<AMessage> &format) {
+    AString complexityRangeAStr;
+    if (format->findString("complexity-range", &complexityRangeAStr)) {
+        std::optional<Range<int>> complexityRangeOpt
+                = Range<int32_t>::Parse(std::string(complexityRangeAStr.c_str()));
+        mComplexityRange = complexityRangeOpt.value_or(mComplexityRange);
+        // TODO should we limit this to level limits?
+    }
+    AString qualityRangeAStr;
+    if (format->findString("quality-range", &qualityRangeAStr)) {
+        std::optional<Range<int>> qualityRangeOpt
+                = Range<int32_t>::Parse(std::string(qualityRangeAStr.c_str()));
+        mQualityRange = qualityRangeOpt.value_or(mQualityRange);
+    }
+    AString bitrateModesAStr;
+    if (format->findString("feature-bitrate-modes", &bitrateModesAStr)) {
+        mBitControl = 0;
+        for (std::string mode: base::Split(std::string(bitrateModesAStr.c_str()), ",")) {
+            mBitControl |= (1 << ParseBitrateMode(mode));
+        }
+    }
+    format->findInt32("complexity-default", &mDefaultComplexity);
+    format->findInt32("quality-default", &mDefaultQuality);
+    AString qualityScaleAStr;
+    if (format->findString("quality-scale", &qualityScaleAStr)) {
+        mQualityScale = std::string(qualityScaleAStr.c_str());
+    }
+}
+
+bool EncoderCapabilities::supports(
+        std::optional<int> complexity, std::optional<int> quality, std::optional<int> profile) {
+    bool ok = true;
+    if (complexity) {
+        ok &= mComplexityRange.contains(complexity.value());
+    }
+    if (quality) {
+        ok &= mQualityRange.contains(quality.value());
+    }
+    if (profile) {
+        ok &= std::any_of(mProfileLevels.begin(), mProfileLevels.end(),
+                [&profile](ProfileLevel pl){ return pl.mProfile == profile.value(); });
+    }
+    return ok;
+}
+
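+// Fills in non-trivial default quality/complexity values and the most preferred supported
+// bitrate mode.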
+void EncoderCapabilities::getDefaultFormat(sp<AMessage> &format) {
+    // don't list trivial quality/complexity as default for now
+    if (mQualityRange.upper() != mQualityRange.lower()
+            && mDefaultQuality != 0) {
+        format->setInt32(KEY_QUALITY, mDefaultQuality);
+    }
+    if (mComplexityRange.upper() != mComplexityRange.lower()
+            && mDefaultComplexity != 0) {
+        format->setInt32(KEY_COMPLEXITY, mDefaultComplexity);
+    }
+    // bitrates are listed in order of preference
+    for (Feature feat : sBitrateModes) {
+        if ((mBitControl & (1 << feat.mValue)) != 0) {
+            format->setInt32(KEY_BITRATE_MODE, feat.mValue);
+            break;
+        }
+    }
+}
+
+bool EncoderCapabilities::supportsFormat(const sp<AMessage> &format) {
+    int32_t mode;
+    if (format->findInt32(KEY_BITRATE_MODE, &mode) && !isBitrateModeSupported(mode)) {
+        return false;
+    }
+
+    int tmp;
+    std::optional<int> complexity = std::nullopt;
+    if (format->findInt32(KEY_COMPLEXITY, &tmp)) {
+        complexity = tmp;
+    }
+
+    if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_FLAC)) {
+        int flacComplexity;
+        if (format->findInt32(KEY_FLAC_COMPRESSION_LEVEL, &flacComplexity)) {
+            if (!complexity) {
+                complexity = flacComplexity;
+            } else if (flacComplexity != complexity.value()) {
+                ALOGE("Conflicting values for complexity and flac-compression-level,"
+                        " which are %d and %d", complexity.value(), flacComplexity);
+                return false;
+            }
+        }
+    }
+
+    // other audio parameters
+    std::optional<int> profile = std::nullopt;
+    if (format->findInt32(KEY_PROFILE, &tmp)) {
+        profile = tmp;
+    }
+
+    if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AAC)) {
+        int aacProfile;
+        if (format->findInt32(KEY_AAC_PROFILE, &aacProfile)) {
+            if (!profile) {
+                profile = aacProfile;
+            } else if (aacProfile != profile.value()) {
+                ALOGE("Conflicting values for profile and aac-profile, which are %d and %d",
+                        profile.value(), aacProfile);
+                return false;
+            }
+        }
+    }
+
+    std::optional<int> quality = std::nullopt;
+    if (format->findInt32(KEY_QUALITY, &tmp)) {
+        quality = tmp;
+    }
+
+    return supports(complexity, quality, profile);
+}
+
+}  // namespace android
\ No newline at end of file
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
index d5d1a09..3834278 100644
--- a/media/libmedia/MediaCodecInfo.cpp
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -26,6 +26,9 @@
 
 namespace android {
 
+// initialize max supported instances with a default value.
+int32_t MediaCodecInfo::sMaxSupportedInstances = 0;
+
 /** This redundant redeclaration is needed for C++ pre 14 */
 constexpr char MediaCodecInfo::Capabilities::FEATURE_ADAPTIVE_PLAYBACK[];
 constexpr char MediaCodecInfo::Capabilities::FEATURE_DYNAMIC_TIMESTAMP[];
@@ -169,6 +172,15 @@
     return NULL;
 }
 
+const std::shared_ptr<CodecCapabilities> MediaCodecInfo::getCodecCapsFor(
+        const char *mediaType) const {
+    ssize_t ix = getCodecCapIndex(mediaType);
+    if (ix >= 0) {
+        return mCodecCaps.valueAt(ix);
+    }
+    return nullptr;
+}
+
 const char *MediaCodecInfo::getCodecName() const {
     return mName.c_str();
 }
@@ -179,6 +191,7 @@
 
 // static
 sp<MediaCodecInfo> MediaCodecInfo::FromParcel(const Parcel &parcel) {
+    sMaxSupportedInstances = parcel.readInt32();
     AString name = AString::FromParcel(parcel);
     AString owner = AString::FromParcel(parcel);
     Attributes attributes = static_cast<Attributes>(parcel.readInt32());
@@ -201,12 +214,17 @@
             return NULL;
         if (info != NULL) {
             info->mCaps.add(mediaType, caps);
+            std::shared_ptr<CodecCapabilities> codecCaps
+                    = MediaCodecInfoWriter::BuildCodecCapabilities(
+                            mediaType.c_str(), caps, info->isEncoder());
+            info->mCodecCaps.add(mediaType, codecCaps);
         }
     }
     return info;
 }
 
 status_t MediaCodecInfo::writeToParcel(Parcel *parcel) const {
+    parcel->writeInt32(sMaxSupportedInstances);
     mName.writeToParcel(parcel);
     mOwner.writeToParcel(parcel);
     parcel->writeInt32(mAttributes);
@@ -234,6 +252,25 @@
     return -1;
 }
 
+ssize_t MediaCodecInfo::getCodecCapIndex(const char *mediaType) const {
+    if (mediaType == nullptr) {
+        return -1;
+    }
+
+    if (mCodecCaps.size() != mCaps.size()) {
+        ALOGE("Size of mCodecCaps and mCaps do not match, which are %zu and %zu",
+                mCodecCaps.size(), mCaps.size());
+    }
+
+    for (size_t ix = 0; ix < mCodecCaps.size(); ix++) {
+        if (mCodecCaps.keyAt(ix).equalsIgnoreCase(mediaType)) {
+            return ix;
+        }
+    }
+
+    return -1;
+}
+
 MediaCodecInfo::MediaCodecInfo()
     : mAttributes((MediaCodecInfo::Attributes)0),
       mRank(0x100) {
@@ -283,6 +320,52 @@
     return false;
 }
 
+void MediaCodecInfoWriter::createCodecCaps() {
+    mInfo->mCodecCaps.clear();
+    for (size_t ix = 0; ix < mInfo->mCaps.size(); ix++) {
+        AString mediaType = mInfo->mCaps.keyAt(ix);
+        sp<MediaCodecInfo::Capabilities> caps = mInfo->mCaps.valueAt(ix);
+        mInfo->mCodecCaps.add(mediaType,
+                BuildCodecCapabilities(mediaType.c_str(), caps, mInfo->isEncoder(),
+                MediaCodecInfo::sMaxSupportedInstances));
+    }
+}
+
+// static
+std::shared_ptr<CodecCapabilities> MediaCodecInfoWriter::BuildCodecCapabilities(
+        const char *mediaType, sp<MediaCodecInfo::Capabilities> caps, bool isEncoder,
+        int32_t maxSupportedInstances) {
+    Vector<ProfileLevel> profileLevels_;
+    Vector<uint32_t> colorFormats_;
+    caps->getSupportedProfileLevels(&profileLevels_);
+    caps->getSupportedColorFormats(&colorFormats_);
+
+    std::vector<ProfileLevel> profileLevels;
+    std::vector<uint32_t> colorFormats;
+    for (ProfileLevel pl : profileLevels_) {
+        profileLevels.push_back(pl);
+    }
+    for (uint32_t cf : colorFormats_) {
+        colorFormats.push_back(cf);
+    }
+
+    sp<AMessage> defaultFormat = new AMessage();
+    defaultFormat->setString("mime", mediaType);
+
+    sp<AMessage> capabilitiesInfo = caps->getDetails();
+
+    std::shared_ptr<CodecCapabilities> codecCaps = std::make_shared<CodecCapabilities>();
+    codecCaps->init(profileLevels, colorFormats, isEncoder, defaultFormat,
+            capabilitiesInfo, maxSupportedInstances);
+
+    return codecCaps;
+}
+
+// static
+void MediaCodecInfoWriter::SetMaxSupportedInstances(int32_t maxSupportedInstances) {
+    MediaCodecInfo::sMaxSupportedInstances = maxSupportedInstances;
+}
+
 MediaCodecInfoWriter::MediaCodecInfoWriter(MediaCodecInfo* info) :
     mInfo(info) {
 }
diff --git a/media/libmedia/VideoCapabilities.cpp b/media/libmedia/VideoCapabilities.cpp
new file mode 100644
index 0000000..bd26b8c
--- /dev/null
+++ b/media/libmedia/VideoCapabilities.cpp
@@ -0,0 +1,1702 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoCapabilities"
+
+#include <android-base/strings.h>
+
+#include <media/CodecCapabilities.h>
+#include <media/VideoCapabilities.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaCodecConstants.h>
+
+#include <utils/Errors.h>
+
+namespace android {
+
+static const Range<int64_t> POSITIVE_INT64 = Range((int64_t)1, INT64_MAX);
+static const Range<int32_t> BITRATE_RANGE = Range<int32_t>(0, 500000000);
+static const Range<int32_t> FRAME_RATE_RANGE = Range<int32_t>(0, 960);
+static const Range<Rational> POSITIVE_RATIONALS =
+            Range<Rational>(Rational((int32_t)1, INT32_MAX), Rational(INT32_MAX, (int32_t)1));
+
+const Range<int32_t>& VideoCapabilities::getBitrateRange() const {
+    return mBitrateRange;
+}
+
+const Range<int32_t>& VideoCapabilities::getSupportedWidths() const {
+    return mWidthRange;
+}
+
+const Range<int32_t>& VideoCapabilities::getSupportedHeights() const {
+    return mHeightRange;
+}
+
+int32_t VideoCapabilities::getWidthAlignment() const {
+    return mWidthAlignment;
+}
+
+int32_t VideoCapabilities::getHeightAlignment() const {
+    return mHeightAlignment;
+}
+
+int32_t VideoCapabilities::getSmallerDimensionUpperLimit() const {
+    return mSmallerDimensionUpperLimit;
+}
+
+const Range<int32_t>& VideoCapabilities::getSupportedFrameRates() const {
+    return mFrameRateRange;
+}
+
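+// Returns the widths supported at the given height by intersecting the overall width range
+// with the limits implied by block count, block aspect ratio, the smaller-dimension cap and
+// the frame aspect-ratio range.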
+std::optional<Range<int32_t>> VideoCapabilities::getSupportedWidthsFor(int32_t height) const {
+    Range<int32_t> range = mWidthRange;
+    if (!mHeightRange.contains(height)
+            || (height % mHeightAlignment) != 0) {
+        ALOGE("unsupported height");
+        return std::nullopt;
+    }
+    const int32_t heightInBlocks = divUp(height, mBlockHeight);
+
+    // constrain by block count and by block aspect ratio
+    const int32_t minWidthInBlocks = std::max(
+            divUp(mBlockCountRange.lower(), heightInBlocks),
+            (int32_t)std::ceil(mBlockAspectRatioRange.lower().asDouble()
+                    * heightInBlocks));
+    const int32_t maxWidthInBlocks = std::min(
+            mBlockCountRange.upper() / heightInBlocks,
+            (int32_t)(mBlockAspectRatioRange.upper().asDouble()
+                    * heightInBlocks));
+    range = range.intersect(
+            (minWidthInBlocks - 1) * mBlockWidth + mWidthAlignment,
+            maxWidthInBlocks * mBlockWidth);
+
+    // constrain by smaller dimension limit
+    if (height > mSmallerDimensionUpperLimit) {
+        range = range.intersect(1, mSmallerDimensionUpperLimit);
+    }
+
+    // constrain by aspect ratio
+    range = range.intersect(
+            (int32_t)std::ceil(mAspectRatioRange.lower().asDouble()
+                    * height),
+            (int32_t)(mAspectRatioRange.upper().asDouble() * height));
+    return range;
+}
+
+std::optional<Range<int32_t>> VideoCapabilities::getSupportedHeightsFor(int32_t width) const {
+    Range<int32_t> range = mHeightRange;
+    if (!mWidthRange.contains(width)
+            || (width % mWidthAlignment) != 0) {
+        ALOGE("unsupported width");
+        return std::nullopt;
+    }
+    const int32_t widthInBlocks = divUp(width, mBlockWidth);
+
+    // constrain by block count and by block aspect ratio
+    const int32_t minHeightInBlocks = std::max(
+            divUp(mBlockCountRange.lower(), widthInBlocks),
+            (int32_t)std::ceil(widthInBlocks /
+                    mBlockAspectRatioRange.upper().asDouble()));
+    const int32_t maxHeightInBlocks = std::min(
+            mBlockCountRange.upper() / widthInBlocks,
+            (int32_t)(widthInBlocks /
+                    mBlockAspectRatioRange.lower().asDouble()));
+    range = range.intersect(
+            (minHeightInBlocks - 1) * mBlockHeight + mHeightAlignment,
+            maxHeightInBlocks * mBlockHeight);
+
+    // constrain by smaller dimension limit
+    if (width > mSmallerDimensionUpperLimit) {
+        range = range.intersect(1, mSmallerDimensionUpperLimit);
+    }
+
+    // constrain by aspect ratio
+    range = range.intersect(
+            (int32_t)std::ceil(width /
+                    mAspectRatioRange.upper().asDouble()),
+            (int32_t)(width / mAspectRatioRange.lower().asDouble()));
+    return range;
+}
+
+std::optional<Range<double>> VideoCapabilities::getSupportedFrameRatesFor(
+        int32_t width, int32_t height) const {
+    if (!supports(std::make_optional<int32_t>(width), std::make_optional<int32_t>(height),
+            std::nullopt /* rate */)) {
+        ALOGE("Unsupported size. width: %d, height: %d", width, height);
+        return std::nullopt;
+    }
+
+    const int32_t blockCount =
+            divUp(width, mBlockWidth) * divUp(height, mBlockHeight);
+
+    return std::make_optional(Range(
+            std::max(mBlocksPerSecondRange.lower() / (double) blockCount,
+                (double) mFrameRateRange.lower()),
+            std::min(mBlocksPerSecondRange.upper() / (double) blockCount,
+                (double) mFrameRateRange.upper())));
+}
+
+int32_t VideoCapabilities::getBlockCount(int32_t width, int32_t height) const {
+    return divUp(width, mBlockWidth) * divUp(height, mBlockHeight);
+}
+
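+// Returns the measured size whose block count is closest to that of the requested size.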
+std::optional<VideoSize> VideoCapabilities::findClosestSize(
+        int32_t width, int32_t height) const {
+    int32_t targetBlockCount = getBlockCount(width, height);
+    std::optional<VideoSize> closestSize;
+    int32_t minDiff = INT32_MAX;
+    for (const auto &[size, range] : mMeasuredFrameRates) {
+        int32_t diff = std::abs(targetBlockCount -
+                getBlockCount(size.getWidth(), size.getHeight()));
+        if (diff < minDiff) {
+            minDiff = diff;
+            closestSize = size;
+        }
+    }
+    return closestSize;
+}
+
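+// Scales the measured frame-rate range of the closest published size by the ratio of its
+// block count to that of the requested size.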
+std::optional<Range<double>> VideoCapabilities::estimateFrameRatesFor(
+        int32_t width, int32_t height) const {
+    std::optional<VideoSize> size = findClosestSize(width, height);
+    if (!size) {
+        return std::nullopt;
+    }
+    auto rangeItr = mMeasuredFrameRates.find(size.value());
+    if (rangeItr == mMeasuredFrameRates.end()) {
+        return std::nullopt;
+    }
+    Range<int64_t> range = rangeItr->second;
+    double ratio = getBlockCount(size.value().getWidth(), size.value().getHeight())
+            / (double)std::max(getBlockCount(width, height), 1);
+    return std::make_optional(Range(range.lower() * ratio, range.upper() * ratio));
+}
+
+std::optional<Range<double>> VideoCapabilities::getAchievableFrameRatesFor(
+        int32_t width, int32_t height) const {
+    if (!supports(std::make_optional<int32_t>(width), std::make_optional<int32_t>(height),
+            std::nullopt /* rate */)) {
+        ALOGE("Unsupported size. width: %d, height: %d", width, height);
+        return std::nullopt;
+    }
+
+    if (mMeasuredFrameRates.empty()) {
+        ALOGW("Codec did not publish any measurement data.");
+        return std::nullopt;
+    }
+
+    return estimateFrameRatesFor(width, height);
+}
+
+// VideoCapabilities::PerformancePoint
+
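+// A PerformancePoint pairs a frame size with an achievable frame rate. Width, height and
+// the block size are stored in 16-pixel (macroblock) units, and coverage/equality checks are
+// done after aligning both points to a common block size.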
+int32_t VideoCapabilities::PerformancePoint::getMaxMacroBlocks() const {
+    return saturateInt64ToInt32(mWidth * (int64_t)mHeight);
+}
+
+int32_t VideoCapabilities::PerformancePoint::getWidth() const {
+    return mWidth;
+}
+
+int32_t VideoCapabilities::PerformancePoint::getHeight() const {
+    return mHeight;
+}
+
+int32_t VideoCapabilities::PerformancePoint::getMaxFrameRate() const {
+    return mMaxFrameRate;
+}
+
+int64_t VideoCapabilities::PerformancePoint::getMaxMacroBlockRate() const {
+    return mMaxMacroBlockRate;
+}
+
+VideoSize VideoCapabilities::PerformancePoint::getBlockSize() const {
+    return mBlockSize;
+}
+
+std::string VideoCapabilities::PerformancePoint::toString() const {
+    int64_t blockWidth = 16 * (int64_t)mBlockSize.getWidth();
+    int64_t blockHeight = 16 * (int64_t)mBlockSize.getHeight();
+    int32_t origRate = (int32_t)divUp(mMaxMacroBlockRate, (int64_t)getMaxMacroBlocks());
+    std::string info = std::to_string(mWidth * (int64_t)16) + "x"
+            + std::to_string(mHeight * (int64_t)16) + "@" + std::to_string(origRate);
+    if (origRate < mMaxFrameRate) {
+        info += ", max " + std::to_string(mMaxFrameRate) + "fps";
+    }
+    if (blockWidth > 16 || blockHeight > 16) {
+        info += ", " + std::to_string(blockWidth) + "x"
+                + std::to_string(blockHeight) + " blocks";
+    }
+    return "PerformancePoint(" + info + ")";
+}
+
+void VideoCapabilities::PerformancePoint::init(int32_t width, int32_t height,
+        int32_t frameRate, int32_t maxFrameRate, VideoSize blockSize) {
+    mBlockSize = VideoSize(divUp(blockSize.getWidth(), (int32_t)16),
+                            divUp(blockSize.getHeight(), (int32_t)16));
+    // Use IsPowerOfTwoStrict as we do not want width or height to be 0.
+    if (!IsPowerOfTwoStrict(blockSize.getWidth()) || !IsPowerOfTwoStrict(blockSize.getHeight())) {
+        ALOGE("The width and height of a PerformancePoint must be the power of two and not zero."
+                " width: %d, height: %d", blockSize.getWidth(), blockSize.getHeight());
+    }
+
+    // these are guaranteed not to overflow as we decimate by 16
+    mWidth = (int32_t)(divUp(std::max(width, 1),
+                            std::max(blockSize.getWidth(), 16))
+                        * mBlockSize.getWidth());
+    mHeight = (int32_t)(divUp(std::max(height, 1),
+                            std::max(blockSize.getHeight(), 16))
+                        * mBlockSize.getHeight());
+    mMaxFrameRate = std::max(std::max(frameRate, maxFrameRate), 1);
+    mMaxMacroBlockRate = std::max(frameRate, 1) * (int64_t)getMaxMacroBlocks();
+}
+
+VideoCapabilities::PerformancePoint::PerformancePoint(int32_t width, int32_t height,
+        int32_t frameRate, int32_t maxFrameRate, VideoSize blockSize) {
+    init(width, height, frameRate, maxFrameRate, blockSize);
+}
+
+VideoCapabilities::PerformancePoint::PerformancePoint(VideoSize blockSize, int32_t width,
+        int32_t height, int32_t maxFrameRate, int64_t maxMacroBlockRate) :
+        mBlockSize(blockSize), mWidth(width), mHeight(height), mMaxFrameRate(maxFrameRate),
+        mMaxMacroBlockRate(maxMacroBlockRate) {}
+
+VideoCapabilities::PerformancePoint::PerformancePoint(
+        const PerformancePoint &pp, VideoSize newBlockSize) {
+    init(16 * pp.mWidth, 16 * pp.mHeight,
+            // guaranteed not to overflow as these were multiplied at construction
+            (int32_t)divUp(pp.mMaxMacroBlockRate, (int64_t)pp.getMaxMacroBlocks()),
+            pp.mMaxFrameRate,
+            VideoSize(std::max(newBlockSize.getWidth(), 16 * pp.mBlockSize.getWidth()),
+                 std::max(newBlockSize.getHeight(), 16 * pp.mBlockSize.getHeight())));
+}
+
+VideoCapabilities::PerformancePoint::PerformancePoint(
+        int32_t width, int32_t height, int32_t frameRate) {
+    init(width, height, frameRate, frameRate /* maxFrameRate */, VideoSize(16, 16));
+}
+
+int32_t VideoCapabilities::PerformancePoint::saturateInt64ToInt32(int64_t value) const {
+    if (value < INT32_MIN) {
+        return INT32_MIN;
+    } else if (value > INT32_MAX) {
+        return INT32_MAX;
+    } else {
+        return (int32_t)value;
+    }
+}
+
+/* This method may overflow */
+int32_t VideoCapabilities::PerformancePoint::align(
+        int32_t value, int32_t alignment) const {
+    return divUp(value, alignment) * alignment;
+}
+
+bool VideoCapabilities::PerformancePoint::covers(
+        const sp<AMessage> &format) const {
+    int32_t width, height;
+    format->findInt32(KEY_WIDTH, &width);
+    format->findInt32(KEY_HEIGHT, &height);
+    double frameRate;
+    format->findDouble(KEY_FRAME_RATE, &frameRate);
+    PerformancePoint other = PerformancePoint(
+            width, height,
+            // safely convert ceil(double) to int through float cast and std::round
+            std::round((float)(std::ceil(frameRate)))
+    );
+    return covers(other);
+}
+
+bool VideoCapabilities::PerformancePoint::covers(
+        const PerformancePoint &other) const {
+    // convert performance points to common block size
+    VideoSize commonSize = getCommonBlockSize(other);
+    PerformancePoint aligned = PerformancePoint(*this, commonSize);
+    PerformancePoint otherAligned = PerformancePoint(other, commonSize);
+
+    return (aligned.getMaxMacroBlocks() >= otherAligned.getMaxMacroBlocks()
+            && aligned.mMaxFrameRate >= otherAligned.mMaxFrameRate
+            && aligned.mMaxMacroBlockRate >= otherAligned.mMaxMacroBlockRate);
+}
+
+VideoSize VideoCapabilities::PerformancePoint::getCommonBlockSize(
+        const PerformancePoint &other) const {
+    return VideoSize(
+            16 * std::max(mBlockSize.getWidth(), other.mBlockSize.getWidth()),
+            16 * std::max(mBlockSize.getHeight(), other.mBlockSize.getHeight()));
+}
+
+bool VideoCapabilities::PerformancePoint::equals(
+        const PerformancePoint &other) const {
+    // convert performance points to common block size
+    VideoSize commonSize = getCommonBlockSize(other);
+    PerformancePoint aligned = PerformancePoint(*this, commonSize);
+    PerformancePoint otherAligned = PerformancePoint(other, commonSize);
+
+    return (aligned.getMaxMacroBlocks() == otherAligned.getMaxMacroBlocks()
+            && aligned.mMaxFrameRate == otherAligned.mMaxFrameRate
+            && aligned.mMaxMacroBlockRate == otherAligned.mMaxMacroBlockRate);
+}
+
+// VideoCapabilities
+
+const std::vector<VideoCapabilities::PerformancePoint>&
+        VideoCapabilities::getSupportedPerformancePoints() const {
+    return mPerformancePoints;
+}
+
+bool VideoCapabilities::areSizeAndRateSupported(
+        int32_t width, int32_t height, double frameRate) const {
+    return supports(std::make_optional<int32_t>(width), std::make_optional<int32_t>(height),
+            std::make_optional<double>(frameRate));
+}
+
+bool VideoCapabilities::isSizeSupported(int32_t width, int32_t height) const {
+    return supports(std::make_optional<int32_t>(width), std::make_optional<int32_t>(height),
+            std::nullopt /* rate */);
+}
+
+bool VideoCapabilities::supports(std::optional<int32_t> width, std::optional<int32_t> height,
+        std::optional<double> rate) const {
+    bool ok = true;
+
+    if (width) {
+        ok &= mWidthRange.contains(width.value())
+                && (width.value() % mWidthAlignment == 0);
+    }
+    if (height) {
+        ok &= mHeightRange.contains(height.value())
+                && (height.value() % mHeightAlignment == 0);
+    }
+    if (rate) {
+        ok &= mFrameRateRange.contains(Range<int32_t>::RangeFor(rate.value()));
+    }
+    if (height && width) {
+        ok &= std::min(height.value(), width.value()) <= mSmallerDimensionUpperLimit;
+
+        const int32_t widthInBlocks = divUp(width.value(), mBlockWidth);
+        const int32_t heightInBlocks = divUp(height.value(), mBlockHeight);
+        const int32_t blockCount = widthInBlocks * heightInBlocks;
+        ok &= mBlockCountRange.contains(blockCount)
+                && mBlockAspectRatioRange.contains(
+                        Rational(widthInBlocks, heightInBlocks))
+                && mAspectRatioRange.contains(Rational(width.value(), height.value()));
+        if (rate) {
+            double blocksPerSec = blockCount * rate.value();
+            ok &= mBlocksPerSecondRange.contains(
+                    Range<int64_t>::RangeFor(blocksPerSec));
+        }
+    }
+    return ok;
+}
+
+bool VideoCapabilities::supportsFormat(const sp<AMessage> &format) const {
+    int32_t widthVal, heightVal;
+    std::optional<int32_t> width = format->findInt32(KEY_WIDTH, &widthVal)
+            ? std::make_optional<int32_t>(widthVal) : std::nullopt;
+    std::optional<int32_t> height = format->findInt32(KEY_HEIGHT, &heightVal)
+            ? std::make_optional<int32_t>(heightVal) : std::nullopt;
+    double rateVal;
+    std::optional<double> rate = format->findDouble(KEY_FRAME_RATE, &rateVal)
+            ? std::make_optional<double>(rateVal) : std::nullopt;
+
+    if (!supports(width, height, rate)) {
+        return false;
+    }
+
+    if (!CodecCapabilities::SupportsBitrate(mBitrateRange, format)) {
+        return false;
+    }
+
+    // we ignore color-format for now as it is not reliably reported by codec
+    return true;
+}
+
+// static
+std::shared_ptr<VideoCapabilities> VideoCapabilities::Create(std::string mediaType,
+        std::vector<ProfileLevel> profLevs, const sp<AMessage> &format) {
+    std::shared_ptr<VideoCapabilities> caps(new VideoCapabilities());
+    caps->init(mediaType, profLevs, format);
+    return caps;
+}
+
+void VideoCapabilities::init(std::string mediaType, std::vector<ProfileLevel> profLevs,
+        const sp<AMessage> &format) {
+    mMediaType = mediaType;
+    mProfileLevels = profLevs;
+    mError = 0;
+
+    initWithPlatformLimits();
+    applyLevelLimits();
+    parseFromInfo(format);
+    updateLimits();
+}
+
+VideoSize VideoCapabilities::getBlockSize() const {
+    return VideoSize(mBlockWidth, mBlockHeight);
+}
+
+const Range<int32_t>& VideoCapabilities::getBlockCountRange() const {
+    return mBlockCountRange;
+}
+
+const Range<int64_t>& VideoCapabilities::getBlocksPerSecondRange() const {
+    return mBlocksPerSecondRange;
+}
+
+Range<Rational> VideoCapabilities::getAspectRatioRange(bool blocks) const {
+    return blocks ? mBlockAspectRatioRange : mAspectRatioRange;
+}
+
+void VideoCapabilities::initWithPlatformLimits() {
+    mBitrateRange = BITRATE_RANGE;
+
+    mWidthRange  = VideoSize::GetAllowedDimensionRange();
+    mHeightRange = VideoSize::GetAllowedDimensionRange();
+    mFrameRateRange = FRAME_RATE_RANGE;
+
+    mHorizontalBlockRange = VideoSize::GetAllowedDimensionRange();
+    mVerticalBlockRange   = VideoSize::GetAllowedDimensionRange();
+
+    // full positive ranges are supported as these get calculated
+    mBlockCountRange      = POSITIVE_INT32;
+    mBlocksPerSecondRange = POSITIVE_INT64;
+
+    mBlockAspectRatioRange = POSITIVE_RATIONALS;
+    mAspectRatioRange      = POSITIVE_RATIONALS;
+
+    // YUV 4:2:0 requires 2:2 alignment
+    mWidthAlignment = 2;
+    mHeightAlignment = 2;
+    mBlockWidth = 2;
+    mBlockHeight = 2;
+    mSmallerDimensionUpperLimit = VideoSize::GetAllowedDimensionRange().upper();
+}
+
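+// Collects "performance-point-<size>-range" entries from the codec info. Each point is added
+// together with its 90-degree rotation unless the original already covers it; a
+// "performance-point-none" entry means the component intentionally published no points.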
+std::vector<VideoCapabilities::PerformancePoint>
+        VideoCapabilities::getPerformancePoints(
+        const sp<AMessage> &format) const {
+    std::vector<PerformancePoint> ret;
+    AMessage::Type type;
+    for (int i = 0; i < format->countEntries(); i++) {
+        const char *name = format->getEntryNameAt(i, &type);
+        AString rangeStr;
+        if (!format->findString(name, &rangeStr)) {
+            continue;
+        }
+
+        const std::string key = std::string(name);
+        // looking for: performance-point-WIDTHxHEIGHT-range
+
+        // check for the explicit "performance-point-none" marker
+        if (key == "performance-point-none" && ret.size() == 0) {
+            // This means that the component knowingly did not publish performance points.
+            // This is different from when the component forgot to publish performance
+            // points.
+            return ret;
+        }
+
+        // parse size from key
+        std::regex sizeRegex("performance-point-(.+)-range");
+        std::smatch sizeMatch;
+        if (!std::regex_match(key, sizeMatch, sizeRegex)) {
+            continue;
+        }
+        std::optional<VideoSize> size = VideoSize::ParseSize(sizeMatch[1].str());
+        if (!size || size.value().getWidth() * size.value().getHeight() <= 0) {
+            continue;
+        }
+
+        // parse range from value
+        std::optional<Range<int64_t>> range = Range<int64_t>::Parse(std::string(rangeStr.c_str()));
+        if (!range || range.value().lower() < 0 || range.value().upper() < 0) {
+            continue;
+        }
+
+        PerformancePoint given = PerformancePoint(
+                size.value().getWidth(), size.value().getHeight(), (int32_t)range.value().lower(),
+                (int32_t)range.value().upper(), VideoSize(mBlockWidth, mBlockHeight));
+        PerformancePoint rotated = PerformancePoint(
+                size.value().getHeight(), size.value().getWidth(), (int32_t)range.value().lower(),
+                (int32_t)range.value().upper(), VideoSize(mBlockWidth, mBlockHeight));
+        ret.push_back(given);
+        if (!given.covers(rotated)) {
+            ret.push_back(rotated);
+        }
+    }
+
+    // check if the component specified no performance point indication
+    if (ret.size() == 0) {
+        return ret;
+    }
+
+    // sort in decreasing order: by macroblock count, then macroblock rate, then frame rate
+    std::sort(ret.begin(), ret.end(), [](const PerformancePoint &a, const PerformancePoint &b) {
+        // std::sort needs a bool "a comes before b" predicate rather than a Java-style
+        // -1/0/1 comparator, so compare field by field in descending order.
+        if (a.getMaxMacroBlocks() != b.getMaxMacroBlocks()) {
+            return a.getMaxMacroBlocks() > b.getMaxMacroBlocks();
+        }
+        if (a.getMaxMacroBlockRate() != b.getMaxMacroBlockRate()) {
+            return a.getMaxMacroBlockRate() > b.getMaxMacroBlockRate();
+        }
+        return a.getMaxFrameRate() > b.getMaxFrameRate();
+    });
+
+    return ret;
+}
+
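+// Collects "measured-frame-rate-<size>-range" entries into a size-to-frame-rate-range map.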
+std::map<VideoSize, Range<int64_t>, VideoSizeCompare> VideoCapabilities
+        ::getMeasuredFrameRates(const sp<AMessage> &format) const {
+    std::map<VideoSize, Range<int64_t>, VideoSizeCompare> ret;
+    AMessage::Type type;
+    for (int i = 0; i < format->countEntries(); i++) {
+        const char *name = format->getEntryNameAt(i, &type);
+        AString rangeStr;
+        if (!format->findString(name, &rangeStr)) {
+            continue;
+        }
+
+        const std::string key = std::string(name);
+        // looking for: measured-frame-rate-WIDTHxHEIGHT-range
+
+        std::regex sizeRegex("measured-frame-rate-(.+)-range");
+        std::smatch sizeMatch;
+        if (!std::regex_match(key, sizeMatch, sizeRegex)) {
+            continue;
+        }
+
+        std::optional<VideoSize> size = VideoSize::ParseSize(sizeMatch[1].str());
+        if (!size || size.value().getWidth() * size.value().getHeight() <= 0) {
+            continue;
+        }
+
+        std::optional<Range<int64_t>> range = Range<int64_t>::Parse(std::string(rangeStr.c_str()));
+        if (!range || range.value().lower() < 0 || range.value().upper() < 0) {
+            continue;
+        }
+
+        ret.emplace(size.value(), range.value());
+    }
+    return ret;
+}
+
+// static
+std::optional<std::pair<Range<int32_t>, Range<int32_t>>> VideoCapabilities
+        ::ParseWidthHeightRanges(const std::string &str) {
+    std::optional<std::pair<VideoSize, VideoSize>> range = VideoSize::ParseSizeRange(str);
+    if (!range) {
+        ALOGW("could not parse size range: %s", str.c_str());
+        return std::nullopt;
+    }
+
+    return std::make_optional(std::pair(
+            Range(range.value().first.getWidth(), range.value().second.getWidth()),
+            Range(range.value().first.getHeight(), range.value().second.getHeight())));
+}
+
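+// Estimates the closest VP9 level from the published capabilities: SR is the peak luma
+// sample rate (blocks/s times block area), FS the max picture size in samples, BR the max
+// bitrate in kbps and D the largest supported dimension.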
+// static
+int32_t VideoCapabilities::EquivalentVP9Level(const sp<AMessage> &format) {
+    int32_t blockSizeWidth = 8;
+    int32_t blockSizeHeight = 8;
+    // VideoSize *blockSizePtr = &VideoSize(8, 8);
+    AString blockSizeStr;
+    if (format->findString("block-size", &blockSizeStr)) {
+        std::optional<VideoSize> parsedBlockSize
+                = VideoSize::ParseSize(std::string(blockSizeStr.c_str()));
+        if (parsedBlockSize) {
+            // blockSize = parsedBlockSize.value();
+            blockSizeWidth = parsedBlockSize.value().getWidth();
+            blockSizeHeight = parsedBlockSize.value().getHeight();
+        }
+    }
+    int32_t BS = blockSizeWidth * blockSizeHeight;
+
+    int32_t FS = 0;
+    AString blockCountRangeStr;
+    if (format->findString("block-count-range", &blockCountRangeStr)) {
+        std::optional<Range<int>> counts = Range<int32_t>::Parse(
+                std::string(blockCountRangeStr.c_str()));
+        if (counts) {
+            FS = BS * counts.value().upper();
+        }
+    }
+
+    int64_t SR = 0;
+    AString blockRatesStr;
+    if (format->findString("blocks-per-second-range", &blockRatesStr)) {
+        std::optional<Range<int64_t>> blockRates
+                = Range<int64_t>::Parse(std::string(blockRatesStr.c_str()));
+        if (blockRates) {
+            // ToDo: Catch the potential overflow issue.
+            SR = BS * blockRates.value().upper();
+        }
+    }
+
+    int32_t D = 0;
+    AString dimensionRangesStr;
+    if (format->findString("size-range", &dimensionRangesStr)) {
+        std::optional<std::pair<Range<int>, Range<int>>> dimensionRanges =
+                ParseWidthHeightRanges(std::string(dimensionRangesStr.c_str()));
+        if (dimensionRanges) {
+            D = std::max(dimensionRanges.value().first.upper(),
+                    dimensionRanges.value().second.upper());
+        }
+    }
+
+    int32_t BR = 0;
+    AString bitrateRangeStr;
+    if (format->findString("bitrate-range", &bitrateRangeStr)) {
+        std::optional<Range<int>> bitRates = Range<int32_t>::Parse(
+                std::string(bitrateRangeStr.c_str()));
+        if (bitRates) {
+            BR = divUp(bitRates.value().upper(), 1000);
+        }
+    }
+
+    if (SR <=      829440 && FS <=    36864 && BR <=    200 && D <=   512)
+        return VP9Level1;
+    if (SR <=     2764800 && FS <=    73728 && BR <=    800 && D <=   768)
+        return VP9Level11;
+    if (SR <=     4608000 && FS <=   122880 && BR <=   1800 && D <=   960)
+        return VP9Level2;
+    if (SR <=     9216000 && FS <=   245760 && BR <=   3600 && D <=  1344)
+        return VP9Level21;
+    if (SR <=    20736000 && FS <=   552960 && BR <=   7200 && D <=  2048)
+        return VP9Level3;
+    if (SR <=    36864000 && FS <=   983040 && BR <=  12000 && D <=  2752)
+        return VP9Level31;
+    if (SR <=    83558400 && FS <=  2228224 && BR <=  18000 && D <=  4160)
+        return VP9Level4;
+    if (SR <=   160432128 && FS <=  2228224 && BR <=  30000 && D <=  4160)
+        return VP9Level41;
+    if (SR <=   311951360 && FS <=  8912896 && BR <=  60000 && D <=  8384)
+        return VP9Level5;
+    if (SR <=   588251136 && FS <=  8912896 && BR <= 120000 && D <=  8384)
+        return VP9Level51;
+    if (SR <=  1176502272 && FS <=  8912896 && BR <= 180000 && D <=  8384)
+        return VP9Level52;
+    if (SR <=  1176502272 && FS <= 35651584 && BR <= 180000 && D <= 16832)
+        return VP9Level6;
+    if (SR <= 2353004544L && FS <= 35651584 && BR <= 240000 && D <= 16832)
+        return VP9Level61;
+    if (SR <= 4706009088L && FS <= 35651584 && BR <= 480000 && D <= 16832)
+        return VP9Level62;
+    // returning largest level
+    return VP9Level62;
+}
+
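+// Merges the size, block, frame-rate and bitrate ranges published in the codec info into the
+// level-derived limits. When unsupported profiles were seen (or overrides are allowed), the
+// published values replace the level limits, clipped only to platform ranges; otherwise they
+// can only narrow the existing limits.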
+void VideoCapabilities::parseFromInfo(const sp<AMessage> &format) {
+    VideoSize blockSize = VideoSize(mBlockWidth, mBlockHeight);
+    VideoSize alignment = VideoSize(mWidthAlignment, mHeightAlignment);
+    std::optional<Range<int32_t>> counts, widths, heights;
+    std::optional<Range<int32_t>> frameRates, bitRates;
+    std::optional<Range<int64_t>> blockRates;
+    std::optional<Range<Rational>> ratios, blockRatios;
+
+    AString blockSizeStr;
+    if (format->findString("block-size", &blockSizeStr)) {
+        std::optional<VideoSize> parsedBlockSize
+                = VideoSize::ParseSize(std::string(blockSizeStr.c_str()));
+        blockSize = parsedBlockSize.value_or(blockSize);
+    }
+    AString alignmentStr;
+    if (format->findString("alignment", &alignmentStr)) {
+        std::optional<VideoSize> parsedAlignment
+            = VideoSize::ParseSize(std::string(alignmentStr.c_str()));
+        alignment = parsedAlignment.value_or(alignment);
+    }
+    AString blockCountRangeStr;
+    if (format->findString("block-count-range", &blockCountRangeStr)) {
+        std::optional<Range<int>> parsedBlockCountRange =
+                Range<int32_t>::Parse(std::string(blockCountRangeStr.c_str()));
+        if (parsedBlockCountRange) {
+            counts = parsedBlockCountRange.value();
+        }
+    }
+    AString blockRatesStr;
+    if (format->findString("blocks-per-second-range", &blockRatesStr)) {
+        blockRates = Range<int64_t>::Parse(std::string(blockRatesStr.c_str()));
+    }
+    mMeasuredFrameRates = getMeasuredFrameRates(format);
+    mPerformancePoints = getPerformancePoints(format);
+    AString sizeRangesStr;
+    if (format->findString("size-range", &sizeRangesStr)) {
+        std::optional<std::pair<Range<int>, Range<int>>> sizeRanges =
+            ParseWidthHeightRanges(std::string(sizeRangesStr.c_str()));
+        if (sizeRanges) {
+            widths = sizeRanges.value().first;
+            heights = sizeRanges.value().second;
+        }
+    }
+    // For now, "feature-can-swap-width-height" just means using the smaller max size as a
+    // second upper limit; the profile-specific "width/height in macroblocks" limits are kept
+    // as-is.
+    if (format->contains("feature-can-swap-width-height")) {
+        if (widths && heights) {
+            mSmallerDimensionUpperLimit =
+                std::min(widths.value().upper(), heights.value().upper());
+            widths = heights = widths.value().extend(heights.value());
+        } else {
+            ALOGW("feature can-swap-width-height is best used with size-range");
+            mSmallerDimensionUpperLimit =
+                std::min(mWidthRange.upper(), mHeightRange.upper());
+            mWidthRange = mHeightRange = mWidthRange.extend(mHeightRange);
+        }
+    }
+
+    AString ratioStr;
+    if (format->findString("block-aspect-ratio-range", &ratioStr)) {
+        ratios = Rational::ParseRange(std::string(ratioStr.c_str()));
+    }
+    AString blockRatiosStr;
+    if (format->findString("pixel-aspect-ratio-range", &blockRatiosStr)) {
+        blockRatios = Rational::ParseRange(std::string(blockRatiosStr.c_str()));
+    }
+    AString frameRatesStr;
+    if (format->findString("frame-rate-range", &frameRatesStr)) {
+        frameRates = Range<int32_t>::Parse(std::string(frameRatesStr.c_str()));
+        if (frameRates) {
+            frameRates = frameRates.value().intersect(FRAME_RATE_RANGE);
+            if (frameRates.value().empty()) {
+                ALOGW("frame rate range is out of limits");
+                frameRates = std::nullopt;
+            }
+        }
+    }
+    AString bitRatesStr;
+    if (format->findString("bitrate-range", &bitRatesStr)) {
+        bitRates = Range<int32_t>::Parse(std::string(bitRatesStr.c_str()));
+        if (bitRates) {
+            bitRates = bitRates.value().intersect(BITRATE_RANGE);
+            if (bitRates.value().empty()) {
+                ALOGW("bitrate range is out of limits");
+                bitRates = std::nullopt;
+            }
+        }
+    }
+
+    if (!IsPowerOfTwo(blockSize.getWidth()) || !IsPowerOfTwo(blockSize.getHeight())
+            || !IsPowerOfTwo(alignment.getWidth()) || !IsPowerOfTwo(alignment.getHeight())) {
+        ALOGE("The widths and heights of blockSizes and alignments must be the power of two."
+                " blockSize width: %d; blockSize height: %d;"
+                " alignment width: %d; alignment height: %d.",
+                blockSize.getWidth(), blockSize.getHeight(),
+                alignment.getWidth(), alignment.getHeight());
+        mError |= ERROR_CAPABILITIES_UNRECOGNIZED;
+        return;
+    }
+
+    // update block-size and alignment
+    applyMacroBlockLimits(
+            INT32_MAX, INT32_MAX, INT32_MAX, INT64_MAX,
+            blockSize.getWidth(), blockSize.getHeight(),
+            alignment.getWidth(), alignment.getHeight());
+
+    if ((mError & ERROR_CAPABILITIES_UNSUPPORTED) != 0 || mAllowMbOverride) {
+        // codec supports profiles that we don't know.
+        // Use supplied values clipped to platform limits
+        if (widths) {
+            mWidthRange = VideoSize::GetAllowedDimensionRange().intersect(widths.value());
+        }
+        if (heights) {
+            mHeightRange = VideoSize::GetAllowedDimensionRange().intersect(heights.value());
+        }
+        if (counts) {
+            mBlockCountRange = POSITIVE_INT32.intersect(
+                    counts.value().factor(mBlockWidth * mBlockHeight
+                            / blockSize.getWidth() / blockSize.getHeight()));
+        }
+        if (blockRates) {
+            mBlocksPerSecondRange = POSITIVE_INT64.intersect(
+                    blockRates.value().factor(mBlockWidth * mBlockHeight
+                            / blockSize.getWidth() / blockSize.getHeight()));
+        }
+        if (blockRatios) {
+            mBlockAspectRatioRange = POSITIVE_RATIONALS.intersect(
+                    Rational::ScaleRange(blockRatios.value(),
+                            mBlockHeight / blockSize.getHeight(),
+                            mBlockWidth / blockSize.getWidth()));
+        }
+        if (ratios) {
+            mAspectRatioRange = POSITIVE_RATIONALS.intersect(ratios.value());
+        }
+        if (frameRates) {
+            mFrameRateRange = FRAME_RATE_RANGE.intersect(frameRates.value());
+        }
+        if (bitRates) {
+            // only allow bitrate override if unsupported profiles were encountered
+            if ((mError & ERROR_CAPABILITIES_UNSUPPORTED) != 0) {
+                mBitrateRange = BITRATE_RANGE.intersect(bitRates.value());
+            } else {
+                mBitrateRange = mBitrateRange.intersect(bitRates.value());
+            }
+        }
+    } else {
+        // no unsupported profile/levels, so restrict values to known limits
+        if (widths) {
+            mWidthRange = mWidthRange.intersect(widths.value());
+        }
+        if (heights) {
+            mHeightRange = mHeightRange.intersect(heights.value());
+        }
+        if (counts) {
+            mBlockCountRange = mBlockCountRange.intersect(
+                    counts.value().factor(mBlockWidth * mBlockHeight
+                            / blockSize.getWidth() / blockSize.getHeight()));
+        }
+        if (blockRates) {
+            mBlocksPerSecondRange = mBlocksPerSecondRange.intersect(
+                    blockRates.value().factor(mBlockWidth * mBlockHeight
+                            / blockSize.getWidth() / blockSize.getHeight()));
+        }
+        if (blockRatios) {
+            mBlockAspectRatioRange = mBlockAspectRatioRange.intersect(
+                    Rational::ScaleRange(blockRatios.value(),
+                            mBlockHeight / blockSize.getHeight(),
+                            mBlockWidth / blockSize.getWidth()));
+        }
+        if (ratios) {
+            mAspectRatioRange = mAspectRatioRange.intersect(ratios.value());
+        }
+        if (frameRates) {
+            mFrameRateRange = mFrameRateRange.intersect(frameRates.value());
+        }
+        if (bitRates) {
+            mBitrateRange = mBitrateRange.intersect(bitRates.value());
+        }
+    }
+    updateLimits();
+}
+
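+// Folds a new block-size constraint into the current limits, rescaling existing counts,
+// rates and ratios to the larger common block size (block dimensions are powers of two, so
+// the scaling factor is exact).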
+void VideoCapabilities::applyBlockLimits(
+        int32_t blockWidth, int32_t blockHeight,
+        Range<int32_t> counts, Range<int64_t> rates, Range<Rational> ratios) {
+
+    if (!IsPowerOfTwo(blockWidth) || !IsPowerOfTwo(blockHeight)) {
+        ALOGE("blockWidth and blockHeight must be the power of two."
+                " blockWidth: %d; blockHeight: %d", blockWidth, blockHeight);
+        mError |= ERROR_CAPABILITIES_UNRECOGNIZED;
+        return;
+    }
+
+    const int32_t newBlockWidth = std::max(blockWidth, mBlockWidth);
+    const int32_t newBlockHeight = std::max(blockHeight, mBlockHeight);
+
+    // factor will always be a power-of-2
+    int32_t factor =
+        newBlockWidth * newBlockHeight / mBlockWidth / mBlockHeight;
+    if (factor != 1) {
+        mBlockCountRange = mBlockCountRange.factor(factor);
+        mBlocksPerSecondRange = mBlocksPerSecondRange.factor(factor);
+        mBlockAspectRatioRange = Rational::ScaleRange(
+                mBlockAspectRatioRange,
+                newBlockHeight / mBlockHeight,
+                newBlockWidth / mBlockWidth);
+        mHorizontalBlockRange = mHorizontalBlockRange.factor(newBlockWidth / mBlockWidth);
+        mVerticalBlockRange = mVerticalBlockRange.factor(newBlockHeight / mBlockHeight);
+    }
+    factor = newBlockWidth * newBlockHeight / blockWidth / blockHeight;
+    if (factor != 1) {
+        counts = counts.factor(factor);
+        rates = rates.factor((int64_t)factor);
+        ratios = Rational::ScaleRange(
+                ratios, newBlockHeight / blockHeight,
+                newBlockWidth / blockWidth);
+    }
+    mBlockCountRange = mBlockCountRange.intersect(counts);
+    mBlocksPerSecondRange = mBlocksPerSecondRange.intersect(rates);
+    mBlockAspectRatioRange = mBlockAspectRatioRange.intersect(ratios);
+    mBlockWidth = newBlockWidth;
+    mBlockHeight = newBlockHeight;
+}
+
+void VideoCapabilities::applyAlignment(
+        int32_t widthAlignment, int32_t heightAlignment) {
+    if (!IsPowerOfTwo(widthAlignment) || !IsPowerOfTwo(heightAlignment)) {
+        ALOGE("width and height alignments must be the power of two."
+                " widthAlignment: %d; heightAlignment: %d", widthAlignment, heightAlignment);
+        mError |= ERROR_CAPABILITIES_UNRECOGNIZED;
+        return;
+    }
+
+    if (widthAlignment > mBlockWidth || heightAlignment > mBlockHeight) {
+        // maintain assumption that 0 < alignment <= block-size
+        applyBlockLimits(
+                std::max(widthAlignment, mBlockWidth),
+                std::max(heightAlignment, mBlockHeight),
+                POSITIVE_INT32, POSITIVE_INT64, POSITIVE_RATIONALS);
+    }
+
+    mWidthAlignment = std::max(widthAlignment, mWidthAlignment);
+    mHeightAlignment = std::max(heightAlignment, mHeightAlignment);
+
+    mWidthRange = mWidthRange.align(mWidthAlignment);
+    mHeightRange = mHeightRange.align(mHeightAlignment);
+}
+
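+// Propagates constraints between the pixel, block and rate ranges so that all stored ranges
+// remain mutually consistent.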
+void VideoCapabilities::updateLimits() {
+    // pixels -> blocks <- counts
+    mHorizontalBlockRange = mHorizontalBlockRange.intersect(
+            mWidthRange.factor(mBlockWidth));
+    mHorizontalBlockRange = mHorizontalBlockRange.intersect(
+            Range(  mBlockCountRange.lower() / mVerticalBlockRange.upper(),
+                    mBlockCountRange.upper() / mVerticalBlockRange.lower()));
+    mVerticalBlockRange = mVerticalBlockRange.intersect(
+            mHeightRange.factor(mBlockHeight));
+    mVerticalBlockRange = mVerticalBlockRange.intersect(
+            Range(  mBlockCountRange.lower() / mHorizontalBlockRange.upper(),
+                    mBlockCountRange.upper() / mHorizontalBlockRange.lower()));
+    mBlockCountRange = mBlockCountRange.intersect(
+            Range(  mHorizontalBlockRange.lower()
+                            * mVerticalBlockRange.lower(),
+                    mHorizontalBlockRange.upper()
+                            * mVerticalBlockRange.upper()));
+    mBlockAspectRatioRange = mBlockAspectRatioRange.intersect(
+            Rational(mHorizontalBlockRange.lower(), mVerticalBlockRange.upper()),
+            Rational(mHorizontalBlockRange.upper(), mVerticalBlockRange.lower()));
+
+    // blocks -> pixels
+    mWidthRange = mWidthRange.intersect(
+            (mHorizontalBlockRange.lower() - 1) * mBlockWidth + mWidthAlignment,
+            mHorizontalBlockRange.upper() * mBlockWidth);
+    mHeightRange = mHeightRange.intersect(
+            (mVerticalBlockRange.lower() - 1) * mBlockHeight + mHeightAlignment,
+            mVerticalBlockRange.upper() * mBlockHeight);
+    mAspectRatioRange = mAspectRatioRange.intersect(
+            Rational(mWidthRange.lower(), mHeightRange.upper()),
+            Rational(mWidthRange.upper(), mHeightRange.lower()));
+
+    mSmallerDimensionUpperLimit = std::min(
+            mSmallerDimensionUpperLimit,
+            std::min(mWidthRange.upper(), mHeightRange.upper()));
+
+    // blocks -> rate
+    mBlocksPerSecondRange = mBlocksPerSecondRange.intersect(
+            mBlockCountRange.lower() * (int64_t)mFrameRateRange.lower(),
+            mBlockCountRange.upper() * (int64_t)mFrameRateRange.upper());
+    mFrameRateRange = mFrameRateRange.intersect(
+            (int32_t)(mBlocksPerSecondRange.lower()
+                    / mBlockCountRange.upper()),
+            (int32_t)(mBlocksPerSecondRange.upper()
+                    / (double)mBlockCountRange.lower()));
+}
+
+void VideoCapabilities::applyMacroBlockLimits(
+        int32_t maxHorizontalBlocks, int32_t maxVerticalBlocks,
+        int32_t maxBlocks, int64_t maxBlocksPerSecond,
+        int32_t blockWidth, int32_t blockHeight,
+        int32_t widthAlignment, int32_t heightAlignment) {
+    applyMacroBlockLimits(
+            1 /* minHorizontalBlocks */, 1 /* minVerticalBlocks */,
+            maxHorizontalBlocks, maxVerticalBlocks,
+            maxBlocks, maxBlocksPerSecond,
+            blockWidth, blockHeight, widthAlignment, heightAlignment);
+}
+
+void VideoCapabilities::applyMacroBlockLimits(
+        int32_t minHorizontalBlocks, int32_t minVerticalBlocks,
+        int32_t maxHorizontalBlocks, int32_t maxVerticalBlocks,
+        int32_t maxBlocks, int64_t maxBlocksPerSecond,
+        int32_t blockWidth, int32_t blockHeight,
+        int32_t widthAlignment, int32_t heightAlignment) {
+    applyAlignment(widthAlignment, heightAlignment);
+    applyBlockLimits(
+            blockWidth, blockHeight, Range((int32_t)1, maxBlocks),
+            Range((int64_t)1, maxBlocksPerSecond),
+            Range(Rational(1, maxVerticalBlocks), Rational(maxHorizontalBlocks, 1)));
+    mHorizontalBlockRange =
+            mHorizontalBlockRange.intersect(
+                    divUp(minHorizontalBlocks, (mBlockWidth / blockWidth)),
+                    maxHorizontalBlocks / (mBlockWidth / blockWidth));
+    mVerticalBlockRange =
+            mVerticalBlockRange.intersect(
+                    divUp(minVerticalBlocks, (mBlockHeight / blockHeight)),
+                    maxVerticalBlocks / (mBlockHeight / blockHeight));
+}
+
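+// Derives macroblock, macroblock-rate, bitrate and DPB limits from the declared
+// profile/level pairs for the codec's media type; unrecognized profiles or levels flag the
+// capabilities as unrecognized rather than failing outright.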
+void VideoCapabilities::applyLevelLimits() {
+    int64_t maxBlocksPerSecond = 0;
+    int32_t maxBlocks = 0;
+    int32_t maxBps = 0;
+    int32_t maxDPBBlocks = 0;
+
+    int errors = ERROR_CAPABILITIES_NONE_SUPPORTED;
+    const char *mediaType = mMediaType.c_str();
+    if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_AVC)) {
+        maxBlocks = 99;
+        maxBlocksPerSecond = 1485;
+        maxBps = 64000;
+        maxDPBBlocks = 396;
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            int32_t MBPS = 0, FS = 0, BR = 0, DPB = 0;
+            bool supported = true;
+            switch (profileLevel.mLevel) {
+                case AVCLevel1:
+                    MBPS =     1485; FS =     99; BR =     64; DPB =    396; break;
+                case AVCLevel1b:
+                    MBPS =     1485; FS =     99; BR =    128; DPB =    396; break;
+                case AVCLevel11:
+                    MBPS =     3000; FS =    396; BR =    192; DPB =    900; break;
+                case AVCLevel12:
+                    MBPS =     6000; FS =    396; BR =    384; DPB =   2376; break;
+                case AVCLevel13:
+                    MBPS =    11880; FS =    396; BR =    768; DPB =   2376; break;
+                case AVCLevel2:
+                    MBPS =    11880; FS =    396; BR =   2000; DPB =   2376; break;
+                case AVCLevel21:
+                    MBPS =    19800; FS =    792; BR =   4000; DPB =   4752; break;
+                case AVCLevel22:
+                    MBPS =    20250; FS =   1620; BR =   4000; DPB =   8100; break;
+                case AVCLevel3:
+                    MBPS =    40500; FS =   1620; BR =  10000; DPB =   8100; break;
+                case AVCLevel31:
+                    MBPS =   108000; FS =   3600; BR =  14000; DPB =  18000; break;
+                case AVCLevel32:
+                    MBPS =   216000; FS =   5120; BR =  20000; DPB =  20480; break;
+                case AVCLevel4:
+                    MBPS =   245760; FS =   8192; BR =  20000; DPB =  32768; break;
+                case AVCLevel41:
+                    MBPS =   245760; FS =   8192; BR =  50000; DPB =  32768; break;
+                case AVCLevel42:
+                    MBPS =   522240; FS =   8704; BR =  50000; DPB =  34816; break;
+                case AVCLevel5:
+                    MBPS =   589824; FS =  22080; BR = 135000; DPB = 110400; break;
+                case AVCLevel51:
+                    MBPS =   983040; FS =  36864; BR = 240000; DPB = 184320; break;
+                case AVCLevel52:
+                    MBPS =  2073600; FS =  36864; BR = 240000; DPB = 184320; break;
+                case AVCLevel6:
+                    MBPS =  4177920; FS = 139264; BR = 240000; DPB = 696320; break;
+                case AVCLevel61:
+                    MBPS =  8355840; FS = 139264; BR = 480000; DPB = 696320; break;
+                case AVCLevel62:
+                    MBPS = 16711680; FS = 139264; BR = 800000; DPB = 696320; break;
+                default:
+                    ALOGW("Unrecognized level %d for %s", profileLevel.mLevel, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
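+            // Editor's note: the MaxBR values above are in units of the per-profile
+            // cpbBrVclFactor (H.264 Table A-1): 1000 bit/s for Baseline/Main,
+            // 1250 for High, 3000 for High 10. Scale to bits/s accordingly.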
+            switch (profileLevel.mProfile) {
+                case AVCProfileConstrainedHigh:
+                case AVCProfileHigh:
+                    BR *= 1250; break;
+                case AVCProfileHigh10:
+                    BR *= 3000; break;
+                case AVCProfileExtended:
+                case AVCProfileHigh422:
+                case AVCProfileHigh444:
+                    ALOGW("Unsupported profile %d for %s", profileLevel.mProfile, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNSUPPORTED;
+                    supported = false;
+                    FALLTHROUGH_INTENDED;
+                    // fall through - treat as base profile
+                case AVCProfileConstrainedBaseline:
+                    FALLTHROUGH_INTENDED;
+                case AVCProfileBaseline:
+                    FALLTHROUGH_INTENDED;
+                case AVCProfileMain:
+                    BR *= 1000; break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+                    BR *= 1000;
+            }
+            if (supported) {
+                errors &= ~ERROR_CAPABILITIES_NONE_SUPPORTED;
+            }
+            maxBlocksPerSecond = std::max((int64_t)MBPS, maxBlocksPerSecond);
+            maxBlocks = std::max(FS, maxBlocks);
+            maxBps = std::max(BR, maxBps);
+            maxDPBBlocks = std::max(maxDPBBlocks, DPB);
+        }
+
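+        // Editor's note: bound each dimension assuming a maximum ~8:1 aspect ratio,
+        // i.e. w * h <= maxBlocks together with w <= 8 * h implies
+        // w <= sqrt(8 * maxBlocks). E.g. for AVCLevel51 (FS = 36864 MBs) this allows
+        // up to 543 16x16 MBs, roughly 8688 pixels, in either direction.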
+        int32_t maxLengthInBlocks = (int32_t)(std::sqrt(8 * maxBlocks));
+        applyMacroBlockLimits(
+                maxLengthInBlocks, maxLengthInBlocks,
+                maxBlocks, maxBlocksPerSecond,
+                16 /* blockWidth */, 16 /* blockHeight */,
+                1 /* widthAlignment */, 1 /* heightAlignment */);
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_MPEG2)) {
+        int32_t maxWidth = 11, maxHeight = 9, maxRate = 15;
+        maxBlocks = 99;
+        maxBlocksPerSecond = 1485;
+        maxBps = 64000;
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            int32_t MBPS = 0, FS = 0, BR = 0, FR = 0, W = 0, H = 0;
+            bool supported = true;
+            switch (profileLevel.mProfile) {
+                case MPEG2ProfileSimple:
+                    switch (profileLevel.mLevel) {
+                        case MPEG2LevelML:
+                            FR = 30; W = 45; H =  36; MBPS =  40500; FS =  1620; BR =  15000; break;
+                        default:
+                            ALOGW("Unrecognized profile/level %d/%d for %s",
+                                    profileLevel.mProfile, profileLevel.mLevel, mediaType);
+                            errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+                    }
+                    break;
+                case MPEG2ProfileMain:
+                    switch (profileLevel.mLevel) {
+                        case MPEG2LevelLL:
+                            FR = 30; W = 22; H =  18; MBPS =  11880; FS =   396; BR =  4000; break;
+                        case MPEG2LevelML:
+                            FR = 30; W = 45; H =  36; MBPS =  40500; FS =  1620; BR = 15000; break;
+                        case MPEG2LevelH14:
+                            FR = 60; W = 90; H =  68; MBPS = 183600; FS =  6120; BR = 60000; break;
+                        case MPEG2LevelHL:
+                            FR = 60; W = 120; H = 68; MBPS = 244800; FS =  8160; BR = 80000; break;
+                        case MPEG2LevelHP:
+                            FR = 60; W = 120; H = 68; MBPS = 489600; FS =  8160; BR = 80000; break;
+                        default:
+                            ALOGW("Unrecognized profile/level %d / %d for %s",
+                                    profileLevel.mProfile, profileLevel.mLevel, mediaType);
+                            errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+                    }
+                    break;
+                case MPEG2Profile422:
+                case MPEG2ProfileSNR:
+                case MPEG2ProfileSpatial:
+                case MPEG2ProfileHigh:
+                    ALOGW("Unsupported profile %d for %s", profileLevel.mProfile, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNSUPPORTED;
+                    supported = false;
+                    break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
+            if (supported) {
+                errors &= ~ERROR_CAPABILITIES_NONE_SUPPORTED;
+            }
+            maxBlocksPerSecond = std::max((int64_t)MBPS, maxBlocksPerSecond);
+            maxBlocks = std::max(FS, maxBlocks);
+            maxBps = std::max(BR * 1000, maxBps);
+            maxWidth = std::max(W, maxWidth);
+            maxHeight = std::max(H, maxHeight);
+            maxRate = std::max(FR, maxRate);
+        }
+        applyMacroBlockLimits(maxWidth, maxHeight,
+                maxBlocks, maxBlocksPerSecond,
+                16 /* blockWidth */, 16 /* blockHeight */,
+                1 /* widthAlignment */, 1 /* heightAlignment */);
+        mFrameRateRange = mFrameRateRange.intersect(12, maxRate);
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_MPEG4)) {
+        int32_t maxWidth = 11, maxHeight = 9, maxRate = 15;
+        maxBlocks = 99;
+        maxBlocksPerSecond = 1485;
+        maxBps = 64000;
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            int32_t MBPS = 0, FS = 0, BR = 0, FR = 0, W = 0, H = 0;
+            bool strict = false; // true: W, H and FR are individual max limits
+            bool supported = true;
+            switch (profileLevel.mProfile) {
+                case MPEG4ProfileSimple:
+                    switch (profileLevel.mLevel) {
+                        case MPEG4Level0:
+                            strict = true;
+                            FR = 15; W = 11; H =  9; MBPS =  1485; FS =  99; BR =  64; break;
+                        case MPEG4Level1:
+                            FR = 30; W = 11; H =  9; MBPS =  1485; FS =  99; BR =  64; break;
+                        case MPEG4Level0b:
+                            strict = true;
+                            FR = 15; W = 11; H =  9; MBPS =  1485; FS =  99; BR = 128; break;
+                        case MPEG4Level2:
+                            FR = 30; W = 22; H = 18; MBPS =  5940; FS = 396; BR = 128; break;
+                        case MPEG4Level3:
+                            FR = 30; W = 22; H = 18; MBPS = 11880; FS = 396; BR = 384; break;
+                        case MPEG4Level4a:
+                            FR = 30; W = 40; H = 30; MBPS = 36000; FS = 1200; BR = 4000; break;
+                        case MPEG4Level5:
+                            FR = 30; W = 45; H = 36; MBPS = 40500; FS = 1620; BR = 8000; break;
+                        case MPEG4Level6:
+                            FR = 30; W = 80; H = 45; MBPS = 108000; FS = 3600; BR = 12000; break;
+                        default:
+                            ALOGW("Unrecognized profile/level %d/%d for %s",
+                                    profileLevel.mProfile, profileLevel.mLevel, mediaType);
+                            errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+                    }
+                    break;
+                case MPEG4ProfileAdvancedSimple:
+                    switch (profileLevel.mLevel) {
+                        case MPEG4Level0:
+                        case MPEG4Level1:
+                            FR = 30; W = 11; H =  9; MBPS =  2970; FS =   99; BR =  128; break;
+                        case MPEG4Level2:
+                            FR = 30; W = 22; H = 18; MBPS =  5940; FS =  396; BR =  384; break;
+                        case MPEG4Level3:
+                            FR = 30; W = 22; H = 18; MBPS = 11880; FS =  396; BR =  768; break;
+                        case MPEG4Level3b:
+                            FR = 30; W = 22; H = 18; MBPS = 11880; FS =  396; BR = 1500; break;
+                        case MPEG4Level4:
+                            FR = 30; W = 44; H = 36; MBPS = 23760; FS =  792; BR = 3000; break;
+                        case MPEG4Level5:
+                            FR = 30; W = 45; H = 36; MBPS = 48600; FS = 1620; BR = 8000; break;
+                        default:
+                            ALOGW("Unrecognized profile/level %d/%d for %s",
+                                    profileLevel.mProfile, profileLevel.mLevel, mediaType);
+                            errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+                    }
+                    break;
+                case MPEG4ProfileMain:             // 2-4
+                case MPEG4ProfileNbit:             // 2
+                case MPEG4ProfileAdvancedRealTime: // 1-4
+                case MPEG4ProfileCoreScalable:     // 1-3
+                case MPEG4ProfileAdvancedCoding:   // 1-4
+                case MPEG4ProfileCore:             // 1-2
+                case MPEG4ProfileAdvancedCore:     // 1-4
+                case MPEG4ProfileSimpleScalable:   // 0-2
+                case MPEG4ProfileHybrid:           // 1-2
+
+                // Studio profiles are not supported by our codecs.
+
+                // Only profiles that can decode simple object types are considered.
+                // The following profiles are not able to.
+                case MPEG4ProfileBasicAnimated:    // 1-2
+                case MPEG4ProfileScalableTexture:  // 1
+                case MPEG4ProfileSimpleFace:       // 1-2
+                case MPEG4ProfileAdvancedScalable: // 1-3
+                case MPEG4ProfileSimpleFBA:        // 1-2
+                    ALOGV("Unsupported profile %d for %s", profileLevel.mProfile, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNSUPPORTED;
+                    supported = false;
+                    break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
+            if (supported) {
+                errors &= ~ERROR_CAPABILITIES_NONE_SUPPORTED;
+            }
+            maxBlocksPerSecond = std::max((int64_t)MBPS, maxBlocksPerSecond);
+            maxBlocks = std::max(FS, maxBlocks);
+            maxBps = std::max(BR * 1000, maxBps);
+            if (strict) {
+                maxWidth = std::max(W, maxWidth);
+                maxHeight = std::max(H, maxHeight);
+                maxRate = std::max(FR, maxRate);
+            } else {
+                // assuming max 60 fps frame rate and 1:2 aspect ratio
+                int32_t maxDim = (int32_t)std::sqrt(2 * FS);
+                maxWidth = std::max(maxDim, maxWidth);
+                maxHeight = std::max(maxDim, maxHeight);
+                maxRate = std::max(std::max(FR, 60), maxRate);
+            }
+        }
+        applyMacroBlockLimits(maxWidth, maxHeight,
+                maxBlocks, maxBlocksPerSecond,
+                16 /* blockWidth */, 16 /* blockHeight */,
+                1 /* widthAlignment */, 1 /* heightAlignment */);
+        mFrameRateRange = mFrameRateRange.intersect(12, maxRate);
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_H263)) {
+        int32_t maxWidth = 11, maxHeight = 9, maxRate = 15;
+        int32_t minWidth = maxWidth, minHeight = maxHeight;
+        int32_t minAlignment = 16;
+        maxBlocks = 99;
+        maxBlocksPerSecond = 1485;
+        maxBps = 64000;
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            int32_t MBPS = 0, BR = 0, FR = 0, W = 0, H = 0, minW = minWidth, minH = minHeight;
+            bool strict = false; // true: support only sQCIF, QCIF (maybe CIF)
+            switch (profileLevel.mLevel) {
+                case H263Level10:
+                    strict = true; // only supports sQCIF & QCIF
+                    FR = 15; W = 11; H =  9; BR =   1; MBPS =  W * H * FR; break;
+                case H263Level20:
+                    strict = true; // only supports sQCIF, QCIF & CIF
+                    FR = 30; W = 22; H = 18; BR =   2; MBPS =  W * H * 15; break;
+                case H263Level30:
+                    strict = true; // only supports sQCIF, QCIF & CIF
+                    FR = 30; W = 22; H = 18; BR =   6; MBPS =  W * H * FR; break;
+                case H263Level40:
+                    strict = true; // only supports sQCIF, QCIF & CIF
+                    FR = 30; W = 22; H = 18; BR =  32; MBPS =  W * H * FR; break;
+                case H263Level45:
+                    // only implies level 10 support
+                    strict = profileLevel.mProfile == H263ProfileBaseline
+                            || profileLevel.mProfile ==
+                                    H263ProfileBackwardCompatible;
+                    if (!strict) {
+                        minW = 1; minH = 1; minAlignment = 4;
+                    }
+                    FR = 15; W = 11; H =  9; BR =   2; MBPS =  W * H * FR; break;
+                case H263Level50:
+                    // only supports 50fps for H > 15
+                    minW = 1; minH = 1; minAlignment = 4;
+                    FR = 60; W = 22; H = 18; BR =  64; MBPS =  W * H * 50; break;
+                case H263Level60:
+                    // only supports 50fps for H > 15
+                    minW = 1; minH = 1; minAlignment = 4;
+                    FR = 60; W = 45; H = 18; BR = 128; MBPS =  W * H * 50; break;
+                case H263Level70:
+                    // only supports 50fps for H > 30
+                    minW = 1; minH = 1; minAlignment = 4;
+                    FR = 60; W = 45; H = 36; BR = 256; MBPS =  W * H * 50; break;
+                default:
+                    ALOGW("Unrecognized profile/level %d/%d for %s",
+                            profileLevel.mProfile, profileLevel.mLevel, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
+            switch (profileLevel.mProfile) {
+                case H263ProfileBackwardCompatible:
+                case H263ProfileBaseline:
+                case H263ProfileH320Coding:
+                case H263ProfileHighCompression:
+                case H263ProfileHighLatency:
+                case H263ProfileInterlace:
+                case H263ProfileInternet:
+                case H263ProfileISWV2:
+                case H263ProfileISWV3:
+                    break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
+            if (strict) {
+                // Strict levels define sub-QCIF min size and enumerated sizes. We cannot
+                // express support for "only sQCIF & QCIF (& CIF)" using VideoCapabilities
+                // but we can express "only QCIF (& CIF)", so set the minimum size at QCIF.
+                // minW = 8; minH = 6;
+                minW = 11; minH = 9;
+            } else {
+                // any support for non-strict levels (including unrecognized profiles or
+                // levels) allows custom frame size support beyond supported limits
+                // (other than bitrate)
+                mAllowMbOverride = true;
+            }
+            errors &= ~ERROR_CAPABILITIES_NONE_SUPPORTED;
+            maxBlocksPerSecond = std::max((int64_t)MBPS, maxBlocksPerSecond);
+            maxBlocks = std::max(W * H, maxBlocks);
+            maxBps = std::max(BR * 64000, maxBps);
+            maxWidth = std::max(W, maxWidth);
+            maxHeight = std::max(H, maxHeight);
+            maxRate = std::max(FR, maxRate);
+            minWidth = std::min(minW, minWidth);
+            minHeight = std::min(minH, minHeight);
+        }
+        // unless we encountered custom frame size support, limit size to QCIF and CIF
+        // using aspect ratio.
+        if (!mAllowMbOverride) {
+            mBlockAspectRatioRange =
+                Range(Rational(11, 9), Rational(11, 9));
+        }
+        applyMacroBlockLimits(
+                minWidth, minHeight,
+                maxWidth, maxHeight,
+                maxBlocks, maxBlocksPerSecond,
+                16 /* blockWidth */, 16 /* blockHeight */,
+                minAlignment /* widthAlignment */, minAlignment /* heightAlignment */);
+        mFrameRateRange = Range(1, maxRate);
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_VP8)) {
+        maxBlocks = INT_MAX;
+        maxBlocksPerSecond = INT_MAX;
+
+        // TODO: set to 100Mbps for now, need a number for VP8
+        maxBps = 100000000;
+
+        // profile levels are not indicative for VPx, but verify
+        // them nonetheless
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            switch (profileLevel.mLevel) {
+                case VP8Level_Version0:
+                case VP8Level_Version1:
+                case VP8Level_Version2:
+                case VP8Level_Version3:
+                    break;
+                default:
+                    ALOGW("Unrecognized level %d for %s", profileLevel.mLevel, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
+            switch (profileLevel.mProfile) {
+                case VP8ProfileMain:
+                    break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
+            errors &= ~ERROR_CAPABILITIES_NONE_SUPPORTED;
+        }
+
+        const int32_t blockSize = 16;
+        applyMacroBlockLimits(SHRT_MAX, SHRT_MAX,
+                maxBlocks, maxBlocksPerSecond, blockSize, blockSize,
+                1 /* widthAlignment */, 1 /* heightAlignment */);
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_VP9)) {
+        maxBlocksPerSecond = 829440;
+        maxBlocks = 36864;
+        maxBps = 200000;
+        int32_t maxDim = 512;
+
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            int64_t SR = 0; // luma sample rate
+            int32_t FS = 0;  // luma picture size
+            int32_t BR = 0;  // bit rate kbps
+            int32_t D = 0;   // luma dimension
+            switch (profileLevel.mLevel) {
+                case VP9Level1:
+                    SR =      829440; FS =    36864; BR =    200; D =   512; break;
+                case VP9Level11:
+                    SR =     2764800; FS =    73728; BR =    800; D =   768; break;
+                case VP9Level2:
+                    SR =     4608000; FS =   122880; BR =   1800; D =   960; break;
+                case VP9Level21:
+                    SR =     9216000; FS =   245760; BR =   3600; D =  1344; break;
+                case VP9Level3:
+                    SR =    20736000; FS =   552960; BR =   7200; D =  2048; break;
+                case VP9Level31:
+                    SR =    36864000; FS =   983040; BR =  12000; D =  2752; break;
+                case VP9Level4:
+                    SR =    83558400; FS =  2228224; BR =  18000; D =  4160; break;
+                case VP9Level41:
+                    SR =   160432128; FS =  2228224; BR =  30000; D =  4160; break;
+                case VP9Level5:
+                    SR =   311951360; FS =  8912896; BR =  60000; D =  8384; break;
+                case VP9Level51:
+                    SR =   588251136; FS =  8912896; BR = 120000; D =  8384; break;
+                case VP9Level52:
+                    SR =  1176502272; FS =  8912896; BR = 180000; D =  8384; break;
+                case VP9Level6:
+                    SR =  1176502272; FS = 35651584; BR = 180000; D = 16832; break;
+                case VP9Level61:
+                    SR = 2353004544L; FS = 35651584; BR = 240000; D = 16832; break;
+                case VP9Level62:
+                    SR = 4706009088L; FS = 35651584; BR = 480000; D = 16832; break;
+                default:
+                    ALOGW("Unrecognized level %d for %s", profileLevel.mLevel, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
+            switch (profileLevel.mProfile) {
+                case VP9Profile0:
+                case VP9Profile1:
+                case VP9Profile2:
+                case VP9Profile3:
+                case VP9Profile2HDR:
+                case VP9Profile3HDR:
+                case VP9Profile2HDR10Plus:
+                case VP9Profile3HDR10Plus:
+                    break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
+            errors &= ~ERROR_CAPABILITIES_NONE_SUPPORTED;
+            maxBlocksPerSecond = std::max(SR, maxBlocksPerSecond);
+            maxBlocks = std::max(FS, maxBlocks);
+            maxBps = std::max(BR * 1000, maxBps);
+            maxDim = std::max(D, maxDim);
+        }
+
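+        // Editor's note: the VP9 level table above is in luma samples; convert to
+        // 8x8 blocks before applying the limits. E.g. VP9Level5: FS = 8912896 samples
+        // -> 8912896 / 64 = 139264 blocks, and D = 8384 -> divUp(8384, 8) = 1048 blocks.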
+        const int32_t blockSize = 8;
+        int32_t maxLengthInBlocks = divUp(maxDim, blockSize);
+        maxBlocks = divUp(maxBlocks, blockSize * blockSize);
+        maxBlocksPerSecond = divUp(maxBlocksPerSecond, blockSize * (int64_t)blockSize);
+
+        applyMacroBlockLimits(
+                maxLengthInBlocks, maxLengthInBlocks,
+                maxBlocks, maxBlocksPerSecond,
+                blockSize, blockSize,
+                1 /* widthAlignment */, 1 /* heightAlignment */);
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_HEVC)) {
+        // CTBs are at least 8x8 so use 8x8 block size
+        maxBlocks = 36864 >> 6; // 192x192 pixels == 576 8x8 blocks
+        maxBlocksPerSecond = maxBlocks * 15;
+        maxBps = 128000;
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            double FR = 0;
+            int32_t FS = 0, BR = 0;
+            switch (profileLevel.mLevel) {
+                /* The HEVC spec talks only in a very convoluted manner about the
+                    existence of levels 1-3.1 for High tier, which could also be
+                    understood as 'decoders and encoders should treat these levels
+                    as if they were Main tier', so we do that. */
+                case HEVCMainTierLevel1:
+                case HEVCHighTierLevel1:
+                    FR =    15; FS =    36864; BR =    128; break;
+                case HEVCMainTierLevel2:
+                case HEVCHighTierLevel2:
+                    FR =    30; FS =   122880; BR =   1500; break;
+                case HEVCMainTierLevel21:
+                case HEVCHighTierLevel21:
+                    FR =    30; FS =   245760; BR =   3000; break;
+                case HEVCMainTierLevel3:
+                case HEVCHighTierLevel3:
+                    FR =    30; FS =   552960; BR =   6000; break;
+                case HEVCMainTierLevel31:
+                case HEVCHighTierLevel31:
+                    FR = 33.75; FS =   983040; BR =  10000; break;
+                case HEVCMainTierLevel4:
+                    FR =    30; FS =  2228224; BR =  12000; break;
+                case HEVCHighTierLevel4:
+                    FR =    30; FS =  2228224; BR =  30000; break;
+                case HEVCMainTierLevel41:
+                    FR =    60; FS =  2228224; BR =  20000; break;
+                case HEVCHighTierLevel41:
+                    FR =    60; FS =  2228224; BR =  50000; break;
+                case HEVCMainTierLevel5:
+                    FR =    30; FS =  8912896; BR =  25000; break;
+                case HEVCHighTierLevel5:
+                    FR =    30; FS =  8912896; BR = 100000; break;
+                case HEVCMainTierLevel51:
+                    FR =    60; FS =  8912896; BR =  40000; break;
+                case HEVCHighTierLevel51:
+                    FR =    60; FS =  8912896; BR = 160000; break;
+                case HEVCMainTierLevel52:
+                    FR =   120; FS =  8912896; BR =  60000; break;
+                case HEVCHighTierLevel52:
+                    FR =   120; FS =  8912896; BR = 240000; break;
+                case HEVCMainTierLevel6:
+                    FR =    30; FS = 35651584; BR =  60000; break;
+                case HEVCHighTierLevel6:
+                    FR =    30; FS = 35651584; BR = 240000; break;
+                case HEVCMainTierLevel61:
+                    FR =    60; FS = 35651584; BR = 120000; break;
+                case HEVCHighTierLevel61:
+                    FR =    60; FS = 35651584; BR = 480000; break;
+                case HEVCMainTierLevel62:
+                    FR =   120; FS = 35651584; BR = 240000; break;
+                case HEVCHighTierLevel62:
+                    FR =   120; FS = 35651584; BR = 800000; break;
+                default:
+                    ALOGW("Unrecognized level %d for %s", profileLevel.mLevel, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
+            switch (profileLevel.mProfile) {
+                case HEVCProfileMain:
+                case HEVCProfileMain10:
+                case HEVCProfileMainStill:
+                case HEVCProfileMain10HDR10:
+                case HEVCProfileMain10HDR10Plus:
+                    break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
+
+            /* DPB logic:
+            if      (width * height <= FS / 4)    DPB = 16;
+            else if (width * height <= FS / 2)    DPB = 12;
+            else if (width * height <= FS * 0.75) DPB = 8;
+            else                                  DPB = 6;
+            */
+
+            FS >>= 6; // convert pixels to blocks
+            errors &= ~ERROR_CAPABILITIES_NONE_SUPPORTED;
+            maxBlocksPerSecond = std::max((int64_t)(FR * FS), maxBlocksPerSecond);
+            maxBlocks = std::max(FS, maxBlocks);
+            maxBps = std::max(1000 * BR, maxBps);
+        }
+
+        int32_t maxLengthInBlocks = (int32_t)(std::sqrt(8 * maxBlocks));
+        applyMacroBlockLimits(
+                maxLengthInBlocks, maxLengthInBlocks,
+                maxBlocks, maxBlocksPerSecond,
+                8 /* blockWidth */, 8 /* blockHeight */,
+                1 /* widthAlignment */, 1 /* heightAlignment */);
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_AV1)) {
+        maxBlocksPerSecond = 829440;
+        maxBlocks = 36864;
+        maxBps = 200000;
+        int32_t maxDim = 512;
+
+        // Sample rate, Picture Size, Bit rate and luma dimension for AV1 Codec,
+        // corresponding to the definitions in
+        // "AV1 Bitstream & Decoding Process Specification", Annex A
+        // found at https://aomedia.org/av1-bitstream-and-decoding-process-specification/
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            int64_t SR = 0; // luma sample rate
+            int32_t FS = 0;  // luma picture size
+            int32_t BR = 0;  // bit rate kbps
+            int32_t D = 0;   // luma dimension
+            switch (profileLevel.mLevel) {
+                case AV1Level2:
+                    SR =     5529600; FS =   147456; BR =   1500; D =  2048; break;
+                case AV1Level21:
+                case AV1Level22:
+                case AV1Level23:
+                    SR =    10454400; FS =   278784; BR =   3000; D =  2816; break;
+
+                case AV1Level3:
+                    SR =    24969600; FS =   665856; BR =   6000; D =  4352; break;
+                case AV1Level31:
+                case AV1Level32:
+                case AV1Level33:
+                    SR =    39938400; FS =  1065024; BR =  10000; D =  5504; break;
+
+                case AV1Level4:
+                    SR =    77856768; FS =  2359296; BR =  12000; D =  6144; break;
+                case AV1Level41:
+                case AV1Level42:
+                case AV1Level43:
+                    SR =   155713536; FS =  2359296; BR =  20000; D =  6144; break;
+
+                case AV1Level5:
+                    SR =   273715200; FS =  8912896; BR =  30000; D =  8192; break;
+                case AV1Level51:
+                    SR =   547430400; FS =  8912896; BR =  40000; D =  8192; break;
+                case AV1Level52:
+                    SR =  1094860800; FS =  8912896; BR =  60000; D =  8192; break;
+                case AV1Level53:
+                    SR =  1176502272; FS =  8912896; BR =  60000; D =  8192; break;
+
+                case AV1Level6:
+                    SR =  1176502272; FS = 35651584; BR =  60000; D = 16384; break;
+                case AV1Level61:
+                    SR = 2189721600L; FS = 35651584; BR = 100000; D = 16384; break;
+                case AV1Level62:
+                    SR = 4379443200L; FS = 35651584; BR = 160000; D = 16384; break;
+                case AV1Level63:
+                    SR = 4706009088L; FS = 35651584; BR = 160000; D = 16384; break;
+
+                default:
+                    ALOGW("Unrecognized level %d for %s", profileLevel.mLevel, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
+            switch (profileLevel.mProfile) {
+                case AV1ProfileMain8:
+                case AV1ProfileMain10:
+                case AV1ProfileMain10HDR10:
+                case AV1ProfileMain10HDR10Plus:
+                    break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile, mediaType);
+                    errors |= ERROR_CAPABILITIES_UNRECOGNIZED;
+            }
+            errors &= ~ERROR_CAPABILITIES_NONE_SUPPORTED;
+            maxBlocksPerSecond = std::max(SR, maxBlocksPerSecond);
+            maxBlocks = std::max(FS, maxBlocks);
+            maxBps = std::max(BR * 1000, maxBps);
+            maxDim = std::max(D, maxDim);
+        }
+
+        const int32_t blockSize = 8;
+        int32_t maxLengthInBlocks = divUp(maxDim, blockSize);
+        maxBlocks = divUp(maxBlocks, blockSize * blockSize);
+        maxBlocksPerSecond = divUp(maxBlocksPerSecond, blockSize * (int64_t)blockSize);
+        applyMacroBlockLimits(
+                maxLengthInBlocks, maxLengthInBlocks,
+                maxBlocks, maxBlocksPerSecond,
+                blockSize, blockSize,
+                1 /* widthAlignment */, 1 /* heightAlignment */);
+    } else {
+        ALOGW("Unsupported mime %s", mediaType);
+        // using minimal bitrate here.  should be overridden by
+        // info from media_codecs.xml
+        maxBps = 64000;
+        errors |= ERROR_CAPABILITIES_UNSUPPORTED;
+    }
+    mBitrateRange = Range(1, maxBps);
+    mError |= errors;
+}
+
+}  // namespace android
\ No newline at end of file
diff --git a/media/libmedia/include/media/AudioCapabilities.h b/media/libmedia/include/media/AudioCapabilities.h
new file mode 100644
index 0000000..3e1c9df
--- /dev/null
+++ b/media/libmedia/include/media/AudioCapabilities.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef AUDIO_CAPABILITIES_H_
+
+#define AUDIO_CAPABILITIES_H_
+
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <system/audio.h>
+
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+struct AudioCapabilities {
+    /**
+     * Create AudioCapabilities.
+     */
+    static std::shared_ptr<AudioCapabilities> Create(std::string mediaType,
+            std::vector<ProfileLevel> profLevs, const sp<AMessage> &format);
+
+    /**
+     * Returns the range of supported bitrates in bits/second.
+     */
+    const Range<int32_t>& getBitrateRange() const;
+
+    /**
+     * Returns the array of supported sample rates if the codec
+     * supports only discrete values. Otherwise, it returns an empty array.
+     * The array is sorted in ascending order.
+     */
+    const std::vector<int32_t>& getSupportedSampleRates() const;
+
+    /**
+     * Returns the array of supported sample rate ranges.  The
+     * array is sorted in ascending order, and the ranges are
+     * distinct.
+     */
+    const std::vector<Range<int32_t>>& getSupportedSampleRateRanges() const;
+
+    /**
+     * Returns the maximum number of input channels supported.
+     * The returned value should be between 1 and 255.
+     *
+     * Through {@link android.os.Build.VERSION_CODES#R}, this method indicated support
+     * for any number of input channels between 1 and this maximum value.
+     *
+     * As of {@link android.os.Build.VERSION_CODES#S},
+     * the implied lower limit of 1 channel is no longer valid.
+     * As of {@link android.os.Build.VERSION_CODES#S}, {@link #getMaxInputChannelCount} is
+     * superseded by {@link #getInputChannelCountRanges},
+     * which returns an array of ranges of channels.
+     * The {@link #getMaxInputChannelCount} method will return the highest value
+     * in the ranges returned by {@link #getInputChannelCountRanges}
+     */
+    int32_t getMaxInputChannelCount() const;
+
+    /**
+     * Returns the minimum number of input channels supported.
+     * This is often 1, but does vary for certain mime types.
+     *
+     * This returns the lowest channel count in the ranges returned by
+     * {@link #getInputChannelCountRanges}.
+     */
+    int32_t getMinInputChannelCount() const;
+
+    /**
+     * Returns an array of ranges representing the number of input channels supported.
+     * The codec supports any number of input channels within this range.
+     *
+     * This supersedes the {@link #getMaxInputChannelCount} method.
+     *
+     * For many codecs, this will be a single range [1..N], for some N.
+     *
+     * The returned array cannot be empty.
+     */
+    const std::vector<Range<int32_t>>& getInputChannelCountRanges() const;
+
+    /**
+     * Query whether the sample rate is supported by the codec.
+     */
+    bool isSampleRateSupported(int32_t sampleRate);
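+
+    // Illustrative use (editor's sketch; `caps` stands for a value returned by
+    // AudioCapabilities::Create()):
+    //     if (caps->isSampleRateSupported(48000)
+    //             && caps->getBitrateRange().contains(128000)) {
+    //         // 48 kHz at 128 kbit/s is within the advertised capabilities.
+    //     }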
+
+    /* For internal use only. Not exposed as a public API */
+    void getDefaultFormat(sp<AMessage> &format);
+
+    /* For internal use only. Not exposed as a public API */
+    bool supportsFormat(const sp<AMessage> &format);
+
+private:
+    static constexpr int32_t MAX_INPUT_CHANNEL_COUNT = 30;
+    static constexpr uint32_t MAX_NUM_CHANNELS = FCC_LIMIT;
+
+    int mError;
+    std::string mMediaType;
+    std::vector<ProfileLevel> mProfileLevels;
+
+    Range<int32_t> mBitrateRange;
+
+    std::vector<int32_t> mSampleRates;
+    std::vector<Range<int32_t>> mSampleRateRanges;
+    std::vector<Range<int32_t>> mInputChannelRanges;
+
+    /* no public constructor */
+    AudioCapabilities() {}
+    void init(std::string mediaType, std::vector<ProfileLevel> profLevs,
+            const sp<AMessage> &format);
+    void initWithPlatformLimits();
+    bool supports(std::optional<int32_t> sampleRate, std::optional<int32_t> inputChannels);
+    void limitSampleRates(std::vector<int32_t> rates);
+    void createDiscreteSampleRates();
+    void limitSampleRates(std::vector<Range<int32_t>> rateRanges);
+    void applyLevelLimits();
+    void applyLimits(const std::vector<Range<int32_t>> &inputChannels,
+            const std::optional<Range<int32_t>> &bitRates);
+    void parseFromInfo(const sp<AMessage> &format);
+
+    friend struct CodecCapabilities;
+};
+
+}  // namespace android
+
+#endif // AUDIO_CAPABILITIES_H_
\ No newline at end of file
diff --git a/media/libmedia/include/media/CodecCapabilities.h b/media/libmedia/include/media/CodecCapabilities.h
new file mode 100644
index 0000000..0611d8c
--- /dev/null
+++ b/media/libmedia/include/media/CodecCapabilities.h
@@ -0,0 +1,279 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC_CAPABILITIES_H_
+
+#define CODEC_CAPABILITIES_H_
+
+#include <media/AudioCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/EncoderCapabilities.h>
+#include <media/VideoCapabilities.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/MediaCodecConstants.h>
+
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <utils/Vector.h>
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+struct CodecCapabilities {
+
+    static bool SupportsBitrate(Range<int32_t> bitrateRange,
+            const sp<AMessage> &format);
+
+    /**
+     * Retrieve the codec capabilities for a certain {@code mime type}, {@code
+     * profile} and {@code level}.  If the type, or profile-level combination
+     * is not understood by the framework, it returns null.
+     * <p class=note> In {@link android.os.Build.VERSION_CODES#M}, calling this
+     * method without calling any method of the {@link MediaCodecList} class beforehand
+     * results in a {@link NullPointerException}.</p>
+     */
+    static std::shared_ptr<CodecCapabilities> CreateFromProfileLevel(std::string mediaType,
+                int32_t profile, int32_t level, int32_t maxConcurrentInstances = -1);
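+    // Illustrative call (editor's sketch; the constants come from
+    // MediaCodecConstants.h):
+    //     auto caps = CodecCapabilities::CreateFromProfileLevel(
+    //             MIMETYPE_VIDEO_AVC, AVCProfileHigh, AVCLevel41);
+    //     if (caps == nullptr) { /* combination not understood by the framework */ }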
+
+    CodecCapabilities() {};
+
+    /**
+     * Init CodecCapabilities with settings.
+     */
+    void init(std::vector<ProfileLevel> profLevs, std::vector<uint32_t> colFmts, bool encoder,
+            sp<AMessage> &defaultFormat, sp<AMessage> &capabilitiesInfo,
+            int32_t maxConcurrentInstances = 0);
+
+    /**
+     * Returns the media type for which this codec-capability object was created.
+     */
+    const std::string& getMediaType();
+
+    /**
+     * Returns the supported profile levels.
+     */
+    const std::vector<ProfileLevel>& getProfileLevels();
+
+    /**
+     * Returns the supported color formats.
+     */
+    std::vector<uint32_t> getColorFormats() const;
+
+    /**
+     * Returns a media format with default values for configurations that have defaults.
+     */
+    sp<AMessage> getDefaultFormat() const;
+
+    /**
+     * Returns the max number of the supported concurrent codec instances.
+     * <p>
+     * This is a hint for an upper bound. Applications should not expect to successfully
+     * operate more instances than the returned value, but the actual number of
+     * concurrently operable instances may be less as it depends on the available
+     * resources at time of use.
+     */
+    int32_t getMaxSupportedInstances() const;
+
+    /**
+     * Returns the audio capabilities or {@code null} if this is not an audio codec.
+     */
+    std::shared_ptr<AudioCapabilities> getAudioCapabilities() const;
+
+    /**
+     * Returns the video capabilities or {@code null} if this is not a video codec.
+     */
+    std::shared_ptr<VideoCapabilities> getVideoCapabilities() const;
+
+    /**
+     * Returns the encoding capabilities or {@code null} if this is not an encoder.
+     */
+    std::shared_ptr<EncoderCapabilities> getEncoderCapabilities() const;
+
+    std::vector<std::string> validFeatures() const;
+
+    /**
+     * Query codec feature capabilities.
+     * <p>
+     * These features are supported to be used by the codec.  These
+     * include optional features that can be turned on, as well as
+     * features that are always on.
+     */
+    bool isFeatureSupported(const std::string &name) const;
+
+    /**
+     * Query codec feature requirements.
+     * <p>
+     * These features are required to be used by the codec, and as such,
+     * they are always turned on.
+     */
+    bool isFeatureRequired(const std::string &name) const;
+
+    bool isRegular() const;
+
+    /**
+    * Query whether codec supports a given {@link MediaFormat}.
+    *
+    * <p class=note>
+    * <strong>Note:</strong> On {@link android.os.Build.VERSION_CODES#LOLLIPOP},
+    * {@code format} must not contain a {@linkplain MediaFormat#KEY_FRAME_RATE
+    * frame rate}. Use
+    * <code class=prettyprint>format.setString(MediaFormat.KEY_FRAME_RATE, null)</code>
+    * to clear any existing frame rate setting in the format.
+    * <p>
+    *
+    * The following table summarizes the format keys considered by this method.
+    * This is especially important to consider when targeting a higher SDK version than the
+    * minimum SDK version, as this method will disregard some keys on devices below the target
+    * SDK version.
+    *
+    * <table style="width: 0%">
+    *  <thead>
+    *   <tr>
+    *    <th rowspan=3>OS Version(s)</th>
    *    <th colspan=3>{@code MediaFormat} keys considered for</th>
+    *   </tr><tr>
+    *    <th>Audio Codecs</th>
+    *    <th>Video Codecs</th>
+    *    <th>Encoders</th>
+    *   </tr>
+    *  </thead>
+    *  <tbody>
+    *   <tr>
+    *    <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP}</td>
+    *    <td rowspan=3>{@link MediaFormat#KEY_MIME}<sup>*</sup>,<br>
+    *        {@link MediaFormat#KEY_SAMPLE_RATE},<br>
+    *        {@link MediaFormat#KEY_CHANNEL_COUNT},</td>
+    *    <td>{@link MediaFormat#KEY_MIME}<sup>*</sup>,<br>
+    *        {@link CodecCapabilities#FEATURE_AdaptivePlayback}<sup>D</sup>,<br>
+    *        {@link CodecCapabilities#FEATURE_SecurePlayback}<sup>D</sup>,<br>
+    *        {@link CodecCapabilities#FEATURE_TunneledPlayback}<sup>D</sup>,<br>
+    *        {@link MediaFormat#KEY_WIDTH},<br>
+    *        {@link MediaFormat#KEY_HEIGHT},<br>
+    *        <strong>no</strong> {@code KEY_FRAME_RATE}</td>
+    *    <td rowspan=10>as to the left, plus<br>
+    *        {@link MediaFormat#KEY_BITRATE_MODE},<br>
+    *        {@link MediaFormat#KEY_PROFILE}
+    *        (and/or {@link MediaFormat#KEY_AAC_PROFILE}<sup>~</sup>),<br>
+    *        <!-- {link MediaFormat#KEY_QUALITY},<br> -->
+    *        {@link MediaFormat#KEY_COMPLEXITY}
+    *        (and/or {@link MediaFormat#KEY_FLAC_COMPRESSION_LEVEL}<sup>~</sup>)</td>
+    *   </tr><tr>
+    *    <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP_MR1}</td>
+    *    <td rowspan=2>as above, plus<br>
+    *        {@link MediaFormat#KEY_FRAME_RATE}</td>
+    *   </tr><tr>
+    *    <td>{@link android.os.Build.VERSION_CODES#M}</td>
+    *   </tr><tr>
+    *    <td>{@link android.os.Build.VERSION_CODES#N}</td>
+    *    <td rowspan=2>as above, plus<br>
+    *        {@link MediaFormat#KEY_PROFILE},<br>
+    *        <!-- {link MediaFormat#KEY_MAX_BIT_RATE},<br> -->
+    *        {@link MediaFormat#KEY_BIT_RATE}</td>
+    *    <td rowspan=2>as above, plus<br>
+    *        {@link MediaFormat#KEY_PROFILE},<br>
+    *        {@link MediaFormat#KEY_LEVEL}<sup>+</sup>,<br>
+    *        <!-- {link MediaFormat#KEY_MAX_BIT_RATE},<br> -->
+    *        {@link MediaFormat#KEY_BIT_RATE},<br>
+    *        {@link CodecCapabilities#FEATURE_IntraRefresh}<sup>E</sup></td>
+    *   </tr><tr>
+    *    <td>{@link android.os.Build.VERSION_CODES#N_MR1}</td>
+    *   </tr><tr>
+    *    <td>{@link android.os.Build.VERSION_CODES#O}</td>
+    *    <td rowspan=3 colspan=2>as above, plus<br>
+    *        {@link CodecCapabilities#FEATURE_PartialFrame}<sup>D</sup></td>
+    *   </tr><tr>
+    *    <td>{@link android.os.Build.VERSION_CODES#O_MR1}</td>
+    *   </tr><tr>
+    *    <td>{@link android.os.Build.VERSION_CODES#P}</td>
+    *   </tr><tr>
+    *    <td>{@link android.os.Build.VERSION_CODES#Q}</td>
+    *    <td colspan=2>as above, plus<br>
+    *        {@link CodecCapabilities#FEATURE_FrameParsing}<sup>D</sup>,<br>
+    *        {@link CodecCapabilities#FEATURE_MultipleFrames},<br>
+    *        {@link CodecCapabilities#FEATURE_DynamicTimestamp}</td>
+    *   </tr><tr>
+    *    <td>{@link android.os.Build.VERSION_CODES#R}</td>
+    *    <td colspan=2>as above, plus<br>
+    *        {@link CodecCapabilities#FEATURE_LowLatency}<sup>D</sup></td>
+    *   </tr>
+    *   <tr>
+    *    <td colspan=4>
+    *     <p class=note><strong>Notes:</strong><br>
+    *      *: must be specified; otherwise, method returns {@code false}.<br>
+    *      +: method does not verify that the format parameters are supported
+    *      by the specified level.<br>
+    *      D: decoders only<br>
+    *      E: encoders only<br>
+    *      ~: if both keys are provided values must match
+    *    </td>
+    *   </tr>
+    *  </tbody>
+    * </table>
+    *
+    * @param format media format with optional feature directives.
+    * @return whether the codec capabilities support the given format
+    *         and feature requests.
+    */
+    bool isFormatSupported(const sp<AMessage> &format) const;
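+
+    // Illustrative check (editor's sketch; `caps` is assumed to be a valid
+    // CodecCapabilities pointer): build an AMessage with the keys above and probe it.
+    //     sp<AMessage> fmt = new AMessage;
+    //     fmt->setString("mime", "video/avc");
+    //     fmt->setInt32("width", 1920);
+    //     fmt->setInt32("height", 1080);
+    //     bool ok = caps->isFormatSupported(fmt);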
+
+    /**
+     * If the CodecCapabilities contains an AudioCapabilities.
+     *
+     * Not a public API to users.
+     */
+    bool isAudio() const;
+
+    /**
+     * If the CodecCapabilities contains a VideoCapabilities.
+     *
+     * Not a public API to users.
+     */
+    bool isVideo() const;
+
+    /**
+     * If the CodecCapabilities contains an EncoderCapabilities.
+     *
+     * Not a public API to users.
+     */
+    bool isEncoder() const;
+
+private:
+    std::string mMediaType;
+    std::vector<ProfileLevel> mProfileLevels;
+    std::vector<uint32_t> mColorFormats;
+    int32_t mMaxSupportedInstances;
+
+    sp<AMessage> mDefaultFormat;
+    sp<AMessage> mCapabilitiesInfo;
+
+    // Features
+    std::set<std::string> mFeaturesSupported;
+    std::set<std::string> mFeaturesRequired;
+
+    std::shared_ptr<AudioCapabilities> mAudioCaps;
+    std::shared_ptr<VideoCapabilities> mVideoCaps;
+    std::shared_ptr<EncoderCapabilities> mEncoderCaps;
+
+    bool supportsProfileLevel(int32_t profile, int32_t level) const;
+    std::vector<Feature> getValidFeatures() const;
+    int32_t getErrors() const;
+};
+
+}  // namespace android
+
+#endif // CODEC_CAPABILITIES_H_
\ No newline at end of file
diff --git a/media/libmedia/include/media/CodecCapabilitiesUtils.h b/media/libmedia/include/media/CodecCapabilitiesUtils.h
new file mode 100644
index 0000000..eb62bf9
--- /dev/null
+++ b/media/libmedia/include/media/CodecCapabilitiesUtils.h
@@ -0,0 +1,727 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC_CAPABILITIES__UTILS_H_
+
+#define CODEC_CAPABILITIES__UTILS_H_
+
+#include <algorithm>
+#include <cerrno>
+#include <cmath>
+#include <cstdlib>
+#include <numeric>
+#include <optional>
+#include <regex>
+#include <string>
+#include <vector>
+
+#include <utils/Log.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+
+namespace android {
+
+struct ProfileLevel {
+    uint32_t mProfile;
+    uint32_t mLevel;
+    bool operator <(const ProfileLevel &o) const {
+        return mProfile < o.mProfile || (mProfile == o.mProfile && mLevel < o.mLevel);
+    }
+};
+
+struct Feature {
+    std::string mName;
+    int mValue;
+    bool mDefault;
+    bool mInternal;
+    Feature(std::string name, int value, bool def, bool internal) {
+        mName = name;
+        mValue = value;
+        mDefault = def;
+        mInternal = internal;
+    }
+    Feature(std::string name, int value, bool def) :
+        Feature(name, value, def, false /* internal */) {}
+};
+
+/**
+ * Immutable class for describing the range of two numeric values.
+ *
+ * All data members are private; operations return new Range objects rather than
+ * mutating this one.
+ *
+ * From frameworks/base/core/java/android/util/Range.java
+ */
+template<typename T>
+struct Range {
+    Range() : lower_(), upper_() {}
+
+    Range(T l, T u) : lower_(l), upper_(u) {}
+
+    constexpr bool empty() const { return lower_ > upper_; }
+
+    T lower() const { return lower_; }
+
+    T upper() const { return upper_; }
+
+    // Check if a value is in the range.
+    bool contains(T value) const {
+        return lower_ <= value && upper_ >= value;
+    }
+
+    bool contains(Range<T> range) const {
+        return (range.lower_ >= lower_) && (range.upper_ <= upper_);
+    }
+
+    // Clamp a value in the range
+    T clamp(T value) const{
+        if (value < lower_) {
+            return lower_;
+        } else if (value > upper_) {
+            return upper_;
+        } else {
+            return value;
+        }
+    }
+
+    // Return the intersected range
+    Range<T> intersect(Range<T> range) const {
+        if (lower_ >= range.lower() && range.upper() >= upper_) {
+            // range includes this
+            return *this;
+        } else if (range.lower() >= lower_ && range.upper() <= upper_) {
+            // this includes range
+            return range;
+        } else {
+            // if ranges are disjoint returns an empty Range(lower > upper)
+            Range<T> result = Range<T>(std::max(lower_, range.lower_),
+                    std::min(upper_, range.upper_));
+            if (result.empty()) {
+                ALOGE("Failed to intersect 2 ranges as they are disjoint");
+            }
+            return result;
+        }
+    }
+
+    /**
+     * Returns the intersection of this range and the inclusive range
+     * specified by {@code [lower, upper]}.
+     * <p>
+     * See {@link #intersect(Range)} for more details.</p>
+     *
+     * @param lower a non-{@code null} {@code T} reference
+     * @param upper a non-{@code null} {@code T} reference
+     * @return the intersection of this range and the other range
+     *
+     * @throws NullPointerException if {@code lower} or {@code upper} was {@code null}
+     * @throws IllegalArgumentException if the ranges are disjoint.
+     */
+    Range<T> intersect(T lower, T upper) {
+        return Range(std::max(lower_, lower), std::min(upper_, upper));
+    }
+
+    /**
+     * Returns the smallest range that includes this range and
+     * another range.
+     *
+     * E.g. if a < b < c < d, the
+     * extension of [a, c] and [b, d] ranges is [a, d].
+     * As the endpoints are object references, there is no guarantee
+     * which specific endpoint reference is used from the input ranges:
+     *
+     * E.g. if a == a' < b < c, the
+     * extension of [a, b] and [a', c] ranges could be either
+     * [a, c] or ['a, c], where ['a, c] could be either the exact
+     * input range, or a newly created range with the same endpoints.
+     *
+     * @param range a non-null Range<T> reference
+     * @return the extension of this range and the other range.
+     */
+    Range<T> extend(Range<T> range) {
+        return Range<T>(std::min(lower_, range.lower_), std::max(upper_, range.upper_));
+    }
+
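+    // Rounds the bounds to multiples of `align`: the lower bound up, the upper bound
+    // down. E.g. for integral T, Range(10, 100).align(16) yields [16, 96]
+    // (editor's example).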
+    Range<T> align(T align) {
+        return this->intersect(
+                divUp(lower_, align) * align, (upper_ / align) * align);
+    }
+
+    Range<T> factor(T factor) {
+        if (factor == 1) {
+            return *this;
+        }
+        return Range(divUp(this->lower(), factor), this->upper() / factor);
+    }
+
+    // parse a string into a range
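+    // Accepted forms (editor's examples): "8000-96000" -> [8000, 96000], or a single
+    // value such as "44100" -> [44100, 44100]; anything else, or a value outside the
+    // numeric limits of T, yields std::nullopt.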
+    static std::optional<Range<T>> Parse(const std::string &str) {
+        if (str.empty()) {
+            ALOGW("could not parse empty integer range");
+            return std::nullopt;
+        }
+        long long lower, upper;
+        std::regex regex("^([0-9]+)-([0-9]+)$");
+        std::smatch match;
+        errno = 0;
+        if (std::regex_match(str, match, regex)) {
+            lower = std::strtoll(match[1].str().c_str(), NULL, 10);
+            upper = std::strtoll(match[2].str().c_str(), NULL, 10);
+        } else {
+            char *end;
+            lower = upper = std::strtoll(str.c_str(), &end, 10);
+            if (*end != '\0') {
+                ALOGW("could not parse integer range: %s", str.c_str());
+                return std::nullopt;
+            }
+        }
+
+        if (errno == ERANGE || lower < std::numeric_limits<T>::min()
+                || std::numeric_limits<T>::max() < upper || upper < lower) {
+            ALOGW("could not parse integer range: %s", str.c_str());
+            return std::nullopt;
+        }
+
+        return std::make_optional<Range<T>>((T)lower, (T)upper);
+    }
+
+    static Range<T> RangeFor(double v) {
+        return Range((T)v, (T)ceil(v));
+    }
+
+private:
+    T lower_;
+    T upper_;
+};
+
+static const Range<int32_t> POSITIVE_INT32 = Range<int32_t>(1, INT32_MAX);
+
+// found stuff that is not supported by framework (=> this should not happen)
+constexpr int ERROR_CAPABILITIES_UNRECOGNIZED   = (1 << 0);
+// found profile/level for which we don't have capability estimates
+constexpr int ERROR_CAPABILITIES_UNSUPPORTED    = (1 << 1);
+// have not found any profile/level for which we have capability estimates
+constexpr int ERROR_CAPABILITIES_NONE_SUPPORTED = (1 << 2);
+
+/**
+ * Sorts distinct (non-intersecting) range array in ascending order.
+ * From frameworks/base/media/java/android/media/Utils.java
+ */
+template<typename T>
+void sortDistinctRanges(std::vector<Range<T>> *ranges) {
+    std::sort(ranges->begin(), ranges->end(),
+            [](Range<T> r1, Range<T> r2) {
+        if (r1.upper() < r2.lower()) {
+            return true;
+        } else if (r1.lower() > r2.upper()) {
+            return false;
+        } else {
+            ALOGE("sample rate ranges must be distinct.");
+            return false;
+        }
+    });
+}
+
+/**
+ * Returns the intersection of two sets of non-intersecting ranges
+ * From frameworks/base/media/java/android/media/Utils.java
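+ *
+ * For example (editor's illustration), intersecting {[1,4], [6,9]} with {[3,7]}
+ * yields {[3,4], [6,7]}.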
+ * @param one a sorted set of non-intersecting ranges in ascending order
+ * @param another another sorted set of non-intersecting ranges in ascending order
+ * @return the intersection of the two sets, sorted in ascending order
+ */
+template<typename T>
+std::vector<Range<T>> intersectSortedDistinctRanges(
+        const std::vector<Range<T>> &one, const std::vector<Range<T>> &another) {
+    std::vector<Range<T>> result;
+    size_t ix = 0;
+    for (Range<T> range : another) {
+        while (ix < one.size() && one[ix].upper() < range.lower()) {
+            ++ix;
+        }
+        while (ix < one.size() && one[ix].upper() < range.upper()) {
+            result.push_back(range.intersect(one[ix]));
+            ++ix;
+        }
+        if (ix == one.size()) {
+            break;
+        }
+        if (one[ix].lower() <= range.upper()) {
+            result.push_back(range.intersect(one[ix]));
+        }
+    }
+    return result;
+}
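+
+// A small worked example of the intersection above (comment-only sketch):
+// intersecting the sorted sets {[1, 5], [10, 20]} and {[3, 12], [18, 30]}
+// walks both lists once and produces {[3, 5], [10, 12], [18, 20]}.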
+
+/**
+ * Immutable class for describing width and height dimensions in pixels.
+ */
+struct VideoSize {
+    /**
+     * Create a new immutable VideoSize instance.
+     *
+     * @param width The width of the size, in pixels
+     * @param height The height of the size, in pixels
+     */
+    VideoSize(int32_t width, int32_t height);
+
+    // default constructor
+    VideoSize();
+
+    /**
+     * Get the width of the size (in pixels).
+     * @return width
+     */
+    int32_t getWidth() const;
+
+    /**
+     * Get the height of the size (in pixels).
+     * @return height
+     */
+    int32_t getHeight() const;
+
+    /**
+     * Check if this size is equal to another size.
+     *
+     * Two sizes are equal if and only if both their widths and heights are
+     * equal.
+     *
+     * A size object is never equal to any other type of object.
+     *
+     * @return true if the objects were equal, false otherwise
+     */
+    bool equals(VideoSize other) const;
+
+    bool empty() const;
+
+    std::string toString() const;
+
+    /**
+     * Parses the specified string as a size value.
+     *
+     * The ASCII characters {@code \}{@code u002a} ('*') and
+     * {@code \}{@code u0078} ('x') are recognized as separators between
+     * the width and height.
+     *
+     * For any {@code VideoSize s}: {@code VideoSize::ParseSize(s.toString()).equals(s)}.
+     * However, the method also handles sizes expressed in the
+     * following forms:
+     *
+     * "<i>width</i>{@code x}<i>height</i>" or
+     * "<i>width</i>{@code *}<i>height</i>" {@code => new VideoSize(width, height)},
+     * where <i>width</i> and <i>height</i> are string integers potentially
+     * containing a sign, such as "-10", "+7" or "5".
+     *
+     * <pre>{@code
+     * VideoSize::ParseSize("3*+6").equals(new VideoSize(3, 6)) == true
+     * VideoSize::ParseSize("-3x-6").equals(new VideoSize(-3, -6)) == true
+     * VideoSize::ParseSize("4 by 3") => throws NumberFormatException
+     * }</pre>
+     *
+     * @param string the string representation of a size value.
+     * @return the size value represented by {@code string}.
+     */
+    static std::optional<VideoSize> ParseSize(std::string str);
+
+    static std::optional<std::pair<VideoSize, VideoSize>> ParseSizeRange(const std::string str);
+
+    static Range<int32_t> GetAllowedDimensionRange();
+
+private:
+    int32_t mWidth;
+    int32_t mHeight;
+};
+
+// This is used for the std::map<VideoSize> in VideoCapabilities
+struct VideoSizeCompare {
+    bool operator() (const VideoSize& lhs, const VideoSize& rhs) const {
+        if (lhs.getWidth() == rhs.getWidth()) {
+            return lhs.getHeight() < rhs.getHeight();
+        } else {
+            return lhs.getWidth() < rhs.getWidth();
+        }
+    }
+};
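+
+// Comment-only usage sketch: VideoSizeCompare orders sizes by width and then
+// height, so it can act as the comparator of an ordered map keyed by size,
+// e.g. std::map<VideoSize, Range<int64_t>, VideoSizeCompare> measuredRates;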
+
+/**
+ * An immutable data type representing a rational number.
+ *
+ * Contains a pair of ints representing the numerator and denominator of a
+ * Rational number.
+ */
+struct Rational {
+    /**
+     * <p>Create a {@code Rational} with a given numerator and denominator.</p>
+     *
+     * <p>The signs of the numerator and the denominator may be flipped such that the denominator
+     * is always positive. Both the numerator and denominator will be converted to their reduced
+     * forms (see {@link #equals} for more details).</p>
+     *
+     * <p>For example,
+     * <ul>
+     * <li>a rational of {@code 2/4} will be reduced to {@code 1/2}.
+     * <li>a rational of {@code 1/-1} will be flipped to {@code -1/1}
+     * <li>a rational of {@code 5/0} will be reduced to {@code 1/0}
+     * <li>a rational of {@code 0/5} will be reduced to {@code 0/1}
+     * </ul>
+     * </p>
+     *
+     * @param numerator the numerator of the rational
+     * @param denominator the denominator of the rational
+     *
+     * @see #equals
+     */
+    Rational(int32_t numerator, int32_t denominator) {
+        if (denominator < 0) {
+            numerator = -numerator;
+            denominator = -denominator;
+        }
+
+        // Convert to reduced form
+        if (denominator == 0 && numerator > 0) {
+            mNumerator = 1; // +Inf
+            mDenominator = 0;
+        } else if (denominator == 0 && numerator < 0) {
+            mNumerator = -1; // -Inf
+            mDenominator = 0;
+        } else if (denominator == 0 && numerator == 0) {
+            mNumerator = 0; // NaN
+            mDenominator = 0;
+        } else if (numerator == 0) {
+            mNumerator = 0;
+            mDenominator = 1;
+        } else {
+            int gcd = std::gcd(numerator, denominator);
+
+            mNumerator = numerator / gcd;
+            mDenominator = denominator / gcd;
+        }
+    }
+
+    // Default constructor: delegates to Rational(0, 0) so the members are
+    // actually initialized (the resulting value is NaN).
+    Rational() : Rational(0, 0) {}
+
+    /**
+     * Gets the numerator of the rational.
+     *
+     * <p>The numerator will always return {@code 1} if this rational represents
+     * infinity (that is, the denominator is {@code 0}).</p>
+     */
+    int32_t getNumerator() const {
+        return mNumerator;
+    }
+
+    /**
+     * Gets the denominator of the rational
+     *
+     * <p>The denominator may return {@code 0}, in which case the rational may represent
+     * positive infinity (if the numerator was positive), negative infinity (if the numerator
+     * was negative), or {@code NaN} (if the numerator was {@code 0}).</p>
+     *
+     * <p>The denominator will always return {@code 1} if the numerator is {@code 0}.
+     */
+    int32_t getDenominator() const {
+        return mDenominator;
+    }
+
+    /**
+     * Indicates whether this rational is a <em>Not-a-Number (NaN)</em> value.
+     *
+     * <p>A {@code NaN} value occurs when both the numerator and the denominator are {@code 0}.</p>
+     *
+     * @return {@code true} if this rational is a <em>Not-a-Number (NaN)</em> value;
+     *         {@code false} if this is a (potentially infinite) number value
+     */
+    bool isNaN() const {
+        return mDenominator == 0 && mNumerator == 0;
+    }
+
+    /**
+     * Indicates whether this rational represents an infinite value.
+     *
+     * <p>An infinite value occurs when the denominator is {@code 0} (but the numerator is not).</p>
+     *
+     * @return {@code true} if this rational is a (positive or negative) infinite value;
+     *         {@code false} if this is a finite number value (or {@code NaN})
+     */
+    bool isInfinite() const {
+        return mNumerator != 0 && mDenominator == 0;
+    }
+
+    /**
+     * Indicates whether this rational represents a finite value.
+     *
+     * <p>A finite value occurs when the denominator is not {@code 0}; in other words
+     * the rational is neither infinity nor {@code NaN}.</p>
+     *
+     * @return {@code true} if this rational is a finite number value;
+     *         {@code false} if this is an infinite or {@code NaN} value
+     */
+    bool isFinite() const {
+        return mDenominator != 0;
+    }
+
+    /**
+     * Indicates whether this rational represents a zero value.
+     *
+     * <p>A zero value is a {@link #isFinite finite} rational with a numerator of {@code 0}.</p>
+     *
+     * @return {@code true} if this rational is finite zero value;
+     *         {@code false} otherwise
+     */
+    bool isZero() const {
+        return isFinite() && mNumerator == 0;
+    }
+
+    /**
+     * Return a string representation of this rational, e.g. {@code "1/2"}.
+     *
+     * <p>The following rules of conversion apply:
+     * <ul>
+     * <li>{@code NaN} values will return {@code "NaN"}
+     * <li>Positive infinity values will return {@code "Infinity"}
+     * <li>Negative infinity values will return {@code "-Infinity"}
+     * <li>All other values will return {@code "numerator/denominator"} where {@code numerator}
+     * and {@code denominator} are substituted with the appropriate numerator and denominator
+     * values.
+     * </ul></p>
+     */
+    std::string toString() const {
+        if (isNaN()) {
+            return "NaN";
+        } else if (isPosInf()) {
+            return "Infinity";
+        } else if (isNegInf()) {
+            return "-Infinity";
+        } else {
+            return std::to_string(mNumerator) + "/" + std::to_string(mDenominator);
+        }
+    }
+
+    /**
+     * Returns the value of the specified number as a {@code double}.
+     *
+     * <p>The {@code double} is calculated by converting both the numerator and denominator
+     * to a {@code double}; then returning the result of dividing the numerator by the
+     * denominator.</p>
+     *
+     * @return the divided value of the numerator and denominator as a {@code double}.
+     */
+    double asDouble() const {
+        double num = mNumerator;
+        double den = mDenominator;
+
+        return num / den;
+    }
+
+    /**
+     * Returns the value of the specified number as a {@code float}.
+     *
+     * <p>The {@code float} is calculated by converting both the numerator and denominator
+     * to a {@code float}; then returning the result of dividing the numerator by the
+     * denominator.</p>
+     *
+     * @return the divided value of the numerator and denominator as a {@code float}.
+     */
+    float asfloat() const {
+        float num = mNumerator;
+        float den = mDenominator;
+
+        return num / den;
+    }
+
+    /**
+     * Returns the value of the specified number as a {@code int}.
+     *
+     * <p>{@link #isFinite Finite} rationals are converted to an {@code int} value
+     * by dividing the numerator by the denominator; conversion for non-finite values happens
+     * identically to casting a floating point value to an {@code int}, in particular:
+     * positive infinity yields {@code INT32_MAX}, negative infinity yields {@code INT32_MIN},
+     * and {@code NaN} yields {@code 0}.</p>
+     *
+     * @return the divided value of the numerator and denominator as a {@code int}.
+     */
+    int32_t asInt32() const {
+        // Mimic float to int conversion rules from JLS 5.1.3
+
+        if (isPosInf()) {
+            return INT32_MAX;
+        } else if (isNegInf()) {
+            return INT32_MIN;
+        } else if (isNaN()) {
+            return 0;
+        } else { // finite
+            return mNumerator / mDenominator;
+        }
+    }
+
+    /**
+     * Returns the value of the specified number as a {@code long}.
+     *
+     * <p>{@link #isFinite Finite} rationals are converted to a {@code long} value
+     * by dividing the numerator by the denominator; conversion for non-finite values happens
+     * identically to casting a floating point value to a {@code long}, in particular:
+     * positive infinity yields {@code INT64_MAX}, negative infinity yields {@code INT64_MIN},
+     * and {@code NaN} yields {@code 0}.</p>
+     *
+     * @return the divided value of the numerator and denominator as a {@code long}.
+     */
+    int64_t asInt64() const {
+        // Mimic float to long conversion rules from JLS 5.1.3
+
+        if (isPosInf()) {
+            return INT64_MAX;
+        } else if (isNegInf()) {
+            return INT64_MIN;
+        } else if (isNaN()) {
+            return 0;
+        } else { // finite
+            return mNumerator / mDenominator;
+        }
+    }
+
+    /**
+     * Returns the value of the specified number as a {@code short}.
+     *
+     * <p>{@link #isFinite Finite} rationals are converted to a {@code short} value
+     * identically to {@link #asInt32}; the {@code int} result is then truncated to a
+     * {@code short} before returning the value.</p>
+     *
+     * @return the divided value of the numerator and denominator as a {@code short}.
+     */
+    int16_t asInt16() const {
+        return (int16_t) asInt32();
+    }
+
+    /**
+     * Compare this rational to the specified rational to determine their natural order.
+     *
+     * NaN is considered to be equal to itself and greater than all other
+     * Rational values. Otherwise, if the objects are not equal, then
+     * the following rules apply:
+     *
+     * Positive infinity is greater than any other finite number (or negative infinity)
+     * Negative infinity is less than any other finite number (or positive infinity)
+     * The finite number represented by this rational is checked numerically
+     * against the other finite number by converting both rationals to a common denominator multiple
+     * and comparing their numerators.
+     *
+     * @param another the rational to be compared
+     *
+     * @return a negative integer, zero, or a positive integer as this object is less than,
+     *         equal to, or greater than the specified rational.
+     */
+    int compareTo(Rational another) const {
+        if (equals(another)) {
+            return 0;
+        } else if (isNaN()) { // NaN is greater than the other non-NaN value
+            return 1;
+        } else if (another.isNaN()) { // the other NaN is greater than this non-NaN value
+            return -1;
+        } else if (isPosInf() || another.isNegInf()) {
+            return 1; // positive infinity is greater than any non-NaN/non-posInf value
+        } else if (isNegInf() || another.isPosInf()) {
+            return -1; // negative infinity is less than any non-NaN/non-negInf value
+        }
+
+        // else both this and another are finite numbers
+
+        // make the denominators the same, then compare numerators. int64_t to avoid overflow
+        int64_t thisNumerator = ((int64_t)mNumerator) * another.mDenominator;
+        int64_t otherNumerator = ((int64_t)another.mNumerator) * mDenominator;
+
+        // avoid underflow from subtraction by doing comparisons
+        if (thisNumerator < otherNumerator) {
+            return -1;
+        } else if (thisNumerator > otherNumerator) {
+            return 1;
+        } else {
+            // This should be covered by #equals, but have this code path just in case
+            return 0;
+        }
+    }
+
+    bool operator > (const Rational& another) const {
+        return compareTo(another) > 0;
+    }
+
+    bool operator >= (const Rational& another) const {
+        return compareTo(another) >= 0;
+    }
+
+    bool operator < (const Rational& another) const {
+        return compareTo(another) < 0;
+    }
+
+    bool operator <= (const Rational& another) const {
+        return compareTo(another) <= 0;
+    }
+
+    bool operator == (const Rational& another) const {
+        return equals(another);
+    }
+
+    static std::optional<Range<Rational>> ParseRange(const std::string str);
+
+    static Range<Rational> ScaleRange(Range<Rational> range, int32_t num, int32_t den);
+
+private:
+    int32_t mNumerator;
+    int32_t mDenominator;
+
+    bool isPosInf() const {
+        return mDenominator == 0 && mNumerator > 0;
+    }
+
+    bool isNegInf() const {
+        return mDenominator == 0 && mNumerator < 0;
+    }
+
+    bool equals(Rational other) const {
+        return (mNumerator == other.mNumerator && mDenominator == other.mDenominator);
+    }
+
+    Rational scale(int32_t num, int32_t den);
+
+    /**
+     * Parses the specified string as a rational value.
+     * The ASCII characters {@code \}{@code u003a} (':') and
+     * {@code \}{@code u002f} ('/') are recognized as separators between
+     * the numerator and denominator.
+     *
+     * For any {@code Rational r}: {@code Rational::Parse(r.toString()).equals(r)}.
+     * However, the method also handles rational numbers expressed in the
+     * following forms:
+     *
+     * "<i>num</i>{@code /}<i>den</i>" or
+     * "<i>num</i>{@code :}<i>den</i>" {@code => new Rational(num, den);},
+     * where <i>num</i> and <i>den</i> are string integers potentially
+     * containing a sign, such as "-10", "+7" or "5".
+     *
+     * Rational::Parse("3:+6").equals(new Rational(1, 2)) == true
+     * Rational::Parse("-3/-6").equals(new Rational(1, 2)) == true
+     * Rational::Parse("4.56") => return std::nullopt
+     *
+     * @param str the string representation of a rational value.
+     * @return the rational value wrapped by std::optional represented by str.
+     */
+    static std::optional<Rational> Parse(std::string str);
+};
+
+static const Rational NaN = Rational(0, 0);
+static const Rational POSITIVE_INFINITY = Rational(1, 0);
+static const Rational NEGATIVE_INFINITY = Rational(-1, 0);
+static const Rational ZERO = Rational(0, 1);
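+
+// Comment-only sketch of the reduction and ordering rules documented above
+// (restating the constructor documentation, not adding new guarantees):
+//
+//     Rational(2, 4).toString()  == "1/2"
+//     Rational(1, -2).toString() == "-1/2"
+//     Rational(5, 0) == POSITIVE_INFINITY
+//     NaN.compareTo(POSITIVE_INFINITY) > 0   // NaN sorts above everything else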
+
+}  // namespace android
+
+#endif  // CODEC_CAPABILITIES__UTILS_H_
\ No newline at end of file
diff --git a/media/libmedia/include/media/EncoderCapabilities.h b/media/libmedia/include/media/EncoderCapabilities.h
new file mode 100644
index 0000000..a9654bb
--- /dev/null
+++ b/media/libmedia/include/media/EncoderCapabilities.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ENCODER_CAPABILITIES_H_
+
+#define ENCODER_CAPABILITIES_H_
+
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+/**
+ * A class that supports querying the encoding capabilities of a codec.
+ */
+struct EncoderCapabilities {
+    /**
+     * Returns the supported range of quality values.
+     *
+     * Quality is implementation-specific. As a general rule, a higher quality
+     * setting results in a better image quality and a lower compression ratio.
+     */
+    const Range<int>& getQualityRange();
+
+    /**
+     * Returns the supported range of encoder complexity values.
+     * <p>
+     * Some codecs may support multiple complexity levels, where higher
+     * complexity values use more encoder tools (e.g. perform more
+     * intensive calculations) to improve the quality or the compression
+     * ratio.  Use a lower value to save power and/or time.
+     */
+    const Range<int>& getComplexityRange();
+
+    /** Constant quality mode */
+    inline static constexpr int BITRATE_MODE_CQ = 0;
+    /** Variable bitrate mode */
+    inline static constexpr int BITRATE_MODE_VBR = 1;
+    /** Constant bitrate mode */
+    inline static constexpr int BITRATE_MODE_CBR = 2;
+    /** Constant bitrate mode with frame drops */
+    inline static constexpr int BITRATE_MODE_CBR_FD =  3;
+
+    /**
+     * Query whether a bitrate mode is supported.
+     */
+    bool isBitrateModeSupported(int mode);
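+    // Illustrative query (comment-only sketch; `caps` stands for an instance
+    // returned by Create()):
+    //
+    //     if (caps->isBitrateModeSupported(EncoderCapabilities::BITRATE_MODE_CBR)) {
+    //         // configure a constant-bitrate encode
+    //     }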
+
+    /** @hide */
+    static std::shared_ptr<EncoderCapabilities> Create(std::string mediaType,
+            std::vector<ProfileLevel> profLevs, const sp<AMessage> &format);
+
+    /** @hide */
+    void getDefaultFormat(sp<AMessage> &format);
+
+    /** @hide */
+    bool supportsFormat(const sp<AMessage> &format);
+
+private:
+    inline static const Feature sBitrateModes[] = {
+        Feature("VBR", BITRATE_MODE_VBR, true),
+        Feature("CBR", BITRATE_MODE_CBR, false),
+        Feature("CQ",  BITRATE_MODE_CQ,  false),
+        Feature("CBR-FD", BITRATE_MODE_CBR_FD, false)
+    };
+    static int ParseBitrateMode(std::string mode);
+
+    std::string mMediaType;
+    std::vector<ProfileLevel> mProfileLevels;
+
+    Range<int> mQualityRange;
+    Range<int> mComplexityRange;
+    int mBitControl;
+    int mDefaultComplexity;
+    int mDefaultQuality;
+    std::string mQualityScale;
+
+    /* no public constructor */
+    EncoderCapabilities() {}
+    void init(std::string mediaType, std::vector<ProfileLevel> profLevs,
+            const sp<AMessage> &format);
+    void applyLevelLimits();
+    void parseFromInfo(const sp<AMessage> &format);
+    bool supports(std::optional<int> complexity, std::optional<int> quality,
+            std::optional<int> profile);
+};
+
+}  // namespace android
+
+#endif // ENCODER_CAPABILITIES_H_
\ No newline at end of file
diff --git a/media/libmedia/include/media/MediaCodecInfo.h b/media/libmedia/include/media/MediaCodecInfo.h
index 88a2dc4..60e383a 100644
--- a/media/libmedia/include/media/MediaCodecInfo.h
+++ b/media/libmedia/include/media/MediaCodecInfo.h
@@ -20,6 +20,8 @@
 
 #include <android-base/macros.h>
 #include <binder/Parcel.h>
+#include <media/CodecCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
 #include <media/stagefright/foundation/ABase.h>
 #include <media/stagefright/foundation/AString.h>
 
@@ -43,13 +45,10 @@
 struct MediaCodecListWriter;
 
 struct MediaCodecInfo : public RefBase {
-    struct ProfileLevel {
-        uint32_t mProfile;
-        uint32_t mLevel;
-        bool operator <(const ProfileLevel &o) const {
-            return mProfile < o.mProfile || (mProfile == o.mProfile && mLevel < o.mLevel);
-        }
-    };
+
+    // Moved to CodecCapabilitiesUtils.h
+    // Map MediaCodecInfo::ProfileLevel to android::ProfileLevel to maintain compatibility.
+    typedef ::android::ProfileLevel ProfileLevel;
 
     struct CapabilitiesWriter;
 
@@ -193,6 +192,7 @@
     Attributes getAttributes() const;
     void getSupportedMediaTypes(Vector<AString> *mediaTypes) const;
     const sp<Capabilities> getCapabilitiesFor(const char *mediaType) const;
+    const std::shared_ptr<CodecCapabilities> getCodecCapsFor(const char *mediaType) const;
     const char *getCodecName() const;
 
     /**
@@ -230,14 +230,21 @@
     status_t writeToParcel(Parcel *parcel) const;
 
 private:
+    /**
+     * Max supported instances setting from MediaCodecList global setting.
+     */
+    static int32_t sMaxSupportedInstances;
+
     AString mName;
     AString mOwner;
     Attributes mAttributes;
     KeyedVector<AString, sp<Capabilities> > mCaps;
+    KeyedVector<AString, std::shared_ptr<CodecCapabilities>> mCodecCaps;
     Vector<AString> mAliases;
     uint32_t mRank;
 
     ssize_t getCapabilityIndex(const char *mediaType) const;
+    ssize_t getCodecCapIndex(const char *mediaType) const;
 
     /**
      * Construct an `MediaCodecInfo` object. After the construction, its
@@ -265,6 +272,15 @@
  */
 struct MediaCodecInfoWriter {
     /**
+     * Get CodecCapabilities from Capabilities.
+     */
+    static std::shared_ptr<CodecCapabilities> BuildCodecCapabilities(const char *mediaType,
+            sp<MediaCodecInfo::Capabilities> caps, bool isEncoder, int maxSupportedInstances = 0);
+    /**
+     * Set the max supported instances global setting from MediaCodecList.
+     */
+    static void SetMaxSupportedInstances(int32_t maxSupportedInstances);
+    /**
      * Set the name of the codec.
      *
      * @param name The new name.
@@ -320,6 +336,10 @@
      * @param rank The rank of the component.
      */
     void setRank(uint32_t rank);
+    /**
+     * Create CodecCapabilities map from Capabilities.
+     */
+    void createCodecCaps();
 private:
     /**
      * The associated `MediaCodecInfo`.
diff --git a/media/libmedia/include/media/RingBuffer.h b/media/libmedia/include/media/RingBuffer.h
index 4d92d87..a08f35e 100644
--- a/media/libmedia/include/media/RingBuffer.h
+++ b/media/libmedia/include/media/RingBuffer.h
@@ -44,8 +44,14 @@
     /**
      * Forward iterator to this class.  Implements an std:forward_iterator.
      */
-    class iterator : public std::iterator<std::forward_iterator_tag, T> {
+    class iterator {
     public:
+        using iterator_category = std::forward_iterator_tag;
+        using value_type = T;
+        using difference_type = std::ptrdiff_t;
+        using pointer = T*;
+        using reference = T&;
+
         iterator(T* ptr, size_t size, size_t pos, size_t ctr);
 
         iterator& operator++();
@@ -357,5 +363,3 @@
 }; // namespace android
 
 #endif // ANDROID_SERVICE_UTILS_RING_BUFFER_H
-
-
diff --git a/media/libmedia/include/media/VideoCapabilities.h b/media/libmedia/include/media/VideoCapabilities.h
new file mode 100644
index 0000000..5671375
--- /dev/null
+++ b/media/libmedia/include/media/VideoCapabilities.h
@@ -0,0 +1,457 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_CAPABILITIES_H_
+
+#define VIDEO_CAPABILITIES_H_
+
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+struct VideoCapabilities {
+    struct PerformancePoint {
+        /**
+         * Maximum number of macroblocks in the frame.
+         *
+         * Video frames are conceptually divided into 16-by-16 pixel blocks called macroblocks.
+         * Most coding standards operate on these 16-by-16 pixel blocks; thus, codec performance
+         * is characterized using such blocks.
+         *
+         * Test API
+         */
+        int32_t getMaxMacroBlocks() const;
+
+        /**
+         * Return the width.
+         *
+         * For internal use only.
+         */
+        int32_t getWidth() const;
+
+        /**
+         * Return the height.
+         *
+         * For internal use only.
+         */
+        int32_t getHeight() const;
+
+        /**
+         * Maximum frame rate in frames per second.
+         *
+         * Test API
+         */
+        int32_t getMaxFrameRate() const;
+
+        /**
+         * Maximum number of macroblocks processed per second.
+         *
+         * Test API
+         */
+        int64_t getMaxMacroBlockRate() const;
+
+        /**
+         * Return the block size.
+         *
+         * For internal use only.
+         */
+        VideoSize getBlockSize() const;
+
+        /**
+         * convert to a debug string
+         *
+         * Be careful about the serializable compatibility across API revisions.
+         */
+        std::string toString() const;
+
+        /**
+         * Create a detailed performance point with custom max frame rate and macroblock size.
+         *
+         * @param width  frame width in pixels
+         * @param height frame height in pixels
+         * @param frameRate frames per second for frame width and height
+         * @param maxFrameRate maximum frames per second for any frame size
+         * @param blockSize block size for codec implementation. Must be powers of two in both
+         *        width and height.
+         *
+         * Test API
+         */
+        PerformancePoint(int32_t width, int32_t height, int32_t frameRate, int32_t maxFrameRate,
+                VideoSize blockSize);
+
+        /**
+         * Create a detailed performance point with custom max frame rate, max macroblock
+         * rate and block size.
+         *
+         * @param width  frame width in pixels
+         * @param height frame height in pixels
+         * @param maxFrameRate maximum frames per second for any frame size
+         * @param maxMacroBlockRate maximum number of macroblocks processed per second.
+         * @param blockSize block size for codec implementation. Must be powers of two in both
+         *        width and height.
+         *
+         * Test API
+         */
+        PerformancePoint(VideoSize blockSize, int32_t width, int32_t height, int maxFrameRate,
+                int64_t maxMacroBlockRate);
+
+        /**
+         * Convert a performance point to a larger blocksize.
+         *
+         * @param pp performance point. NonNull
+         * @param newBlockSize block size for codec implementation. NonNull.
+         *
+         * Test API
+         */
+        PerformancePoint(const PerformancePoint &pp, VideoSize newBlockSize);
+
+        /**
+         * Create a performance point for a given frame size and frame rate.
+         *
+         * @param width width of the frame in pixels
+         * @param height height of the frame in pixels
+         * @param frameRate frame rate in frames per second
+         */
+        PerformancePoint(int32_t width, int32_t height, int32_t frameRate);
+
+        /**
+         * Checks whether the performance point covers a media format.
+         *
+         * @param format Stream format considered
+         *
+         * @return {@code true} if the performance point covers the format.
+         */
+        bool covers(const sp<AMessage> &format) const;
+
+        /**
+         * Checks whether the performance point covers another performance point. Use this
+         * method to determine if a performance point advertised by a codec covers the
+         * performance point required. This method can also be used for loose ordering as this
+         * method is transitive.
+         *
+         * @param other other performance point considered
+         *
+         * @return {@code true} if the performance point covers the other.
+         */
+        bool covers(const PerformancePoint &other) const;
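+        // Comment-only sketch: a point advertised by a codec covers a
+        // requirement when it sustains at least as many frames, macroblocks
+        // and macroblock rows per second. For points built with the
+        // frame-size/frame-rate constructor above, PerformancePoint(3840, 2160, 60)
+        // is expected to cover PerformancePoint(1920, 1080, 30).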
+
+        /**
+         * Check if two PerformancePoint instances are equal.
+         *
+         * @param other other PerformancePoint instance for comparison.
+         *
+         * @return true if two PerformancePoint are equal.
+         */
+        bool equals(const PerformancePoint &other) const;
+
+    private:
+        VideoSize mBlockSize; // codec block size in macroblocks
+        int32_t mWidth; // width in macroblocks
+        int32_t mHeight; // height in macroblocks
+        int32_t mMaxFrameRate; // max frames per second
+        int64_t mMaxMacroBlockRate; // max macro block rate
+
+        void init(int32_t width, int32_t height, int32_t frameRate, int32_t maxFrameRate,
+                VideoSize blockSize);
+
+        /** Saturates a 64 bit integer value to a 32 bit integer */
+        int32_t saturateInt64ToInt32(int64_t value) const;
+
+        /** This method may overflow */
+        int32_t align(int32_t value, int32_t alignment) const;
+
+        /** @return NonNull */
+        VideoSize getCommonBlockSize(const PerformancePoint &other) const;
+    };
+
+    /**
+     * Find the equivalent VP9 profile level.
+     *
+     * Not a public API to developers.
+     */
+    static int32_t EquivalentVP9Level(const sp<AMessage> &format);
+
+    /**
+     * Returns the range of supported bitrates in bits/second.
+     */
+    const Range<int32_t>& getBitrateRange() const;
+
+    /**
+     * Returns the range of supported video widths.
+     * 32-bit processes will not support resolutions larger than 4096x4096 due to
+     * the limited address space.
+     */
+    const Range<int32_t>& getSupportedWidths() const;
+
+    /**
+     * Returns the range of supported video heights.
+     * 32-bit processes will not support resolutions larger than 4096x4096 due to
+     * the limited address space.
+     */
+    const Range<int32_t>& getSupportedHeights() const;
+
+    /**
+     * Returns the alignment requirement for video width (in pixels).
+     *
+     * This is a power-of-2 value that video width must be a
+     * multiple of.
+     */
+    int32_t getWidthAlignment() const;
+
+    /**
+     * Returns the alignment requirement for video height (in pixels).
+     *
+     * This is a power-of-2 value that video height must be a
+     * multiple of.
+     */
+    int32_t getHeightAlignment() const;
+
+    /**
+     * Return the upper limit on the smaller dimension of width or height.
+     *
+     * Some codecs have a limit on the smaller dimension, whether it be
+     * the width or the height.  E.g. a codec may only be able to handle
+     * up to 1920x1080 both in landscape and portrait mode (1080x1920).
+     * In this case the maximum width and height are both 1920, but the
+     * smaller dimension limit will be 1080. For other codecs, this is
+     * {@code Math.min(getSupportedWidths().getUpper(),
+     * getSupportedHeights().getUpper())}.
+     */
+    int32_t getSmallerDimensionUpperLimit() const;
+
+    /**
+     * Returns the range of supported frame rates.
+     *
+     * This is not a performance indicator.  Rather, it expresses the
+     * limits specified in the coding standard, based on the complexities
+     * of encoding material for later playback at a certain frame rate,
+     * or the decoding of such material in non-realtime.
+     */
+    const Range<int32_t>& getSupportedFrameRates() const;
+
+    /**
+     * Returns the range of supported video widths for a video height.
+     * @param height the height of the video
+     */
+    std::optional<Range<int32_t>> getSupportedWidthsFor(int32_t height) const;
+
+    /**
+     * Returns the range of supported video heights for a video width
+     * @param width the width of the video
+     */
+    std::optional<Range<int32_t>> getSupportedHeightsFor(int32_t width) const;
+
+    /**
+     * Returns the range of supported video frame rates for a video size.
+     *
+     * This is not a performance indicator.  Rather, it expresses the limits specified in
+     * the coding standard, based on the complexities of encoding material of a given
+     * size for later playback at a certain frame rate, or the decoding of such material
+     * in non-realtime.
+     *
+     * @param width the width of the video
+     * @param height the height of the video
+     */
+    std::optional<Range<double>> getSupportedFrameRatesFor(int32_t width, int32_t height) const;
+
+    /**
+     * Returns the range of achievable video frame rates for a video size.
+     * May return {@code null}, if the codec did not publish any measurement
+     * data.
+     * <p>
+     * This is a performance estimate provided by the device manufacturer based on statistical
+     * sampling of full-speed decoding and encoding measurements in various configurations
+     * of common video sizes supported by the codec. As such it should only be used to
+     * compare individual codecs on the device. The value is not suitable for comparing
+     * different devices or even different android releases for the same device.
+     * <p>
+     * <em>On {@link android.os.Build.VERSION_CODES#M} release</em> the returned range
+     * corresponds to the fastest frame rates achieved in the tested configurations. As
+     * such, it should not be used to gauge guaranteed or even average codec performance
+     * on the device.
+     * <p>
+     * <em>On {@link android.os.Build.VERSION_CODES#N} release</em> the returned range
+     * corresponds closer to sustained performance <em>in tested configurations</em>.
+     * One can expect to achieve sustained performance higher than the lower limit more than
+     * 50% of the time, and higher than half of the lower limit at least 90% of the time
+     * <em>in tested configurations</em>.
+     * Conversely, one can expect performance lower than twice the upper limit at least
+     * 90% of the time.
+     * <p class=note>
+     * Tested configurations use a single active codec. For use cases where multiple
+     * codecs are active, applications can expect lower and in most cases significantly lower
+     * performance.
+     * <p class=note>
+     * The returned range value is interpolated from the nearest frame size(s) tested.
+     * Codec performance is severely impacted by other activity on the device as well
+     * as environmental factors (such as battery level, temperature or power source), and can
+     * vary significantly even in a steady environment.
+     * <p class=note>
+     * Use this method in cases where only codec performance matters, e.g. to evaluate if
+     * a codec has any chance of meeting a performance target. Codecs are listed
+     * in {@link MediaCodecList} in the preferred order as defined by the device
+     * manufacturer. As such, applications should use the first suitable codec in the
+     * list to achieve the best balance between power use and performance.
+     *
+     * @param width the width of the video
+     * @param height the height of the video
+     */
+    std::optional<Range<double>> getAchievableFrameRatesFor(int32_t width, int32_t height) const;
+
+    /**
+     * Returns the supported performance points. May return {@code null} if the codec did not
+     * publish any performance point information (e.g. the vendor codecs have not been updated
+     * to the latest android release). May return an empty list if the codec published that
+     * it does not guarantee any performance points.
+     * <p>
+     * This is a performance guarantee provided by the device manufacturer for hardware codecs
+     * based on hardware capabilities of the device.
+     * <p>
+     * The returned list is sorted first by decreasing number of pixels, then by decreasing
+     * width, and finally by decreasing frame rate.
+     * Performance points assume a single active codec. For use cases where multiple
+     * codecs are active, applications should use the highest pixel count, and add the
+     * frame rates of each individual codec.
+     * <p class=note>
+     * 32-bit processes will not support resolutions larger than 4096x4096 due to
+     * the limited address space, but performance points will be presented as is.
+     * In other words, even though a component publishes a performance point for
+     * a resolution higher than 4096x4096, it does not mean that the resolution is supported
+     * for 32-bit processes.
+     */
+    const std::vector<PerformancePoint>& getSupportedPerformancePoints() const;
+
+    /**
+     * Returns whether a given video size ({@code width} and
+     * {@code height}) and {@code frameRate} combination is supported.
+     */
+    bool areSizeAndRateSupported(int32_t width, int32_t height, double frameRate) const;
+
+    /**
+     * Returns whether a given video size ({@code width} and
+     * {@code height}) is supported.
+     */
+    bool isSizeSupported(int32_t width, int32_t height) const;
+
+    /**
+     * Returns if a media format is supported.
+     *
+     * Not exposed to public
+     */
+    bool supportsFormat(const sp<AMessage> &format) const;
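+    // Comment-only usage sketch (names are illustrative): once an instance has
+    // been created for a codec, callers can probe it before configuring, e.g.
+    //
+    //     if (videoCaps->areSizeAndRateSupported(1920, 1080, 30.0)) { ... }
+    //     if (videoCaps->isSizeSupported(3840, 2160)) { ... }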
+
+    /**
+     * Create VideoCapabilities.
+     */
+    static std::shared_ptr<VideoCapabilities> Create(std::string mediaType,
+            std::vector<ProfileLevel> profLevs, const sp<AMessage> &format);
+
+    /**
+     * Get the block size.
+     *
+     * Not a public API to developers
+     */
+    VideoSize getBlockSize() const;
+
+    /**
+     * Get the block count range.
+     *
+     * Not a public API to developers
+     */
+    const Range<int32_t>& getBlockCountRange() const;
+
+    /**
+     * Get the blocks per second range.
+     *
+     * Not a public API to developers
+     */
+    const Range<int64_t>& getBlocksPerSecondRange() const;
+
+    /**
+     * Get the aspect ratio range.
+     *
+     * Not a public API to developers
+     */
+    Range<Rational> getAspectRatioRange(bool blocks) const;
+
+private:
+    std::string mMediaType;
+    std::vector<ProfileLevel> mProfileLevels;
+    int mError;
+
+    Range<int32_t> mBitrateRange;
+    Range<int32_t> mHeightRange;
+    Range<int32_t> mWidthRange;
+    Range<int32_t> mBlockCountRange;
+    Range<int32_t> mHorizontalBlockRange;
+    Range<int32_t> mVerticalBlockRange;
+    Range<Rational> mAspectRatioRange;
+    Range<Rational> mBlockAspectRatioRange;
+    Range<int64_t> mBlocksPerSecondRange;
+    std::map<VideoSize, Range<int64_t>, VideoSizeCompare> mMeasuredFrameRates;
+    std::vector<PerformancePoint> mPerformancePoints;
+    Range<int32_t> mFrameRateRange;
+
+    int32_t mBlockWidth;
+    int32_t mBlockHeight;
+    int32_t mWidthAlignment;
+    int32_t mHeightAlignment;
+    int mSmallerDimensionUpperLimit;
+
+    bool mAllowMbOverride; // allow XML to override calculated limits
+
+    int32_t getBlockCount(int32_t width, int32_t height) const;
+    std::optional<VideoSize> findClosestSize(int32_t width, int32_t height) const;
+    std::optional<Range<double>> estimateFrameRatesFor(int32_t width, int32_t height) const;
+    bool supports(std::optional<int32_t> width, std::optional<int32_t> height,
+                std::optional<double> rate) const;
+    /* no public constructor */
+    VideoCapabilities() {}
+    void init(std::string mediaType, std::vector<ProfileLevel> profLevs,
+            const sp<AMessage> &format);
+    void initWithPlatformLimits();
+    std::vector<PerformancePoint> getPerformancePoints(const sp<AMessage> &format) const;
+    std::map<VideoSize, Range<int64_t>, VideoSizeCompare>
+            getMeasuredFrameRates(const sp<AMessage> &format) const;
+
+    static std::optional<std::pair<Range<int32_t>, Range<int32_t>>> ParseWidthHeightRanges(
+            const std::string &str);
+    void parseFromInfo(const sp<AMessage> &format);
+    void applyBlockLimits(int32_t blockWidth, int32_t blockHeight,
+            Range<int32_t> counts, Range<int64_t> rates, Range<Rational> ratios);
+    void applyAlignment(int32_t widthAlignment, int32_t heightAlignment);
+    void updateLimits();
+    void applyMacroBlockLimits(
+            int32_t maxHorizontalBlocks, int32_t maxVerticalBlocks,
+            int32_t maxBlocks, int64_t maxBlocksPerSecond,
+            int32_t blockWidth, int32_t blockHeight,
+            int32_t widthAlignment, int32_t heightAlignment);
+    void applyMacroBlockLimits(
+            int32_t minHorizontalBlocks, int32_t minVerticalBlocks,
+            int32_t maxHorizontalBlocks, int32_t maxVerticalBlocks,
+            int32_t maxBlocks, int64_t maxBlocksPerSecond,
+            int32_t blockWidth, int32_t blockHeight,
+            int32_t widthAlignment, int32_t heightAlignment);
+    void applyLevelLimits();
+
+    friend struct CodecCapabilities;
+};
+
+}  // namespace android
+
+#endif // VIDEO_CAPABILITIES_H_
\ No newline at end of file
diff --git a/media/libmedia/include/media/mediametadataretriever.h b/media/libmedia/include/media/mediametadataretriever.h
index 116ed9a..d76ed25 100644
--- a/media/libmedia/include/media/mediametadataretriever.h
+++ b/media/libmedia/include/media/mediametadataretriever.h
@@ -122,6 +122,10 @@
     static sp<IMediaPlayerService>            sService;
 
     Mutex                                     mLock;
+    // Static lock added to the client so that image extraction requests
+    // from the same client process consume at most one service thread
+    // (see also b/21277449).
+    static Mutex                              sLock;
     sp<IMediaMetadataRetriever>               mRetriever;
 
 };
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 40fd022..9196f9f 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -35,6 +35,8 @@
 sp<IMediaPlayerService> MediaMetadataRetriever::sService;
 sp<MediaMetadataRetriever::DeathNotifier> MediaMetadataRetriever::sDeathNotifier;
 
+Mutex MediaMetadataRetriever::sLock;
+
 const sp<IMediaPlayerService> MediaMetadataRetriever::getService()
 {
     Mutex::Autolock lock(sServiceLock);
@@ -143,6 +145,7 @@
     ALOGV("getFrameAtTime: time(%" PRId64 " us) option(%d) colorFormat(%d) metaOnly(%d)",
             timeUs, option, colorFormat, metaOnly);
     Mutex::Autolock _l(mLock);
+    Mutex::Autolock _gLock(sLock);
     if (mRetriever == 0) {
         ALOGE("retriever is not initialized");
         return NULL;
@@ -155,6 +158,7 @@
     ALOGV("getImageAtIndex: index(%d) colorFormat(%d) metaOnly(%d) thumbnail(%d)",
             index, colorFormat, metaOnly, thumbnail);
     Mutex::Autolock _l(mLock);
+    Mutex::Autolock _gLock(sLock);
     if (mRetriever == 0) {
         ALOGE("retriever is not initialized");
         return NULL;
@@ -167,6 +171,7 @@
     ALOGV("getImageRectAtIndex: index(%d) colorFormat(%d) rect {%d, %d, %d, %d}",
             index, colorFormat, left, top, right, bottom);
     Mutex::Autolock _l(mLock);
+    Mutex::Autolock _gLock(sLock);
     if (mRetriever == 0) {
         ALOGE("retriever is not initialized");
         return NULL;
@@ -180,6 +185,7 @@
     ALOGV("getFrameAtIndex: index(%d), colorFormat(%d) metaOnly(%d)",
             index, colorFormat, metaOnly);
     Mutex::Autolock _l(mLock);
+    Mutex::Autolock _gLock(sLock);
     if (mRetriever == 0) {
         ALOGE("retriever is not initialized");
         return NULL;
diff --git a/media/libmedia/tests/codeccapabilities/Android.bp b/media/libmedia/tests/codeccapabilities/Android.bp
new file mode 100644
index 0000000..79eb71a
--- /dev/null
+++ b/media/libmedia/tests/codeccapabilities/Android.bp
@@ -0,0 +1,36 @@
+cc_test {
+    name: "CodecCapabilitiesTest",
+    team: "trendy_team_media_codec_framework",
+
+    test_suites: [
+        "general-tests",
+    ],
+    gtest: true,
+
+    srcs: [
+        "CodecCapabilitiesTest.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libmedia_codeclist", // available >= R
+        "libmedia_codeclist_capabilities",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
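+
+// Comment-only note: with a standard AOSP checkout this module is typically
+// run through atest, e.g. `atest CodecCapabilitiesTest`.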
diff --git a/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp b/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp
new file mode 100644
index 0000000..e59d4d6
--- /dev/null
+++ b/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp
@@ -0,0 +1,373 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecCapabilitiesTest"
+
+#include <utils/Log.h>
+
+#include <memory>
+
+#include <gtest/gtest.h>
+
+#include <binder/Parcel.h>
+
+#include <media/CodecCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/MediaCodecInfo.h>
+
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AString.h>
+
+using namespace android;
+
+class AudioCapsAacTest : public testing::Test {
+protected:
+    AudioCapsAacTest() {
+        std::string mediaType = MIMETYPE_AUDIO_AAC;
+
+        sp<AMessage> details = new AMessage;
+        details->setString("bitrate-range", "8000-960000");
+        details->setString("max-channel-count", "8");
+        details->setString("sample-rate-ranges",
+                "7350,8000,11025,12000,16000,22050,24000,32000,44100,48000");
+
+        std::vector<ProfileLevel> profileLevel{
+            ProfileLevel(2, 0),
+            ProfileLevel(5, 0),
+            ProfileLevel(29, 0),
+            ProfileLevel(23, 0),
+            ProfileLevel(39, 0),
+            ProfileLevel(20, 0),
+            ProfileLevel(42, 0),
+        };
+
+        audioCaps = AudioCapabilities::Create(mediaType, profileLevel, details);
+    }
+
+    std::shared_ptr<AudioCapabilities> audioCaps;
+};
+
+TEST_F(AudioCapsAacTest, AudioCaps_Aac_Bitrate) {
+    const Range<int32_t>& bitrateRange = audioCaps->getBitrateRange();
+    EXPECT_EQ(bitrateRange.lower(), 8000) << "bitrate range does not match. lower: "
+            << bitrateRange.lower();
+    EXPECT_EQ(bitrateRange.upper(), 510000) << "bitrate range does not match. upper: "
+            << bitrateRange.upper();
+}
+
+TEST_F(AudioCapsAacTest, AudioCaps_Aac_InputChannelCount) {
+    int32_t maxInputChannelCount = audioCaps->getMaxInputChannelCount();
+    EXPECT_EQ(maxInputChannelCount, 8);
+    int32_t minInputChannelCount = audioCaps->getMinInputChannelCount();
+    EXPECT_EQ(minInputChannelCount, 1);
+}
+
+TEST_F(AudioCapsAacTest, AudioCaps_Aac_SupportedSampleRates) {
+    const std::vector<int32_t>& sampleRates = audioCaps->getSupportedSampleRates();
+    EXPECT_EQ(sampleRates, std::vector<int32_t>({7350, 8000, 11025, 12000, 16000, 22050,
+            24000, 32000, 44100, 48000}));
+
+    EXPECT_FALSE(audioCaps->isSampleRateSupported(6000))
+            << "isSampleRateSupported returned true for unsupported sample rate";
+    EXPECT_TRUE(audioCaps->isSampleRateSupported(8000))
+            << "isSampleRateSupported returned false for supported sample rate";
+    EXPECT_TRUE(audioCaps->isSampleRateSupported(12000))
+            << "isSampleRateSupported returned false for supported sample rate";
+    EXPECT_FALSE(audioCaps->isSampleRateSupported(44000))
+            << "isSampleRateSupported returned true for unsupported sample rate";
+    EXPECT_TRUE(audioCaps->isSampleRateSupported(48000))
+            << "isSampleRateSupported returned false for supported sample rate";
+}
+
+class AudioCapsRawTest : public testing::Test {
+protected:
+    AudioCapsRawTest() {
+        std::string mediaType = MIMETYPE_AUDIO_RAW;
+
+        sp<AMessage> details = new AMessage;
+        details->setString("bitrate-range", "1-10000000");
+        details->setString("channel-ranges", "1,2,3,4,5,6,7,8,9,10,11,12");
+        details->setString("sample-rate-ranges", "8000-192000");
+
+        std::vector<ProfileLevel> profileLevel;
+
+        audioCaps = AudioCapabilities::Create(mediaType, profileLevel, details);
+    }
+
+    std::shared_ptr<AudioCapabilities> audioCaps;
+};
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_Bitrate) {
+    const Range<int32_t>& bitrateRange = audioCaps->getBitrateRange();
+    EXPECT_EQ(bitrateRange.lower(), 1);
+    EXPECT_EQ(bitrateRange.upper(), 10000000);
+}
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_InputChannelCount) {
+    int32_t maxInputChannelCount = audioCaps->getMaxInputChannelCount();
+    EXPECT_EQ(maxInputChannelCount, 12);
+    int32_t minInputChannelCount = audioCaps->getMinInputChannelCount();
+    EXPECT_EQ(minInputChannelCount, 1);
+}
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_InputChannelCountRanges) {
+    const std::vector<Range<int32_t>>& inputChannelCountRanges
+            = audioCaps->getInputChannelCountRanges();
+    std::vector<Range<int32_t>> expectedOutput({{1,1}, {2,2}, {3,3}, {4,4}, {5,5},
+            {6,6}, {7,7}, {8,8}, {9,9}, {10,10}, {11,11}, {12,12}});
+    ASSERT_EQ(inputChannelCountRanges.size(), expectedOutput.size());
+    for (size_t i = 0; i < inputChannelCountRanges.size(); i++) {
+        EXPECT_EQ(inputChannelCountRanges.at(i).lower(), expectedOutput.at(i).lower());
+        EXPECT_EQ(inputChannelCountRanges.at(i).upper(), expectedOutput.at(i).upper());
+    }
+}
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_SupportedSampleRates) {
+    const std::vector<Range<int32_t>>& sampleRateRanges = audioCaps->getSupportedSampleRateRanges();
+    EXPECT_EQ(sampleRateRanges.size(), 1);
+    EXPECT_EQ(sampleRateRanges.at(0).lower(), 8000);
+    EXPECT_EQ(sampleRateRanges.at(0).upper(), 192000);
+
+    EXPECT_EQ(audioCaps->isSampleRateSupported(7000), false);
+    EXPECT_EQ(audioCaps->isSampleRateSupported(10000), true);
+    EXPECT_EQ(audioCaps->isSampleRateSupported(193000), false);
+}
+
+class VideoCapsHevcTest : public testing::Test {
+protected:
+    VideoCapsHevcTest() {
+        std::string mediaType = MIMETYPE_VIDEO_HEVC;
+
+        sp<AMessage> details = new AMessage;
+        details->setString("alignment", "2x2");
+        details->setString("bitrate-range", "1-120000000");
+        details->setString("block-count-range", "1-32640");
+        details->setString("block-size", "16x16");
+        details->setString("blocks-per-second-range", "1-3916800");
+        details->setInt32("feature-adaptive-playback", 0);
+        details->setInt32("feature-can-swap-width-height", 1);
+        details->setString("max-concurrent-instances", "16");
+        details->setString("measured-frame-rate-1280x720-range", "547-553");
+        details->setString("measured-frame-rate-1920x1080-range", "569-572");
+        details->setString("measured-frame-rate-352x288-range", "1150-1250");
+        details->setString("measured-frame-rate-3840x2160-range", "159-159");
+        details->setString("measured-frame-rate-640x360-range", "528-529");
+        details->setString("measured-frame-rate-720x480-range", "546-548");
+        details->setString("performance-point-1280x720-range", "240");
+        details->setString("performance-point-3840x2160-range", "120");
+        details->setString("size-range", "64x64-3840x2176");
+
+        std::vector<ProfileLevel> profileLevel{
+            ProfileLevel(1, 8388608),
+            ProfileLevel(2, 8388608),
+            ProfileLevel(4096, 8388608),
+            ProfileLevel(8192, 8388608),
+        };
+
+        videoCaps = VideoCapabilities::Create(mediaType, profileLevel, details);
+    }
+
+    std::shared_ptr<VideoCapabilities> videoCaps;
+};
+
+TEST_F(VideoCapsHevcTest, VideoCaps_HEVC_Alignment) {
+    int32_t widthAlignment = videoCaps->getWidthAlignment();
+    EXPECT_EQ(widthAlignment, 2);
+    int32_t heightAlignment = videoCaps->getHeightAlignment();
+    EXPECT_EQ(heightAlignment, 2);
+}
+
+TEST_F(VideoCapsHevcTest, VideoCaps_HEVC_BitrateRange) {
+    const Range<int32_t>& bitrateRange = videoCaps->getBitrateRange();
+    EXPECT_EQ(bitrateRange.lower(), 1);
+    EXPECT_EQ(bitrateRange.upper(), 120000000);
+}
+
+TEST_F(VideoCapsHevcTest, VideoCaps_HEVC_SupportedWidthsAndHeights) {
+    const Range<int32_t>& supportedWidths = videoCaps->getSupportedWidths();
+    EXPECT_EQ(supportedWidths.upper(), 3840);
+    const Range<int32_t>& supportedHeights = videoCaps->getSupportedHeights();
+    EXPECT_EQ(supportedHeights.upper(), 3840);
+}
+
+TEST_F(VideoCapsHevcTest, VideoCaps_HEVC_SupportedFrameRates) {
+    const Range<int32_t>& supportedFrameRates = videoCaps->getSupportedFrameRates();
+    EXPECT_EQ(supportedFrameRates.lower(), 0);
+    EXPECT_EQ(supportedFrameRates.upper(), 960);
+
+    std::optional<Range<double>> supportedFR720p = videoCaps->getSupportedFrameRatesFor(1280, 720);
+    EXPECT_EQ(supportedFR720p.value().upper(), 960.0);
+    std::optional<Range<double>> supportedFR1080p
+            = videoCaps->getSupportedFrameRatesFor(1920, 1080);
+    EXPECT_EQ(supportedFR1080p.value().upper(), 480.0);
+    std::optional<Range<double>> supportedFR4k = videoCaps->getSupportedFrameRatesFor(3840, 2160);
+    EXPECT_EQ(std::round(supportedFR4k.value().upper()), 121);
+}
+
+TEST_F(VideoCapsHevcTest, VideoCaps_HEVC_AchievableFrameRates) {
+    std::optional<Range<double>> achievableFR1080p
+            = videoCaps->getAchievableFrameRatesFor(1920, 1080);
+    ASSERT_NE(achievableFR1080p, std::nullopt) << "resolution not supported";
+    EXPECT_EQ(achievableFR1080p.value().lower(), 569);
+    EXPECT_EQ(achievableFR1080p.value().upper(), 572);
+}
+
+class EncoderCapsAacTest : public testing::Test {
+protected:
+    EncoderCapsAacTest() {
+        std::string mediaType = MIMETYPE_AUDIO_AAC;
+
+        sp<AMessage> details = new AMessage;
+        details->setString("bitrate-range", "8000-960000");
+        details->setString("max-channel-count", "6");
+        details->setString("sample-rate-ranges",
+                "8000,11025,12000,16000,22050,24000,32000,44100,48000");
+
+        std::vector<ProfileLevel> profileLevel{
+            ProfileLevel(2, 0),
+            ProfileLevel(5, 0),
+            ProfileLevel(29, 0),
+            ProfileLevel(23, 0),
+            ProfileLevel(39, 0),
+        };
+
+        encoderCaps = EncoderCapabilities::Create(mediaType, profileLevel, details);
+    }
+
+    std::shared_ptr<EncoderCapabilities> encoderCaps;
+};
+
+TEST_F(EncoderCapsAacTest, EncoderCaps_AAC_ComplexityRange) {
+    const Range<int>& complexityRange = encoderCaps->getComplexityRange();
+    EXPECT_EQ(complexityRange.lower(), 0);
+    EXPECT_EQ(complexityRange.upper(), 0);
+}
+
+TEST_F(EncoderCapsAacTest, EncoderCaps_AAC_QualityRange) {
+    const Range<int>& qualityRange = encoderCaps->getQualityRange();
+    EXPECT_EQ(qualityRange.lower(), 0);
+    EXPECT_EQ(qualityRange.upper(), 0);
+}
+
+TEST_F(EncoderCapsAacTest, EncoderCaps_AAC_SupportedBitrateMode) {
+    EXPECT_FALSE(encoderCaps->isBitrateModeSupported(BITRATE_MODE_CBR));
+    EXPECT_TRUE(encoderCaps->isBitrateModeSupported(BITRATE_MODE_VBR));
+    EXPECT_FALSE(encoderCaps->isBitrateModeSupported(BITRATE_MODE_CQ));
+    EXPECT_FALSE(encoderCaps->isBitrateModeSupported(BITRATE_MODE_CBR_FD));
+}
+
+class EncoderCapsFlacTest : public testing::Test {
+protected:
+    EncoderCapsFlacTest() {
+        std::string mediaType = MIMETYPE_AUDIO_FLAC;
+
+        sp<AMessage> details = new AMessage;
+        details->setString("bitrate-range", "1-21000000");
+        details->setString("complexity-default", "5");
+        details->setString("complexity-range", "0-8");
+        details->setString("feature-bitrate-modes", "CQ");
+        details->setString("max-channel-count", "2");
+        details->setString("sample-rate-ranges", "1-655350");
+
+        std::vector<ProfileLevel> profileLevel;
+
+        encoderCaps = EncoderCapabilities::Create(mediaType, profileLevel, details);
+    }
+
+    std::shared_ptr<EncoderCapabilities> encoderCaps;
+};
+
+TEST_F(EncoderCapsFlacTest, EncoderCaps_FLAC_ComplexityRange) {
+    const Range<int>& complexityRange = encoderCaps->getComplexityRange();
+    EXPECT_EQ(complexityRange.lower(), 0);
+    EXPECT_EQ(complexityRange.upper(), 8);
+}
+
+TEST_F(EncoderCapsFlacTest, EncoderCaps_FLAC_QualityRange) {
+    const Range<int>& qualityRange = encoderCaps->getQualityRange();
+    EXPECT_EQ(qualityRange.lower(), 0);
+    EXPECT_EQ(qualityRange.upper(), 0);
+}
+
+TEST_F(EncoderCapsFlacTest, EncoderCaps_FLAC_SupportedBitrateMode) {
+    EXPECT_FALSE(encoderCaps->isBitrateModeSupported(BITRATE_MODE_CBR));
+    EXPECT_FALSE(encoderCaps->isBitrateModeSupported(BITRATE_MODE_VBR));
+    EXPECT_TRUE(encoderCaps->isBitrateModeSupported(BITRATE_MODE_CQ));
+    EXPECT_FALSE(encoderCaps->isBitrateModeSupported(BITRATE_MODE_CBR_FD));
+}
+
+class EncoderCapsHevcTest : public testing::Test {
+protected:
+    EncoderCapsHevcTest() {
+        std::string mediaType = MIMETYPE_VIDEO_HEVC;
+
+        sp<AMessage> details = new AMessage;
+        details->setString("alignment", "2x2");
+        details->setString("bitrate-range", "1-120000000");
+        details->setString("block-count-range", "1-8160");
+        details->setString("block-size", "32x32");
+        details->setString("blocks-per-second-range", "1-979200");
+        details->setString("feature-bitrate-modes", "VBR,CBR,CQ,CBR-FD");
+        details->setInt32("feature-can-swap-width-height", 1);
+        details->setInt32("feature-qp-bounds", 0);
+        details->setInt32("feature-vq-minimum-quality", 0);
+        details->setString("max-concurrent-instances", "16");
+        details->setString("measured-frame-rate-1280x720-range", "154-198");
+        details->setString("measured-frame-rate-1920x1080-range", "46-97");
+        details->setString("measured-frame-rate-320x240-range", "371-553");
+        details->setString("measured-frame-rate-720x480-range", "214-305");
+        details->setString("performance-point-1280x720-range", "240");
+        details->setString("performance-point-3840x2160-range", "120");
+        details->setString("quality-default", "57");
+        details->setString("quality-range", "0-100");
+        details->setString("quality-scale", "linear");
+        details->setString("size-range", "64x64-3840x2176");
+
+        std::vector<ProfileLevel> profileLevel{
+            ProfileLevel(1, 2097152),
+            ProfileLevel(2, 2097152),
+            ProfileLevel(4096, 2097152),
+            ProfileLevel(8192, 2097152),
+        };
+
+        encoderCaps = EncoderCapabilities::Create(mediaType, profileLevel, details);
+    }
+
+    std::shared_ptr<EncoderCapabilities> encoderCaps;
+};
+
+TEST_F(EncoderCapsHevcTest, EncoderCaps_HEVC_ComplexityRange) {
+    const Range<int>& complexityRange = encoderCaps->getComplexityRange();
+    EXPECT_EQ(complexityRange.lower(), 0);
+    EXPECT_EQ(complexityRange.upper(), 0);
+}
+
+TEST_F(EncoderCapsHevcTest, EncoderCaps_HEVC_QualityRange) {
+    const Range<int>& qualityRange = encoderCaps->getQualityRange();
+    EXPECT_EQ(qualityRange.lower(), 0);
+    EXPECT_EQ(qualityRange.upper(), 100);
+}
+
+TEST_F(EncoderCapsHevcTest, EncoderCaps_HEVC_SupportedBitrateMode) {
+    EXPECT_TRUE(encoderCaps->isBitrateModeSupported(BITRATE_MODE_CBR));
+    EXPECT_TRUE(encoderCaps->isBitrateModeSupported(BITRATE_MODE_VBR));
+    EXPECT_TRUE(encoderCaps->isBitrateModeSupported(BITRATE_MODE_CQ));
+    EXPECT_TRUE(encoderCaps->isBitrateModeSupported(BITRATE_MODE_CBR_FD));
+}
diff --git a/media/libmedia/xsd/vts/Android.bp b/media/libmedia/xsd/vts/Android.bp
index 83ab977..add7b51 100644
--- a/media/libmedia/xsd/vts/Android.bp
+++ b/media/libmedia/xsd/vts/Android.bp
@@ -15,6 +15,7 @@
 //
 
 package {
+    default_team: "trendy_team_android_kernel",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libmedia_license"
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 718f782..a10c509 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -45,6 +45,7 @@
         "android.hardware.media.omx@1.0",
         "av-types-aidl-cpp",
         "framework-permission-aidl-cpp",
+        "libaconfig_storage_read_api_cc",
         "libaudioclient_aidl_conversion",
         "libbase",
         "libbinder_ndk",
@@ -76,6 +77,7 @@
         "libstagefright_httplive",
         "libutils",
         "packagemanager_aidl-cpp",
+        "server_configurable_flags",
     ],
 
     header_libs: [
@@ -86,6 +88,7 @@
     ],
 
     static_libs: [
+        "com.android.media.flags.editing-aconfig-cc",
         "libplayerservice_datasource",
         "libstagefright_nuplayer",
         "libstagefright_rtsp",
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 10a1da7..b267c08 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1816,8 +1816,6 @@
         const sp<AudioSystem::AudioDeviceCallback>& deviceCallback)
     : mCachedPlayerIId(PLAYER_PIID_INVALID),
       mCallback(NULL),
-      mCallbackCookie(NULL),
-      mCallbackData(NULL),
       mStreamType(AUDIO_STREAM_MUSIC),
       mLeftVolume(1.0),
       mRightVolume(1.0),
@@ -2085,7 +2083,7 @@
 status_t MediaPlayerService::AudioOutput::open(
         uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
         audio_format_t format, int bufferCount,
-        AudioCallback cb, void *cookie,
+        AudioCallback cb, const wp<RefBase>& cookie,
         audio_output_flags_t flags,
         const audio_offload_info_t *offloadInfo,
         bool doNotReconnect,
@@ -2514,6 +2512,15 @@
     {
         Mutex::Autolock lock(mLock);
         track = mTrack;
+    }
+
+    // do not hold lock while joining.
+    if (track) {
+        track->stopAndJoinCallbacks();
+    }
+
+    {
+        Mutex::Autolock lock(mLock);
         close_l(); // clears mTrack
     }
     // destruction of the track occurs outside of mutex.
@@ -2705,7 +2712,7 @@
         return 0;
     }
     size_t actualSize = (*me->mCallback)(
-            me.get(), buffer.data(), buffer.size(), me->mCallbackCookie,
+            me, buffer.data(), buffer.size(), me->mCallbackCookie,
             CB_EVENT_FILL_BUFFER);
 
     // Log when no data is returned from the callback.
@@ -2730,7 +2737,7 @@
         return;
     }
     ALOGV("callbackwrapper: deliver EVENT_STREAM_END");
-    (*me->mCallback)(me.get(), NULL /* buffer */, 0 /* size */,
+    (*me->mCallback)(me, nullptr /* buffer */, 0 /* size */,
             me->mCallbackCookie, CB_EVENT_STREAM_END);
     unlock();
 }
@@ -2744,7 +2751,7 @@
         return;
     }
     ALOGV("callbackwrapper: deliver EVENT_TEAR_DOWN");
-    (*me->mCallback)(me.get(),  NULL /* buffer */, 0 /* size */,
+    (*me->mCallback)(me, nullptr /* buffer */, 0 /* size */,
             me->mCallbackCookie, CB_EVENT_TEAR_DOWN);
     unlock();
 }
@@ -2794,7 +2801,7 @@
 struct CallbackThread : public Thread {
     CallbackThread(const wp<MediaPlayerBase::AudioSink> &sink,
                    MediaPlayerBase::AudioSink::AudioCallback cb,
-                   void *cookie);
+                   const wp<RefBase>& cookie);
 
 protected:
     virtual ~CallbackThread();
@@ -2804,7 +2811,7 @@
 private:
     wp<MediaPlayerBase::AudioSink> mSink;
     MediaPlayerBase::AudioSink::AudioCallback mCallback;
-    void *mCookie;
+    wp<RefBase> mCookie;
     void *mBuffer;
     size_t mBufferSize;
 
@@ -2815,7 +2822,7 @@
 CallbackThread::CallbackThread(
         const wp<MediaPlayerBase::AudioSink> &sink,
         MediaPlayerBase::AudioSink::AudioCallback cb,
-        void *cookie)
+        const wp<RefBase>& cookie)
     : mSink(sink),
       mCallback(cb),
       mCookie(cookie),
@@ -2842,7 +2849,7 @@
     }
 
     size_t actualSize =
-        (*mCallback)(sink.get(), mBuffer, mBufferSize, mCookie,
+        (*mCallback)(sink, mBuffer, mBufferSize, mCookie,
                 MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER);
 
     if (actualSize > 0) {
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index cb544bd..76b7bcf 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -104,14 +104,14 @@
         virtual int64_t         getBufferDurationInUs() const;
         virtual audio_output_flags_t getFlags() const { return mFlags; }
 
-        virtual status_t        open(
+        status_t open(
                 uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
                 audio_format_t format, int bufferCount,
-                AudioCallback cb, void *cookie,
+                AudioCallback cb, const wp<RefBase>& cookie,
                 audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
                 const audio_offload_info_t *offloadInfo = NULL,
                 bool doNotReconnect = false,
-                uint32_t suggestedFrameCount = 0);
+                uint32_t suggestedFrameCount = 0) override;
 
         virtual void            setPlayerIId(int32_t playerIId);
 
@@ -164,7 +164,7 @@
         sp<AudioOutput>         mNextOutput;
         int                     mCachedPlayerIId;
         AudioCallback           mCallback;
-        void *                  mCallbackCookie;
+        wp<RefBase>             mCallbackCookie;
         sp<CallbackData>        mCallbackData;
         audio_stream_type_t     mStreamType;
         audio_attributes_t *    mAttributes;
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index dce6ba8..086baa3 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -68,6 +68,7 @@
 #include <system/audio.h>
 
 #include <media/stagefright/rtsp/ARTPWriter.h>
+#include <com_android_media_editing_flags.h>
 
 namespace android {
 
@@ -2121,7 +2122,8 @@
         uint32_t bLayers = std::min(2u, tsLayers - 1); // use up-to 2 B-layers
         // TODO(b/341121900): Remove this once B frames are handled correctly in screen recorder
         // use case in case of mic only
-        if (mAudioSource == AUDIO_SOURCE_MIC && mVideoSource == VIDEO_SOURCE_SURFACE) {
+        if (!com::android::media::editing::flags::stagefrightrecorder_enable_b_frames()
+                && mAudioSource == AUDIO_SOURCE_MIC && mVideoSource == VIDEO_SOURCE_SURFACE) {
             bLayers = 0;
         }
         uint32_t pLayers = tsLayers - bLayers;
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index 78163e4..fcdaff9 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -64,6 +64,7 @@
         "mediarecorder_fuzzer.cpp",
     ],
     defaults: [
+        "libaudioflinger_dependencies",
         "libmediaplayerserviceFuzzer_defaults",
     ],
     static_libs: [
@@ -76,12 +77,10 @@
     ],
     shared_libs: [
         "android.hardware.media.omx@1.0",
-        "av-types-aidl-cpp",
         "media_permission-aidl-cpp",
-        "libaudioclient_aidl_conversion",
         "libactivitymanager_aidl",
         "libandroid_net",
-        "libaudioclient",
+        "libaudioflinger",
         "libcamera_client",
         "libcodec2_client",
         "libcrypto",
@@ -89,24 +88,13 @@
         "libdrmframework",
         "libgui",
         "libhidlbase",
-        "liblog",
         "libmedia_codeclist",
         "libmedia_omx",
         "libmediadrm",
-        "libmediametrics",
-        "libmediautils",
-        "libmemunreachable",
         "libnetd_client",
-        "libpowermanager",
         "libstagefright_httplive",
         "packagemanager_aidl-cpp",
         "libfakeservicemanager",
-        "libvibrator",
-        "libnbaio",
-        "libnblog",
-        "libpowermanager",
-        "libaudioprocessing",
-        "libaudioflinger",
         "libresourcemanagerservice",
         "libmediametricsservice",
         "mediametricsservice-aidl-cpp",
@@ -122,10 +110,6 @@
         "android.hardware.camera.device@3.4",
         "libaudiohal@7.0",
     ],
-    header_libs: [
-        "libaudiohal_headers",
-        "libaudioflinger_headers",
-    ],
 }
 
 cc_fuzz {
diff --git a/media/libmediaplayerservice/fuzzer/corpus/seed-2024-08-29-0 b/media/libmediaplayerservice/fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..aae78ae
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index be1aa00..495cf00 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -76,6 +76,33 @@
         virtual ~Listener() {}
     };
 
+    // For the AudioCallback, we provide a WeakWrapper class
+    // to wrap a virtual RefBase derived object to pass into the AudioCallback.
+    // This is not used for NuPlayer::Renderer, only for legacy AudioPlayer implementation.
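+    // A caller wraps its RefBase-derived object as sp<WeakWrapper<T>> and passes that as the
+    // AudioCallback cookie; the callback later recovers the object with
+    // WeakWrapper<T>::promoteFromRefBase(cookie).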
+    template <typename T>
+    class WeakWrapper : public RefBase {
+    public:
+        explicit WeakWrapper(const sp<T>& object)
+                : mObject(object) {}
+
+        sp<T> promote() const {
+            if (mObject == nullptr) return {};
+            return mObject.promote();
+        }
+
+        static sp<T> promoteFromRefBase(const wp<RefBase>& weakWrapper) {
+            if (weakWrapper == nullptr) return {};
+            const auto refBase = weakWrapper.promote();
+            if (!refBase) return {};
+            const auto wrapper = sp<WeakWrapper<T>>::fromExisting(
+                    static_cast<WeakWrapper<T>*>(refBase.get()));
+            return wrapper->promote();
+        }
+
+    private:
+        const wp<T> mObject;
+    };
+
     // AudioSink: abstraction layer for audio output
     class AudioSink : public RefBase {
     public:
@@ -89,8 +116,8 @@
 
         // Callback returns the number of bytes actually written to the buffer.
         typedef size_t (*AudioCallback)(
-                AudioSink *audioSink, void *buffer, size_t size, void *cookie,
-                        cb_event_t event);
+                const sp<AudioSink>& audioSink, void *buffer, size_t size,
+                const wp<RefBase>& cookie, cb_event_t event);
 
         virtual             ~AudioSink() {}
         virtual bool        ready() const = 0; // audio output is open and ready
@@ -117,7 +144,7 @@
                 audio_format_t format=AUDIO_FORMAT_PCM_16_BIT,
                 int bufferCount=DEFAULT_AUDIOSINK_BUFFERCOUNT,
                 AudioCallback cb = NULL,
-                void *cookie = NULL,
+                const wp<RefBase>& cookie = {},
                 audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
                 const audio_offload_info_t *offloadInfo = NULL,
                 bool doNotReconnect = false,
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 3d4e955..e434a3d 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -102,6 +102,10 @@
     switch (pcmEncoding) {
     case kAudioEncodingPcmFloat:
         return AUDIO_FORMAT_PCM_FLOAT;
+    case kAudioEncodingPcm32bit:
+        return AUDIO_FORMAT_PCM_32_BIT;
+    case kAudioEncodingPcm24bitPacked:
+        return AUDIO_FORMAT_PCM_24_BIT_PACKED;
     case kAudioEncodingPcm16bit:
         return AUDIO_FORMAT_PCM_16_BIT;
     case kAudioEncodingPcm8bit:
@@ -904,12 +908,15 @@
 
 // static
 size_t NuPlayer::Renderer::AudioSinkCallback(
-        MediaPlayerBase::AudioSink * /* audioSink */,
+        const sp<MediaPlayerBase::AudioSink>& /* audioSink */,
         void *buffer,
         size_t size,
-        void *cookie,
+        const wp<RefBase>& cookie,
         MediaPlayerBase::AudioSink::cb_event_t event) {
-    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
+    if (cookie == nullptr) return 0;
+    const auto ref = cookie.promote();
+    if (!ref) return 0;
+    const auto me = static_cast<NuPlayer::Renderer*>(ref.get()); // we already hold a sp.
 
     switch (event) {
         case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
@@ -2025,7 +2032,12 @@
     if (offloadingAudio()) {
         AString mime;
         CHECK(format->findString("mime", &mime));
-        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
+        status_t err = OK;
+        if (audioFormat == AUDIO_FORMAT_PCM_16_BIT) {
+            // The format message probably carries no pcm-encoding; fall back to mapping
+            // the mime type to an audio format.
+            err = mapMimeToAudioFormat(audioFormat, mime.c_str());
+        }
 
         if (err != OK) {
             ALOGE("Couldn't map mime \"%s\" to a valid "
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
index 574ad3d..cfa742e 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
@@ -43,8 +43,8 @@
              uint32_t flags = 0);
 
     static size_t AudioSinkCallback(
-            MediaPlayerBase::AudioSink *audioSink,
-            void *data, size_t size, void *me,
+            const sp<MediaPlayerBase::AudioSink>& audioSink,
+            void *data, size_t size, const wp<RefBase>& me,
             MediaPlayerBase::AudioSink::cb_event_t event);
 
     void queueBuffer(
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index ac178aa..9ed5343 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -99,6 +99,7 @@
 
     static_libs: [
         "libstagefright_esds",
+        "android.media.extractor.flags-aconfig-cc",
     ],
 
     export_include_dirs: [
@@ -321,6 +322,8 @@
 
     static_libs: [
         "android.media.codec-aconfig-cc",
+        "android.media.extractor.flags-aconfig-cc",
+        "com.android.media.flags.editing-aconfig-cc",
         "libstagefright_esds",
         "libstagefright_color_conversion",
         "libyuv",
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 46703bb..cc78510 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -18,19 +18,14 @@
 #define LOG_TAG "FrameDecoder"
 #define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include "include/FrameDecoder.h"
-#include "include/FrameCaptureLayer.h"
-#include "include/HevcUtils.h"
+#include <android_media_codec.h>
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
 #include <gui/Surface.h>
 #include <inttypes.h>
-#include <mediadrm/ICrypto.h>
 #include <media/IMediaSource.h>
 #include <media/MediaCodecBuffer.h>
-#include <media/stagefright/foundation/avc_utils.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/CodecBase.h>
 #include <media/stagefright/ColorConverter.h>
 #include <media/stagefright/FrameCaptureProcessor.h>
 #include <media/stagefright/MediaBuffer.h>
@@ -39,13 +34,24 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/foundation/avc_utils.h>
+#include <mediadrm/ICrypto.h>
 #include <private/media/VideoFrame.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
+#include "include/FrameCaptureLayer.h"
+#include "include/HevcUtils.h"
+
+#include <C2Buffer.h>
+#include <Codec2BufferUtils.h>
 
 namespace android {
 
 static const int64_t kBufferTimeOutUs = 10000LL; // 10 msec
+static const int64_t kAsyncBufferTimeOutUs = 2000000LL; // 2000 msec
 static const size_t kRetryCount = 100; // must be >0
 static const int64_t kDefaultSampleDurationUs = 33333LL; // 33ms
 // For codec, 0 is the highest importance; higher the number lesser important.
@@ -232,6 +238,104 @@
     return false;
 }
 
+AsyncCodecHandler::AsyncCodecHandler(const wp<FrameDecoder>& frameDecoder) {
+    mFrameDecoder = frameDecoder;
+}
+
+void AsyncCodecHandler::onMessageReceived(const sp<AMessage>& msg) {
+    switch (msg->what()) {
+        case FrameDecoder::kWhatCallbackNotify:
+            int32_t callbackId;
+            if (!msg->findInt32("callbackID", &callbackId)) {
+                ALOGE("kWhatCallbackNotify: callbackID is expected.");
+                break;
+            }
+            switch (callbackId) {
+                case MediaCodec::CB_INPUT_AVAILABLE: {
+                    int32_t index;
+                    if (!msg->findInt32("index", &index)) {
+                        ALOGE("CB_INPUT_AVAILABLE: index is expected.");
+                        break;
+                    }
+                    ALOGD("CB_INPUT_AVAILABLE received, index is %d", index);
+                    sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
+                    if (frameDecoder != nullptr) {
+                        frameDecoder->handleInputBufferAsync(index);
+                    }
+                    break;
+                }
+                case MediaCodec::CB_OUTPUT_AVAILABLE: {
+                    int32_t index;
+                    int64_t timeUs;
+                    CHECK(msg->findInt32("index", &index));
+                    CHECK(msg->findInt64("timeUs", &timeUs));
+                    ALOGD("CB_OUTPUT_AVAILABLE received, index is %d", index);
+                    sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
+                    if (frameDecoder != nullptr) {
+                        frameDecoder->handleOutputBufferAsync(index, timeUs);
+                    }
+                    break;
+                }
+                case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
+                    ALOGD("CB_OUTPUT_FORMAT_CHANGED received");
+                    sp<AMessage> format;
+                    if (!msg->findMessage("format", &format) || format == nullptr) {
+                        ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
+                        break;
+                    }
+                    sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
+                    if (frameDecoder != nullptr) {
+                        frameDecoder->handleOutputFormatChangeAsync(format);
+                    }
+                    break;
+                }
+                case MediaCodec::CB_ERROR: {
+                    status_t err;
+                    int32_t actionCode;
+                    AString detail;
+                    if (!msg->findInt32("err", &err)) {
+                        ALOGE("CB_ERROR: err is expected.");
+                        break;
+                    }
+                    if (!msg->findInt32("actionCode", &actionCode)) {
+                        ALOGE("CB_ERROR: actionCode is expected.");
+                        break;
+                    }
+                    msg->findString("detail", &detail);
+                    ALOGE("Codec reported error(0x%x/%s), actionCode(%d), detail(%s)", err,
+                          StrMediaError(err).c_str(), actionCode, detail.c_str());
+                    break;
+                }
+                default:
+                    ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", callbackId);
+                    break;
+            }
+            break;
+        default:
+            ALOGE("unexpected message received: %s", msg->debugString().c_str());
+            break;
+    }
+}
+
+void InputBufferIndexQueue::enqueue(int32_t index) {
+    std::scoped_lock<std::mutex> lock(mMutex);
+    mQueue.push(index);
+    mCondition.notify_one();
+}
+
+bool InputBufferIndexQueue::dequeue(int32_t* index, int32_t timeOutUs) {
+    std::unique_lock<std::mutex> lock(mMutex);
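+    // Wait up to timeOutUs for the codec to report an input buffer index; the predicate
+    // re-checks the queue so spurious wake-ups do not return early.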
+    bool hasAvailableIndex = mCondition.wait_for(lock, std::chrono::microseconds(timeOutUs),
+                                                 [this] { return !mQueue.empty(); });
+    if (hasAvailableIndex) {
+        *index = mQueue.front();
+        mQueue.pop();
+        return true;
+    } else {
+        return false;
+    }
+}
+
 //static
 sp<IMemory> FrameDecoder::getMetadataOnly(
         const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail, uint32_t bitDepth) {
@@ -281,6 +385,7 @@
         const sp<MetaData> &trackMeta,
         const sp<IMediaSource> &source)
     : mComponentName(componentName),
+      mUseBlockModel(false),
       mTrackMeta(trackMeta),
       mSource(source),
       mDstFormat(OMX_COLOR_Format16bitRGB565),
@@ -290,6 +395,10 @@
 }
 
 FrameDecoder::~FrameDecoder() {
+    if (mHandler != NULL) {
+        mAsyncLooper->stop();
+        mAsyncLooper->unregisterHandler(mHandler->id());
+    }
     if (mDecoder != NULL) {
         mDecoder->release();
         mSource->stop();
@@ -333,8 +442,18 @@
         return (decoder.get() == NULL) ? NO_MEMORY : err;
     }
 
+    if (mUseBlockModel) {
+        mAsyncLooper = new ALooper;
+        mAsyncLooper->start();
+        mHandler = new AsyncCodecHandler(wp<FrameDecoder>(this));
+        mAsyncLooper->registerHandler(mHandler);
+        sp<AMessage> callbackMsg = new AMessage(kWhatCallbackNotify, mHandler);
+        decoder->setCallback(callbackMsg);
+    }
+
     err = decoder->configure(
-            videoFormat, mSurface, NULL /* crypto */, 0 /* flags */);
+            videoFormat, mSurface, NULL /* crypto */,
+            mUseBlockModel ? MediaCodec::CONFIGURE_FLAG_USE_BLOCK_MODEL : 0 /* flags */);
     if (err != OK) {
         ALOGW("configure returned error %d (%s)", err, asString(err));
         decoder->release();
@@ -362,10 +481,18 @@
 sp<IMemory> FrameDecoder::extractFrame(FrameRect *rect) {
     ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ExtractFrame");
     status_t err = onExtractRect(rect);
-    if (err == OK) {
+    if (err != OK) {
+        ALOGE("onExtractRect error %d", err);
+        return NULL;
+    }
+
+    if (!mUseBlockModel) {
         err = extractInternal();
+    } else {
+        err = extractInternalUsingBlockModel();
     }
     if (err != OK) {
+        ALOGE("extractInternal error %d", err);
         return NULL;
     }
 
@@ -380,6 +507,7 @@
         ALOGE("decoder is not initialized");
         return NO_INIT;
     }
+
     do {
         size_t index;
         int64_t ptsUs = 0LL;
@@ -433,7 +561,8 @@
                         (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
                         mediaBuffer->range_length());
 
-                onInputReceived(codecBuffer, mediaBuffer->meta_data(), mFirstSample, &flags);
+                onInputReceived(codecBuffer->data(), codecBuffer->size(), mediaBuffer->meta_data(),
+                                mFirstSample, &flags);
                 mFirstSample = false;
             }
 
@@ -487,11 +616,14 @@
                         ALOGE("failed to get output buffer %zu", index);
                         break;
                     }
+                    uint8_t* frameData = videoFrameBuffer->data();
+                    sp<ABuffer> imageData;
+                    videoFrameBuffer->meta()->findBuffer("image-data", &imageData);
                     if (mSurface != nullptr) {
                         mDecoder->renderOutputBufferAndRelease(index);
-                        err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
+                        err = onOutputReceived(frameData, imageData, mOutputFormat, ptsUs, &done);
                     } else {
-                        err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
+                        err = onOutputReceived(frameData, imageData, mOutputFormat, ptsUs, &done);
                         mDecoder->releaseOutputBuffer(index);
                     }
                 } else {
@@ -510,6 +642,75 @@
     return err;
 }
 
+status_t FrameDecoder::extractInternalUsingBlockModel() {
+    status_t err = OK;
+    MediaBufferBase* mediaBuffer = NULL;
+    int64_t ptsUs = 0LL;
+    uint32_t flags = 0;
+    int32_t index;
+    mHandleOutputBufferAsyncDone = false;
+
+    err = mSource->read(&mediaBuffer, &mReadOptions);
+    mReadOptions.clearSeekTo();
+    if (err != OK) {
+        ALOGW("Input Error: err=%d", err);
+        if (mediaBuffer) {
+            mediaBuffer->release();
+        }
+        return err;
+    }
+
+    size_t inputSize = mediaBuffer->range_length();
+    std::shared_ptr<C2LinearBlock> block =
+            MediaCodec::FetchLinearBlock(inputSize, {std::string{mComponentName.c_str()}});
+    C2WriteView view{block->map().get()};
+    if (view.error() != C2_OK) {
+        ALOGE("Fatal error: failed to allocate and map a block");
+        mediaBuffer->release();
+        return NO_MEMORY;
+    }
+    if (inputSize > view.capacity()) {
+        ALOGE("Fatal error: allocated block is too small "
+              "(input size %zu; block cap %u)",
+              inputSize, view.capacity());
+        mediaBuffer->release();
+        return BAD_VALUE;
+    }
+    CHECK(mediaBuffer->meta_data().findInt64(kKeyTime, &ptsUs));
+    memcpy(view.base(), (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
+           inputSize);
+    std::shared_ptr<C2Buffer> c2Buffer =
+            C2Buffer::CreateLinearBuffer(block->share(0, inputSize, C2Fence{}));
+    onInputReceived(view.base(), inputSize, mediaBuffer->meta_data(), true /* firstSample */,
+                    &flags);
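+    // Only a single access unit is queued in block-model mode, so tag it EOS to make the
+    // codec emit the decoded frame without waiting for further input.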
+    flags |= MediaCodec::BUFFER_FLAG_EOS;
+    mediaBuffer->release();
+
+    std::vector<AccessUnitInfo> infoVec;
+    infoVec.emplace_back(flags, inputSize, ptsUs);
+    sp<BufferInfosWrapper> infos = new BufferInfosWrapper{std::move(infoVec)};
+
+    if (!mInputBufferIndexQueue.dequeue(&index, kAsyncBufferTimeOutUs)) {
+        ALOGE("No available input buffer index for async mode.");
+        return TIMED_OUT;
+    }
+
+    AString errorDetailMsg;
+    ALOGD("QueueLinearBlock: index=%d size=%zu ts=%" PRId64 " us flags=%x",
+            index, inputSize, ptsUs, flags);
+    err = mDecoder->queueBuffer(index, c2Buffer, infos, nullptr, &errorDetailMsg);
+    if (err != OK) {
+        ALOGE("failed to queueBuffer (err %d): %s", err, errorDetailMsg.c_str());
+        return err;
+    }
+
+    // wait for handleOutputBufferAsync() to finish
+    std::unique_lock _lk(mMutex);
+    mOutputFramePending.wait_for(_lk, std::chrono::microseconds(kAsyncBufferTimeOutUs),
+                                 [this] { return mHandleOutputBufferAsyncDone; });
+    return mHandleOutputBufferAsyncDone ? OK : TIMED_OUT;
+}
+
 //////////////////////////////////////////////////////////////////////
 
 VideoFrameDecoder::VideoFrameDecoder(
@@ -525,6 +726,81 @@
       mDefaultSampleDurationUs(0) {
 }
 
+status_t FrameDecoder::handleOutputFormatChangeAsync(sp<AMessage> format) {
+    // Here format is MediaCodec's internal copy of output format.
+    // Make a copy since the client might modify it.
+    mOutputFormat = format->dup();
+    ALOGD("receive output format in async mode: %s", mOutputFormat->debugString().c_str());
+    return OK;
+}
+
+status_t FrameDecoder::handleInputBufferAsync(int32_t index) {
+    mInputBufferIndexQueue.enqueue(index);
+    return OK;
+}
+
+status_t FrameDecoder::handleOutputBufferAsync(int32_t index, int64_t timeUs) {
+    if (mHandleOutputBufferAsyncDone) {
+        // we have already processed an output buffer, skip others
+        return OK;
+    }
+
+    status_t err = OK;
+    sp<MediaCodecBuffer> videoFrameBuffer;
+    err = mDecoder->getOutputBuffer(index, &videoFrameBuffer);
+    if (err != OK || videoFrameBuffer == nullptr) {
+        ALOGE("failed to get output buffer %d", index);
+        return err;
+    }
+
+    bool onOutputReceivedDone = false;
+    if (mSurface != nullptr) {
+        mDecoder->renderOutputBufferAndRelease(index);
+        // frameData and imgObj will be fetched by captureSurface() inside onOutputReceived(),
+        // so explicitly pass null here.
+        err = onOutputReceived(nullptr, nullptr, mOutputFormat, timeUs, &onOutputReceivedDone);
+    } else {
+        // get stride and frame data for block model buffer
+        std::shared_ptr<C2Buffer> c2buffer = videoFrameBuffer->asC2Buffer();
+        if (!c2buffer
+                || c2buffer->data().type() != C2BufferData::GRAPHIC
+                || c2buffer->data().graphicBlocks().size() == 0u) {
+            ALOGE("C2Buffer precond fail");
+            return ERROR_MALFORMED;
+        }
+
+        std::unique_ptr<const C2GraphicView> view(std::make_unique<const C2GraphicView>(
+            c2buffer->data().graphicBlocks()[0].map().get()));
+        GraphicView2MediaImageConverter converter(*view, mOutputFormat, false /* copy */);
+        if (converter.initCheck() != OK) {
+            ALOGE("Converter init failed: %d", converter.initCheck());
+            return NO_INIT;
+        }
+
+        uint8_t* frameData = converter.wrap()->data();
+        sp<ABuffer> imageData = converter.imageData();
+        if (imageData != nullptr) {
+            mOutputFormat->setBuffer("image-data", imageData);
+            MediaImage2 *img = (MediaImage2*) imageData->data();
+            if (img->mNumPlanes > 0 && img->mType != img->MEDIA_IMAGE_TYPE_UNKNOWN) {
+                int32_t stride = img->mPlane[0].mRowInc;
+                mOutputFormat->setInt32(KEY_STRIDE, stride);
+                ALOGD("updating stride = %d", stride);
+            }
+        }
+
+        err = onOutputReceived(frameData, imageData, mOutputFormat, timeUs, &onOutputReceivedDone);
+        mDecoder->releaseOutputBuffer(index);
+    }
+
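+    // Mark completion and wake extractInternalUsingBlockModel(), which is blocked waiting
+    // for the first fully processed output frame.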
+    if (err == OK && onOutputReceivedDone) {
+        std::lock_guard _lm(mMutex);
+        mHandleOutputBufferAsyncDone = true;
+        mOutputFramePending.notify_one();
+    }
+    return err;
+}
+
 sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
         int64_t frameTimeUs, int seekMode,
         MediaSource::ReadOptions *options,
@@ -575,8 +851,13 @@
     bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
             || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
     if (!isSeekingClosest) {
-        videoFormat->setInt32("android._num-input-buffers", 1);
-        videoFormat->setInt32("android._num-output-buffers", 1);
+        if (mComponentName.startsWithIgnoreCase("c2.")) {
+            mUseBlockModel = android::media::codec::provider_->thumbnail_block_model();
+        } else {
+            // OMX Codec
+            videoFormat->setInt32("android._num-input-buffers", 1);
+            videoFormat->setInt32("android._num-output-buffers", 1);
+        }
     }
 
     if (isHDR(videoFormat)) {
@@ -601,9 +882,8 @@
     return videoFormat;
 }
 
-status_t VideoFrameDecoder::onInputReceived(
-        const sp<MediaCodecBuffer> &codecBuffer,
-        MetaDataBase &sampleMeta, bool firstSample, uint32_t *flags) {
+status_t VideoFrameDecoder::onInputReceived(uint8_t* data, size_t size, MetaDataBase& sampleMeta,
+                                            bool firstSample, uint32_t* flags) {
     bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
             || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
 
@@ -612,10 +892,7 @@
         ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
     }
 
-    if (!isSeekingClosest
-            && ((mIsAvc && IsIDR(codecBuffer->data(), codecBuffer->size()))
-            || (mIsHevc && IsIDR(
-            codecBuffer->data(), codecBuffer->size())))) {
+    if (!isSeekingClosest && ((mIsAvc && IsIDR(data, size)) || (mIsHevc && IsIDR(data, size)))) {
         // Only need to decode one IDR frame, unless we're seeking with CLOSEST
         // option, in which case we need to actually decode to targetTimeUs.
         *flags |= MediaCodec::BUFFER_FLAG_EOS;
@@ -630,7 +907,8 @@
 }
 
 status_t VideoFrameDecoder::onOutputReceived(
-        const sp<MediaCodecBuffer> &videoFrameBuffer,
+        uint8_t* frameData,
+        sp<ABuffer> imgObj,
         const sp<AMessage> &outputFormat,
         int64_t timeUs, bool *done) {
     int64_t durationUs = mDefaultSampleDurationUs;
@@ -703,7 +981,6 @@
         }
 
         mFrame = static_cast<VideoFrame*>(frameMem->unsecurePointer());
-
         setFrame(frameMem);
     }
 
@@ -712,7 +989,7 @@
     if (mCaptureLayer != nullptr) {
         return captureSurface();
     }
-    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
+    ColorConverter colorConverter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
 
     uint32_t standard, range, transfer;
     if (!outputFormat->findInt32("color-standard", (int32_t*)&standard)) {
@@ -724,22 +1001,25 @@
     if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
         transfer = 0;
     }
-    sp<ABuffer> imgObj;
-    if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+
+    if (imgObj != nullptr) {
         MediaImage2 *imageData = nullptr;
         imageData = (MediaImage2 *)(imgObj.get()->data());
         if (imageData != nullptr) {
-            converter.setSrcMediaImage2(*imageData);
+            colorConverter.setSrcMediaImage2(*imageData);
         }
     }
     if (srcFormat == COLOR_FormatYUV420Flexible && imgObj.get() == nullptr) {
         return ERROR_UNSUPPORTED;
     }
-    converter.setSrcColorSpace(standard, range, transfer);
-    if (converter.isValid()) {
+    colorConverter.setSrcColorSpace(standard, range, transfer);
+    if (colorConverter.isValid()) {
         ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ColorConverter");
-        converter.convert(
-                (const uint8_t *)videoFrameBuffer->data(),
+        if (frameData == nullptr) {
+            ALOGD("frameData is null for ColorConverter");
+        }
+        colorConverter.convert(
+                (const uint8_t *)frameData,
                 width, height, stride,
                 crop_left, crop_top, crop_right, crop_bottom,
                 mFrame->getFlattenedData(),
@@ -955,7 +1235,8 @@
 }
 
 status_t MediaImageDecoder::onOutputReceived(
-        const sp<MediaCodecBuffer> &videoFrameBuffer,
+        uint8_t* frameData,
+        sp<ABuffer> imgObj,
         const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
     if (outputFormat == NULL) {
         return ERROR_MALFORMED;
@@ -1008,8 +1289,8 @@
     if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
         transfer = 0;
     }
-    sp<ABuffer> imgObj;
-    if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+
+    if (imgObj != nullptr) {
         MediaImage2 *imageData = nullptr;
         imageData = (MediaImage2 *)(imgObj.get()->data());
         if (imageData != nullptr) {
@@ -1058,7 +1339,7 @@
 
     if (converter.isValid()) {
         converter.convert(
-                (const uint8_t *)videoFrameBuffer->data(),
+                (const uint8_t *)frameData,
                 width, height, stride,
                 crop_left, crop_top, crop_right, crop_bottom,
                 mFrame->getFlattenedData(),
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 15188b0..cb3c185 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -53,6 +53,9 @@
 #include <media/esds/ESDS.h>
 #include "include/HevcUtils.h"
 
+#include <com_android_media_editing_flags.h>
+namespace editing_flags = com::android::media::editing::flags;
+
 #ifndef __predict_false
 #define __predict_false(exp) __builtin_expect((exp) != 0, 0)
 #endif
@@ -158,6 +161,7 @@
     bool isAvc() const { return mIsAvc; }
     bool isHevc() const { return mIsHevc; }
     bool isAv1() const { return mIsAv1; }
+    bool isApv() const { return mIsApv; }
     bool isHeic() const { return mIsHeic; }
     bool isAvif() const { return mIsAvif; }
     bool isHeif() const { return mIsHeif; }
@@ -326,6 +330,7 @@
     bool mIsAvc;
     bool mIsHevc;
     bool mIsAv1;
+    bool mIsApv;
     bool mIsDovi;
     bool mIsAudio;
     bool mIsVideo;
@@ -477,6 +482,7 @@
     void writeAvccBox();
     void writeHvccBox();
     void writeAv1cBox();
+    void writeApvcBox();
     void writeDoviConfigBox();
     void writeUrlBox();
     void writeDrefBox();
@@ -678,6 +684,9 @@
             return "hvc1";
         } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime)) {
             return "av01";
+        } else if (editing_flags::muxer_mp4_enable_apv() &&
+                   !strcasecmp(MEDIA_MIMETYPE_VIDEO_APV, mime)) {
+            return "apv1";
         }
     } else if (!strncasecmp(mime, "application/", 12)) {
         return "mett";
@@ -2264,6 +2273,7 @@
     mIsAvc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
     mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
     mIsAv1 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1);
+    mIsApv = editing_flags::muxer_mp4_enable_apv() && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_APV);
     mIsDovi = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
     mIsAudio = !strncasecmp(mime, "audio/", 6);
     mIsVideo = !strncasecmp(mime, "video/", 6);
@@ -2706,6 +2716,9 @@
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1) ||
                !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
         mMeta->findData(kKeyAV1C, &type, &data, &size);
+    } else if (editing_flags::muxer_mp4_enable_apv() &&
+               !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_APV)) {
+        mMeta->findData(kKeyAPVC, &type, &data, &size);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
         getDolbyVisionProfile();
         if (!mMeta->findData(kKeyAVCC, &type, &data, &size) &&
@@ -3607,7 +3620,7 @@
                             (const uint8_t *)buffer->data()
                                 + buffer->range_offset(),
                             buffer->range_length());
-                } else if (mIsMPEG4 || mIsAv1) {
+                } else if (mIsMPEG4 || mIsAv1 || mIsApv) {
                     err = copyCodecSpecificData((const uint8_t *)buffer->data() + buffer->range_offset(),
                             buffer->range_length());
                 }
@@ -3776,6 +3789,12 @@
             if (mStszTableEntries->count() == 0) {
                 mFirstSampleTimeRealUs = systemTime() / 1000;
                 if (timestampUs < 0 && mFirstSampleStartOffsetUs == 0) {
+                    if (WARN_UNLESS(timestampUs != INT64_MIN, "for %s track", trackName)) {
+                        copy->release();
+                        mSource->stop();
+                        mIsMalformed = true;
+                        break;
+                    }
                     mFirstSampleStartOffsetUs = -timestampUs;
                     timestampUs = 0;
                 }
@@ -4330,6 +4349,7 @@
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime) ||
+        (editing_flags::muxer_mp4_enable_apv() && !strcasecmp(MEDIA_MIMETYPE_VIDEO_APV, mime)) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_IMAGE_AVIF, mime)) {
@@ -4504,6 +4524,9 @@
         writeHvccBox();
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime)) {
         writeAv1cBox();
+    } else if (editing_flags::muxer_mp4_enable_apv() &&
+               !strcasecmp(MEDIA_MIMETYPE_VIDEO_APV, mime)) {
+        writeApvcBox();
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime)) {
         if (mDoviProfile <= DolbyVisionProfileDvheSt) {
             writeHvccBox();
@@ -4938,6 +4961,8 @@
             // Track with start offset.
             ALOGV("Tracks starting > 0");
             int32_t editDurationTicks = 0;
+            int32_t trackStartOffsetBFramesUs = getMinCttsOffsetTimeUs() - kMaxCttsOffsetTimeUs;
+            ALOGV("trackStartOffsetBFramesUs:%" PRId32, trackStartOffsetBFramesUs);
             if (mMinCttsOffsetTicks == mMaxCttsOffsetTicks) {
                 // Video with no B frame or non-video track.
                 editDurationTicks =
@@ -4946,8 +4971,6 @@
                 ALOGV("editDuration:%" PRId64 "us", (trackStartOffsetUs + movieStartOffsetBFramesUs));
             } else {
                 // Track with B frame.
-                int32_t trackStartOffsetBFramesUs = getMinCttsOffsetTimeUs() - kMaxCttsOffsetTimeUs;
-                ALOGV("trackStartOffsetBFramesUs:%" PRId32, trackStartOffsetBFramesUs);
                 editDurationTicks =
                         ((trackStartOffsetUs + movieStartOffsetBFramesUs +
                           trackStartOffsetBFramesUs) * mvhdTimeScale + 5E5) / 1E6;
@@ -4961,7 +4984,15 @@
             } else if (editDurationTicks < 0) {
                 // Only video tracks with B Frames would hit this case.
                 ALOGV("Edit list entry to negate start offset by B frames in other tracks");
-                addOneElstTableEntry(tkhdDurationTicks, std::abs(editDurationTicks), 1, 0);
+                if (com::android::media::editing::flags::
+                        stagefrightrecorder_enable_b_frames()) {
+                    int32_t mediaTimeTicks =
+                            ((trackStartOffsetUs + movieStartOffsetBFramesUs +
+                              trackStartOffsetBFramesUs) * mTimeScale - 5E5) / 1E6;
+                    addOneElstTableEntry(tkhdDurationTicks, std::abs(mediaTimeTicks), 1, 0);
+                } else {
+                    addOneElstTableEntry(tkhdDurationTicks, std::abs(editDurationTicks), 1, 0);
+                }
             } else {
                 ALOGV("No edit list entry needed for this track");
             }
@@ -5087,6 +5118,15 @@
     mOwner->endBox();  // av1C
 }
 
+void MPEG4Writer::Track::writeApvcBox() {
+    CHECK(mCodecSpecificData);
+    CHECK_GE(mCodecSpecificDataSize, 4u);
+
+    mOwner->beginBox("apvC");
+    mOwner->write(mCodecSpecificData, mCodecSpecificDataSize);
+    mOwner->endBox();  // apvC
+}
+
 void MPEG4Writer::Track::writeDoviConfigBox() {
     CHECK_NE(mDoviProfile, 0u);
 
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index d0d5cca..8cb1674 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -487,7 +487,7 @@
                                     .id = getId(mClient),
                                     .name = mCodecName,
                                     .importance = mImportance};
-        return std::move(clientInfo);
+        return clientInfo;
     }
 
 private:
@@ -1189,6 +1189,19 @@
     return new PersistentSurface(bufferProducer, bufferSource);
 }
 
+//static
+status_t MediaCodec::getGloballyAvailableResources(std::vector<GlobalResourceInfo>& resources) {
+    resources.clear();
+    // Make sure codec availability feature is on.
+    if (!android::media::codec::codec_availability()) {
+        return ERROR_UNSUPPORTED;
+    }
+    // TODO: For now this is just an empty function.
+    // The actual implementation should use component store to query the
+    // available resources from hal, and fill in resources with the same.
+    return ERROR_UNSUPPORTED;
+}
+
 // GenerateCodecId generates a 64bit Random ID for each codec that is created.
 // The Codec ID is generated as:
 //   - A process-unique random high 32bits
@@ -1295,7 +1308,12 @@
     CHECK_EQ(mState, UNINITIALIZED);
     mResourceManagerProxy->removeClient();
 
-    flushMediametrics();
+    flushMediametrics();  // this deletes mMetricsHandle
+    // don't keep the last metrics handle around
+    if (mLastMetricsHandle != 0) {
+        mediametrics_delete(mLastMetricsHandle);
+        mLastMetricsHandle = 0;
+    }
 
     // clean any saved metrics info we stored as part of configure()
     if (mConfigureMsg != nullptr) {
@@ -1306,7 +1324,7 @@
     }
 }
 
-// except for in constructor, called from the looper thread (and therefore mutexed)
+// except in the constructor, called from the looper thread (and therefore not mutexed)
 void MediaCodec::initMediametrics() {
     if (mMetricsHandle == 0) {
         mMetricsHandle = mediametrics_create(kCodecKeyName);
@@ -1332,6 +1350,7 @@
         mInputBufferCounter = 0;
     }
 
+    mSubsessionCount = 0;
     mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
     resetMetricsFields();
 }
@@ -1343,6 +1362,17 @@
     mReliabilityContextMetrics = ReliabilityContextMetrics();
 }
 
+// always called from the looper thread (and therefore not mutexed)
+void MediaCodec::resetSubsessionMetricsFields() {
+    mBytesEncoded = 0;
+    mFramesEncoded = 0;
+    mFramesInput = 0;
+    mBytesInput = 0;
+    mEarliestEncodedPtsUs = INT64_MAX;
+    mLatestEncodedPtsUs = INT64_MIN;
+}
+
+// always called from the looper thread
 void MediaCodec::updateMediametrics() {
     if (mMetricsHandle == 0) {
         ALOGV("no metrics handle found");
@@ -1707,6 +1737,7 @@
     }
 }
 
+// except in the destructor, called from the looper thread
 void MediaCodec::flushMediametrics() {
     ALOGV("flushMediametrics");
 
@@ -1720,7 +1751,14 @@
         if (mMetricsToUpload && mediametrics_count(mMetricsHandle) > 0) {
             mediametrics_selfRecord(mMetricsHandle);
         }
-        mediametrics_delete(mMetricsHandle);
+        // keep previous metrics handle for subsequent getMetrics() calls.
+        // NOTE: There could be multiple error events, each flushing the metrics.
+        // We keep the last non-empty metrics handle, so getMetrics() in the
+        // next call will get the latest metrics prior to the errors.
+        if (mLastMetricsHandle != 0) {
+            mediametrics_delete(mLastMetricsHandle);
+        }
+        mLastMetricsHandle = mMetricsHandle;
         mMetricsHandle = 0;
     }
     // we no longer have anything pending upload
@@ -1885,7 +1923,10 @@
         });
     }
 
-    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
+    // NOTE: these were erroneously restricted to video encoders, but we want them for all
+    // codecs.
+    if (android::media::codec::provider_->subsession_metrics()
+            || (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder))) {
         mBytesInput += buffer->size();
         mFramesInput++;
     }
@@ -1907,12 +1948,15 @@
     ++mInputBufferCounter;
 }
 
-// when we get a buffer back from the codec
+// when we get a buffer back from the codec, always called from the looper thread
 void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
 
     CHECK_NE(mState, UNINITIALIZED);
 
-    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
+    // NOTE: these were erroneously restricted to video encoders, but we want them for all
+    // codecs.
+    if (android::media::codec::provider_->subsession_metrics()
+            || (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder))) {
         int32_t flags = 0;
         (void) buffer->meta()->findInt32("flags", &flags);
 
@@ -2015,6 +2059,7 @@
     int32_t flags;
     CHECK(buffer->meta()->findInt32("flags", &flags));
     if (flags & BUFFER_FLAG_DECODE_ONLY) {
+        ALOGV("discardDecodeOnlyOutputBuffer: mPortBuffers[out][%zu] NOT owned by client", index);
         info->mOwnedByClient = false;
         info->mData.clear();
         mBufferChannel->discardBuffer(buffer);
@@ -2459,6 +2504,7 @@
             mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
         }
     } else if (mFlags & kFlagIsSecure) {
+        // We'll catch this later when we process the buffers.
         ALOGW("Crypto or descrambler should be given for secure codec");
     }
 
@@ -2520,6 +2566,31 @@
     return err;
 }
 
+status_t MediaCodec::getRequiredResources(std::vector<InstanceResourceInfo>& resources) {
+    resources.clear();
+    // Make sure codec availability feature is on.
+    if (!android::media::codec::codec_availability()) {
+        return ERROR_UNSUPPORTED;
+    }
+    // Make sure that the codec was configured already.
+    if (mState != CONFIGURED && mState != STARTING && mState != STARTED &&
+        mState != FLUSHING && mState != FLUSHED) {
+        ALOGE("Codec wasn't configured yet!");
+        return INVALID_OPERATION;
+    }
+
+    if (!mRequiredResourceInfo.empty()) {
+        resources = mRequiredResourceInfo;
+        return OK;
+    }
+
+    // TODO: For now this is just an empty function.
+    // The actual implementation should use component interface
+    // (for example, through mCodec->getRequiredDeviceResources) to query the
+    // required resources for this configuration, and fill in resources with the same.
+    return ERROR_UNSUPPORTED;
+}
+
 // Media Format Shaping support
 //
 
@@ -3608,6 +3679,10 @@
         updateMediametrics();
         results = mediametrics_dup(mMetricsHandle);
         updateEphemeralMediametrics(results);
+    } else if (mLastMetricsHandle != 0) {
+        // After error, mMetricsHandle is cleared, but we keep the last
+        // metrics around so that it can be queried by getMetrics().
+        results = mediametrics_dup(mLastMetricsHandle);
     } else {
         results = mediametrics_dup(mMetricsHandle);
     }
@@ -3877,6 +3952,7 @@
     return true;
 }
 
+// always called from the looper thread
 MediaCodec::DequeueOutputResult MediaCodec::handleDequeueOutputBuffer(
         const sp<AReplyToken> &replyID, bool newRequest) {
     if (!isExecuting()) {
@@ -3932,6 +4008,9 @@
 
         response->setInt32("flags", flags);
 
+        // NOTE: we must account the stats for an output buffer only after we
+        // already handled a potential output format change that could have
+        // started a new subsession.
         statsBufferReceived(timeUs, buffer);
 
         response->postReply(replyID);
@@ -4489,9 +4568,16 @@
                 {
                     /* size_t index = */updateBuffers(kPortIndexInput, msg);
 
-                    if (mState == FLUSHING
-                            || mState == STOPPING
-                            || mState == RELEASING) {
+                    bool inStateToReturnBuffers =
+                        mState == FLUSHING || mState == STOPPING || mState == RELEASING;
+                    if (android::media::codec::provider_->codec_buffer_state_cleanup()) {
+                        // Late callbacks from the codec could arrive here
+                        // after the codec is already stopped or released.
+                        inStateToReturnBuffers = mState == FLUSHING ||
+                                                 mState == STOPPING || mState == INITIALIZED ||
+                                                 mState == RELEASING || mState == UNINITIALIZED;
+                    }
+                    if (inStateToReturnBuffers) {
                         returnBuffersToCodecOnPort(kPortIndexInput);
                         break;
                     }
@@ -4570,9 +4656,16 @@
 
                     /* size_t index = */updateBuffers(kPortIndexOutput, msg);
 
-                    if (mState == FLUSHING
-                            || mState == STOPPING
-                            || mState == RELEASING) {
+                    bool inStateToReturnBuffers =
+                        mState == FLUSHING || mState == STOPPING || mState == RELEASING;
+                    if (android::media::codec::provider_->codec_buffer_state_cleanup()) {
+                        // Late callbacks from the codec could arrive here
+                        // after the codec is already stopped or released.
+                        inStateToReturnBuffers = mState == FLUSHING ||
+                                                 mState == STOPPING || mState == INITIALIZED ||
+                                                 mState == RELEASING || mState == UNINITIALIZED;
+                    }
+                    if (inStateToReturnBuffers) {
                         returnBuffersToCodecOnPort(kPortIndexOutput);
                         break;
                     }
@@ -5822,6 +5915,7 @@
     }
 }
 
+// always called from the looper thread
 void MediaCodec::handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer) {
     sp<AMessage> format = buffer->format();
     if (mOutputFormat == format) {
@@ -5905,6 +5999,24 @@
             }
         }
     }
+
+    // Update the width and the height.
+    int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
+    bool newSubsession = false;
+    if (android::media::codec::provider_->subsession_metrics()
+            && mOutputFormat->findInt32("width", &width)
+            && mOutputFormat->findInt32("height", &height)
+            && (width != mWidth || height != mHeight)) {
+        // consider a new subsession if the width or height changes.
+        newSubsession = true;
+    }
+    // TODO: properly detect new audio subsession
+
+    // Only consider a new subsession if we already have output (from a previous subsession).
+    if (newSubsession && mMetricsToUpload && mBytesEncoded > 0) {
+        handleStartingANewSubsession();
+    }
+
     if (mFlags & kFlagIsAsync) {
         onOutputFormatChanged();
     } else {
@@ -5912,8 +6024,6 @@
         postActivityNotificationIfPossible();
     }
 
-    // Update the width and the height.
-    int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
     bool resolutionChanged = false;
     if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
         mWidth = right - left + 1;
@@ -5938,7 +6048,36 @@
     }
 
     updateHdrMetrics(false /* isConfig */);
- }
+}
+
+// always called from the looper thread (and therefore not mutexed)
+void MediaCodec::handleStartingANewSubsession() {
+    // create a new metrics item for the subsession with the new resolution.
+    // TODO: properly account input counts for the previous and the new
+    // subsessions. We only find out that a new subsession started from the
+    // output format, but by that time we have already attributed the input
+    // counts to the previous subsession.
+    flushMediametrics(); // this deletes mMetricsHandle, but stores it in mLastMetricsHandle
+
+    // hence mLastMetricsHandle has the metrics item for the previous subsession.
+    if ((mFlags & kFlagIsAsync) && mCallback != nullptr) {
+        sp<AMessage> msg = mCallback->dup();
+        msg->setInt32("callbackID", CB_METRICS_FLUSHED);
+        std::unique_ptr<mediametrics::Item> flushedMetrics(
+                mediametrics::Item::convert(mediametrics_dup(mLastMetricsHandle)));
+        msg->setObject("metrics", new WrapperObject<std::unique_ptr<mediametrics::Item>>(
+                std::move(flushedMetrics)));
+        msg->post();
+    }
+
+    // reuse/continue old metrics item for the new subsession.
+    mMetricsHandle = mediametrics_dup(mLastMetricsHandle);
+    mMetricsToUpload = true;
+    // TODO: configured width/height for the new subsession should be the
+    // previous width/height.
+    mSubsessionCount++;
+    resetSubsessionMetricsFields();
+}
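For illustration, an async client receiving CB_METRICS_FLUSHED could pull the flushed item out of the callback message like this (a sketch assuming the client handler already dispatches on "callbackID"; the surrounding switch is hypothetical):

    case MediaCodec::CB_METRICS_FLUSHED: {
        sp<RefBase> obj;
        if (msg->findObject("metrics", &obj)) {
            // The wrapper owns the mediametrics::Item of the subsession
            // that just ended (see handleStartingANewSubsession above).
            auto *wrapper = static_cast<
                    WrapperObject<std::unique_ptr<mediametrics::Item>> *>(obj.get());
            std::unique_ptr<mediametrics::Item> &item = wrapper->value;
            // ... inspect or upload 'item' here ...
        }
        break;
    }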
 
 void MediaCodec::extractCSD(const sp<AMessage> &format) {
     mCSD.clear();
@@ -6017,7 +6156,6 @@
             return -EINVAL;
         }
         if (codecInputData->data() == NULL) {
-            ALOGV("Input buffer %zu is not properly allocated", bufferIndex);
             mErrorLog.log(LOG_TAG, base::StringPrintf(
                     "Fatal error: input buffer %zu is not properly allocated", bufferIndex));
             return -EINVAL;
@@ -6063,6 +6201,10 @@
 
         mInputFormat.clear();
         mOutputFormat.clear();
+        if (android::media::codec::provider_->codec_buffer_state_cleanup()) {
+            mCSD.clear();
+            mLeftover.clear();
+        }
         mFlags &= ~kFlagOutputFormatChanged;
         mFlags &= ~kFlagOutputBuffersChanged;
         mFlags &= ~kFlagStickyError;
@@ -6121,6 +6263,8 @@
                 ALOGD("port %d buffer %zu still owned by client when codec is reclaimed",
                         portIndex, i);
             } else {
+                ALOGV("returnBuffersToCodecOnPort: mPortBuffers[%s][%zu] NOT owned by client",
+                      portIndex == kPortIndexInput ? "in" : "out", i);
                 info->mOwnedByClient = false;
                 info->mData.clear();
             }
@@ -6185,6 +6329,12 @@
     CryptoPlugin::SubSample ss;
     CryptoPlugin::Pattern pattern;
 
+    if (android::media::codec::provider_->secure_codecs_require_crypto()
+            && (mFlags & kFlagIsSecure) && !hasCryptoOrDescrambler()) {
+        mErrorLog.log(LOG_TAG, "Crypto or descrambler must be given for secure codec");
+        return INVALID_OPERATION;
+    }
+
     if (msg->findSize("size", &size)) {
         if (hasCryptoOrDescrambler()) {
             ss.mNumBytesOfClearData = size;
@@ -6473,6 +6623,7 @@
 
         // synchronization boundary for getBufferAndFormat
         Mutex::Autolock al(mBufferLock);
+        ALOGV("onQueueInputBuffer: mPortBuffers[in][%zu] NOT owned by client", index);
         info->mOwnedByClient = false;
         info->mData.clear();
 
@@ -6489,6 +6640,7 @@
     sp<AMessage> msg = mLeftover.front();
     mLeftover.pop_front();
     msg->setSize("index", index);
+    ALOGV("handleLeftover(%zu)", index);
     return onQueueInputBuffer(msg);
 }
 
@@ -6557,6 +6709,7 @@
     sp<MediaCodecBuffer> buffer;
     {
         Mutex::Autolock al(mBufferLock);
+        ALOGV("onReleaseOutputBuffer: mPortBuffers[out][%zu] NOT owned by client", index);
         info->mOwnedByClient = false;
         buffer = info->mData;
         info->mData.clear();
@@ -6669,6 +6822,8 @@
 
     {
         Mutex::Autolock al(mBufferLock);
+        ALOGV("dequeuePortBuffer: mPortBuffers[%s][%zu] checking if not owned by client",
+              portIndex == kPortIndexInput ? "in" : "out", index);
         CHECK(!info->mOwnedByClient);
         info->mOwnedByClient = true;
 
@@ -6924,6 +7079,18 @@
     }
 }
 
+void MediaCodec::onRequiredResourcesChanged(
+        const std::vector<InstanceResourceInfo>& resourceInfo) {
+    mRequiredResourceInfo = resourceInfo;
+    // Make sure codec availability feature is on.
+    if (mCallback != nullptr && android::media::codec::codec_availability()) {
+        // Post the callback
+        sp<AMessage> msg = mCallback->dup();
+        msg->setInt32("callbackID", CB_REQUIRED_RESOURCES_CHANGED);
+        msg->post();
+    }
+}
+
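A client that tracks codec admission would typically respond to this callback by re-querying the codec; a minimal sketch (again assuming a client-side switch on "callbackID"):

    case MediaCodec::CB_REQUIRED_RESOURCES_CHANGED: {
        // Resource needs changed (for example after a reconfiguration or a
        // resolution switch); fetch the values cached in mRequiredResourceInfo.
        std::vector<MediaCodec::InstanceResourceInfo> updated;
        if (codec->getRequiredResources(updated) == OK) {
            // ... refresh any client-side bookkeeping with 'updated' ...
        }
        break;
    }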
 void MediaCodec::postActivityNotificationIfPossible() {
     if (mActivityNotify == NULL) {
         return;
diff --git a/media/libstagefright/OmxInfoBuilder.cpp b/media/libstagefright/OmxInfoBuilder.cpp
index 79ffdeb..1cb8f14 100644
--- a/media/libstagefright/OmxInfoBuilder.cpp
+++ b/media/libstagefright/OmxInfoBuilder.cpp
@@ -21,6 +21,8 @@
 #define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
 #endif
 
+#include <cstdlib>
+
 #include <android-base/properties.h>
 #include <utils/Log.h>
 
@@ -131,6 +133,10 @@
     for (const auto& p : serviceAttributes) {
         writer->addGlobalSetting(
                 p.key.c_str(), p.value.c_str());
+        if (p.key == "max-concurrent-instances") {
+            MediaCodecInfoWriter::SetMaxSupportedInstances(
+                    (int32_t)strtol(p.value.c_str(), NULL, 10));
+        }
     }
 
     // Convert roles to lists of codecs
@@ -217,6 +223,8 @@
                 ALOGW("Fail to add media type %s to codec %s",
                         typeName.c_str(), nodeName.c_str());
                 info->removeMediaType(typeName.c_str());
+            } else {
+                info->createCodecCaps();
             }
         }
     }
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index b7efbce..354fab0 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -85,13 +85,37 @@
     // writerTest fails about 5 out of 66
     // { "name": "writerTest" },
     {
-       "name": "BatteryChecker_test"
+        "name": "BatteryChecker_test"
     },
     {
         "name": "ExtractorFactoryTest"
     },
     {
         "name": "HEVCUtilsUnitTest"
+    },
+    {
+      "name": "MctsMediaDecoderTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        }
+      ]
+    },
+    {
+      "name": "MctsMediaEncoderTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        }
+      ]
+    },
+    {
+      "name": "MctsMediaCodecTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        }
+      ]
     }
   ]
 }
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 86741a6..50eeb62 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -44,6 +44,8 @@
 #include <media/AudioParameter.h>
 #include <system/audio.h>
 
+#include <com_android_media_extractor_flags.h>
+
 // TODO : Remove the defines once mainline media is built against NDK >= 31.
 // The mp4 extractor is part of mainline and builds against NDK 29 as of
 // writing. These keys are available only from NDK 31:
@@ -1443,6 +1445,17 @@
         buffer->meta()->setInt64("timeUs", 0);
         msg->setBuffer("csd-0", buffer);
         parseAV1ProfileLevelFromCsd(buffer, msg);
+    } else if (com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+               meta->findData(kKeyAPVC, &type, &data, &size)) {
+        sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
+        if (buffer.get() == NULL || buffer->base() == NULL) {
+            return NO_MEMORY;
+        }
+        memcpy(buffer->data(), data, size);
+
+        buffer->meta()->setInt32("csd", true);
+        buffer->meta()->setInt64("timeUs", 0);
+        msg->setBuffer("csd-0", buffer);
     } else if (meta->findData(kKeyESDS, &type, &data, &size)) {
         ESDS esds((const char *)data, size);
         if (esds.InitCheck() != (status_t)OK) {
@@ -2091,6 +2104,9 @@
         } else if (mime == MEDIA_MIMETYPE_VIDEO_AV1 ||
                    mime == MEDIA_MIMETYPE_IMAGE_AVIF) {
             meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
+        } else if (com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+                   mime == MEDIA_MIMETYPE_VIDEO_APV) {
+            meta->setData(kKeyAPVC, 0, csd0->data(), csd0->size());
         } else if (mime == MEDIA_MIMETYPE_VIDEO_DOLBY_VISION) {
             int32_t profile = -1;
             uint8_t blCompatibilityId = -1;
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index 137b282..72a2551 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -95,6 +95,14 @@
             <Feature name="adaptive-playback" />
             <Attribute name="software-codec" />
         </MediaCodec>
+        <MediaCodec name="c2.android.apv.decoder" type="video/apv" enabled="false" minsdk="36" variant="!slow-cpu">
+            <Limit name="size" min="16x16" max="1920x1920"/>
+            <Limit name="alignment" value="2x2"/>
+            <Limit name="bitrate" range="1-240000000"/>
+            <Limit name="block-size" value="16x16" />
+            <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+            <Attribute name="software-codec"/>
+        </MediaCodec>
     </Decoders>
 
     <Encoders>
@@ -160,5 +168,13 @@
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="bitrate-modes" value="VBR,CBR" />
         </MediaCodec>
+        <MediaCodec name="c2.android.apv.encoder" type="video/apv" enabled="false" minsdk="36" variant="!slow-cpu">
+            <Limit name="size" min="2x2" max="1920x1920" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+            <Limit name="bitrate" range="1-240000000" />
+            <Feature name="bitrate-modes" value="VBR,CBR" />
+        </MediaCodec>
     </Encoders>
 </Included>
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index 6fb9232..20c97dc 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -255,6 +255,15 @@
             <Feature name="adaptive-playback" />
             <Attribute name="software-codec" />
         </MediaCodec>
+        <MediaCodec name="c2.android.apv.decoder" type="video/apv" enabled="false" minsdk="36" variant="!slow-cpu">
+            <Limit name="size" min="16x16" max="1920x1920"/>
+            <Limit name="alignment" value="2x2"/>
+            <Limit name="bitrate" range="1-240000000"/>
+            <Limit name="block-size" value="16x16" />
+            <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+            <Feature name="adaptive-playback" />
+            <Attribute name="software-codec"/>
+        </MediaCodec>
     </Decoders>
     <Encoders>
         <MediaCodec name="c2.android.aac.encoder" type="audio/mp4a-latm">
@@ -332,7 +341,7 @@
                 <!-- profiles and levels:  ProfileBaseline : Level3 -->
                 <Limit name="block-count" range="1-1620" />
                 <Limit name="blocks-per-second" range="1-40500" />
-                <Limit name="bitrate" range="1-2000000" />
+                <Limit name="bitrate" range="1-10000000" />
             </Variant>
             <Feature name="intra-refresh" />
             <!-- Video Quality control -->
@@ -409,5 +418,14 @@
             <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
+        <MediaCodec name="c2.android.apv.encoder" type="video/apv" enabled="false" minsdk="36" variant="!slow-cpu">
+            <Limit name="size" min="2x2" max="1920x1920" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+            <Limit name="bitrate" range="1-240000000" />
+            <Feature name="bitrate-modes" value="VBR,CBR" />
+            <Attribute name="software-codec" />
+        </MediaCodec>
     </Encoders>
 </MediaCodecs>
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index e417324..94c201f 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -18,12 +18,15 @@
 #define FRAME_DECODER_H_
 
 #include <memory>
+#include <mutex>
+#include <queue>
 #include <vector>
 
-#include <media/stagefright/foundation/AString.h>
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/openmax/OMX_Video.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/AString.h>
 #include <ui/GraphicTypes.h>
 
 namespace android {
@@ -34,11 +37,23 @@
 class MediaCodecBuffer;
 class Surface;
 class VideoFrame;
+struct AsyncCodecHandler;
 
 struct FrameRect {
     int32_t left, top, right, bottom;
 };
 
+struct InputBufferIndexQueue {
+public:
+    void enqueue(int32_t index);
+    bool dequeue(int32_t* index, int32_t timeOutUs);
+
+private:
+    std::queue<int32_t> mQueue;
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+};
+
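The header only declares InputBufferIndexQueue; one plausible implementation of the two methods (a sketch only, not necessarily what FrameDecoder.cpp does) is the usual mutex plus condition-variable wait with a timeout:

    // Assumes <chrono> and <condition_variable> in addition to the includes above.
    void InputBufferIndexQueue::enqueue(int32_t index) {
        std::lock_guard<std::mutex> lock(mMutex);
        mQueue.push(index);
        mCondition.notify_one();              // wake a waiting dequeue()
    }

    bool InputBufferIndexQueue::dequeue(int32_t* index, int32_t timeOutUs) {
        std::unique_lock<std::mutex> lock(mMutex);
        // Wait until an index is available or the timeout expires.
        if (!mCondition.wait_for(lock, std::chrono::microseconds(timeOutUs),
                                 [this] { return !mQueue.empty(); })) {
            return false;                     // timed out, nothing dequeued
        }
        *index = mQueue.front();
        mQueue.pop();
        return true;
    }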
 struct FrameDecoder : public RefBase {
     FrameDecoder(
             const AString &componentName,
@@ -53,7 +68,19 @@
             const sp<MetaData> &trackMeta, int colorFormat,
             bool thumbnail = false, uint32_t bitDepth = 0);
 
+    status_t handleInputBufferAsync(int32_t index);
+    status_t handleOutputBufferAsync(int32_t index, int64_t timeUs);
+    status_t handleOutputFormatChangeAsync(sp<AMessage> format);
+
+    enum {
+        kWhatCallbackNotify,
+    };
+
 protected:
+    AString mComponentName;
+    sp<AMessage> mOutputFormat;
+    bool mUseBlockModel;
+
     virtual ~FrameDecoder();
 
     virtual sp<AMessage> onGetFormatAndSeekOptions(
@@ -64,14 +91,12 @@
 
     virtual status_t onExtractRect(FrameRect *rect) = 0;
 
-    virtual status_t onInputReceived(
-            const sp<MediaCodecBuffer> &codecBuffer,
-            MetaDataBase &sampleMeta,
-            bool firstSample,
-            uint32_t *flags) = 0;
+    virtual status_t onInputReceived(uint8_t* data, size_t size, MetaDataBase& sampleMeta,
+                                     bool firstSample, uint32_t* flags) = 0;
 
     virtual status_t onOutputReceived(
-            const sp<MediaCodecBuffer> &videoFrameBuffer,
+            uint8_t* data,
+            sp<ABuffer> imgObj,
             const sp<AMessage> &outputFormat,
             int64_t timeUs,
             bool *done) = 0;
@@ -83,7 +108,6 @@
     void setFrame(const sp<IMemory> &frameMem) { mFrameMemory = frameMem; }
 
 private:
-    AString mComponentName;
     sp<MetaData> mTrackMeta;
     sp<IMediaSource> mSource;
     OMX_COLOR_FORMATTYPE mDstFormat;
@@ -92,17 +116,32 @@
     sp<IMemory> mFrameMemory;
     MediaSource::ReadOptions mReadOptions;
     sp<MediaCodec> mDecoder;
-    sp<AMessage> mOutputFormat;
+    sp<AsyncCodecHandler> mHandler;
+    sp<ALooper> mAsyncLooper;
     bool mHaveMoreInputs;
     bool mFirstSample;
+    bool mHandleOutputBufferAsyncDone;
     sp<Surface> mSurface;
+    std::mutex mMutex;
+    std::condition_variable mOutputFramePending;
+    InputBufferIndexQueue mInputBufferIndexQueue;
 
     status_t extractInternal();
+    status_t extractInternalUsingBlockModel();
 
     DISALLOW_EVIL_CONSTRUCTORS(FrameDecoder);
 };
 struct FrameCaptureLayer;
 
+struct AsyncCodecHandler : public AHandler {
+public:
+    explicit AsyncCodecHandler(const wp<FrameDecoder>& frameDecoder);
+    virtual void onMessageReceived(const sp<AMessage>& msg);
+
+private:
+    wp<FrameDecoder> mFrameDecoder;
+};
+
 struct VideoFrameDecoder : public FrameDecoder {
     VideoFrameDecoder(
             const AString &componentName,
@@ -121,14 +160,12 @@
         return (rect == NULL) ? OK : ERROR_UNSUPPORTED;
     }
 
-    virtual status_t onInputReceived(
-            const sp<MediaCodecBuffer> &codecBuffer,
-            MetaDataBase &sampleMeta,
-            bool firstSample,
-            uint32_t *flags) override;
+    virtual status_t onInputReceived(uint8_t* data, size_t size, MetaDataBase& sampleMeta,
+                                     bool firstSample, uint32_t* flags) override;
 
     virtual status_t onOutputReceived(
-            const sp<MediaCodecBuffer> &videoFrameBuffer,
+            uint8_t* data,
+            sp<ABuffer> imgObj,
             const sp<AMessage> &outputFormat,
             int64_t timeUs,
             bool *done) override;
@@ -162,14 +199,13 @@
 
     virtual status_t onExtractRect(FrameRect *rect) override;
 
-    virtual status_t onInputReceived(
-            const sp<MediaCodecBuffer> &codecBuffer __unused,
-            MetaDataBase &sampleMeta __unused,
-            bool firstSample __unused,
-            uint32_t *flags __unused) override { return OK; }
+    virtual status_t onInputReceived(uint8_t* __unused, size_t __unused,
+                                     MetaDataBase& sampleMeta __unused, bool firstSample __unused,
+                                     uint32_t* flags __unused) override { return OK; }
 
     virtual status_t onOutputReceived(
-            const sp<MediaCodecBuffer> &videoFrameBuffer,
+            uint8_t* data,
+            sp<ABuffer> imgObj,
             const sp<AMessage> &outputFormat,
             int64_t timeUs,
             bool *done) override;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 7169b1e..df1ebd7 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -123,6 +123,18 @@
         CB_RESOURCE_RECLAIMED = 5,
         CB_CRYPTO_ERROR = 6,
         CB_LARGE_FRAME_OUTPUT_AVAILABLE = 7,
+
+        /** Callback ID for when the metrics for this codec have been flushed
+         * due to the start of a new subsession. The associated AMessage will
+         * contain an sp<WrapperObject<std::unique_ptr<mediametrics::Item>>>
+         * object at the "metrics" key.
+         */
+        CB_METRICS_FLUSHED = 8,
+
+        /** Callback ID used to notify a change in the resource requirements
+         * of the codec component.
+         */
+        CB_REQUIRED_RESOURCES_CHANGED = 9,
     };
 
     static const pid_t kNoPid = -1;
@@ -142,6 +154,73 @@
 
     static sp<PersistentSurface> CreatePersistentInputSurface();
 
+    /**
+     * Abstraction for the Global Codec resources.
+     * This encapsulates all the available codec resources on the device.
+     */
+    struct GlobalResourceInfo {
+        /**
+         * Name of the Resource type.
+         */
+        std::string mName;
+        /**
+         * Total count/capacity of resources of this type.
+         */
+        int mCapacity;
+        /**
+         * Available count of this resource type.
+         */
+        int mAvailable;
+
+        GlobalResourceInfo(const std::string& name, int capacity, int available) :
+                mName(name),
+                mCapacity(capacity),
+                mAvailable(available) {}
+
+        GlobalResourceInfo(const GlobalResourceInfo& info) :
+                mName(info.mName),
+                mCapacity(info.mCapacity),
+                mAvailable(info.mAvailable) {}
+    };
+
+    /**
+     * Abstraction for the resources associated with a codec instance.
+     * This encapsulates the required codec resources for a configured codec instance.
+     */
+    struct InstanceResourceInfo {
+        /**
+         * Name of the Resource type.
+         */
+        std::string mName;
+        /**
+         * Required resource count of this type.
+         */
+        int mStaticCount;
+        /**
+         * Per frame resource requirement of this resource type.
+         */
+        int mPerFrameCount;
+
+        InstanceResourceInfo(const std::string& name, int staticCount, int perFrameCount) :
+                mName(name),
+                mStaticCount(staticCount),
+                mPerFrameCount(perFrameCount) {}
+
+        InstanceResourceInfo(const InstanceResourceInfo& info) :
+                mName(info.mName),
+                mStaticCount(info.mStaticCount),
+                mPerFrameCount(info.mPerFrameCount) {}
+    };
+
+    /**
+     * Get a list of globally available device codec resources.
+     *
+     * It will return INVALID_OPERATION if:
+     *  - the HAL does not implement the codec availability API
+     *  - the codec_availability feature flag isn't enabled.
+     */
+    static status_t getGloballyAvailableResources(std::vector<GlobalResourceInfo>& resources);
+
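For completeness, the static query could be consumed in much the same way as the per-instance one (a sketch; the logging is illustrative only):

    std::vector<MediaCodec::GlobalResourceInfo> global;
    if (MediaCodec::getGloballyAvailableResources(global) == OK) {
        for (const auto &res : global) {
            // mAvailable out of mCapacity units of this resource type remain.
            ALOGD("%s: %d/%d available",
                  res.mName.c_str(), res.mAvailable, res.mCapacity);
        }
    }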
     status_t configure(
             const sp<AMessage> &format,
             const sp<Surface> &nativeWindow,
@@ -155,6 +234,19 @@
             const sp<IDescrambler> &descrambler,
             uint32_t flags);
 
+    /**
+     * Get a list of required codec resources.
+     *
+     * This may only be called after configuring the codec.
+     *
+     * Calling this before the codec has been configured results in
+     * the INVALID_OPERATION error code.
+     * It returns ERROR_UNSUPPORTED if:
+     *  - the HAL does not implement the codec availability API
+     *  - the codec_availability feature flag isn't enabled.
+     */
+    status_t getRequiredResources(std::vector<InstanceResourceInfo>& resources);
+
     status_t releaseCrypto();
 
     status_t setCallback(const sp<AMessage> &callback);
@@ -484,12 +576,21 @@
 
     Mutex mMetricsLock;
     mediametrics_handle_t mMetricsHandle = 0;
+    mediametrics_handle_t mLastMetricsHandle = 0; // only accessed from the looper or destructor
     bool mMetricsToUpload = false;
     nsecs_t mLifetimeStartNs = 0;
     void initMediametrics();
     void updateMediametrics();
     void flushMediametrics();
     void resetMetricsFields();
+
+    // Reset the metrics fields for a new subsession.
+    void resetSubsessionMetricsFields();
+
+    // Start a new subsession (for metrics). This includes flushing the current
+    // metrics, notifying the client and resetting the session fields.
+    void handleStartingANewSubsession();
+
     void updateEphemeralMediametrics(mediametrics_handle_t item);
     void updateLowLatency(const sp<AMessage> &msg);
     void updateCodecImportance(const sp<AMessage>& msg);
@@ -551,6 +652,7 @@
         int32_t setOutputSurfaceCount;
         int32_t resolutionChangeCount;
     } mReliabilityContextMetrics;
+    int32_t mSubsessionCount;
 
     // initial create parameters
     AString mInitName;
@@ -671,6 +773,7 @@
     void onCryptoError(const sp<AMessage> &msg);
     void onError(status_t err, int32_t actionCode, const char *detail = NULL);
     void onOutputFormatChanged();
+    void onRequiredResourcesChanged(const std::vector<InstanceResourceInfo>& resourceInfo);
 
     status_t onSetParameters(const sp<AMessage> &params);
 
@@ -770,6 +873,8 @@
     friend class MediaTestHelper;
 
     CodecErrorLog mErrorLog;
+    // Required resource info for this codec.
+    std::vector<InstanceResourceInfo> mRequiredResourceInfo;
 
     DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
 };
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 4e4aa75..b0f671d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -562,6 +562,163 @@
     }
 }
 
+inline constexpr int32_t DTS_HDProfileHRA = 0x1;
+inline constexpr int32_t DTS_HDProfileLBR = 0x2;
+inline constexpr int32_t DTS_HDProfileMA = 0x4;
+
+inline static const char *asString_Dts_HDProfile(int32_t i, const char *def = "??") {
+    switch (i) {
+        case DTS_HDProfileHRA:  return "HRA";
+        case DTS_HDProfileLBR:  return "LBR";
+        case DTS_HDProfileMA:   return "MA";
+        default:                return def;
+    }
+}
+
+inline constexpr int32_t DTS_UHDProfileP1 = 0x1;
+inline constexpr int32_t DTS_UHDProfileP2 = 0x2;
+
+inline static const char *asString_Dts_UHDProfile(int32_t i, const char *def = "??") {
+    switch (i) {
+        case DTS_UHDProfileP1:  return "P1";
+        case DTS_UHDProfileP2:  return "P2";
+        default:                return def;
+    }
+}
+
+// APV ProfileLevel
+inline constexpr int32_t APVProfile422_10           = 0x01;
+inline constexpr int32_t APVProfile422_10HDR10      = 0x1000;
+inline constexpr int32_t APVProfile422_10HDR10Plus  = 0x2000;
+
+inline static const char *asString_APVProfile(int32_t i, const char *def = "??") {
+    switch (i) {
+        case APVProfile422_10:           return "APVProfile422_10";
+        case APVProfile422_10HDR10:      return "APVProfile422_10HDR10";
+        case APVProfile422_10HDR10Plus:  return "APVProfile422_10HDR10Plus";
+        default:                        return def;
+    }
+}
+
+inline constexpr int32_t APVLevel1Band0 = 0x101;
+inline constexpr int32_t APVLevel1Band1 = 0x102;
+inline constexpr int32_t APVLevel1Band2 = 0x104;
+inline constexpr int32_t APVLevel1Band3 = 0x108;
+inline constexpr int32_t APVLevel11Band0 = 0x201;
+inline constexpr int32_t APVLevel11Band1 = 0x202;
+inline constexpr int32_t APVLevel11Band2 = 0x204;
+inline constexpr int32_t APVLevel11Band3 = 0x208;
+inline constexpr int32_t APVLevel2Band0 = 0x401;
+inline constexpr int32_t APVLevel2Band1 = 0x402;
+inline constexpr int32_t APVLevel2Band2 = 0x404;
+inline constexpr int32_t APVLevel2Band3 = 0x408;
+inline constexpr int32_t APVLevel21Band0 = 0x801;
+inline constexpr int32_t APVLevel21Band1 = 0x802;
+inline constexpr int32_t APVLevel21Band2 = 0x804;
+inline constexpr int32_t APVLevel21Band3 = 0x808;
+inline constexpr int32_t APVLevel3Band0 = 0x1001;
+inline constexpr int32_t APVLevel3Band1 = 0x1002;
+inline constexpr int32_t APVLevel3Band2 = 0x1004;
+inline constexpr int32_t APVLevel3Band3 = 0x1008;
+inline constexpr int32_t APVLevel31Band0 = 0x2001;
+inline constexpr int32_t APVLevel31Band1 = 0x2002;
+inline constexpr int32_t APVLevel31Band2 = 0x2004;
+inline constexpr int32_t APVLevel31Band3 = 0x2008;
+inline constexpr int32_t APVLevel4Band0 = 0x4001;
+inline constexpr int32_t APVLevel4Band1 = 0x4002;
+inline constexpr int32_t APVLevel4Band2 = 0x4004;
+inline constexpr int32_t APVLevel4Band3 = 0x4008;
+inline constexpr int32_t APVLevel41Band0 = 0x8001;
+inline constexpr int32_t APVLevel41Band1 = 0x8002;
+inline constexpr int32_t APVLevel41Band2 = 0x8004;
+inline constexpr int32_t APVLevel41Band3 = 0x8008;
+inline constexpr int32_t APVLevel5Band0 = 0x10001;
+inline constexpr int32_t APVLevel5Band1 = 0x10002;
+inline constexpr int32_t APVLevel5Band2 = 0x10004;
+inline constexpr int32_t APVLevel5Band3 = 0x10008;
+inline constexpr int32_t APVLevel51Band0 = 0x20001;
+inline constexpr int32_t APVLevel51Band1 = 0x20002;
+inline constexpr int32_t APVLevel51Band2 = 0x20004;
+inline constexpr int32_t APVLevel51Band3 = 0x20008;
+inline constexpr int32_t APVLevel6Band0 = 0x40001;
+inline constexpr int32_t APVLevel6Band1 = 0x40002;
+inline constexpr int32_t APVLevel6Band2 = 0x40004;
+inline constexpr int32_t APVLevel6Band3 = 0x40008;
+inline constexpr int32_t APVLevel61Band0 = 0x80001;
+inline constexpr int32_t APVLevel61Band1 = 0x80002;
+inline constexpr int32_t APVLevel61Band2 = 0x80004;
+inline constexpr int32_t APVLevel61Band3 = 0x80008;
+inline constexpr int32_t APVLevel7Band0 = 0x100001;
+inline constexpr int32_t APVLevel7Band1 = 0x100002;
+inline constexpr int32_t APVLevel7Band2 = 0x100004;
+inline constexpr int32_t APVLevel7Band3 = 0x100008;
+inline constexpr int32_t APVLevel71Band0 = 0x200001;
+inline constexpr int32_t APVLevel71Band1 = 0x200002;
+inline constexpr int32_t APVLevel71Band2 = 0x200004;
+inline constexpr int32_t APVLevel71Band3 = 0x200008;
+
+inline static const char *asString_APVBandLevel(int32_t i, const char *def = "??") {
+    switch (i) {
+        case APVLevel1Band0:     return "Level 1, Band 0";
+        case APVLevel1Band1:     return "Level 1, Band 1";
+        case APVLevel1Band2:     return "Level 1, Band 2";
+        case APVLevel1Band3:     return "Level 1, Band 3";
+        case APVLevel11Band0:     return "Level 1.1, Band 0";
+        case APVLevel11Band1:     return "Level 1.1, Band 1";
+        case APVLevel11Band2:     return "Level 1.1, Band 2";
+        case APVLevel11Band3:     return "Level 1.1, Band 3";
+        case APVLevel2Band0:     return "Level 2, Band 0";
+        case APVLevel2Band1:     return "Level 2, Band 1";
+        case APVLevel2Band2:     return "Level 2, Band 2";
+        case APVLevel2Band3:     return "Level 2, Band 3";
+        case APVLevel21Band0:     return "Level 2.1, Band 0";
+        case APVLevel21Band1:     return "Level 2.1, Band 1";
+        case APVLevel21Band2:     return "Level 2.1, Band 2";
+        case APVLevel21Band3:     return "Level 2.1, Band 3";
+        case APVLevel3Band0:     return "Level 3, Band 0";
+        case APVLevel3Band1:     return "Level 3, Band 1";
+        case APVLevel3Band2:     return "Level 3, Band 2";
+        case APVLevel3Band3:     return "Level 3, Band 3";
+        case APVLevel31Band0:     return "Level 3.1, Band 0";
+        case APVLevel31Band1:     return "Level 3.1, Band 1";
+        case APVLevel31Band2:     return "Level 3.1, Band 2";
+        case APVLevel31Band3:     return "Level 3.1, Band 3";
+        case APVLevel4Band0:     return "Level 4, Band 0";
+        case APVLevel4Band1:     return "Level 4, Band 1";
+        case APVLevel4Band2:     return "Level 4, Band 2";
+        case APVLevel4Band3:     return "Level 4, Band 3";
+        case APVLevel41Band0:     return "Level 4.1, Band 0";
+        case APVLevel41Band1:     return "Level 4.1, Band 1";
+        case APVLevel41Band2:     return "Level 4.1, Band 2";
+        case APVLevel41Band3:     return "Level 4.1, Band 3";
+        case APVLevel5Band0:     return "Level 5, Band 0";
+        case APVLevel5Band1:     return "Level 5, Band 1";
+        case APVLevel5Band2:     return "Level 5, Band 2";
+        case APVLevel5Band3:     return "Level 5, Band 3";
+        case APVLevel51Band0:     return "Level 5.1, Band 0";
+        case APVLevel51Band1:     return "Level 5.1, Band 1";
+        case APVLevel51Band2:     return "Level 5.1, Band 2";
+        case APVLevel51Band3:     return "Level 5.1, Band 3";
+        case APVLevel6Band0:     return "Level 6, Band 0";
+        case APVLevel6Band1:     return "Level 6, Band 1";
+        case APVLevel6Band2:     return "Level 6, Band 2";
+        case APVLevel6Band3:     return "Level 6, Band 3";
+        case APVLevel61Band0:     return "Level 6.1, Band 0";
+        case APVLevel61Band1:     return "Level 6.1, Band 1";
+        case APVLevel61Band2:     return "Level 6.1, Band 2";
+        case APVLevel61Band3:     return "Level 6.1, Band 3";
+        case APVLevel7Band0:     return "Level 7, Band 0";
+        case APVLevel7Band1:     return "Level 7, Band 1";
+        case APVLevel7Band2:     return "Level 7, Band 2";
+        case APVLevel7Band3:     return "Level 7, Band 3";
+        case APVLevel71Band0:     return "Level 7.1, Band 0";
+        case APVLevel71Band1:     return "Level 7.1, Band 1";
+        case APVLevel71Band2:     return "Level 7.1, Band 2";
+        case APVLevel71Band3:     return "Level 7.1, Band 3";
+        default:                return def;
+    }
+}
+
 inline constexpr int32_t BITRATE_MODE_CBR = 2;
 inline constexpr int32_t BITRATE_MODE_CBR_FD = 3;
 inline constexpr int32_t BITRATE_MODE_CQ = 0;
@@ -630,6 +787,7 @@
 inline constexpr int32_t COLOR_FormatYUV444Flexible          = 0x7F444888;
 inline constexpr int32_t COLOR_FormatYUV444Interleaved       = 29;
 inline constexpr int32_t COLOR_FormatYUVP010                 = 54;
+inline constexpr int32_t COLOR_FormatYUVP210                 = 60;
 inline constexpr int32_t COLOR_QCOM_FormatYUV420SemiPlanar   = 0x7fa30c00;
 inline constexpr int32_t COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100;
 
@@ -688,6 +846,7 @@
         case COLOR_FormatYUV444Flexible:            return "YUV444Flexible";
         case COLOR_FormatYUV444Interleaved:         return "YUV444Interleaved";
         case COLOR_FormatYUVP010:                   return "YUVP010";
+        case COLOR_FormatYUVP210:                   return "YUVP210";
         case COLOR_QCOM_FormatYUV420SemiPlanar:     return "QCOM_YUV420SemiPlanar";
         case COLOR_TI_FormatYUV420PackedSemiPlanar: return "TI_YUV420PackedSemiPlanar";
         default:                                    return def;
@@ -695,18 +854,24 @@
 }
 
 inline constexpr char FEATURE_AdaptivePlayback[]       = "adaptive-playback";
+inline constexpr char FEATURE_DynamicTimestamp[]       = "dynamic-timestamp";
 inline constexpr char FEATURE_EncodingStatistics[]     = "encoding-statistics";
+inline constexpr char FEATURE_FrameParsing[]           = "frame-parsing";
+inline constexpr char FEATURE_HdrEditing[]             = "hdr-editing";
 inline constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
+inline constexpr char FEATURE_LowLatency[]             = "low-latency";
 inline constexpr char FEATURE_MultipleFrames[] = "multiple-frames";
 inline constexpr char FEATURE_PartialFrame[] = "partial-frame";
 inline constexpr char FEATURE_QpBounds[] = "qp-bounds";
 inline constexpr char FEATURE_SecurePlayback[]         = "secure-playback";
+inline constexpr char FEATURE_SpecialCodec[]           = "special-codec";
 inline constexpr char FEATURE_TunneledPlayback[]       = "tunneled-playback";
 
 // from MediaFormat.java
 inline constexpr char MIMETYPE_VIDEO_VP8[] = "video/x-vnd.on2.vp8";
 inline constexpr char MIMETYPE_VIDEO_VP9[] = "video/x-vnd.on2.vp9";
 inline constexpr char MIMETYPE_VIDEO_AV1[] = "video/av01";
+inline constexpr char MIMETYPE_VIDEO_APV[] = "video/apv";
 inline constexpr char MIMETYPE_VIDEO_AVC[] = "video/avc";
 inline constexpr char MIMETYPE_VIDEO_HEVC[] = "video/hevc";
 inline constexpr char MIMETYPE_VIDEO_MPEG4[] = "video/mp4v-es";
@@ -729,8 +894,13 @@
 inline constexpr char MIMETYPE_AUDIO_FLAC[] = "audio/flac";
 inline constexpr char MIMETYPE_AUDIO_MSGSM[] = "audio/gsm";
 inline constexpr char MIMETYPE_AUDIO_AC3[] = "audio/ac3";
+inline constexpr char MIMETYPE_AUDIO_AC4[] = "audio/ac4";
 inline constexpr char MIMETYPE_AUDIO_EAC3[] = "audio/eac3";
+inline constexpr char MIMETYPE_AUDIO_EAC3_JOC[] = "audio/eac3-joc";
 inline constexpr char MIMETYPE_AUDIO_SCRAMBLED[] = "audio/scrambled";
+inline constexpr char MIMETYPE_AUDIO_DTS[] = "audio/vnd.dts";
+inline constexpr char MIMETYPE_AUDIO_DTS_HD[] = "audio/vnd.dts.hd";
+inline constexpr char MIMETYPE_AUDIO_DTS_UHD[] = "audio/vnd.dts.uhd";
 
 inline constexpr char MIMETYPE_IMAGE_ANDROID_HEIC[] = "image/vnd.android.heic";
 
@@ -818,6 +988,7 @@
 inline constexpr char KEY_MAX_PTS_GAP_TO_ENCODER[] = "max-pts-gap-to-encoder";
 inline constexpr char KEY_MAX_WIDTH[] = "max-width";
 inline constexpr char KEY_MIME[] = "mime";
+inline constexpr char KEY_NUM_SLOTS[] = "num-slots";
 inline constexpr char KEY_OPERATING_RATE[] = "operating-rate";
 inline constexpr char KEY_OUTPUT_REORDER_DEPTH[] = "output-reorder-depth";
 inline constexpr char KEY_PCM_ENCODING[] = "pcm-encoding";
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index a7d2eb9..9dce55b 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -63,6 +63,7 @@
     kKeyDVVC              = 'dvvc',  // raw data
     kKeyDVWC              = 'dvwc',  // raw data
     kKeyAV1C              = 'av1c',  // raw data
+    kKeyAPVC              = 'apvc',  // raw data
     kKeyThumbnailHVCC     = 'thvc',  // raw data
     kKeyThumbnailAV1C     = 'tav1',  // raw data
     kKeyD263              = 'd263',  // raw data
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index 4183023..4ab5d10 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -619,6 +619,13 @@
                 if (!isValidOMXParam(outParams)) {
                     return OMX_ErrorBadParameter;
                 }
+                if (offsetof(DescribeHDR10PlusInfoParams, nValue) + outParams->nParamSize >
+                    outParams->nSize) {
+                    ALOGE("b/329641908: too large param size; nParamSize=%u nSize=%u",
+                          outParams->nParamSize, outParams->nSize);
+                    android_errorWriteLog(0x534e4554, "329641908");
+                    return OMX_ErrorBadParameter;
+                }
 
                 outParams->nParamSizeUsed = info->size();
 
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index ca862b0..151ce7c 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -290,7 +290,7 @@
     // Max file duration limit is set
     if (mMaxFileDurationLimitUs != 0) {
         if (bitRate > 0) {
-            int64_t size2 = ((mMaxFileDurationLimitUs * bitRate * 6) / 1000 / 8000000);
+            int64_t size2 = ((mMaxFileDurationLimitUs / 1000) * bitRate * 6) / 8000000;
             if (mMaxFileSizeLimitBytes != 0 && mIsFileSizeLimitExplicitlyRequested) {
                 // When both file size and duration limits are set,
                 // we use the smaller limit of the two.
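The reordering matters because the original expression multiplies before dividing, so the intermediate product durationUs * bitRate can exceed INT64_MAX for long duration limits at high bit rates; dividing the microsecond duration by 1000 first keeps every intermediate in range. Roughly (illustrative values, not from the source):

    int64_t durationUs = 24LL * 3600 * 1000000;   // 24 h duration limit, in microseconds
    int32_t bitRate    = 240000000;               // 240 Mbps
    // Old order: durationUs * bitRate is about 2.1e19, past INT64_MAX (~9.2e18).
    // New order: (durationUs / 1000) * bitRate * 6 peaks around 1.2e17 -- no overflow.
    int64_t size2 = ((durationUs / 1000) * bitRate * 6) / 8000000;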
diff --git a/media/libstagefright/writer_fuzzers/Android.bp b/media/libstagefright/writer_fuzzers/Android.bp
index 58aa7cd..840c6b3c 100644
--- a/media/libstagefright/writer_fuzzers/Android.bp
+++ b/media/libstagefright/writer_fuzzers/Android.bp
@@ -24,6 +24,7 @@
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
     default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+    default_team: "trendy_team_android_media_solutions_editing",
 }
 
 cc_defaults {
@@ -35,14 +36,17 @@
         "include",
     ],
     static_libs: [
+        "com.android.media.flags.editing-aconfig-cc",
         "liblog",
-        "libstagefright_foundation",
         "libstagefright",
+        "libstagefright_foundation",
     ],
     shared_libs: [
+        "libaconfig_storage_read_api_cc",
         "libbinder",
         "libcutils",
         "libutils",
+        "server_configurable_flags",
     ],
 }
 
@@ -96,9 +100,9 @@
 }
 
 cc_fuzz {
-    name : "mpeg4_writer_fuzzer",
-    defaults : ["writer-fuzzer-defaults"],
-    srcs : [
+    name: "mpeg4_writer_fuzzer",
+    defaults: ["writer-fuzzer-defaults"],
+    srcs: [
         "mpeg4_writer_fuzzer.cpp",
     ],
     static_libs: [
@@ -107,9 +111,9 @@
 }
 
 cc_fuzz {
-    name : "ogg_writer_fuzzer",
-    defaults : ["writer-fuzzer-defaults"],
-    srcs : [
+    name: "ogg_writer_fuzzer",
+    defaults: ["writer-fuzzer-defaults"],
+    srcs: [
         "ogg_writer_fuzzer.cpp",
     ],
     static_libs: [
@@ -118,9 +122,9 @@
 }
 
 cc_fuzz {
-    name : "webm_writer_fuzzer",
-    defaults : ["writer-fuzzer-defaults"],
-    srcs : [
+    name: "webm_writer_fuzzer",
+    defaults: ["writer-fuzzer-defaults"],
+    srcs: [
         "webm_writer_fuzzer.cpp",
     ],
     static_libs: [
diff --git a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
index 8c1ef3b..bd11326 100644
--- a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
+++ b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
@@ -1069,7 +1069,7 @@
         codec.rank = rank;
     }
 
-    codec.variantSet = variants;
+    codec.variantSet.insert(variants.begin(), variants.end());
 
     // we allow sets of domains...
     for (const std::string &domain : domains) {
diff --git a/media/libstagefright/xmlparser/vts/Android.bp b/media/libstagefright/xmlparser/vts/Android.bp
index 1e36c8f..527230c 100644
--- a/media/libstagefright/xmlparser/vts/Android.bp
+++ b/media/libstagefright/xmlparser/vts/Android.bp
@@ -15,6 +15,7 @@
 //
 
 package {
+    default_team: "trendy_team_android_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 6ea40e3..b5124d0 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -51,9 +51,16 @@
     ],
 }
 
+vintf_fragment {
+    name: "manifest_media_c2_software.xml",
+    src: "manifest_media_c2_software.xml",
+}
+
 mediaserver_cc_binary {
     name: "mediaserver",
 
+    defaults: ["libcodec2_hal_selection"],
+
     srcs: ["main_mediaserver.cpp"],
 
     shared_libs: [
@@ -61,6 +68,7 @@
         "libicu",
         "libfmq",
         "libbinder",
+        "libbinder_ndk",
         "libhidlbase",
         "liblog",
         "libmediaplayerservice",
@@ -85,7 +93,7 @@
         "-Wall",
     ],
 
-    vintf_fragments: ["manifest_media_c2_software.xml"],
+    vintf_fragment_modules: ["manifest_media_c2_software.xml"],
 
     soong_config_variables: {
         TARGET_DYNAMIC_64_32_MEDIASERVER: {
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index 026847a..8a62f30 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -17,11 +17,12 @@
 
 #define LOG_TAG "mediaserver"
 //#define LOG_NDEBUG 0
-
+#include <android/binder_process.h>
 #include <binder/IPCThreadState.h>
 #include <binder/ProcessState.h>
 #include <binder/IServiceManager.h>
 #include <hidl/HidlTransportSupport.h>
+#include <codec2/common/HalSelection.h>
 #include <utils/Log.h>
 #include "RegisterExtensions.h"
 
@@ -30,6 +31,14 @@
 
 using namespace android;
 
+namespace {
+    constexpr int kCodecThreadPoolCount = 16;
+
+    // This is the default thread count for the binder thread pool
+    // if the thread count is not configured.
+    constexpr int kDefaultBinderThreadPoolCount = 15;
+}; // anonymous
+
 int main(int argc __unused, char **argv __unused)
 {
     signal(SIGPIPE, SIG_IGN);
@@ -40,8 +49,14 @@
     MediaPlayerService::instantiate();
     ResourceManagerService::instantiate();
     registerExtensions();
-    ::android::hardware::configureRpcThreadpool(16, false);
+
+    bool aidl = ::android::IsCodec2AidlHalSelected();
+    if (!aidl) {
+        ::android::hardware::configureRpcThreadpool(kCodecThreadPoolCount, false);
+    } else {
+        ABinderProcess_setThreadPoolMaxThreadCount(
+                kCodecThreadPoolCount + kDefaultBinderThreadPoolCount);
+    }
     ProcessState::self()->startThreadPool();
     IPCThreadState::self()->joinThreadPool();
-    ::android::hardware::joinRpcThreadpool();
 }
diff --git a/media/module/aidlpersistentsurface/Android.bp b/media/module/aidlpersistentsurface/Android.bp
index 5c1a010..8b273f3 100644
--- a/media/module/aidlpersistentsurface/Android.bp
+++ b/media/module/aidlpersistentsurface/Android.bp
@@ -3,6 +3,9 @@
     unstable: true,
     local_include_dir: "aidl",
     min_sdk_version: "29",
+    defaults: [
+        "android.hardware.graphics.common-latest"
+    ],
     srcs: [
         "aidl/android/media/AidlColorAspects.aidl",
         "aidl/android/media/IAidlGraphicBufferSource.aidl",
@@ -12,9 +15,6 @@
     headers: [
         "HardwareBuffer_aidl",
     ],
-    imports: [
-        "android.hardware.graphics.common-V5",
-    ],
     include_dirs: [
         "frameworks/native/aidl/gui",
     ],
@@ -41,6 +41,9 @@
 cc_library_shared {
     name: "libstagefright_graphicbuffersource_aidl",
     min_sdk_version: "29",
+    defaults: [
+        "android.hardware.graphics.common-ndk_shared",
+    ],
     srcs: [
         "AidlGraphicBufferSource.cpp",
         "wrapper/WAidlGraphicBufferSource.cpp",
@@ -56,7 +59,6 @@
         "media_plugin_headers",
     ],
     shared_libs: [
-        "android.hardware.graphics.common-V5-ndk",
         "graphicbuffersource-aidl-ndk",
         "libbinder_ndk",
         "libcutils",
diff --git a/media/module/bufferpool/2.0/BufferPoolClient.cpp b/media/module/bufferpool/2.0/BufferPoolClient.cpp
index cda23ff..66d11fa 100644
--- a/media/module/bufferpool/2.0/BufferPoolClient.cpp
+++ b/media/module/bufferpool/2.0/BufferPoolClient.cpp
@@ -762,6 +762,10 @@
     } else {
         connection = mRemoteConnection;
     }
+    if (!connection) {
+        ALOGE("connection null: fetchBufferHandle()");
+        return ResultStatus::CRITICAL_ERROR;
+    }
     ResultStatus status;
     Return<void> transResult = connection->fetch(
             transactionId, bufferId,
diff --git a/media/module/codecs/amrnb/common/Android.bp b/media/module/codecs/amrnb/common/Android.bp
index 0bc6ed2..35937cb 100644
--- a/media/module/codecs/amrnb/common/Android.bp
+++ b/media/module/codecs/amrnb/common/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_codec_framework",
     default_applicable_licenses: [
         "frameworks_av_media_codecs_amrnb_common_license",
     ],
@@ -42,8 +43,8 @@
         "src/gains_tbl.cpp",
         "src/gc_pred.cpp",
         "src/gmed_n.cpp",
-        "src/grid_tbl.cpp",
         "src/gray_tbl.cpp",
+        "src/grid_tbl.cpp",
         "src/int_lpc.cpp",
         "src/inv_sqrt.cpp",
         "src/inv_sqrt_tbl.cpp",
@@ -91,9 +92,9 @@
     export_include_dirs: ["include"],
 
     cflags: [
-        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
-        "-DOSCL_IMPORT_REF=",
         "-DOSCL_EXPORT_REF=",
+        "-DOSCL_IMPORT_REF=",
+        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
 
         "-Werror",
     ],
diff --git a/media/module/codecs/amrnb/common/include/abs_s.h b/media/module/codecs/amrnb/common/include/abs_s.h
deleted file mode 100644
index e92eaf4..0000000
--- a/media/module/codecs/amrnb/common/include/abs_s.h
+++ /dev/null
@@ -1,113 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Pathname: ./gsm-amr/c/include/abs_s.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for abs_s function.
-
- Description: Updated template to make it build for Symbian.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the abs_s function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef ABS_S_H
-#define ABS_S_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-    Word16 abs_s(Word16 var1);
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif  /* ABS_S_H */
-
-
diff --git a/media/module/codecs/amrnb/common/include/basic_op_c_equivalent.h b/media/module/codecs/amrnb/common/include/basic_op_c_equivalent.h
index 8f0867a..64fdfb9 100644
--- a/media/module/codecs/amrnb/common/include/basic_op_c_equivalent.h
+++ b/media/module/codecs/amrnb/common/include/basic_op_c_equivalent.h
@@ -115,20 +115,15 @@
      Returns:
         L_sum = 32-bit sum of L_var1 and L_var2 (Word32)
     */
-    __attribute__((no_sanitize("integer")))
     static inline Word32 L_add(Word32 L_var1, Word32 L_var2, Flag *pOverflow)
     {
         Word32 L_sum;
 
-        L_sum = L_var1 + L_var2;
-
-        if ((L_var1 ^ L_var2) >= 0)
+        if (__builtin_add_overflow(L_var1, L_var2, &L_sum))
         {
-            if ((L_sum ^ L_var1) < 0)
-            {
-                L_sum = (L_var1 < 0) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
+            // saturating...
+            L_sum = (L_var1 < 0) ? MIN_32 : MAX_32;
+            *pOverflow = 1;
         }
 
         return (L_sum);
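The __builtin_add_overflow rewrite keeps the saturating semantics while letting the compiler do the overflow detection; the same pattern in a self-contained form (a sketch using plain stdint types instead of this header's Word32 typedefs):

    #include <stdint.h>

    // Saturating 32-bit add in the style of the rewritten L_add().
    static inline int32_t saturating_add32(int32_t a, int32_t b, int *overflow) {
        int32_t sum;
        if (__builtin_add_overflow(a, b, &sum)) {
            // Signed overflow only happens when both operands share a sign,
            // so either operand's sign picks the saturation bound.
            sum = (a < 0) ? INT32_MIN : INT32_MAX;
            *overflow = 1;
        }
        return sum;
    }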
@@ -160,15 +155,11 @@
     {
         Word32 L_diff;
 
-        L_diff = L_var1 - L_var2;
-
-        if ((L_var1 ^ L_var2) < 0)
+        if (__builtin_sub_overflow(L_var1, L_var2, &L_diff))
         {
-            if ((L_diff ^ L_var1) & MIN_32)
-            {
-                L_diff = (L_var1 < 0L) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
+            // saturating...
+            L_diff = (L_var1 < 0L) ? MIN_32 : MAX_32;
+            *pOverflow = 1;
         }
 
         return (L_diff);
@@ -204,16 +195,12 @@
         result = (Word32) var1 * var2;
         if (result != (Word32) 0x40000000L)
         {
-            L_sum = (result << 1) + L_var3;
-
             /* Check if L_sum and L_var_3 share the same sign */
-            if ((L_var3 ^ result) > 0)
+            if (__builtin_add_overflow((result << 1), L_var3, &L_sum))
             {
-                if ((L_sum ^ L_var3) < 0)
-                {
-                    L_sum = (L_var3 < 0) ? MIN_32 : MAX_32;
-                    *pOverflow = 1;
-                }
+                // saturating...
+                L_sum = (L_var3 < 0) ? MIN_32 : MAX_32;
+                *pOverflow = 1;
             }
         }
         else
@@ -345,14 +332,10 @@
         product32 = ((Word32) L_var1_hi * L_var2_lo) >> 15;
 
         /* L_product = L_mac (L_product, result, 1, pOverflow); */
-        L_sum = L_product + (product32 << 1);
-
-        if ((L_product ^ product32) > 0)
+        if (__builtin_add_overflow(L_product, (product32 << 1), &L_sum))
         {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-            }
+            // saturate the result
+            L_sum = (L_product < 0) ? MIN_32 : MAX_32;
         }
 
         L_product = L_sum;
@@ -361,14 +344,10 @@
         product32 = ((Word32) L_var1_lo * L_var2_hi) >> 15;
 
         /* L_product = L_mac (L_product, result, 1, pOverflow); */
-        L_sum = L_product + (product32 << 1);
-
-        if ((L_product ^ product32) > 0)
+        if (__builtin_add_overflow(L_product, (product32 << 1), &L_sum))
         {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-            }
+            // saturate the result
+            L_sum = (L_product < 0) ? MIN_32 : MAX_32;
         }
         return (L_sum);
     }
@@ -416,15 +395,11 @@
 
         result = ((Word32)L_var1_lo * var2) >> 15;
 
-        L_sum  =  L_product + (result << 1);
-
-        if ((L_product ^ result) > 0)
+        if (__builtin_add_overflow(L_product, (result << 1), &L_sum))
         {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
+            // saturate the result and flag the overflow
+            L_sum = (L_product < 0) ? MIN_32 : MAX_32;
+            *pOverflow = 1;
         }
         return (L_sum);
 
@@ -478,7 +453,8 @@
     {
         Word32 result;
 
-        result = L_var3 + L_var1 * L_var2;
+        __builtin_mul_overflow(L_var1, L_var2, &result);
+        __builtin_add_overflow(L_var3, result, &result);
 
         return result;
     }
@@ -487,7 +463,8 @@
     {
         Word32 result;
 
-        result = L_var3 - L_var1 * L_var2;
+        __builtin_mul_overflow(L_var1, L_var2, &result);
+        __builtin_sub_overflow(L_var3, result, &result);
 
         return result;
     }
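For readers comparing the two styles in the hunks above, here is a minimal standalone sketch of the saturating 32-bit add in its new __builtin_add_overflow form. It is an illustration, not part of the patch; Word32, Flag, MIN_32 and MAX_32 are stand-ins for the definitions that basicop_malloc.h normally provides.

    #include <stdint.h>
    #include <stdio.h>

    typedef int32_t Word32;            /* stand-in for the basicop type */
    typedef int     Flag;              /* stand-in for the basicop type */
    #define MIN_32 INT32_MIN
    #define MAX_32 INT32_MAX

    /* Saturating add in the style the patched L_add now uses. */
    static Word32 l_add_sketch(Word32 a, Word32 b, Flag *pOverflow)
    {
        Word32 sum;
        if (__builtin_add_overflow(a, b, &sum))
        {
            /* Overflow is only possible when both operands have the same
               sign, so either operand's sign selects the saturation limit. */
            sum = (a < 0) ? MIN_32 : MAX_32;
            *pOverflow = 1;
        }
        return sum;
    }

    int main(void)
    {
        Flag ovf = 0;
        printf("%ld %d\n", (long)l_add_sketch(MAX_32, 1, &ovf), ovf); /* 2147483647 1 */
        printf("%ld %d\n", (long)l_add_sketch(-4, 3, &ovf), ovf);     /* -1 1, the flag is sticky */
        return 0;
    }

The builtin reports the wrap directly, so the old XOR-based same-sign test is no longer needed, while the saturation values and the pOverflow convention stay exactly as before.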
diff --git a/media/module/codecs/amrnb/common/include/l_add.h b/media/module/codecs/amrnb/common/include/l_add.h
deleted file mode 100644
index 136b914..0000000
--- a/media/module/codecs/amrnb/common/include/l_add.h
+++ /dev/null
@@ -1,171 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_add.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_add function.
-
- Description: Changed function prototype declaration. A pointer to the overflow
-              flag is being passed in as a parameter instead of using global
-              data.
-
- Description: Updated template. Changed paramter name from overflow to
-              pOverflow
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_add function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_ADD_H
-#define L_ADD_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_add(register Word32 L_var1, register Word32 L_var2, Flag *pOverflow)
-    {
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-        __asm
-        {
-            QADD result, L_var1, L_var2
-        }
-        return(result);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    __inline Word32 L_add(register Word32 L_var1, register Word32 L_var2, Flag *pOverflow)
-    {
-        register Word32 ra = L_var1;
-        register Word32 rb = L_var2;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("qadd %0, %1, %2"
-             : "=r"(result)
-                             : "r"(ra), "r"(rb)
-                            );
-        return (result);
-
-    }
-
-#else /* C EQUIVALENT */
-
-
-    static inline Word32 L_add(register Word32 L_var1, register Word32 L_var2, Flag *pOverflow)
-    {
-        Word32 L_sum;
-
-        L_sum = L_var1 + L_var2;
-
-        if ((L_var1 ^ L_var2) >= 0)
-        {
-            if ((L_sum ^ L_var1) < 0)
-            {
-                L_sum = (L_var1 < 0) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
-        }
-
-        return (L_sum);
-    }
-
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_ADD_H_ */
diff --git a/media/module/codecs/amrnb/common/include/l_add_c.h b/media/module/codecs/amrnb/common/include/l_add_c.h
deleted file mode 100644
index 3585a3c..0000000
--- a/media/module/codecs/amrnb/common/include/l_add_c.h
+++ /dev/null
@@ -1,115 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_add_c.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_add_c function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag and carry flag is passed into the
-              function. Updated template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_add_c function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_ADD_C_H
-#define L_ADD_C_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-    Word32 L_add_c(Word32 L_var1, Word32 L_var2, Flag *pOverflow, Flag *pCarry);
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_ADD_C_H_ */
-
-
diff --git a/media/module/codecs/amrnb/common/include/l_mac.h b/media/module/codecs/amrnb/common/include/l_mac.h
deleted file mode 100644
index b4af3aa..0000000
--- a/media/module/codecs/amrnb/common/include/l_mac.h
+++ /dev/null
@@ -1,183 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
- Filename: /audio/gsm_amr/c/include/l_mac.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_mac function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: 1. Updated the function to include ARM and Linux-ARM assembly
-                 instructions.
-              2. Added OSCL_UNUSED_ARG(pOverflow) to remove compiler warnings.
-
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_mac function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_MAC_H
-#define L_MAC_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_mac(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 result;
-        Word32 L_sum;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm {SMULBB result, var1, var2}
-        __asm {QDADD L_sum, L_var3, result}
-        return (L_sum);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    static inline Word32 L_mac(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 ra = L_var3;
-        register Word32 rb = var1;
-        register Word32 rc = var2;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(result)
-                             : "r"(rb), "r"(rc)
-                            );
-
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(rc)
-                             : "r"(ra), "r"(result)
-                            );
-
-        return (rc);
-    }
-
-#else /* C_EQUIVALENT */
-
-    __inline Word32 L_mac(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 result;
-        Word32 L_sum;
-        result = (Word32) var1 * var2;
-        if (result != (Word32) 0x40000000L)
-        {
-            L_sum = (result << 1) + L_var3;
-
-            /* Check if L_sum and L_var_3 share the same sign */
-            if ((L_var3 ^ result) > 0)
-            {
-                if ((L_sum ^ L_var3) < 0)
-                {
-                    L_sum = (L_var3 < 0) ? MIN_32 : MAX_32;
-                    *pOverflow = 1;
-                }
-            }
-        }
-        else
-        {
-            *pOverflow = 1;
-            L_sum = MAX_32;
-        }
-        return (L_sum);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_MAC_H_ */
-
-
diff --git a/media/module/codecs/amrnb/common/include/l_msu.h b/media/module/codecs/amrnb/common/include/l_msu.h
deleted file mode 100644
index 3bafb00..0000000
--- a/media/module/codecs/amrnb/common/include/l_msu.h
+++ /dev/null
@@ -1,171 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
- Filename: /audio/gsm_amr/c/include/l_msu.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_msu function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_msu function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_MSU_H
-#define L_MSU_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-#include    "l_mult.h"
-#include    "l_sub.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_msu(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 product;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm
-        {
-            SMULBB product, var1, var2
-            QDSUB  result, L_var3, product
-        }
-
-        return (result);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    __inline Word32 L_msu(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 ra = L_var3;
-        register Word32 rb = var1;
-        register Word32 rc = var2;
-        Word32 product;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product)
-                             : "r"(rb), "r"(rc)
-                            );
-
-        asm volatile("qdsub %0, %1, %2"
-             : "=r"(result)
-                             : "r"(ra), "r"(product)
-                            );
-
-        return (result);
-    }
-
-#else /* C EQUIVALENT */
-
-    static inline Word32 L_msu(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 result;
-
-        result = L_mult(var1, var2, pOverflow);
-        result = L_sub(L_var3, result, pOverflow);
-
-        return (result);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_MSU_H_ */
diff --git a/media/module/codecs/amrnb/common/include/l_mult.h b/media/module/codecs/amrnb/common/include/l_mult.h
deleted file mode 100644
index 061df60..0000000
--- a/media/module/codecs/amrnb/common/include/l_mult.h
+++ /dev/null
@@ -1,178 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_mult.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_mult function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_mult function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_MULT_H
-#define L_MULT_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 result;
-        Word32 product;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm
-        {
-            SMULBB product, var1, var2
-            QADD   result, product, product
-        }
-
-        return (result);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    __inline Word32 L_mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 ra = var1;
-        register Word32 rb = var2;
-        Word32 result;
-        Word32 product;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product)
-                             : "r"(ra), "r"(rb)
-                            );
-
-        asm volatile("qadd %0, %1, %2"
-             : "=r"(result)
-                             : "r"(product), "r"(product)
-                            );
-
-        return(result);
-    }
-
-#else /* C EQUIVALENT */
-
-    static inline Word32 L_mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 L_product;
-
-        L_product = (Word32) var1 * var2;
-
-        if (L_product != (Word32) 0x40000000L)
-        {
-            L_product <<= 1;          /* Multiply by 2 */
-        }
-        else
-        {
-            *pOverflow = 1;
-            L_product = MAX_32;
-        }
-
-        return (L_product);
-    }
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_MULT_H */
-
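The C equivalent in the header deleted above doubles the 16x16 product to reach Q31 and treats 0x40000000 as the single special case: that value can only come from (-32768) * (-32768), and doubling it would not fit in 32 bits. A small sketch of that behaviour follows (an illustration, not part of the patch), with Word16, Word32, Flag and MAX_32 again standing in for the basicop definitions:

    #include <stdint.h>
    #include <stdio.h>

    typedef int16_t Word16;
    typedef int32_t Word32;
    typedef int     Flag;
    #define MAX_32 INT32_MAX

    /* Q15 x Q15 -> Q31 multiply with saturation, as in the deleted L_mult. */
    static Word32 l_mult_sketch(Word16 var1, Word16 var2, Flag *pOverflow)
    {
        Word32 product = (Word32)var1 * var2;  /* magnitude at most 0x40000000 */

        if (product != (Word32)0x40000000L)
        {
            product *= 2;    /* double to reach Q31; the original shifts left by one */
        }
        else
        {
            *pOverflow = 1;  /* only -32768 * -32768 lands here */
            product = MAX_32;
        }
        return product;
    }

    int main(void)
    {
        Flag ovf = 0;
        printf("%ld\n", (long)l_mult_sketch(16384, 16384, &ovf));           /* 536870912 (0x20000000) */
        printf("%ld %d\n", (long)l_mult_sketch(-32768, -32768, &ovf), ovf); /* 2147483647 1 */
        return 0;
    }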
diff --git a/media/module/codecs/amrnb/common/include/l_shl.h b/media/module/codecs/amrnb/common/include/l_shl.h
deleted file mode 100644
index 7b9fdb1..0000000
--- a/media/module/codecs/amrnb/common/include/l_shl.h
+++ /dev/null
@@ -1,116 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_shl.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_shl function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_shl function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_SHL_H
-#define L_SHL_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-    Word32 L_shl(Word32 L_var1, Word16 var2, Flag *pOverflow);
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_SHL_H_ */
-
-
-
diff --git a/media/module/codecs/amrnb/common/include/l_shr.h b/media/module/codecs/amrnb/common/include/l_shr.h
deleted file mode 100644
index ef22073..0000000
--- a/media/module/codecs/amrnb/common/include/l_shr.h
+++ /dev/null
@@ -1,115 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
- Filename: /audio/gsm_amr/c/include/l_shr.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_shr function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_shr function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_SHR_H
-#define L_SHR_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-    Word32 L_shr(Word32 L_var1, Word16 var2, Flag *pOverflow);
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_SHR_H_ */
-
-
-
diff --git a/media/module/codecs/amrnb/common/include/l_sub.h b/media/module/codecs/amrnb/common/include/l_sub.h
deleted file mode 100644
index 97d7538..0000000
--- a/media/module/codecs/amrnb/common/include/l_sub.h
+++ /dev/null
@@ -1,173 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_sub.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_sub function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_sub function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_SUB_H
-#define L_SUB_H
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_sub(Word32 L_var1, Word32 L_var2, Flag *pOverflow)
-    {
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm
-        {
-            QSUB result, L_var1, L_var2
-        }
-
-        return(result);
-
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    __inline Word32 L_sub(Word32 L_var1, Word32 L_var2, Flag *pOverflow)
-    {
-        register Word32 ra = L_var1;
-        register Word32 rb = L_var2;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("qsub %0, %1, %2"
-             : "=r"(result)
-                             : "r"(ra), "r"(rb)
-                            );
-
-        return (result);
-    }
-
-#else /* C EQUIVALENT */
-
-    static inline Word32 L_sub(register Word32 L_var1, register Word32 L_var2,
-                               register Flag *pOverflow)
-    {
-        Word32 L_diff;
-
-        L_diff = L_var1 - L_var2;
-
-        if ((L_var1 ^ L_var2) < 0)
-        {
-            if ((L_diff ^ L_var1) & MIN_32)
-            {
-                L_diff = (L_var1 < 0L) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
-        }
-
-        return (L_diff);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_SUB_H_ */
-
-
diff --git a/media/module/codecs/amrnb/common/include/mpy_32.h b/media/module/codecs/amrnb/common/include/mpy_32.h
deleted file mode 100644
index 03f36b2..0000000
--- a/media/module/codecs/amrnb/common/include/mpy_32.h
+++ /dev/null
@@ -1,272 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/mpy_32.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Updated the function to include ARM and Linux-ARM assembly
-              instructions.
-
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the Mpy_32 function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef MPY_32_H
-#define MPY_32_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 Mpy_32(Word16 L_var1_hi,
-    Word16 L_var1_lo,
-    Word16 L_var2_hi,
-    Word16 L_var2_lo,
-    Flag   *pOverflow)
-
-    {
-        /*----------------------------------------------------------------------------
-        ; Define all local variables
-        ----------------------------------------------------------------------------*/
-        Word32 L_product;
-        Word32 L_sum;
-        Word32 product32;
-
-        OSCL_UNUSED_ARG(pOverflow);
-        /*----------------------------------------------------------------------------
-        ; Function body here
-        ----------------------------------------------------------------------------*/
-        /* L_product = L_mult (L_var1_hi, L_var2_hi, pOverflow);*/
-
-        __asm {SMULBB L_product, L_var1_hi, L_var2_hi}
-        __asm {QDADD L_product, 0, L_product}
-        __asm {SMULBB product32, L_var1_hi, L_var2_lo}
-        product32 >>= 15;
-        __asm {QDADD L_sum, L_product, product32}
-        L_product = L_sum;
-        __asm {SMULBB product32, L_var1_lo, L_var2_hi}
-        product32 >>= 15;
-        __asm {QDADD L_sum, L_product, product32}
-        return (L_sum);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    static inline Word32 Mpy_32(Word16 L_var1_hi,
-                                Word16 L_var1_lo,
-                                Word16 L_var2_hi,
-                                Word16 L_var2_lo,
-                                Flag   *pOverflow)
-    {
-        register Word32 product32;
-        register Word32 L_sum;
-        register Word32 L_product, result;
-        register Word32 ra = L_var1_hi;
-        register Word32 rb = L_var1_lo;
-        register Word32 rc = L_var2_hi;
-        register Word32 rd = L_var2_lo;
-
-
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(L_product)
-                             : "r"(ra), "r"(rc)
-                            );
-        asm volatile("mov %0, #0"
-             : "=r"(result)
-                    );
-
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(L_sum)
-                             : "r"(result), "r"(L_product)
-                            );
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product32)
-                             : "r"(ra), "r"(rd)
-                            );
-
-        asm volatile("mov %0, %1, ASR #15"
-             : "=r"(ra)
-                             : "r"(product32)
-                            );
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(L_product)
-                             : "r"(L_sum), "r"(ra)
-                            );
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product32)
-                             : "r"(rb), "r"(rc)
-                            );
-
-        asm volatile("mov %0, %1, ASR #15"
-             : "=r"(rb)
-                             : "r"(product32)
-                            );
-
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(L_sum)
-                             : "r"(L_product), "r"(rb)
-                            );
-
-        return (L_sum);
-    }
-
-#else /* C_EQUIVALENT */
-
-    __inline Word32 Mpy_32(Word16 L_var1_hi,
-                           Word16 L_var1_lo,
-                           Word16 L_var2_hi,
-                           Word16 L_var2_lo,
-                           Flag   *pOverflow)
-    {
-        Word32 L_product;
-        Word32 L_sum;
-        Word32 product32;
-
-        OSCL_UNUSED_ARG(pOverflow);
-        L_product = (Word32) L_var1_hi * L_var2_hi;
-
-        if (L_product != (Word32) 0x40000000L)
-        {
-            L_product <<= 1;
-        }
-        else
-        {
-            L_product = MAX_32;
-        }
-
-        /* result = mult (L_var1_hi, L_var2_lo, pOverflow); */
-        product32 = ((Word32) L_var1_hi * L_var2_lo) >> 15;
-
-        /* L_product = L_mac (L_product, result, 1, pOverflow); */
-        L_sum = L_product + (product32 << 1);
-
-        if ((L_product ^ product32) > 0)
-        {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-            }
-        }
-
-        L_product = L_sum;
-
-        /* result = mult (L_var1_lo, L_var2_hi, pOverflow); */
-        product32 = ((Word32) L_var1_lo * L_var2_hi) >> 15;
-
-        /* L_product = L_mac (L_product, result, 1, pOverflow); */
-        L_sum = L_product + (product32 << 1);
-
-        if ((L_product ^ product32) > 0)
-        {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-            }
-        }
-
-        /*----------------------------------------------------------------------------
-        ; Return nothing or data or data pointer
-        ----------------------------------------------------------------------------*/
-        return (L_sum);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _MPY_32_H_ */
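For context on the double-precision format the deleted Mpy_32() operated on: each 32-bit fractional value is passed as a (hi, lo) pair of 16-bit halves. A minimal sketch of that decomposition, assuming the classic ETSI L_Extract() convention and the Word16/Word32 typedefs from basicop_malloc.h (the helper name is illustrative; it is not part of this patch):

    // Sketch: split a Q31 value into the (hi, lo) halves that Mpy_32() expects.
    static inline void extract_hi_lo(Word32 L_32, Word16 *hi, Word16 *lo)
    {
        *hi = (Word16)(L_32 >> 16);                          /* top 16 bits          */
        *lo = (Word16)((L_32 - ((Word32)*hi << 16)) >> 1);   /* next 15 bits, Q15    */
    }
    // Reconstruction (approximately): L_32 ~= ((Word32)*hi << 16) + ((Word32)*lo << 1)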
diff --git a/media/module/codecs/amrnb/common/include/mpy_32_16.h b/media/module/codecs/amrnb/common/include/mpy_32_16.h
deleted file mode 100644
index 7eaa741..0000000
--- a/media/module/codecs/amrnb/common/include/mpy_32_16.h
+++ /dev/null
@@ -1,206 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
-  Filename: /audio/gsm_amr/c/include/mpy_32_16.h
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the Mpy_32_16 function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef MPY_32_16_H
-#define MPY_32_16_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 Mpy_32_16(Word16 L_var1_hi,
-    Word16 L_var1_lo,
-    Word16 var2,
-    Flag *pOverflow)
-    {
-
-        Word32 L_product;
-        Word32 L_sum;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm {SMULBB L_product, L_var1_hi, var2}
-        __asm {QDADD L_product, 0, L_product}
-        __asm {SMULBB result, L_var1_lo, var2}
-        result >>= 15;
-        __asm {QDADD L_sum, L_product, result}
-        return (L_sum);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    static inline Word32 Mpy_32_16(Word16 L_var1_hi,
-                                   Word16 L_var1_lo,
-                                   Word16 var2,
-                                   Flag *pOverflow)
-    {
-
-        register Word32 ra = L_var1_hi;
-        register Word32 rb = L_var1_lo;
-        register Word32 rc = var2;
-        Word32 result, L_product;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(L_product)
-                             : "r"(ra), "r"(rc)
-                            );
-        asm volatile("mov %0, #0"
-             : "=r"(result)
-                    );
-
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(L_product)
-                             : "r"(result), "r"(L_product)
-                            );
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(result)
-                             : "r"(rb), "r"(rc)
-                            );
-
-        asm volatile("mov %0, %1, ASR #15"
-             : "=r"(ra)
-                             : "r"(result)
-                            );
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(result)
-                             : "r"(L_product), "r"(ra)
-                            );
-
-        return (result);
-    }
-
-#else /* C_EQUIVALENT */
-    __inline Word32 Mpy_32_16(Word16 L_var1_hi,
-                              Word16 L_var1_lo,
-                              Word16 var2,
-                              Flag *pOverflow)
-    {
-
-        Word32 L_product;
-        Word32 L_sum;
-        Word32 result;
-        L_product = (Word32) L_var1_hi * var2;
-
-        if (L_product != (Word32) 0x40000000L)
-        {
-            L_product <<= 1;
-        }
-        else
-        {
-            *pOverflow = 1;
-            L_product = MAX_32;
-        }
-
-        result = ((Word32)L_var1_lo * var2) >> 15;
-
-        L_sum  =  L_product + (result << 1);
-
-        if ((L_product ^ result) > 0)
-        {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
-        }
-        return (L_sum);
-
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _MPY_32_16_H_ */
-
-
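Callers of the deleted Mpy_32_16() pass the 32-bit operand as the same (hi, lo) halves plus a 16-bit Q15 factor and an overflow flag that the C-equivalent path sets when it saturates to MAX_32/MIN_32. A minimal usage sketch, assuming the Word16/Word32/Flag typedefs from basicop_malloc.h and the extract_hi_lo() helper sketched above (the helper and variable names are illustrative only):

    // Hypothetical helper: scale a Q31 value by a Q15 gain using Mpy_32_16().
    static Word32 scale_q31_by_q15(Word32 value, Word16 gain_q15, Flag *pOverflow)
    {
        Word16 hi, lo;
        extract_hi_lo(value, &hi, &lo);
        return Mpy_32_16(hi, lo, gain_q15, pOverflow);  /* *pOverflow set on saturation */
    }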
diff --git a/media/module/codecs/amrnb/common/include/mult.h b/media/module/codecs/amrnb/common/include/mult.h
deleted file mode 100644
index 6927eba..0000000
--- a/media/module/codecs/amrnb/common/include/mult.h
+++ /dev/null
@@ -1,190 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/mult.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for mult function.
-
- Description: Changed prototype of the mult() function. Instead of using global
-              a pointer to overflow flag is now passed into the function.
-
- Description: Updated copyright information.
-              Updated variable name from "overflow" to "pOverflow" to match
-              with original function declaration.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the mult function.
-
-------------------------------------------------------------------------------
-*/
-
-#ifndef MULT_H
-#define MULT_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5)
-
-    __inline Word16 mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 product;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm
-        {
-            SMULBB product, var1, var2
-            MOV    product, product, ASR #15
-            CMP    product, 0x7FFF
-            MOVGE  product, 0x7FFF
-        }
-
-        return ((Word16) product);
-    }
-
-#elif defined(PV_ARM_GCC_V5)
-
-    __inline Word16 mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 ra = var1;
-        register Word32 rb = var2;
-        Word32 product;
-        Word32 temp = 0x7FFF;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product)
-                             : "r"(ra), "r"(rb)
-                            );
-        asm volatile("mov %0, %1, ASR #15"
-             : "=r"(product)
-                             : "r"(product)
-                            );
-        asm volatile("cmp %0, %1"
-             : "=r"(product)
-                             : "r"(temp)
-                            );
-        asm volatile("movge %0, %1"
-             : "=r"(product)
-                             : "r"(temp)
-                            );
-
-        return ((Word16) product);
-    }
-
-#else /* C EQUIVALENT */
-
-    static inline Word16 mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 product;
-
-        product = ((Word32) var1 * var2) >> 15;
-
-        /* Saturate result (if necessary). */
-        /* var1 * var2 >0x00007fff is the only case */
-        /* that saturation occurs. */
-
-        if (product > 0x00007fffL)
-        {
-            *pOverflow = 1;
-            product = (Word32) MAX_16;
-        }
-
-
-        /* Return the product as a 16 bit value by type casting Word32 to Word16 */
-
-        return ((Word16) product);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif  /* _MULT_H_ */
-
diff --git a/media/module/codecs/amrnb/common/include/n_proc.h b/media/module/codecs/amrnb/common/include/n_proc.h
deleted file mode 100644
index e5738c1..0000000
--- a/media/module/codecs/amrnb/common/include/n_proc.h
+++ /dev/null
@@ -1,31 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/* $Id $ */
-
-void proc_head(char *mes);
diff --git a/media/module/codecs/amrnb/common/src/az_lsp.cpp b/media/module/codecs/amrnb/common/src/az_lsp.cpp
index f3098f5..a19ddbf 100644
--- a/media/module/codecs/amrnb/common/src/az_lsp.cpp
+++ b/media/module/codecs/amrnb/common/src/az_lsp.cpp
@@ -237,9 +237,6 @@
 
 ------------------------------------------------------------------------------
 */
-#ifdef __clang__
-__attribute__((no_sanitize("integer")))
-#endif
 static Word16 Chebps(Word16 x,
                      Word16 f[], /* (n) */
                      Word16 n,
diff --git a/media/module/codecs/amrnb/common/src/l_abs.cpp b/media/module/codecs/amrnb/common/src/l_abs.cpp
index 7e0ae99..b13a40a 100644
--- a/media/module/codecs/amrnb/common/src/l_abs.cpp
+++ b/media/module/codecs/amrnb/common/src/l_abs.cpp
@@ -186,8 +186,12 @@
     ; Function body here
     ----------------------------------------------------------------------------*/
 
-    Word32 y = L_var1 - (L_var1 < 0);
-    y = y ^(y >> 31);
-    return (y);
+    if (L_var1 >= 0) return L_var1;
+    if (L_var1 != 0x80000000) return -L_var1;
+    // abs(0x80000000) cannot be represented in Word32.
+    // We choose to return the closest representable value, 0x7fffffff.
+    // This is acceptable because it keeps the result within the valid 32-bit signed integer
+    // range, consistent with other overflow handling in the code, e.g. amrnb/enc/src/l_negate.cpp.
+    return 0x7FFFFFFF;
 
 }
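The same saturating behaviour could also be expressed with the __builtin_*_overflow helpers used throughout the rest of this change; a sketch of an equivalent formulation (not what the patch actually uses), assuming MAX_32 is the usual 0x7FFFFFFF constant from the basic-ops headers:

    Word32 neg;
    if (L_var1 >= 0) return L_var1;
    /* 0 - INT32_MIN overflows, which is exactly the case that must saturate. */
    if (__builtin_sub_overflow((Word32)0, L_var1, &neg)) return MAX_32;
    return neg;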
diff --git a/media/module/codecs/amrnb/common/src/lsp_az.cpp b/media/module/codecs/amrnb/common/src/lsp_az.cpp
index 495359f..bb8a34d 100644
--- a/media/module/codecs/amrnb/common/src/lsp_az.cpp
+++ b/media/module/codecs/amrnb/common/src/lsp_az.cpp
@@ -281,8 +281,8 @@
             t0 += ((Word32)lo * *lsp) >> 15;
 
             *(f) +=  *(f - 2);          /*      *f += f[-2]      */
-            *(f--) -=  t0 << 2;         /*      *f -= t0         */
-
+            __builtin_sub_overflow(*(f), (t0 << 2), f);   /*      *f -= t0         */
+            f--;
         }
 
         *f -= (Word32)(*lsp++) << 10;
diff --git a/media/module/codecs/amrnb/common/src/norm_l.cpp b/media/module/codecs/amrnb/common/src/norm_l.cpp
index d8d1259..b24ebda 100644
--- a/media/module/codecs/amrnb/common/src/norm_l.cpp
+++ b/media/module/codecs/amrnb/common/src/norm_l.cpp
@@ -211,8 +211,7 @@
     if (L_var1)
     {
 
-        Word32 y = L_var1 - (L_var1 < 0);
-        L_var1 = y ^(y >> 31);
+        L_var1 = L_abs(L_var1);
 
 
         while (!(0x40000000L & L_var1))
diff --git a/media/module/codecs/amrnb/common/src/residu.cpp b/media/module/codecs/amrnb/common/src/residu.cpp
index 2ad132f..9b077e2 100644
--- a/media/module/codecs/amrnb/common/src/residu.cpp
+++ b/media/module/codecs/amrnb/common/src/residu.cpp
@@ -227,22 +227,35 @@
         p_input3 = p_input_ptr--;
         p_input4 = p_input_ptr--;
 
+        Word32 tmp;
         for (j = M >> 1; j != 0; j--)
         {
-            s1 += ((Word32) * (p_coef) * *(p_input1++));
-            s2 += ((Word32) * (p_coef) * *(p_input2++));
-            s3 += ((Word32) * (p_coef) * *(p_input3++));
-            s4 += ((Word32) * (p_coef--) * *(p_input4++));
-            s1 += ((Word32) * (p_coef) * *(p_input1++));
-            s2 += ((Word32) * (p_coef) * *(p_input2++));
-            s3 += ((Word32) * (p_coef) * *(p_input3++));
-            s4 += ((Word32) * (p_coef--) * *(p_input4++));
+            __builtin_mul_overflow(*p_coef, *(p_input1++), &tmp);
+            __builtin_add_overflow(s1, tmp, &s1);
+            __builtin_mul_overflow(*p_coef, *(p_input2++), &tmp);
+            __builtin_add_overflow(s2, tmp, &s2);
+            __builtin_mul_overflow(*p_coef, *(p_input3++), &tmp);
+            __builtin_add_overflow(s3, tmp, &s3);
+            __builtin_mul_overflow(*(p_coef--), *(p_input4++), &tmp);
+            __builtin_add_overflow(s4, tmp, &s4);
+            __builtin_mul_overflow(*p_coef, *(p_input1++), &tmp);
+            __builtin_add_overflow(s1, tmp, &s1);
+            __builtin_mul_overflow(*p_coef, *(p_input2++), &tmp);
+            __builtin_add_overflow(s2, tmp, &s2);
+            __builtin_mul_overflow(*p_coef, *(p_input3++), &tmp);
+            __builtin_add_overflow(s3, tmp, &s3);
+            __builtin_mul_overflow(*(p_coef--), *(p_input4++), &tmp);
+            __builtin_add_overflow(s4, tmp, &s4);
         }
 
-        s1 += (((Word32) * (p_coef)) * *(p_input1));
-        s2 += (((Word32) * (p_coef)) * *(p_input2));
-        s3 += (((Word32) * (p_coef)) * *(p_input3));
-        s4 += (((Word32) * (p_coef)) * *(p_input4));
+        __builtin_mul_overflow(*p_coef, *(p_input1), &tmp);
+        __builtin_add_overflow(s1, tmp, &s1);
+        __builtin_mul_overflow(*p_coef, *(p_input2), &tmp);
+        __builtin_add_overflow(s2, tmp, &s2);
+        __builtin_mul_overflow(*p_coef, *(p_input3), &tmp);
+        __builtin_add_overflow(s3, tmp, &s3);
+        __builtin_mul_overflow(*p_coef, *(p_input4), &tmp);
+        __builtin_add_overflow(s4, tmp, &s4);
 
         *(p_residual_ptr--) = (Word16)(s1 >> 12);
         *(p_residual_ptr--) = (Word16)(s2 >> 12);
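The rewrite above is an instance of a pattern this change applies across the codec sources: each `s += (Word32)a * b` multiply-accumulate is split into __builtin_mul_overflow() plus __builtin_add_overflow(), so wrap-around is explicit two's-complement behaviour rather than undefined signed overflow flagged by the integer sanitizer. A condensed sketch of the pattern (the helper name is illustrative, not part of the patch; the boolean results are ignored here, as in the patch):

    // Wrapping multiply-accumulate: *acc += a * b, with defined wrap-around.
    static inline void mac_wrap(Word32 *acc, Word16 a, Word16 b)
    {
        Word32 product;
        __builtin_mul_overflow(a, b, &product);      /* product = a * b, wrapped to 32 bits */
        __builtin_add_overflow(*acc, product, acc);  /* *acc += product, wrapped to 32 bits */
    }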
diff --git a/media/module/codecs/amrnb/common/src/sub.cpp b/media/module/codecs/amrnb/common/src/sub.cpp
index b956912..d936128 100644
--- a/media/module/codecs/amrnb/common/src/sub.cpp
+++ b/media/module/codecs/amrnb/common/src/sub.cpp
@@ -187,9 +187,6 @@
 ; FUNCTION CODE
 ----------------------------------------------------------------------------*/
 
-#ifdef __clang__
-__attribute__((no_sanitize("integer")))
-#endif
 Word16 sub(Word16 var1, Word16 var2, Flag *pOverflow)
 {
 
diff --git a/media/module/codecs/amrnb/common/src/syn_filt.cpp b/media/module/codecs/amrnb/common/src/syn_filt.cpp
index 36c1d84..82770f1 100644
--- a/media/module/codecs/amrnb/common/src/syn_filt.cpp
+++ b/media/module/codecs/amrnb/common/src/syn_filt.cpp
@@ -245,9 +245,6 @@
 
 ------------------------------------------------------------------------------
 */
-#ifdef __clang__
-__attribute__((no_sanitize("integer")))
-#endif
 void Syn_filt(
     Word16 a[],     /* (i)   : a[M+1] prediction coefficients   (M=10)  */
     Word16 x[],     /* (i)   : input signal                             */
diff --git a/media/module/codecs/amrnb/dec/Android.bp b/media/module/codecs/amrnb/dec/Android.bp
index 70741d2..a28500a 100644
--- a/media/module/codecs/amrnb/dec/Android.bp
+++ b/media/module/codecs/amrnb/dec/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_codec_framework",
     default_applicable_licenses: [
         "frameworks_av_media_codecs_amrnb_dec_license",
     ],
@@ -47,12 +48,12 @@
         "src/b_cn_cod.cpp",
         "src/bgnscd.cpp",
         "src/c_g_aver.cpp",
-        "src/d1035pf.cpp",
-        "src/d2_11pf.cpp",
         "src/d2_9pf.cpp",
+        "src/d2_11pf.cpp",
         "src/d3_14pf.cpp",
         "src/d4_17pf.cpp",
         "src/d8_31pf.cpp",
+        "src/d1035pf.cpp",
         "src/d_gain_c.cpp",
         "src/d_gain_p.cpp",
         "src/d_plsf.cpp",
@@ -81,8 +82,8 @@
     export_include_dirs: ["src"],
 
     cflags: [
-        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
         "-DOSCL_IMPORT_REF=",
+        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
 
         "-Werror",
     ],
@@ -94,8 +95,8 @@
     //},
 
     shared_libs: [
-        "libstagefright_amrnb_common",
         "liblog",
+        "libstagefright_amrnb_common",
     ],
 
     target: {
@@ -113,19 +114,22 @@
 
     srcs: ["test/amrnbdec_test.cpp"],
 
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
 
     local_include_dirs: ["src"],
 
     static_libs: [
-        "libstagefright_amrnbdec",
         "libsndfile",
+        "libstagefright_amrnbdec",
     ],
 
     shared_libs: [
-        "libstagefright_amrnb_common",
         "libaudioutils",
         "liblog",
+        "libstagefright_amrnb_common",
     ],
 
     target: {
diff --git a/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp b/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
index af62074..984baf8 100644
--- a/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
+++ b/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
@@ -22,6 +22,7 @@
 
 #include <audio_utils/sndfile.h>
 #include <stdio.h>
+#include <fstream>
 
 #include "gsmamr_dec.h"
 
@@ -40,7 +41,7 @@
 
 static AmrnbDecTestEnvironment *gEnv = nullptr;
 
-class AmrnbDecoderTest : public ::testing::TestWithParam<string> {
+class AmrnbDecoderTest : public ::testing::TestWithParam<std::tuple<string, string>> {
   public:
     AmrnbDecoderTest() : mFpInput(nullptr) {}
 
@@ -54,6 +55,7 @@
     FILE *mFpInput;
     SNDFILE *openOutputFile(SF_INFO *sfInfo);
     int32_t DecodeFrames(void *amrHandle, SNDFILE *outFileHandle, int32_t frameCount = INT32_MAX);
+    bool compareBinaryFiles(const std::string& refFilePath, const std::string& outFilePath);
 };
 
 SNDFILE *AmrnbDecoderTest::openOutputFile(SF_INFO *sfInfo) {
@@ -97,6 +99,42 @@
     return 0;
 }
 
+bool AmrnbDecoderTest::compareBinaryFiles(const std::string &refFilePath,
+                                          const std::string &outFilePath) {
+    std::ifstream refFile(refFilePath, std::ios::binary | std::ios::ate);
+    std::ifstream outFile(outFilePath, std::ios::binary | std::ios::ate);
+    assert(refFile.is_open() && "Error opening reference file");
+    assert(outFile.is_open() && "Error opening output file");
+
+    std::streamsize refFileSize = refFile.tellg();
+    std::streamsize outFileSize = outFile.tellg();
+    if (refFileSize != outFileSize) {
+        ALOGE("Error, File size mismatch: Reference file size = %td bytes,"
+              " but output file size = %td bytes.", refFileSize, outFileSize);
+        return false;
+    }
+
+    refFile.seekg(0, std::ios::beg);
+    outFile.seekg(0, std::ios::beg);
+    constexpr std::streamsize kBufferSize = 16 * 1024;
+    char refBuffer[kBufferSize];
+    char outBuffer[kBufferSize];
+
+    while (refFile && outFile) {
+        refFile.read(refBuffer, kBufferSize);
+        outFile.read(outBuffer, kBufferSize);
+
+        std::streamsize refBytesRead = refFile.gcount();
+        std::streamsize outBytesRead = outFile.gcount();
+
+        if (refBytesRead != outBytesRead || memcmp(refBuffer, outBuffer, refBytesRead) != 0) {
+            ALOGE("Error, File content mismatch.");
+            return false;
+        }
+    }
+    return true;
+}
+
 TEST_F(AmrnbDecoderTest, CreateAmrnbDecoderTest) {
     void *amrHandle;
     int32_t status = GSMInitDecode(&amrHandle, (Word8 *)"AMRNBDecoder");
@@ -106,7 +144,7 @@
 }
 
 TEST_P(AmrnbDecoderTest, DecodeTest) {
-    string inputFile = gEnv->getRes() + GetParam();
+    string inputFile = gEnv->getRes() + std::get<0>(GetParam());
     mFpInput = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(mFpInput, nullptr) << "Error opening input file " << inputFile;
 
@@ -126,10 +164,15 @@
     sf_close(outFileHandle);
     GSMDecodeFrameExit(&amrHandle);
     ASSERT_EQ(amrHandle, nullptr) << "Error deleting AMR-NB decoder";
+
+    string refFilePath = gEnv->getRes() + std::get<1>(GetParam());
+    ASSERT_TRUE(compareBinaryFiles(refFilePath, OUTPUT_FILE))
+       << "Error, Binary file comparison failed: Output file " << OUTPUT_FILE
+       << " does not match the reference file " << refFilePath << ".";
 }
 
 TEST_P(AmrnbDecoderTest, ResetDecodeTest) {
-    string inputFile = gEnv->getRes() + GetParam();
+    string inputFile = gEnv->getRes() + std::get<0>(GetParam());
     mFpInput = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(mFpInput, nullptr) << "Error opening input file " << inputFile;
 
@@ -159,8 +202,24 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(AmrnbDecoderTestAll, AmrnbDecoderTest,
-                         ::testing::Values(("bbb_8000hz_1ch_8kbps_amrnb_30sec.amrnb"),
-                                           ("sine_amrnb_1ch_12kbps_8000hz.amrnb")));
+                         ::testing::Values(std::make_tuple(
+                                                   "bbb_8000hz_1ch_8kbps_amrnb_30sec.amrnb",
+                                                   "bbb_8000hz_1ch_8kbps_amrnb_30sec_ref.pcm"),
+                                           std::make_tuple(
+                                                   "sine_amrnb_1ch_12kbps_8000hz.amrnb",
+                                                   "sine_amrnb_1ch_12kbps_8000hz_ref.pcm"),
+                                           std::make_tuple(
+                                                   "trim_8000hz_1ch_12kpbs_amrnb_200ms.amrnb",
+                                                   "trim_8000hz_1ch_12kpbs_amrnb_200ms_ref.pcm"),
+                                           std::make_tuple(
+                                                   "bbb_8kHz_1ch_4.75kbps_amrnb_3sec.amrnb",
+                                                   "bbb_8kHz_1ch_4.75kbps_amrnb_3sec_ref.pcm"),
+                                           std::make_tuple(
+                                                   "bbb_8kHz_1ch_10kbps_amrnb_1sec.amrnb",
+                                                   "bbb_8kHz_1ch_10kbps_amrnb_1sec_ref.pcm"),
+                                           std::make_tuple(
+                                                   "bbb_8kHz_1ch_12.2kbps_amrnb_3sec.amrnb",
+                                                   "bbb_8kHz_1ch_12.2kbps_amrnb_3sec_ref.pcm")));
 
 int main(int argc, char **argv) {
     gEnv = new AmrnbDecTestEnvironment();
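With the tuple-based parameterization, each new decode test vector needs a matching reference PCM alongside the bitstream. A hypothetical extra entry would follow the same shape (the file names below are placeholders, not files shipped in AmrnbDecoderTest-2.0):

    std::make_tuple("example_8kHz_1ch_7.4kbps_amrnb.amrnb",    // input bitstream (hypothetical)
                    "example_8kHz_1ch_7.4kbps_amrnb_ref.pcm")  // expected decoded output (hypothetical)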
diff --git a/media/module/codecs/amrnb/dec/test/AndroidTest.xml b/media/module/codecs/amrnb/dec/test/AndroidTest.xml
index 539fa5c..7b2ba15 100644
--- a/media/module/codecs/amrnb/dec/test/AndroidTest.xml
+++ b/media/module/codecs/amrnb/dec/test/AndroidTest.xml
@@ -23,17 +23,17 @@
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
         <option name="target" value="host" />
         <option name="config-filename" value="AmrnbDecoderTest" />
-        <option name="version" value="1.0"/>
+        <option name="version" value="2.0"/>
     </target_preparer>
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
         <option name="push-all" value="true" />
-        <option name="media-folder-name" value="AmrnbDecoderTest-1.0" />
+        <option name="media-folder-name" value="AmrnbDecoderTest-2.0" />
         <option name="dynamic-config-module" value="AmrnbDecoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="AmrnbDecoderTest" />
-        <option name="native-test-flag" value="-P /sdcard/test/AmrnbDecoderTest-1.0/" />
+        <option name="native-test-flag" value="-P /sdcard/test/AmrnbDecoderTest-2.0/" />
     </test>
 </configuration>
diff --git a/media/module/codecs/amrnb/dec/test/DynamicConfig.xml b/media/module/codecs/amrnb/dec/test/DynamicConfig.xml
index 701a752..02b869a 100644
--- a/media/module/codecs/amrnb/dec/test/DynamicConfig.xml
+++ b/media/module/codecs/amrnb/dec/test/DynamicConfig.xml
@@ -15,6 +15,6 @@
 
 <dynamicConfig>
     <entry key="media_files_url">
-            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest-1.0.zip</value>
+            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest-2.0.zip</value>
     </entry>
 </dynamicConfig>
diff --git a/media/module/codecs/amrnb/dec/test/README.md b/media/module/codecs/amrnb/dec/test/README.md
index 41fb80a..ea54975 100644
--- a/media/module/codecs/amrnb/dec/test/README.md
+++ b/media/module/codecs/amrnb/dec/test/README.md
@@ -22,15 +22,15 @@
 adb push ${OUT}/data/nativetest/AmrnbDecoderTest/AmrnbDecoderTest /data/local/tmp/
 ```
 
-The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest-1.0.zip). Download, unzip and push these files into device for testing.
+The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest-2.0.zip). Download, unzip and push these files into device for testing.
 
 ```
-adb push AmrnbDecoderTest-1.0 /data/local/tmp/
+adb push AmrnbDecoderTest-2.0 /data/local/tmp/
 ```
 
 usage: AmrnbDecoderTest -P \<path_to_folder\>
 ```
-adb shell /data/local/tmp/AmrnbDecoderTest -P /data/local/tmp/AmrnbDecoderTest-1.0/
+adb shell /data/local/tmp/AmrnbDecoderTest -P /data/local/tmp/AmrnbDecoderTest-2.0/
 ```
 Alternatively, the test can also be run using atest command.
 
diff --git a/media/module/codecs/amrnb/enc/Android.bp b/media/module/codecs/amrnb/enc/Android.bp
index 3c6566e..13bb29c 100644
--- a/media/module/codecs/amrnb/enc/Android.bp
+++ b/media/module/codecs/amrnb/enc/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_codec_framework",
     default_applicable_licenses: [
         "frameworks_av_media_codecs_amrnb_enc_license",
     ],
@@ -42,12 +43,12 @@
     srcs: [
         "src/amrencode.cpp",
         "src/autocorr.cpp",
-        "src/c1035pf.cpp",
-        "src/c2_11pf.cpp",
         "src/c2_9pf.cpp",
+        "src/c2_11pf.cpp",
         "src/c3_14pf.cpp",
         "src/c4_17pf.cpp",
         "src/c8_31pf.cpp",
+        "src/c1035pf.cpp",
         "src/calc_cor.cpp",
         "src/calc_en.cpp",
         "src/cbsearch.cpp",
@@ -132,7 +133,10 @@
 
     srcs: ["test/amrnb_enc_test.cpp"],
 
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
 
     local_include_dirs: ["src"],
 
diff --git a/media/module/codecs/amrnb/enc/fuzzer/Android.bp b/media/module/codecs/amrnb/enc/fuzzer/Android.bp
index bcbcee2..1b2ec87 100644
--- a/media/module/codecs/amrnb/enc/fuzzer/Android.bp
+++ b/media/module/codecs/amrnb/enc/fuzzer/Android.bp
@@ -19,6 +19,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_codecs_amrnb_enc_license"
@@ -39,8 +40,8 @@
 
     static_libs: [
         "liblog",
-        "libstagefright_amrnbenc",
         "libstagefright_amrnb_common",
+        "libstagefright_amrnbenc",
     ],
 
     fuzz_config: {
diff --git a/media/module/codecs/amrnb/enc/src/autocorr.cpp b/media/module/codecs/amrnb/enc/src/autocorr.cpp
index c71811d..a078f5a 100644
--- a/media/module/codecs/amrnb/enc/src/autocorr.cpp
+++ b/media/module/codecs/amrnb/enc/src/autocorr.cpp
@@ -312,6 +312,7 @@
 
     Word16 y[L_WINDOW];
     Word32 sum;
+    Word32 mul;
     Word16 overfl_shft;
 
 
@@ -343,7 +344,8 @@
         temp = (amrnb_fxp_mac_16_by_16bb((Word32) * (p_x++), (Word32) * (p_wind++), 0x04000)) >> 15;
         *(p_y++) = temp;
 
-        sum += ((Word32)temp * temp) << 1;
+        __builtin_mul_overflow(temp, temp, &mul);
+        __builtin_add_overflow(sum, mul << 1, &sum);
         if (sum < 0)
         {
             /*
@@ -395,10 +397,12 @@
         {
             temp = *p_y >> 2;
             *(p_y++) = temp;
-            sum += ((Word32)temp * temp) << 1;
+            __builtin_mul_overflow(temp, temp, &mul);
+            __builtin_add_overflow(sum, mul << 1, &sum);
             temp = *p_y >> 2;
             *(p_y++) = temp;
-            sum += ((Word32)temp * temp) << 1;
+            __builtin_mul_overflow(temp, temp, &mul);
+            __builtin_add_overflow(sum, mul << 1, &sum);
         }
         if (sum > 0)
         {
diff --git a/media/module/codecs/amrnb/enc/src/c2_9pf.cpp b/media/module/codecs/amrnb/enc/src/c2_9pf.cpp
index b211032..56b4fb8 100644
--- a/media/module/codecs/amrnb/enc/src/c2_9pf.cpp
+++ b/media/module/codecs/amrnb/enc/src/c2_9pf.cpp
@@ -610,6 +610,7 @@
         Word32 alp1;
         Word16 i;
         Word32 L_temp;
+        Word32 mul;
         Word16 *p_codvec = &codvec[0];
 
         OSCL_UNUSED_ARG(pOverflow);
@@ -693,7 +694,8 @@
                     L_temp = ((Word32) alp * sq1) << 1;
 
                     /* s = L_msu(L_temp, sq, alp_16, pOverflow); */
-                    s = L_temp - (((Word32) sq * alp_16) << 1);
+                    __builtin_mul_overflow(sq, alp_16, &mul);
+                    __builtin_sub_overflow(L_temp, (mul << 1), &s);
 
                     if (s > 0)
                     {
diff --git a/media/module/codecs/amrnb/enc/src/c3_14pf.cpp b/media/module/codecs/amrnb/enc/src/c3_14pf.cpp
index 58ab2fa..bb4fe36 100644
--- a/media/module/codecs/amrnb/enc/src/c3_14pf.cpp
+++ b/media/module/codecs/amrnb/enc/src/c3_14pf.cpp
@@ -403,6 +403,7 @@
     Word16 *p_codvec = &codvec[0];
 
     Word32 s;
+    Word32 mul;
     Word32 alp0;
     Word32 alp1;
 
@@ -487,7 +488,8 @@
                             s = ((Word32) alp * sq1) << 1;
 
                             /* s = L_msu(s, sq, alp_16, pOverflow); */
-                            s -= (((Word32) sq * alp_16) << 1);
+                            __builtin_mul_overflow(sq, alp_16, &mul);
+                            __builtin_sub_overflow(s, (mul << 1), &s);
 
                             if (s > 0)
                             {
diff --git a/media/module/codecs/amrnb/enc/src/c4_17pf.cpp b/media/module/codecs/amrnb/enc/src/c4_17pf.cpp
index d52b43b..062ee5a 100644
--- a/media/module/codecs/amrnb/enc/src/c4_17pf.cpp
+++ b/media/module/codecs/amrnb/enc/src/c4_17pf.cpp
@@ -416,6 +416,7 @@
         Word16 *p_codvec = &codvec[0];
 
         Word32 s;
+        Word32 mul;
         Word32 alp0;
         Word32 alp1;
 
@@ -497,7 +498,8 @@
                             s = ((Word32) alp * sq1) << 1;
 
                             /* s = L_msu(s, sq, alp_16, pOverflow); */
-                            s -= (((Word32) sq * alp_16) << 1);
+                            __builtin_mul_overflow(sq, alp_16, &mul);
+                            __builtin_sub_overflow(s, (mul << 1), &s);
 
                             if (s > 0)
                             {
@@ -610,7 +612,8 @@
                             s = ((Word32) alp * sq1) << 1;
 
                             /* s = L_msu(s, sq, alp_16, pOverflow); */
-                            s -= (((Word32) sq * alp_16) << 1);
+                            __builtin_mul_overflow(sq, alp_16, &mul);
+                            __builtin_sub_overflow(s, (mul << 1), &s);
 
                             if (s > 0)
                             {
@@ -630,7 +633,8 @@
                         s = ((Word32) alpk * sq) << 1;
 
                         /* s = L_msu(s, psk, alp, pOverflow); */
-                        s -= (((Word32) psk * alp) << 1);
+                        __builtin_mul_overflow(psk, alp, &mul);
+                        __builtin_sub_overflow(s, (mul << 1), &s);
 
                         if (s > 0)
                         {
diff --git a/media/module/codecs/amrnb/enc/src/cor_h_x.cpp b/media/module/codecs/amrnb/enc/src/cor_h_x.cpp
index c25c026..398c71f 100644
--- a/media/module/codecs/amrnb/enc/src/cor_h_x.cpp
+++ b/media/module/codecs/amrnb/enc/src/cor_h_x.cpp
@@ -254,6 +254,7 @@
     Word16 k;
 
     Word32 s;
+    Word32 mul;
     Word32 y32[L_CODE];
     Word32 max;
     Word32 tot;
@@ -275,15 +276,19 @@
 
             for (j = (L_CODE - i - 1) >> 1; j != 0; j--)
             {
-                s += ((Word32) * (p_x++) * *(p_ptr++)) << 1;
-                s += ((Word32) * (p_x++) * *(p_ptr++)) << 1;
+                __builtin_mul_overflow(*(p_x++), *(p_ptr++), &mul);
+                __builtin_add_overflow(s, mul << 1, &s);
+                __builtin_mul_overflow(*(p_x++), *(p_ptr++), &mul);
+                __builtin_add_overflow(s, mul << 1, &s);
             }
 
-            s += ((Word32) * (p_x++) * *(p_ptr++)) << 1;
+            __builtin_mul_overflow(*(p_x++), *(p_ptr++), &mul);
+            __builtin_add_overflow(s, mul << 1, &s);
 
             if (!((L_CODE - i) & 1))    /* if even number of iterations */
             {
-                s += ((Word32) * (p_x++) * *(p_ptr++)) << 1;
+                __builtin_mul_overflow(*(p_x++), *(p_ptr++), &mul);
+                __builtin_add_overflow(s, mul << 1, &s);
             }
 
             y32[i] = s;
@@ -299,7 +304,7 @@
             }
         }
 
-        tot += (max >> 1);
+        __builtin_add_overflow(tot, (max >> 1), &tot);
     }
 
 
@@ -310,10 +315,13 @@
 
     for (i = L_CODE >> 1; i != 0; i--)
     {
+        Word32 result;
         s = L_shl(*(p_y32++), j, pOverflow);
-        *(p_ptr++) = (s + 0x00008000) >> 16;
+        __builtin_add_overflow(s, 0x00008000, &result);
+        *(p_ptr++) = result >> 16;
         s = L_shl(*(p_y32++), j, pOverflow);
-        *(p_ptr++) = (s + 0x00008000) >> 16;
+        __builtin_add_overflow(s, 0x00008000, &result);
+        *(p_ptr++) = result >> 16;
     }
 
     return;
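The `(s + 0x00008000) >> 16` step that the new code performs via __builtin_add_overflow() is the usual round-to-nearest extraction of the high 16 bits of a Q31 accumulator; a sketch of the same operation as a standalone helper (name illustrative, not part of the patch):

    // Round a 32-bit accumulator to its high 16 bits (Q31 -> Q15 with rounding).
    // The add uses __builtin_add_overflow so values near MAX_32 wrap predictably
    // instead of triggering undefined signed overflow.
    static inline Word16 round_high16(Word32 s)
    {
        Word32 rounded;
        __builtin_add_overflow(s, 0x00008000, &rounded);
        return (Word16)(rounded >> 16);
    }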
diff --git a/media/module/codecs/amrnb/enc/src/cor_h_x2.cpp b/media/module/codecs/amrnb/enc/src/cor_h_x2.cpp
index e32eb4a..80ebb73 100644
--- a/media/module/codecs/amrnb/enc/src/cor_h_x2.cpp
+++ b/media/module/codecs/amrnb/enc/src/cor_h_x2.cpp
@@ -268,7 +268,7 @@
                 max = s;
             }
         }
-        tot = (tot + (max >> 1));
+        __builtin_add_overflow(tot, (max >> 1), &tot);
     }
 
     j = sub(norm_l(tot), sf, pOverflow);
diff --git a/media/module/codecs/amrnb/enc/src/dtx_enc.cpp b/media/module/codecs/amrnb/enc/src/dtx_enc.cpp
index 2ccb777..0d56c9b 100644
--- a/media/module/codecs/amrnb/enc/src/dtx_enc.cpp
+++ b/media/module/codecs/amrnb/enc/src/dtx_enc.cpp
@@ -945,6 +945,7 @@
 
     Word16 i;
     Word32 L_frame_en;
+    Word32 mul;
     Word32 L_temp;
     Word16 log_en_e;
     Word16 log_en_m;
@@ -967,7 +968,8 @@
 
     for (i = L_FRAME; i != 0; i--)
     {
-        L_frame_en += (((Word32) * p_speech) * *(p_speech)) << 1;
+        __builtin_mul_overflow(*p_speech, *p_speech, &mul);
+        __builtin_add_overflow(L_frame_en, mul << 1, &L_frame_en);
         p_speech++;
         if (L_frame_en < 0)
         {
diff --git a/media/module/codecs/amrnb/enc/src/g_pitch.cpp b/media/module/codecs/amrnb/enc/src/g_pitch.cpp
index 5b80e2a..6f686fa 100644
--- a/media/module/codecs/amrnb/enc/src/g_pitch.cpp
+++ b/media/module/codecs/amrnb/enc/src/g_pitch.cpp
@@ -376,15 +376,11 @@
     {
         L_temp = ((Word32) * (p_xn++) * *(p_y1++));
         s1 = s;
-        s = s1 + L_temp;
 
-        if ((s1 ^ L_temp) > 0)
+        if (__builtin_add_overflow(s1, L_temp, &s))
         {
-            if ((s1 ^ s) < 0)
-            {
-                *pOverflow = 1;
-                break;
-            }
+            *pOverflow = 1;
+            break;
         }
     }
 
diff --git a/media/module/codecs/amrnb/enc/src/levinson.cpp b/media/module/codecs/amrnb/enc/src/levinson.cpp
index 29cdac6..83dd81e 100644
--- a/media/module/codecs/amrnb/enc/src/levinson.cpp
+++ b/media/module/codecs/amrnb/enc/src/levinson.cpp
@@ -731,7 +731,7 @@
         t0 = t0 << 5;
 
         t1 = ((Word32) * (Rh + i) << 16) + ((Word32)(*(Rl + i)) << 1);
-        t0 += t1;
+        __builtin_add_overflow(t0, t1, &t0);
 
         /* K = -t0 / Alpha */
 
diff --git a/media/module/codecs/amrnb/enc/src/pitch_fr.cpp b/media/module/codecs/amrnb/enc/src/pitch_fr.cpp
index 584f79b..ab0a221 100644
--- a/media/module/codecs/amrnb/enc/src/pitch_fr.cpp
+++ b/media/module/codecs/amrnb/enc/src/pitch_fr.cpp
@@ -326,6 +326,7 @@
     Word16 norm_h;
     Word16 norm_l;
     Word32 s;
+    Word32 mul;
     Word32 s2;
     Word16 excf[L_SUBFR];
     Word16 scaling;
@@ -353,10 +354,12 @@
     {
         temp = *(p_excf++);
         *(p_s_excf++) = temp >> 2;
-        s += (Word32) temp * temp;
+        __builtin_mul_overflow(temp, temp, &mul);
+        __builtin_add_overflow(s, mul, &s);
         temp = *(p_excf++);
         *(p_s_excf++) = temp >> 2;
-        s += (Word32) temp * temp;
+        __builtin_mul_overflow(temp, temp, &mul);
+        __builtin_add_overflow(s, mul, &s);
     }
 
 
@@ -387,20 +390,24 @@
 
         while (j--)
         {
-            s  += (Word32) * (p_x++) * *(p_s_excf);
-            s2 += ((Word32)(*(p_s_excf)) * (*(p_s_excf)));
+            __builtin_mul_overflow(*(p_x++), *p_s_excf, &mul);
+            __builtin_add_overflow(s, mul, &s);
+            __builtin_mul_overflow(*p_s_excf, *p_s_excf, &mul);
+            __builtin_add_overflow(s2, mul, &s2);
             p_s_excf++;
-            s  += (Word32) * (p_x++) * *(p_s_excf);
-            s2 += ((Word32)(*(p_s_excf)) * (*(p_s_excf)));
+            __builtin_mul_overflow(*(p_x++), *p_s_excf, &mul);
+            __builtin_add_overflow(s, mul, &s);
+            __builtin_mul_overflow(*p_s_excf, *p_s_excf, &mul);
+            __builtin_add_overflow(s2, mul, &s2);
             p_s_excf++;
         }
 
         s2     = s2 << 1;
         s2     = Inv_sqrt(s2, pOverflow);
         norm_h = (Word16)(s2 >> 16);
-        norm_l = (Word16)((s2 >> 1) - (norm_h << 15));
+        __builtin_sub_overflow((s2 >> 1), (norm_h << 15), &norm_l);
         corr_h = (Word16)(s >> 15);
-        corr_l = (Word16)((s) - (corr_h << 15));
+        __builtin_sub_overflow(s, (corr_h << 15), &corr_l);
 
         /* Normalize correlation = correlation * (1/sqrt(energy)) */
 
diff --git a/media/module/codecs/amrnb/enc/src/pitch_ol.cpp b/media/module/codecs/amrnb/enc/src/pitch_ol.cpp
index c039bb0..0e4b74b 100644
--- a/media/module/codecs/amrnb/enc/src/pitch_ol.cpp
+++ b/media/module/codecs/amrnb/enc/src/pitch_ol.cpp
@@ -959,6 +959,7 @@
     Word16 p_max3;
     Word16 scal_flag = 0;
     Word32 t0;
+    Word32 mul;
 
 #ifdef VAD2
     Word32 r01;
@@ -1002,7 +1003,8 @@
 
     for (i = -pit_max; i < L_frame; i++)
     {
-        t0 += (((Word32) * (p_signal)) * *(p_signal)) << 1;
+        __builtin_mul_overflow(*p_signal, *p_signal, &mul);
+        __builtin_add_overflow(t0, mul << 1, &t0);
         p_signal++;
         if (t0 < 0)
         {
diff --git a/media/module/codecs/amrnb/enc/src/pre_proc.cpp b/media/module/codecs/amrnb/enc/src/pre_proc.cpp
index 042920e..0e2be41 100644
--- a/media/module/codecs/amrnb/enc/src/pre_proc.cpp
+++ b/media/module/codecs/amrnb/enc/src/pre_proc.cpp
@@ -576,7 +576,7 @@
         *(p_signal++) = (Word16)((L_tmp + 0x0000800L) >> 12);
 
         st->y1_hi = (Word16)(L_tmp >> 12);
-        st->y1_lo = (Word16)((L_tmp << 3) - ((Word32)(st->y1_hi) << 15));
+        __builtin_sub_overflow((Word16)(L_tmp << 3), (st->y1_hi) << 15, &st->y1_lo);
 
     }
 
diff --git a/media/module/codecs/amrnb/enc/src/s10_8pf.cpp b/media/module/codecs/amrnb/enc/src/s10_8pf.cpp
index 352b611..97d0318 100644
--- a/media/module/codecs/amrnb/enc/src/s10_8pf.cpp
+++ b/media/module/codecs/amrnb/enc/src/s10_8pf.cpp
@@ -746,11 +746,13 @@
 
             for (i5 = ipos[5]; i5 < L_CODE; i5 += step)
             {
-                ps2 = ps1 + *(p_temp1++);
+                __builtin_add_overflow(ps1, *(p_temp1++), &ps2);
 
-                alp2 = alp1 + ((Word32) * (p_temp2 + i5) << 12);
+                __builtin_add_overflow(alp1, *(p_temp2 + i5) << 12, &alp2);
 
-                alp_16 = (Word16)((alp2 + ((Word32) * (p_temp1++) << 14)) >> 16);
+                Word32 result;
+                __builtin_add_overflow(alp2, *(p_temp1++) << 14, &result);
+                alp_16 = (Word16)(result >> 16);
                 sq2 = (Word16)(((Word32) ps2 * ps2) >> 15);
 
                 if (((Word32) sq2 * alp) > ((Word32) sq * alp_16))
diff --git a/media/module/codecs/amrnb/enc/src/set_sign.cpp b/media/module/codecs/amrnb/enc/src/set_sign.cpp
index fa43f78..55658a4 100644
--- a/media/module/codecs/amrnb/enc/src/set_sign.cpp
+++ b/media/module/codecs/amrnb/enc/src/set_sign.cpp
@@ -505,6 +505,7 @@
     Word16 en[L_CODE];                  /* correlation vector */
     Word32 s;
     Word32 t;
+    Word32 mul;
     Word32 L_temp;
     Word16 *p_cn;
     Word16 *p_dn;
@@ -525,7 +526,8 @@
         val = *(p_cn++);
         s = L_mac(s, val, val, pOverflow);
         val = *(p_dn++);
-        t += ((Word32) val * val) << 1;
+        __builtin_mul_overflow(val, val, &mul);
+        __builtin_add_overflow(t, mul << 1, &t);
     }
     s = Inv_sqrt(s, pOverflow);
     k_cn = (Word16)((L_shl(s, 5, pOverflow)) >> 16);
diff --git a/media/module/codecs/amrnb/enc/src/spstproc.cpp b/media/module/codecs/amrnb/enc/src/spstproc.cpp
index b9574aa..5210a39 100644
--- a/media/module/codecs/amrnb/enc/src/spstproc.cpp
+++ b/media/module/codecs/amrnb/enc/src/spstproc.cpp
@@ -192,6 +192,7 @@
     Word16 i;
     Word16 j;
     Word16 temp;
+    Word32 mul;
     Word32 L_temp;
     Word32 L_temp2;
     Word16 tempShift;
@@ -262,8 +263,10 @@
          */
         L_temp     = ((Word32) * (p_exc++) * pitch_fac) << 1;
         L_temp2    = ((Word32) * (p_exc--) * pitch_fac) << 1;
-        L_temp    += ((Word32) * (p_code++) * gain_code) << 1;
-        L_temp2   += ((Word32) * (p_code++) * gain_code) << 1;
+        __builtin_mul_overflow(*(p_code++), gain_code, &mul);
+        __builtin_add_overflow(L_temp, mul << 1, &L_temp);
+        __builtin_mul_overflow(*(p_code++), gain_code, &mul);
+        __builtin_add_overflow(L_temp2, mul << 1, &L_temp2);
         L_temp   <<=  tempShift;
         L_temp2  <<=  tempShift;
         *(p_exc++) = (Word16)((L_temp  + 0x08000L) >> 16);
diff --git a/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp b/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
index fb72998..e3bd0e0 100644
--- a/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
+++ b/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
@@ -21,6 +21,7 @@
 
 #include <audio_utils/sndfile.h>
 #include <stdio.h>
+#include <fstream>
 
 #include "gsmamr_enc.h"
 
@@ -39,7 +40,7 @@
 
 static AmrnbEncTestEnvironment *gEnv = nullptr;
 
-class AmrnbEncoderTest : public ::testing::TestWithParam<pair<string, int32_t>> {
+class AmrnbEncoderTest : public ::testing::TestWithParam<tuple<string, int32_t, string>> {
   public:
     AmrnbEncoderTest() : mAmrEncHandle(nullptr) {}
 
@@ -53,6 +54,7 @@
     AmrNbEncState *mAmrEncHandle;
     int32_t EncodeFrames(int32_t mode, FILE *fpInput, FILE *mFpOutput,
                          int32_t frameCount = INT32_MAX);
+    bool compareBinaryFiles(const string& refFilePath, const string& outFilePath);
 };
 
 int32_t AmrnbEncoderTest::EncodeFrames(int32_t mode, FILE *fpInput, FILE *mFpOutput,
@@ -87,6 +89,42 @@
     return 0;
 }
 
+bool AmrnbEncoderTest::compareBinaryFiles(const std::string &refFilePath,
+                                          const std::string &outFilePath) {
+    std::ifstream refFile(refFilePath, std::ios::binary | std::ios::ate);
+    std::ifstream outFile(outFilePath, std::ios::binary | std::ios::ate);
+    assert(refFile.is_open() && "Error opening reference file");
+    assert(outFile.is_open() && "Error opening output file");
+
+    std::streamsize refFileSize = refFile.tellg();
+    std::streamsize outFileSize = outFile.tellg();
+    if (refFileSize != outFileSize) {
+        ALOGE("Error, File size mismatch: Reference file size = %td bytes,"
+              " but output file size = %td bytes.", refFileSize, outFileSize);
+        return false;
+    }
+
+    refFile.seekg(0, std::ios::beg);
+    outFile.seekg(0, std::ios::beg);
+    constexpr std::streamsize kBufferSize = 16 * 1024;
+    char refBuffer[kBufferSize];
+    char outBuffer[kBufferSize];
+
+    while (refFile && outFile) {
+        refFile.read(refBuffer, kBufferSize);
+        outFile.read(outBuffer, kBufferSize);
+
+        std::streamsize refBytesRead = refFile.gcount();
+        std::streamsize outBytesRead = outFile.gcount();
+
+        if (refBytesRead != outBytesRead || memcmp(refBuffer, outBuffer, refBytesRead) != 0) {
+            ALOGE("Error, File content mismatch.");
+            return false;
+        }
+    }
+    return true;
+}
+
 TEST_F(AmrnbEncoderTest, CreateAmrnbEncoderTest) {
     mAmrEncHandle = (AmrNbEncState *)malloc(sizeof(AmrNbEncState));
     ASSERT_NE(mAmrEncHandle, nullptr) << "Error in allocating memory to Codec handle";
@@ -111,7 +149,7 @@
     int32_t status = AMREncodeInit(&mAmrEncHandle->encCtx, &mAmrEncHandle->pidSyncCtx, 0);
     ASSERT_EQ(status, 0) << "Error creating AMR-NB encoder";
 
-    string inputFile = gEnv->getRes() + GetParam().first;
+    string inputFile = gEnv->getRes() + std::get<0>(GetParam());
     FILE *fpInput = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(fpInput, nullptr) << "Error opening input file " << inputFile;
 
@@ -121,7 +159,7 @@
     // Write file header.
     fwrite("#!AMR\n", 1, 6, fpOutput);
 
-    int32_t mode = GetParam().second;
+    int32_t mode = std::get<1>(GetParam());
     int32_t encodeErr = EncodeFrames(mode, fpInput, fpOutput);
     ASSERT_EQ(encodeErr, 0) << "EncodeFrames returned error for Codec mode: " << mode;
 
@@ -134,6 +172,11 @@
     free(mAmrEncHandle);
     mAmrEncHandle = nullptr;
     ALOGV("Successfully deleted encoder");
+
+    string refFilePath = gEnv->getRes() + std::get<2>(GetParam());
+    ASSERT_TRUE(compareBinaryFiles(refFilePath, OUTPUT_FILE))
+       << "Error, Binary file comparison failed: Output file " << OUTPUT_FILE
+       << " does not match the reference file " << refFilePath << ".";
 }
 
 TEST_P(AmrnbEncoderTest, ResetEncoderTest) {
@@ -142,7 +185,7 @@
     int32_t status = AMREncodeInit(&mAmrEncHandle->encCtx, &mAmrEncHandle->pidSyncCtx, 0);
     ASSERT_EQ(status, 0) << "Error creating AMR-NB encoder";
 
-    string inputFile = gEnv->getRes() + GetParam().first;
+    string inputFile = gEnv->getRes() + std::get<0>(GetParam());
     FILE *fpInput = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(fpInput, nullptr) << "Error opening input file " << inputFile;
 
@@ -152,7 +195,7 @@
     // Write file header.
     fwrite("#!AMR\n", 1, 6, fpOutput);
 
-    int32_t mode = GetParam().second;
+    int32_t mode = std::get<1>(GetParam());
     // Encode kNumFrameReset first
     int32_t encodeErr = EncodeFrames(mode, fpInput, fpOutput, kNumFrameReset);
     ASSERT_EQ(encodeErr, 0) << "EncodeFrames returned error for Codec mode: " << mode;
@@ -177,22 +220,23 @@
 
 // TODO: Add more test vectors
 INSTANTIATE_TEST_SUITE_P(AmrnbEncoderTestAll, AmrnbEncoderTest,
-                         ::testing::Values(make_pair("bbb_raw_1ch_8khz_s16le.raw", MR475),
-                                           make_pair("bbb_raw_1ch_8khz_s16le.raw", MR515),
-                                           make_pair("bbb_raw_1ch_8khz_s16le.raw", MR59),
-                                           make_pair("bbb_raw_1ch_8khz_s16le.raw", MR67),
-                                           make_pair("bbb_raw_1ch_8khz_s16le.raw", MR74),
-                                           make_pair("bbb_raw_1ch_8khz_s16le.raw", MR795),
-                                           make_pair("bbb_raw_1ch_8khz_s16le.raw", MR102),
-                                           make_pair("bbb_raw_1ch_8khz_s16le.raw", MR122),
-                                           make_pair("sinesweepraw.raw", MR475),
-                                           make_pair("sinesweepraw.raw", MR515),
-                                           make_pair("sinesweepraw.raw", MR59),
-                                           make_pair("sinesweepraw.raw", MR67),
-                                           make_pair("sinesweepraw.raw", MR74),
-                                           make_pair("sinesweepraw.raw", MR795),
-                                           make_pair("sinesweepraw.raw", MR102),
-                                           make_pair("sinesweepraw.raw", MR122)));
+    ::testing::Values(
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR475, "bbb_raw_1ch_8khz_s16le_MR475_ref.amrnb"),
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR515, "bbb_raw_1ch_8khz_s16le_MR515_ref.amrnb"),
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR59, "bbb_raw_1ch_8khz_s16le_MR59_ref.amrnb"),
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR67, "bbb_raw_1ch_8khz_s16le_MR67_ref.amrnb"),
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR74, "bbb_raw_1ch_8khz_s16le_MR74_ref.amrnb"),
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR795, "bbb_raw_1ch_8khz_s16le_MR795_ref.amrnb"),
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR102, "bbb_raw_1ch_8khz_s16le_MR102_ref.amrnb"),
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR122, "bbb_raw_1ch_8khz_s16le_MR122_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR475, "sinesweepraw_MR475_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR515, "sinesweepraw_MR515_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR59, "sinesweepraw_MR59_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR67, "sinesweepraw_MR67_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR74, "sinesweepraw_MR74_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR795, "sinesweepraw_MR795_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR102, "sinesweepraw_MR102_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR122, "sinesweepraw_MR122_ref.amrnb")));
 
 int main(int argc, char **argv) {
     gEnv = new AmrnbEncTestEnvironment();
diff --git a/media/module/codecs/amrnb/enc/test/AndroidTest.xml b/media/module/codecs/amrnb/enc/test/AndroidTest.xml
index 1509728..a325ee8 100644
--- a/media/module/codecs/amrnb/enc/test/AndroidTest.xml
+++ b/media/module/codecs/amrnb/enc/test/AndroidTest.xml
@@ -23,17 +23,17 @@
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
         <option name="target" value="host" />
         <option name="config-filename" value="AmrnbEncoderTest" />
-        <option name="version" value="1.0"/>
+        <option name="version" value="2.0"/>
     </target_preparer>
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
         <option name="push-all" value="true" />
-        <option name="media-folder-name" value="AmrnbEncoderTest-1.0" />
+        <option name="media-folder-name" value="AmrnbEncoderTest-2.0" />
         <option name="dynamic-config-module" value="AmrnbEncoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="AmrnbEncoderTest" />
-        <option name="native-test-flag" value="-P /sdcard/test/AmrnbEncoderTest-1.0/" />
+        <option name="native-test-flag" value="-P /sdcard/test/AmrnbEncoderTest-2.0/" />
     </test>
 </configuration>
diff --git a/media/module/codecs/amrnb/enc/test/DynamicConfig.xml b/media/module/codecs/amrnb/enc/test/DynamicConfig.xml
index 713667a..fdc0daa 100644
--- a/media/module/codecs/amrnb/enc/test/DynamicConfig.xml
+++ b/media/module/codecs/amrnb/enc/test/DynamicConfig.xml
@@ -15,6 +15,6 @@
 
 <dynamicConfig>
     <entry key="media_files_url">
-            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest-1.0.zip</value>
+            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest-2.0.zip</value>
     </entry>
 </dynamicConfig>
diff --git a/media/module/codecs/amrnb/enc/test/README.md b/media/module/codecs/amrnb/enc/test/README.md
index f896bd1..c7b9964 100644
--- a/media/module/codecs/amrnb/enc/test/README.md
+++ b/media/module/codecs/amrnb/enc/test/README.md
@@ -22,15 +22,15 @@
 adb push ${OUT}/data/nativetest/AmrnbEncoderTest/AmrnbEncoderTest /data/local/tmp/
 ```
 
-The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest-1.0.zip). Download, unzip and push these files into device for testing.
+The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest-2.0.zip). Download, unzip and push these files into device for testing.
 
 ```
-adb push AmrnbEncoderTest-1.0 /data/local/tmp/
+adb push AmrnbEncoderTest-2.0 /data/local/tmp/
 ```
 
 usage: AmrnbEncoderTest -P \<path_to_folder\>
 ```
-adb shell /data/local/tmp/AmrnbEncoderTest -P /data/local/tmp/AmrnbEncoderTest-1.0/
+adb shell /data/local/tmp/AmrnbEncoderTest -P /data/local/tmp/AmrnbEncoderTest-2.0/
 ```
 Alternatively, the test can also be run using atest command.
 
diff --git a/media/module/codecs/amrnb/fuzzer/Android.bp b/media/module/codecs/amrnb/fuzzer/Android.bp
index 3f29267..c5cbbe2 100644
--- a/media/module/codecs/amrnb/fuzzer/Android.bp
+++ b/media/module/codecs/amrnb/fuzzer/Android.bp
@@ -19,6 +19,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -34,9 +35,9 @@
         "amrnb_dec_fuzzer.cpp",
     ],
     static_libs: [
-        "libstagefright_amrnbdec",
-        "libstagefright_amrnb_common",
         "liblog",
+        "libstagefright_amrnb_common",
+        "libstagefright_amrnbdec",
     ],
     target: {
         darwin: {
diff --git a/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp b/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
index 2cc88ce..c0e032f 100644
--- a/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
+++ b/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
@@ -23,6 +23,7 @@
 #include <audio_utils/sndfile.h>
 #include <memory>
 #include <stdio.h>
+#include <fstream>
 
 #include "pvamrwbdecoder.h"
 #include "pvamrwbdecoder_api.h"
@@ -44,7 +45,7 @@
 
 static AmrwbDecTestEnvironment *gEnv = nullptr;
 
-class AmrwbDecoderTest : public ::testing::TestWithParam<string> {
+class AmrwbDecoderTest : public ::testing::TestWithParam<std::tuple<string, string>> {
   public:
     AmrwbDecoderTest() : mFpInput(nullptr) {}
 
@@ -59,6 +60,7 @@
     int32_t DecodeFrames(int16_t *decoderCookie, void *decoderBuf, SNDFILE *outFileHandle,
                          int32_t frameCount = INT32_MAX);
     SNDFILE *openOutputFile(SF_INFO *sfInfo);
+    bool compareBinaryFiles(const std::string& refFilePath, const std::string& outFilePath);
 };
 
 SNDFILE *AmrwbDecoderTest::openOutputFile(SF_INFO *sfInfo) {
@@ -120,6 +122,42 @@
     return 0;
 }
 
+bool AmrwbDecoderTest::compareBinaryFiles(const std::string &refFilePath,
+                                          const std::string &outFilePath) {
+    std::ifstream refFile(refFilePath, std::ios::binary | std::ios::ate);
+    std::ifstream outFile(outFilePath, std::ios::binary | std::ios::ate);
+    assert(refFile.is_open() && "Error opening reference file " + refFilePath);
+    assert(outFile.is_open() && "Error opening output file " + outFilePath);
+
+    std::streamsize refFileSize = refFile.tellg();
+    std::streamsize outFileSize = outFile.tellg();
+    if (refFileSize != outFileSize) {
+        ALOGE("Error, File size mismatch: Reference file size = %td bytes, "
+               "but output file size = %td bytes", refFileSize, outFileSize);
+        return false;
+    }
+
+    refFile.seekg(0, std::ios::beg);
+    outFile.seekg(0, std::ios::beg);
+    constexpr std::streamsize kBufferSize = 16 * 1024;
+    char refBuffer[kBufferSize];
+    char outBuffer[kBufferSize];
+
+    while (refFile && outFile) {
+        refFile.read(refBuffer, kBufferSize);
+        outFile.read(outBuffer, kBufferSize);
+
+        std::streamsize refBytesRead = refFile.gcount();
+        std::streamsize outBytesRead = outFile.gcount();
+
+        if (refBytesRead != outBytesRead || memcmp(refBuffer, outBuffer, refBytesRead) != 0) {
+            ALOGE("Error, File content mismatch.");
+            return false;
+        }
+    }
+    return true;
+}
+
 TEST_F(AmrwbDecoderTest, MultiCreateAmrwbDecoderTest) {
     uint32_t memRequirements = pvDecoder_AmrWbMemRequirements();
     std::unique_ptr<char[]> decoderBuf(new char[memRequirements]);
@@ -147,7 +185,7 @@
     pvDecoder_AmrWb_Init(&amrHandle, decoderBuf.get(), &decoderCookie);
     ASSERT_NE(amrHandle, nullptr) << "Failed to initialize decoder";
 
-    string inputFile = gEnv->getRes() + GetParam();
+    string inputFile = gEnv->getRes() + std::get<0>(GetParam());
     mFpInput = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(mFpInput, nullptr) << "Error opening input file " << inputFile;
 
@@ -160,6 +198,10 @@
     ASSERT_EQ(decoderErr, 0) << "DecodeFrames returned error";
 
     sf_close(outFileHandle);
+    string refFilePath = gEnv->getRes() + std::get<1>(GetParam());
+    ASSERT_TRUE(compareBinaryFiles(refFilePath, OUTPUT_FILE))
+    << "Error, Binary file comparison failed: Output file "
+    << OUTPUT_FILE << " does not match the reference file " << refFilePath << ".";
 }
 
 TEST_P(AmrwbDecoderTest, ResetDecoderTest) {
@@ -173,7 +215,7 @@
     pvDecoder_AmrWb_Init(&amrHandle, decoderBuf.get(), &decoderCookie);
     ASSERT_NE(amrHandle, nullptr) << "Failed to initialize decoder";
 
-    string inputFile = gEnv->getRes() + GetParam();
+    string inputFile = gEnv->getRes() + std::get<0>(GetParam());
     mFpInput = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(mFpInput, nullptr) << "Error opening input file " << inputFile;
 
@@ -198,8 +240,21 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(AmrwbDecoderTestAll, AmrwbDecoderTest,
-                         ::testing::Values(("bbb_amrwb_1ch_14kbps_16000hz.amrwb"),
-                                           ("bbb_16000hz_1ch_9kbps_amrwb_30sec.amrwb")));
+                         ::testing::Values(std::make_tuple(
+                                                "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
+                                                "bbb_amrwb_1ch_14kbps_16000hz_ref.pcm"),
+                                           std::make_tuple(
+                                                "bbb_16000hz_1ch_9kbps_amrwb_30sec.amrwb",
+                                                "bbb_16000hz_1ch_9kbps_amrwb_30sec_ref.pcm"),
+                                           std::make_tuple(
+                                                "bbb_16kHz_1ch_16bps_1sec.amrwb",
+                                                "bbb_16kHz_1ch_16bps_1sec_ref.pcm"),
+                                           std::make_tuple(
+                                                "bbb_16kHz_1ch_6.6bps_3sec.amrwb",
+                                                "bbb_16kHz_1ch_6.6bps_3sec_ref.pcm"),
+                                           std::make_tuple(
+                                                "bbb_16kHz_1ch_23.85bps_3sec.amrwb",
+                                                "bbb_16kHz_1ch_23.85bps_3sec_ref.pcm")));
 
 int main(int argc, char **argv) {
     gEnv = new AmrwbDecTestEnvironment();
diff --git a/media/module/codecs/amrwb/dec/test/AndroidTest.xml b/media/module/codecs/amrwb/dec/test/AndroidTest.xml
index 392df03..dbd1407 100644
--- a/media/module/codecs/amrwb/dec/test/AndroidTest.xml
+++ b/media/module/codecs/amrwb/dec/test/AndroidTest.xml
@@ -23,17 +23,17 @@
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
         <option name="target" value="host" />
         <option name="config-filename" value="AmrwbDecoderTest" />
-        <option name="version" value="1.0"/>
+        <option name="version" value="2.0"/>
     </target_preparer>
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
         <option name="push-all" value="true" />
-        <option name="media-folder-name" value="AmrwbDecoderTest-1.0" />
+        <option name="media-folder-name" value="AmrwbDecoderTest-2.0" />
         <option name="dynamic-config-module" value="AmrwbDecoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="AmrwbDecoderTest" />
-        <option name="native-test-flag" value="-P /sdcard/test/AmrwbDecoderTest-1.0/" />
+        <option name="native-test-flag" value="-P /sdcard/test/AmrwbDecoderTest-2.0/" />
     </test>
 </configuration>
diff --git a/media/module/codecs/amrwb/dec/test/DynamicConfig.xml b/media/module/codecs/amrwb/dec/test/DynamicConfig.xml
index 506cc3d..52453ee 100644
--- a/media/module/codecs/amrwb/dec/test/DynamicConfig.xml
+++ b/media/module/codecs/amrwb/dec/test/DynamicConfig.xml
@@ -15,6 +15,6 @@
 
 <dynamicConfig>
     <entry key="media_files_url">
-            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest-1.0.zip</value>
+            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest-2.0.zip</value>
     </entry>
 </dynamicConfig>
diff --git a/media/module/codecs/amrwb/dec/test/README.md b/media/module/codecs/amrwb/dec/test/README.md
index 8e77456..ed76051 100644
--- a/media/module/codecs/amrwb/dec/test/README.md
+++ b/media/module/codecs/amrwb/dec/test/README.md
@@ -22,15 +22,15 @@
 adb push ${OUT}/data/nativetest/AmrwbDecoderTest/AmrwbDecoderTest /data/local/tmp/
 ```
 
-The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest-1.0.zip). Download, unzip and push these files into device for testing.
+The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest-2.0.zip). Download, unzip and push these files into device for testing.
 
 ```
-adb push AmrwbDecoderTest-1.0 /data/local/tmp/
+adb push AmrwbDecoderTest-2.0 /data/local/tmp/
 ```
 
 usage: AmrwbDecoderTest -P \<path_to_folder\>
 ```
-adb shell /data/local/tmp/AmrwbDecoderTest -P /data/local/tmp/AmrwbDecoderTest-1.0/
+adb shell /data/local/tmp/AmrwbDecoderTest -P /data/local/tmp/AmrwbDecoderTest-2.0/
 ```
 Alternatively, the test can also be run using atest command.
 
diff --git a/media/module/codecs/amrwb/enc/Android.bp b/media/module/codecs/amrwb/enc/Android.bp
index 04f36b5..6ca3b6e 100644
--- a/media/module/codecs/amrwb/enc/Android.bp
+++ b/media/module/codecs/amrwb/enc/Android.bp
@@ -96,8 +96,6 @@
                 "-DARM",
                 "-DARMV7",
                 "-DASM_OPT",
-                // don't actually generate neon instructions, see bug 26932980
-                "-mfpu=vfpv3",
             ],
             local_include_dirs: [
                 "src/asm/ARMV7",
diff --git a/media/module/codecs/amrwb/enc/inc/basic_op.h b/media/module/codecs/amrwb/enc/inc/basic_op.h
index 80ad7f1..8e740b4 100644
--- a/media/module/codecs/amrwb/enc/inc/basic_op.h
+++ b/media/module/codecs/amrwb/enc/inc/basic_op.h
@@ -569,13 +569,10 @@
 static_vo Word32 L_add (Word32 L_var1, Word32 L_var2)
 {
     Word32 L_var_out;
-    L_var_out = L_var1 + L_var2;
-    if (((L_var1 ^ L_var2) & MIN_32) == 0)
+    if (__builtin_add_overflow(L_var1, L_var2, &L_var_out))
     {
-        if ((L_var_out ^ L_var1) & MIN_32)
-        {
-            L_var_out = (L_var1 < 0) ? MIN_32 : MAX_32;
-        }
+        // saturating...
+        L_var_out = (L_var1 < 0) ? MIN_32 : MAX_32;
     }
     return (L_var_out);
 }
@@ -616,13 +613,10 @@
 static_vo Word32 L_sub (Word32 L_var1, Word32 L_var2)
 {
     Word32 L_var_out;
-    L_var_out = L_var1 - L_var2;
-    if (((L_var1 ^ L_var2) & MIN_32) != 0)
+    if (__builtin_sub_overflow(L_var1, L_var2, &L_var_out))
     {
-        if ((L_var_out ^ L_var1) & MIN_32)
-        {
-            L_var_out = (L_var1 < 0L) ? MIN_32 : MAX_32;
-        }
+        // saturating...
+        L_var_out = (L_var1 < 0L) ? MIN_32 : MAX_32;
     }
     return (L_var_out);
 }
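
The rewritten L_add() and L_sub() above swap the hand-rolled sign-bit overflow tests for the compiler overflow builtins: __builtin_add_overflow() computes the wrapped two's-complement result without signed-overflow undefined behaviour and returns true when the exact result did not fit, so the saturation clamp only runs on that path. A minimal standalone sketch of the same pattern, using std::numeric_limits in place of the codec's MIN_32/MAX_32 constants:

```
#include <cstdint>
#include <limits>

// Saturating 32-bit add in the style of the patched L_add(): clamp only when
// the builtin reports that the exact sum did not fit in int32_t.
int32_t saturating_add(int32_t a, int32_t b) {
    int32_t out;
    if (__builtin_add_overflow(a, b, &out)) {
        // Overflow requires both operands to share a sign, so either operand's
        // sign tells us which bound to clamp to.
        out = (a < 0) ? std::numeric_limits<int32_t>::min()
                      : std::numeric_limits<int32_t>::max();
    }
    return out;
}
```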
diff --git a/media/module/codecs/amrwb/enc/src/az_isp.c b/media/module/codecs/amrwb/enc/src/az_isp.c
index d7074f0..22a5c25 100644
--- a/media/module/codecs/amrwb/enc/src/az_isp.c
+++ b/media/module/codecs/amrwb/enc/src/az_isp.c
@@ -248,10 +248,10 @@
         b1_h = b0_h;
     }
 
-    t0 = ((b1_h * x)<<1) + (((b1_l * x)>>15)<<1);
-    t0 += (b2_h * (-32768))<<1;             /* t0 = x*b1 - b2          */
-    t0 -= (b2_l << 1);
-    t0 += (f[n] << 12);                     /* t0 = x*b1 - b2 + f[i]/2 */
+    __builtin_add_overflow(((b1_h * x)<<1), (((b1_l * x)>>15)<<1), &t0);
+    __builtin_add_overflow(t0, (b2_h * (-32768))<<1, &t0);   /* t0 = x*b1 - b2          */
+    __builtin_sub_overflow(t0, (b2_l << 1), &t0);
+    __builtin_add_overflow(t0, (f[n] << 12), &t0);     /* t0 = x*b1 - b2 + f[i]/2 */
 
     t0 = L_shl2(t0, 6);                     /* Q24 to Q30 with saturation */
 
diff --git a/media/module/codecs/amrwb/enc/src/syn_filt.c b/media/module/codecs/amrwb/enc/src/syn_filt.c
index 7eba12f..40398f5 100644
--- a/media/module/codecs/amrwb/enc/src/syn_filt.c
+++ b/media/module/codecs/amrwb/enc/src/syn_filt.c
@@ -109,38 +109,38 @@
         p2 = &sig_lo[i - 1];
         p3 = &sig_hi[i - 1];
 
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
-        L_tmp  -= vo_mult32((*p2--), (*p1));
-        L_tmp1 -= vo_mult32((*p3--), (*p1++));
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
+        __builtin_sub_overflow(L_tmp, vo_mult32((*p2--), (*p1)), &L_tmp);
+        __builtin_sub_overflow(L_tmp1, vo_mult32((*p3--), (*p1++)), &L_tmp1);
 
         L_tmp = L_tmp >> 11;
         L_tmp += vo_L_mult(exc[i], a0);
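
The az_isp.c and syn_filt.c hunks above use the same builtins differently: the returned overflow flag is ignored, so the only effect is that the accumulation wraps in well-defined two's-complement arithmetic instead of invoking undefined behaviour; no saturation is applied. A one-function sketch of that idiom:

```
#include <cstdint>

// Wrapping (not saturating) subtract: the builtin's bool result is discarded,
// so on overflow 'out' simply holds the wrapped value.
int32_t wrapping_sub(int32_t acc, int32_t term) {
    int32_t out;
    (void)__builtin_sub_overflow(acc, term, &out);
    return out;
}
```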
diff --git a/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp b/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp
index 1a6ee27..dc9c1b1 100644
--- a/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp
+++ b/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp
@@ -20,6 +20,7 @@
 #include <utils/Log.h>
 
 #include <stdio.h>
+#include <fstream>
 
 #include "cmnMemory.h"
 #include "voAMRWB.h"
@@ -34,13 +35,15 @@
 
 static AmrwbEncTestEnvironment *gEnv = nullptr;
 
-class AmrwbEncoderTest : public ::testing::TestWithParam<tuple<string, int32_t, VOAMRWBFRAMETYPE>> {
+class AmrwbEncoderTest : public ::testing::TestWithParam<tuple<string, int32_t,
+                                                               VOAMRWBFRAMETYPE, string>> {
   public:
     AmrwbEncoderTest() : mEncoderHandle(nullptr) {
-        tuple<string, int32_t, VOAMRWBFRAMETYPE> params = GetParam();
+        tuple<string, int32_t, VOAMRWBFRAMETYPE, string> params = GetParam();
         mInputFile = gEnv->getRes() + get<0>(params);
         mMode = get<1>(params);
         mFrameType = get<2>(params);
+        refFilePath = gEnv->getRes() + get<3>(params);
         mMemOperator.Alloc = cmnMemAlloc;
         mMemOperator.Copy = cmnMemCopy;
         mMemOperator.Free = cmnMemFree;
@@ -66,8 +69,47 @@
     VO_CODEC_INIT_USERDATA mUserData;
     VO_HANDLE mEncoderHandle;
     int32_t mMode;
+    string refFilePath;
+
+    bool compareBinaryFiles(const string& refFilePath, const string& outFilePath);
 };
 
+bool AmrwbEncoderTest::compareBinaryFiles(const std::string &refFilePath,
+                                          const std::string &outFilePath) {
+    std::ifstream refFile(refFilePath, std::ios::binary | std::ios::ate);
+    std::ifstream outFile(outFilePath, std::ios::binary | std::ios::ate);
+    assert(refFile.is_open() && "Error opening reference file " + refFilePath);
+    assert(outFile.is_open() && "Error opening output file " + outFilePath);
+
+    std::streamsize refFileSize = refFile.tellg();
+    std::streamsize outFileSize = outFile.tellg();
+    if (refFileSize != outFileSize) {
+        ALOGE("Error, File size mismatch: Reference file size = %td bytes, "
+               "but output file size = %td bytes", refFileSize, outFileSize);
+        return false;
+    }
+
+    refFile.seekg(0, std::ios::beg);
+    outFile.seekg(0, std::ios::beg);
+    constexpr std::streamsize kBufferSize = 16 * 1024;
+    char refBuffer[kBufferSize];
+    char outBuffer[kBufferSize];
+
+    while (refFile && outFile) {
+        refFile.read(refBuffer, kBufferSize);
+        outFile.read(outBuffer, kBufferSize);
+
+        std::streamsize refBytesRead = refFile.gcount();
+        std::streamsize outBytesRead = outFile.gcount();
+
+        if (refBytesRead != outBytesRead || memcmp(refBuffer, outBuffer, refBytesRead) != 0) {
+            ALOGE("Error, File content mismatch.");
+            return false;
+        }
+    }
+    return true;
+}
+
 TEST_P(AmrwbEncoderTest, CreateAmrwbEncoderTest) {
     int32_t status = voGetAMRWBEncAPI(&mApiHandle);
     ASSERT_EQ(status, VO_ERR_NONE) << "Failed to get api handle";
@@ -152,38 +194,69 @@
     if (fpOutput) {
         fclose(fpOutput);
     }
+
+    ASSERT_TRUE(compareBinaryFiles(refFilePath, OUTPUT_FILE))
+    << "Error, Binary file comparison failed: Output file "
+    << OUTPUT_FILE << " does not match the reference file " << refFilePath << ".";
 }
 
 INSTANTIATE_TEST_SUITE_P(
-        AmrwbEncoderTestAll, AmrwbEncoderTest,
-        ::testing::Values(
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD66, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD885, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1265, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1425, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1585, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1825, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1985, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2305, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2385, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD66, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD885, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1265, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1425, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1585, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1825, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1985, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2305, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2385, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD66, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD885, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1265, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1425, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1585, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1825, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1985, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2305, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2385, VOAMRWB_RFC3267)));
+    AmrwbEncoderTestAll, AmrwbEncoderTest,
+    ::testing::Values(
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD66, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD66_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD885, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD885_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1265, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1265_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1425, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1425_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1585, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1585_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1825, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1825_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1985, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1985_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2305, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD2305_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2385, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD2385_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD66, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD66_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD885, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD885_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1265, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1265_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1425, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1425_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1585, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1585_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1825, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1825_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1985, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1985_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2305, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD2305_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2385, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD2385_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD66, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD66_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD885, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD885_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1265, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1265_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1425, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1425_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1585, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1585_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1825, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1825_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1985, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1985_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2305, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD2305_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2385, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD2385_VOAMRWB_RFC3267_ref.amrwb")));
 
 int main(int argc, char **argv) {
     gEnv = new AmrwbEncTestEnvironment();
diff --git a/media/module/codecs/amrwb/enc/test/AndroidTest.xml b/media/module/codecs/amrwb/enc/test/AndroidTest.xml
index 8822cb2..1f4121f 100644
--- a/media/module/codecs/amrwb/enc/test/AndroidTest.xml
+++ b/media/module/codecs/amrwb/enc/test/AndroidTest.xml
@@ -23,17 +23,17 @@
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
         <option name="target" value="host" />
         <option name="config-filename" value="AmrwbEncoderTest" />
-        <option name="version" value="1.0"/>
+        <option name="version" value="2.0"/>
     </target_preparer>
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
         <option name="push-all" value="true" />
-        <option name="media-folder-name" value="AmrwbEncoderTest-1.0" />
+        <option name="media-folder-name" value="AmrwbEncoderTest-2.0" />
         <option name="dynamic-config-module" value="AmrwbEncoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="AmrwbEncoderTest" />
-        <option name="native-test-flag" value="-P /sdcard/test/AmrwbEncoderTest-1.0/" />
+        <option name="native-test-flag" value="-P /sdcard/test/AmrwbEncoderTest-2.0/" />
     </test>
 </configuration>
diff --git a/media/module/codecs/amrwb/enc/test/DynamicConfig.xml b/media/module/codecs/amrwb/enc/test/DynamicConfig.xml
index a0b6218..59701ea 100644
--- a/media/module/codecs/amrwb/enc/test/DynamicConfig.xml
+++ b/media/module/codecs/amrwb/enc/test/DynamicConfig.xml
@@ -15,6 +15,6 @@
 
 <dynamicConfig>
     <entry key="media_files_url">
-            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest-1.0.zip</value>
+            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest-2.0.zip</value>
     </entry>
 </dynamicConfig>
diff --git a/media/module/codecs/amrwb/enc/test/README.md b/media/module/codecs/amrwb/enc/test/README.md
index 3b9cc39..ea2c31e 100644
--- a/media/module/codecs/amrwb/enc/test/README.md
+++ b/media/module/codecs/amrwb/enc/test/README.md
@@ -22,7 +22,7 @@
 adb push ${OUT}/data/nativetest/AmrwbEncoderTest/AmrwbEncoderTest /data/local/tmp/
 ```
 
-The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest-1.0.zip). Download, unzip and push these files into device for testing.
+The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest-2.0.zip). Download, unzip and push these files into device for testing.
 
 ```
-adb push AmrwbEncoderTest-1.0 /data/local/tmp/
+adb push AmrwbEncoderTest-2.0 /data/local/tmp/
@@ -30,7 +30,7 @@
 
 usage: AmrwbEncoderTest -P \<path_to_folder\>
 ```
-adb shell /data/local/tmp/AmrwbEncoderTest -P /data/local/tmp/AmrwbEncoderTest-1.0/
+adb shell /data/local/tmp/AmrwbEncoderTest -P /data/local/tmp/AmrwbEncoderTest-2.0/
 ```
 Alternatively, the test can also be run using atest command.
 
diff --git a/media/module/codecserviceregistrant/Android.bp b/media/module/codecserviceregistrant/Android.bp
index becb98a..1d1dd71 100644
--- a/media/module/codecserviceregistrant/Android.bp
+++ b/media/module/codecserviceregistrant/Android.bp
@@ -43,6 +43,8 @@
         "libcodec2_soft_flacdec",
         "libcodec2_soft_flacenc",
         "libcodec2_soft_gsmdec",
+        "libcodec2_soft_apvenc",
+        "libcodec2_soft_apvdec",
     ],
 }
 
@@ -55,6 +57,8 @@
         "com.android.media.swcodec",
     ],
 
+    export_include_dirs: ["include"],
+
     srcs: [
         "CodecServiceRegistrant.cpp",
     ],
diff --git a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
index 6df9dc8..433b5e9 100644
--- a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
+++ b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
@@ -40,7 +40,7 @@
 #include <codec2/aidl/ComponentStore.h>
 #include <codec2/aidl/ParamTypes.h>
 
-#include <media/CodecServiceRegistrant.h>
+#include <codecserviceregistrant/CodecServiceRegistrant.h>
 
 namespace /* unnamed */ {
 
@@ -775,13 +775,12 @@
     return nullptr;
 }
 
-extern "C" void RegisterCodecServices() {
+/**
+ * This function encapsulates the core logic required to register codec services,
+ * separated from threadpool management to avoid timeouts when called by the fuzzer.
+ */
+static void RegisterCodecServicesWithExistingThreadpool() {
     const bool aidlSelected = c2_aidl::utils::IsSelected();
-    constexpr int kThreadCount = 64;
-    ABinderProcess_setThreadPoolMaxThreadCount(kThreadCount);
-    ABinderProcess_startThreadPool();
-    ::android::hardware::configureRpcThreadpool(kThreadCount, false);
-
     LOG(INFO) << "Creating software Codec2 service...";
     std::shared_ptr<C2ComponentStore> store =
         android::GetCodec2PlatformComponentStore();
@@ -791,47 +790,6 @@
     }
 
     using namespace ::android::hardware::media::c2;
-
-    if (!ionPropertiesDefined()) {
-        using IComponentStore =
-            ::android::hardware::media::c2::V1_0::IComponentStore;
-        std::string const preferredStoreName = "default";
-        if (aidlSelected) {
-            std::shared_ptr<c2_aidl::IComponentStore> preferredStore;
-            if (__builtin_available(android __ANDROID_API_S__, *)) {
-                std::string instanceName = ::android::base::StringPrintf(
-                        "%s/%s", c2_aidl::IComponentStore::descriptor, preferredStoreName.c_str());
-                if (AServiceManager_isDeclared(instanceName.c_str())) {
-                    preferredStore = c2_aidl::IComponentStore::fromBinder(::ndk::SpAIBinder(
-                            AServiceManager_waitForService(instanceName.c_str())));
-                }
-            }
-            if (preferredStore) {
-                ::android::SetPreferredCodec2ComponentStore(
-                        std::make_shared<H2C2ComponentStore>(preferredStore));
-                LOG(INFO) <<
-                        "Preferred Codec2 AIDL store is set to \"" <<
-                        preferredStoreName << "\".";
-            } else {
-                LOG(INFO) <<
-                        "Preferred Codec2 AIDL store is defaulted to \"software\".";
-            }
-        } else {
-            sp<IComponentStore> preferredStore =
-                IComponentStore::getService(preferredStoreName.c_str());
-            if (preferredStore) {
-                ::android::SetPreferredCodec2ComponentStore(
-                        std::make_shared<H2C2ComponentStore>(preferredStore));
-                LOG(INFO) <<
-                        "Preferred Codec2 HIDL store is set to \"" <<
-                        preferredStoreName << "\".";
-            } else {
-                LOG(INFO) <<
-                        "Preferred Codec2 HIDL store is defaulted to \"software\".";
-            }
-        }
-    }
-
     bool registered = false;
     const std::string aidlServiceName =
         std::string(c2_aidl::IComponentStore::descriptor) + "/software";
@@ -877,11 +835,61 @@
                      " so it is not being registered with hwservicemanager.";
     }
 
+    // Preferred store must be set after the store is registered to ensure that
+    // the correct preferred store is set.
+    if (!ionPropertiesDefined()) {
+        using IComponentStore =
+            ::android::hardware::media::c2::V1_0::IComponentStore;
+        std::string const preferredStoreName = "default";
+        if (aidlSelected) {
+            std::shared_ptr<c2_aidl::IComponentStore> preferredStore;
+            if (__builtin_available(android __ANDROID_API_S__, *)) {
+                std::string instanceName = ::android::base::StringPrintf(
+                        "%s/%s", c2_aidl::IComponentStore::descriptor, preferredStoreName.c_str());
+                if (AServiceManager_isDeclared(instanceName.c_str())) {
+                    preferredStore = c2_aidl::IComponentStore::fromBinder(::ndk::SpAIBinder(
+                            AServiceManager_waitForService(instanceName.c_str())));
+                }
+            }
+            if (preferredStore) {
+                ::android::SetPreferredCodec2ComponentStore(
+                        std::make_shared<H2C2ComponentStore>(preferredStore));
+                LOG(INFO) <<
+                        "Preferred Codec2 AIDL store is set to \"" <<
+                        preferredStoreName << "\".";
+            } else {
+                LOG(INFO) <<
+                        "Preferred Codec2 AIDL store is defaulted to \"software\".";
+            }
+        } else {
+            sp<IComponentStore> preferredStore =
+                IComponentStore::getService(preferredStoreName.c_str());
+            if (preferredStore) {
+                ::android::SetPreferredCodec2ComponentStore(
+                        std::make_shared<H2C2ComponentStore>(preferredStore));
+                LOG(INFO) <<
+                        "Preferred Codec2 HIDL store is set to \"" <<
+                        preferredStoreName << "\".";
+            } else {
+                LOG(INFO) <<
+                        "Preferred Codec2 HIDL store is defaulted to \"software\".";
+            }
+        }
+    }
+
     if (registered) {
         LOG(INFO) << "Software Codec2 service created and registered.";
     }
+}
+
+extern "C" void RegisterCodecServices() {
+    constexpr int kThreadCount = 64;
+    ABinderProcess_setThreadPoolMaxThreadCount(kThreadCount);
+    ABinderProcess_startThreadPool();
+    ::android::hardware::configureRpcThreadpool(kThreadCount, false);
+
+    RegisterCodecServicesWithExistingThreadpool();
 
     ABinderProcess_joinThreadPool();
     ::android::hardware::joinRpcThreadpool();
 }
-
diff --git a/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp b/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
index 4868e0c..0baf1ca 100644
--- a/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
+++ b/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
@@ -13,6 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+#include <codecserviceregistrant/CodecServiceRegistrant.h>
+
 #include "../CodecServiceRegistrant.cpp"
 #include "fuzzer/FuzzedDataProvider.h"
 #include <C2Config.h>
@@ -166,9 +169,9 @@
 void CodecServiceRegistrantFuzzer::process(const uint8_t *data, size_t size) {
   mFDP = new FuzzedDataProvider(data, size);
   invokeH2C2ComponentStore();
-  /** RegisterCodecServices is called here to improve code coverage */
-  /** as currently it is not called by codecServiceRegistrant       */
-  RegisterCodecServices();
+  /** RegisterCodecServicesWithExistingThreadpool() is called here (rather than
+   * RegisterCodecServices()) to improve code coverage without joining the
+   * binder threadpools, which would stall the fuzzer. */
+  RegisterCodecServicesWithExistingThreadpool();
   delete mFDP;
 }
 
diff --git a/media/libmedia/include/media/CodecServiceRegistrant.h b/media/module/codecserviceregistrant/include/codecserviceregistrant/CodecServiceRegistrant.h
similarity index 77%
rename from media/libmedia/include/media/CodecServiceRegistrant.h
rename to media/module/codecserviceregistrant/include/codecserviceregistrant/CodecServiceRegistrant.h
index e0af781..1c6f71f 100644
--- a/media/libmedia/include/media/CodecServiceRegistrant.h
+++ b/media/module/codecserviceregistrant/include/codecserviceregistrant/CodecServiceRegistrant.h
@@ -18,6 +18,13 @@
 
 #define CODEC_SERVICE_REGISTRANT_H_
 
+/**
+ * This function starts the threadpool, calls the registration logic
+ * encapsulated in RegisterCodecServicesWithExistingThreadpool(), and
+ * then stops the threadpool.
+ */
+extern "C" void RegisterCodecServices();
+
 typedef void (*RegisterCodecServicesFunc)();
 
 #endif  // CODEC_SERVICE_REGISTRANT_H_
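
For context on how the relocated header is meant to be consumed, here is a hypothetical loader sketch: the RegisterCodecServicesFunc typedef and the exported symbol name come from the header above, while the shared-library name is an assumption and not taken from this patch.

```
#include <dlfcn.h>

#include <codecserviceregistrant/CodecServiceRegistrant.h>

void loadAndRegisterSoftwareCodecs() {
    // Library name is assumed for illustration only.
    void *lib = dlopen("libmedia_codecserviceregistrant.so", RTLD_NOW | RTLD_LOCAL);
    if (lib == nullptr) {
        return;
    }
    auto registerFn = reinterpret_cast<RegisterCodecServicesFunc>(
            dlsym(lib, "RegisterCodecServices"));
    if (registerFn != nullptr) {
        // Starts the binder threadpools, registers the software store, and
        // joins the pools, so a successful call does not return.
        registerFn();
    }
}
```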
diff --git a/media/module/extractors/Android.bp b/media/module/extractors/Android.bp
index f654ecd..cbaabe3 100644
--- a/media/module/extractors/Android.bp
+++ b/media/module/extractors/Android.bp
@@ -28,6 +28,10 @@
         "liblog",
     ],
 
+    static_libs: [
+        "libstagefright_metadatautils",
+    ],
+
     // extractors are expected to run on Q(29)
     min_sdk_version: "29",
     apex_available: [
@@ -56,6 +60,7 @@
                 "libutils",
                 "libmediandk_format",
                 "libmedia_ndkformatpriv",
+                "libstagefright_metadatautils",
             ],
         },
     },
@@ -68,3 +73,27 @@
         ],
     },
 }
+
+aconfig_declarations {
+    name: "android.media.extractor.flags-aconfig",
+    package: "com.android.media.extractor.flags",
+    container: "com.android.media",
+    srcs: ["extractor.aconfig"],
+}
+
+java_aconfig_library {
+    name: "android.media.extractor.flags-aconfig-java",
+    aconfig_declarations: "android.media.extractor.flags-aconfig",
+    defaults: ["framework-minus-apex-aconfig-java-defaults"],
+}
+
+cc_aconfig_library {
+    name: "android.media.extractor.flags-aconfig-cc",
+    aconfig_declarations: "android.media.extractor.flags-aconfig",
+    host_supported: true,
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+}
diff --git a/media/module/extractors/extractor.aconfig b/media/module/extractors/extractor.aconfig
new file mode 100644
index 0000000..a7d3397
--- /dev/null
+++ b/media/module/extractors/extractor.aconfig
@@ -0,0 +1,23 @@
+# Media Extractor flags.
+#
+# !!! Please add flags in alphabetical order. !!!
+package: "com.android.media.extractor.flags"
+container: "com.android.media"
+
+flag {
+    name: "extractor_mp4_enable_apv"
+    is_exported: true
+    is_fixed_read_only: true
+    namespace: "media_solutions"
+    description: "Enable APV support in mp4 extractor."
+    bug: "370061501"
+}
+
+flag {
+    name: "extractor_sniff_midi_optimizations"
+    is_exported: true
+    is_fixed_read_only: true
+    namespace: "media_solutions"
+    description: "Enable SniffMidi optimizations."
+    bug: "359920208"
+}
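
These flags are read from native code through the cc_aconfig_library declared earlier; a minimal sketch of the generated accessor call, matching the header and namespace used later in this patch by MidiExtractor.cpp and MPEG4Extractor.cpp:

```
#include <com_android_media_extractor_flags.h>

// The generated accessor returns the fixed, read-only flag value; callers
// branch on it to choose between the optimized and legacy code paths.
bool useOptimizedMidiSniffer() {
    return com::android::media::extractor::flags::extractor_sniff_midi_optimizations();
}
```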
diff --git a/media/module/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
index 7a49d8e..f3da389 100644
--- a/media/module/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -24,6 +24,7 @@
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
     default_applicable_licenses: ["frameworks_av_license"],
+    default_team: "trendy_team_android_media_solutions_playback",
 }
 
 cc_defaults {
@@ -128,9 +129,16 @@
     ],
 
     static_libs: [
+        "android.media.extractor.flags-aconfig-cc",
+        "libaconfig_storage_read_api_cc",
         "libstagefright_id3",
         "libstagefright_esds",
         "libmp4extractor",
+        "libstagefright_metadatautils",
+    ],
+
+    shared_libs: [
+        "server_configurable_flags",
     ],
 
     dictionary: "mp4_extractor_fuzzer.dict",
@@ -301,12 +309,18 @@
     ],
 
     static_libs: [
+        "android.media.extractor.flags-aconfig-cc",
+        "libaconfig_storage_read_api_cc",
         "libsonivox",
         "libmedia_midiiowrapper",
         "libmidiextractor",
         "libwatchdog",
     ],
 
+    shared_libs: [
+        "server_configurable_flags",
+    ],
+
     dictionary: "midi_extractor_fuzzer.dict",
 
     host_supported: true,
diff --git a/media/module/extractors/midi/Android.bp b/media/module/extractors/midi/Android.bp
index feabf9e..0eb34fc 100644
--- a/media/module/extractors/midi/Android.bp
+++ b/media/module/extractors/midi/Android.bp
@@ -32,6 +32,8 @@
     ],
 
     static_libs: [
+        "android.media.extractor.flags-aconfig-cc",
+        "libaconfig_storage_read_api_cc",
         "libmedia_midiiowrapper",
         "libsonivoxwithoutjet",
         "libstagefright_foundation",
@@ -40,6 +42,7 @@
 
     shared_libs: [
         "libbase",
+        "server_configurable_flags",
     ],
 
     host_supported: true,
diff --git a/media/module/extractors/midi/MidiExtractor.cpp b/media/module/extractors/midi/MidiExtractor.cpp
index 167cc40..98d7716 100644
--- a/media/module/extractors/midi/MidiExtractor.cpp
+++ b/media/module/extractors/midi/MidiExtractor.cpp
@@ -20,6 +20,7 @@
 
 #include "MidiExtractor.h"
 
+#include <com_android_media_extractor_flags.h>
 #include <media/MidiIoWrapper.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaBufferGroup.h>
@@ -323,10 +324,97 @@
     return AMediaFormat_copy(meta, mFileMetadata);
 }
 
-// Sniffer
+static bool startsWith(const uint8_t *buf, size_t size, const char *pattern, size_t patternSize) {
+    if (size < patternSize) {
+        return false;
+    }
+    return (memcmp(buf, pattern, patternSize) == 0);
+}
 
-bool SniffMidi(CDataSource *source, float *confidence)
-{
+static bool isValidMThd(const uint8_t *buf, size_t size) {
+    return startsWith(buf, size, "MThd", 4);
+}
+
+static bool isValidXmf(const uint8_t *buf, size_t size) {
+    return startsWith(buf, size, "XMF_", 4);
+}
+
+static bool isValidImelody(const uint8_t *buf, size_t size) {
+    return startsWith(buf, size, "BEGIN:IMELODY", 13);
+}
+
+static bool isValidRtttl(const uint8_t *buf, size_t size) {
+    #define RTTTL_MAX_TITLE_LEN 32
+    // rtttl starts with the following:
+    // <title>:<type>=<value>
+    //
+    // Where:
+    // - <title>: Up to 32 characters
+    // - <type>: Single character indicating:
+    //     'd' for duration
+    //     'o' for octave
+    //     'b' for beats per minute
+    // - <value>: Corresponding value for the type
+    if (size < 4) {
+        return false;
+    }
+    for (size_t i = 0; i < RTTTL_MAX_TITLE_LEN && i < size; i++) {
+        if (buf[i] == ':') {
+            if (i < (size - 3) && buf[i + 2] == '=') {
+                return true;
+            }
+            break;
+        }
+    }
+    return false;
+}
+
+static bool isValidOta(const uint8_t *buf, size_t size) {
+    #define OTA_RINGTONE 0x25
+    #define OTA_SOUND 0x1d
+    #define OTA_UNICODE 0x22
+
+    // ota starts with the following:
+    // <cmdLen><cmd1><cmd2>..<cmdN>
+    //
+    // Where:
+    // - <cmdLen>: Single character command length
+    // - <cmd1>: Single character (OTA_RINGTONE << 1)
+    // - <cmd2>: Single character (OTA_SOUND << 1) or (OTA_UNICODE << 1)
+    //           and so on with last cmd being (0x1d << 1)
+
+    if (size < 3) {
+        return false;
+    }
+
+    uint8_t cmdLen = buf[0];
+    if (cmdLen < 2) {
+        return false;
+    }
+
+    if ((buf[1] >> 1) != OTA_RINGTONE) {
+        return false;
+    }
+    cmdLen--;
+
+    size_t i = 2;
+    while(cmdLen && i < size) {
+        switch(buf[i] >> 1) {
+            case OTA_SOUND:
+                return true;
+            case OTA_UNICODE:
+                break;
+            default:
+                return false;
+        }
+        cmdLen--;
+        i++;
+    }
+
+    return false;
+}
+
+bool SniffMidiLegacy(CDataSource *source, float *confidence) {
     MidiEngine p(source, NULL, NULL);
     if (p.initCheck() == OK) {
         *confidence = 0.8;
@@ -335,7 +423,47 @@
     }
     ALOGV("SniffMidi: no");
     return false;
+}
 
+bool SniffMidiEfficiently(CDataSource *source, float *confidence) {
+    uint8_t header[128];
+    int filled = source->readAt(source->handle, 0, header, sizeof(header));
+
+    if (isValidMThd(header, filled)) {
+        *confidence = 0.80;
+        ALOGV("SniffMidi: yes, MThd");
+        return true;
+    }
+    if (isValidXmf(header, filled)) {
+        *confidence = 0.80;
+        ALOGV("SniffMidi: yes, XMF_");
+        return true;
+    }
+    if (isValidImelody(header, filled)) {
+        *confidence = 0.80;
+        ALOGV("SniffMidi: yes, imelody");
+        return true;
+    }
+    if (isValidRtttl(header, filled)) {
+        *confidence = 0.80;
+        ALOGV("SniffMidi: yes, rtttl");
+        return true;
+    }
+    if (isValidOta(header, filled)) {
+        *confidence = 0.80;
+        ALOGV("SniffMidi: yes, ota");
+        return true;
+    }
+    ALOGV("SniffMidi: no");
+    return false;
+}
+
+// Sniffer
+bool SniffMidi(CDataSource *source, float *confidence) {
+    if(com::android::media::extractor::flags::extractor_sniff_midi_optimizations()) {
+        return SniffMidiEfficiently(source, confidence);
+    }
+    return SniffMidiLegacy(source, confidence);
 }
 
 static const char *extensions[] = {
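
Of the new signature checks, the RTTTL one is the least obvious: it accepts any buffer whose first 32 bytes contain a ':' followed two bytes later by '=', i.e. the <title>:<type>=<value> shape described in the comment. A standalone sketch (re-implementing the same prefix rule, since isValidRtttl() is file-static) showing why a typical ringtone header passes:

```
#include <cstdint>
#include <cstdio>
#include <cstring>

// Same prefix rule as isValidRtttl() above, restated so the example is
// self-contained: find ':' in the first 32 bytes and require '=' two bytes on.
static bool looksLikeRtttl(const uint8_t *buf, size_t size) {
    if (size < 4) return false;
    for (size_t i = 0; i < 32 && i < size; i++) {
        if (buf[i] == ':') {
            return i < size - 3 && buf[i + 2] == '=';
        }
    }
    return false;
}

int main() {
    const char *header = "fifth:d=4,o=5,b=63:8P,8G5";  // <title>:<type>=<value>...
    printf("%d\n", looksLikeRtttl(reinterpret_cast<const uint8_t *>(header),
                                  strlen(header)));    // prints 1
    return 0;
}
```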
diff --git a/media/module/extractors/mkv/MatroskaExtractor.cpp b/media/module/extractors/mkv/MatroskaExtractor.cpp
index f326db1..10ae07a 100644
--- a/media/module/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/module/extractors/mkv/MatroskaExtractor.cpp
@@ -1787,7 +1787,7 @@
         return ERROR_MALFORMED;
     }
 
-    if (!MakeVP9CodecSpecificData(trackInfo->mMeta, tmpData.get(), frame.len)) {
+    if (!MakeVP9CodecSpecificDataFromFirstFrame(trackInfo->mMeta, tmpData.get(), frame.len)) {
         return ERROR_MALFORMED;
     }
 
diff --git a/media/module/extractors/mp4/Android.bp b/media/module/extractors/mp4/Android.bp
index 8072002..effd24a 100644
--- a/media/module/extractors/mp4/Android.bp
+++ b/media/module/extractors/mp4/Android.bp
@@ -42,12 +42,18 @@
     ],
 
     static_libs: [
+        "android.media.extractor.flags-aconfig-cc",
+        "libaconfig_storage_read_api_cc",
         "libstagefright_esds",
         "libstagefright_foundation",
         "libstagefright_id3",
         "libutils",
     ],
 
+    shared_libs: [
+        "server_configurable_flags",
+    ],
+
     host_supported: true,
 
     target: {
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index b3707c8..f062491 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -33,6 +33,7 @@
 #include "SampleTable.h"
 #include "ItemTable.h"
 
+#include <com_android_media_extractor_flags.h>
 #include <media/esds/ESDS.h>
 #include <ID3.h>
 #include <media/stagefright/DataSourceBase.h>
@@ -51,6 +52,7 @@
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaDataBase.h>
+#include <media/stagefright/MetaDataUtils.h>
 #include <utils/String8.h>
 
 #include <byteswap.h>
@@ -146,6 +148,7 @@
 
     bool mIsAVC;
     bool mIsHEVC;
+    bool mIsAPV;
     bool mIsDolbyVision;
     bool mIsAC4;
     bool mIsMpegH = false;
@@ -365,6 +368,13 @@
         case FOURCC("hev1"):
             return MEDIA_MIMETYPE_VIDEO_HEVC;
 
+        case FOURCC("apv1"):
+            if (!com::android::media::extractor::flags::extractor_mp4_enable_apv()) {
+                ALOGV("APV support not enabled");
+                return "application/octet-stream";
+            }
+            return MEDIA_MIMETYPE_VIDEO_APV;
+
         case FOURCC("dvav"):
         case FOURCC("dva1"):
         case FOURCC("dvhe"):
@@ -523,11 +533,10 @@
     }
 
     [this, &track] {
-        int64_t duration;
+        int64_t duration = track->mMdhdDurationUs;
         int32_t samplerate;
         // Only for audio track.
-        if (track->elst_needs_processing && mHeaderTimescale != 0 &&
-            AMediaFormat_getInt64(track->meta, AMEDIAFORMAT_KEY_DURATION, &duration) &&
+        if (track->elst_needs_processing && mHeaderTimescale != 0 && duration != 0 &&
             AMediaFormat_getInt32(track->meta, AMEDIAFORMAT_KEY_SAMPLE_RATE, &samplerate)) {
             // Elst has to be processed only the first time this function is called.
             track->elst_needs_processing = false;
@@ -1645,7 +1654,10 @@
                           (long long) duration, (long long) mLastTrack->timescale);
                     return ERROR_MALFORMED;
                 }
-                AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs);
+                // Store this track's mdhd duration to calculate the padding.
+                mLastTrack->mMdhdDurationUs = (int64_t)durationUs;
+            } else {
+                mLastTrack->mMdhdDurationUs = 0;
             }
 
             uint8_t lang[2];
@@ -2103,6 +2115,7 @@
         case FOURCC("dav1"):
         case FOURCC("av01"):
         case FOURCC("vp09"):
+        case FOURCC("apv1"):
         {
             uint8_t buffer[78];
             if (chunk_data_size < (ssize_t)sizeof(buffer)) {
@@ -2594,10 +2607,42 @@
             *offset += chunk_size;
             break;
         }
-
         case FOURCC("vpcC"):
+        {
+            if (mLastTrack == NULL) {
+                return ERROR_MALFORMED;
+            }
+
+            auto buffer = heapbuffer<uint8_t>(chunk_data_size);
+
+            if (buffer.get() == NULL) {
+                ALOGE("b/28471206");
+                return NO_MEMORY;
+            }
+
+            if (mDataSource->readAt(data_offset, buffer.get(), chunk_data_size) < chunk_data_size) {
+                return ERROR_IO;
+            }
+
+            if (!MakeVP9CodecPrivateFromVpcC(mLastTrack->meta, buffer.get(), chunk_data_size)) {
+                ALOGE("Failed to create VP9 CodecPrivate from vpcC.");
+                return ERROR_MALFORMED;
+            }
+
+            *offset += chunk_size;
+            break;
+        }
+
+        case FOURCC("apvC"):
         case FOURCC("av1C"):
         {
+            if (!com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+                chunk_type == FOURCC("apvC")) {
+                ALOGV("APV support not enabled");
+                *offset += chunk_size;
+                break;
+            }
+
             auto buffer = heapbuffer<uint8_t>(chunk_data_size);
 
             if (buffer.get() == NULL) {
@@ -3907,17 +3952,18 @@
     }
 
     int32_t id;
+    int64_t duration;
 
     if (version == 1) {
         // we can get ctime value from U64_AT(&buffer[4])
         // we can get mtime value from U64_AT(&buffer[12])
         id = U32_AT(&buffer[20]);
-        // we can get duration value from U64_AT(&buffer[28])
+        duration = U64_AT(&buffer[28]);
     } else if (version == 0) {
         // we can get ctime value from U32_AT(&buffer[4])
         // we can get mtime value from U32_AT(&buffer[8])
         id = U32_AT(&buffer[12]);
-        // we can get duration value from U32_AT(&buffer[20])
+        duration = U32_AT(&buffer[20]);
     } else {
         return ERROR_UNSUPPORTED;
     }
@@ -3926,6 +3972,15 @@
         return ERROR_MALFORMED;
 
     AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_TRACK_ID, id);
+    if (duration != 0 && mHeaderTimescale != 0) {
+        long double durationUs = ((long double)duration * 1000000) / mHeaderTimescale;
+        if (durationUs < 0 || durationUs > INT64_MAX) {
+            ALOGE("cannot represent %lld * 1000000 / %lld in 64 bits",
+                  (long long) duration, (long long) mHeaderTimescale);
+            return ERROR_MALFORMED;
+        }
+        AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs);
+    }
 
     size_t matrixOffset = dynSize + 16;
     int32_t a00 = U32_AT(&buffer[matrixOffset]);
@@ -5108,6 +5163,7 @@
       mCurrentSampleInfoOffsets(NULL),
       mIsAVC(false),
       mIsHEVC(false),
+      mIsAPV(false),
       mIsDolbyVision(false),
       mIsAC4(false),
       mIsPcm(false),
@@ -5150,6 +5206,8 @@
     mIsAVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
     mIsHEVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC) ||
               !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
+    mIsAPV = com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+             !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_APV);
     mIsAC4 = !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC4);
     mIsDolbyVision = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
     mIsHeif = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC) && mItemTable != NULL;
diff --git a/media/module/extractors/mp4/include/MPEG4Extractor.h b/media/module/extractors/mp4/include/MPEG4Extractor.h
index 542a3e6..59626f6 100644
--- a/media/module/extractors/mp4/include/MPEG4Extractor.h
+++ b/media/module/extractors/mp4/include/MPEG4Extractor.h
@@ -96,7 +96,7 @@
 
         uint8_t *mTx3gBuffer;
         size_t mTx3gSize, mTx3gFilled;
-
+        int64_t mMdhdDurationUs;
 
         Track() {
             next = NULL;
diff --git a/media/module/extractors/mpeg2/Android.bp b/media/module/extractors/mpeg2/Android.bp
index aa59a0c..63dbcda 100644
--- a/media/module/extractors/mpeg2/Android.bp
+++ b/media/module/extractors/mpeg2/Android.bp
@@ -44,7 +44,6 @@
 
     shared_libs: [
         "libbase",
-        "libcgrouprc#29",
     ],
 
     header_libs: [
diff --git a/media/module/extractors/tests/Android.bp b/media/module/extractors/tests/Android.bp
index d6e79c7..5f0f4fa 100644
--- a/media/module/extractors/tests/Android.bp
+++ b/media/module/extractors/tests/Android.bp
@@ -21,6 +21,7 @@
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
     default_applicable_licenses: ["frameworks_av_license"],
+    default_team: "trendy_team_android_media_solutions_playback",
 }
 
 cc_test {
@@ -31,6 +32,8 @@
     srcs: ["ExtractorUnitTest.cpp"],
 
     static_libs: [
+        "android.media.extractor.flags-aconfig-cc",
+        "libaconfig_storage_read_api_cc",
         "libaacextractor",
         "libamrextractor",
         "libmp3extractor",
@@ -77,6 +80,7 @@
         "libhidlmemory",
         "libhidlbase",
         "libbase",
+        "server_configurable_flags",
     ],
 
     compile_multilib: "first",
diff --git a/media/module/foundation/MediaDefs.cpp b/media/module/foundation/MediaDefs.cpp
index 7abab63..a890696 100644
--- a/media/module/foundation/MediaDefs.cpp
+++ b/media/module/foundation/MediaDefs.cpp
@@ -25,6 +25,7 @@
 const char *MEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
 const char *MEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
 const char *MEDIA_MIMETYPE_VIDEO_AV1 = "video/av01";
+const char *MEDIA_MIMETYPE_VIDEO_APV = "video/apv";
 const char *MEDIA_MIMETYPE_VIDEO_AVC = "video/avc";
 const char *MEDIA_MIMETYPE_VIDEO_HEVC = "video/hevc";
 const char *MEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es";
diff --git a/media/module/foundation/include/media/stagefright/foundation/AUtils.h b/media/module/foundation/include/media/stagefright/foundation/AUtils.h
index 3b646dc..eb605a7 100644
--- a/media/module/foundation/include/media/stagefright/foundation/AUtils.h
+++ b/media/module/foundation/include/media/stagefright/foundation/AUtils.h
@@ -92,4 +92,13 @@
     return (err < (period / 2)) ? err : (period - err);
 }
 
+inline static bool IsPowerOfTwo(int32_t value) {
+    return (value & (value - 1)) == 0;
+}
+
+/** Checks if the value is a power of two and not zero. */
+inline static bool IsPowerOfTwoStrict(int32_t value) {
+    return value != 0 && (value & (value - 1)) == 0;
+}
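For instance, with the definitions above IsPowerOfTwo(0) == true (since 0 & -1 == 0) while IsPowerOfTwoStrict(0) == false; the strict variant exists for callers that must exclude zero.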
+
 #endif  // A_UTILS_H_
diff --git a/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
index 05ee7fc..2b3f446 100644
--- a/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
+++ b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -27,6 +27,7 @@
 extern const char *MEDIA_MIMETYPE_VIDEO_VP8;
 extern const char *MEDIA_MIMETYPE_VIDEO_VP9;
 extern const char *MEDIA_MIMETYPE_VIDEO_AV1;
+extern const char *MEDIA_MIMETYPE_VIDEO_APV;
 extern const char *MEDIA_MIMETYPE_VIDEO_AVC;
 extern const char *MEDIA_MIMETYPE_VIDEO_HEVC;
 extern const char *MEDIA_MIMETYPE_VIDEO_MPEG4;
diff --git a/media/module/libapexcodecs/Android.bp b/media/module/libapexcodecs/Android.bp
new file mode 100644
index 0000000..790b749
--- /dev/null
+++ b/media/module/libapexcodecs/Android.bp
@@ -0,0 +1,71 @@
+//
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package {
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "libapexcodecs-defaults",
+    header_libs: [
+        "libbase_headers",
+    ],
+
+    srcs: ["ApexCodecs.cpp"],
+
+    shared_libs: [
+        "libbase",
+        "libnativewindow",
+    ],
+
+    export_include_dirs: ["include"],
+
+    export_shared_lib_headers: [
+        "libbase",
+        "libnativewindow",
+    ],
+
+}
+
+cc_library {
+    name: "libapexcodecs-testing",
+    defaults: ["libapexcodecs-defaults"],
+
+    visibility: [
+        ":__subpackages__",
+    ],
+}
+
+cc_library {
+    name: "libapexcodecs",
+    defaults: ["libapexcodecs-defaults"],
+
+    visibility: [
+        "//frameworks/av/apex:__subpackages__",
+        "//frameworks/av/media/codec2/hal/client",
+    ],
+
+    min_sdk_version: "apex_inherit",
+    version_script: "libapexcodecs.map.txt",
+    stubs: {
+        symbol_file: "libapexcodecs.map.txt",
+        versions: ["36"],
+    },
+
+    apex_available: [
+        "com.android.media.swcodec",
+    ],
+}
diff --git a/media/module/libapexcodecs/ApexCodecs.cpp b/media/module/libapexcodecs/ApexCodecs.cpp
new file mode 100644
index 0000000..7101677
--- /dev/null
+++ b/media/module/libapexcodecs/ApexCodecs.cpp
@@ -0,0 +1,148 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <new>
+
+#include <android-base/no_destructor.h>
+#include <apex/ApexCodecs.h>
+
+// TODO: remove when we have real implementations
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wunused-parameter"
+
+struct ApexCodec_ComponentStore {
+    ApexCodec_ComponentStore() = default;
+};
+
+ApexCodec_ComponentStore *ApexCodec_GetComponentStore() {
+    ::android::base::NoDestructor<ApexCodec_ComponentStore> store;
+    return store.get();
+}
+
+ApexCodec_ComponentTraits *ApexCodec_Traits_get(
+        ApexCodec_ComponentStore *store, size_t index) {
+    return nullptr;
+}
+
+ApexCodec_Status ApexCodec_Component_create(
+        ApexCodec_ComponentStore *store, const char *name, ApexCodec_Component **comp) {
+    *comp = nullptr;
+    return APEXCODEC_STATUS_NOT_FOUND;
+}
+
+void ApexCodec_Component_destroy(ApexCodec_Component *comp) {}
+
+ApexCodec_Status ApexCodec_Component_start(ApexCodec_Component *comp) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Component_flush(ApexCodec_Component *comp) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Component_reset(ApexCodec_Component *comp) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Configurable *ApexCodec_Component_getConfigurable(
+        ApexCodec_Component *comp) {
+    return nullptr;
+}
+
+ApexCodec_Status ApexCodec_SupportedValues_getTypeAndValues(
+        ApexCodec_SupportedValues *supportedValues,
+        ApexCodec_SupportedValuesType *type,
+        ApexCodec_SupportedValuesNumberType *numberType,
+        ApexCodec_Value **values,
+        uint32_t *numValues) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+void ApexCodec_SupportedValues_release(ApexCodec_SupportedValues *values) {}
+
+ApexCodec_Status ApexCodec_SettingResults_getResultAtIndex(
+        ApexCodec_SettingResults *results,
+        size_t index,
+        ApexCodec_SettingResultFailure *failure,
+        ApexCodec_ParamFieldValues *field,
+        ApexCodec_ParamFieldValues **conflicts,
+        size_t *numConflicts) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+void ApexCodec_SettingResults_release(ApexCodec_SettingResults *results) {}
+
+ApexCodec_Status ApexCodec_Component_process(
+        ApexCodec_Component *comp,
+        const ApexCodec_Buffer *input,
+        ApexCodec_Buffer *output,
+        size_t *consumed,
+        size_t *produced) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_config(
+        ApexCodec_Configurable *comp,
+        ApexCodec_LinearBuffer *config,
+        ApexCodec_SettingResults **results) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_query(
+        ApexCodec_Configurable *comp,
+        uint32_t indices[],
+        size_t numIndices,
+        ApexCodec_LinearBuffer *config,
+        size_t *written) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_getIndices(
+        ApexCodec_ParamDescriptors *descriptors,
+        uint32_t **indices,
+        size_t *numIndices) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_getDescriptor(
+        ApexCodec_ParamDescriptors *descriptors,
+        uint32_t index,
+        ApexCodec_ParamAttribute *attr,
+        const char **name,
+        uint32_t **dependencies,
+        size_t *numDependencies) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_release(
+        ApexCodec_ParamDescriptors *descriptors) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_querySupportedParams(
+        ApexCodec_Configurable *comp,
+        ApexCodec_ParamDescriptors **descriptors) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_querySupportedValues(
+        ApexCodec_Configurable *comp,
+        ApexCodec_SupportedValuesQuery *queries,
+        size_t numQueries) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+#pragma clang diagnostic pop
\ No newline at end of file
diff --git a/media/module/libapexcodecs/include/apex/ApexCodecs.h b/media/module/libapexcodecs/include/apex/ApexCodecs.h
new file mode 100644
index 0000000..b9f2e83
--- /dev/null
+++ b/media/module/libapexcodecs/include/apex/ApexCodecs.h
@@ -0,0 +1,768 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/cdefs.h>
+#include <errno.h>
+#include <stdint.h>
+
+#include <android/api-level.h>
+#include <android/hardware_buffer.h>
+#include <android/versioning.h>
+
+__BEGIN_DECLS
+
+/**
+ * An API to access and operate codecs implemented within an APEX module,
+ * used only by the OS when using the codecs within a client process
+ * (instead of via a HAL).
+ *
+ * NOTE: Many of the constants and types mirror the ones in the Codec 2.0 API.
+ */
+
+/**
+ * Error code for ApexCodec APIs.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_Status : int32_t {
+    APEXCODEC_STATUS_OK        = 0,
+
+    /* bad input */
+    APEXCODEC_STATUS_BAD_VALUE = EINVAL,
+    APEXCODEC_STATUS_BAD_INDEX = ENXIO,
+    APEXCODEC_STATUS_CANNOT_DO = ENOTSUP,
+
+    /* bad sequencing of events */
+    APEXCODEC_STATUS_DUPLICATE = EEXIST,
+    APEXCODEC_STATUS_NOT_FOUND = ENOENT,
+    APEXCODEC_STATUS_BAD_STATE = EPERM,
+    APEXCODEC_STATUS_BLOCKING  = EWOULDBLOCK,
+    APEXCODEC_STATUS_CANCELED  = EINTR,
+
+    /* bad environment */
+    APEXCODEC_STATUS_NO_MEMORY = ENOMEM,
+    APEXCODEC_STATUS_REFUSED   = EACCES,
+
+    APEXCODEC_STATUS_TIMED_OUT = ETIMEDOUT,
+
+    /* bad versioning */
+    APEXCODEC_STATUS_OMITTED   = ENOSYS,
+
+    /* unknown fatal */
+    APEXCODEC_STATUS_CORRUPTED = EFAULT,
+    APEXCODEC_STATUS_NO_INIT   = ENODEV,
+} ApexCodec_Status;
+
+/**
+ * Enum that represents the kind of component
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_Kind : uint32_t {
+    /**
+     * The component is of a kind that is not listed below.
+     */
+    APEXCODEC_KIND_OTHER = 0x0,
+    /**
+     * The component is a decoder, which decodes coded bitstream
+     * into raw buffers.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_KIND_DECODER = 0x1,
+    /**
+     * The component is an encoder, which encodes raw buffers
+     * into coded bitstream.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_KIND_ENCODER = 0x2,
+} ApexCodec_Kind;
+
+typedef enum ApexCodec_Domain : uint32_t {
+    /**
+     * A component domain that is not listed below.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_DOMAIN_OTHER = 0x0,
+    /**
+     * A component domain that operates on video.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_DOMAIN_VIDEO = 0x1,
+    /**
+     * A component domain that operates on audio.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_DOMAIN_AUDIO = 0x2,
+    /**
+     * A component domain that operates on image.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_DOMAIN_IMAGE = 0x3,
+} ApexCodec_Domain;
+
+/**
+ * Handle for component traits such as name, media type, kind (decoder/encoder),
+ * domain (audio/video/image), etc.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ComponentTraits {
+    /**
+     * The name of the component.
+     */
+    const char *name;
+    /**
+     * The supported media type of the component.
+     */
+    const char *mediaType;
+    /**
+     * The kind of the component.
+     */
+    ApexCodec_Kind kind;
+    /**
+     * The domain on which the component operates.
+     */
+    ApexCodec_Domain domain;
+} ApexCodec_ComponentTraits;
+
+/**
+ * An opaque struct that represents a component store.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ComponentStore ApexCodec_ComponentStore;
+
+/**
+ * Get the component store object. This function never fails.
+ *
+ * \return component store object.
+ */
+ApexCodec_ComponentStore *ApexCodec_GetComponentStore()
+        __INTRODUCED_IN(36);
+
+/**
+ * Get the traits object of a component at the given index. ApexCodec_Traits_*
+ * functions are used to extract information from the traits object.
+ *
+ * Returns nullptr if index is out of bounds. The returned object is owned by
+ * the ApexCodec_ComponentStore object and the client should not delete it.
+ *
+ * The client can iterate through the traits objects by calling this function
+ * with an index incrementing from 0 until it gets a nullptr.
+ *
+ * \param index index of the traits object to query
+ * \return traits object at the index, or nullptr if the index is out of bounds.
+ */
+ApexCodec_ComponentTraits *ApexCodec_Traits_get(
+        ApexCodec_ComponentStore *store, size_t index) __INTRODUCED_IN(36);
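For illustration, a minimal sketch of the iteration pattern described above, using only the declarations in this header:

    ApexCodec_ComponentStore *store = ApexCodec_GetComponentStore();
    for (size_t i = 0; ; ++i) {
        ApexCodec_ComponentTraits *traits = ApexCodec_Traits_get(store, i);
        if (traits == nullptr) {
            break;  // index out of bounds: no more components
        }
        // traits->name, traits->mediaType, traits->kind and traits->domain are
        // owned by the store and must not be freed by the client.
    }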
+
+/**
+ * An opaque struct that represents a codec.
+ */
+typedef struct ApexCodec_Component ApexCodec_Component;
+
+/**
+ * Create a component by the name.
+ *
+ * \param store the component store
+ * \param name the name of the component
+ * \param comp out-param to be filled with the component; must not be null
+ * \return  APEXCODEC_STATUS_OK         if successful
+ *          APEXCODEC_STATUS_NOT_FOUND  if the name is not found
+ */
+ApexCodec_Status ApexCodec_Component_create(
+        ApexCodec_ComponentStore *store, const char *name, ApexCodec_Component **comp)
+        __INTRODUCED_IN(36);
+
+/**
+ * Destroy the component by the handle. It is invalid to call component methods on the handle
+ * after calling this method. It is a no-op to call this method with |comp| == nullptr.
+ *
+ * \param comp the handle for the component
+ */
+void ApexCodec_Component_destroy(ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Start the component. The component is ready to process buffers after this call.
+ *
+ * \param comp the handle for the component
+ */
+ApexCodec_Status ApexCodec_Component_start(
+        ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Flush the component's internal states. This operation preserves the existing configurations.
+ *
+ * \param comp the handle for the component
+ */
+ApexCodec_Status ApexCodec_Component_flush(
+        ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Resets the component to the initial state, right after creation. Note that the configuration
+ * will also revert to the initial state, so any required configuration should be
+ * set again before using the component.
+ *
+ * \param comp the handle for the component
+ */
+ApexCodec_Status ApexCodec_Component_reset(
+        ApexCodec_Component *comp) __INTRODUCED_IN(36);
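Taken together, a minimal lifecycle sketch for the component calls above; the component name is hypothetical and error handling is abbreviated:

    ApexCodec_ComponentStore *store = ApexCodec_GetComponentStore();
    ApexCodec_Component *comp = nullptr;
    if (ApexCodec_Component_create(store, "c2.example.decoder", &comp) == APEXCODEC_STATUS_OK) {
        ApexCodec_Component_start(comp);
        // ... ApexCodec_Component_process() calls go here ...
        ApexCodec_Component_destroy(comp);
    }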
+
+/**
+ * An opaque struct that represents a configurable part of the component.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_Configurable ApexCodec_Configurable;
+
+/**
+ * Return the configurable object for the given ApexCodec_Component.
+ * The returned object has the same lifecycle as |comp|.
+ *
+ * \param comp the handle for the component
+ * \return the configurable object handle
+ */
+ApexCodec_Configurable *ApexCodec_Component_getConfigurable(
+        ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Enum that represents the flags for ApexCodec_Buffer.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_BufferFlags : uint32_t {
+    APEXCODEC_FLAG_DROP_FRAME    = (1 << 0),
+    APEXCODEC_FLAG_END_OF_STREAM = (1 << 1),
+    APEXCODEC_FLAG_DISCARD_FRAME = (1 << 2),
+    APEXCODEC_FLAG_INCOMPLETE    = (1 << 3),
+    APEXCODEC_FLAG_CORRECTED     = (1 << 4),
+    APEXCODEC_FLAG_CORRUPT       = (1 << 5),
+    APEXCODEC_FLAG_CODEC_CONFIG  = (1u << 31),
+} ApexCodec_BufferFlags;
+
+/**
+ * Enum that represents the type of buffer.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_BufferType : uint32_t {
+    APEXCODEC_BUFFER_TYPE_INVALID,
+    APEXCODEC_BUFFER_TYPE_LINEAR,
+    APEXCODEC_BUFFER_TYPE_LINEAR_CHUNKS,
+    APEXCODEC_BUFFER_TYPE_GRAPHIC,
+    APEXCODEC_BUFFER_TYPE_GRAPHIC_CHUNKS,
+} ApexCodec_BufferType;
+
+/**
+ * Struct that represents the memory for ApexCodec_Buffer.
+ *
+ * All memory regions have the simple 1D representation.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_LinearBuffer {
+    /**
+     * A pointer to the start of the buffer. This is not aligned.
+     */
+    uint8_t *data;
+    /**
+     * Size of the buffer. The memory region between |data| (inclusive) and
+     * |data + size| (exclusive) is assumed to be valid for read/write.
+     */
+    size_t size;
+} ApexCodec_LinearBuffer;
+
+/**
+ * Struct that represents a buffer for ApexCodec_Component.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_Buffer {
+    /**
+     * Flags associated with the buffer.
+     */
+    ApexCodec_BufferFlags flags;
+    /**
+     * For input buffers the client assigns a unique sequential index to each buffer. For output
+     * buffers it is the same as the associated input buffer's frame index.
+     */
+    uint64_t frameIndex;
+    /**
+     * A timestamp associated with the buffer in microseconds.
+     */
+    uint64_t timestampUs;
+    /**
+     * The type of the buffer. The component may reject a request to process a buffer with the wrong
+     * type. For example, a video decoder will reject an input buffer with type BUFFER_TYPE_GRAPHIC,
+     * or an output buffer with type BUFFER_TYPE_LINEAR.
+     */
+    ApexCodec_BufferType type;
+    /**
+     * The actual memory for the buffer.
+     */
+    union {
+        ApexCodec_LinearBuffer linear;
+        AHardwareBuffer *graphic;
+    } memory;
+    /**
+     * Config updates associated with the buffer. For input buffers these are sent to the component
+     * at the specific input frame. For output buffers these are config updates as a result of
+     * processing the buffer.
+     */
+    ApexCodec_LinearBuffer configUpdates;
+} ApexCodec_Buffer;
+
+/**
+ * Enum that represents the query type for the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesQueryType : uint32_t {
+    /** Query all possible supported values regardless of current configuration */
+    APEXCODEC_SUPPORTED_VALUES_QUERY_POSSIBLE,
+    /** Query supported values at current configuration */
+    APEXCODEC_SUPPORTED_VALUES_QUERY_CURRENT,
+} ApexCodec_SupportedValuesQueryType;
+
+/**
+ * Enum that represents the type of the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesType : uint32_t {
+    /** The supported values are empty. */
+    APEXCODEC_SUPPORTED_VALUES_EMPTY,
+    /**
+     * The supported values are represented by a range defined with {min, max, step, num, den}.
+     *
+     * If step is 0 and num and den are both 1, the supported values are any value for which
+     * min <= value <= max.
+     *
+     * Otherwise, the range represents a geometric/arithmetic/multiply-accumulate series, where
+     * successive supported values can be derived from previous values (starting at min), using the
+     * following formula:
+     *  v[0] = min
+     *  v[i] = v[i-1] * num / den + step for i >= 1, while min < v[i] <= max.
+     */
+    APEXCODEC_SUPPORTED_VALUES_RANGE,
+    /** The supported values are represented by a list of values. */
+    APEXCODEC_SUPPORTED_VALUES_VALUES,
+    /** The supported values are represented by a list of flags. */
+    APEXCODEC_SUPPORTED_VALUES_FLAGS,
+} ApexCodec_SupportedValuesType;
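As an illustration of the RANGE encoding above: {min = 1, max = 32, step = 0, num = 2, den = 1} describes the series 1, 2, 4, 8, 16, 32, since each supported value is the previous one multiplied by 2 and the series stops once a value would exceed max.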
+
+/**
+ * Enum that represents numeric types of the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesNumberType : uint32_t {
+    APEXCODEC_SUPPORTED_VALUES_TYPE_NONE   = 0,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_INT32  = 1,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_UINT32 = 2,
+    // RESERVED                            = 3,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_INT64  = 4,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_UINT64 = 5,
+    // RESERVED                            = 6,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_FLOAT  = 7,
+} ApexCodec_SupportedValuesNumberType;
+
+/**
+ * Union of primitive types.
+ *
+ * Introduced in API 36.
+ */
+typedef union {
+    int32_t i32;
+    uint32_t u32;
+    int64_t i64;
+    uint64_t u64;
+    float f;
+} ApexCodec_Value;
+
+/**
+ * An opaque struct that represents the supported values of a parameter.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_SupportedValues ApexCodec_SupportedValues;
+
+/**
+ * Extract information from ApexCodec_SupportedValues object.
+ *
+ * \param [in] supportedValues the supported values object
+ * \param [out] type        pointer to be filled with the type of the supported values
+ * \param [out] numberType  pointer to be filled with the numeric type of the supported values
+ * \param [out] values      pointer to be filled with the array of the actual supported values.
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_EMPTY: nullptr
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_RANGE: {min, max, step, num, den}
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_VALUES/_FLAGS:
+ *                              the array of supported values/flags
+ *                          the array is owned by the |supportedValues| object and the client
+ *                          should not free it.
+ * \param [out] numValues   pointer to be filled with the number of values.
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_EMPTY: 0
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_RANGE: 5
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_VALUES/_FLAGS: varies
+ */
+ApexCodec_Status ApexCodec_SupportedValues_getTypeAndValues(
+        ApexCodec_SupportedValues *supportedValues,
+        ApexCodec_SupportedValuesType *type,
+        ApexCodec_SupportedValuesNumberType *numberType,
+        ApexCodec_Value **values,
+        uint32_t *numValues) __INTRODUCED_IN(36);
+
+/**
+ * Release the supported values object.
+ *
+ * \param values the supported values object
+ */
+void ApexCodec_SupportedValues_release(
+        ApexCodec_SupportedValues *values) __INTRODUCED_IN(36);
+
+/**
+ * Struct that represents the result of ApexCodec_Configurable_config.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_SettingResults ApexCodec_SettingResults;
+
+/**
+ * Enum that represents the failure code of ApexCodec_SettingResults.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SettingResultFailure : uint32_t {
+    /** parameter type is not supported */
+    APEXCODEC_SETTING_RESULT_BAD_TYPE,
+    /** parameter is not supported on the specific port */
+    APEXCODEC_SETTING_RESULT_BAD_PORT,
+    /** parameter is not supported on the specific stream */
+    APEXCODEC_SETTING_RESULT_BAD_INDEX,
+    /** parameter is read-only */
+    APEXCODEC_SETTING_RESULT_READ_ONLY,
+    /** parameter mismatches input data */
+    APEXCODEC_SETTING_RESULT_MISMATCH,
+    /** strict parameter does not accept value for the field at all */
+    APEXCODEC_SETTING_RESULT_BAD_VALUE,
+    /** strict parameter field value conflicts with another settings */
+    APEXCODEC_SETTING_RESULT_CONFLICT,
+    /** strict parameter field is out of range due to other settings */
+    APEXCODEC_SETTING_RESULT_UNSUPPORTED,
+    /**
+     * field does not accept the requested parameter value at all. It has been corrected to
+     * the closest supported value. This failure mode is provided to give guidance as to what
+     * are the currently supported values for this field (which may be a subset of the at-all-
+     * potential values)
+     */
+    APEXCODEC_SETTING_RESULT_INFO_BAD_VALUE,
+    /**
+     * requested parameter value is in conflict with one or more other settings
+     * and has been corrected to the closest supported value. This failure
+     * mode is provided to give guidance as to the currently supported values, and
+     * optionally to suggest to the client how to enable the requested
+     * parameter value.
+     */
+    APEXCODEC_SETTING_RESULT_INFO_CONFLICT,
+} ApexCodec_SettingResultFailure;
+
+/**
+ * Struct that represents a field and its supported values of a parameter.
+ *
+ * The offset and size of the field are where the field is located in the blob representation of
+ * the parameter, as used in the ApexCodec_Configurable_query() and ApexCodec_Configurable_config(),
+ * for example.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ParamFieldValues {
+    /** index of the param */
+    uint32_t index;
+    /** offset of the param field */
+    uint32_t offset;
+    /** size of the param field */
+    uint32_t size;
+    /** currently supported values of the param field */
+    ApexCodec_SupportedValues *values;
+} ApexCodec_ParamFieldValues;
+
+/**
+ * Extract the result of ApexCodec_Configurable_config.
+ * The client can iterate through the results with index starting from 0 until this function returns
+ * APEXCODEC_STATUS_NOT_FOUND.
+ *
+ * \param [in]  result  the result object
+ * \param [in]  index   the index of the result to extract, starts from 0.
+ * \param [out] failure pointer to be filled with the failure code
+ * \param [out] field   pointer to be filled with the field that failed.
+ *                      |field->values| is owned by the |result| object and the client should not
+ *                      free it.
+ * \param [out] conflicts   pointer to be filled with the array of conflicts.
+ *                          nullptr if |numConflicts| is 0.
+ *                          the array and its content is owned by the |result| object and the client
+ *                          should not free it.
+ * \param [out] numConflicts pointer to be filled with the number of conflicts
+ *                          may be 0 if there are no conflicts
+ * \return APEXCODEC_STATUS_OK         if successful
+ * \return APEXCODEC_STATUS_NOT_FOUND  if index is out of range
+ */
+ApexCodec_Status ApexCodec_SettingResults_getResultAtIndex(
+        ApexCodec_SettingResults *results,
+        size_t index,
+        ApexCodec_SettingResultFailure *failure,
+        ApexCodec_ParamFieldValues *field,
+        ApexCodec_ParamFieldValues **conflicts,
+        size_t *numConflicts) __INTRODUCED_IN(36);
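A sketch of the iteration described above, assuming |results| was obtained from ApexCodec_Configurable_config() and is non-null:

    for (size_t i = 0; ; ++i) {
        ApexCodec_SettingResultFailure failure;
        ApexCodec_ParamFieldValues field;
        ApexCodec_ParamFieldValues *conflicts = nullptr;
        size_t numConflicts = 0;
        if (ApexCodec_SettingResults_getResultAtIndex(
                results, i, &failure, &field, &conflicts, &numConflicts) != APEXCODEC_STATUS_OK) {
            break;  // APEXCODEC_STATUS_NOT_FOUND: no more results
        }
        // field.values and the conflicts array are owned by |results|; do not free them.
    }
    ApexCodec_SettingResults_release(results);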
+
+/**
+ * Release the setting result object.
+ *
+ * \param result the setting result object
+ */
+void ApexCodec_SettingResults_release(
+        ApexCodec_SettingResults *results) __INTRODUCED_IN(36);
+
+/**
+ * Process one frame from |input|, and produce one frame to |output| if possible.
+ * When successfully filled, |output->memory.linear| has the size adjusted to the produced
+ * output size, in case of linear buffers. |input->configUpdates| is applied with the input
+ * buffer; |output->configUpdates| contains config updates as a result of processing the frame.
+ *
+ * \param comp      the component to process the buffers
+ * \param input     the input buffer; when nullptr, the component should fill |output| if there are
+ *                  any pending output buffers.
+ * \param output    the output buffer, should not be nullptr.
+ * \param consumed  the number of consumed bytes from the input buffer
+ *                  set to 0 if no input buffer has been consumed, including when |input| is nullptr.
+ *                  for graphic buffers, any non-zero value means that the input buffer is consumed.
+ * \param produced  the number of bytes produced on the output buffer
+ *                  set to 0 if no output buffer has been produced.
+ *                  for graphic buffers, any non-zero value means that the output buffer is filled.
+ * \return APEXCODEC_STATUS_OK         if successful
+ * \return APEXCODEC_STATUS_NO_MEMORY  if the output buffer is not suitable to hold the output frame;
+ *                                     the client should retry with a new output buffer;
+ *                                     configUpdates should have the information to update
+ *                                     the buffer size.
+ * \return APEXCODEC_STATUS_BAD_VALUE  if the parameters are bad
+ * \return APEXCODEC_STATUS_BAD_STATE  if the component is not in the right state
+ *                                     to process the frame
+ * \return APEXCODEC_STATUS_CORRUPTED  if unexpected error has occurred
+ */
+ApexCodec_Status ApexCodec_Component_process(
+        ApexCodec_Component *comp,
+        const ApexCodec_Buffer *input,
+        ApexCodec_Buffer *output,
+        size_t *consumed,
+        size_t *produced) __INTRODUCED_IN(36);
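A minimal sketch of one processing call under the contract above; |comp|, |inputBuffer| and |outputBuffer| are assumed to be prepared by the caller:

    size_t consumed = 0;
    size_t produced = 0;
    ApexCodec_Status status = ApexCodec_Component_process(
            comp, &inputBuffer, &outputBuffer, &consumed, &produced);
    if (status == APEXCODEC_STATUS_NO_MEMORY) {
        // The output buffer was too small; reallocate it (outputBuffer.configUpdates should
        // carry the required size) and call process again with the same input.
    }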
+
+/**
+ * Configure the component with the given config.
+ *
+ * Configurations are Codec 2.0 configs in binary blobs,
+ * concatenated if there are multiple configs.
+ *
+ * frameworks/av/media/codec2/core/include/C2Param.h contains more details about the configuration
+ * blob layout.
+ *
+ * The component may correct the configured parameters to the closest supported values, and could
+ * fail in case there are no values that the component can auto-correct to. |result| contains the
+ * information about the failures. See ApexCodec_SettingResultFailure and ApexCodec_SettingResults
+ * for more details.
+ *
+ * \param [in]    comp   the handle for the component
+ * \param [inout] config the config blob; after the call, the config blob is updated to the actual
+ *                       config by the component.
+ * \param [out]   result the result of the configuration.
+ *                       the client should call ApexCodec_SettingResults_getResultAtIndex()
+ *                       to extract the result. The result object is owned by the client and should
+ *                       be released with ApexCodec_SettingResults_release().
+ *                       |result| may be nullptr if empty.
+ * \return APEXCODEC_STATUS_OK         if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE  if the config is invalid
+ * \return APEXCODEC_STATUS_BAD_STATE  if the component is not in the right state to be configured
+ * \return APEXCODEC_STATUS_CORRUPTED  if unexpected error has occurred
+ */
+ApexCodec_Status ApexCodec_Configurable_config(
+        ApexCodec_Configurable *comp,
+        ApexCodec_LinearBuffer *config,
+        ApexCodec_SettingResults **results) __INTRODUCED_IN(36);
+
+/**
+ * Query the component for the given indices.
+ *
+ * Parameter indices are defined in frameworks/av/media/codec2/core/include/C2Config.h.
+ *
+ * \param [in] comp         the handle for the component
+ * \param [in] indices      the array of indices to query
+ * \param [in] numIndices   the size of the indices array
+ * \param [inout] config    the output buffer for the config blob, allocated by the client.
+ *                          if the |config->size| was insufficient, it is set to the required size
+ *                          and |config->data| remains unchanged.
+ * \param [out] written     the number of bytes written to |config|.
+ * \return APEXCODEC_STATUS_OK          if successful
+ * \return APEXCODEC_STATUS_NO_MEMORY   if |config->size| is too small; |config->size| is updated to
+ *                                      the required buffer size.
+ * \return APEXCODEC_STATUS_BAD_VALUE   if the parameters are bad. e.g. |indices|, |config|,
+ *                                      |config->data| or |written| is nullptr.
+ */
+ApexCodec_Status ApexCodec_Configurable_query(
+        ApexCodec_Configurable *comp,
+        uint32_t indices[],
+        size_t numIndices,
+        ApexCodec_LinearBuffer *config,
+        size_t *written) __INTRODUCED_IN(36);
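A sketch of the grow-and-retry pattern implied by the return codes above; |configurable| and the parameter index are hypothetical placeholders:

    const uint32_t kSomeParamIndex = 0;  // hypothetical; real indices come from C2Config.h
    uint32_t indices[] = { kSomeParamIndex };
    uint8_t blob[1024];
    ApexCodec_LinearBuffer config = { blob, sizeof(blob) };
    size_t written = 0;
    ApexCodec_Status status = ApexCodec_Configurable_query(
            configurable, indices, 1, &config, &written);
    if (status == APEXCODEC_STATUS_NO_MEMORY) {
        // config.size was updated to the required size; allocate a buffer of that
        // size and call query again.
    }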
+
+/**
+ * An opaque struct that represents a collection of parameter descriptors.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ParamDescriptors ApexCodec_ParamDescriptors;
+
+/**
+ * Enum that represents the attributes of a parameter.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_ParamAttribute : uint32_t {
+    /** parameter is required to be specified */
+    APEXCODEC_PARAM_IS_REQUIRED   = 1u << 0,
+    /** parameter retains its value */
+    APEXCODEC_PARAM_IS_PERSISTENT = 1u << 1,
+    /** parameter is strict */
+    APEXCODEC_PARAM_IS_STRICT     = 1u << 2,
+    /** parameter is read-only */
+    APEXCODEC_PARAM_IS_READ_ONLY  = 1u << 3,
+    /** parameter shall not be visible to clients */
+    APEXCODEC_PARAM_IS_HIDDEN     = 1u << 4,
+    /** parameter shall not be used by framework (other than testing) */
+    APEXCODEC_PARAM_IS_INTERNAL   = 1u << 5,
+    /** parameter is publicly const (hence read-only) */
+    APEXCODEC_PARAM_IS_CONST      = 1u << 6 | APEXCODEC_PARAM_IS_READ_ONLY,
+} ApexCodec_ParamAttribute;
+
+/**
+ * Get the parameter indices of the param descriptors.
+ *
+ * \param [in] descriptors the param descriptors object
+ * \param [out] indices the pointer to be filled with the array of the indices;
+ *                      the array is owned by |descriptors| and should not be freed by the client.
+ * \param [out] numIndices the size of the indices array
+ * \return APEXCODEC_STATUS_OK          if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE   if parameters are bad. e.g. |descriptors|, |indices| or
+ *                                  |numIndices| is nullptr.
+ */
+ApexCodec_Status ApexCodec_ParamDescriptors_getIndices(
+        ApexCodec_ParamDescriptors *descriptors,
+        uint32_t **indices,
+        size_t *numIndices) __INTRODUCED_IN(36);
+
+/**
+ * Get the descriptor of the param.
+ *
+ * \param [in] descriptors the param descriptors object
+ * \param [in] index the index of the param
+ * \param [out] attr the attribute of the param
+ * \param [out] name    the pointer to be filled with the name of the param;
+ *                      the string is owned by |descriptors| and should not be freed by the client.
+ * \param [out] dependencies the pointer to be filled with an array of the parameter indices
+ *                        that the parameter with |index| depends on.
+ *                        may be null if empty.
+ *                        the array is owned by |descriptors| and should not be freed by the client.
+ * \param [out] numDependencies the number of dependencies
+ * \return APEXCODEC_STATUS_OK          if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE   if parameters are bad. e.g. |descriptors|, |attr|, |name|,
+ *                                  |dependencies| or |numDependencies| is nullptr.
+ * \return APEXCODEC_STATUS_BAD_INDEX   if the index is not included in the param descriptors.
+ */
+ApexCodec_Status ApexCodec_ParamDescriptors_getDescriptor(
+        ApexCodec_ParamDescriptors *descriptors,
+        uint32_t index,
+        ApexCodec_ParamAttribute *attr,
+        const char **name,
+        uint32_t **dependencies,
+        size_t *numDependencies) __INTRODUCED_IN(36);
+
+/**
+ * Release the param descriptors object.
+ *
+ * \param descriptors the param descriptors object
+ */
+ApexCodec_Status ApexCodec_ParamDescriptors_release(
+        ApexCodec_ParamDescriptors *descriptors) __INTRODUCED_IN(36);
+
+/**
+ * Query the component for the supported parameters.
+ *
+ * \param comp the handle for the component
+ * \param descriptors   the pointer to be filled with the param descriptors object;
+ *                      the object should be released with ApexCodec_ParamDescriptors_release().
+ * \return APEXCODEC_STATUS_OK          if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE   if parameters are bad. e.g. |descriptors| is nullptr.
+ */
+ApexCodec_Status ApexCodec_Configurable_querySupportedParams(
+        ApexCodec_Configurable *comp,
+        ApexCodec_ParamDescriptors **descriptors) __INTRODUCED_IN(36);
+
+/**
+ * Struct that represents the query for the supported values of a parameter.
+ *
+ * The offset of the field can be found in the layout of the parameter blob.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_SupportedValuesQuery {
+    /* in-params */
+
+    /** index of the param */
+    uint32_t index;
+    /** offset to the param field */
+    size_t offset;
+    /** query type */
+    ApexCodec_SupportedValuesQueryType type;
+
+    /* out-params */
+
+    /** status of the query */
+    ApexCodec_Status status;
+
+    /** supported values. must be released with ApexCodec_SupportedValues_release(). */
+    ApexCodec_SupportedValues *values;
+} ApexCodec_SupportedValuesQuery;
+
+/**
+ * Query the component for the supported values of the given indices.
+ *
+ * \param comp the handle for the component
+ * \param queries the array of queries
+ * \param numQueries the size of the queries array
+ * \return  APEXCODEC_STATUS_OK         if successful
+ *          APEXCODEC_STATUS_CORRUPTED  if unexpected error has occurred
+ */
+ApexCodec_Status ApexCodec_Configurable_querySupportedValues(
+        ApexCodec_Configurable *comp,
+        ApexCodec_SupportedValuesQuery *queries,
+        size_t numQueries) __INTRODUCED_IN(36);
+
+__END_DECLS
\ No newline at end of file
diff --git a/media/module/libapexcodecs/libapexcodecs.map.txt b/media/module/libapexcodecs/libapexcodecs.map.txt
new file mode 100644
index 0000000..672cf89
--- /dev/null
+++ b/media/module/libapexcodecs/libapexcodecs.map.txt
@@ -0,0 +1,26 @@
+LIBAPEXCODECS_36 { # introduced=36
+  global:
+    ApexCodec_Component_create; # apex
+    ApexCodec_Component_destroy; # apex
+    ApexCodec_Component_flush; # apex
+    ApexCodec_Component_getConfigurable; # apex
+    ApexCodec_Component_process; # apex
+    ApexCodec_Component_start; # apex
+    ApexCodec_Component_reset; # apex
+    ApexCodec_Configurable_config; # apex
+    ApexCodec_Configurable_query; # apex
+    ApexCodec_Configurable_querySupportedParams; # apex
+    ApexCodec_Configurable_querySupportedValues; # apex
+    ApexCodec_GetComponentStore; # apex
+    ApexCodec_ParamDescriptors_getDescriptor; # apex
+    ApexCodec_ParamDescriptors_getIndices; # apex
+    ApexCodec_ParamDescriptors_release; # apex
+    ApexCodec_SettingResults_getResultAtIndex; # apex
+    ApexCodec_SettingResults_release; # apex
+    ApexCodec_SupportedValues_getTypeAndValues; # apex
+    ApexCodec_SupportedValues_release; # apex
+    ApexCodec_Traits_get; # apex
+
+  local:
+    *;
+};
\ No newline at end of file
diff --git a/media/module/libapexcodecs/tests/Android.bp b/media/module/libapexcodecs/tests/Android.bp
new file mode 100644
index 0000000..162d12c
--- /dev/null
+++ b/media/module/libapexcodecs/tests/Android.bp
@@ -0,0 +1,30 @@
+//
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package {
+    default_team: "trendy_team_android_media_codec_framework",
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+    name: "libapexcodecs_tests",
+    shared_libs: [
+        "libapexcodecs-testing",
+        "libcodec2",
+    ],
+
+    srcs: ["ApexCodecsTest.cpp"],
+}
diff --git a/media/module/libapexcodecs/tests/ApexCodecsTest.cpp b/media/module/libapexcodecs/tests/ApexCodecsTest.cpp
new file mode 100644
index 0000000..cd5ebba
--- /dev/null
+++ b/media/module/libapexcodecs/tests/ApexCodecsTest.cpp
@@ -0,0 +1,100 @@
+#include <C2.h>
+#include <C2Component.h>
+
+#include <apex/ApexCodecs.h>
+
+// static_asserts for enum values match
+static_assert((uint32_t)APEXCODEC_STATUS_OK        == (uint32_t)C2_OK);
+static_assert((uint32_t)APEXCODEC_STATUS_BAD_VALUE == (uint32_t)C2_BAD_VALUE);
+static_assert((uint32_t)APEXCODEC_STATUS_BAD_INDEX == (uint32_t)C2_BAD_INDEX);
+static_assert((uint32_t)APEXCODEC_STATUS_CANNOT_DO == (uint32_t)C2_CANNOT_DO);
+static_assert((uint32_t)APEXCODEC_STATUS_DUPLICATE == (uint32_t)C2_DUPLICATE);
+static_assert((uint32_t)APEXCODEC_STATUS_NOT_FOUND == (uint32_t)C2_NOT_FOUND);
+static_assert((uint32_t)APEXCODEC_STATUS_BAD_STATE == (uint32_t)C2_BAD_STATE);
+static_assert((uint32_t)APEXCODEC_STATUS_BLOCKING  == (uint32_t)C2_BLOCKING);
+static_assert((uint32_t)APEXCODEC_STATUS_CANCELED  == (uint32_t)C2_CANCELED);
+static_assert((uint32_t)APEXCODEC_STATUS_NO_MEMORY == (uint32_t)C2_NO_MEMORY);
+static_assert((uint32_t)APEXCODEC_STATUS_REFUSED   == (uint32_t)C2_REFUSED);
+static_assert((uint32_t)APEXCODEC_STATUS_TIMED_OUT == (uint32_t)C2_TIMED_OUT);
+static_assert((uint32_t)APEXCODEC_STATUS_OMITTED   == (uint32_t)C2_OMITTED);
+static_assert((uint32_t)APEXCODEC_STATUS_CORRUPTED == (uint32_t)C2_CORRUPTED);
+static_assert((uint32_t)APEXCODEC_STATUS_NO_INIT   == (uint32_t)C2_NO_INIT);
+
+static_assert((uint32_t)APEXCODEC_KIND_OTHER   == (uint32_t)C2Component::KIND_OTHER);
+static_assert((uint32_t)APEXCODEC_KIND_DECODER == (uint32_t)C2Component::KIND_DECODER);
+static_assert((uint32_t)APEXCODEC_KIND_ENCODER == (uint32_t)C2Component::KIND_ENCODER);
+
+static_assert((uint32_t)APEXCODEC_DOMAIN_OTHER == (uint32_t)C2Component::DOMAIN_OTHER);
+static_assert((uint32_t)APEXCODEC_DOMAIN_VIDEO == (uint32_t)C2Component::DOMAIN_VIDEO);
+static_assert((uint32_t)APEXCODEC_DOMAIN_AUDIO == (uint32_t)C2Component::DOMAIN_AUDIO);
+static_assert((uint32_t)APEXCODEC_DOMAIN_IMAGE == (uint32_t)C2Component::DOMAIN_IMAGE);
+
+static_assert((uint32_t)APEXCODEC_FLAG_DROP_FRAME    == (uint32_t)C2FrameData::FLAG_DROP_FRAME);
+static_assert((uint32_t)APEXCODEC_FLAG_END_OF_STREAM == (uint32_t)C2FrameData::FLAG_END_OF_STREAM);
+static_assert((uint32_t)APEXCODEC_FLAG_DISCARD_FRAME == (uint32_t)C2FrameData::FLAG_DISCARD_FRAME);
+static_assert((uint32_t)APEXCODEC_FLAG_INCOMPLETE    == (uint32_t)C2FrameData::FLAG_INCOMPLETE);
+static_assert((uint32_t)APEXCODEC_FLAG_CORRECTED     == (uint32_t)C2FrameData::FLAG_CORRECTED);
+static_assert((uint32_t)APEXCODEC_FLAG_CORRUPT       == (uint32_t)C2FrameData::FLAG_CORRUPT);
+static_assert((uint32_t)APEXCODEC_FLAG_CODEC_CONFIG  == (uint32_t)C2FrameData::FLAG_CODEC_CONFIG);
+
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_INVALID        ==
+              (uint32_t)C2BufferData::INVALID);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_LINEAR         ==
+              (uint32_t)C2BufferData::LINEAR);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_LINEAR_CHUNKS  ==
+              (uint32_t)C2BufferData::LINEAR_CHUNKS);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_GRAPHIC        ==
+              (uint32_t)C2BufferData::GRAPHIC);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_GRAPHIC_CHUNKS ==
+              (uint32_t)C2BufferData::GRAPHIC_CHUNKS);
+
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_QUERY_CURRENT  ==
+              (uint32_t)C2FieldSupportedValuesQuery::CURRENT);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_QUERY_POSSIBLE ==
+              (uint32_t)C2FieldSupportedValuesQuery::POSSIBLE);
+
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_EMPTY  ==
+              (uint32_t)C2FieldSupportedValues::EMPTY);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_RANGE  ==
+              (uint32_t)C2FieldSupportedValues::RANGE);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_VALUES ==
+              (uint32_t)C2FieldSupportedValues::VALUES);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_FLAGS  ==
+              (uint32_t)C2FieldSupportedValues::FLAGS);
+
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_NONE   == (uint32_t)C2Value::NO_INIT);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_INT32  == (uint32_t)C2Value::INT32);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_UINT32 == (uint32_t)C2Value::UINT32);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_INT64  == (uint32_t)C2Value::INT64);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_UINT64 == (uint32_t)C2Value::UINT64);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_FLOAT  == (uint32_t)C2Value::FLOAT);
+
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_TYPE       ==
+              (uint32_t)C2SettingResult::BAD_TYPE);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_PORT       ==
+              (uint32_t)C2SettingResult::BAD_PORT);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_INDEX      ==
+              (uint32_t)C2SettingResult::BAD_INDEX);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_READ_ONLY      ==
+              (uint32_t)C2SettingResult::READ_ONLY);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_MISMATCH       ==
+              (uint32_t)C2SettingResult::MISMATCH);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_VALUE      ==
+              (uint32_t)C2SettingResult::BAD_VALUE);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_CONFLICT       ==
+              (uint32_t)C2SettingResult::CONFLICT);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_UNSUPPORTED    ==
+              (uint32_t)C2SettingResult::UNSUPPORTED);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_INFO_BAD_VALUE ==
+              (uint32_t)C2SettingResult::INFO_BAD_VALUE);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_INFO_CONFLICT  ==
+              (uint32_t)C2SettingResult::INFO_CONFLICT);
+
+static_assert((uint32_t)APEXCODEC_PARAM_IS_REQUIRED   == (uint32_t)C2ParamDescriptor::IS_REQUIRED);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_PERSISTENT ==
+              (uint32_t)C2ParamDescriptor::IS_PERSISTENT);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_STRICT     == (uint32_t)C2ParamDescriptor::IS_STRICT);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_READ_ONLY  == (uint32_t)C2ParamDescriptor::IS_READ_ONLY);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_HIDDEN     == (uint32_t)C2ParamDescriptor::IS_HIDDEN);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_INTERNAL   == (uint32_t)C2ParamDescriptor::IS_INTERNAL);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_CONST      == (uint32_t)C2ParamDescriptor::IS_CONST);
\ No newline at end of file
diff --git a/media/module/metadatautils/MetaDataUtils.cpp b/media/module/metadatautils/MetaDataUtils.cpp
index 0895bb5..177438a 100644
--- a/media/module/metadatautils/MetaDataUtils.cpp
+++ b/media/module/metadatautils/MetaDataUtils.cpp
@@ -134,10 +134,54 @@
     }
     return true;
 }
+
+/**
+ * Build VP9 Codec Feature Metadata (CodecPrivate) to set CSD for VP9 codec.
+ * For reference:
+ * https://www.webmproject.org/docs/container/#vp9-codec-feature-metadata-codecprivate.
+ *
+ * @param meta          A pointer to the AMediaFormat object.
+ * @param profile       The profile value of the VP9 stream.
+ * @param level         The VP9 codec level. If the level is unknown, pass -1 to this parameter.
+ * @param bitDepth      The bit depth of the luma and color components of the VP9 stream.
+ * @param chromaSubsampling  The chroma subsampling of the VP9 stream. If chromaSubsampling is
+ *                           unknown, pass -1 to this parameter.
+ * @return true if CodecPrivate is set as the CSD of the AMediaFormat object.
+ *
+ */
+static bool MakeVP9CodecPrivate(AMediaFormat* meta, int32_t profile, int32_t level,
+                                int32_t bitDepth, int32_t chromaSubsampling) {
+    if (meta == nullptr) {
+        return false;
+    }
+
+    std::vector<uint8_t> codecPrivate;
+    // Construct CodecPrivate in WebM format (ID | Length | Data).
+    // Helper lambda to add a field to the codec private data
+    auto addField = [&codecPrivate](uint8_t id, uint8_t value) {
+        codecPrivate.push_back(id);
+        codecPrivate.push_back(0x01);  // Length is always 1
+        codecPrivate.push_back(value);
+    };
+
+    // Add fields
+    addField(0x01, static_cast<uint8_t>(profile));
+    if (level >= 0) {
+        addField(0x02, static_cast<uint8_t>(level));
+    }
+    addField(0x03, static_cast<uint8_t>(bitDepth));
+    if (chromaSubsampling >= 0) {
+        addField(0x04, static_cast<uint8_t>(chromaSubsampling));
+    }
+    // Set CSD in the meta format
+    AMediaFormat_setBuffer(meta, AMEDIAFORMAT_KEY_CSD_0, codecPrivate.data(), codecPrivate.size());
+    return true;
+}
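As a worked example of the helper above: calling it with profile 2, level -1 (unknown), bitDepth 10 and chromaSubsampling 1 yields the CSD bytes 0x01 0x01 0x02, 0x03 0x01 0x0A, 0x04 0x01 0x01; the level field is omitted because the level is unknown.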
+
 // The param data contains the first frame data, starting with the uncompressed frame
 // header. This uncompressed header (refer section 6.2 of the VP9 bitstream spec) is
 // used to parse profile, bitdepth and subsampling.
-bool MakeVP9CodecSpecificData(AMediaFormat* meta, const uint8_t* data, size_t size) {
+bool MakeVP9CodecSpecificDataFromFirstFrame(AMediaFormat* meta, const uint8_t* data, size_t size) {
     if (meta == nullptr || data == nullptr || size == 0) {
         return false;
     }
@@ -227,29 +271,29 @@
     if (chromaSubsampling != -1) {
         csdSize += 3;
     }
+    // As the level is not present in the first frame, build CodecPrivate without it.
+    return MakeVP9CodecPrivate(meta, profile, -1, bitDepth, chromaSubsampling);
+}
 
-    // Create VP9 Codec Feature Metadata (CodecPrivate) that can be parsed
-    // https://www.webmproject.org/docs/container/#vp9-codec-feature-metadata-codecprivate
-    sp<ABuffer> csd = sp<ABuffer>::make(csdSize);
-    uint8_t* csdData = csd->data();
-
-    *csdData++ = 0x01 /* FEATURE PROFILE */;
-    *csdData++ = 0x01 /* length */;
-    *csdData++ = profile;
-
-    *csdData++ = 0x03 /* FEATURE BITDEPTH */;
-    *csdData++ = 0x01 /* length */;
-    *csdData++ = bitDepth;
-
-    // csdSize more than 6 means chroma subsampling data was found.
-    if (csdSize > 6) {
-        *csdData++ = 0x04 /* FEATURE SUBSAMPLING */;
-        *csdData++ = 0x01 /* length */;
-        *csdData++ = chromaSubsampling;
+bool MakeVP9CodecPrivateFromVpcC(AMediaFormat* meta, const uint8_t* csdData, size_t size) {
+    if (meta == nullptr || csdData == nullptr || size < 12) {
+        return false;
     }
 
-    AMediaFormat_setBuffer(meta, AMEDIAFORMAT_KEY_CSD_0, csd->data(), csd->size());
-    return true;
+    // Check that the first 4 bytes (VersionAndFlags) match the required value.
+    if (csdData[0] != 0x01 || csdData[1] != 0x00 || csdData[2] != 0x00 || csdData[3] != 0x00) {
+        return false;
+    }
+
+    // Create VP9 Codec Feature Metadata (CodecPrivate) that can be parsed.
+    // https://www.webmproject.org/docs/container/#vp9-codec-feature-metadata-codecprivate
+    const uint8_t* vpcCData = csdData + 4;  // Skip the first 4 bytes (VersionAndFlags)
+
+    int32_t profile = vpcCData[0];
+    int32_t level = vpcCData[1];
+    int32_t bitDepth = (vpcCData[2] >> 4) & 0x0F;           // Bit Depth (4 bits).
+    int32_t chromaSubsampling = (vpcCData[2] >> 1) & 0x07;  // Chroma Subsampling (3 bits).
+    return MakeVP9CodecPrivate(meta, profile, level, bitDepth, chromaSubsampling);
 }
 
 bool MakeAACCodecSpecificData(MetaDataBase &meta, const uint8_t *data, size_t size) {
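For illustration only (not part of the patch): given the ID | length | value encoding used by MakeVP9CodecPrivate() above, a hypothetical profile-2, 10-bit stream with an unknown level and subsampling value 1 would produce the CodecPrivate bytes sketched below; the same triplet layout is what MakeVP9CodecPrivateFromVpcC() derives from a vpcC payload.

    // Sketch of the CSD_0 buffer produced for profile = 2, level = -1 (omitted),
    // bitDepth = 10, chromaSubsampling = 1; values are illustrative.
    const std::vector<uint8_t> expectedCodecPrivate = {
        0x01, 0x01, 0x02,  // FEATURE PROFILE, length 1, value 2
        0x03, 0x01, 0x0a,  // FEATURE BITDEPTH, length 1, value 10
        0x04, 0x01, 0x01,  // FEATURE SUBSAMPLING, length 1, value 1
    };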
diff --git a/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
index 69cf21a..9988544 100644
--- a/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
+++ b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
@@ -38,7 +38,10 @@
 void parseVorbisComment(
         AMediaFormat *fileMeta, const char *comment, size_t commentLength);
 
-bool MakeVP9CodecSpecificData(AMediaFormat* meta, const uint8_t* data, size_t size);
+bool MakeVP9CodecSpecificData(AMediaFormat* meta, int32_t csdSize, int32_t profile, int32_t level,
+                              int32_t bitDepth, int32_t chromaSubsampling);
+bool MakeVP9CodecSpecificDataFromFirstFrame(AMediaFormat* meta, const uint8_t* data, size_t size);
+bool MakeVP9CodecPrivateFromVpcC(AMediaFormat* meta, const uint8_t* data, size_t size);
 
 }  // namespace android
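A rough caller-side sketch (hypothetical extractor code, assuming only the declarations above): prefer the vpcC box payload when the container carries one, and fall back to parsing the first frame otherwise.

    // Hypothetical helper; setVp9Csd, vpcC and firstFrame are illustrative names.
    static bool setVp9Csd(AMediaFormat* meta,
                          const uint8_t* vpcC, size_t vpcCSize,
                          const uint8_t* firstFrame, size_t frameSize) {
        if (vpcC != nullptr && vpcCSize >= 12) {
            // vpcC carries profile, level, bit depth and chroma subsampling directly.
            return MakeVP9CodecPrivateFromVpcC(meta, vpcC, vpcCSize);
        }
        // Otherwise derive everything except the level from the uncompressed frame header.
        return MakeVP9CodecSpecificDataFromFirstFrame(meta, firstFrame, frameSize);
    }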
 
diff --git a/media/mtp/OWNERS b/media/mtp/OWNERS
index 6b5336e..bdb6cdb 100644
--- a/media/mtp/OWNERS
+++ b/media/mtp/OWNERS
@@ -1,10 +1,9 @@
 set noparent
 
-aprasath@google.com
 anothermark@google.com
-kumarashishg@google.com
-sarup@google.com
+febinthattil@google.com
+aprasath@google.com
 jsharkey@android.com
 jameswei@google.com
 rmojumder@google.com
-
+kumarashishg@google.com
\ No newline at end of file
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 3d873df..b250a03 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -52,9 +52,6 @@
     symbol_file: "libmediandk.map.txt",
     first_version: "21",
     unversioned_until: "current",
-    export_header_libs: [
-        "libmediandk_headers",
-    ],
 }
 
 ndk_headers {
diff --git a/media/tests/benchmark/MediaBenchmarkTest/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
index d41a7f9..1049d5e 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
@@ -36,8 +36,8 @@
     resource_dirs: ["res"],
 
     libs: [
-        "android.test.runner",
-        "android.test.base",
+        "android.test.runner.stubs.system",
+        "android.test.base.stubs.system",
     ],
 
     jni_libs: [
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index ec68de7..6a5bbbe 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -23,6 +23,7 @@
 #include <android-base/logging.h>
 #include <android-base/strings.h>
 #include <audio_utils/clock.h>
+#include <cutils/properties.h>
 #include <mediautils/EventLog.h>
 #include <mediautils/FixedString.h>
 #include <mediautils/MethodStatistics.h>
@@ -36,6 +37,46 @@
 
 
 namespace android::mediautils {
+
+// Note: The sum of kDefaultTimeoutDurationMs and kDefaultSecondChanceDurationMs
+// should be no less than 2 seconds, otherwise spurious timeouts
+// may occur with system suspend.
+static constexpr int kDefaultTimeoutDurationMs = 3000;
+
+// Due to suspend abort not incrementing the monotonic clock,
+// we allow another second chance timeout after the first timeout expires.
+//
+// The total timeout is therefore kDefaultTimeoutDuration + kDefaultSecondChanceDuration,
+// and the result is more stable when the monotonic clock increments during suspend.
+//
+static constexpr int kDefaultSecondChanceDurationMs = 2000;
+
+/* static */
+TimeCheck::Duration TimeCheck::getDefaultTimeoutDuration() {
+    static constinit std::atomic<int> defaultTimeoutDurationMs{};
+    auto defaultMs = defaultTimeoutDurationMs.load(std::memory_order_relaxed);
+    if (defaultMs == 0) {
+        defaultMs = property_get_int32(
+                "audio.timecheck.timeout_duration_ms", kDefaultTimeoutDurationMs);
+        if (defaultMs < 1) defaultMs = kDefaultTimeoutDurationMs;
+        defaultTimeoutDurationMs.store(defaultMs, std::memory_order_relaxed);
+    }
+    return std::chrono::milliseconds(defaultMs);
+}
+
+/* static */
+TimeCheck::Duration TimeCheck::getDefaultSecondChanceDuration() {
+    static constinit std::atomic<int> defaultSecondChanceDurationMs{};
+    auto defaultMs = defaultSecondChanceDurationMs.load(std::memory_order_relaxed);
+    if (defaultMs == 0) {
+        defaultMs = property_get_int32(
+                "audio.timecheck.second_chance_duration_ms", kDefaultSecondChanceDurationMs);
+        if (defaultMs < 1) defaultMs = kDefaultSecondChanceDurationMs;
+        defaultSecondChanceDurationMs.store(defaultMs, std::memory_order_relaxed);
+    }
+    return std::chrono::milliseconds(defaultMs);
+}
+
 // This function appropriately signals a pid to dump a backtrace if we are
 // running on device (and the HAL exists). If we are not running on an Android
 // device, there is no HAL to signal (so we do nothing).
@@ -143,6 +184,22 @@
 }
 
 /* static */
+std::string TimeCheck::signalAudioHals() {
+    std::vector<pid_t> pids = getAudioHalPids();
+    std::string halPids;
+    if (pids.size() != 0) {
+        for (const auto& pid : pids) {
+            ALOGI("requesting tombstone for pid: %d", pid);
+            halPids.append(std::to_string(pid)).append(" ");
+            signalAudioHAL(pid);
+        }
+        // Allow time to complete; the caller usually forces a restart afterwards.
+        sleep(1);
+    }
+    return halPids;
+}
+
+/* static */
 TimerThread& TimeCheck::getTimeCheckThread() {
     static TimerThread sTimeCheckThread{};
     return sTimeCheckThread;
@@ -182,23 +239,25 @@
 
 /* static */
 std::string TimeCheck::analyzeTimeouts(
-        float requestedTimeoutMs, float elapsedSteadyMs, float elapsedSystemMs) {
+        float requestedTimeoutMs, float secondChanceMs,
+        float elapsedSteadyMs, float elapsedSystemMs) {
     // Track any OS clock issues with suspend.
     // It is possible that the elapsedSystemMs is much greater than elapsedSteadyMs if
     // a suspend occurs; however, we always expect the timeout ms should always be slightly
     // less than the elapsed steady ms regardless of whether a suspend occurs or not.
 
-    std::string s("Timeout ms ");
-    s.append(std::to_string(requestedTimeoutMs))
-        .append(" elapsed steady ms ").append(std::to_string(elapsedSteadyMs))
-        .append(" elapsed system ms ").append(std::to_string(elapsedSystemMs));
+    const float totalTimeoutMs = requestedTimeoutMs + secondChanceMs;
+    std::string s = std::format(
+            "Timeout ms {:.2f} ({:.2f} + {:.2f})"
+            " elapsed steady ms {:.4f} elapsed system ms {:.4f}",
+            totalTimeoutMs, requestedTimeoutMs, secondChanceMs, elapsedSteadyMs, elapsedSystemMs);
 
     // Is there something unusual?
     static constexpr float TOLERANCE_CONTEXT_SWITCH_MS = 200.f;
 
-    if (requestedTimeoutMs > elapsedSteadyMs || requestedTimeoutMs > elapsedSystemMs) {
+    if (totalTimeoutMs > elapsedSteadyMs || totalTimeoutMs > elapsedSystemMs) {
         s.append("\nError: early expiration - "
-                "requestedTimeoutMs should be less than elapsed time");
+                "totalTimeoutMs should be less than elapsed time");
     }
 
     if (elapsedSteadyMs > elapsedSystemMs + TOLERANCE_CONTEXT_SWITCH_MS) {
@@ -206,13 +265,13 @@
     }
 
     // This has been found in suspend stress testing.
-    if (elapsedSteadyMs > requestedTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
+    if (elapsedSteadyMs > totalTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
         s.append("\nWarning: steady time significantly exceeds timeout "
                 "- possible thread stall or aborted suspend");
     }
 
     // This has been found in suspend stress testing.
-    if (elapsedSystemMs > requestedTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
+    if (elapsedSystemMs > totalTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
         s.append("\nInformation: system time significantly exceeds timeout "
                 "- possible suspend");
     }
@@ -259,21 +318,14 @@
     // HAL processes which can affect thread behavior.
     const auto snapshotAnalysis = getTimeCheckThread().getSnapshotAnalysis(4 /* retiredCount */);
 
-    // Generate audio HAL processes tombstones and allow time to complete
-    // before forcing restart
-    std::vector<pid_t> pids = TimeCheck::getAudioHalPids();
-    std::string halPids = "HAL pids [ ";
-    if (pids.size() != 0) {
-        for (const auto& pid : pids) {
-            ALOGI("requesting tombstone for pid: %d", pid);
-            halPids.append(std::to_string(pid)).append(" ");
-            signalAudioHAL(pid);
-        }
-        sleep(1);
+    // Generate audio HAL processes tombstones.
+    std::string halPids = signalAudioHals();
+    if (!halPids.empty()) {
+        halPids = "HAL pids [ " + halPids + "]";
     } else {
-        ALOGI("No HAL process pid available, skipping tombstones");
+        halPids = "No HAL process pids available";
+        ALOGI("%s", (halPids + ", skipping tombstones").c_str());
     }
-    halPids.append("]");
 
     LOG_EVENT_STRING(LOGTAG_AUDIO_BINDER_TIMEOUT, tag.c_str());
 
@@ -282,7 +334,7 @@
             .append(tag)
             .append(" scheduled ").append(formatTime(startSystemTime))
             .append(" on thread ").append(std::to_string(tid)).append("\n")
-            .append(analyzeTimeouts(requestedTimeoutMs + secondChanceMs,
+            .append(analyzeTimeouts(requestedTimeoutMs, secondChanceMs,
                     elapsedSteadyMs, elapsedSystemMs)).append("\n")
             .append(halPids).append("\n")
             .append(snapshotAnalysis.toString());
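For reference, a worked example (values illustrative) of the diagnostic line the reworked analyzeTimeouts() emits now that the second chance is passed separately:

    // With requestedTimeoutMs = 3000, secondChanceMs = 2000,
    // elapsedSteadyMs = 5050.5 and elapsedSystemMs = 5051.5, the std::format call produces:
    //   Timeout ms 5000.00 (3000.00 + 2000.00) elapsed steady ms 5050.5000 elapsed system ms 5051.5000
    // No extra diagnostics are appended: the 5000 ms total is below both elapsed times,
    // and neither elapsed time exceeds the total by the 200 ms context-switch tolerance.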
diff --git a/media/utils/include/mediautils/TimeCheck.h b/media/utils/include/mediautils/TimeCheck.h
index f1d572f..c112863 100644
--- a/media/utils/include/mediautils/TimeCheck.h
+++ b/media/utils/include/mediautils/TimeCheck.h
@@ -42,19 +42,29 @@
     //  float elapsedMs (the elapsed time to this event).
     using OnTimerFunc = std::function<void(bool /* timeout */, float /* elapsedMs */ )>;
 
-    // The default timeout is chosen to be less than system server watchdog timeout
-    // Note: kDefaultTimeOutMs should be no less than 2 seconds, otherwise spurious timeouts
-    // may occur with system suspend.
-    static constexpr TimeCheck::Duration kDefaultTimeoutDuration = std::chrono::milliseconds(3000);
+    /**
+     * Returns the default timeout to use for TimeCheck.
+     *
+     * The default timeout of 3000ms (kDefaultTimeoutDurationMs) is chosen to be less than
+     * the system server watchdog timeout, and can be changed by the sysprop
+     * audio.timecheck.timeout_duration_ms.
+     * A second chance wait may be set to extend the check.
+     */
+    static TimeCheck::Duration getDefaultTimeoutDuration();
 
-    // Due to suspend abort not incrementing the monotonic clock,
-    // we allow another second chance timeout after the first timeout expires.
-    //
-    // The total timeout is therefore kDefaultTimeoutDuration + kDefaultSecondChanceDuration,
-    // and the result is more stable when the monotonic clock increments during suspend.
-    //
-    static constexpr TimeCheck::Duration kDefaultSecondChanceDuration =
-            std::chrono::milliseconds(2000);
+    /**
+     * Returns the second chance timeout to use for TimeCheck.
+     *
+     * Due to suspend abort not incrementing the monotonic clock,
+     * we allow another second chance timeout after the first timeout expires.
+     * The second chance timeout default of 2000ms (kDefaultSecondChanceDurationMs)
+     * may be changed by the sysprop audio.timecheck.second_chance_duration_ms.
+     *
+     * The total timeout is therefore
+     * getDefaultTimeoutDuration() + getDefaultSecondChanceDuration(),
+     * and the result is more stable when the monotonic clock increments during suspend.
+     */
+    static TimeCheck::Duration getDefaultSecondChanceDuration();
 
     /**
      * TimeCheck is a RAII object which will notify a callback
@@ -97,6 +107,7 @@
     static std::string toString();
     static void setAudioHalPids(const std::vector<pid_t>& pids);
     static std::vector<pid_t> getAudioHalPids();
+    static std::string signalAudioHals();
 
   private:
     // Helper class for handling events.
@@ -130,7 +141,8 @@
     // Returns a string that represents the timeout vs elapsed time,
     // and diagnostics if there are any potential issues.
     static std::string analyzeTimeouts(
-            float timeoutMs, float elapsedSteadyMs, float elapsedSystemMs);
+            float timeoutMs, float secondChanceMs,
+            float elapsedSteadyMs, float elapsedSystemMs);
 
     static TimerThread& getTimeCheckThread();
     static void accessAudioHalPids(std::vector<pid_t>* pids, bool update);
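A minimal sketch (illustrative, assuming Duration is a std::chrono duration as used above) of how a caller would reason about the combined timeout budget described in the header comments:

    // The effective deadline is the sum of the default timeout and the second chance window;
    // with unmodified sysprops this is 3000 ms + 2000 ms = 5000 ms.
    const auto total = mediautils::TimeCheck::getDefaultTimeoutDuration()
                     + mediautils::TimeCheck::getDefaultSecondChanceDuration();
    const auto totalMs =
            std::chrono::duration_cast<std::chrono::milliseconds>(total).count();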
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index b763f09..bf2915a 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -141,9 +141,15 @@
 cc_defaults {
     name: "libaudioflinger_dependencies",
 
+    header_libs: [
+        "libaudiohal_headers", // required for AudioFlinger
+    ],
+
     shared_libs: [
+        "audio-permission-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
+        "audiopermissioncontroller",
         "av-types-aidl-cpp",
         "com.android.media.audio-aconfig-cc",
         "effect-aidl-cpp",
@@ -177,11 +183,6 @@
         "libvibrator",
         "packagemanager_aidl-cpp",
     ],
-
-    static_libs: [
-        "libaudiospdif",
-        "libmedialogservice",
-    ],
 }
 
 cc_library {
@@ -213,22 +214,21 @@
     ],
 
     static_libs: [
+        "libaudiospdif",
         "libcpustats",
-        "libpermission",
+        "libmedialogservice",
     ],
 
     header_libs: [
-        "audiopermissioncontroller_headers",
         "audiopolicyservicelocal_headers",
         "libaaudio_headers",
-        "libaudioclient_headers",
-        "libaudiohal_headers",
-        "libaudioutils_headers",
         "libmedia_headers",
     ],
 
     export_header_lib_headers: ["audiopolicyservicelocal_headers"],
 
+    export_include_dirs: ["."],
+
     export_shared_lib_headers: [
         "libpermission",
     ],
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index a17ac58..8215247 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -2923,7 +2923,8 @@
                                                         audio_config_base_t *mixerConfig,
                                                         audio_devices_t deviceType,
                                                         const String8& address,
-                                                        audio_output_flags_t flags)
+                                                        audio_output_flags_t *flags,
+                                                        const audio_attributes_t attributes)
 {
     AudioHwDevice *outHwDev = findSuitableHwDev_l(module, deviceType);
     if (outHwDev == NULL) {
@@ -2941,18 +2942,23 @@
 
     mHardwareStatus = AUDIO_HW_OUTPUT_OPEN;
     AudioStreamOut *outputStream = NULL;
+
+    playback_track_metadata_v7_t trackMetadata;
+    trackMetadata.base.usage = attributes.usage;
+
     status_t status = outHwDev->openOutputStream(
             &outputStream,
             *output,
             deviceType,
             flags,
             halConfig,
-            address.c_str());
+            address.c_str(),
+            {trackMetadata});
 
     mHardwareStatus = AUDIO_HW_IDLE;
 
     if (status == NO_ERROR) {
-        if (flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) {
+        if (*flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) {
             const sp<IAfMmapPlaybackThread> thread = IAfMmapPlaybackThread::create(
                     this, *output, outHwDev, outputStream, mSystemReady);
             mMmapThreads.add(*output, thread);
@@ -2961,22 +2967,22 @@
             return thread;
         } else {
             sp<IAfPlaybackThread> thread;
-            if (flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+            if (*flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
                 thread = IAfPlaybackThread::createBitPerfectThread(
                         this, outputStream, *output, mSystemReady);
                 ALOGV("%s() created bit-perfect output: ID %d thread %p",
                       __func__, *output, thread.get());
-            } else if (flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) {
+            } else if (*flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) {
                 thread = IAfPlaybackThread::createSpatializerThread(this, outputStream, *output,
                                                     mSystemReady, mixerConfig);
                 ALOGV("openOutput_l() created spatializer output: ID %d thread %p",
                       *output, thread.get());
-            } else if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
+            } else if (*flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
                 thread = IAfPlaybackThread::createOffloadThread(this, outputStream, *output,
                         mSystemReady, halConfig->offload_info);
                 ALOGV("openOutput_l() created offload output: ID %d thread %p",
                       *output, thread.get());
-            } else if ((flags & AUDIO_OUTPUT_FLAG_DIRECT)
+            } else if ((*flags & AUDIO_OUTPUT_FLAG_DIRECT)
                     || !IAfThreadBase::isValidPcmSinkFormat(halConfig->format)
                     || !IAfThreadBase::isValidPcmSinkChannelMask(halConfig->channel_mask)) {
                 thread = IAfPlaybackThread::createDirectOutputThread(this, outputStream, *output,
@@ -3016,6 +3022,8 @@
             aidl2legacy_DeviceDescriptorBase(request.device));
     audio_output_flags_t flags = VALUE_OR_RETURN_STATUS(
             aidl2legacy_int32_t_audio_output_flags_t_mask(request.flags));
+    audio_attributes_t attributes = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioAttributes_audio_attributes_t(request.attributes));
 
     audio_io_handle_t output;
 
@@ -3038,7 +3046,7 @@
     audio_utils::lock_guard _l(mutex());
 
     const sp<IAfThreadBase> thread = openOutput_l(module, &output, &halConfig,
-            &mixerConfig, deviceType, address, flags);
+            &mixerConfig, deviceType, address, &flags, attributes);
     if (thread != 0) {
         uint32_t latencyMs = 0;
         if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) == 0) {
@@ -3940,8 +3948,12 @@
         // TODO: We could check compatibility of the secondaryThread with the PatchTrack
         // for fast usage: thread has fast mixer, sample rate matches, etc.;
         // for now, we exclude fast tracks by removing the Fast flag.
+        constexpr audio_output_flags_t kIncompatiblePatchTrackFlags =
+                static_cast<audio_output_flags_t>(AUDIO_OUTPUT_FLAG_FAST
+                        | AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
+
         const audio_output_flags_t outputFlags =
-                (audio_output_flags_t)(track->getOutputFlags() & ~AUDIO_OUTPUT_FLAG_FAST);
+                (audio_output_flags_t)(track->getOutputFlags() & ~kIncompatiblePatchTrackFlags);
         sp<IAfPatchTrack> patchTrack = IAfPatchTrack::create(secondaryThread,
                                                        track->streamType(),
                                                        track->sampleRate(),
@@ -4491,7 +4503,7 @@
             // TODO(b/184194057): Use the vibrator information from the vibrator that will be used
             // for the HapticGenerator.
             const std::optional<media::AudioVibratorInfo> defaultVibratorInfo =
-                    std::move(getDefaultVibratorInfo_l());
+                    getDefaultVibratorInfo_l();
             if (defaultVibratorInfo) {
                 // Only set the vibrator info when it is a valid one.
                 audio_utils::lock_guard _cl(chain->mutex());
@@ -5126,8 +5138,8 @@
         } else {
             getIAudioFlingerStatistics().event(code, elapsedMs);
         }
-    }, mediautils::TimeCheck::kDefaultTimeoutDuration,
-    mediautils::TimeCheck::kDefaultSecondChanceDuration,
+    }, mediautils::TimeCheck::getDefaultTimeoutDuration(),
+    mediautils::TimeCheck::getDefaultSecondChanceDuration(),
     true /* crashOnTimeout */);
 
     return delegate();
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 501aed1..6777075 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -337,9 +337,12 @@
             audio_config_base_t* mixerConfig,
             audio_devices_t deviceType,
             const String8& address,
-            audio_output_flags_t flags) final REQUIRES(mutex());
+            audio_output_flags_t* flags,
+            audio_attributes_t attributes) final REQUIRES(mutex());
     const DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*>&
-            getAudioHwDevs_l() const final REQUIRES(mutex()) { return mAudioHwDevs; }
+            getAudioHwDevs_l() const final REQUIRES(mutex(), hardwareMutex()) {
+              return mAudioHwDevs;
+            }
     void updateDownStreamPatches_l(const struct audio_patch* patch,
             const std::set<audio_io_handle_t>& streams) final REQUIRES(mutex());
     void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices) final
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index feae97e..7cb9329 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -71,10 +71,15 @@
 
 void DeviceEffectManager::onReleaseAudioPatch(audio_patch_handle_t handle) {
     ALOGV("%s", __func__);
+    // Keep references to disconnected handles so their destruction happens after the lock is released.
+    std::vector<sp<IAfEffectHandle>> disconnectedHandles{};
     audio_utils::lock_guard _l(mutex());
     for (auto& effectProxies : mDeviceEffects) {
         for (auto& effect : effectProxies.second) {
-            effect->onReleasePatch(handle);
+            sp<IAfEffectHandle> disconnectedHandle = effect->onReleasePatch(handle);
+            if (disconnectedHandle != nullptr) {
+                disconnectedHandles.push_back(std::move(disconnectedHandle));
+            }
         }
     }
 }
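A side note on the pattern above (sketch, not part of the patch): declaring the vector before the lock_guard relies on C++ destruction order, so the collected handles are only released after the mutex is unlocked.

    // Destruction happens in reverse order of declaration:
    {
        std::vector<sp<IAfEffectHandle>> disconnectedHandles;  // destroyed last
        audio_utils::lock_guard _l(mutex());                   // destroyed first, unlocking
        // ... collect handles returned by onReleasePatch() under the lock ...
    }   // the mutex is released here, then the handles (and the effects they pin) are destroyed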
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 4da498b..cfd0a5e 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -1760,6 +1760,9 @@
         const sp<media::IEffectClient>& effectClient,
         int32_t priority, bool notifyFramesProcessed)
 {
+    if (client == nullptr && effectClient == nullptr) {
+        return sp<InternalEffectHandle>::make(effect, notifyFramesProcessed);
+    }
     return sp<EffectHandle>::make(
             effect, client, effectClient, priority, notifyFramesProcessed);
 }
@@ -1767,12 +1770,14 @@
 EffectHandle::EffectHandle(const sp<IAfEffectBase>& effect,
                                          const sp<Client>& client,
                                          const sp<media::IEffectClient>& effectClient,
-                                         int32_t priority, bool notifyFramesProcessed)
-    : BnEffect(),
+                                         int32_t priority, bool notifyFramesProcessed,
+                                         bool isInternal,
+                                         audio_utils::MutexOrder mutexOrder)
+    : BnEffect(), mMutex(audio_utils::mutex{mutexOrder}),
     mEffect(effect), mEffectClient(media::EffectClientAsyncProxy::makeIfNeeded(effectClient)),
     mClient(client), mCblk(nullptr),
     mPriority(priority), mHasControl(false), mEnabled(false), mDisconnected(false),
-    mNotifyFramesProcessed(notifyFramesProcessed)
+    mNotifyFramesProcessed(notifyFramesProcessed), mIsInternal(isInternal)
 {
     ALOGV("constructor %p client %p", this, client.get());
     setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
@@ -1939,7 +1944,7 @@
 
 void EffectHandle::disconnect(bool unpinIfLast)
 {
-    audio_utils::lock_guard _l(mutex());
+    audio_utils::unique_lock _l(mutex());
     ALOGV("disconnect(%s) %p", unpinIfLast ? "true" : "false", this);
     if (mDisconnected) {
         if (unpinIfLast) {
@@ -1951,11 +1956,19 @@
     {
         sp<IAfEffectBase> effect = mEffect.promote();
         if (effect != 0) {
+            // Unlock, e.g. for a device effect: disconnecting may need to acquire the AudioFlinger lock.
+            // An internal effect handle would also require the proxy lock (and vice versa).
+            if (isInternal()) {
+                _l.unlock();
+            }
             if (effect->disconnectHandle(this, unpinIfLast) > 0) {
                 ALOGW("%s Effect handle %p disconnected after thread destruction",
                     __func__, this);
             }
             effect->updatePolicyState();
+            if (isInternal()) {
+                _l.lock();
+            }
         }
     }
 
@@ -3544,8 +3557,7 @@
                 mHalEffect->setDevices({mDevice});
             }
         }
-        *handle = new EffectHandle(mHalEffect, nullptr, nullptr, 0 /*priority*/,
-                                   mNotifyFramesProcessed);
+        *handle = new InternalEffectHandle(mHalEffect, mNotifyFramesProcessed);
         status = (*handle)->initCheck();
         if (status == OK) {
             status = mHalEffect->addHandle((*handle).get());
@@ -3592,15 +3604,16 @@
     return status;
 }
 
-void DeviceEffectProxy::onReleasePatch(audio_patch_handle_t patchHandle) {
-    sp<IAfEffectHandle> effect;
+sp<IAfEffectHandle> DeviceEffectProxy::onReleasePatch(audio_patch_handle_t patchHandle) {
+    sp<IAfEffectHandle> disconnectedHandle;
     {
         audio_utils::lock_guard _l(proxyMutex());
         if (mEffectHandles.find(patchHandle) != mEffectHandles.end()) {
-            effect = mEffectHandles.at(patchHandle);
+            disconnectedHandle = std::move(mEffectHandles.at(patchHandle));
             mEffectHandles.erase(patchHandle);
         }
     }
+    return disconnectedHandle;
 }
 
 
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 5c90176..9ecf89e 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -344,7 +344,8 @@
     EffectHandle(const sp<IAfEffectBase>& effect,
             const sp<Client>& client,
             const sp<media::IEffectClient>& effectClient,
-            int32_t priority, bool notifyFramesProcessed);
+            int32_t priority, bool notifyFramesProcessed, bool isInternal = false,
+            audio_utils::MutexOrder mutexOrder = audio_utils::MutexOrder::kEffectHandle_Mutex);
     ~EffectHandle() override;
     status_t onTransact(
             uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) final;
@@ -364,6 +365,11 @@
                                       int32_t* _aidl_return) final;
 
     const sp<Client>& client() const final { return mClient; }
+    /**
+     * Checks if the handle is internal, i.e. created by AudioFlinger for internal needs (e.g.
+     * a device effect HAL handle or a device effect thread handle).
+     */
+    virtual bool isInternal() const { return mIsInternal; }
 
     sp<android::media::IEffect> asIEffect() final {
         return sp<android::media::IEffect>::fromExisting(this);
@@ -401,15 +407,18 @@
 
     void dumpToBuffer(char* buffer, size_t size) const final;
 
+protected:
+    // protects IEffect method calls
+    mutable audio_utils::mutex mMutex;
 
 private:
     DISALLOW_COPY_AND_ASSIGN(EffectHandle);
 
-    audio_utils::mutex& mutex() const RETURN_CAPABILITY(android::audio_utils::EffectHandle_Mutex) {
+    virtual audio_utils::mutex& mutex() const
+            RETURN_CAPABILITY(android::audio_utils::EffectHandle_Mutex) {
         return mMutex;
     }
-    // protects IEffect method calls
-    mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kEffectHandle_Mutex};
+
     const wp<IAfEffectBase> mEffect;               // pointer to controlled EffectModule
     const sp<media::IEffectClient> mEffectClient;  // callback interface for client notifications
     /*const*/ sp<Client> mClient;            // client for shared memory allocation, see
@@ -425,6 +434,28 @@
     bool mDisconnected;                      // Set to true by disconnect()
     const bool mNotifyFramesProcessed;       // true if the client callback event
                                              // EVENT_FRAMES_PROCESSED must be generated
+    const bool mIsInternal;
+};
+
+/**
+ * There are two types of effects:
+ * - Session effect: the handle is called directly from the client, without the AudioFlinger lock.
+ * - Device effect: a device effect proxy aggregates a collection of internal effect handles that
+ *   control the same effect added on all audio patches involving the selected device effect port,
+ *   requested either by a client or by AudioPolicyEffects. These internal effect handles do not
+ *   have a client. The sequence flow implies a different locking order, hence the lock is specialized.
+ */
+class InternalEffectHandle : public EffectHandle {
+public:
+    InternalEffectHandle(const sp<IAfEffectBase>& effect, bool notifyFramesProcessed) :
+            EffectHandle(effect, /* client= */ nullptr, /* effectClient= */ nullptr,
+                         /* priority= */ 0, notifyFramesProcessed, /* isInternal */ true,
+                         audio_utils::MutexOrder::kDeviceEffectHandle_Mutex) {}
+
+    virtual audio_utils::mutex& mutex() const
+            RETURN_CAPABILITY(android::audio_utils::DeviceEffectHandle_Mutex) {
+        return mMutex;
+    }
 };
 
 // the EffectChain class represents a group of effects associated to one audio session.
@@ -762,7 +793,7 @@
     status_t onUpdatePatch(audio_patch_handle_t oldPatchHandle, audio_patch_handle_t newPatchHandle,
            const IAfPatchPanel::Patch& patch) final;
 
-    void onReleasePatch(audio_patch_handle_t patchHandle) final;
+    sp<IAfEffectHandle> onReleasePatch(audio_patch_handle_t patchHandle) final;
 
     size_t removeEffect(const sp<IAfEffectModule>& effect) final;
 
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index f1edcfe..3452e94 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -400,7 +400,14 @@
     virtual status_t onUpdatePatch(audio_patch_handle_t oldPatchHandle,
             audio_patch_handle_t newPatchHandle,
             const IAfPatchPanel::Patch& patch) = 0;
-    virtual void onReleasePatch(audio_patch_handle_t patchHandle) = 0;
+    /**
+     * Checks whether an effect handle is linked with the given released patch handle and,
+     * if so, releases it.
+     *
+     * @param patchHandle handle of the released patch
+     * @return a reference to the released effect handle if any, nullptr otherwise. This allows
+     * the destruction of the handle to be deferred.
+     */
+    virtual sp<IAfEffectHandle> onReleasePatch(audio_patch_handle_t patchHandle) = 0;
 
     virtual void dump2(int fd, int spaces) const = 0; // TODO(b/291319101) naming?
 
diff --git a/services/audioflinger/IAfPatchPanel.h b/services/audioflinger/IAfPatchPanel.h
index 6110e4c..15b6ddf 100644
--- a/services/audioflinger/IAfPatchPanel.h
+++ b/services/audioflinger/IAfPatchPanel.h
@@ -82,7 +82,8 @@
             audio_config_base_t* mixerConfig,
             audio_devices_t deviceType,
             const String8& address,
-            audio_output_flags_t flags) REQUIRES(mutex()) = 0;
+            audio_output_flags_t* flags,
+            audio_attributes_t attributes) REQUIRES(mutex()) = 0;
     virtual audio_utils::mutex& mutex() const
             RETURN_CAPABILITY(audio_utils::AudioFlinger_Mutex) = 0;
     virtual const DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*>&
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index f57470f..994dd47 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -260,6 +260,7 @@
                     if (patch->sinks[0].config_mask & AUDIO_PORT_CONFIG_FLAGS) {
                         flags = patch->sinks[0].flags.output;
                     }
+                    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
                     const sp<IAfThreadBase> thread = mAfPatchPanelCallback->openOutput_l(
                                                             patch->sinks[0].ext.device.hw_module,
                                                             &output,
@@ -267,7 +268,8 @@
                                                             &mixerConfig,
                                                             outputDevice,
                                                             outputDeviceAddress,
-                                                            flags);
+                                                            &flags,
+                                                            attributes);
                     ALOGV("mAfPatchPanelCallback->openOutput_l() returned %p", thread.get());
                     if (thread == 0) {
                         status = NO_MEMORY;
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 3de9968..83cd024 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -73,7 +73,7 @@
     bool isDirect() const final
                                 { return (mFlags & AUDIO_INPUT_FLAG_DIRECT) != 0; }
 
-    void setSilenced(bool silenced) final { if (!isPatchTrack()) mSilenced = silenced; }
+    void setSilenced(bool silenced) final;
     bool isSilenced() const final { return mSilenced; }
 
     status_t getActiveMicrophones(
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 5c0ca5e..b7c0bb3 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -1696,7 +1696,7 @@
             // TODO(b/184194057): Use the vibrator information from the vibrator that will be used
             // for the HapticGenerator.
             const std::optional<media::AudioVibratorInfo> defaultVibratorInfo =
-                    std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
+                    mAfThreadCallback->getDefaultVibratorInfo_l();
             if (defaultVibratorInfo) {
                 audio_utils::lock_guard _cl(chain->mutex());
                 // Only set the vibrator info when it is a valid one.
@@ -2917,7 +2917,7 @@
                 // TODO(b/184194780): Use the vibrator information from the vibrator that will be
                 // used to play this track.
                  audio_utils::lock_guard _l(mAfThreadCallback->mutex());
-                vibratorInfo = std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
+                vibratorInfo = mAfThreadCallback->getDefaultVibratorInfo_l();
             }
             mutex().lock();
             track->setHapticScale(hapticScale);
@@ -3220,9 +3220,9 @@
 
     // Calculate size of normal sink buffer relative to the HAL output buffer size
     double multiplier = 1.0;
-    // Note: mType == SPATIALIZER does not support FastMixer.
-    if (mType == MIXER && (kUseFastMixer == FastMixer_Static ||
-            kUseFastMixer == FastMixer_Dynamic)) {
+    // Note: mType == SPATIALIZER does not support FastMixer, and DEEP_BUFFER is by definition not "fast".
+    if ((mType == MIXER && !(mOutput->flags & AUDIO_OUTPUT_FLAG_DEEP_BUFFER)) &&
+            (kUseFastMixer == FastMixer_Static || kUseFastMixer == FastMixer_Dynamic)) {
         size_t minNormalFrameCount = (kMinNormalSinkBufferSizeMs * mSampleRate) / 1000;
         size_t maxNormalFrameCount = (kMaxNormalSinkBufferSizeMs * mSampleRate) / 1000;
 
@@ -4017,7 +4017,13 @@
     // FIXME could this be made local to while loop?
     writeFrames = 0;
 
-    cacheParameters_l();
+    {
+        audio_utils::lock_guard l(mutex());
+
+        cacheParameters_l();
+        checkSilentMode_l();
+    }
+
     mSleepTimeUs = mIdleSleepTimeUs;
 
     if (mType == MIXER || mType == SPATIALIZER) {
@@ -4042,8 +4048,6 @@
     // suspended mode (for now) to help schedule the wait time until next iteration.
     nsecs_t timeLoopNextNs = 0;
 
-    checkSilentMode_l();
-
     audio_patch_handle_t lastDownstreamPatchHandle = AUDIO_PATCH_HANDLE_NONE;
 
     sendCheckOutputStageEffectsEvent();
@@ -5093,7 +5097,6 @@
         // mPipeSink below
         // mNormalSink below
 {
-    setMasterBalance(afThreadCallback->getMasterBalance_l());
     ALOGV("MixerThread() id=%d type=%d", id, type);
     ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%u, mFormat=%#x, mFrameSize=%zu, "
             "mFrameCount=%zu, mNormalFrameCount=%zu",
@@ -5105,6 +5108,8 @@
         // The Duplicating thread uses the AudioMixer and delivers data to OutputTracks
         // (downstream MixerThreads) in DuplicatingThread::threadLoop_write().
         // Do not create or use mFastMixer, mOutputSink, mPipeSink, or mNormalSink.
+        // Balance is *not* set in the DuplicatingThread here (or from AudioFlinger),
+        // as the downstream MixerThreads implement it.
         return;
     }
     // create an NBAIO sink for the HAL output stream, and negotiate
@@ -5134,7 +5139,16 @@
             break;
         case FastMixer_Static:
         case FastMixer_Dynamic:
-            initFastMixer = mFrameCount < mNormalFrameCount;
+            if (mType == MIXER && (output->flags & AUDIO_OUTPUT_FLAG_DEEP_BUFFER)) {
+                /* Do not init the fast mixer on deep buffer; warn if buffers are configured too small */
+                initFastMixer = false;
+                ALOGW_IF(mFrameCount * 1000 / mSampleRate < kMinNormalSinkBufferSizeMs,
+                         "HAL DEEP BUFFER Buffer (%zu ms) is smaller than set minimal buffer "
+                         "(%u ms), seems like a configuration error",
+                         mFrameCount * 1000 / mSampleRate, kMinNormalSinkBufferSizeMs);
+            } else {
+                initFastMixer = mFrameCount < mNormalFrameCount;
+            }
             break;
         }
         ALOGW_IF(initFastMixer == false && mFrameCount < mNormalFrameCount,
@@ -5264,6 +5278,9 @@
         mNormalSink = initFastMixer ? mPipeSink : mOutputSink;
         break;
     }
+    // setMasterBalance needs to be called after the FastMixer
+    // (if any) is set up, in order to deliver the balance settings to it.
+    setMasterBalance(afThreadCallback->getMasterBalance_l());
 }
 
 MixerThread::~MixerThread()
@@ -7714,6 +7731,7 @@
 
 ssize_t DuplicatingThread::threadLoop_write()
 {
+    ATRACE_BEGIN("write");
     for (size_t i = 0; i < outputTracks.size(); i++) {
         const ssize_t actualWritten = outputTracks[i]->write(mSinkBuffer, writeFrames);
 
@@ -7732,6 +7750,7 @@
 
         // TODO: Report correction for the other output tracks and show in the dump.
     }
+    ATRACE_END();
     if (mStandby) {
         mThreadMetrics.logBeginInterval();
         mThreadSnapshot.onBegin();
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 654b841..10a77ef 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -379,10 +379,10 @@
         return isOutput() ? outDeviceTypes_l() : DeviceTypeSet({inDeviceType_l()});
     }
 
-    const AudioDeviceTypeAddrVector& outDeviceTypeAddrs() const final {
+    const AudioDeviceTypeAddrVector& outDeviceTypeAddrs() const final REQUIRES(mutex()) {
         return mOutDeviceTypeAddrs;
     }
-    const AudioDeviceTypeAddr& inDeviceTypeAddr() const final {
+    const AudioDeviceTypeAddr& inDeviceTypeAddr() const final REQUIRES(mutex()) {
         return mInDeviceTypeAddr;
     }
 
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index f5f11cc..3d4e771 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -3136,6 +3136,14 @@
     *backInserter++ = metadata;
 }
 
+void RecordTrack::setSilenced(bool silenced) {
+    if (!isPatchTrack() && mSilenced != silenced) {
+        mSilenced = silenced;
+        ALOGD("%s: track with port id: %d, (%s)", __func__, mPortId,
+              mSilenced ? "silenced" : "unsilenced");
+    }
+}
+
 // ----------------------------------------------------------------------------
 #undef LOG_TAG
 #define LOG_TAG "AF::PatchRecord"
diff --git a/services/audioflinger/datapath/AudioHwDevice.cpp b/services/audioflinger/datapath/AudioHwDevice.cpp
index 95e9ecc..c2e538c 100644
--- a/services/audioflinger/datapath/AudioHwDevice.cpp
+++ b/services/audioflinger/datapath/AudioHwDevice.cpp
@@ -41,18 +41,20 @@
         AudioStreamOut **ppStreamOut,
         audio_io_handle_t handle,
         audio_devices_t deviceType,
-        audio_output_flags_t flags,
+        audio_output_flags_t *flags,
         struct audio_config *config,
-        const char *address)
+        const char *address,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
 {
 
     struct audio_config originalConfig = *config;
-    auto outputStream = new AudioStreamOut(this, flags);
+    auto outputStream = new AudioStreamOut(this);
 
     // Try to open the HAL first using the current format.
     ALOGV("openOutputStream(), try sampleRate %d, format %#x, channelMask %#x", config->sample_rate,
             config->format, config->channel_mask);
-    status_t status = outputStream->open(handle, deviceType, config, address);
+    status_t status = outputStream->open(handle, deviceType, config, flags, address,
+                                        sourceMetadata);
 
     if (status != NO_ERROR) {
         delete outputStream;
@@ -66,18 +68,25 @@
 
         // If the data is encoded then try again using wrapped PCM.
         const bool wrapperNeeded = !audio_has_proportional_frames(originalConfig.format)
-                && ((flags & AUDIO_OUTPUT_FLAG_DIRECT) != 0)
-                && ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0);
+                && ((*flags & AUDIO_OUTPUT_FLAG_DIRECT) != 0)
+                && ((*flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0);
 
         if (wrapperNeeded) {
             if (SPDIFEncoder::isFormatSupported(originalConfig.format)) {
-                outputStream = new SpdifStreamOut(this, flags, originalConfig.format);
-                status = outputStream->open(handle, deviceType, &originalConfig, address);
+                outputStream = new SpdifStreamOut(this, originalConfig.format);
+                status = outputStream->open(handle, deviceType, &originalConfig, flags, address,
+                                            sourceMetadata);
                 if (status != NO_ERROR) {
                     ALOGE("ERROR - openOutputStream(), SPDIF open returned %d",
                         status);
                     delete outputStream;
                     outputStream = nullptr;
+                } else {
+                    // on success, we need to assign the actual HAL stream config so that clients
+                    // know and can later patch correctly.
+                    config->format = originalConfig.format;
+                    config->channel_mask = originalConfig.channel_mask;
+                    config->sample_rate = originalConfig.sample_rate;
                 }
             } else {
                 ALOGE("ERROR - openOutputStream(), SPDIFEncoder does not support format 0x%08x",
@@ -151,6 +160,12 @@
                         status);
                     delete inputStream;
                     inputStream = nullptr;
+                } else {
+                    // on success, we need to assign the actual HAL stream config so that clients
+                    // know and can later patch correctly.
+                    config->format = originalConfig.format;
+                    config->channel_mask = originalConfig.channel_mask;
+                    config->sample_rate = originalConfig.sample_rate;
                 }
             } else {
                 ALOGE("ERROR - openInputStream(), SPDIFDecoder does not support format 0x%08x",
diff --git a/services/audioflinger/datapath/AudioHwDevice.h b/services/audioflinger/datapath/AudioHwDevice.h
index 80c1473..6a35b91 100644
--- a/services/audioflinger/datapath/AudioHwDevice.h
+++ b/services/audioflinger/datapath/AudioHwDevice.h
@@ -85,9 +85,10 @@
             AudioStreamOut **ppStreamOut,
             audio_io_handle_t handle,
             audio_devices_t deviceType,
-            audio_output_flags_t flags,
+            audio_output_flags_t *flags,
             struct audio_config *config,
-            const char *address);
+            const char *address,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata);
 
     status_t openInputStream(
             AudioStreamIn **ppStreamIn,
diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
index a686ff6..7aadda3 100644
--- a/services/audioflinger/datapath/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -30,9 +30,8 @@
 namespace android {
 
 // ----------------------------------------------------------------------------
-AudioStreamOut::AudioStreamOut(AudioHwDevice *dev, audio_output_flags_t flags)
+AudioStreamOut::AudioStreamOut(AudioHwDevice *dev)
         : audioHwDev(dev)
-        , flags(flags)
 {
 }
 
@@ -93,13 +92,16 @@
         audio_io_handle_t handle,
         audio_devices_t deviceType,
         struct audio_config *config,
-        const char *address)
+        audio_output_flags_t *flagsPtr,
+        const char *address,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
 {
     sp<StreamOutHalInterface> outStream;
 
-    const audio_output_flags_t customFlags = (config->format == AUDIO_FORMAT_IEC61937)
-                ? (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO)
-                : flags;
+    audio_output_flags_t customFlags = (config->format == AUDIO_FORMAT_IEC61937)
+                ? (audio_output_flags_t)(*flagsPtr | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO)
+                : *flagsPtr;
+    *flagsPtr = flags = customFlags;
 
     int status = hwDev()->openOutputStream(
             handle,
@@ -107,7 +109,8 @@
             customFlags,
             config,
             address,
-            &outStream);
+            &outStream,
+            sourceMetadata);
     ALOGV("AudioStreamOut::open(), HAL returned stream %p, sampleRate %d, format %#x,"
             " channelMask %#x, status %d", outStream.get(), config->sample_rate, config->format,
             config->channel_mask, status);
@@ -124,7 +127,8 @@
                 customFlags,
                 &customConfig,
                 address,
-                &outStream);
+                &outStream,
+                sourceMetadata);
         ALOGV("AudioStreamOut::open(), treat IEC61937 as PCM, status = %d", status);
     }
 
diff --git a/services/audioflinger/datapath/AudioStreamOut.h b/services/audioflinger/datapath/AudioStreamOut.h
index 2c9fb3e..1857099 100644
--- a/services/audioflinger/datapath/AudioStreamOut.h
+++ b/services/audioflinger/datapath/AudioStreamOut.h
@@ -37,17 +37,19 @@
 public:
     AudioHwDevice * const audioHwDev;
     sp<StreamOutHalInterface> stream;
-    const audio_output_flags_t flags;
+    audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
 
     [[nodiscard]] sp<DeviceHalInterface> hwDev() const;
 
-    AudioStreamOut(AudioHwDevice *dev, audio_output_flags_t flags);
+    explicit AudioStreamOut(AudioHwDevice *dev);
 
     virtual status_t open(
             audio_io_handle_t handle,
             audio_devices_t deviceType,
             struct audio_config *config,
-            const char *address);
+            audio_output_flags_t *flagsPtr,
+            const char *address,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata);
 
     virtual ~AudioStreamOut();
 
diff --git a/services/audioflinger/datapath/SpdifStreamIn.cpp b/services/audioflinger/datapath/SpdifStreamIn.cpp
index 98ce712..0090bc5 100644
--- a/services/audioflinger/datapath/SpdifStreamIn.cpp
+++ b/services/audioflinger/datapath/SpdifStreamIn.cpp
@@ -81,6 +81,11 @@
             outputDevice,
             outputDeviceAddress);
 
+    // reset config back to whatever is returned by HAL
+    config->sample_rate = customConfig.sample_rate;
+    config->format = customConfig.format;
+    config->channel_mask = customConfig.channel_mask;
+
     ALOGI("SpdifStreamIn::open() status = %d", status);
 
 #ifdef TEE_SINK
diff --git a/services/audioflinger/datapath/SpdifStreamOut.cpp b/services/audioflinger/datapath/SpdifStreamOut.cpp
index 65a4eec..a565955 100644
--- a/services/audioflinger/datapath/SpdifStreamOut.cpp
+++ b/services/audioflinger/datapath/SpdifStreamOut.cpp
@@ -33,10 +33,8 @@
  * PCM then we need to wrap the data in an SPDIF wrapper.
  */
 SpdifStreamOut::SpdifStreamOut(AudioHwDevice *dev,
-            audio_output_flags_t flags,
             audio_format_t format)
-        // Tell the HAL that the data will be compressed audio wrapped in a data burst.
-        : AudioStreamOut(dev, (audio_output_flags_t) (flags | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO))
+        : AudioStreamOut(dev)
         , mSpdifEncoder(this, format)
 {
 }
@@ -45,7 +43,9 @@
         audio_io_handle_t handle,
         audio_devices_t devices,
         struct audio_config *config,
-        const char *address)
+        audio_output_flags_t *flags,
+        const char *address,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
 {
     struct audio_config customConfig = *config;
 
@@ -62,6 +62,8 @@
 
     customConfig.format = AUDIO_FORMAT_PCM_16_BIT;
     customConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    // Tell the HAL that the data will be compressed audio wrapped in a data burst.
+    *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO);
 
     // Always print this because otherwise it could be very confusing if the
     // HAL and AudioFlinger are using different formats.
@@ -75,7 +77,14 @@
             handle,
             devices,
             &customConfig,
-            address);
+            flags,
+            address,
+            sourceMetadata);
+
+    // reset config back to whatever is returned by HAL
+    config->sample_rate = customConfig.sample_rate;
+    config->format = customConfig.format;
+    config->channel_mask = customConfig.channel_mask;
 
     ALOGI("SpdifStreamOut::open() status = %d", status);
 
diff --git a/services/audioflinger/datapath/SpdifStreamOut.h b/services/audioflinger/datapath/SpdifStreamOut.h
index c6d27ba..3241d6f 100644
--- a/services/audioflinger/datapath/SpdifStreamOut.h
+++ b/services/audioflinger/datapath/SpdifStreamOut.h
@@ -36,14 +36,15 @@
 class SpdifStreamOut : public AudioStreamOut {
 public:
 
-    SpdifStreamOut(AudioHwDevice *dev, audio_output_flags_t flags,
-            audio_format_t format);
+    SpdifStreamOut(AudioHwDevice *dev, audio_format_t format);
 
     status_t open(
             audio_io_handle_t handle,
             audio_devices_t devices,
             struct audio_config *config,
-            const char *address) override;
+            audio_output_flags_t *flags,
+            const char *address,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata) override;
 
     /**
     * Write audio buffer to driver. Returns number of bytes written, or a
diff --git a/services/audioparameterparser/Android.bp b/services/audioparameterparser/Android.bp
index 1c1c1e1..0b2c1ba 100644
--- a/services/audioparameterparser/Android.bp
+++ b/services/audioparameterparser/Android.bp
@@ -35,10 +35,10 @@
     name: "android.hardware.audio.parameter_parser.example_defaults",
     defaults: [
         "latest_android_hardware_audio_core_ndk_shared",
-        "latest_av_audio_types_aidl_ndk_shared",
     ],
 
     shared_libs: [
+        "av-audio-types-aidl-ndk",
         "libbase",
         "libbinder_ndk",
     ],
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index e91e2a3..35973c1 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -294,8 +294,7 @@
     virtual status_t startAudioSource(const struct audio_port_config *source,
                                       const audio_attributes_t *attributes,
                                       audio_port_handle_t *portId,
-                                      uid_t uid,
-                                      bool internal = false) = 0;
+                                      uid_t uid) = 0;
     virtual status_t stopAudioSource(audio_port_handle_t portId) = 0;
 
     virtual status_t setMasterMono(bool mono) = 0;
@@ -478,7 +477,8 @@
                                 audio_config_base_t *mixerConfig,
                                 const sp<DeviceDescriptorBase>& device,
                                 uint32_t *latencyMs,
-                                audio_output_flags_t flags) = 0;
+                                audio_output_flags_t *flags,
+                                audio_attributes_t audioAttributes) = 0;
     // creates a special output that is duplicated to the two outputs passed as arguments.
     // The duplication is performed by a special mixer thread in the AudioFlinger.
     virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index ee6af4c..d499222 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -26,6 +26,27 @@
 
 using StreamTypeVector = std::vector<audio_stream_type_t>;
 
+/**
+ * Legacy audio policy product strategy IDs. These strategies are supported by the default
+ * policy engine.
+ * IMPORTANT NOTE: the order of this enum matters, as it determines the priority
+ * between active strategies for routing decisions: lower enum value => higher priority.
+ */
+enum legacy_strategy {
+    STRATEGY_NONE = -1,
+    STRATEGY_PHONE,
+    STRATEGY_SONIFICATION,
+    STRATEGY_ENFORCED_AUDIBLE,
+    STRATEGY_ACCESSIBILITY,
+    STRATEGY_SONIFICATION_RESPECTFUL,
+    STRATEGY_MEDIA,
+    STRATEGY_DTMF,
+    STRATEGY_CALL_ASSISTANT,
+    STRATEGY_TRANSMITTED_THROUGH_SPEAKER,
+    STRATEGY_REROUTING,
+    STRATEGY_PATCH,
+};
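Given that note, a tiny illustrative helper (hypothetical, not part of the change) showing how the enum order maps to priority:

    // Lower enum value means higher priority (ignoring STRATEGY_NONE), so the
    // higher-priority of two active strategies is simply the smaller value.
    static legacy_strategy higherPriority(legacy_strategy a, legacy_strategy b) {
        return a < b ? a : b;
    }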
+
 static const audio_attributes_t defaultAttr = AUDIO_ATTRIBUTES_INITIALIZER;
 
 static const std::set<audio_usage_t > gHighPriorityUseCases = {
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index e8b04ce..051e975 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -42,6 +42,7 @@
         "libaudioclient_aidl_conversion",
         "libaudiofoundation",
         "libaudiopolicy",
+        "libaudioutils",
         "libbase",
         "libcutils",
         "libhidlbase",
@@ -56,9 +57,6 @@
         "libmedia",
         "libmedia_helper",
     ],
-    static_libs: [
-        "libaudioutils",
-    ],
     header_libs: [
         "libaudiopolicycommon",
         "libaudiopolicymanager_interface_headers",
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h b/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
index 6167f95..e519766 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
@@ -47,13 +47,17 @@
 
     if (active) {
         // On MMAP IOs, the preferred device is selected by the first client (virtual client
-        // created when the mmap stream is opened). This client is never active.
+        // created when the mmap stream is opened). This client is never active, and only
+        // the filter criteria are considered, not the active state.
         // On non MMAP IOs, the preferred device is honored only if all active clients have
         // a preferred device in which case the first client drives the selection.
         if (desc->isMmap()) {
-            // The client list is never empty on a MMAP IO
-            return devices.getDeviceFromId(
-                    desc->clientsList(false /*activeOnly*/)[0]->preferredDeviceId());
+            auto matchingClients = desc->clientsList(
+                    false /*activeOnly*/, filter, false /*preferredDevice*/);
+            if (matchingClients.empty()) {
+                return nullptr;
+            }
+            return devices.getDeviceFromId(matchingClients[0]->preferredDeviceId());
         } else {
             auto activeClientsWithRoute =
                 desc->clientsList(true /*activeOnly*/, filter, true /*preferredDevice*/);
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 914f3fe..a18cf1f 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -412,8 +412,9 @@
                       const audio_config_base_t *mixerConfig,
                       const DeviceVector &devices,
                       audio_stream_type_t stream,
-                      audio_output_flags_t flags,
-                      audio_io_handle_t *output);
+                      audio_output_flags_t *flags,
+                      audio_io_handle_t *output,
+                      audio_attributes_t attributes);
 
         // Called when a stream is about to be started
         // Note: called before setClientActive(true);
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
index 80fa863..b193cb8 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
@@ -49,6 +49,7 @@
     static const constexpr char* const kDefaultConfigSource = "AudioPolicyConfig::setDefault";
     // The suffix of the "engine default" implementation shared library name.
     static const constexpr char* const kDefaultEngineLibraryNameSuffix = "default";
+    static const constexpr char* const kCapEngineLibraryNameSuffix = "configurable";
 
     // Creates the default (fallback) configuration.
     static sp<const AudioPolicyConfig> createDefault();
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index a596c43..60da405 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -63,6 +63,8 @@
      * HW Audio Source.
      */
     virtual bool isInternal() const { return false; }
+    virtual bool isCallRx() const { return false; }
+    virtual bool isCallTx() const { return false; }
     audio_port_handle_t portId() const { return mPortId; }
     uid_t uid() const { return mUid; }
     audio_session_t session() const { return mSessionId; };
@@ -236,7 +238,7 @@
                            const sp<DeviceDescriptor>& srcDevice,
                            audio_stream_type_t stream, product_strategy_t strategy,
                            VolumeSource volumeSource,
-                           bool isInternal);
+                           bool isInternal, bool isCallRx, bool isCallTx);
 
     ~SourceClientDescriptor() override = default;
 
@@ -263,6 +265,8 @@
     wp<HwAudioOutputDescriptor> hwOutput() const { return mHwOutput; }
     void setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput);
     bool isInternal() const override { return mIsInternal; }
+    bool isCallRx() const override { return mIsCallRx; }
+    bool isCallTx() const override { return mIsCallTx; }
 
     using ClientDescriptor::dump;
     void dump(String8 *dst, int spaces) const override;
@@ -294,6 +298,8 @@
      * requester to prevent rerouting SwOutput involved in raw patches.
      */
     bool mIsInternal = false;
+    bool mIsCallRx = false;
+    bool mIsCallTx = false;
 };
 
 class SourceClientCollection :
diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
index b92cd70..f7b9b33 100644
--- a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
@@ -99,21 +99,20 @@
      * @return ioHandle if found, AUDIO_IO_HANDLE_NONE otherwise.
      */
     audio_io_handle_t getIoForSession(audio_session_t sessionId,
-                                      const effect_uuid_t *effectType = nullptr) const;
-    bool hasOrphansForSession(audio_session_t sessionId) const;
-    EffectDescriptorCollection getOrphanEffectsForSession(audio_session_t sessionId) const;
-    void dump(String8 *dst, int spaces = 0, bool verbose = true) const;
+                                      const effect_uuid_t* effectType = nullptr) const;
 
     /**
-     * @brief Checks if there is at least one orphan effect with given sessionId and effect type
-     * uuid.
+     * @brief Checks if there is at least one orphan effect with the given sessionId and,
+     * optionally, a matching effect type UUID.
      * @param sessionId Session ID.
-     * @param effectType Effect type UUID, the implementation will be same as hasOrphansForSession
-     * if null.
+     * @param effectType Optional pointer to an effect type UUID (effect_uuid_t); nullptr by default.
      * @return True if there is an orphan effect for given sessionId and type UUID, false otherwise.
      */
-    bool hasOrphanEffectsForSessionAndType(audio_session_t sessionId,
-                                           const effect_uuid_t* effectType) const;
+    bool hasOrphansForSession(audio_session_t sessionId,
+                              const effect_uuid_t* effectType = nullptr) const;
+
+    EffectDescriptorCollection getOrphanEffectsForSession(audio_session_t sessionId) const;
+    void dump(String8 *dst, int spaces = 0, bool verbose = true) const;
 
 private:
     status_t setEffectEnabled(const sp<EffectDescriptor> &effectDesc, bool enabled);
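As a usage sketch of the merged API above (the collection and UUID variables are hypothetical), the optional effectType parameter lets one call check for any orphan effect or only orphans of a specific type:

    // Both checks now go through the same method; nullptr means "any effect type".
    bool anyOrphan   = effects.hasOrphansForSession(sessionId);
    bool typedOrphan = effects.hasOrphansForSession(sessionId, &someEffectTypeUuid);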
diff --git a/services/audiopolicy/common/managerdefinitions/include/HwModule.h b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
index d206637..26bb94f 100644
--- a/services/audiopolicy/common/managerdefinitions/include/HwModule.h
+++ b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
@@ -137,6 +137,7 @@
 class HwModuleCollection : public Vector<sp<HwModule> >
 {
 public:
+    sp<HwModule> getModuleFromHandle(audio_module_handle_t handle) const;
     sp<HwModule> getModuleFromName(const char *name) const;
 
     /**
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 68b2e7b..3c2f46a 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -587,8 +587,9 @@
                                        const audio_config_base_t *mixerConfig,
                                        const DeviceVector &devices,
                                        audio_stream_type_t stream,
-                                       audio_output_flags_t flags,
-                                       audio_io_handle_t *output)
+                                       audio_output_flags_t *flags,
+                                       audio_io_handle_t *output,
+                                       audio_attributes_t attributes)
 {
     mDevices = devices;
     sp<DeviceDescriptor> device = devices.getDeviceForOpening();
@@ -616,7 +617,7 @@
     // create a default one
     if ((mProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) &&
             lHalConfig.offload_info.format == AUDIO_FORMAT_DEFAULT) {
-        flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
+        *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
         lHalConfig.offload_info = AUDIO_INFO_INITIALIZER;
         lHalConfig.offload_info.sample_rate = lHalConfig.sample_rate;
         lHalConfig.offload_info.channel_mask = lHalConfig.channel_mask;
@@ -634,7 +635,7 @@
         lMixerConfig = *mixerConfig;
     }
 
-    mFlags = (audio_output_flags_t)(mFlags | flags);
+    mFlags = (audio_output_flags_t)(mFlags | *flags);
 
     // If no mixer config is specified for a spatializer output, default to 5.1 for proper
     // configuration of the final downmixer or spatializer
@@ -652,7 +653,9 @@
                                                    &lMixerConfig,
                                                    device,
                                                    &mLatency,
-                                                   mFlags);
+                                                   &mFlags,
+                                                   attributes);
+    *flags = mFlags;
 
     if (status == NO_ERROR) {
         LOG_ALWAYS_FATAL_IF(*output == AUDIO_IO_HANDLE_NONE,
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
index 58d7fc7..723887d 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
@@ -270,6 +270,9 @@
     mSurroundFormats = VALUE_OR_RETURN_STATUS(
             aidl2legacy_SurroundSoundConfig_SurroundFormats(aidl.surroundSoundConfig));
     mSource = kAidlConfigSource;
+    if (aidl.engineConfig.capSpecificConfig.has_value()) {
+        setEngineLibraryNameSuffix(kCapEngineLibraryNameSuffix);
+    }
     // No need to augmentData() as AIDL HAL must provide correct mic addresses.
     return NO_ERROR;
 }
@@ -339,7 +342,8 @@
                 AUDIO_FORMAT_AAC_XHE}},
         {AUDIO_FORMAT_DOLBY_TRUEHD, {}},
         {AUDIO_FORMAT_E_AC3_JOC, {}},
-        {AUDIO_FORMAT_AC4, {}}};
+        {AUDIO_FORMAT_AC4, {}},     // L0-3
+        {AUDIO_FORMAT_AC4_L4, {}}};
 }
 
 bool AudioPolicyConfig::useDeepBufferForMedia() const {
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 667c189..ad6977b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -96,12 +96,14 @@
 SourceClientDescriptor::SourceClientDescriptor(audio_port_handle_t portId, uid_t uid,
          audio_attributes_t attributes, const struct audio_port_config &config,
          const sp<DeviceDescriptor>& srcDevice, audio_stream_type_t stream,
-         product_strategy_t strategy, VolumeSource volumeSource, bool isInternal) :
+         product_strategy_t strategy, VolumeSource volumeSource,
+         bool isInternal, bool isCallRx, bool isCallTx) :
     TrackClientDescriptor::TrackClientDescriptor(portId, uid, AUDIO_SESSION_NONE, attributes,
         {config.sample_rate, config.channel_mask, config.format}, AUDIO_PORT_HANDLE_NONE,
         stream, strategy, volumeSource, AUDIO_OUTPUT_FLAG_NONE, false,
         {} /* Sources do not support secondary outputs*/, nullptr),
-    mSrcDevice(srcDevice), mIsInternal(isInternal)
+    mSrcDevice(srcDevice), mIsInternal(isInternal),
+    mIsCallRx(isCallRx), mIsCallTx(isCallTx)
 {
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
index 7971b61..090da6c 100644
--- a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
@@ -210,27 +210,13 @@
     }
 }
 
-bool EffectDescriptorCollection::hasOrphansForSession(audio_session_t sessionId) const
-{
+bool EffectDescriptorCollection::hasOrphansForSession(audio_session_t sessionId,
+                                                      const effect_uuid_t* effectType) const {
     for (size_t i = 0; i < size(); ++i) {
         sp<EffectDescriptor> effect = valueAt(i);
-        if (effect->mSession == sessionId && effect->mIsOrphan) {
-            return true;
-        }
-    }
-    return false;
-}
-
-bool EffectDescriptorCollection::hasOrphanEffectsForSessionAndType(
-        audio_session_t sessionId, const effect_uuid_t* effectType) const {
-    if (effectType == nullptr) {
-        return hasOrphansForSession(sessionId);
-    }
-
-    for (size_t i = 0; i < size(); ++i) {
-        sp<EffectDescriptor> effect = valueAt(i);
-        if (effect->mIsOrphan && effect->mSession == sessionId &&
-            memcmp(&effect->mDesc.type, effectType, sizeof(effect_uuid_t)) == 0) {
+        if (effect->mSession == sessionId && effect->mIsOrphan &&
+            (effectType == nullptr ||
+             memcmp(&effect->mDesc.type, effectType, sizeof(effect_uuid_t)) == 0)) {
             return true;
         }
     }
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index 6696b45..2d8231a 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -283,6 +283,16 @@
     dumpAudioRouteVector(mRoutes, dst, spaces);
 }
 
+sp<HwModule> HwModuleCollection::getModuleFromHandle(audio_module_handle_t handle) const
+{
+    for (const auto& module : *this) {
+        if (module->getHandle() == handle) {
+            return module;
+        }
+    }
+    return nullptr;
+}
+
 sp <HwModule> HwModuleCollection::getModuleFromName(const char *name) const
 {
     for (const auto& module : *this) {
diff --git a/services/audiopolicy/config/surround_sound_configuration_5_0.xml b/services/audiopolicy/config/surround_sound_configuration_5_0.xml
index 590a181..6a268d8 100644
--- a/services/audiopolicy/config/surround_sound_configuration_5_0.xml
+++ b/services/audiopolicy/config/surround_sound_configuration_5_0.xml
@@ -27,5 +27,6 @@
     <format name="AUDIO_FORMAT_DTS_HD" />
     <format name="AUDIO_FORMAT_AAC_LC" subformats="AUDIO_FORMAT_AAC_HE_V1 AUDIO_FORMAT_AAC_HE_V2 AUDIO_FORMAT_AAC_ELD AUDIO_FORMAT_AAC_XHE" />
     <format name="AUDIO_FORMAT_AC4" />
+    <format name="AUDIO_FORMAT_AC4_L4" />
   </formats>
 </surroundSound>
diff --git a/services/audiopolicy/config/surround_sound_configuration_aidl.xml b/services/audiopolicy/config/surround_sound_configuration_aidl.xml
index cf15711..51ccaa9 100644
--- a/services/audiopolicy/config/surround_sound_configuration_aidl.xml
+++ b/services/audiopolicy/config/surround_sound_configuration_aidl.xml
@@ -30,5 +30,6 @@
     <format name="AUDIO_FORMAT_DTS_UHD_P2" />
     <format name="AUDIO_FORMAT_AAC_LC" subformats="AUDIO_FORMAT_AAC_HE_V1 AUDIO_FORMAT_AAC_HE_V2 AUDIO_FORMAT_AAC_ELD AUDIO_FORMAT_AAC_XHE" />
     <format name="AUDIO_FORMAT_AC4" />
+    <format name="AUDIO_FORMAT_AC4_L4" />
   </formats>
 </surroundSound>
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
index d8aac37..7daa064 100644
--- a/services/audiopolicy/engine/common/Android.bp
+++ b/services/audiopolicy/engine/common/Android.bp
@@ -64,5 +64,6 @@
     ],
     defaults: [
         "aconfig_lib_cc_static_link.defaults",
+        "latest_android_media_audio_common_types_cpp_static",
     ],
 }
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
index b9c94a4..edb2e29 100644
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -129,6 +129,8 @@
 
     product_strategy_t getProductStrategyByName(const std::string &name) const;
 
+    std::string getProductStrategyName(product_strategy_t id) const;
+
     AudioPolicyManagerObserver *getApmObserver() const { return mApmObserver; }
 
     inline bool isInCall() const
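A small, hypothetical caller of the new getProductStrategyName() accessor; an unknown id yields an empty string, so callers should treat "" as "not found":

    std::string name = engine->getProductStrategyName(strategyId);
    if (name.empty()) {
        // strategyId does not match any registered product strategy; use a fallback label.
    }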
diff --git a/services/audiopolicy/engine/common/include/ProductStrategy.h b/services/audiopolicy/engine/common/include/ProductStrategy.h
index 1593be0..9b1125d 100644
--- a/services/audiopolicy/engine/common/include/ProductStrategy.h
+++ b/services/audiopolicy/engine/common/include/ProductStrategy.h
@@ -46,7 +46,7 @@
     using VolumeGroupAttributesVector = std::vector<VolumeGroupAttributes>;
 
 public:
-    ProductStrategy(const std::string &name);
+    ProductStrategy(const std::string &name, int id = PRODUCT_STRATEGY_NONE);
 
     void addAttributes(const VolumeGroupAttributes &volumeGroupAttributes);
 
@@ -92,6 +92,10 @@
 
     bool isDefault() const;
 
+    bool isPatchStrategy() const {
+        return getVolumeGroupForStreamType(AUDIO_STREAM_PATCH) != VOLUME_GROUP_NONE;
+    }
+
     void dump(String8 *dst, int spaces = 0) const;
 
 private:
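For orientation, a strategy qualifies as a patch strategy when one of its volume groups maps AUDIO_STREAM_PATCH; a hypothetical filter over a strategy map (container and variable names are illustrative) shows the intended use:

    std::vector<product_strategy_t> routable;
    for (const auto& iter : productStrategies) {
        if (!iter.second->isPatchStrategy()) {  // skip internal PATCH-style strategies
            routable.push_back(iter.second->getId());
        }
    }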
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index e259e6e..fb8379e 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -116,6 +116,15 @@
     return PRODUCT_STRATEGY_NONE;
 }
 
+std::string EngineBase::getProductStrategyName(product_strategy_t id) const {
+    for (const auto &iter : mProductStrategies) {
+        if (iter.second->getId() == id) {
+            return iter.second->getName();
+        }
+    }
+    return "";
+}
+
 engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig(
         const media::audio::common::AudioHalEngineConfig& aidlConfig)
 {
@@ -240,7 +249,7 @@
         loadVolumeConfig(mVolumeGroups, volumeConfig);
     }
     for (auto& strategyConfig : result.parsedConfig->productStrategies) {
-        sp<ProductStrategy> strategy = new ProductStrategy(strategyConfig.name);
+        sp<ProductStrategy> strategy = new ProductStrategy(strategyConfig.name, strategyConfig.id);
         for (const auto &group : strategyConfig.attributesGroups) {
             const auto &iter = std::find_if(begin(mVolumeGroups), end(mVolumeGroups),
                                          [&group](const auto &volumeGroup) {
@@ -302,6 +311,9 @@
     }
     StrategyVector orderedStrategies;
     for (const auto &iter : strategies) {
+        if (iter.second->isPatchStrategy()) {
+            continue;
+        }
         orderedStrategies.push_back(iter.second->getId());
     }
     return orderedStrategies;
@@ -733,6 +745,9 @@
     auto defaultDevices = DeviceVector(getApmObserver()->getDefaultOutputDevice());
     for (const auto &iter : getProductStrategies()) {
         const auto &strategy = iter.second;
+        if (strategy->isPatchStrategy()) {
+            continue;
+        }
         mDevicesForStrategies[strategy->getId()] = defaultDevices;
         setStrategyDevices(strategy, defaultDevices);
     }
@@ -741,6 +756,9 @@
 void EngineBase::updateDeviceSelectionCache() {
     for (const auto &iter : getProductStrategies()) {
         const auto& strategy = iter.second;
+        if (strategy->isPatchStrategy()) {
+            continue;
+        }
         auto devices = getDevicesForProductStrategy(strategy->getId());
         mDevicesForStrategies[strategy->getId()] = devices;
         setStrategyDevices(strategy, devices);
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index 548a20d..c4bf64a 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -18,14 +18,17 @@
 
 #include <EngineConfig.h>
 
+#include <media/AudioProductStrategy.h>
+#include <policy.h>
 #include <system/audio.h>
 
 namespace android {
+
 /**
  * @brief AudioProductStrategies hard coded array of strategies to fill new engine API contract.
  */
 const engineConfig::ProductStrategies gOrderedStrategies = {
-    {"STRATEGY_PHONE",
+    {"STRATEGY_PHONE", STRATEGY_PHONE,
      {
          {AUDIO_STREAM_VOICE_CALL, "AUDIO_STREAM_VOICE_CALL",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION, AUDIO_SOURCE_DEFAULT,
@@ -37,7 +40,7 @@
          }
      },
     },
-    {"STRATEGY_SONIFICATION",
+    {"STRATEGY_SONIFICATION", STRATEGY_SONIFICATION,
      {
          {AUDIO_STREAM_RING, "AUDIO_STREAM_RING",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
@@ -49,7 +52,7 @@
          }
      },
     },
-    {"STRATEGY_ENFORCED_AUDIBLE",
+    {"STRATEGY_ENFORCED_AUDIBLE", STRATEGY_ENFORCED_AUDIBLE,
      {
          {AUDIO_STREAM_ENFORCED_AUDIBLE, "AUDIO_STREAM_ENFORCED_AUDIBLE",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
@@ -57,7 +60,7 @@
          }
      },
     },
-    {"STRATEGY_ACCESSIBILITY",
+    {"STRATEGY_ACCESSIBILITY", STRATEGY_ACCESSIBILITY,
      {
          {AUDIO_STREAM_ACCESSIBILITY, "AUDIO_STREAM_ACCESSIBILITY",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
@@ -65,7 +68,7 @@
          }
      },
     },
-    {"STRATEGY_SONIFICATION_RESPECTFUL",
+    {"STRATEGY_SONIFICATION_RESPECTFUL", STRATEGY_SONIFICATION_RESPECTFUL,
      {
          {AUDIO_STREAM_NOTIFICATION, "AUDIO_STREAM_NOTIFICATION",
           {
@@ -77,7 +80,7 @@
          }
      },
     },
-    {"STRATEGY_MEDIA",
+    {"STRATEGY_MEDIA", STRATEGY_MEDIA,
      {
          {AUDIO_STREAM_ASSISTANT, "AUDIO_STREAM_ASSISTANT",
           {{AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
@@ -103,7 +106,7 @@
          }
      },
     },
-    {"STRATEGY_DTMF",
+    {"STRATEGY_DTMF", STRATEGY_DTMF,
      {
          {AUDIO_STREAM_DTMF, "AUDIO_STREAM_DTMF",
           {
@@ -113,7 +116,7 @@
          }
      },
     },
-    {"STRATEGY_CALL_ASSISTANT",
+    {"STRATEGY_CALL_ASSISTANT", STRATEGY_CALL_ASSISTANT,
      {
          {AUDIO_STREAM_CALL_ASSISTANT, "AUDIO_STREAM_CALL_ASSISTANT",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_CALL_ASSISTANT, AUDIO_SOURCE_DEFAULT,
@@ -121,7 +124,7 @@
          }
      },
     },
-    {"STRATEGY_TRANSMITTED_THROUGH_SPEAKER",
+    {"STRATEGY_TRANSMITTED_THROUGH_SPEAKER", STRATEGY_TRANSMITTED_THROUGH_SPEAKER,
      {
          {AUDIO_STREAM_TTS, "AUDIO_STREAM_TTS",
           {
@@ -140,7 +143,7 @@
  * For compatibility reasons with the APM volume config file, the volume group name is the stream type.
  */
 const engineConfig::ProductStrategies gOrderedSystemStrategies = {
-    {"STRATEGY_REROUTING",
+    {"STRATEGY_REROUTING", STRATEGY_REROUTING,
      {
          {AUDIO_STREAM_REROUTING, "AUDIO_STREAM_REROUTING",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VIRTUAL_SOURCE, AUDIO_SOURCE_DEFAULT,
@@ -148,7 +151,7 @@
          }
      },
     },
-    {"STRATEGY_PATCH",
+    {"STRATEGY_PATCH", STRATEGY_PATCH,
      {
          {AUDIO_STREAM_PATCH, "AUDIO_STREAM_PATCH",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
diff --git a/services/audiopolicy/engine/common/src/ProductStrategy.cpp b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
index 0d25955..8ed7403 100644
--- a/services/audiopolicy/engine/common/src/ProductStrategy.cpp
+++ b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
@@ -19,6 +19,7 @@
 
 #include "ProductStrategy.h"
 
+#include <android/media/audio/common/AudioHalProductStrategy.h>
 #include <media/AudioProductStrategy.h>
 #include <media/TypeConverter.h>
 #include <utils/String8.h>
@@ -30,11 +31,20 @@
 
 namespace android {
 
-ProductStrategy::ProductStrategy(const std::string &name) :
+using media::audio::common::AudioHalProductStrategy;
+
+/*
+ * Note on the id: either an id is provided (legacy strategies have a hard coded id, AIDL
+ * strategies carry their own id, enforced to start from VENDOR_STRATEGY_ID_START), or it is
+ * generated (the REROUTING & PATCH system strategies are added without one).
+ * To prevent collisions, generated ids also start from VENDOR_STRATEGY_ID_START.
+ */
+ProductStrategy::ProductStrategy(const std::string &name, int id) :
     mName(name),
-    mId(static_cast<product_strategy_t>(HandleGenerator<uint32_t>::getNextHandle()))
-{
-}
+    mId((static_cast<product_strategy_t>(id) != PRODUCT_STRATEGY_NONE) ?
+            static_cast<product_strategy_t>(id) :
+            static_cast<product_strategy_t>(AudioHalProductStrategy::VENDOR_STRATEGY_ID_START +
+                    HandleGenerator<uint32_t>::getNextHandle())) {}
 
 void ProductStrategy::addAttributes(const VolumeGroupAttributes &volumeGroupAttributes)
 {
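A minimal sketch of the id policy described in the note above (values are illustrative): legacy strategies pass their hard coded id, while a strategy constructed without one receives a generated id at or above VENDOR_STRATEGY_ID_START, keeping the two ranges disjoint:

    sp<ProductStrategy> phone  = new ProductStrategy("STRATEGY_PHONE", STRATEGY_PHONE);  // fixed legacy id
    sp<ProductStrategy> custom = new ProductStrategy("custom_strategy");                 // generated id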
diff --git a/services/audiopolicy/engine/config/Android.bp b/services/audiopolicy/engine/config/Android.bp
index ab2c134..d771605 100644
--- a/services/audiopolicy/engine/config/Android.bp
+++ b/services/audiopolicy/engine/config/Android.bp
@@ -10,6 +10,9 @@
 
 cc_library {
     name: "libaudiopolicyengine_config",
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
     export_include_dirs: ["include"],
     include_dirs: [
         "external/libxml2/include",
diff --git a/services/audiopolicy/engine/config/include/EngineConfig.h b/services/audiopolicy/engine/config/include/EngineConfig.h
index 119dbd6..054bdae 100644
--- a/services/audiopolicy/engine/config/include/EngineConfig.h
+++ b/services/audiopolicy/engine/config/include/EngineConfig.h
@@ -71,6 +71,7 @@
 
 struct ProductStrategy {
     std::string name;
+    int id;
     AttributesGroups attributesGroups;
 };
 
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index 3f9ae19..714ab78 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -20,7 +20,6 @@
 #include <sstream>
 #include <stdarg.h>
 #include <string>
-#include <string>
 #include <vector>
 #include <unordered_map>
 
@@ -42,6 +41,7 @@
 
 namespace android {
 
+using media::audio::common::AudioStreamType;
 using utilities::convertTo;
 
 namespace engineConfig {
@@ -66,6 +66,9 @@
                             STRATEGY_ENTRY(ACCESSIBILITY)};
 #undef STRATEGY_ENTRY
 
+    if (id >= media::audio::common::AudioHalProductStrategy::VENDOR_STRATEGY_ID_START) {
+        return std::to_string(id);
+    }
     auto it = productStrategyMap.find(id);
     if (it == productStrategyMap.end()) {
         return base::unexpected(BAD_VALUE);
@@ -76,8 +79,12 @@
 ConversionResult<AttributesGroup> aidl2legacy_AudioHalAttributeGroup_AttributesGroup(
         const media::audio::common::AudioHalAttributesGroup& aidl) {
     AttributesGroup legacy;
-    legacy.stream = VALUE_OR_RETURN(
-            aidl2legacy_AudioStreamType_audio_stream_type_t(aidl.streamType));
+    // StreamType may only be set to AudioStreamType.INVALID when using the
+    // Configurable Audio Policy (CAP) engine. An AudioHalAttributesGroup with
+    // AudioStreamType.INVALID is used when the volume group and attributes are
+    // not associated with any AudioStreamType.
+    legacy.stream = ((aidl.streamType == AudioStreamType::INVALID) ? AUDIO_STREAM_DEFAULT :
+            VALUE_OR_RETURN(aidl2legacy_AudioStreamType_audio_stream_type_t(aidl.streamType)));
     legacy.volumeGroup = aidl.volumeGroupName;
     legacy.attributesVect = VALUE_OR_RETURN(convertContainer<AttributesVector>(
                     aidl.attributes, aidl2legacy_AudioAttributes_audio_attributes_t));
@@ -87,8 +94,9 @@
 ConversionResult<ProductStrategy> aidl2legacy_AudioHalProductStrategy_ProductStrategy(
         const media::audio::common::AudioHalProductStrategy& aidl) {
     ProductStrategy legacy;
-    legacy.name = VALUE_OR_RETURN(
-                    aidl2legacy_AudioHalProductStrategy_ProductStrategyType(aidl.id));
+    legacy.name = aidl.name.value_or(VALUE_OR_RETURN(
+                    aidl2legacy_AudioHalProductStrategy_ProductStrategyType(aidl.id)));
+    legacy.id = aidl.id;
     legacy.attributesGroups = VALUE_OR_RETURN(convertContainer<AttributesGroups>(
                     aidl.attributesGroups,
                     aidl2legacy_AudioHalAttributeGroup_AttributesGroup));
@@ -148,7 +156,6 @@
                     aidl.volumeCurves, aidl2legacy_AudioHalVolumeCurve_VolumeCurve));
     return legacy;
 }
-
 }  // namespace
 
 template<typename E, typename C>
@@ -175,6 +182,7 @@
 
     struct Attributes {
         static constexpr const char *name = "name";
+        static constexpr const char *id = "id";
     };
     static android::status_t deserialize(_xmlDoc *doc, const _xmlNode *root, Collection &ps);
 };
@@ -533,13 +541,21 @@
         ALOGE("ProductStrategyTraits No attribute %s found", Attributes::name);
         return BAD_VALUE;
     }
+    int id = PRODUCT_STRATEGY_NONE;
+    std::string idLiteral = getXmlAttribute(child, Attributes::id);
+    if (!idLiteral.empty()) {
+        if (!convertTo(idLiteral, id)) {
+            return BAD_VALUE;
+        }
+        ALOGV("%s: %s, %s = %d", __FUNCTION__, name.c_str(), Attributes::id, id);
+    }
     ALOGV("%s: %s = %s", __FUNCTION__, Attributes::name, name.c_str());
 
     size_t skipped = 0;
     AttributesGroups attrGroups;
     deserializeCollection<AttributesGroupTraits>(doc, child, attrGroups, skipped);
 
-    strategies.push_back({name, attrGroups});
+    strategies.push_back({name, id, attrGroups});
     return NO_ERROR;
 }
 
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index a0a4bdf..1c98faf 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -36,10 +36,10 @@
         "libbase_headers",
     ],
     static_libs: [
+        "libaudiopolicycapengine_config",
         "libaudiopolicyengine_common",
         "libaudiopolicyengine_config",
         "libaudiopolicyengineconfigurable_pfwwrapper",
-
     ],
     shared_libs: [
         "libaudio_aidl_conversion_common_cpp",
@@ -56,5 +56,13 @@
     ],
     defaults: [
         "aconfig_lib_cc_static_link.defaults",
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+    required: [
+        "CapClass.xml",
+        "CapProductStrategies.xml",
+        "CapSubsystem-CommonTypes.xml",
+        "CapSubsystem.xml",
+        "ParameterFrameworkConfigurationCap.xml",
     ],
 }
diff --git a/services/audiopolicy/engineconfigurable/config/Android.bp b/services/audiopolicy/engineconfigurable/config/Android.bp
index 8dd13e8..95a7cf8 100644
--- a/services/audiopolicy/engineconfigurable/config/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/Android.bp
@@ -38,6 +38,53 @@
 }
 
 filegroup {
+    name: "audio_policy_engine_aidl_criterion_types_template",
+    srcs: ["example/common/audio_policy_engine_criterion_types_aidl.xml.in"],
+}
+
+filegroup {
     name: "audio_policy_engine_criteria",
     srcs: ["example/common/audio_policy_engine_criteria.xml"],
 }
+
+cc_library_headers {
+    name: "libaudiopolicycapengine_config_headers",
+    export_include_dirs: ["include"],
+}
+
+cc_library {
+    name: "libaudiopolicycapengine_config",
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+    export_header_lib_headers: [
+        "libaudiopolicycapengine_config_headers",
+    ],
+    include_dirs: [
+        "external/libxml2/include",
+    ],
+    srcs: [
+        "src/CapEngineConfig.cpp",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+    shared_libs: [
+        "libaudio_aidl_conversion_common_cpp",
+        "libaudiopolicycomponents",
+        "libaudiopolicyengine_config",
+        "libcutils",
+        "liblog",
+        "libmedia_helper",
+        "libutils",
+        "libxml2",
+    ],
+    header_libs: [
+        "libaudio_system_headers",
+        "libaudioclient_headers",
+        "libaudiopolicycapengine_config_headers",
+        "libmedia_headers",
+    ],
+}
diff --git a/services/audiopolicy/engineconfigurable/config/example/common/audio_policy_engine_criterion_types_aidl.xml.in b/services/audiopolicy/engineconfigurable/config/example/common/audio_policy_engine_criterion_types_aidl.xml.in
new file mode 100644
index 0000000..424c983
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/config/example/common/audio_policy_engine_criterion_types_aidl.xml.in
@@ -0,0 +1,97 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<criterion_types>
+    <criterion_type name="OutputDevicesMaskType" type="inclusive"/>
+    <criterion_type name="InputDevicesMaskType" type="inclusive"/>
+    <criterion_type name="OutputDevicesAddressesType" type="inclusive">
+        <values>
+            <!-- legacy remote submix -->
+            <value literal="0"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="InputDevicesAddressesType" type="inclusive">
+        <values>
+            <!-- legacy remote submix -->
+            <value literal="0"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="AndroidModeType" type="exclusive"/>
+    <criterion_type name="ForceUseForCommunicationType" type="exclusive">
+        <values>
+            <value literal="NONE"/>
+            <value literal="SPEAKER"/>
+            <value literal="BT_SCO"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="ForceUseForMediaType" type="exclusive">
+        <values>
+            <value literal="NONE"/>
+            <value literal="SPEAKER"/>
+            <value literal="HEADPHONES"/>
+            <value literal="BT_A2DP"/>
+            <value literal="ANALOG_DOCK"/>
+            <value literal="DIGITAL_DOCK"/>
+            <value literal="WIRED_ACCESSORY"/>
+            <value literal="NO_BT_A2DP"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="ForceUseForRecordType" type="exclusive">
+        <values>
+            <value literal="NONE"/>
+            <value literal="BT_SCO"/>
+            <value literal="WIRED_ACCESSORY"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="ForceUseForDockType" type="exclusive">
+        <values>
+            <value literal="NONE"/>
+            <value literal="BT_CAR_DOCK"/>
+            <value literal="BT_DESK_DOCK"/>
+            <value literal="ANALOG_DOCK"/>
+            <value literal="DIGITAL_DOCK"/>
+            <value literal="WIRED_ACCESSORY"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="ForceUseForSystemType" type="exclusive" >
+        <values>
+            <value literal="NONE"/>
+            <value literal="SYSTEM_ENFORCED"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="ForceUseForHdmiSystemAudioType" type="exclusive">
+        <values>
+            <value literal="NONE"/>
+            <value literal="HDMI_SYSTEM_AUDIO_ENFORCED"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="ForceUseForEncodedSurroundType" type="exclusive">
+        <values>
+            <value literal="UNSPECIFIED"/>
+            <value literal="NEVER"/>
+            <value literal="ALWAYS"/>
+            <value literal="MANUAL"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="ForceUseForVibrateRingingType" type="exclusive">
+        <values>
+            <value literal="NONE"/>
+            <value literal="BT_SCO"/>
+            <value literal="BT_BLE"/>
+        </values>
+    </criterion_type>
+</criterion_types>
+
+
diff --git a/services/audiopolicy/engineconfigurable/config/include/CapEngineConfig.h b/services/audiopolicy/engineconfigurable/config/include/CapEngineConfig.h
new file mode 100644
index 0000000..6c55a49
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/config/include/CapEngineConfig.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <EngineConfig.h>
+
+#include <android/media/audio/common/AudioHalEngineConfig.h>
+#include <system/audio_policy.h>
+#include <string>
+#include <vector>
+
+namespace android {
+namespace capEngineConfig {
+
+static const char *const gCriterionTypeSuffix = "Type";
+static const char *const gInputDeviceCriterionName = "AvailableInputDevices";
+static const char *const gOutputDeviceCriterionName = "AvailableOutputDevices";
+static const char *const gPhoneStateCriterionName = "TelephonyMode";
+static const char *const gOutputDeviceAddressCriterionName = "AvailableOutputDevicesAddresses";
+static const char *const gInputDeviceAddressCriterionName = "AvailableInputDevicesAddresses";
+
+/**
+ * Order MUST be aligned with definition of audio_policy_force_use_t within audio_policy.h
+ */
+static const char *const gForceUseCriterionTag[AUDIO_POLICY_FORCE_USE_CNT] =
+{
+    [AUDIO_POLICY_FORCE_FOR_COMMUNICATION] =        "ForceUseForCommunication",
+    [AUDIO_POLICY_FORCE_FOR_MEDIA] =                "ForceUseForMedia",
+    [AUDIO_POLICY_FORCE_FOR_RECORD] =               "ForceUseForRecord",
+    [AUDIO_POLICY_FORCE_FOR_DOCK] =                 "ForceUseForDock",
+    [AUDIO_POLICY_FORCE_FOR_SYSTEM] =               "ForceUseForSystem",
+    [AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO] =    "ForceUseForHdmiSystemAudio",
+    [AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND] =     "ForceUseForEncodedSurround",
+    [AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING] =      "ForceUseForVibrateRinging"
+};
+
+using ParameterValues = std::vector<std::string>;
+
+struct ConfigurableElement {
+    std::string path;
+};
+
+struct ConfigurableElementValue {
+    ConfigurableElement configurableElement;
+    std::string value;
+};
+using ConfigurableElementValues = std::vector<ConfigurableElementValue>;
+
+struct CapSetting {
+    std::string configurationName;
+    ConfigurableElementValues configurableElementValues;
+};
+using CapSettings = std::vector<CapSetting>;
+
+struct CapConfiguration {
+    std::string name;
+    std::string rule;
+};
+
+using ConfigurableElementPaths = std::vector<std::string>;
+using CapConfigurations = std::vector<CapConfiguration>;
+
+struct CapConfigurableDomain {
+    std::string name;
+    CapConfigurations configurations;
+    CapSettings settings;
+};
+
+struct CapCriterion {
+    engineConfig::Criterion criterion;
+    engineConfig::CriterionType criterionType;
+};
+
+using CapCriteria = std::vector<CapCriterion>;
+using CapConfigurableDomains = std::vector<CapConfigurableDomain>;
+
+struct CapConfig {
+    CapCriteria capCriteria;
+    CapConfigurableDomains capConfigurableDomains;
+};
+
+/** Result of `convert()` below. */
+struct ParsingResult {
+    /** Parsed config, nullptr if the xml lib could not load the file */
+    std::unique_ptr<CapConfig> parsedConfig;
+    size_t nbSkippedElement; //< Number of skipped invalid product strategies
+};
+
+/** Converts the provided CAP settings from the AIDL HAL engine configuration.
+ * @return the converted configuration and skip count, wrapped in a ParsingResult, @see CapConfig.
+ */
+ParsingResult convert(const ::android::media::audio::common::AudioHalEngineConfig& aidlConfig);
+
+}
+}
diff --git a/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp b/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
new file mode 100644
index 0000000..b89fba0
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
@@ -0,0 +1,666 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cinttypes>
+#include <cstdint>
+#include <istream>
+#include <map>
+#include <sstream>
+#include <stdarg.h>
+#include <string>
+#include <vector>
+
+#define LOG_TAG "APM::AudioPolicyEngine/CapConfig"
+//#define LOG_NDEBUG 0
+
+#include "CapEngineConfig.h"
+#include <TypeConverter.h>
+#include <Volume.h>
+#include <cutils/properties.h>
+#include <media/AidlConversion.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionUtil.h>
+#include <media/TypeConverter.h>
+#include <media/convert.h>
+#include <system/audio_config.h>
+#include <utils/Log.h>
+
+namespace android {
+
+using base::unexpected;
+using media::audio::common::AudioDeviceAddress;
+using media::audio::common::AudioDeviceDescription;
+using media::audio::common::AudioHalCapCriterion;
+using media::audio::common::AudioHalCapCriterionV2;
+using media::audio::common::AudioHalCapParameter;
+using media::audio::common::AudioHalCapRule;
+using media::audio::common::AudioPolicyForceUse;
+using media::audio::common::AudioSource;
+using media::audio::common::AudioStreamType;
+using utilities::convertTo;
+
+namespace capEngineConfig {
+
+static constexpr const char *gLegacyOutputDevicePrefix = "AUDIO_DEVICE_OUT_";
+static constexpr const char *gLegacyInputDevicePrefix = "AUDIO_DEVICE_IN_";
+static constexpr const char *gLegacyForcePrefix = "AUDIO_POLICY_FORCE_";
+static constexpr const char *gLegacyStreamPrefix = "AUDIO_STREAM_";
+static constexpr const char *gLegacySourcePrefix = "AUDIO_SOURCE_";
+static constexpr const char *gPolicyParamPrefix = "/Policy/policy/";
+
+namespace {
+
+ConversionResult<std::string> truncatePrefixToLower(const std::string& legacyName,
+                                                    const std::string& legacyPrefix) {
+    std::size_t pos = legacyName.find(legacyPrefix);
+    if (pos == std::string::npos) {
+        return unexpected(BAD_VALUE);
+    }
+    std::string capName = legacyName.substr(pos + legacyPrefix.length());
+    std::transform(capName.begin(), capName.end(), capName.begin(),
+                   [](unsigned char c) { return std::tolower(c); });
+    return capName;
+}
+
+ConversionResult<std::string> truncatePrefix(const std::string& name,  const std::string& prefix) {
+    std::size_t pos = name.find(prefix);
+    if (pos == std::string::npos) {
+        return unexpected(BAD_VALUE);
+    }
+    std::string capName = name.substr(pos + prefix.length());
+    return capName;
+}
+
+ConversionResult<audio_policy_forced_cfg_t>
+        aidl2legacy_AudioPolicyForceUseCommunicationDeviceCategory_audio_policy_forced_cfg_t(
+        const AudioPolicyForceUse::CommunicationDeviceCategory aidl) {
+    switch (aidl) {
+        case AudioPolicyForceUse::CommunicationDeviceCategory::NONE:
+            return AUDIO_POLICY_FORCE_NONE;
+        case AudioPolicyForceUse::CommunicationDeviceCategory::SPEAKER:
+            return AUDIO_POLICY_FORCE_SPEAKER;
+        case AudioPolicyForceUse::CommunicationDeviceCategory::BT_SCO:
+            return AUDIO_POLICY_FORCE_BT_SCO;
+        case AudioPolicyForceUse::CommunicationDeviceCategory::BT_BLE:
+            return AUDIO_POLICY_FORCE_BT_BLE;
+        case AudioPolicyForceUse::CommunicationDeviceCategory::WIRED_ACCESSORY:
+            return AUDIO_POLICY_FORCE_WIRED_ACCESSORY;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_policy_forced_cfg_t>
+        aidl2legacy_AudioPolicyForceUseMediaDeviceCategory_audio_policy_forced_cfg_t(
+        const AudioPolicyForceUse::MediaDeviceCategory aidl) {
+    switch (aidl) {
+        case AudioPolicyForceUse::MediaDeviceCategory::NONE:
+            return AUDIO_POLICY_FORCE_NONE;
+        case AudioPolicyForceUse::MediaDeviceCategory::SPEAKER:
+            return AUDIO_POLICY_FORCE_SPEAKER;
+        case AudioPolicyForceUse::MediaDeviceCategory::HEADPHONES:
+            return AUDIO_POLICY_FORCE_HEADPHONES;
+        case AudioPolicyForceUse::MediaDeviceCategory::BT_A2DP:
+            return AUDIO_POLICY_FORCE_BT_A2DP;
+        case AudioPolicyForceUse::MediaDeviceCategory::ANALOG_DOCK:
+            return AUDIO_POLICY_FORCE_ANALOG_DOCK;
+        case AudioPolicyForceUse::MediaDeviceCategory::DIGITAL_DOCK:
+            return AUDIO_POLICY_FORCE_DIGITAL_DOCK;
+        case AudioPolicyForceUse::MediaDeviceCategory::WIRED_ACCESSORY:
+            return AUDIO_POLICY_FORCE_WIRED_ACCESSORY;
+        case AudioPolicyForceUse::MediaDeviceCategory::NO_BT_A2DP:
+            return AUDIO_POLICY_FORCE_NO_BT_A2DP;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_policy_forced_cfg_t>
+        aidl2legacy_AudioPolicyForceUseDockType_audio_policy_forced_cfg_t(
+        const AudioPolicyForceUse::DockType aidl) {
+    switch (aidl) {
+        case AudioPolicyForceUse::DockType::NONE:
+            return AUDIO_POLICY_FORCE_NONE;
+        case AudioPolicyForceUse::DockType::BT_CAR_DOCK:
+            return AUDIO_POLICY_FORCE_BT_CAR_DOCK;
+        case AudioPolicyForceUse::DockType::BT_DESK_DOCK:
+            return AUDIO_POLICY_FORCE_BT_DESK_DOCK;
+        case AudioPolicyForceUse::DockType::ANALOG_DOCK:
+            return AUDIO_POLICY_FORCE_ANALOG_DOCK;
+        case AudioPolicyForceUse::DockType::DIGITAL_DOCK:
+            return AUDIO_POLICY_FORCE_DIGITAL_DOCK;
+        case AudioPolicyForceUse::DockType::WIRED_ACCESSORY:
+            return AUDIO_POLICY_FORCE_WIRED_ACCESSORY;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_policy_forced_cfg_t>
+        aidl2legacy_AudioPolicyForceUseEncodedSurroundConfig_audio_policy_forced_cfg_t(
+        const AudioPolicyForceUse::EncodedSurroundConfig aidl) {
+    switch (aidl) {
+        case AudioPolicyForceUse::EncodedSurroundConfig::UNSPECIFIED:
+            return AUDIO_POLICY_FORCE_NONE;
+        case AudioPolicyForceUse::EncodedSurroundConfig::NEVER:
+            return AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER;
+        case AudioPolicyForceUse::EncodedSurroundConfig::ALWAYS:
+            return AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS;
+        case AudioPolicyForceUse::EncodedSurroundConfig::MANUAL:
+            return AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<std::pair<audio_policy_force_use_t, audio_policy_forced_cfg_t>>
+        aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t_audio_policy_forced_cfg_t(
+        const AudioPolicyForceUse& aidl) {
+    switch (aidl.getTag()) {
+        case AudioPolicyForceUse::forCommunication:
+            return std::make_pair(
+                    AUDIO_POLICY_FORCE_FOR_COMMUNICATION,
+                    VALUE_OR_RETURN(
+                            aidl2legacy_AudioPolicyForceUseCommunicationDeviceCategory_audio_policy_forced_cfg_t(
+                                    aidl.get<AudioPolicyForceUse::forCommunication>())));
+        case AudioPolicyForceUse::forMedia:
+            return std::make_pair(
+                    AUDIO_POLICY_FORCE_FOR_MEDIA,
+                    VALUE_OR_RETURN(
+                            aidl2legacy_AudioPolicyForceUseMediaDeviceCategory_audio_policy_forced_cfg_t(
+                                    aidl.get<AudioPolicyForceUse::forMedia>())));
+        case AudioPolicyForceUse::forRecord:
+            return std::make_pair(
+                    AUDIO_POLICY_FORCE_FOR_RECORD,
+                    VALUE_OR_RETURN(
+                            aidl2legacy_AudioPolicyForceUseCommunicationDeviceCategory_audio_policy_forced_cfg_t(
+                                    aidl.get<AudioPolicyForceUse::forRecord>())));
+        case AudioPolicyForceUse::dock:
+            return std::make_pair(AUDIO_POLICY_FORCE_FOR_DOCK,
+                    VALUE_OR_RETURN(
+                            aidl2legacy_AudioPolicyForceUseDockType_audio_policy_forced_cfg_t(
+                                    aidl.get<AudioPolicyForceUse::dock>())));
+        case AudioPolicyForceUse::systemSounds:
+            return std::make_pair(AUDIO_POLICY_FORCE_FOR_SYSTEM,
+                    aidl.get<AudioPolicyForceUse::systemSounds>() ?
+                    AUDIO_POLICY_FORCE_SYSTEM_ENFORCED : AUDIO_POLICY_FORCE_NONE);
+        case AudioPolicyForceUse::hdmiSystemAudio:
+            return std::make_pair(
+                    AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO,
+                    aidl.get<AudioPolicyForceUse::hdmiSystemAudio>() ?
+                    AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED : AUDIO_POLICY_FORCE_NONE);
+        case AudioPolicyForceUse::encodedSurround:
+            return std::make_pair(AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, VALUE_OR_RETURN(
+                aidl2legacy_AudioPolicyForceUseEncodedSurroundConfig_audio_policy_forced_cfg_t(
+                        aidl.get<AudioPolicyForceUse::encodedSurround>())));
+        case AudioPolicyForceUse::forVibrateRinging:
+            return std::make_pair(
+                    AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING,
+                    VALUE_OR_RETURN(
+                            aidl2legacy_AudioPolicyForceUseCommunicationDeviceCategory_audio_policy_forced_cfg_t(
+                                    aidl.get<AudioPolicyForceUse::forVibrateRinging>())));
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<std::string> aidl2legacy_AudioHalCapCriterionV2_CapName(
+        const AudioHalCapCriterionV2& aidl) {
+    switch (aidl.getTag()) {
+        case AudioHalCapCriterionV2::availableInputDevices:
+            return gInputDeviceCriterionName;
+        case AudioHalCapCriterionV2::availableOutputDevices:
+            return gOutputDeviceCriterionName;
+        case AudioHalCapCriterionV2::availableInputDevicesAddresses:
+            return gInputDeviceAddressCriterionName;
+        case AudioHalCapCriterionV2::availableOutputDevicesAddresses:
+            return gOutputDeviceAddressCriterionName;
+        case AudioHalCapCriterionV2::telephonyMode:
+            return gPhoneStateCriterionName;
+        case AudioHalCapCriterionV2::forceConfigForUse: {
+            auto aidlCriterion = aidl.get<AudioHalCapCriterionV2::forceConfigForUse>().values[0];
+            const auto [forceUse, _] = VALUE_OR_RETURN(
+                aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t_audio_policy_forced_cfg_t(
+                        aidlCriterion));
+            return gForceUseCriterionTag[forceUse];
+        }
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<std::string> aidl2legacy_AudioHalCapCriterionV2TypeDevice_CapCriterionValue(
+        const AudioDeviceDescription& aidl) {
+    audio_devices_t legacyDeviceType = VALUE_OR_RETURN(
+            aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl));
+    bool isOut = audio_is_output_devices(legacyDeviceType);
+    std::string legacyTypeLiteral;
+    if (!::android::DeviceConverter::toString(legacyDeviceType, legacyTypeLiteral)) {
+        ALOGE("%s Invalid strategy device type %d", __func__, legacyDeviceType);
+        return unexpected(BAD_VALUE);
+    }
+    return truncatePrefix(legacyTypeLiteral,
+            isOut ? gLegacyOutputDevicePrefix : gLegacyInputDevicePrefix);
+}
+
+ConversionResult<audio_policy_forced_cfg_t>
+        aidl2legacy_AudioHalCapCriterionV2ForceUse_audio_policy_forced_cfg_t(
+        const AudioPolicyForceUse& aidl) {
+    const auto [_, legacyForcedCfg] = VALUE_OR_RETURN(
+            aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t_audio_policy_forced_cfg_t(
+                    aidl));
+    return legacyForcedCfg;
+}
+
+ConversionResult<std::string> audio_policy_forced_cfg_t_CapCriterionValue(
+        audio_policy_forced_cfg_t legacyForcedCfg) {
+    std::string legacyForcedCfgLiteral = audio_policy_forced_cfg_to_string(legacyForcedCfg);
+    if (legacyForcedCfgLiteral.empty()) {
+        ALOGE("%s Invalid forced config value %d", __func__, legacyForcedCfg);
+        return unexpected(BAD_VALUE);
+    }
+    return truncatePrefix(legacyForcedCfgLiteral, gLegacyForcePrefix);
+}
+
+ConversionResult<std::string> aidl2legacy_AudioHalCapCriterionV2ForceUse_CapCriterionValue(
+        const AudioPolicyForceUse& aidl) {
+    const audio_policy_forced_cfg_t legacyForcedCfg = VALUE_OR_RETURN(
+            aidl2legacy_AudioHalCapCriterionV2ForceUse_audio_policy_forced_cfg_t(aidl));
+    return audio_policy_forced_cfg_t_CapCriterionValue(legacyForcedCfg);
+}
+
+ConversionResult<std::string> aidl2legacy_AudioHalCapCriterionV2Type_CapCriterionValue(
+        const AudioHalCapCriterionV2& aidl) {
+    switch (aidl.getTag()) {
+        case AudioHalCapCriterionV2::availableInputDevices:
+            return aidl2legacy_AudioHalCapCriterionV2TypeDevice_CapCriterionValue(
+                    aidl.get<AudioHalCapCriterionV2::availableInputDevices>().values[0]);
+        case AudioHalCapCriterionV2::availableOutputDevices:
+            return aidl2legacy_AudioHalCapCriterionV2TypeDevice_CapCriterionValue(
+                    aidl.get<AudioHalCapCriterionV2::availableOutputDevices>().values[0]);
+        case AudioHalCapCriterionV2::availableInputDevicesAddresses:
+            return aidl.get<AudioHalCapCriterionV2::availableInputDevicesAddresses>().values[0].
+                    template get<AudioDeviceAddress::id>();
+        case AudioHalCapCriterionV2::availableOutputDevicesAddresses:
+            return aidl.get<AudioHalCapCriterionV2::availableOutputDevicesAddresses>().values[0].
+                    template get<AudioDeviceAddress::id>();
+        case AudioHalCapCriterionV2::telephonyMode:
+            return toString(aidl.get<AudioHalCapCriterionV2::telephonyMode>().values[0]);
+        case AudioHalCapCriterionV2::forceConfigForUse:
+            return aidl2legacy_AudioHalCapCriterionV2ForceUse_CapCriterionValue(
+                    aidl.get<AudioHalCapCriterionV2::forceConfigForUse>().values[0]);
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<std::string> aidl2legacy_AudioHalCapRule_CapRule(
+        const AudioHalCapRule& aidlRule) {
+    std::string rule;
+    switch (aidlRule.compoundRule) {
+        case AudioHalCapRule::CompoundRule::ANY:
+            rule += "Any";
+            break;
+        case AudioHalCapRule::CompoundRule::ALL:
+            rule += "All";
+            break;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+    rule += "{";
+    if (!aidlRule.nestedRules.empty()) {
+        for (const auto& nestedRule: aidlRule.nestedRules) {
+            rule += VALUE_OR_FATAL(aidl2legacy_AudioHalCapRule_CapRule(nestedRule));
+        }
+        if (!aidlRule.criterionRules.empty()) {
+            rule += ",";
+        }
+    }
+    bool isFirstCriterionRule = true;
+    for (const auto& criterionRule: aidlRule.criterionRules) {
+        if (!isFirstCriterionRule) {
+            rule += ",";
+        }
+        isFirstCriterionRule = false;
+        std::string selectionCriterion = VALUE_OR_RETURN(
+                aidl2legacy_AudioHalCapCriterionV2_CapName(criterionRule.criterionAndValue));
+        std::string matchesWhen;
+        std::string value = VALUE_OR_RETURN(
+                aidl2legacy_AudioHalCapCriterionV2Type_CapCriterionValue(
+                        criterionRule.criterionAndValue));
+
+        switch (criterionRule.matchingRule) {
+            case AudioHalCapRule::MatchingRule::IS:
+                matchesWhen = "Is";
+                break;
+            case AudioHalCapRule::MatchingRule::IS_NOT:
+                matchesWhen = "IsNot";
+                break;
+            case AudioHalCapRule::MatchingRule::INCLUDES:
+                matchesWhen = "Includes";
+                break;
+            case AudioHalCapRule::MatchingRule::EXCLUDES:
+                matchesWhen = "Excludes";
+                break;
+            default:
+                return unexpected(BAD_VALUE);
+        }
+        rule += selectionCriterion + " " + matchesWhen + " " + value;
+    }
+    rule += "}";
+    return rule;
+}
+
+ConversionResult<CapConfiguration> aidl2legacy_AudioHalCapConfiguration_CapConfiguration(
+        const media::audio::common::AudioHalCapConfiguration& aidl) {
+    CapConfiguration legacy;
+    legacy.name = aidl.name;
+    legacy.rule = VALUE_OR_FATAL(aidl2legacy_AudioHalCapRule_CapRule(aidl.rule));
+    return legacy;
+}
+
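+// Maps one AIDL CAP parameter setting onto a configurable-element path and its literal value.
+// Illustrative example only (the actual prefix comes from gPolicyParamPrefix):
+//   <prefix>product_strategies/vx_1000/selected_output_devices/mask/speaker = "1"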
+ConversionResult<ConfigurableElementValue> aidl2legacy_ParameterSetting_ConfigurableElementValue(
+        const AudioHalCapParameter& aidl) {
+    ConfigurableElementValue legacy;
+    std::string literalValue;
+    switch (aidl.getTag()) {
+        case AudioHalCapParameter::selectedStrategyDevice: {
+            auto strategyDevice = aidl.get<AudioHalCapParameter::selectedStrategyDevice>();
+            literalValue = std::to_string(strategyDevice.isSelected);
+            audio_devices_t legacyType = VALUE_OR_RETURN(
+                    aidl2legacy_AudioDeviceDescription_audio_devices_t(strategyDevice.device));
+            std::string legacyTypeLiteral;
+            if (!::android::OutputDeviceConverter::toString(legacyType, legacyTypeLiteral)) {
+                ALOGE("%s Invalid device type %d", __func__, legacyType);
+                return unexpected(BAD_VALUE);
+            }
+            std::string deviceLiteral = VALUE_OR_RETURN(
+                    truncatePrefixToLower(legacyTypeLiteral, gLegacyOutputDevicePrefix));
+            if (deviceLiteral == "default") {
+                deviceLiteral = "stub";
+            }
+            legacy.configurableElement.path = std::string(gPolicyParamPrefix)
+                    + "product_strategies/vx_" + std::to_string(strategyDevice.id)
+                    + "/selected_output_devices/mask/" + deviceLiteral;
+            break;
+        }
+        case AudioHalCapParameter::strategyDeviceAddress: {
+            auto strategyAddress = aidl.get<AudioHalCapParameter::strategyDeviceAddress>();
+            legacy.configurableElement.path = std::string(gPolicyParamPrefix)
+                    + "product_strategies/vx_" + std::to_string(strategyAddress.id)
+                    + "/device_address";
+            literalValue = strategyAddress.deviceAddress.get<AudioDeviceAddress::id>();
+            break;
+        }
+        case AudioHalCapParameter::selectedInputSourceDevice: {
+            auto inputSourceDevice = aidl.get<AudioHalCapParameter::selectedInputSourceDevice>();
+            literalValue = std::to_string(inputSourceDevice.isSelected);
+            audio_devices_t legacyType = VALUE_OR_RETURN(
+                    aidl2legacy_AudioDeviceDescription_audio_devices_t(inputSourceDevice.device));
+            std::string legacyTypeLiteral;
+            if (!::android::InputDeviceConverter::toString(legacyType, legacyTypeLiteral)) {
+                ALOGE("%s Invalid input source device type %d", __func__, legacyType);
+                return unexpected(BAD_VALUE);
+            }
+            std::string deviceLiteral = VALUE_OR_RETURN(
+                    truncatePrefixToLower(legacyTypeLiteral, gLegacyInputDevicePrefix));
+            if (deviceLiteral == "default") {
+                deviceLiteral = "stub";
+            }
+            audio_source_t legacySource = VALUE_OR_RETURN(aidl2legacy_AudioSource_audio_source_t(
+                    inputSourceDevice.inputSource));
+            std::string inputSourceLiteral;
+            if (!::android::SourceTypeConverter::toString(legacySource, inputSourceLiteral)) {
+                ALOGE("%s Invalid input source  %d", __func__, legacySource);
+                return unexpected(BAD_VALUE);
+            }
+            inputSourceLiteral = VALUE_OR_RETURN(
+                    truncatePrefixToLower(inputSourceLiteral, gLegacySourcePrefix));
+            legacy.configurableElement.path = std::string(gPolicyParamPrefix) + "input_sources/"
+                    + inputSourceLiteral + "/applicable_input_device/mask/" + deviceLiteral;
+            break;
+        }
+        case AudioHalCapParameter::streamVolumeProfile: {
+            auto streamVolumeProfile = aidl.get<AudioHalCapParameter::streamVolumeProfile>();
+            audio_stream_type_t legacyStreamType = VALUE_OR_RETURN(
+                    aidl2legacy_AudioStreamType_audio_stream_type_t(streamVolumeProfile.stream));
+            std::string legacyStreamLiteral;
+            if (!::android::StreamTypeConverter::toString(legacyStreamType, legacyStreamLiteral)) {
+                ALOGE("%s Invalid stream type  %d", __func__, legacyStreamType);
+                return unexpected(BAD_VALUE);
+            }
+            legacyStreamLiteral = VALUE_OR_RETURN(
+                    truncatePrefixToLower(legacyStreamLiteral, gLegacyStreamPrefix));
+
+            audio_stream_type_t legacyProfile = VALUE_OR_RETURN(
+                    aidl2legacy_AudioStreamType_audio_stream_type_t(streamVolumeProfile.profile));
+            std::string legacyProfileLiteral;
+            if (!::android::StreamTypeConverter::toString(legacyProfile, legacyProfileLiteral)) {
+                ALOGE("%s Invalid profile %d", __func__, legacyProfile);
+                return unexpected(BAD_VALUE);
+            }
+            literalValue = VALUE_OR_RETURN(
+                    truncatePrefixToLower(legacyProfileLiteral, gLegacyStreamPrefix));
+            legacy.configurableElement.path = std::string(gPolicyParamPrefix) + "streams/"
+                    + legacyStreamLiteral + "/applicable_volume_profile/volume_profile";
+            break;
+        }
+        default:
+            return unexpected(BAD_VALUE);
+    }
+    legacy.value = literalValue;
+    return legacy;
+}
+
+ConversionResult<CapSetting> aidl2legacy_AudioHalCapConfiguration_CapSetting(
+        const media::audio::common::AudioHalCapConfiguration& aidl) {
+    CapSetting legacy;
+    legacy.configurationName = aidl.name;
+    legacy.configurableElementValues = VALUE_OR_RETURN(convertContainer<ConfigurableElementValues>(
+            aidl.parameterSettings, aidl2legacy_ParameterSetting_ConfigurableElementValue));
+    return legacy;
+}
+
+ConversionResult<CapConfigurableDomain> aidl2legacy_AudioHalCapDomain_CapConfigurableDomain(
+        const media::audio::common::AudioHalCapDomain& aidl) {
+    CapConfigurableDomain legacy;
+    legacy.name = aidl.name;
+    legacy.configurations = VALUE_OR_RETURN(convertContainer<CapConfigurations>(
+            aidl.configurations,
+            aidl2legacy_AudioHalCapConfiguration_CapConfiguration));
+    legacy.settings = VALUE_OR_RETURN(convertContainer<CapSettings>(
+            aidl.configurations,
+            aidl2legacy_AudioHalCapConfiguration_CapSetting));
+    return legacy;
+}
+
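+// Converts one AIDL criterion into a legacy engine criterion plus its associated criterion type
+// (name, inclusive/exclusive logic and value pairs).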
+ConversionResult<CapCriterion> aidl2legacy_AudioHalCapCriterionV2_Criterion(
+            const AudioHalCapCriterionV2& aidl) {
+    CapCriterion capCriterion;
+    engineConfig::Criterion& criterion = capCriterion.criterion;
+    engineConfig::CriterionType& criterionType = capCriterion.criterionType;
+
+    auto loadForceUseCriterion = [](const auto& aidlCriterion, auto& criterion,
+                                    auto& criterionType) -> status_t {
+        if (aidlCriterion.values.empty()) {
+            return BAD_VALUE;
+        }
+        const auto [legacyForceUse, _] = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t_audio_policy_forced_cfg_t(
+                        aidlCriterion.values[0]));
+        criterion.name = gForceUseCriterionTag[legacyForceUse];
+        criterionType.name = criterion.name + gCriterionTypeSuffix;
+        criterion.typeName = criterionType.name;
+        criterionType.isInclusive =
+                (aidlCriterion.logic == AudioHalCapCriterionV2::LogicalDisjunction::INCLUSIVE);
+        criterion.defaultLiteralValue = toString(
+                aidlCriterion.defaultValue.template get<AudioPolicyForceUse::forMedia>());
+        for (auto &value : aidlCriterion.values) {
+            const audio_policy_forced_cfg_t legacyForcedCfg = VALUE_OR_RETURN_STATUS(
+                    aidl2legacy_AudioHalCapCriterionV2ForceUse_audio_policy_forced_cfg_t(value));
+            const std::string legacyForcedCfgLiteral = VALUE_OR_RETURN_STATUS(
+                    audio_policy_forced_cfg_t_CapCriterionValue(legacyForcedCfg));
+            criterionType.valuePairs.push_back(
+                    {legacyForcedCfg, 0, legacyForcedCfgLiteral});
+        }
+        return NO_ERROR;
+    };
+
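+    // Each supported device becomes one bit of the criterion type, e.g. (illustrative)
+    // {1 << 0, AUDIO_DEVICE_OUT_SPEAKER, "SPEAKER"} for the first output device value.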
+    auto loadDevicesCriterion = [](const auto &aidlCriterion, auto &criterion,
+            auto &criterionType) -> status_t {
+        criterionType.name = criterion.name + gCriterionTypeSuffix;
+        criterionType.isInclusive =
+                (aidlCriterion.logic == AudioHalCapCriterionV2::LogicalDisjunction::INCLUSIVE);
+        criterion.typeName = criterionType.name;
+        int shift = 0;
+        if (aidlCriterion.values.empty()) {
+            return BAD_VALUE;
+        }
+        for (const auto &value : aidlCriterion.values) {
+            audio_devices_t legacyDeviceType = VALUE_OR_RETURN_STATUS(
+                    aidl2legacy_AudioDeviceDescription_audio_devices_t(value));
+            bool isOut = audio_is_output_devices(legacyDeviceType);
+            std::string legacyTypeLiteral;
+            if (!::android::DeviceConverter::toString(legacyDeviceType, legacyTypeLiteral)) {
+                ALOGE("%s Invalid device type %d", __func__, legacyDeviceType);
+                return BAD_VALUE;
+            }
+            std::string deviceLiteral = VALUE_OR_RETURN_STATUS(truncatePrefix(legacyTypeLiteral,
+                    isOut ? gLegacyOutputDevicePrefix : gLegacyInputDevicePrefix));
+            uint64_t pfwCriterionValue = 1ULL << shift++;
+            criterionType.valuePairs.push_back(
+                    {pfwCriterionValue, static_cast<int32_t>(legacyDeviceType), deviceLiteral});
+            ALOGV("%s: adding %" PRIu64 " %d %s %s", __func__, pfwCriterionValue, legacyDeviceType,
+                    toString(value.type).c_str(), deviceLiteral.c_str());
+        }
+        return NO_ERROR;
+    };
+
+    auto loadDeviceAddressesCriterion = [](const auto &aidlCriterion, auto &criterion,
+            auto &criterionType) -> status_t {
+        criterionType.name = criterion.name + gCriterionTypeSuffix;
+        criterionType.isInclusive =
+                (aidlCriterion.logic == AudioHalCapCriterionV2::LogicalDisjunction::INCLUSIVE);
+        criterion.typeName = criterionType.name;
+        int shift = 0;
+        for (auto &value : aidlCriterion.values) {
+            uint64_t pfwCriterionValue = 1ULL << shift++;
+            if (value.getTag() != AudioDeviceAddress::id) {
+                return BAD_VALUE;
+            }
+            std::string address = value.template get<AudioDeviceAddress::id>();
+            criterionType.valuePairs.push_back({pfwCriterionValue, 0, address});
+        }
+        return NO_ERROR;
+    };
+
+    switch (aidl.getTag()) {
+        case AudioHalCapCriterionV2::availableInputDevices: {
+            auto aidlCriterion = aidl.get<AudioHalCapCriterionV2::availableInputDevices>();
+            criterion.name = gInputDeviceCriterionName;
+            if (loadDevicesCriterion(aidlCriterion, criterion, criterionType) != NO_ERROR) {
+                return unexpected(BAD_VALUE);
+            }
+            break;
+        }
+        case AudioHalCapCriterionV2::availableOutputDevices: {
+            auto aidlCriterion = aidl.get<AudioHalCapCriterionV2::availableOutputDevices>();
+            criterion.name = gOutputDeviceCriterionName;
+            if (loadDevicesCriterion(aidlCriterion, criterion, criterionType) != NO_ERROR) {
+                return unexpected(BAD_VALUE);
+            }
+            break;
+        }
+        case AudioHalCapCriterionV2::availableInputDevicesAddresses: {
+            auto aidlCriterion =
+                    aidl.get<AudioHalCapCriterionV2::availableInputDevicesAddresses>();
+            criterion.name = gInputDeviceAddressCriterionName;
+            if (loadDeviceAddressesCriterion(aidlCriterion, criterion, criterionType) != NO_ERROR) {
+                return unexpected(BAD_VALUE);
+            }
+            break;
+        }
+        case AudioHalCapCriterionV2::availableOutputDevicesAddresses: {
+            auto aidlCriterion =
+                    aidl.get<AudioHalCapCriterionV2::availableOutputDevicesAddresses>();
+            criterion.name = gOutputDeviceAddressCriterionName;
+            if (loadDeviceAddressesCriterion(aidlCriterion, criterion, criterionType) != NO_ERROR) {
+                return unexpected(BAD_VALUE);
+            }
+            break;
+        }
+        case AudioHalCapCriterionV2::telephonyMode: {
+            auto aidlCriterion = aidl.get<AudioHalCapCriterionV2::telephonyMode>();
+            criterion.name = gPhoneStateCriterionName;
+            criterionType.name = criterion.name + gCriterionTypeSuffix;
+            criterionType.isInclusive =
+                    (aidlCriterion.logic == AudioHalCapCriterionV2::LogicalDisjunction::INCLUSIVE);
+            criterion.typeName = criterionType.name;
+            criterion.defaultLiteralValue = toString(aidlCriterion.defaultValue);
+            if (aidlCriterion.values.empty()) {
+                return unexpected(BAD_VALUE);
+            }
+            for (auto &value : aidlCriterion.values) {
+                uint32_t legacyMode =
+                        VALUE_OR_RETURN(aidl2legacy_AudioMode_audio_mode_t(value));
+                criterionType.valuePairs.push_back({legacyMode, 0, toString(value)});
+            }
+            break;
+        }
+        case AudioHalCapCriterionV2::forceConfigForUse: {
+            auto aidlCriterion = aidl.get<AudioHalCapCriterionV2::forceConfigForUse>();
+            if (loadForceUseCriterion(aidlCriterion, criterion, criterionType) != NO_ERROR) {
+                return unexpected(BAD_VALUE);
+            }
+            break;
+        }
+        default:
+            return unexpected(BAD_VALUE);
+    }
+    return capCriterion;
+}
+
+}  // namespace
+
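+// Entry point: converts the AIDL CAP engine configuration into its legacy capEngineConfig
+// representation. An empty ParsingResult is returned when the configuration is missing or when
+// any criterion or domain fails to convert.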
+ParsingResult convert(const ::android::media::audio::common::AudioHalEngineConfig& aidlConfig) {
+    auto config = std::make_unique<capEngineConfig::CapConfig>();
+
+    if (!aidlConfig.capSpecificConfig.has_value() ||
+            !aidlConfig.capSpecificConfig.value().domains.has_value() ||
+            !aidlConfig.capSpecificConfig.value().criteriaV2.has_value()) {
+        ALOGE("%s: no Cap Engine config", __func__);
+        return ParsingResult{};
+    }
+    for (auto& aidlCriteria: aidlConfig.capSpecificConfig.value().criteriaV2.value()) {
+        if (aidlCriteria.has_value()) {
+            if (auto conv = aidl2legacy_AudioHalCapCriterionV2_Criterion(aidlCriteria.value());
+                    conv.ok()) {
+                config->capCriteria.push_back(std::move(conv.value()));
+            } else {
+                return ParsingResult{};
+            }
+        }
+    }
+    size_t skippedElement = 0;
+    for (auto& aidlDomain: aidlConfig.capSpecificConfig.value().domains.value()) {
+        if (aidlDomain.has_value()) {
+            if (auto conv = aidl2legacy_AudioHalCapDomain_CapConfigurableDomain(aidlDomain.value());
+                    conv.ok()) {
+                config->capConfigurableDomains.push_back(std::move(conv.value()));
+            } else {
+                return ParsingResult{};
+            }
+        } else {
+            skippedElement += 1;
+        }
+    }
+    return {.parsedConfig=std::move(config), .nbSkippedElement=skippedElement};
+}
+} // namespace capEngineConfig
+} // namespace android
diff --git a/services/audiopolicy/engineconfigurable/data/Android.bp b/services/audiopolicy/engineconfigurable/data/Android.bp
new file mode 100644
index 0000000..303cabc
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/data/Android.bp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+prebuilt_etc {
+    name: "CapClass.xml",
+    src: ":CapClass",
+    sub_dir: "parameter-framework/Structure/Policy",
+}
+
+prebuilt_etc {
+    name: "CapSubsystem.xml",
+    src: ":CapSubsystem",
+    sub_dir: "parameter-framework/Structure/Policy",
+}
+
+prebuilt_etc {
+    name: "CapSubsystem-CommonTypes.xml",
+    src: ":buildaidlcommontypesstructure_gen",
+    sub_dir: "parameter-framework/Structure/Policy",
+}
+
+prebuilt_etc {
+    name: "CapProductStrategies.xml",
+    src: ":cap_product_strategies_structure",
+    sub_dir: "parameter-framework/Structure/Policy",
+}
+
+prebuilt_etc {
+    name: "ParameterFrameworkConfigurationCap.xml",
+    src: ":ParameterFrameworkConfigurationCapSrc_gen",
+    sub_dir: "parameter-framework",
+}
+
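+// On debuggable builds, the generated top-level file enables parameter-framework tuning over the
+// audioserver policy_debug socket; otherwise the source file is copied unchanged.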
+genrule {
+    name: "ParameterFrameworkConfigurationCapSrc_gen",
+    out: ["ParameterFrameworkConfigurationCap.xml"],
+    srcs: [":ParameterFrameworkConfigurationCapSrc"],
+    product_variables: {
+        debuggable: {
+            cmd: "sed -e 's|TuningAllowed=\"false\"|TuningAllowed=\"true\" ServerPort=\"unix:///dev/socket/audioserver/policy_debug\"|g' <$(in) > $(out)",
+        },
+    },
+    cmd: "cp -f $(in) $(out)",
+}
+
+genrule {
+    name: "buildaidlcommontypesstructure_gen",
+    defaults: ["buildcommontypesstructurerule"],
+    out: ["CapSubsystem-CommonTypes.xml"],
+}
+
+filegroup {
+    name: "ParameterFrameworkConfigurationCapSrc",
+    srcs: ["etc/ParameterFrameworkConfigurationCap.xml"],
+}
+
+filegroup {
+    name: "cap_product_strategies_structure",
+    srcs: ["etc/Structure/CapProductStrategies.xml"],
+}
+
+filegroup {
+    name: "CapSubsystem",
+    srcs: ["etc/Structure/CapSubsystem.xml"],
+}
+
+filegroup {
+    name: "aidl_common_types_structure_template",
+    srcs: ["etc/Structure/CapSubsystem-CommonTypes.xml.in"],
+}
+
+filegroup {
+    name: "CapClass",
+    srcs: ["etc/Structure/CapClass.xml"],
+}
diff --git a/services/audiopolicy/engineconfigurable/data/etc/ParameterFrameworkConfigurationCap.xml b/services/audiopolicy/engineconfigurable/data/etc/ParameterFrameworkConfigurationCap.xml
new file mode 100644
index 0000000..bac7a25
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/data/etc/ParameterFrameworkConfigurationCap.xml
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ParameterFrameworkConfiguration xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    SystemClassName="Policy" TuningAllowed="false">
+
+    <SubsystemPlugins>
+        <Location Folder="">
+            <Plugin Name="libpolicy-subsystem.so"/>
+        </Location>
+    </SubsystemPlugins>
+    <StructureDescriptionFileLocation Path="Structure/Policy/CapClass.xml"/>
+</ParameterFrameworkConfiguration>
diff --git a/services/audiopolicy/engineconfigurable/data/etc/Structure/CapClass.xml b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapClass.xml
new file mode 100644
index 0000000..e233673
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapClass.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<SystemClass xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:noNamespaceSchemaLocation="../../Schemas/SystemClass.xsd" Name="Policy">
+    <SubsystemInclude Path="CapSubsystem.xml"/>
+</SystemClass>
diff --git a/services/audiopolicy/engineconfigurable/data/etc/Structure/CapProductStrategies.xml b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapProductStrategies.xml
new file mode 100644
index 0000000..61f056a
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapProductStrategies.xml
@@ -0,0 +1,63 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<ComponentTypeSet xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+              xmlns:xi="http://www.w3.org/2001/XInclude"
+              xsi:noNamespaceSchemaLocation="Schemas/ComponentTypeSet.xsd">
+  <!-- This structure is expected to be in the system partition and provisions the maximum
+       number of strategies allowed to be used by the vendor. -->
+  <ComponentType Name="ProductStrategies" Description="">
+    <Component Name="vx_1000" Type="ProductStrategy" Mapping="Identifier:1000,Name:vx_1000"/>
+    <Component Name="vx_1001" Type="ProductStrategy" Mapping="Identifier:1001,Name:vx_1001"/>
+    <Component Name="vx_1002" Type="ProductStrategy" Mapping="Identifier:1002,Name:vx_1002"/>
+    <Component Name="vx_1003" Type="ProductStrategy" Mapping="Identifier:1003,Name:vx_1003"/>
+    <Component Name="vx_1004" Type="ProductStrategy" Mapping="Identifier:1004,Name:vx_1004"/>
+    <Component Name="vx_1005" Type="ProductStrategy" Mapping="Identifier:1005,Name:vx_1005"/>
+    <Component Name="vx_1006" Type="ProductStrategy" Mapping="Identifier:1006,Name:vx_1006"/>
+    <Component Name="vx_1007" Type="ProductStrategy" Mapping="Identifier:1007,Name:vx_1007"/>
+    <Component Name="vx_1008" Type="ProductStrategy" Mapping="Identifier:1008,Name:vx_1008"/>
+    <Component Name="vx_1009" Type="ProductStrategy" Mapping="Identifier:1009,Name:vx_1009"/>
+    <Component Name="vx_1010" Type="ProductStrategy" Mapping="Identifier:1010,Name:vx_1010"/>
+    <Component Name="vx_1011" Type="ProductStrategy" Mapping="Identifier:1011,Name:vx_1011"/>
+    <Component Name="vx_1012" Type="ProductStrategy" Mapping="Identifier:1012,Name:vx_1012"/>
+    <Component Name="vx_1013" Type="ProductStrategy" Mapping="Identifier:1013,Name:vx_1013"/>
+    <Component Name="vx_1014" Type="ProductStrategy" Mapping="Identifier:1014,Name:vx_1014"/>
+    <Component Name="vx_1015" Type="ProductStrategy" Mapping="Identifier:1015,Name:vx_1015"/>
+    <Component Name="vx_1016" Type="ProductStrategy" Mapping="Identifier:1016,Name:vx_1016"/>
+    <Component Name="vx_1017" Type="ProductStrategy" Mapping="Identifier:1017,Name:vx_1017"/>
+    <Component Name="vx_1018" Type="ProductStrategy" Mapping="Identifier:1018,Name:vx_1018"/>
+    <Component Name="vx_1019" Type="ProductStrategy" Mapping="Identifier:1019,Name:vx_1019"/>
+    <Component Name="vx_1020" Type="ProductStrategy" Mapping="Identifier:1020,Name:vx_1020"/>
+    <Component Name="vx_1021" Type="ProductStrategy" Mapping="Identifier:1021,Name:vx_1021"/>
+    <Component Name="vx_1022" Type="ProductStrategy" Mapping="Identifier:1022,Name:vx_1022"/>
+    <Component Name="vx_1023" Type="ProductStrategy" Mapping="Identifier:1023,Name:vx_1023"/>
+    <Component Name="vx_1024" Type="ProductStrategy" Mapping="Identifier:1024,Name:vx_1024"/>
+    <Component Name="vx_1025" Type="ProductStrategy" Mapping="Identifier:1025,Name:vx_1025"/>
+    <Component Name="vx_1026" Type="ProductStrategy" Mapping="Identifier:1026,Name:vx_1026"/>
+    <Component Name="vx_1027" Type="ProductStrategy" Mapping="Identifier:1027,Name:vx_1027"/>
+    <Component Name="vx_1028" Type="ProductStrategy" Mapping="Identifier:1028,Name:vx_1028"/>
+    <Component Name="vx_1029" Type="ProductStrategy" Mapping="Identifier:1029,Name:vx_1029"/>
+    <Component Name="vx_1030" Type="ProductStrategy" Mapping="Identifier:1030,Name:vx_1030"/>
+    <Component Name="vx_1031" Type="ProductStrategy" Mapping="Identifier:1031,Name:vx_1031"/>
+    <Component Name="vx_1032" Type="ProductStrategy" Mapping="Identifier:1032,Name:vx_1032"/>
+    <Component Name="vx_1033" Type="ProductStrategy" Mapping="Identifier:1033,Name:vx_1033"/>
+    <Component Name="vx_1034" Type="ProductStrategy" Mapping="Identifier:1034,Name:vx_1034"/>
+    <Component Name="vx_1035" Type="ProductStrategy" Mapping="Identifier:1035,Name:vx_1035"/>
+    <Component Name="vx_1036" Type="ProductStrategy" Mapping="Identifier:1036,Name:vx_1036"/>
+    <Component Name="vx_1037" Type="ProductStrategy" Mapping="Identifier:1037,Name:vx_1037"/>
+    <Component Name="vx_1038" Type="ProductStrategy" Mapping="Identifier:1038,Name:vx_1038"/>
+    <Component Name="vx_1039" Type="ProductStrategy" Mapping="Identifier:1039,Name:vx_1039"/>
+  </ComponentType>
+</ComponentTypeSet>
diff --git a/services/audiopolicy/engineconfigurable/data/etc/Structure/CapSubsystem-CommonTypes.xml.in b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapSubsystem-CommonTypes.xml.in
new file mode 100644
index 0000000..2c4c7b5
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapSubsystem-CommonTypes.xml.in
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ComponentTypeSet xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+              xmlns:xi="http://www.w3.org/2001/XInclude"
+              xsi:noNamespaceSchemaLocation="Schemas/ComponentTypeSet.xsd">
+    <!-- Output devices defined as a bitfield of the supported devices per output
+    profile. It must match the output device enum parameter.
+    -->
+     <!--#################### GLOBAL COMPONENTS BEGIN ####################-->
+
+     <!--#################### GLOBAL COMPONENTS END ####################-->
+
+    <!-- Automatically filled from audio-base.h file -->
+    <ComponentType Name="OutputDevicesMask" Description="64bit representation of devices">
+        <BitParameterBlock Name="mask" Size="64">
+        </BitParameterBlock>
+    </ComponentType>
+
+    <!-- Input devices defined as a bitfield of the supported devices per input
+    profile. It must match the input device enum parameter.
+    -->
+    <!-- Automatically filled from audio-base.h file -->
+    <ComponentType Name="InputDevicesMask" Description="64bit representation of devices">
+        <BitParameterBlock Name="mask" Size="64">
+        </BitParameterBlock>
+    </ComponentType>
+
+    <!--#################### STREAM COMMON TYPES BEGIN ####################-->
+    <!-- Automatically filled from the audio-base.h file. VolumeProfileType is associated with a stream type -->
+    <ComponentType Name="VolumeProfileType">
+        <EnumParameter Name="volume_profile" Size="32">
+        </EnumParameter>
+    </ComponentType>
+
+    <ComponentType Name="Stream"  Mapping="Stream">
+        <Component Name="applicable_volume_profile" Type="VolumeProfileType"
+                   Description="Volume profile followed by a given stream type."/>
+    </ComponentType>
+
+    <!--#################### STREAM COMMON TYPES END ####################-->
+
+    <!--#################### INPUT SOURCE COMMON TYPES BEGIN ####################-->
+
+    <ComponentType Name="InputSource">
+        <Component Name="applicable_input_device" Type="InputDevicesMask"
+                   Mapping="InputSource" Description="Selected Input device"/>
+    </ComponentType>
+
+    <!--#################### INPUT SOURCE COMMON TYPES END ####################-->
+
+    <!--#################### PRODUCT STRATEGY COMMON TYPES BEGIN ####################-->
+
+    <ComponentType Name="ProductStrategy" Mapping="ProductStrategy">
+        <Component Name="selected_output_devices" Type="OutputDevicesMask"/>
+        <StringParameter Name="device_address" MaxLength="256"
+                         Description="if any, device address associated"/>
+    </ComponentType>
+
+    <!--#################### PRODUCT STRATEGY COMMON TYPES END ####################-->
+
+</ComponentTypeSet>
diff --git a/services/audiopolicy/engineconfigurable/data/etc/Structure/CapSubsystem.xml b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapSubsystem.xml
new file mode 100644
index 0000000..45a0bd4
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapSubsystem.xml
@@ -0,0 +1,93 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<Subsystem xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+           xmlns:xi="http://www.w3.org/2001/XInclude"
+           xsi:noNamespaceSchemaLocation="Schemas/Subsystem.xsd"
+           Name="policy" Type="Policy">
+
+    <ComponentLibrary>
+        <!--#################### GLOBAL COMPONENTS BEGIN ####################-->
+        <!-- Common Types definition -->
+        <xi:include href="CapSubsystem-CommonTypes.xml"/>
+        <xi:include href="CapProductStrategies.xml"/>
+
+
+        <!--#################### GLOBAL COMPONENTS END ####################-->
+
+        <!--#################### STREAM BEGIN ####################-->
+
+        <ComponentType Name="Streams" Description="associated to audio_stream_type_t definition">
+            <Component Name="voice_call" Type="Stream" Mapping="Name:AUDIO_STREAM_VOICE_CALL"/>
+            <Component Name="system" Type="Stream" Mapping="Name:AUDIO_STREAM_SYSTEM"/>
+            <Component Name="ring" Type="Stream" Mapping="Name:AUDIO_STREAM_RING"/>
+            <Component Name="music" Type="Stream" Mapping="Name:AUDIO_STREAM_MUSIC"/>
+            <Component Name="alarm" Type="Stream" Mapping="Name:AUDIO_STREAM_ALARM"/>
+            <Component Name="notification" Type="Stream" Mapping="Name:AUDIO_STREAM_NOTIFICATION"/>
+            <Component Name="bluetooth_sco" Type="Stream" Mapping="Name:AUDIO_STREAM_BLUETOOTH_SCO"/>
+            <Component Name="enforced_audible" Type="Stream" Mapping="Name:AUDIO_STREAM_ENFORCED_AUDIBLE"
+                       Description="Sounds that cannot be muted by user and must be routed to speaker"/>
+            <Component Name="dtmf" Type="Stream" Mapping="Name:AUDIO_STREAM_DTMF"/>
+            <Component Name="tts" Type="Stream" Mapping="Name:AUDIO_STREAM_TTS"
+                             Description="Transmitted Through Speaker. Plays over speaker only, silent on other devices"/>
+            <Component Name="accessibility" Type="Stream" Mapping="Name:AUDIO_STREAM_ACCESSIBILITY"
+                             Description="For accessibility talk back prompts"/>
+            <Component Name="assistant" Type="Stream" Mapping="Name:AUDIO_STREAM_ASSISTANT"
+                             Description="used by a virtual assistant like Google Assistant, Bixby, etc."/>
+            <Component Name="rerouting" Type="Stream" Mapping="Name:AUDIO_STREAM_REROUTING"
+                             Description="For dynamic policy output mixes"/>
+            <Component Name="patch" Type="Stream" Mapping="Name:AUDIO_STREAM_PATCH"
+                             Description="For internal audio flinger tracks. Fixed volume"/>
+        </ComponentType>
+
+        <!--#################### STREAM END ####################-->
+
+        <!--#################### INPUT SOURCE BEGIN ####################-->
+
+        <ComponentType Name="InputSources" Description="associated to audio_source_t definition,
+                             identifier mapping must match the value of the enum">
+            <Component Name="default" Type="InputSource" Mapping="Name:AUDIO_SOURCE_DEFAULT"/>
+            <Component Name="mic" Type="InputSource" Mapping="Name:AUDIO_SOURCE_MIC"/>
+            <Component Name="voice_uplink" Type="InputSource"
+                                           Mapping="Name:AUDIO_SOURCE_VOICE_UPLINK"/>
+            <Component Name="voice_downlink" Type="InputSource"
+                                             Mapping="Name:AUDIO_SOURCE_VOICE_DOWNLINK"/>
+            <Component Name="voice_call" Type="InputSource"
+                                         Mapping="Name:AUDIO_SOURCE_VOICE_CALL"/>
+            <Component Name="camcorder" Type="InputSource" Mapping="Name:AUDIO_SOURCE_CAMCORDER"/>
+            <Component Name="voice_recognition" Type="InputSource"
+                                                Mapping="Name:AUDIO_SOURCE_VOICE_RECOGNITION"/>
+            <Component Name="voice_communication" Type="InputSource"
+                                                  Mapping="Name:AUDIO_SOURCE_VOICE_COMMUNICATION"/>
+            <Component Name="remote_submix" Type="InputSource"
+                                            Mapping="Name:AUDIO_SOURCE_REMOTE_SUBMIX"/>
+            <Component Name="unprocessed" Type="InputSource"
+                                            Mapping="Name:AUDIO_SOURCE_UNPROCESSED"/>
+            <Component Name="voice_performance" Type="InputSource"
+                                            Mapping="Name:AUDIO_SOURCE_VOICE_PERFORMANCE"/>
+            <Component Name="echo_reference" Type="InputSource"
+                                            Mapping="Name:AUDIO_SOURCE_ECHO_REFERENCE"/>
+            <Component Name="fm_tuner" Type="InputSource" Mapping="Name:AUDIO_SOURCE_FM_TUNER"/>
+            <Component Name="hotword" Type="InputSource" Mapping="Name:AUDIO_SOURCE_HOTWORD"/>
+        </ComponentType>
+        <!--#################### INPUT SOURCE END ####################-->
+    </ComponentLibrary>
+
+    <InstanceDefinition>
+        <Component Name="streams" Type="Streams"/>
+        <Component Name="input_sources" Type="InputSources"/>
+        <Component Name="product_strategies" Type="ProductStrategies"/>
+    </InstanceDefinition>
+</Subsystem>
diff --git a/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h b/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h
index 9fd8b8e..e0b7210 100644
--- a/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h
+++ b/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h
@@ -98,7 +98,8 @@
     virtual bool setDeviceTypesForProductStrategy(product_strategy_t strategy,
                                                   uint64_t devices) = 0;
 
-    virtual product_strategy_t getProductStrategyByName(const std::string &address) = 0;
+    virtual product_strategy_t getProductStrategyByName(const std::string &name) = 0;
+    virtual std::string getProductStrategyName(product_strategy_t id) const = 0;
 
 protected:
     virtual ~AudioPolicyPluginInterface() {}
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
index 7fe111f..e4f44d5 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
@@ -50,6 +50,7 @@
 genrule {
     name: "buildcommontypesstructure_gen",
     defaults: ["buildcommontypesstructurerule"],
+    out: ["PolicySubsystem-CommonTypes.xml"],
 }
 
 filegroup {
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.cpp
index 8bd7f66..bf5767d 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.cpp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.cpp
@@ -72,7 +72,7 @@
         );
     addSubsystemObjectFactory(
         new TSubsystemObjectFactory<ProductStrategy>(
-            mProductStrategyComponentName, (1 << MappingKeyName))
+            mProductStrategyComponentName, (1 << MappingKeyName) | (1 << MappingKeyIdentifier))
         );
 }
 
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.cpp
index ebd9456..06efbf28 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.cpp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.cpp
@@ -18,10 +18,7 @@
 #include "PolicyMappingKeys.h"
 #include "PolicySubsystem.h"
 
-using std::string;
-using android::product_strategy_t;
-
-ProductStrategy::ProductStrategy(const string &mappingValue,
+ProductStrategy::ProductStrategy(const std::string &mappingValue,
                    CInstanceConfigurableElement *instanceConfigurableElement,
                    const CMappingContext &context,
                    core::log::Logger& logger)
@@ -30,26 +27,32 @@
                                 mappingValue,
                                 MappingKeyAmend1,
                                 (MappingKeyAmendEnd - MappingKeyAmend1 + 1),
-                                context)
-{
-    std::string name(context.getItem(MappingKeyName));
+                                context) {
+
+    size_t id = context.getItemAsInteger(MappingKeyIdentifier);
+    std::string nameFromStructure(context.getItem(MappingKeyName));
 
     ALOG_ASSERT(instanceConfigurableElement != nullptr, "Invalid Configurable Element");
     mPolicySubsystem = static_cast<const PolicySubsystem *>(
-                instanceConfigurableElement->getBelongingSubsystem());
+            instanceConfigurableElement->getBelongingSubsystem());
     ALOG_ASSERT(mPolicySubsystem != nullptr, "Invalid Policy Subsystem");
 
     mPolicyPluginInterface = mPolicySubsystem->getPolicyPluginInterface();
     ALOG_ASSERT(mPolicyPluginInterface != nullptr, "Invalid Policy Plugin Interface");
 
-    mId = mPolicyPluginInterface->getProductStrategyByName(name);
+    mId = static_cast<android::product_strategy_t>(id);
+    std::string name = mPolicyPluginInterface->getProductStrategyName(mId);
+    if (name.empty()) {
+        name = nameFromStructure;
+        mId = mPolicyPluginInterface->getProductStrategyByName(name);
+    }
 
     ALOG_ASSERT(mId != PRODUCT_STRATEGY_INVALID, "Product Strategy %s not found", name.c_str());
 
     ALOGE("Product Strategy %s added", name.c_str());
 }
 
-bool ProductStrategy::sendToHW(string & /*error*/)
+bool ProductStrategy::sendToHW(std::string & /*error*/)
 {
     Device deviceParams;
     blackboardRead(&deviceParams, sizeof(deviceParams));
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index ccd4316..45da7b0 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -71,27 +71,75 @@
 }
 
 status_t Engine::loadFromHalConfigWithFallback(
-        const media::audio::common::AudioHalEngineConfig& config __unused) {
-    // b/242678729. Need to implement for the configurable engine.
-    return INVALID_OPERATION;
-}
+        const media::audio::common::AudioHalEngineConfig& aidlConfig) {
 
-status_t Engine::loadFromXmlConfigWithFallback(const std::string& xmlFilePath)
-{
-    status_t loadResult = loadAudioPolicyEngineConfig(xmlFilePath);
-    if (loadResult < 0) {
-        ALOGE("Policy Engine configuration is invalid.");
+    auto capResult = capEngineConfig::convert(aidlConfig);
+    if (capResult.parsedConfig == nullptr) {
+        ALOGE("%s CapEngine Config invalid", __func__);
+        return BAD_VALUE;
     }
-    return loadResult;
-}
-
-status_t Engine::initCheck()
-{
+    status_t ret = loadWithFallback(aidlConfig);
+    if (ret != NO_ERROR) {
+        return ret;
+    }
+    auto loadCriteria = [this](const auto& capCriteria) {
+        for (auto& capCriterion : capCriteria) {
+            mPolicyParameterMgr->addCriterion(capCriterion.criterion.name,
+                    capCriterion.criterionType.isInclusive,
+                    capCriterion.criterionType.valuePairs,
+                    capCriterion.criterion.defaultLiteralValue);
+        }
+    };
+    loadCriteria(capResult.parsedConfig->capCriteria);
     std::string error;
     if (mPolicyParameterMgr == nullptr || mPolicyParameterMgr->start(error) != NO_ERROR) {
         ALOGE("%s: could not start Policy PFW: %s", __FUNCTION__, error.c_str());
         return NO_INIT;
     }
+    return mPolicyParameterMgr->setConfiguration(capResult);
+}
+
+status_t Engine::loadFromXmlConfigWithFallback(const std::string& xmlFilePath)
+{
+    status_t status = loadWithFallback(xmlFilePath);
+    std::string error;
+    if (mPolicyParameterMgr == nullptr || mPolicyParameterMgr->start(error) != NO_ERROR) {
+        ALOGE("%s: could not start Policy PFW: %s", __FUNCTION__, error.c_str());
+        return NO_INIT;
+    }
+    return status;
+}
+
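+// Shared loading path for both configuration sources (XML file path or AIDL
+// AudioHalEngineConfig); the criteria parsed by EngineBase are registered with the policy
+// parameter-framework manager.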
+template<typename T>
+status_t Engine::loadWithFallback(const T& configSource) {
+    auto result = EngineBase::loadAudioPolicyEngineConfig(configSource);
+    ALOGE_IF(result.nbSkippedElement != 0,
+             "Policy Engine configuration is partially invalid, skipped %zu elements",
+             result.nbSkippedElement);
+
+    auto loadCriteria = [this](const auto& configCriteria, const auto& configCriterionTypes) {
+        for (auto& criterion : configCriteria) {
+            engineConfig::CriterionType criterionType;
+            for (auto &configCriterionType : configCriterionTypes) {
+                if (configCriterionType.name == criterion.typeName) {
+                    criterionType = configCriterionType;
+                    break;
+                }
+            }
+            ALOG_ASSERT(not criterionType.name.empty(), "Invalid criterion type for %s",
+                        criterion.name.c_str());
+            mPolicyParameterMgr->addCriterion(criterion.name, criterionType.isInclusive,
+                                              criterionType.valuePairs,
+                                              criterion.defaultLiteralValue);
+        }
+    };
+
+    loadCriteria(result.parsedConfig->criteria, result.parsedConfig->criterionTypes);
+    return result.nbSkippedElement == 0 ? NO_ERROR : BAD_VALUE;
+}
+
+status_t Engine::initCheck()
+{
     return EngineBase::initCheck();
 }
 
@@ -199,32 +247,6 @@
     return EngineBase::setDeviceConnectionState(device, state);
 }
 
-status_t Engine::loadAudioPolicyEngineConfig(const std::string& xmlFilePath)
-{
-    auto result = EngineBase::loadAudioPolicyEngineConfig(xmlFilePath);
-
-    // Custom XML Parsing
-    auto loadCriteria= [this](const auto& configCriteria, const auto& configCriterionTypes) {
-        for (auto& criterion : configCriteria) {
-            engineConfig::CriterionType criterionType;
-            for (auto &configCriterionType : configCriterionTypes) {
-                if (configCriterionType.name == criterion.typeName) {
-                    criterionType = configCriterionType;
-                    break;
-                }
-            }
-            ALOG_ASSERT(not criterionType.name.empty(), "Invalid criterion type for %s",
-                        criterion.name.c_str());
-            mPolicyParameterMgr->addCriterion(criterion.name, criterionType.isInclusive,
-                                              criterionType.valuePairs,
-                                              criterion.defaultLiteralValue);
-        }
-    };
-
-    loadCriteria(result.parsedConfig->criteria, result.parsedConfig->criterionTypes);
-    return result.nbSkippedElement == 0? NO_ERROR : BAD_VALUE;
-}
-
 status_t Engine::setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
                                            const AudioDeviceTypeAddrVector &devices)
 {
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index 4f3e620..d9ebbe7 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -16,10 +16,11 @@
 
 #pragma once
 
-#include "EngineBase.h"
-#include <EngineInterface.h>
-#include <AudioPolicyPluginInterface.h>
 #include "Collection.h"
+#include "EngineBase.h"
+#include <AudioPolicyPluginInterface.h>
+#include <CapEngineConfig.h>
+#include <EngineInterface.h>
 
 namespace android {
 class AudioPolicyManagerObserver;
@@ -107,8 +108,14 @@
     {
         return EngineBase::getProductStrategyByName(name);
     }
+    std::string getProductStrategyName(product_strategy_t id) const override {
+        return EngineBase::getProductStrategyName(id);
+    }
 
 private:
+    template<typename T>
+    status_t loadWithFallback(const T& configSource);
+
     android::status_t disableDevicesForStrategy(product_strategy_t strategy,
             const DeviceVector &devicesToDisable);
     void enableDevicesForStrategy(product_strategy_t strategy, const DeviceVector &devicesToEnable);
@@ -140,8 +147,6 @@
     template <typename Property, typename Key>
     bool setPropertyForKey(const Property &property, const Key &key);
 
-    status_t loadAudioPolicyEngineConfig(const std::string& xmlFilePath);
-
     DeviceVector getCachedDevices(product_strategy_t ps) const;
 
     ///
diff --git a/services/audiopolicy/engineconfigurable/tools/Android.bp b/services/audiopolicy/engineconfigurable/tools/Android.bp
index d1fb2fb..7ae124c 100644
--- a/services/audiopolicy/engineconfigurable/tools/Android.bp
+++ b/services/audiopolicy/engineconfigurable/tools/Android.bp
@@ -23,7 +23,7 @@
 }
 
 //##################################################################################################
-// Tools for audio policy engine criterion type configuration file
+// Legacy tools for audio policy engine criterion type configuration file
 //
 python_binary_host {
     name: "buildPolicyCriterionTypes",
@@ -57,6 +57,40 @@
 }
 
 //##################################################################################################
+// Tools for audio policy engine criterion type configuration file
+//
+python_binary_host {
+    name: "capBuildPolicyCriterionTypes",
+    main: "capBuildPolicyCriterionTypes.py",
+    srcs: [
+        "capBuildPolicyCriterionTypes.py",
+    ],
+}
+
+genrule_defaults {
+    name: "capbuildpolicycriteriontypesrule",
+    tools: ["capBuildPolicyCriterionTypes"],
+    cmd: "cp $(locations :audio_policy_configuration_files) $(genDir)/. && " +
+        "cp $(location :audio_policy_configuration_top_file) $(genDir)/audio_policy_configuration.xml && " +
+        "$(location capBuildPolicyCriterionTypes) " +
+        " --androidaudiobaseheader $(location :libaudio_system_audio_base) " +
+        " --androidaudiocommonbaseheader $(location :libaudio_system_audio_common_base) " +
+        "--audiopolicyconfigurationfile $(genDir)/audio_policy_configuration.xml " +
+        "--criteriontypes $(location :audio_policy_engine_aidl_criterion_types_template) " +
+        "--outputfile $(out)",
+    srcs: [
+        // The commented inputs must be provided to use this genrule_defaults
+        // @todo uncomment if 1428659 is merged: ":android_audio_base_header_file",
+        ":audio_policy_engine_aidl_criterion_types_template",
+        ":libaudio_system_audio_base",
+        ":libaudio_system_audio_common_base",
+        // ":audio_policy_configuration_top_file",
+        // ":audio_policy_configuration_files",
+    ],
+    out: ["audio_policy_engine_criterion_types.xml"],
+}
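+
+// Illustrative usage only: a module is expected to extend these defaults and provide the inputs
+// commented out above, e.g.
+//   genrule {
+//       name: "audio_policy_engine_criterion_types_gen", // hypothetical name
+//       defaults: ["capbuildpolicycriteriontypesrule"],
+//       srcs: [":audio_policy_configuration_top_file", ":audio_policy_configuration_files"],
+//   }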
+
+//##################################################################################################
 // Tools for audio policy engine parameter framework configurable domains
 //
 python_binary_host {
@@ -84,6 +118,7 @@
     cmd: "mkdir -p $(genDir)/Structure/Policy && " +
         "cp $(locations :audio_policy_pfw_structure_files) $(genDir)/Structure/Policy && " +
         "cp $(location :audio_policy_pfw_toplevel) $(genDir)/top_level && " +
+        "sed -i -e 's|TuningAllowed=\"false\"|TuningAllowed=\"true\" ServerPort=\"unix:///dev/socket/audioserver/policy_debug\"|g' $(genDir)/top_level &&" +
         "$(location domainGeneratorPolicy) " +
         "--validate " +
         "--domain-generator-tool $(location domainGeneratorConnector) " +
@@ -106,7 +141,7 @@
 }
 
 //##################################################################################################
-// Tools for policy parameter-framework product strategies structure file generation
+// Legacy tools for policy parameter-framework product strategies structure file generation
 //
 python_binary_host {
     name: "buildStrategiesStructureFile",
@@ -154,5 +189,4 @@
         ":common_types_structure_template",
         ":libaudio_system_audio_base",
     ],
-    out: ["PolicySubsystem-CommonTypes.xml"],
 }
diff --git a/services/audiopolicy/engineconfigurable/tools/buildStrategiesStructureFile.py b/services/audiopolicy/engineconfigurable/tools/buildStrategiesStructureFile.py
index f69d346..6eabc57 100755
--- a/services/audiopolicy/engineconfigurable/tools/buildStrategiesStructureFile.py
+++ b/services/audiopolicy/engineconfigurable/tools/buildStrategiesStructureFile.py
@@ -75,7 +75,7 @@
     strategy_components = strategies_root.find('ComponentType')
 
     for strategy_name in strategies:
-        context_mapping = "".join(map(str, ["Name:", strategy_name]))
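+        # Identifier -1 does not match any strategy id, so the policy plugin falls back to
+        # resolving the strategy by name (see the ProductStrategy.cpp change in this patch).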
+        context_mapping = "".join(map(str, ["Identifier:-1,Name:", strategy_name]))
         strategy_pfw_name = strategy_name.replace('STRATEGY_', '').lower()
         ET.SubElement(strategy_components, "Component",
                       Name=strategy_pfw_name, Type="ProductStrategy",
diff --git a/services/audiopolicy/engineconfigurable/tools/capBuildPolicyCriterionTypes.py b/services/audiopolicy/engineconfigurable/tools/capBuildPolicyCriterionTypes.py
new file mode 100755
index 0000000..1adc602
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/tools/capBuildPolicyCriterionTypes.py
@@ -0,0 +1,361 @@
+#!/usr/bin/python3
+
+#
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+import re
+import sys
+import os
+import logging
+import xml.etree.ElementTree as ET
+import xml.etree.ElementInclude as EI
+import xml.dom.minidom as MINIDOM
+from collections import OrderedDict
+
+#
+# Helper script that generates, at build time, the XML criterion types file used by
+# the configurable engine to start the parameter-framework.
+# It avoids having to fill the file manually and prevents divergence from Android.
+#
+# The Device Types criterion types are fed from audio-base.h file with the option
+#           --androidaudiobaseheader <path/to/android/audio/base/file/audio-base.h>
+#
+# The Device Addresses criterion types are fed from the audio policy configuration file
+# in order to discover all the devices for which the address matter.
+#           --audiopolicyconfigurationfile <path/to/audio_policy_configuration.xml>
+#
+# The reference file of criterion types must also be set as an input of the script:
+#           --criteriontypes <path/to/criterion/file/audio_criterion_types.xml.in>
+#
+# At last, the output of the script shall be set also:
+#           --outputfile <path/to/out/vendor/etc/audio_criterion_types.xml>
+#
+
+def parseArgs():
+    argparser = argparse.ArgumentParser(description="Parameter-Framework XML \
+                                        audio criterion type file generator.\n\
+                                        Exits with the number of (recoverable or not) \
+                                        errors that occurred.")
+    argparser.add_argument('--androidaudiobaseheader',
+                           help="Android Audio Base C header file, Mandatory.",
+                           metavar="ANDROID_AUDIO_BASE_HEADER",
+                           type=argparse.FileType('r'),
+                           required=True)
+    argparser.add_argument('--androidaudiocommonbaseheader',
+                           help="Android Audio CommonBase C header file, Mandatory.",
+                           metavar="ANDROID_AUDIO_COMMON_BASE_HEADER",
+                           type=argparse.FileType('r'),
+                           required=True)
+    argparser.add_argument('--audiopolicyconfigurationfile',
+                           help="Android Audio Policy Configuration file, Mandatory.",
+                           metavar="(AUDIO_POLICY_CONFIGURATION_FILE)",
+                           type=argparse.FileType('r'),
+                           required=True)
+    argparser.add_argument('--criteriontypes',
+                           help="Criterion types XML base file, in \
+                           '<criterion_types> \
+                               <criterion_type name="" type=<inclusive|exclusive> \
+                               values=<value1,value2,...>/>' \
+                           format. Mandatory.",
+                           metavar="CRITERION_TYPE_FILE",
+                           type=argparse.FileType('r'),
+                           required=True)
+    argparser.add_argument('--outputfile',
+                           help="Criterion types outputfile file. Mandatory.",
+                           metavar="CRITERION_TYPE_OUTPUT_FILE",
+                           type=argparse.FileType('w'),
+                           required=True)
+    argparser.add_argument('--verbose',
+                           action='store_true')
+
+    return argparser.parse_args()
+
+
+output_devices_type_value = {}
+input_devices_type_value = {}
+
+def generateXmlCriterionTypesFile(criterionTypes, addressCriteria, criterionTypesFile, outputFile):
+
+    logging.info("Importing criterionTypesFile {}".format(criterionTypesFile))
+    criterion_types_in_tree = ET.parse(criterionTypesFile)
+
+    criterion_types_root = criterion_types_in_tree.getroot()
+
+    for criterion_name, values_dict in criterionTypes.items():
+        for criterion_type in criterion_types_root.findall('criterion_type'):
+            if criterion_type.get('name') == criterion_name:
+                values_node = ET.SubElement(criterion_type, "values")
+                ordered_values = OrderedDict(sorted(values_dict.items(), key=lambda x: x[1]))
+                for key, value in ordered_values.items():
+                    value_node = ET.SubElement(values_node, "value")
+                    value_node.set('literal', key)
+
+                    if criterion_type.get('name') == "OutputDevicesMaskType":
+                        value_node.set('android_type', output_devices_type_value[key])
+                    if criterion_type.get('name') == "InputDevicesMaskType":
+                        value_node.set('android_type', input_devices_type_value[key])
+
+    if addressCriteria:
+        for criterion_name, values_list in addressCriteria.items():
+            for criterion_type in criterion_types_root.findall('criterion_type'):
+                if criterion_type.get('name') == criterion_name:
+                    existing_values_node = criterion_type.find("values")
+                    if existing_values_node is not None:
+                        values_node = existing_values_node
+                    else:
+                        values_node = ET.SubElement(criterion_type, "values")
+
+                    for value in values_list:
+                        value_node = ET.SubElement(values_node, "value", literal=value)
+
+    xmlstr = ET.tostring(criterion_types_root, encoding='utf8', method='xml')
+    reparsed = MINIDOM.parseString(xmlstr)
+    prettyXmlStr = reparsed.toprettyxml(newl='\r\n')
+    prettyXmlStr = os.linesep.join([s for s in prettyXmlStr.splitlines() if s.strip()])
+    outputFile.write(prettyXmlStr)
+
+def capitalizeLine(line):
+    return ' '.join((w.capitalize() for w in line.split(' ')))
+
+
+#
+# Parse the audio policy configuration file and output a dictionary of device criteria addresses
+#
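+# Illustrative shape of the returned dictionary (the addresses are device-specific placeholders):
+#   {'OutputDevicesAddressesType': ['bus0_media_out'],
+#    'InputDevicesAddressesType': ['bottom']}
+#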
+def parseAndroidAudioPolicyConfigurationFile(audiopolicyconfigurationfile):
+
+    logging.info("Checking Audio Policy Configuration file {}".format(audiopolicyconfigurationfile))
+    #
+    # extract all devices addresses from audio policy configuration file
+    #
+    address_criteria_mapping_table = {
+        'sink' : "OutputDevicesAddressesType",
+        'source' : "InputDevicesAddressesType"}
+
+    address_criteria = {
+        'OutputDevicesAddressesType' : [],
+        'InputDevicesAddressesType' : []}
+
+    old_working_dir = os.getcwd()
+    print("Current working directory %s" % old_working_dir)
+
+    new_dir = os.path.join(old_working_dir, audiopolicyconfigurationfile.name)
+
+    policy_in_tree = ET.parse(audiopolicyconfigurationfile)
+    os.chdir(os.path.dirname(os.path.normpath(new_dir)))
+
+    print("new working directory %s" % os.getcwd())
+
+    policy_root = policy_in_tree.getroot()
+    EI.include(policy_root)
+
+    os.chdir(old_working_dir)
+
+    for device in policy_root.iter('devicePort'):
+        for key in address_criteria_mapping_table.keys():
+            if device.get('role') == key and device.get('address'):
+                logging.info("{}: <{}>".format(key, device.get('address')))
+                address_criteria[address_criteria_mapping_table[key]].append(device.get('address'))
+
+    for criteria in address_criteria:
+        values = ','.join(address_criteria[criteria])
+        logging.info("{}: <{}>".format(criteria, values))
+
+    return address_criteria
+
+#
+# Parse the audio-base.h file and output a dictionary of android dependent criterion types:
+#   -Android Mode
+#   -Output devices type
+#   -Input devices type
+#
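+# Illustrative shape of the returned dictionary (literals and numerical values are placeholders):
+#   {'AndroidModeType': {'NORMAL': 0, 'RINGTONE': 1},
+#    'OutputDevicesMaskType': {'SPEAKER': 2},
+#    'InputDevicesMaskType': {'BUILTIN_MIC': 4}}
+#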
+def parseAndroidAudioFile(androidaudiobaseheaderFile, androidaudiocommonbaseheaderFile):
+    #
+    # Adaptation table between Android Enumeration prefix and Audio PFW Criterion type names
+    #
+    criterion_mapping_table = {
+        'HAL_AUDIO_MODE' : "AndroidModeType",
+        'AUDIO_DEVICE_OUT' : "OutputDevicesMaskType",
+        'AUDIO_DEVICE_IN' : "InputDevicesMaskType"}
+
+    all_criteria = {
+        'AndroidModeType' : {},
+        'OutputDevicesMaskType' : {},
+        'InputDevicesMaskType' : {}}
+
+    #
+    # _CNT, _MAX, _ALL and _NONE are prohibited values as they are just helpers for enum users.
+    #
+    ignored_values = ['CNT', 'MAX', 'ALL', 'NONE']
+
+    multi_bit_outputdevice_shift = 32
+    multi_bit_inputdevice_shift = 32
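+    # Device masks with more than one bit set are remapped below to a fresh single-bit value,
+    # starting at 2**32: for example, a mask such as 0x6 would become 2**32 and the next
+    # multi-bit mask 2**33 (an internal renumbering, presumably because each value of the
+    # criterion type is expected to carry a single bit).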
+
+    criteria_pattern = re.compile(
+        r"\s*V\((?P<type>(?:"+'|'.join(criterion_mapping_table.keys()) + "))_" \
+        r"(?P<literal>(?!" + '|'.join(ignored_values) + r")\w*)\s*,\s*" \
+        r"(?:AUDIO_DEVICE_BIT_IN \| )?(?P<values>(?:0[xX])?[0-9a-fA-F]+|[0-9]+)")
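+    # Illustrative examples of lines this pattern is meant to match (format inferred from the
+    # regex itself, not taken from an actual header):
+    #   V(AUDIO_DEVICE_OUT_SPEAKER, 0x2)
+    #   V(AUDIO_DEVICE_IN_BUILTIN_MIC, AUDIO_DEVICE_BIT_IN | 0x4)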
+
+    logging.info("Checking Android Header file {}".format(androidaudiobaseheaderFile))
+
+    for line_number, line in enumerate(androidaudiobaseheaderFile):
+        match = criteria_pattern.match(line)
+        if match:
+            logging.debug("The following line is VALID: {}:{}\n{}".format(
+                androidaudiobaseheaderFile.name, line_number, line))
+
+            criterion_name = criterion_mapping_table[match.groupdict()['type']]
+            criterion_literal = ''.join(match.groupdict()['literal'])
+            criterion_numerical_value = match.groupdict()['values']
+
+            if criterion_name == "InputDevicesMaskType":
+                # AMBIENT and COMMUNICATION are deprecated input devices and must be dropped
+                # from the generated criterion type.
+                if criterion_literal == "AMBIENT" or criterion_literal == "COMMUNICATION":
+                    logging.info("Removing deprecated device {}".format(criterion_literal))
+                    continue
+                # for AUDIO_DEVICE_IN: rename default to stub
+                elif criterion_literal == "DEFAULT":
+                    criterion_numerical_value = str(int("0x40000000", 0))
+                    input_devices_type_value[criterion_literal] = "0xC0000000"
+                else:
+                    try:
+                        string_int = int(criterion_numerical_value, 0)
+                        # Set AUDIO_DEVICE_BIT_IN (0x80000000) in the android_type tag
+                        input_devices_type_value[criterion_literal] = hex(string_int | 0x80000000)
+
+                        num_bits = bin(string_int).count("1")
+                        if num_bits > 1:
+                            logging.info("The value {}:{} for criterion {} (binary {}) has {} bits set"
+                                .format(criterion_numerical_value, criterion_literal, criterion_name, bin(string_int), num_bits))
+                            string_int = 2**multi_bit_inputdevice_shift
+                            logging.info("New value assigned: {} ({})".format(string_int, bin(string_int)))
+                            multi_bit_inputdevice_shift += 1
+                            criterion_numerical_value = str(string_int)
+
+                    except ValueError:
+                        # Handle the exception
+                        logging.info("value {}:{} for criterion {} is not a number, ignoring"
+                            .format(criterion_numerical_value, criterion_literal, criterion_name))
+                        continue
+
+            if criterion_name == "OutputDevicesMaskType":
+                if criterion_literal == "DEFAULT":
+                    criterion_numerical_value = str(int("0x40000000", 0))
+                    output_devices_type_value[criterion_literal] = "0x40000000"
+                else:
+                    try:
+                        string_int = int(criterion_numerical_value, 0)
+                        output_devices_type_value[criterion_literal] = criterion_numerical_value
+
+                        num_bits = bin(string_int).count("1")
+                        if num_bits > 1:
+                            logging.info("The value {}:{} for criterion {} (binary {}) has {} bits set"
+                                .format(criterion_numerical_value, criterion_literal, criterion_name, bin(string_int), num_bits))
+                            string_int = 2**multi_bit_outputdevice_shift
+                            logging.info("New value assigned: {} ({})".format(string_int, bin(string_int)))
+                            multi_bit_outputdevice_shift += 1
+                            criterion_numerical_value = str(string_int)
+
+                    except ValueError:
+                        # Handle the exception
+                        logging.info("The value {}:{} for criterion {} is not a number, ignoring"
+                            .format(criterion_numerical_value, criterion_literal, criterion_name))
+                        continue
+
+            try:
+                string_int = int(criterion_numerical_value, 0)
+
+            except ValueError:
+                # Handle the exception
+                logging.info("The value {}:{} for criterion {} is not a number, ignoring"
+                    .format(criterion_numerical_value, criterion_literal, criterion_name))
+                continue
+
+            # Remove duplicated numerical values
+            if int(criterion_numerical_value, 0) in all_criteria[criterion_name].values():
+                logging.info("Criterion {} has duplicated values:".format(criterion_name))
+                logging.info("{}:{}".format(criterion_numerical_value, criterion_literal))
+                logging.info("Keeping the latest entry")
+                for key in list(all_criteria[criterion_name]):
+                    if all_criteria[criterion_name][key] == int(criterion_numerical_value, 0):
+                        del all_criteria[criterion_name][key]
+
+            all_criteria[criterion_name][criterion_literal] = int(criterion_numerical_value, 0)
+
+            logging.debug("type:{},".format(criterion_name))
+            logging.debug("literal:{},".format(criterion_literal))
+            logging.debug("values:{}.".format(criterion_numerical_value))
+
+    logging.info("Checking Android Common Header file {}".format(androidaudiocommonbaseheaderFile))
+
+    criteria_pattern = re.compile(
+        r"\s*(?P<type>(?:"+'|'.join(criterion_mapping_table.keys()) + "))_" \
+        r"(?P<literal>(?!" + '|'.join(ignored_values) + r")\w*)\s*=\s*" \
+        r"(?:AUDIO_DEVICE_BIT_IN \| )?(?P<values>(?:0[xX])?[0-9a-fA-F]+|[0-9]+)")
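+    # Illustrative example of a line this pattern is meant to match (format inferred from the
+    # regex itself, not taken from an actual header):
+    #   HAL_AUDIO_MODE_IN_CALL = 2,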
+
+    for line_number, line in enumerate(androidaudiocommonbaseheaderFile):
+        match = criteria_pattern.match(line)
+        if match:
+            logging.debug("The following line is VALID: {}:{}\n{}".format(
+                androidaudiocommonbaseheaderFile.name, line_number, line))
+
+            criterion_name = criterion_mapping_table[match.groupdict()['type']]
+            criterion_literal = ''.join(match.groupdict()['literal'])
+            criterion_numerical_value = match.groupdict()['values']
+
+            try:
+                string_int = int(criterion_numerical_value, 0)
+            except ValueError:
+                # Handle the exception
+                logging.info("The value {}:{} for criterion {} is not a number, ignoring"
+                    .format(criterion_numerical_value, criterion_literal, criterion_name))
+                continue
+
+            # Remove duplicated numerical values
+            if int(criterion_numerical_value, 0) in all_criteria[criterion_name].values():
+                logging.info("Criterion {} has duplicated values:".format(criterion_name))
+                logging.info("{}:{}".format(criterion_numerical_value, criterion_literal))
+                logging.info("Keeping the latest entry")
+                for key in list(all_criteria[criterion_name]):
+                    if all_criteria[criterion_name][key] == int(criterion_numerical_value, 0):
+                        del all_criteria[criterion_name][key]
+
+            all_criteria[criterion_name][criterion_literal] = int(criterion_numerical_value, 0)
+
+            logging.debug("type:{},".format(criterion_name))
+            logging.debug("literal:{},".format(criterion_literal))
+            logging.debug("values:{}.".format(criterion_numerical_value))
+
+    return all_criteria
+
+
+def main():
+    logging.root.setLevel(logging.INFO)
+    args = parseArgs()
+
+    all_criteria = parseAndroidAudioFile(args.androidaudiobaseheader,
+                                         args.androidaudiocommonbaseheader)
+
+    address_criteria = parseAndroidAudioPolicyConfigurationFile(args.audiopolicyconfigurationfile)
+
+    criterion_types = args.criteriontypes
+
+    generateXmlCriterionTypesFile(all_criteria, address_criteria, criterion_types, args.outputfile)
+
+# If this file is directly executed
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.bp b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
index 78d5fa3..506b19b 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.bp
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
@@ -19,10 +19,12 @@
     ],
     header_libs: [
         "libaudiofoundation_headers",
+        "libaudiopolicycapengine_config_headers",
         "libaudiopolicycommon",
         "libbase_headers",
     ],
     shared_libs: [
+        "libaudiopolicyengine_config",
         "liblog",
         "libmedia_helper",
         "libparameter",
diff --git a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
index 099d55d..0bcde8d 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
+++ b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
@@ -18,13 +18,13 @@
 //#define LOG_NDEBUG 0
 
 #include "ParameterManagerWrapper.h"
+#include <ParameterMgrFullConnector.h>
 #include <ParameterMgrPlatformConnector.h>
 #include <SelectionCriterionTypeInterface.h>
 #include <SelectionCriterionInterface.h>
 #include <media/convert.h>
 #include <algorithm>
 #include <cutils/bitops.h>
-#include <cutils/config_utils.h>
 #include <cutils/misc.h>
 #include <fstream>
 #include <limits>
@@ -64,31 +64,10 @@
 namespace audio_policy {
 
 const char *const ParameterManagerWrapper::mPolicyPfwDefaultConfFileName =
-    "/etc/parameter-framework/ParameterFrameworkConfigurationPolicy.xml";
+    "/etc/parameter-framework/ParameterFrameworkConfigurationCap.xml";
 const char *const ParameterManagerWrapper::mPolicyPfwVendorConfFileName =
     "/vendor/etc/parameter-framework/ParameterFrameworkConfigurationPolicy.xml";
 
-static const char *const gInputDeviceCriterionName = "AvailableInputDevices";
-static const char *const gOutputDeviceCriterionName = "AvailableOutputDevices";
-static const char *const gPhoneStateCriterionName = "TelephonyMode";
-static const char *const gOutputDeviceAddressCriterionName = "AvailableOutputDevicesAddresses";
-static const char *const gInputDeviceAddressCriterionName = "AvailableInputDevicesAddresses";
-
-/**
- * Order MUST be align with defintiion of audio_policy_force_use_t within audio_policy.h
- */
-static const char *const gForceUseCriterionTag[AUDIO_POLICY_FORCE_USE_CNT] =
-{
-    [AUDIO_POLICY_FORCE_FOR_COMMUNICATION] =        "ForceUseForCommunication",
-    [AUDIO_POLICY_FORCE_FOR_MEDIA] =                "ForceUseForMedia",
-    [AUDIO_POLICY_FORCE_FOR_RECORD] =               "ForceUseForRecord",
-    [AUDIO_POLICY_FORCE_FOR_DOCK] =                 "ForceUseForDock",
-    [AUDIO_POLICY_FORCE_FOR_SYSTEM] =               "ForceUseForSystem",
-    [AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO] =    "ForceUseForHdmiSystemAudio",
-    [AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND] =     "ForceUseForEncodedSurround",
-    [AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING] =      "ForceUseForVibrateRinging"
-};
-
 template <>
 struct ParameterManagerWrapper::parameterManagerElementSupported<ISelectionCriterionInterface> {};
 template <>
@@ -100,9 +79,9 @@
 {
     // Connector
     if (access(mPolicyPfwVendorConfFileName, R_OK) == 0) {
-        mPfwConnector = new CParameterMgrPlatformConnector(mPolicyPfwVendorConfFileName);
+        mPfwConnector = new CParameterMgrFullConnector(mPolicyPfwVendorConfFileName);
     } else {
-        mPfwConnector = new CParameterMgrPlatformConnector(mPolicyPfwDefaultConfFileName);
+        mPfwConnector = new CParameterMgrFullConnector(mPolicyPfwDefaultConfFileName);
     }
 
     // Logger
@@ -130,13 +109,13 @@
               std::get<2>(pair).c_str(), name.c_str());
         criterionType->addValuePair(std::get<0>(pair), std::get<2>(pair), error);
 
-        if (name == gOutputDeviceCriterionName) {
+        if (name == capEngineConfig::gOutputDeviceCriterionName) {
             ALOGV("%s: Adding mOutputDeviceToCriterionTypeMap %d %" PRIu64" for criterionType %s",
                   __func__, std::get<1>(pair), std::get<0>(pair), name.c_str());
             audio_devices_t androidType = static_cast<audio_devices_t>(std::get<1>(pair));
             mOutputDeviceToCriterionTypeMap[androidType] = std::get<0>(pair);
         }
-        if (name == gInputDeviceCriterionName) {
+        if (name == capEngineConfig::gInputDeviceCriterionName) {
             ALOGV("%s: Adding mInputDeviceToCriterionTypeMap %d %" PRIu64" for criterionType %s",
                   __func__, std::get<1>(pair), std::get<0>(pair), name.c_str());
             audio_devices_t androidType = static_cast<audio_devices_t>(std::get<1>(pair));
@@ -207,10 +186,11 @@
 
 status_t ParameterManagerWrapper::setPhoneState(audio_mode_t mode)
 {
-    ISelectionCriterionInterface *criterion =
-            getElement<ISelectionCriterionInterface>(gPhoneStateCriterionName, mPolicyCriteria);
+    ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
+            capEngineConfig::gPhoneStateCriterionName, mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __FUNCTION__, gPhoneStateCriterionName);
+        ALOGE("%s: no criterion found for %s", __FUNCTION__,
+              capEngineConfig::gPhoneStateCriterionName);
         return BAD_VALUE;
     }
     if (!isValueValidForCriterion(criterion, static_cast<int>(mode))) {
@@ -223,10 +203,11 @@
 
 audio_mode_t ParameterManagerWrapper::getPhoneState() const
 {
-    const ISelectionCriterionInterface *criterion =
-            getElement<ISelectionCriterionInterface>(gPhoneStateCriterionName, mPolicyCriteria);
+    const ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
+            capEngineConfig::gPhoneStateCriterionName, mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __FUNCTION__, gPhoneStateCriterionName);
+        ALOGE("%s: no criterion found for %s", __FUNCTION__,
+              capEngineConfig::gPhoneStateCriterionName);
         return AUDIO_MODE_NORMAL;
     }
     return static_cast<audio_mode_t>(criterion->getCriterionState());
@@ -240,10 +221,11 @@
         return BAD_VALUE;
     }
 
-    ISelectionCriterionInterface *criterion =
-            getElement<ISelectionCriterionInterface>(gForceUseCriterionTag[usage], mPolicyCriteria);
+    ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
+            capEngineConfig::gForceUseCriterionTag[usage], mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __FUNCTION__, gForceUseCriterionTag[usage]);
+        ALOGE("%s: no criterion found for %s", __FUNCTION__,
+              capEngineConfig::gForceUseCriterionTag[usage]);
         return BAD_VALUE;
     }
     if (!isValueValidForCriterion(criterion, static_cast<int>(config))) {
@@ -260,10 +242,11 @@
     if (usage > AUDIO_POLICY_FORCE_USE_CNT) {
         return AUDIO_POLICY_FORCE_NONE;
     }
-    const ISelectionCriterionInterface *criterion =
-            getElement<ISelectionCriterionInterface>(gForceUseCriterionTag[usage], mPolicyCriteria);
+    const ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
+            capEngineConfig::gForceUseCriterionTag[usage], mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __FUNCTION__, gForceUseCriterionTag[usage]);
+        ALOGE("%s: no criterion found for %s", __FUNCTION__,
+              capEngineConfig::gForceUseCriterionTag[usage]);
         return AUDIO_POLICY_FORCE_NONE;
     }
     return static_cast<audio_policy_forced_cfg_t>(criterion->getCriterionState());
@@ -281,8 +264,8 @@
         audio_devices_t type, const std::string &address, audio_policy_dev_state_t state)
 {
     std::string criterionName = audio_is_output_device(type) ?
-                gOutputDeviceAddressCriterionName : gInputDeviceAddressCriterionName;
-
+            capEngineConfig::gOutputDeviceAddressCriterionName :
+            capEngineConfig::gInputDeviceAddressCriterionName;
     ALOGV("%s: device with address %s %s", __FUNCTION__, address.c_str(),
           state != AUDIO_POLICY_DEVICE_STATE_AVAILABLE? "disconnected" : "connected");
     ISelectionCriterionInterface *criterion =
@@ -311,12 +294,12 @@
     return NO_ERROR;
 }
 
-status_t ParameterManagerWrapper::setAvailableInputDevices(const DeviceTypeSet &types)
-{
-    ISelectionCriterionInterface *criterion =
-            getElement<ISelectionCriterionInterface>(gInputDeviceCriterionName, mPolicyCriteria);
+status_t ParameterManagerWrapper::setAvailableInputDevices(const DeviceTypeSet &types) {
+    ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
+            capEngineConfig::gInputDeviceCriterionName, mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __func__, gInputDeviceCriterionName);
+        ALOGE("%s: no criterion found for %s", __func__,
+              capEngineConfig::gInputDeviceCriterionName);
         return DEAD_OBJECT;
     }
     criterion->setCriterionState(convertDeviceTypesToCriterionValue(types));
@@ -324,12 +307,12 @@
     return NO_ERROR;
 }
 
-status_t ParameterManagerWrapper::setAvailableOutputDevices(const DeviceTypeSet &types)
-{
-    ISelectionCriterionInterface *criterion =
-            getElement<ISelectionCriterionInterface>(gOutputDeviceCriterionName, mPolicyCriteria);
+status_t ParameterManagerWrapper::setAvailableOutputDevices(const DeviceTypeSet &types) {
+    ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
+            capEngineConfig::gOutputDeviceCriterionName, mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __func__, gOutputDeviceCriterionName);
+        ALOGE("%s: no criterion found for %s", __func__,
+              capEngineConfig::gOutputDeviceCriterionName);
         return DEAD_OBJECT;
     }
     criterion->setCriterionState(convertDeviceTypesToCriterionValue(types));
@@ -344,21 +327,15 @@
 
 uint64_t ParameterManagerWrapper::convertDeviceTypeToCriterionValue(audio_devices_t type) const {
     bool isOut = audio_is_output_devices(type);
-    uint32_t typeMask = isOut ? type : (type & ~AUDIO_DEVICE_BIT_IN);
-
     const auto &adapters = isOut ? mOutputDeviceToCriterionTypeMap : mInputDeviceToCriterionTypeMap;
-    // Only multibit devices need adaptation.
-    if (popcount(typeMask) > 1) {
-        const auto &adapter = adapters.find(type);
-        if (adapter != adapters.end()) {
-            ALOGV("%s: multibit device %d converted to criterion %" PRIu64, __func__, type,
-                  adapter->second);
-            return adapter->second;
-        }
-        ALOGE("%s: failed to find map for multibit device %d", __func__, type);
-        return 0;
+    const auto &adapter = adapters.find(type);
+    if (adapter != adapters.end()) {
+        ALOGV("%s: device %d converted to criterion %" PRIu64, __func__, type,
+              adapter->second);
+        return adapter->second;
     }
-    return typeMask;
+    ALOGE("%s: failed to find criterion mapping for device %d", __func__, type);
+    return 0;
 }
 
 uint64_t ParameterManagerWrapper::convertDeviceTypesToCriterionValue(
@@ -382,5 +359,88 @@
     return deviceTypes;
 }
 
+void ParameterManagerWrapper::createDomain(const std::string &domain)
+{
+    std::string error;
+    bool ret = mPfwConnector->createDomain(domain, error);
+    if (!ret) {
+        ALOGD("%s: failed to create domain %s (error=%s)", __func__, domain.c_str(),
+              error.c_str());
+    }
+}
+
+void ParameterManagerWrapper::addConfigurableElementToDomain(const std::string &domain,
+        const std::string &elementPath)
+{
+    std::string error;
+    bool ret = mPfwConnector->addConfigurableElementToDomain(domain, elementPath, error);
+    ALOGE_IF(!ret, "%s: failed to add parameter %s for domain %s (error=%s)",
+              __func__, elementPath.c_str(), domain.c_str(), error.c_str());
+}
+
+void ParameterManagerWrapper::createConfiguration(const std::string &domain,
+        const std::string &configurationName)
+{
+    std::string error;
+    bool ret = mPfwConnector->createConfiguration(domain, configurationName, error);
+    ALOGE_IF(!ret, "%s: failed to create configuration %s for domain %s (error=%s)",
+              __func__, configurationName.c_str(), domain.c_str(), error.c_str());
+}
+
+void ParameterManagerWrapper::setApplicationRule(
+        const std::string &domain, const std::string &configurationName, const std::string &rule)
+{
+    std::string error;
+    bool ret = mPfwConnector->setApplicationRule(domain, configurationName, rule, error);
+    ALOGE_IF(!ret, "%s: failed to set rule %s for domain %s and configuration %s (error=%s)",
+              __func__, rule.c_str(), domain.c_str(), configurationName.c_str(), error.c_str());
+}
+
+void ParameterManagerWrapper::accessConfigurationValue(const std::string &domain,
+        const std::string &configurationName, const std::string &elementPath,
+        std::string &value)
+{
+    std::string error;
+    bool ret = mPfwConnector->accessConfigurationValue(domain, configurationName, elementPath,
+            value, /*set=*/ true, error);
+    ALOGE_IF(!ret, "%s: failed to set value %s for parameter %s on domain %s and configuration %s "
+          "(error=%s)", __func__, value.c_str(), elementPath.c_str(), domain.c_str(),
+          configurationName.c_str(), error.c_str());
+}
+
+status_t ParameterManagerWrapper::setConfiguration(
+        const android::capEngineConfig::ParsingResult& capSettings)
+{
+    if (!isStarted()) {
+        return NO_INIT;
+    }
+    std::string error;
+    if (!mPfwConnector->setTuningMode(/* bOn= */ true, error)) {
+        ALOGD("%s: failed to set Tuning Mode error=%s", __FUNCTION__, error.c_str());
+        return DEAD_OBJECT;
+    }
+    for (auto &domain: capSettings.parsedConfig->capConfigurableDomains) {
+        createDomain(domain.name);
+        for (const auto &configurableElementValue : domain.settings[0].configurableElementValues) {
+            addConfigurableElementToDomain(domain.name,
+                    configurableElementValue.configurableElement.path);
+        }
+        for (const auto &configuration : domain.configurations) {
+            createConfiguration(domain.name, configuration.name);
+            setApplicationRule(domain.name, configuration.name, configuration.rule);
+        }
+        for (const auto &setting : domain.settings) {
+            for (const auto &configurableElementValue : setting.configurableElementValues) {
+                std::string value = configurableElementValue.value;
+                accessConfigurationValue(domain.name, setting.configurationName,
+                        configurableElementValue.configurableElement.path, value);
+            }
+        }
+    }
+    mPfwConnector->setTuningMode(/* bOn= */ false, error);
+    return OK;
+}
+
 } // namespace audio_policy
 } // namespace android
diff --git a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
index fa4ae1e..0c45a60 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
+++ b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
@@ -16,6 +16,7 @@
 
 #pragma once
 
+#include <CapEngineConfig.h>
 #include <media/AudioContainers.h>
 #include <system/audio.h>
 #include <system/audio_policy.h>
@@ -27,7 +28,7 @@
 #include <string>
 #include <vector>
 
-class CParameterMgrPlatformConnector;
+class CParameterMgrFullConnector;
 class ISelectionCriterionInterface;
 struct cnode;
 
@@ -59,6 +60,9 @@
      */
     status_t start(std::string &error);
 
+    status_t setConfiguration(const android::capEngineConfig::ParsingResult& capSettings);
+
+
     /**
      * The following API wrap policy action to criteria
      */
@@ -148,6 +152,14 @@
             uint64_t criterionValue, bool isOut) const;
 
 private:
+    void createDomain(const std::string &domain);
+    void addConfigurableElementToDomain(const std::string &domain, const std::string &elementPath);
+    void createConfiguration(const std::string &domain, const std::string &configurationName);
+    void setApplicationRule(const std::string &domain, const std::string &configurationName,
+            const std::string &rule);
+    void accessConfigurationValue(const std::string &domain, const std::string &configurationName,
+                                  const std::string &elementPath, std::string &value);
+
     /**
      * Apply the configuration of the platform on the policy parameter manager.
      * Once all the criteria have been set, the client of the platform state must call
@@ -206,7 +218,7 @@
 
     Criteria mPolicyCriteria; /**< Policy Criterion Map. */
 
-    CParameterMgrPlatformConnector *mPfwConnector; /**< Policy Parameter Manager connector. */
+    CParameterMgrFullConnector *mPfwConnector; /**< Policy Parameter Manager connector. */
     ParameterMgrPlatformConnectorLogger *mPfwConnectorLogger; /**< Policy PFW logger. */
 
 
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index b04aff0..7e35cca 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -51,7 +51,7 @@
         { "STRATEGY_TRANSMITTED_THROUGH_SPEAKER", STRATEGY_TRANSMITTED_THROUGH_SPEAKER },
         { "STRATEGY_ACCESSIBILITY", STRATEGY_ACCESSIBILITY },
         { "STRATEGY_REROUTING", STRATEGY_REROUTING },
-        { "STRATEGY_PATCH", STRATEGY_REROUTING }, // boiler to manage stream patch volume
+        { "STRATEGY_PATCH", STRATEGY_PATCH }, // boilerplate to manage stream patch volume
         { "STRATEGY_CALL_ASSISTANT", STRATEGY_CALL_ASSISTANT },
     };
     return legacyStrategy;
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index a6090cf..862b5fd 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -28,20 +28,6 @@
 namespace audio_policy
 {
 
-enum legacy_strategy {
-    STRATEGY_NONE = -1,
-    STRATEGY_MEDIA,
-    STRATEGY_PHONE,
-    STRATEGY_SONIFICATION,
-    STRATEGY_SONIFICATION_RESPECTFUL,
-    STRATEGY_DTMF,
-    STRATEGY_ENFORCED_AUDIBLE,
-    STRATEGY_TRANSMITTED_THROUGH_SPEAKER,
-    STRATEGY_ACCESSIBILITY,
-    STRATEGY_REROUTING,
-    STRATEGY_CALL_ASSISTANT,
-};
-
 class Engine : public EngineBase
 {
 public:
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index 8cee613..30d4403 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -28,39 +28,18 @@
 
 cc_fuzz {
     name: "audiopolicy_fuzzer",
+    defaults: [
+        "libaudiopolicyservice_dependencies",
+    ],
     srcs: [
         "audiopolicy_fuzzer.cpp",
     ],
-    include_dirs: [
-        "frameworks/av/services/audiopolicy",
-    ],
-    shared_libs: [
-        "android.hardware.audio.common-util",
-        "capture_state_listener-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "libaudioclient",
-        "libaudiofoundation",
-        "libaudiopolicy",
-        "libaudiopolicycomponents",
-        "libaudiopolicymanagerdefault",
-        "libbase",
-        "libbinder",
-        "libcutils",
-        "libdl",
-        "libhidlbase",
-        "liblog",
-        "libmedia_helper",
-        "libmediametrics",
-        "libutils",
-        "libxml2",
-    ],
     static_libs: [
         "android.hardware.audio.common@7.0-enums",
     ],
-    header_libs: [
-        "libaudiopolicycommon",
-        "libaudiopolicyengine_interface_headers",
-        "libaudiopolicymanager_interface_headers",
+    include_dirs: [
+        "frameworks/av/services/audiopolicy", // include path outside of libaudiopolicyservice
+        "frameworks/av/services/audiopolicy/engine/interface", // for tests/AudioPolicyTestManager.h
     ],
     data: [":audiopolicyfuzzer_configuration_files"],
     fuzz_config: {
diff --git a/services/audiopolicy/fuzzer/aidl/Android.bp b/services/audiopolicy/fuzzer/aidl/Android.bp
index a91ecd7..680f76d 100644
--- a/services/audiopolicy/fuzzer/aidl/Android.bp
+++ b/services/audiopolicy/fuzzer/aidl/Android.bp
@@ -23,37 +23,15 @@
 cc_defaults {
     name: "audiopolicy_aidl_fuzzer_defaults",
     shared_libs: [
-        "audiopolicy-aidl-cpp",
-        "audiopolicy-types-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "libactivitymanager_aidl",
-        "libaudioclient",
         "libaudioflinger",
-        "libaudiohal",
-        "libaudiopolicy",
-        "libaudiopolicymanagerdefault",
         "libaudiopolicyservice",
-        "libaudioprocessing",
-        "libhidlbase",
-        "liblog",
-        "libmediautils",
-        "libnbaio",
-        "libnblog",
-        "libpowermanager",
-        "libvibrator",
-        "packagemanager_aidl-cpp",
-    ],
-    static_libs: [
-        "audiopermissioncontroller",
-        "libaudiomockhal",
-        "libfakeservicemanager",
         "libmediaplayerservice",
     ],
+    static_libs: [
+        "libaudiomockhal",
+        "libfakeservicemanager",
+    ],
     header_libs: [
-        "libaudioflinger_headers",
-        "libaudiohal_headers",
-        "libaudiopolicymanager_interface_headers",
-        "libbinder_headers",
         "libmedia_headers",
     ],
     fuzz_config: {
@@ -78,6 +56,8 @@
         "latest_android_hardware_audio_core_ndk_shared",
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_effect_ndk_shared",
+        "libaudioflinger_dependencies",
+        "libaudiopolicyservice_dependencies",
         "service_fuzzer_defaults",
     ],
 }
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-0 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..4d539b7
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-1 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..8af7d2f
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-2 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..b89b77e
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-3 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..6e966e9
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-4 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-4
new file mode 100644
index 0000000..8ccf24d
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-4
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-5 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-5
new file mode 100644
index 0000000..223d1df
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-5
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-6 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-6
new file mode 100644
index 0000000..ad54b83
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-6
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-7 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-7
new file mode 100644
index 0000000..f4eabf4
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-7
Binary files differ
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 252dae6..354c59c 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -122,17 +122,16 @@
     }
 }
 
-void AudioPolicyManager::broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
+status_t AudioPolicyManager::broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
                                                         media::DeviceConnectedState state)
 {
     audio_port_v7 devicePort;
     device->toAudioPort(&devicePort);
-    if (status_t status = mpClientInterface->setDeviceConnectedState(&devicePort, state);
-            status != OK) {
-        ALOGE("Error %d while setting connected state %d for device %s",
-                status, static_cast<int>(state),
-                device->getDeviceTypeAddr().toString(false).c_str());
-    }
+    status_t status = mpClientInterface->setDeviceConnectedState(&devicePort, state);
+    ALOGE_IF(status != OK, "Error %d while setting connected state %d for device %s", status,
+             static_cast<int>(state), device->getDeviceTypeAddr().toString(false).c_str());
+
+    return status;
 }
 
 status_t AudioPolicyManager::setDeviceConnectionStateInt(
@@ -213,7 +212,14 @@
 
             // Before checking outputs, broadcast connect event to allow HAL to retrieve dynamic
             // parameters on newly connected devices (instead of opening the outputs...)
-            broadcastDeviceConnectionState(device, media::DeviceConnectedState::CONNECTED);
+            if (broadcastDeviceConnectionState(
+                        device, media::DeviceConnectedState::CONNECTED) != NO_ERROR) {
+                mAvailableOutputDevices.remove(device);
+                mHwModules.cleanUpForDevice(device);
+                ALOGE("%s() device %s format %x connection failed", __func__,
+                      device->toString().c_str(), device->getEncodedFormat());
+                return INVALID_OPERATION;
+            }
 
             if (checkOutputsForDevice(device, state, outputs) != NO_ERROR) {
                 mAvailableOutputDevices.remove(device);
@@ -398,7 +404,14 @@
 
             // Before checking intputs, broadcast connect event to allow HAL to retrieve dynamic
             // parameters on newly connected devices (instead of opening the inputs...)
-            broadcastDeviceConnectionState(device, media::DeviceConnectedState::CONNECTED);
+            if (broadcastDeviceConnectionState(
+                        device, media::DeviceConnectedState::CONNECTED) != NO_ERROR) {
+                mAvailableInputDevices.remove(device);
+                mHwModules.cleanUpForDevice(device);
+                ALOGE("%s() device %s format %x connection failed", __func__,
+                      device->toString().c_str(), device->getEncodedFormat());
+                return INVALID_OPERATION;
+            }
             // Propagate device availability to Engine
             setEngineDeviceConnectionState(device, state);
 
@@ -768,7 +781,7 @@
         }
         muteWaitMs = setOutputDevices(__func__, mPrimaryOutput, rxDevices, true, delayMs);
     } else { // create RX path audio patch
-        connectTelephonyRxAudioSource();
+        connectTelephonyRxAudioSource(delayMs);
         // If the TX device is on the primary HW module but RX device is
         // on other HW module, SinkMetaData of telephony input should handle it
         // assuming the device uses audio HAL V5.0 and above
@@ -803,7 +816,7 @@
     return false;
 }
 
-void AudioPolicyManager::connectTelephonyRxAudioSource()
+void AudioPolicyManager::connectTelephonyRxAudioSource(uint32_t delayMs)
 {
     disconnectTelephonyAudioSource(mCallRxSourceClient);
     const struct audio_port_config source = {
@@ -813,7 +826,8 @@
     const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
 
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-    status_t status = startAudioSource(&source, &aa, &portId, 0 /*uid*/, true /*internal*/);
+    status_t status = startAudioSourceInternal(&source, &aa, &portId, 0 /*uid*/,
+                                       true /*internal*/, true /*isCallRx*/, delayMs);
     ALOGE_IF(status != OK, "%s: failed to start audio source (%d)", __func__, status);
     mCallRxSourceClient = mAudioSources.valueFor(portId);
     ALOGE_IF(mCallRxSourceClient == nullptr,
@@ -850,7 +864,8 @@
     srcDevice->toAudioPortConfig(&source);
     mCallTxSourceClient = new SourceClientDescriptor(
                 callTxSourceClientPortId, mUidCached, aa, source, srcDevice, AUDIO_STREAM_PATCH,
-                mCommunnicationStrategy, toVolumeSource(aa), true);
+                mCommunnicationStrategy, toVolumeSource(aa), true,
+                false /*isCallRx*/, true /*isCallTx*/);
     mCallTxSourceClient->setPreferredDeviceId(sinkDevice->getId());
 
     audio_patch_handle_t patchHandle = AUDIO_PATCH_HANDLE_NONE;
@@ -1248,7 +1263,8 @@
 
     // FIXME: in case of RENDER policy, the output capabilities should be checked
     if ((secondaryMixes != nullptr && !secondaryMixes->empty())
-            && !audio_is_linear_pcm(config->format)) {
+            && (!audio_is_linear_pcm(config->format) ||
+                    *flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD)) {
         ALOGD("%s: rejecting request as secondary mixes only support pcm", __func__);
         return BAD_VALUE;
     }
@@ -1273,7 +1289,7 @@
             status = openDirectOutput(
                     *stream, session, config,
                     (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_DIRECT),
-                    DeviceVector(policyMixDevice), &newOutput);
+                    DeviceVector(policyMixDevice), &newOutput, *resultAttr);
             if (status == NO_ERROR) {
                 policyDesc = mOutputs.valueFor(newOutput);
                 primaryMix->setOutput(policyDesc);
@@ -1511,7 +1527,8 @@
                                               const audio_config_t *config,
                                               audio_output_flags_t flags,
                                               const DeviceVector &devices,
-                                              audio_io_handle_t *output) {
+                                              audio_io_handle_t *output,
+                                              audio_attributes_t attributes) {
 
     *output = AUDIO_IO_HANDLE_NONE;
 
@@ -1606,13 +1623,19 @@
     releaseMsdOutputPatches(devices);
 
     status_t status =
-            outputDesc->open(config, nullptr /* mixerConfig */, devices, stream, flags, output);
+            outputDesc->open(config, nullptr /* mixerConfig */, devices, stream, &flags, output,
+                             attributes);
 
-    // only accept an output with the requested parameters
+    // only accept an output with the requested parameters, unless the format can be IEC61937
+    // encapsulated and opened by AudioFlinger as wrapped IEC61937.
+    const bool ignoreRequestedParametersCheck = audio_is_iec61937_compatible(config->format)
+            && (flags & AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO)
+            && audio_has_proportional_frames(outputDesc->getFormat());
     if (status != NO_ERROR ||
-        (config->sample_rate != 0 && config->sample_rate != outputDesc->getSamplingRate()) ||
-        (config->format != AUDIO_FORMAT_DEFAULT && config->format != outputDesc->getFormat()) ||
-        (config->channel_mask != 0 && config->channel_mask != outputDesc->getChannelMask())) {
+        (!ignoreRequestedParametersCheck &&
+        ((config->sample_rate != 0 && config->sample_rate != outputDesc->getSamplingRate()) ||
+         (config->format != AUDIO_FORMAT_DEFAULT && config->format != outputDesc->getFormat()) ||
+         (config->channel_mask != 0 && config->channel_mask != outputDesc->getChannelMask())))) {
         ALOGV("%s failed opening direct output: output %d sample rate %d %d,"
                 "format %d %d, channel mask %04x %04x", __func__, *output, config->sample_rate,
                 outputDesc->getSamplingRate(), config->format, outputDesc->getFormat(),
@@ -1632,11 +1655,11 @@
     outputDesc->mDirectClientSession = session;
 
     addOutput(*output, outputDesc);
-    setOutputDevices(__func__, outputDesc,
-                     devices,
-                     true,
-                     0,
-                     NULL);
+    // The version check is essentially to avoid making this call in the case of the HIDL HAL.
+    if (auto hwModule = mHwModules.getModuleFromHandle(mPrimaryModuleHandle); hwModule &&
+            hwModule->getHalVersionMajor() >= 3) {
+        setOutputDevices(__func__, outputDesc, devices, true, 0, NULL);
+    }
     mPreviousOutputs = mOutputs;
     ALOGV("%s returns new direct output %d", __func__, *output);
     mpClientInterface->onAudioPortListUpdate();
@@ -1711,7 +1734,9 @@
 
     audio_config_t directConfig = *config;
     directConfig.channel_mask = channelMask;
-    status_t status = openDirectOutput(stream, session, &directConfig, *flags, devices, &output);
+
+    status_t status = openDirectOutput(stream, session, &directConfig, *flags, devices, &output,
+                                       *attr);
     if (status != NAME_NOT_FOUND) {
         return output;
     }
@@ -2148,8 +2173,7 @@
     // matching criteria values in priority order for best matching output so far
     std::vector<uint32_t> bestMatchCriteria(8, 0);
 
-    const bool hasOrphanHaptic =
-            mEffects.hasOrphanEffectsForSessionAndType(sessionId, FX_IID_HAPTICGENERATOR);
+    const bool hasOrphanHaptic = mEffects.hasOrphansForSession(sessionId, FX_IID_HAPTICGENERATOR);
     const uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
     const uint32_t hapticChannelCount = audio_channel_count_from_out_mask(
         channelMask & AUDIO_CHANNEL_HAPTIC_ALL);
@@ -3472,6 +3496,11 @@
     }
     ALOGV("%s: group %d matching with %s index %d",
             __FUNCTION__, group, toString(attributes).c_str(), index);
+    if (mEngine->getStreamTypeForAttributes(attributes) == AUDIO_STREAM_PATCH) {
+        ALOGV("%s: cannot change volume for PATCH stream, attrs: %s",
+                __FUNCTION__, toString(attributes).c_str());
+        return NO_ERROR;
+    }
     status_t status = NO_ERROR;
     IVolumeCurves &curves = getVolumeCurves(attributes);
     VolumeSource vs = toVolumeSource(group);
@@ -5114,7 +5143,7 @@
             new SourceClientDescriptor(
                 portId, uid, attributes, *source, srcDevice, AUDIO_STREAM_PATCH,
                 mEngine->getProductStrategyForAttributes(attributes), toVolumeSource(attributes),
-                true);
+                true, false /*isCallRx*/, false /*isCallTx*/);
     sourceDesc->setPreferredDeviceId(sinkDevice->getId());
 
     status_t status =
@@ -5446,7 +5475,7 @@
                         outputDesc->toAudioPortConfig(&srcMixPortConfig, nullptr);
                         // for volume control, we may need a valid stream
                         srcMixPortConfig.ext.mix.usecase.stream =
-                            (!sourceDesc->isInternal() || isCallTxAudioSource(sourceDesc)) ?
+                            (!sourceDesc->isInternal() || sourceDesc->isCallTx()) ?
                                     mEngine->getStreamTypeForAttributes(sourceDesc->attributes()) :
                                     AUDIO_STREAM_PATCH;
                         patchBuilder.addSource(srcMixPortConfig);
@@ -5784,7 +5813,16 @@
 status_t AudioPolicyManager::startAudioSource(const struct audio_port_config *source,
                                               const audio_attributes_t *attributes,
                                               audio_port_handle_t *portId,
-                                              uid_t uid, bool internal)
+                                              uid_t uid) {
+    return startAudioSourceInternal(source, attributes, portId, uid,
+                                    false /*internal*/, false /*isCallRx*/, 0 /*delayMs*/);
+}
+
+status_t AudioPolicyManager::startAudioSourceInternal(const struct audio_port_config *source,
+                                              const audio_attributes_t *attributes,
+                                              audio_port_handle_t *portId,
+                                              uid_t uid, bool internal, bool isCallRx,
+                                              uint32_t delayMs)
 {
     ALOGV("%s", __FUNCTION__);
     *portId = AUDIO_PORT_HANDLE_NONE;
@@ -5817,16 +5855,17 @@
         new SourceClientDescriptor(*portId, uid, *attributes, *source, srcDevice,
                                    mEngine->getStreamTypeForAttributes(*attributes),
                                    mEngine->getProductStrategyForAttributes(*attributes),
-                                   toVolumeSource(*attributes), internal);
+                                   toVolumeSource(*attributes), internal, isCallRx, false);
 
-    status_t status = connectAudioSource(sourceDesc);
+    status_t status = connectAudioSource(sourceDesc, delayMs);
     if (status == NO_ERROR) {
         mAudioSources.add(*portId, sourceDesc);
     }
     return status;
 }
 
-status_t AudioPolicyManager::connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc)
+status_t AudioPolicyManager::connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc,
+                                                uint32_t delayMs)
 {
     ALOGV("%s handle %d", __FUNCTION__, sourceDesc->portId());
 
@@ -5852,7 +5891,7 @@
     audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
 
     return connectAudioSourceToSink(
-                sourceDesc, sinkDevice, patchBuilder.patch(), handle, mUidCached, 0 /*delayMs*/);
+                sourceDesc, sinkDevice, patchBuilder.patch(), handle, mUidCached, delayMs);
 }
 
 status_t AudioPolicyManager::stopAudioSource(audio_port_handle_t portId)
@@ -5969,7 +6008,8 @@
         audio_devices_t deviceType = device->type();
         // Enabling/disabling formats are applied to only HDMI devices. So, this function
         // returns formats reported by HDMI devices.
-        if (deviceType != AUDIO_DEVICE_OUT_HDMI) {
+        if (deviceType != AUDIO_DEVICE_OUT_HDMI &&
+            deviceType != AUDIO_DEVICE_OUT_HDMI_ARC && deviceType != AUDIO_DEVICE_OUT_HDMI_EARC) {
             continue;
         }
         // Formats reported by sink devices
@@ -6038,13 +6078,13 @@
 
     sp<SwAudioOutputDescriptor> outputDesc;
     bool profileUpdated = false;
-    DeviceVector hdmiOutputDevices = mAvailableOutputDevices.getDevicesFromType(
-        AUDIO_DEVICE_OUT_HDMI);
+    DeviceVector hdmiOutputDevices = mAvailableOutputDevices.getDevicesFromTypes(
+        {AUDIO_DEVICE_OUT_HDMI, AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_HDMI_EARC});
     for (size_t i = 0; i < hdmiOutputDevices.size(); i++) {
         // Simulate reconnection to update enabled surround sound formats.
         String8 address = String8(hdmiOutputDevices[i]->address().c_str());
         std::string name = hdmiOutputDevices[i]->getName();
-        status_t status = setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_HDMI,
+        status_t status = setDeviceConnectionStateInt(hdmiOutputDevices[i]->type(),
                                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
                                                       address.c_str(),
                                                       name.c_str(),
@@ -6052,7 +6092,7 @@
         if (status != NO_ERROR) {
             continue;
         }
-        status = setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_HDMI,
+        status = setDeviceConnectionStateInt(hdmiOutputDevices[i]->type(),
                                              AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
                                              address.c_str(),
                                              name.c_str(),
@@ -6289,7 +6329,7 @@
         // means haptic use cases (either the client channelmask includes haptic bits, or created a
         // HapticGenerator effect for this session) are not supported.
         return clientHapticChannel == 0 &&
-               !mEffects.hasOrphanEffectsForSessionAndType(sessionId, FX_IID_HAPTICGENERATOR);
+               !mEffects.hasOrphansForSession(sessionId, FX_IID_HAPTICGENERATOR);
     }
 }
 
@@ -6592,10 +6632,12 @@
             sp<SwAudioOutputDescriptor> outputDesc = new SwAudioOutputDescriptor(outProfile,
                                                                                  mpClientInterface);
             audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+            audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
+            audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
             status_t status = outputDesc->open(nullptr /* halConfig */, nullptr /* mixerConfig */,
                                                DeviceVector(supportedDevice),
                                                AUDIO_STREAM_DEFAULT,
-                                               AUDIO_OUTPUT_FLAG_NONE, &output);
+                                               &flags, &output, attributes);
             if (status != NO_ERROR) {
                 ALOGW("Cannot open output stream for devices %s on hw module %s",
                       supportedDevice->toString().c_str(), hwModule->getName());
@@ -6663,7 +6705,7 @@
             status_t status = inputDesc->open(nullptr,
                                               availProfileDevices.itemAt(0),
                                               AUDIO_SOURCE_MIC,
-                                              AUDIO_INPUT_FLAG_NONE,
+                                              (audio_input_flags_t) inProfile->getFlags(),
                                               &input);
             if (status != NO_ERROR) {
                 ALOGW("%s: Cannot open input stream for device %s for profile %s on hw module %s",
@@ -6978,7 +7020,8 @@
             desc = new AudioInputDescriptor(profile, mpClientInterface);
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
             ALOGV("%s opening input for profile %s", __func__, profile->getTagName().c_str());
-            status = desc->open(nullptr, device, AUDIO_SOURCE_MIC, AUDIO_INPUT_FLAG_NONE, &input);
+            status = desc->open(nullptr, device, AUDIO_SOURCE_MIC,
+                                (audio_input_flags_t) profile->getFlags(), &input);
 
             if (status == NO_ERROR) {
                 const String8& address = String8(device->address().c_str());
@@ -7225,8 +7268,8 @@
         sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
         if (sourceDesc != nullptr && followsSameRouting(attr, sourceDesc->attributes())
                 && sourceDesc->getPatchHandle() == AUDIO_PATCH_HANDLE_NONE
-                && !isCallRxAudioSource(sourceDesc) && !sourceDesc->isInternal()) {
-            connectAudioSource(sourceDesc);
+                && !sourceDesc->isCallRx() && !sourceDesc->isInternal()) {
+            connectAudioSource(sourceDesc, 0 /*delayMs*/);
         }
     }
 }
@@ -7257,28 +7300,35 @@
     std::vector<sp<SwAudioOutputDescriptor>> invalidatedOutputs;
     // take into account dynamic audio policies related changes: if a client is now associated
     // to a different policy mix than at creation time, invalidate corresponding stream
+    // invalidate clients on outputs that do not support all the newly selected devices for the
+    // strategy
     for (size_t i = 0; i < mPreviousOutputs.size(); i++) {
         const sp<SwAudioOutputDescriptor>& desc = mPreviousOutputs.valueAt(i);
-        if (desc->isDuplicated()) {
+        if (desc->isDuplicated() || desc->getClientCount() == 0) {
             continue;
         }
+
         for (const sp<TrackClientDescriptor>& client : desc->getClientIterable()) {
             if (mEngine->getProductStrategyForAttributes(client->attributes()) != psId) {
                 continue;
             }
+            if (!desc->supportsAllDevices(newDevices)) {
+                invalidatedOutputs.push_back(desc);
+                break;
+            }
             sp<AudioPolicyMix> primaryMix;
             status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->config(),
                     client->uid(), client->session(), client->flags(), mAvailableOutputDevices,
                     nullptr /* requestedDevice */, primaryMix, nullptr /* secondaryMixes */,
                     unneededUsePrimaryOutputFromPolicyMixes);
-            if (status != OK) {
-                continue;
-            }
-            if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
-                if (desc->isStrategyActive(psId) && maxLatency < desc->latency()) {
-                    maxLatency = desc->latency();
+            if (status == OK) {
+                if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
+                    if (desc->isStrategyActive(psId) && maxLatency < desc->latency()) {
+                        maxLatency = desc->latency();
+                    }
+                    invalidatedOutputs.push_back(desc);
+                    break;
                 }
-                invalidatedOutputs.push_back(desc);
             }
         }
     }
@@ -7332,8 +7382,8 @@
                 }
             }
             sp<SourceClientDescriptor> source = getSourceForAttributesOnOutput(srcOut, attr);
-            if (source != nullptr && !isCallRxAudioSource(source) && !source->isInternal()) {
-                connectAudioSource(source);
+            if (source != nullptr && !source->isCallRx() && !source->isInternal()) {
+                connectAudioSource(source, 0 /*delayMs*/);
             }
         }
 
@@ -7396,7 +7446,8 @@
                     client->getSecondaryOutputs().begin(),
                     client->getSecondaryOutputs().end(),
                     secondaryDescs.begin(), secondaryDescs.end())) {
-                if (!audio_is_linear_pcm(client->config().format)) {
+                if (client->flags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD
+                        || !audio_is_linear_pcm(client->config().format)) {
                     // If the format is not PCM, the tracks should be invalidated to get correct
                     // behavior when the secondary output is changed.
                     clientsToInvalidate.push_back(client->portId());
@@ -7956,9 +8007,21 @@
                         if (result.source == AUDIO_SOURCE_HOTWORD && !inputDesc->isSoundTrigger()) {
                             result.source = AUDIO_SOURCE_VOICE_RECOGNITION;
                         }
-                        return result; }).
+                        return result; });
             //only one input device for now
-                    addSource(device);
+            if (audio_is_remote_submix_device(device->type())) {
+                // The remote submix HAL does not support audio conversion, so the source device
+                // audio config must match the sink input descriptor audio config; otherwise AIDL
+                // HAL patching will fail.
+                audio_port_config srcDevicePortConfig = {};
+                device->toAudioPortConfig(&srcDevicePortConfig, nullptr);
+                srcDevicePortConfig.sample_rate = inputDesc->getSamplingRate();
+                srcDevicePortConfig.channel_mask = inputDesc->getChannelMask();
+                srcDevicePortConfig.format = inputDesc->getFormat();
+                patchBuilder.addSource(srcDevicePortConfig);
+            } else {
+                patchBuilder.addSource(device);
+            }
             status = installPatch(__func__, patchHandle, inputDesc.get(), patchBuilder.patch(), 0);
         }
     }
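The remote submix handling above reduces to copying the device's port configuration and forcing its sample rate, channel mask, and format to match the sink input descriptor before handing it to the patch builder. A minimal sketch of that idea, assuming a hypothetical helper matchSourceToSink() that is not part of AudioPolicyManager:

    #include <system/audio.h>

    // Sketch: align a source device port config with the sink's active config so
    // that an AIDL HAL patch between them is created with identical parameters.
    // matchSourceToSink() is a hypothetical helper, not code from the patch above.
    static audio_port_config matchSourceToSink(const audio_port_config& srcDevice,
                                               uint32_t sinkSampleRate,
                                               audio_channel_mask_t sinkChannelMask,
                                               audio_format_t sinkFormat) {
        audio_port_config out = srcDevice;   // start from the device port config
        out.sample_rate = sinkSampleRate;    // take over the sink's sample rate
        out.channel_mask = sinkChannelMask;  // take over the sink's channel mask
        out.format = sinkFormat;             // take over the sink's format
        return out;
    }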
@@ -8274,6 +8337,9 @@
                                                int delayMs,
                                                bool force)
 {
+    // APM is single-threaded and there is a single instance.
+    static std::set<IVolumeCurves*> invalidCurvesReported;
+
     // do not change actual attributes volume if the attributes is muted
     if (outputDesc->isMuted(volumeSource)) {
         ALOGVV("%s: volume source %d muted count %d active=%d", __func__, volumeSource,
@@ -8298,7 +8364,12 @@
     }
 
     if (curves.getVolumeIndexMin() < 0 || curves.getVolumeIndexMax() < 0) {
-        ALOGE("invalid volume index range");
+        if (!invalidCurvesReported.count(&curves)) {
+            invalidCurvesReported.insert(&curves);
+            String8 dump;
+            curves.dump(&dump);
+            ALOGE("invalid volume index range in the curve:\n%s", dump.c_str());
+        }
         return BAD_VALUE;
     }
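The invalidCurvesReported set above implements a one-shot logging pattern: each offending curve object is dumped at most once per process. In isolation the pattern looks roughly like the sketch below, with a hypothetical helper name, and relies on the audio policy manager being single-threaded:

    #include <set>

    // Sketch of the one-shot reporting pattern: remember which curve objects have
    // already been logged so the detailed dump happens only once per curve.
    // shouldReportInvalidCurve() is a hypothetical name used for illustration.
    static bool shouldReportInvalidCurve(const void* curve) {
        static std::set<const void*> reported;  // process lifetime, single-threaded use
        return reported.insert(curve).second;   // true only on the first insertion
    }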
 
@@ -8505,7 +8576,7 @@
         sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
         if (sourceDesc->isConnected() && (sourceDesc->srcDevice()->equals(deviceDesc) ||
                                           sourceDesc->sinkDevice()->equals(deviceDesc))
-                && !isCallRxAudioSource(sourceDesc)) {
+                && !sourceDesc->isCallRx()) {
             disconnectAudioSource(sourceDesc);
         }
     }
@@ -8659,6 +8730,8 @@
     mReportedFormatsMap[devDesc] = formats;
 
     if (devDesc->type() == AUDIO_DEVICE_OUT_HDMI ||
+        devDesc->type() == AUDIO_DEVICE_OUT_HDMI_ARC ||
+        devDesc->type() == AUDIO_DEVICE_OUT_HDMI_EARC ||
         isDeviceOfModule(devDesc,AUDIO_HARDWARE_MODULE_ID_MSD)) {
         modifySurroundFormats(devDesc, &formats);
         size_t modifiedNumProfiles = 0;
@@ -8791,8 +8864,9 @@
     }
     sp<SwAudioOutputDescriptor> desc = new SwAudioOutputDescriptor(profile, mpClientInterface);
     audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
     status_t status = desc->open(halConfig, mixerConfig, devices,
-            AUDIO_STREAM_DEFAULT, flags, &output);
+            AUDIO_STREAM_DEFAULT, &flags, &output, attributes);
     if (status != NO_ERROR) {
         ALOGE("%s failed to open output %d", __func__, status);
         return nullptr;
@@ -8830,18 +8904,19 @@
         config.offload_info.channel_mask = config.channel_mask;
         config.offload_info.format = config.format;
 
-        status = desc->open(&config, mixerConfig, devices, AUDIO_STREAM_DEFAULT, flags, &output);
+        status = desc->open(&config, mixerConfig, devices, AUDIO_STREAM_DEFAULT, &flags, &output,
+                            attributes);
         if (status != NO_ERROR) {
             return nullptr;
         }
     }
 
     addOutput(output, desc);
-    setOutputDevices(__func__, desc,
-                     devices,
-                     true,
-                     0,
-                     NULL);
+    // The version check is essentially to avoid making this call in the case of the HIDL HAL.
+    if (auto hwModule = mHwModules.getModuleFromHandle(mPrimaryModuleHandle); hwModule &&
+            hwModule->getHalVersionMajor() >= 3) {
+        setOutputDevices(__func__, desc, devices, true, 0, NULL);
+    }
     sp<DeviceDescriptor> speaker = mAvailableOutputDevices.getDevice(
             AUDIO_DEVICE_OUT_SPEAKER, String8(""), AUDIO_FORMAT_DEFAULT);
 
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 011e867..9d2166a 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -345,8 +345,7 @@
         virtual status_t startAudioSource(const struct audio_port_config *source,
                                           const audio_attributes_t *attributes,
                                           audio_port_handle_t *portId,
-                                          uid_t uid,
-                                          bool internal = false);
+                                          uid_t uid);
         virtual status_t stopAudioSource(audio_port_handle_t portId);
 
         virtual status_t setMasterMono(bool mono);
@@ -705,15 +704,7 @@
         void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0,
                 bool skipDelays = false);
 
-        bool isCallRxAudioSource(const sp<SourceClientDescriptor> &source) {
-            return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
-        }
-
-        bool isCallTxAudioSource(const sp<SourceClientDescriptor> &source) {
-            return mCallTxSourceClient != nullptr && source == mCallTxSourceClient;
-        }
-
-        void connectTelephonyRxAudioSource();
+        void connectTelephonyRxAudioSource(uint32_t delayMs);
 
         void disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc);
 
@@ -938,7 +929,8 @@
 
         status_t hasPrimaryOutput() const { return mPrimaryOutput != 0; }
 
-        status_t connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc);
+        status_t connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc,
+                                    uint32_t delayMs);
         status_t disconnectAudioSource(const sp<SourceClientDescriptor>& sourceDesc);
 
         status_t connectAudioSourceToSink(const sp<SourceClientDescriptor>& sourceDesc,
@@ -976,6 +968,13 @@
         void checkLeBroadcastRoutes(bool wasUnicastActive,
                 sp<SwAudioOutputDescriptor> ignoredOutput, uint32_t delayMs);
 
+        status_t startAudioSourceInternal(const struct audio_port_config *source,
+                                          const audio_attributes_t *attributes,
+                                          audio_port_handle_t *portId,
+                                          uid_t uid,
+                                          bool internal,
+                                          bool isCallRx,
+                                          uint32_t delayMs);
         const uid_t mUidCached;                         // AID_AUDIOSERVER
         sp<const AudioPolicyConfig> mConfig;
         EngineInstance mEngine;                         // Audio Policy Engine instance
@@ -1106,8 +1105,8 @@
         // It can give a chance to HAL implementer to retrieve dynamic capabilities associated
         // to this device for example.
         // TODO avoid opening stream to retrieve capabilities of a profile.
-        void broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
-                                            media::DeviceConnectedState state);
+        status_t broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
+                                                media::DeviceConnectedState state);
 
         // updates device caching and output for streams that can influence the
         //    routing of notifications
@@ -1153,7 +1152,8 @@
                 const audio_config_t *config,
                 audio_output_flags_t flags,
                 const DeviceVector &devices,
-                audio_io_handle_t *output);
+                audio_io_handle_t *output,
+                audio_attributes_t attributes);
 
         /**
          * @brief Queries if some kind of spatialization will be performed if the audio playback
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index 84458f9..e157808 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -11,12 +11,16 @@
 cc_defaults {
     name: "libaudiopolicyservice_dependencies",
 
-    header_libs: ["audiopolicyservicelocal_headers"],
+    include_dirs: [
+        "frameworks/av/services/audiopolicy", // include path outside of libaudiopolicyservice
+    ],
 
     shared_libs: [
         "android.media.audiopolicy-aconfig-cc",
+        "audio-permission-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
+        "audiopermissioncontroller",
         "audiopolicy-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
@@ -35,6 +39,7 @@
         "libaudioutils",
         "libbinder",
         "libcutils",
+        "libeffectsconfig",
         "libhardware_legacy",
         "libheadtracking",
         "libheadtracking-binding",
@@ -52,13 +57,6 @@
         "packagemanager_aidl-cpp",
         "spatializer-aidl-cpp",
     ],
-
-    static_libs: [
-        "audio-permission-aidl-cpp",
-        "audiopermissioncontroller",
-        "libaudiopolicycomponents",
-        "libeffectsconfig",
-    ],
 }
 
 cc_library {
@@ -84,10 +82,6 @@
         "frameworks/av/services/audioflinger",
     ],
 
-    static_libs: [
-        "framework-permission-aidl-cpp", // TODO remove when unnnecessary
-    ],
-
     header_libs: [
         "audiopolicyservicelocal_headers",
         "libaudiohal_headers",
@@ -97,21 +91,14 @@
         "libaudioutils_headers",
     ],
 
+    export_include_dirs: ["."],
+
     cflags: [
         "-Wall",
         "-Werror",
         "-Wthread-safety",
         "-fvisibility=hidden",
     ],
-
-    export_shared_lib_headers: [
-        "framework-permission-aidl-cpp",
-        "libactivitymanager_aidl",
-        "libaudiousecasevalidation",
-        "libheadtracking",
-        "libheadtracking-binding",
-        "libsensorprivacy",
-    ],
 }
 
 cc_library_headers {
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index f70dc52..6d2c772 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -56,7 +56,8 @@
                                                            audio_config_base_t *mixerConfig,
                                                            const sp<DeviceDescriptorBase>& device,
                                                            uint32_t *latencyMs,
-                                                           audio_output_flags_t flags)
+                                                           audio_output_flags_t *flags,
+                                                           audio_attributes_t attributes)
 {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
     if (af == 0) {
@@ -73,7 +74,9 @@
     request.mixerConfig = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_config_base_t_AudioConfigBase(*mixerConfig, false /*isInput*/));
     request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_DeviceDescriptorBase(device));
-    request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+    request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_output_flags_t_int32_t_mask(*flags));
+    request.attributes = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(attributes));
 
     status_t status = af->openOutput(request, &response);
     if (status == OK) {
@@ -86,7 +89,9 @@
             .channel_mask = halConfig->channel_mask,
             .format = halConfig->format,
         };
-        mAudioPolicyService->registerOutput(*output, config, flags);
+        *flags = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_int32_t_audio_output_flags_t_mask(response.flags));
+        mAudioPolicyService->registerOutput(*output, config, *flags);
     }
     return status;
 }
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index cc67481..d529130 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -193,9 +193,7 @@
     media::AudioPolicyConfig apmConfig;
     if (status_t status = clientInterface->getAudioPolicyConfig(&apmConfig); status == OK) {
         auto config = AudioPolicyConfig::loadFromApmAidlConfigWithFallback(apmConfig);
-        LOG_ALWAYS_FATAL_IF(config->getEngineLibraryNameSuffix() !=
-                AudioPolicyConfig::kDefaultEngineLibraryNameSuffix,
-                "Only default engine is currently supported with the AIDL HAL");
+        ALOGD("%s loading APM engine %s", __func__, config->getEngineLibraryNameSuffix().c_str());
         apm = new AudioPolicyManager(config,
                 loadApmEngineLibraryAndCreateEngine(
                         config->getEngineLibraryNameSuffix(), apmConfig.engineConfig),
@@ -1415,8 +1413,8 @@
         } else {
             getIAudioPolicyServiceStatistics().event(code, elapsedMs);
         }
-    }, mediautils::TimeCheck::kDefaultTimeoutDuration,
-    mediautils::TimeCheck::kDefaultSecondChanceDuration,
+    }, mediautils::TimeCheck::getDefaultTimeoutDuration(),
+    mediautils::TimeCheck::getDefaultSecondChanceDuration(),
     true /* crashOnTimeout */);
 
     switch (code) {
@@ -1803,6 +1801,7 @@
                   ++numTimesBecameEmpty;
                 }
                 mLastCommand = command;
+                status_t createAudioPatchStatus = NO_ERROR;
 
                 switch (command->mCommand) {
                 case SET_VOLUME: {
@@ -1860,10 +1859,11 @@
                     ALOGV("AudioCommandThread() processing create audio patch");
                     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
                     if (af == 0) {
-                        command->mStatus = PERMISSION_DENIED;
+                        createAudioPatchStatus = PERMISSION_DENIED;
                     } else {
                         ul.unlock();
-                        command->mStatus = af->createAudioPatch(&data->mPatch, &data->mHandle);
+                        createAudioPatchStatus = af->createAudioPatch(&data->mPatch,
+                                                                      &data->mHandle);
                         ul.lock();
                     }
                     } break;
@@ -2032,8 +2032,28 @@
                 {
                     audio_utils::lock_guard _l(command->mMutex);
                     if (command->mWaitStatus) {
+                        if (command->mCommand == CREATE_AUDIO_PATCH) {
+                            command->mStatus = createAudioPatchStatus;
+                        }
                         command->mWaitStatus = false;
                         command->mCond.notify_one();
+                    } else if (command->mCommand == CREATE_AUDIO_PATCH &&
+                               command->mStatus == TIMED_OUT &&
+                               createAudioPatchStatus == NO_ERROR) {
+                        // Because of the special handling in insertCommand_l(), the
+                        // CREATE_AUDIO_PATCH command's wait status can only be false here if a
+                        // timeout (see TIMED_OUT) occurred.
+                        CreateAudioPatchData *createData =
+                                (CreateAudioPatchData *)command->mParam.get();
+                        ALOGW("AudioCommandThread() no caller awaiting for handle(%d) after \
+                                processing create audio patch, going to release it",
+                                createData->mHandle);
+                        sp<AudioCommand> releaseCommand = new AudioCommand();
+                        releaseCommand->mCommand = RELEASE_AUDIO_PATCH;
+                        ReleaseAudioPatchData *releaseData = new ReleaseAudioPatchData();
+                        releaseData->mHandle = createData->mHandle;
+                        releaseCommand->mParam = releaseData;
+                        insertCommand_l(releaseCommand, 0);
                     }
                 }
                 waitTime = -1;
@@ -2551,7 +2571,8 @@
 
     // Disable wait for status if delay is not 0.
     // Except for create audio patch command because the returned patch handle
-    // is needed by audio policy manager
+    // is needed by audio policy manager. An audio patch created after a timeout
+    // (see TIMED_OUT) will be released from threadLoop().
     if (delayMs != 0 && command->mCommand != CREATE_AUDIO_PATCH) {
         command->mWaitStatus = false;
     }
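The CREATE_AUDIO_PATCH changes above cover the case where the command completes after its caller has already timed out: the freshly created patch then has no owner, so threadLoop() queues a matching RELEASE_AUDIO_PATCH itself. Reduced to its essence, a sketch with simplified, hypothetical types rather than the actual AudioCommand machinery:

    #include <functional>
    #include <queue>

    // Sketch: if the waiter gave up (timed out) before the patch was created,
    // nobody will ever release the handle, so the worker schedules the release.
    struct PatchCommandResult {
        bool waiterStillPresent;            // false once the caller has hit its timeout
        bool created;                       // true if createAudioPatch() succeeded
        std::function<void()> releasePatch; // how to undo the creation
    };

    static void finishPatchCommand(const PatchCommandResult& r,
                                   std::queue<std::function<void()>>& workQueue) {
        if (!r.waiterStillPresent && r.created && r.releasePatch) {
            workQueue.push(r.releasePatch); // compensating release, no owner left
        }
    }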
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 699cacf..428e560 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -790,7 +790,8 @@
                                     audio_config_base_t *mixerConfig,
                                     const sp<DeviceDescriptorBase>& device,
                                     uint32_t *latencyMs,
-                                    audio_output_flags_t flags);
+                                    audio_output_flags_t *flags,
+                                    audio_attributes_t attributes);
         // creates a special output that is duplicated to the two outputs passed as arguments. The duplication is performed by
         // a special mixer thread in the AudioFlinger.
         virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1, audio_io_handle_t output2);
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index dc61115..c600fb6 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -12,6 +12,7 @@
     name: "audiopolicy_tests",
 
     defaults: [
+        "aconfig_lib_cc_shared_link.defaults",
         "latest_android_media_audio_common_types_cpp_static",
     ],
 
@@ -106,7 +107,7 @@
         "-Werror",
     ],
 
-    test_suites: ["device-tests"],
+    test_suites: ["general-tests"],
 
 }
 
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 46e6a0b..483f827 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -42,20 +42,21 @@
                         audio_config_base_t *mixerConfig,
                         const sp<DeviceDescriptorBase>& /*device*/,
                         uint32_t * /*latencyMs*/,
-                        audio_output_flags_t flags) override {
+                        audio_output_flags_t *flags,
+                        audio_attributes_t /*attributes*/) override {
         if (module >= mNextModuleHandle) {
             ALOGE("%s: Module handle %d has not been allocated yet (next is %d)",
                   __func__, module, mNextModuleHandle);
             return BAD_VALUE;
         }
         *output = mNextIoHandle++;
-        mOpenedOutputs[*output] = flags;
+        mOpenedOutputs[*output] = *flags;
         ALOGD("%s: opened output %d: HAL(%s %s %d) Mixer(%s %s %d) %s", __func__, *output,
               audio_channel_out_mask_to_string(halConfig->channel_mask),
               audio_format_to_string(halConfig->format), halConfig->sample_rate,
               audio_channel_out_mask_to_string(mixerConfig->channel_mask),
               audio_format_to_string(mixerConfig->format), mixerConfig->sample_rate,
-              android::toString(flags).c_str());
+              android::toString(*flags).c_str());
         return NO_ERROR;
     }
 
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index c15adcb..6116eab 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -37,7 +37,8 @@
                         audio_config_base_t* /*mixerConfig*/,
                         const sp<DeviceDescriptorBase>& /*device*/,
                         uint32_t* /*latencyMs*/,
-                        audio_output_flags_t /*flags*/) override { return NO_INIT; }
+                        audio_output_flags_t* /*flags*/,
+                        audio_attributes_t /*attributes*/) override { return NO_INIT; }
     audio_io_handle_t openDuplicateOutput(audio_io_handle_t /*output1*/,
                                           audio_io_handle_t /*output2*/) override {
         return AUDIO_IO_HANDLE_NONE;
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index 34ceeab..bf45bb2 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -24,9 +24,11 @@
     explicit AudioPolicyTestManager(AudioPolicyClientInterface *clientInterface)
             : AudioPolicyTestManager(AudioPolicyConfig::createDefault(), clientInterface) {}
     AudioPolicyTestManager(const sp<const AudioPolicyConfig>& config,
-            AudioPolicyClientInterface *clientInterface)
+            AudioPolicyClientInterface *clientInterface,
+            std::string engineConfig = "")
             : AudioPolicyManager(config,
-                    loadApmEngineLibraryAndCreateEngine(config->getEngineLibraryNameSuffix()),
+                    loadApmEngineLibraryAndCreateEngine(config->getEngineLibraryNameSuffix(),
+                                                        engineConfig),
                     clientInterface) {}
     using AudioPolicyManager::getConfig;
     using AudioPolicyManager::initialize;
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 75d17d2..5278b73 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -227,12 +227,17 @@
     sp<AudioPolicyConfig> mConfig;
     std::unique_ptr<AudioPolicyManagerTestClient> mClient;
     std::unique_ptr<AudioPolicyTestManager> mManager;
+
+    static const std::string sTestEngineConfig;
 };
 
+const std::string AudioPolicyManagerTest::sTestEngineConfig =
+        base::GetExecutableDirectory() + "/engine/test_audio_policy_engine_configuration.xml";
+
 void AudioPolicyManagerTest::SetUp() {
     mClient.reset(getClient());
     ASSERT_NO_FATAL_FAILURE(SetUpManagerConfig());  // Subclasses may want to customize the config.
-    mManager.reset(new AudioPolicyTestManager(mConfig, mClient.get()));
+    mManager.reset(new AudioPolicyTestManager(mConfig, mClient.get(), sTestEngineConfig));
     ASSERT_EQ(NO_ERROR, mManager->initialize());
     ASSERT_EQ(NO_ERROR, mManager->initCheck());
 }
@@ -493,6 +498,9 @@
 void AudioPolicyManagerTestMsd::SetUpManagerConfig() {
     // TODO: Consider using Serializer to load part of the config from a string.
     ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTest::SetUpManagerConfig());
+    mConfig->getHwModules().getModuleFromName(
+            AUDIO_HARDWARE_MODULE_ID_PRIMARY)->setHalVersion(3, 0);
+
     mMsdOutputDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_BUS);
     sp<AudioProfile> pcmOutputProfile = new AudioProfile(
             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
@@ -524,7 +532,7 @@
                 addOutputProfile(spdifOutputProfile);
     }
 
-    sp<HwModule> msdModule = new HwModule(AUDIO_HARDWARE_MODULE_ID_MSD, 2 /*halVersionMajor*/);
+    sp<HwModule> msdModule = new HwModule(AUDIO_HARDWARE_MODULE_ID_MSD, 3 /*halVersionMajor*/);
     HwModuleCollection modules = mConfig->getHwModules();
     modules.add(msdModule);
     mConfig->setHwModules(modules);
@@ -2512,10 +2520,12 @@
                         audio_config_base_t * mixerConfig,
                         const sp<DeviceDescriptorBase>& device,
                         uint32_t * latencyMs,
-                        audio_output_flags_t flags) override {
+                        audio_output_flags_t *flags,
+                        audio_attributes_t attributes) override {
         return mSimulateFailure ? BAD_VALUE :
                 AudioPolicyManagerTestClient::openOutput(
-                        module, output, halConfig, mixerConfig, device, latencyMs, flags);
+                        module, output, halConfig, mixerConfig, device, latencyMs, flags,
+                        attributes);
     }
 
     status_t openInput(audio_module_handle_t module,
@@ -2532,8 +2542,29 @@
 
     void setSimulateFailure(bool simulateFailure) { mSimulateFailure = simulateFailure; }
 
+    void setSimulateBroadcastDeviceStatus(audio_devices_t device, status_t status) {
+        if (status != NO_ERROR) {
+            // simulate a fixed device connection status
+            mSimulateBroadcastDeviceStatus[device] = status;
+        } else {
+            // remove device connection fixed status
+            mSimulateBroadcastDeviceStatus.erase(device);
+        }
+    }
+
+    status_t setDeviceConnectedState(const struct audio_port_v7* port,
+                                     media::DeviceConnectedState state) override {
+        if (mSimulateBroadcastDeviceStatus.find(port->ext.device.type) !=
+            mSimulateBroadcastDeviceStatus.end()) {
+            // If a simulated status exists, return it instead of calling through
+            return mSimulateBroadcastDeviceStatus[port->ext.device.type];
+        }
+        return AudioPolicyManagerTestClient::setDeviceConnectedState(port, state);
+    }
+
   private:
     bool mSimulateFailure = false;
+    std::map<audio_devices_t, status_t> mSimulateBroadcastDeviceStatus;
 };
 
 }  // namespace
@@ -2554,6 +2585,9 @@
     void setSimulateOpenFailure(bool simulateFailure) {
         mFullClient->setSimulateFailure(simulateFailure); }
 
+    void setSimulateBroadcastDeviceStatus(audio_devices_t device, status_t status) {
+        mFullClient->setSimulateBroadcastDeviceStatus(device, status); }
+
     static const std::string sBluetoothConfig;
 
   private:
@@ -2597,6 +2631,30 @@
     }
 }
 
+TEST_P(AudioPolicyManagerTestDeviceConnectionFailed, BroadcastDeviceFailure) {
+    const audio_devices_t type = std::get<0>(GetParam());
+    const std::string name = std::get<1>(GetParam());
+    const std::string address = std::get<2>(GetParam());
+    const audio_format_t format = std::get<3>(GetParam());
+
+    // simulate broadcastDeviceConnectionState returning a failure
+    setSimulateBroadcastDeviceStatus(type, INVALID_OPERATION);
+    ASSERT_EQ(INVALID_OPERATION, mManager->setDeviceConnectionState(
+            type, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+            address.c_str(), name.c_str(), format));
+
+    // if the broadcast fails, the device should not be added to the available devices list
+    if (audio_is_output_device(type)) {
+        auto availableDevices = mManager->getAvailableOutputDevices();
+        EXPECT_FALSE(availableDevices.containsDeviceWithType(type));
+    } else if (audio_is_input_device(type)) {
+        auto availableDevices = mManager->getAvailableInputDevices();
+        EXPECT_FALSE(availableDevices.containsDeviceWithType(type));
+    }
+
+    setSimulateBroadcastDeviceStatus(type, NO_ERROR);
+}
+
 INSTANTIATE_TEST_CASE_P(
         DeviceConnectionFailure,
         AudioPolicyManagerTestDeviceConnectionFailed,
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index 42a57d3..8e7a697 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -11,6 +11,10 @@
 filegroup {
     name: "audiopolicytest_configuration_files",
     srcs: [
+        "engine/test_audio_policy_engine_configuration.xml",
+        "engine/test_audio_policy_engine_default_stream_volumes.xml",
+        "engine/test_audio_policy_engine_product_strategies.xml",
+        "engine/test_audio_policy_engine_stream_volumes.xml",
         "test_audio_policy_configuration.xml",
         "test_audio_policy_configuration_bluetooth.xml",
         "test_audio_policy_primary_only_configuration.xml",
diff --git a/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_configuration.xml b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_configuration.xml
new file mode 100644
index 0000000..dc2e7af
--- /dev/null
+++ b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_configuration.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+     -->
+
+<configuration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+
+    <xi:include href="test_audio_policy_engine_product_strategies.xml"/>
+    <xi:include href="test_audio_policy_engine_stream_volumes.xml"/>
+    <xi:include href="test_audio_policy_engine_default_stream_volumes.xml"/>
+
+</configuration>
+
diff --git a/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_default_stream_volumes.xml b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_default_stream_volumes.xml
new file mode 100644
index 0000000..d184cb5
--- /dev/null
+++ b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_default_stream_volumes.xml
@@ -0,0 +1,136 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- Default Volume Tables included by Audio Policy Configuration file -->
+<!-- Full Default Volume table for all device category -->
+<volumes>
+    <reference name="FULL_SCALE_VOLUME_CURVE">
+    <!-- Full Scale reference Volume Curve -->
+        <point>0,0</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="SILENT_VOLUME_CURVE">
+        <point>0,-9600</point>
+        <point>100,-9600</point>
+    </reference>
+    <reference name="DEFAULT_SYSTEM_VOLUME_CURVE">
+    <!-- Default System reference Volume Curve -->
+        <point>1,-2400</point>
+        <point>33,-1800</point>
+        <point>66,-1200</point>
+        <point>100,-600</point>
+    </reference>
+    <reference name="DEFAULT_MEDIA_VOLUME_CURVE">
+    <!-- Default Media reference Volume Curve -->
+        <point>1,-5800</point>
+        <point>20,-4000</point>
+        <point>60,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_DEVICE_CATEGORY_HEADSET_VOLUME_CURVE">
+    <!--Default Volume Curve -->
+        <point>1,-4950</point>
+        <point>33,-3350</point>
+        <point>66,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_DEVICE_CATEGORY_SPEAKER_VOLUME_CURVE">
+    <!-- Default is Speaker Media Volume Curve -->
+        <point>1,-5800</point>
+        <point>20,-4000</point>
+        <point>60,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_DEVICE_CATEGORY_SPEAKER_SYSTEM_VOLUME_CURVE">
+    <!-- Default is Speaker System Volume Curve -->
+        <point>1,-4680</point>
+        <point>42,-2070</point>
+        <point>85,-540</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_DEVICE_CATEGORY_EARPIECE_VOLUME_CURVE">
+    <!--Default Volume Curve -->
+        <point>1,-4950</point>
+        <point>33,-3350</point>
+        <point>66,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_DEVICE_CATEGORY_EXT_MEDIA_VOLUME_CURVE">
+    <!-- Default is Ext Media System Volume Curve -->
+        <point>1,-5800</point>
+        <point>20,-4000</point>
+        <point>60,-2100</point>
+        <point>100,-1000</point>
+    </reference>
+    <reference name="DEFAULT_HEARING_AID_VOLUME_CURVE">
+    <!-- Default Hearing Aid Volume Curve -->
+        <point>1,-12700</point>
+        <point>20,-8000</point>
+        <point>60,-4000</point>
+        <point>100,0</point>
+    </reference>
+    <!-- **************************************************************** -->
+    <!-- Non-mutable default volume curves:                               -->
+    <!--     * first point is always for index 0                          -->
+    <!--     * attenuation is small enough that stream can still be heard -->
+    <reference name="DEFAULT_NON_MUTABLE_VOLUME_CURVE">
+    <!-- Default non-mutable reference Volume Curve -->
+    <!--        based on DEFAULT_MEDIA_VOLUME_CURVE -->
+        <point>0,-5800</point>
+        <point>20,-4000</point>
+        <point>60,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_NON_MUTABLE_HEADSET_VOLUME_CURVE">
+    <!--Default non-mutable Volume Curve for headset -->
+    <!--    based on DEFAULT_DEVICE_CATEGORY_HEADSET_VOLUME_CURVE -->
+        <point>0,-4950</point>
+        <point>33,-3350</point>
+        <point>66,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_NON_MUTABLE_SPEAKER_VOLUME_CURVE">
+    <!-- Default non-mutable Speaker Volume Curve -->
+    <!--    based on DEFAULT_DEVICE_CATEGORY_SPEAKER_VOLUME_CURVE -->
+        <point>0,-5800</point>
+        <point>20,-4000</point>
+        <point>60,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_NON_MUTABLE_EARPIECE_VOLUME_CURVE">
+    <!--Default non-mutable Volume Curve -->
+    <!--    based on DEFAULT_DEVICE_CATEGORY_EARPIECE_VOLUME_CURVE -->
+        <point>0,-4950</point>
+        <point>33,-3350</point>
+        <point>66,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_NON_MUTABLE_EXT_VOLUME_CURVE">
+    <!-- Default non-mutable Ext Media System Volume Curve -->
+    <!--     based on DEFAULT_DEVICE_CATEGORY_EXT_MEDIA_VOLUME_CURVE -->
+        <point>0,-5800</point>
+        <point>20,-4000</point>
+        <point>60,-2100</point>
+        <point>100,-1000</point>
+    </reference>
+    <reference name="DEFAULT_NON_MUTABLE_HEARING_AID_VOLUME_CURVE">
+    <!-- Default non-mutable Hearing Aid Volume Curve -->
+    <!--     based on DEFAULT_HEARING_AID_VOLUME_CURVE -->
+        <point>0,-12700</point>
+        <point>20,-8000</point>
+        <point>60,-4000</point>
+        <point>100,0</point>
+    </reference>
+</volumes>
diff --git a/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_product_strategies.xml b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_product_strategies.xml
new file mode 100644
index 0000000..58e7152
--- /dev/null
+++ b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_product_strategies.xml
@@ -0,0 +1,98 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+     -->
+
+<ProductStrategies>
+
+    <!-- "hidden strategies" like TTS, enforced audible:
+            Shall we expose them here or keep it hard coded -->
+
+    <!-- Used to identify the volume of audio streams for enforced system sounds in certain
+         countries (e.g. camera in Japan)
+         This strategy will only have higher priority than phone if force for system is set to
+         enforced. -->
+
+    <ProductStrategy name="STRATEGY_PHONE">
+        <AttributesGroup streamType="AUDIO_STREAM_VOICE_CALL" volumeGroup="voice_call">
+            <Attributes> <Usage value="AUDIO_USAGE_VOICE_COMMUNICATION"/> </Attributes>
+        </AttributesGroup>
+        <AttributesGroup streamType="AUDIO_STREAM_BLUETOOTH_SCO" volumeGroup="bluetooth_sco">
+            <Attributes> <Flags value="AUDIO_FLAG_SCO"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+    <ProductStrategy name="STRATEGY_SONIFICATION">
+        <AttributesGroup streamType="AUDIO_STREAM_RING" volumeGroup="ring">
+            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE"/> </Attributes>
+        </AttributesGroup>
+        <AttributesGroup streamType="AUDIO_STREAM_ALARM" volumeGroup="alarm">
+            <Attributes> <Usage value="AUDIO_USAGE_ALARM"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+    <ProductStrategy name="STRATEGY_ENFORCED_AUDIBLE">
+        <AttributesGroup streamType="AUDIO_STREAM_ENFORCED_AUDIBLE" volumeGroup="enforced_audible">
+            <Attributes> <Flags value="AUDIO_FLAG_AUDIBILITY_ENFORCED"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+    <ProductStrategy name="STRATEGY_ACCESSIBILITY">
+        <AttributesGroup streamType="AUDIO_STREAM_ACCESSIBILITY" volumeGroup="accessibility">
+            <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+    <ProductStrategy name="STRATEGY_SONIFICATION_RESPECTFUL">
+        <AttributesGroup streamType="AUDIO_STREAM_NOTIFICATION" volumeGroup="notification">
+            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION"/> </Attributes>
+            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_EVENT"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+    <ProductStrategy name="STRATEGY_MEDIA">
+        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
+            <Attributes>
+                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
+                <Usage value="AUDIO_USAGE_ASSISTANT"/>
+            </Attributes>
+        </AttributesGroup>
+         <AttributesGroup streamType="AUDIO_STREAM_MUSIC" volumeGroup="music">
+            <Attributes> <Usage value="AUDIO_USAGE_MEDIA"/> </Attributes>
+            <Attributes> <Usage value="AUDIO_USAGE_GAME"/> </Attributes>
+            <Attributes> <Usage value="AUDIO_USAGE_ASSISTANT"/> </Attributes>
+            <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/> </Attributes>
+            <Attributes></Attributes>
+        </AttributesGroup>
+        <AttributesGroup streamType="AUDIO_STREAM_SYSTEM" volumeGroup="system">
+            <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_SONIFICATION"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+    <ProductStrategy name="STRATEGY_DTMF">
+        <AttributesGroup streamType="AUDIO_STREAM_DTMF" volumeGroup="dtmf">
+            <Attributes> <Usage value="AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+    <!-- Used to identify the volume of audio streams exclusively transmitted through the  speaker
+         (TTS) of the device -->
+    <ProductStrategy name="STRATEGY_TRANSMITTED_THROUGH_SPEAKER">
+        <AttributesGroup streamType="AUDIO_STREAM_TTS" volumeGroup="tts">
+            <Attributes> <Flags value="AUDIO_FLAG_BEACON"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+</ProductStrategies>
+
diff --git a/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_stream_volumes.xml b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_stream_volumes.xml
new file mode 100644
index 0000000..af517cf
--- /dev/null
+++ b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_stream_volumes.xml
@@ -0,0 +1,221 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- Volume section defines a volume curve for a given use case and device category.
+It contains a list of points of this curve expressing the attenuation in millibels for a given
+volume index from 0 to 100.
+<volume deviceCategory="">
+<point>0,-9600</point>
+<point>100,0</point>
+</volume>
+-->
+
+<volumeGroups>
+    <volumeGroup>
+        <name>voice_call</name>
+        <indexMin>1</indexMin>
+        <indexMax>7</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET">
+            <point>0,-4200</point>
+            <point>33,-2800</point>
+            <point>66,-1400</point>
+            <point>100,0</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>0,-2400</point>
+            <point>33,-1600</point>
+            <point>66,-800</point>
+            <point>100,0</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE">
+            <point>0,-2700</point>
+            <point>33,-1800</point>
+            <point>66,-900</point>
+            <point>100,0</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>system</name>
+        <indexMin>0</indexMin>
+        <indexMax>7</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET">
+            <point>1,-3000</point>
+            <point>33,-2600</point>
+            <point>66,-2200</point>
+            <point>100,-1800</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>1,-5100</point>
+            <point>57,-2800</point>
+            <point>71,-2500</point>
+            <point>85,-2300</point>
+            <point>100,-2100</point>
+        </volume>
+        <!--volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_SYSTEM_VOLUME_CURVE"/-->
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_SYSTEM_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_DEVICE_CATEGORY_EXT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>ring</name>
+        <indexMin>0</indexMin>
+        <indexMax>7</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="DEFAULT_DEVICE_CATEGORY_HEADSET_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_DEVICE_CATEGORY_SPEAKER_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_DEVICE_CATEGORY_EARPIECE_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_DEVICE_CATEGORY_EXT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>music</name>
+        <indexMin>0</indexMin>
+        <indexMax>25</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_DEVICE_CATEGORY_SPEAKER_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID"  ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>alarm</name>
+        <indexMin>1</indexMin>
+        <indexMax>7</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="DEFAULT_NON_MUTABLE_HEADSET_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_NON_MUTABLE_SPEAKER_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_NON_MUTABLE_EARPIECE_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_NON_MUTABLE_EXT_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_NON_MUTABLE_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>notification</name>
+        <indexMin>0</indexMin>
+        <indexMax>7</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="DEFAULT_DEVICE_CATEGORY_HEADSET_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_DEVICE_CATEGORY_SPEAKER_SYSTEM_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_DEVICE_CATEGORY_EARPIECE_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_DEVICE_CATEGORY_EXT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_DEVICE_CATEGORY_HEADSET_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>bluetooth_sco</name>
+        <indexMin>0</indexMin>
+        <indexMax>15</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET">
+            <point>0,-4200</point>
+            <point>33,-2800</point>
+            <point>66,-1400</point>
+            <point>100,0</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>0,-2400</point>
+            <point>33,-1600</point>
+            <point>66,-800</point>
+            <point>100,0</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE">
+            <point>0,-4200</point>
+            <point>33,-2800</point>
+            <point>66,-1400</point>
+            <point>100,0</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>enforced_audible</name>
+        <indexMin>0</indexMin>
+        <indexMax>7</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET">
+            <point>1,-3000</point>
+            <point>33,-2600</point>
+            <point>66,-2200</point>
+            <point>100,-1800</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>1,-3400</point>
+            <point>71,-2400</point>
+            <point>100,-2000</point>
+        </volume>
+        <!--volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_SYSTEM_VOLUME_CURVE"/-->
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_SYSTEM_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_DEVICE_CATEGORY_EXT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>dtmf</name>
+        <indexMin>0</indexMin>
+        <indexMax>15</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET">
+            <point>1,-3000</point>
+            <point>33,-2600</point>
+            <point>66,-2200</point>
+            <point>100,-1800</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>1,-4000</point>
+            <point>71,-2400</point>
+            <point>100,-1400</point>
+        </volume>
+        <!--volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_SYSTEM_VOLUME_CURVE"/-->
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_SYSTEM_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_DEVICE_CATEGORY_EXT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>tts</name>
+        <indexMin>0</indexMin>
+        <indexMax>15</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="SILENT_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="FULL_SCALE_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="SILENT_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="SILENT_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="SILENT_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>accessibility</name>
+        <indexMin>1</indexMin>
+        <indexMax>15</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="DEFAULT_NON_MUTABLE_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_NON_MUTABLE_SPEAKER_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_NON_MUTABLE_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_NON_MUTABLE_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_NON_MUTABLE_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>assistant</name>
+        <indexMin>0</indexMin>
+        <indexMax>15</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_DEVICE_CATEGORY_SPEAKER_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID"  ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+</volumeGroups>
+
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 38476a4..0c4bfcb 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -265,9 +265,21 @@
         "liblog",
         "libutils",
         "libxml2",
-        "camera_platform_flags_c_lib",
     ],
 
+    target: {
+        android: {
+            shared_libs: [
+                "camera_platform_flags_c_lib",
+            ],
+        },
+        host: {
+            shared_libs: [
+                "camera_platform_flags_c_lib_for_test",
+            ],
+        },
+    },
+
     include_dirs: [
         "frameworks/av/camera/include",
         "frameworks/av/camera/include/camera",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index acc3b32..516e8f0 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -513,7 +513,7 @@
 void CameraService::onDeviceStatusChanged(const std::string& cameraId,
         CameraDeviceStatus newHalStatus) {
     ALOGI("%s: Status changed for cameraId=%s, newStatus=%d", __FUNCTION__,
-            cameraId.c_str(), newHalStatus);
+            cameraId.c_str(), eToI(newHalStatus));
 
     StatusInternal newStatus = mapToInternal(newHalStatus);
 
@@ -537,7 +537,8 @@
     StatusInternal oldStatus = state->getStatus();
 
     if (oldStatus == newStatus) {
-        ALOGE("%s: State transition to the same status %#x not allowed", __FUNCTION__, newStatus);
+        ALOGE("%s: State transition to the same status %#x not allowed", __FUNCTION__,
+                eToI(newStatus));
         return;
     }
 
@@ -580,7 +581,7 @@
         const std::string& physicalId,
         CameraDeviceStatus newHalStatus) {
     ALOGI("%s: Status changed for cameraId=%s, physicalCameraId=%s, newStatus=%d",
-            __FUNCTION__, id.c_str(), physicalId.c_str(), newHalStatus);
+            __FUNCTION__, id.c_str(), physicalId.c_str(), eToI(newHalStatus));
 
     StatusInternal newStatus = mapToInternal(newHalStatus);
 
@@ -596,7 +597,7 @@
     if (logicalCameraStatus != StatusInternal::PRESENT &&
             logicalCameraStatus != StatusInternal::NOT_AVAILABLE) {
         ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
-                __FUNCTION__, physicalId.c_str(), newHalStatus, logicalCameraStatus);
+                __FUNCTION__, physicalId.c_str(), eToI(newHalStatus), eToI(logicalCameraStatus));
         return;
     }
 
@@ -688,7 +689,7 @@
 void CameraService::onTorchStatusChangedLocked(const std::string& cameraId,
         TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
     ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d",
-            __FUNCTION__, cameraId.c_str(), newStatus);
+            __FUNCTION__, cameraId.c_str(), eToI(newStatus));
 
     TorchModeStatus status;
     status_t res = getTorchStatusLocked(cameraId, &status);
@@ -1536,7 +1537,7 @@
             s << ", " << std::to_string(i);
         }
     }
-    return std::move(s.str());
+    return s.str();
 }
 
 int32_t CameraService::mapToInterface(TorchModeStatus status) {
@@ -1552,7 +1553,7 @@
             serviceStatus = ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON;
             break;
         default:
-            ALOGW("Unknown new flash status: %d", status);
+            ALOGW("Unknown new flash status: %d", eToI(status));
     }
     return serviceStatus;
 }
@@ -1570,7 +1571,7 @@
             serviceStatus = StatusInternal::ENUMERATING;
             break;
         default:
-            ALOGW("Unknown new HAL device status: %d", status);
+            ALOGW("Unknown new HAL device status: %d", eToI(status));
     }
     return serviceStatus;
 }
@@ -1594,7 +1595,7 @@
             serviceStatus = ICameraServiceListener::STATUS_UNKNOWN;
             break;
         default:
-            ALOGW("Unknown new internal device status: %d", status);
+            ALOGW("Unknown new internal device status: %d", eToI(status));
     }
     return serviceStatus;
 }
@@ -5171,7 +5172,7 @@
     }
     if (hasAny) ret << "\n";
     ret << "]\n";
-    return std::move(ret.str());
+    return ret.str();
 }
 
 CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor(
@@ -5777,7 +5778,7 @@
     }
 
     ALOGV("%s: Status has changed for camera ID %s from %#x to %#x", __FUNCTION__,
-            cameraId.c_str(), oldStatus, status);
+            cameraId.c_str(), eToI(oldStatus), eToI(status));
 
     if (oldStatus == StatusInternal::NOT_PRESENT &&
             (status != StatusInternal::PRESENT &&
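
The std::move() removals in this file (and the similar ones further down) fix a
pessimizing move: std::ostringstream::str() already yields a prvalue, so wrapping
it in std::move() only suppresses copy elision and trips clang's
-Wpessimizing-move. A minimal illustrative sketch, not taken from this patch:

    #include <sstream>
    #include <string>

    std::string buildStatusLine() {
        std::ostringstream s;
        s << "camera status changed";
        return s.str();                 // prvalue: elided or moved automatically
        // return std::move(s.str());   // blocks elision, -Wpessimizing-move
    }
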
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
index d23566c..1ec5072 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.cpp
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -26,6 +26,7 @@
 #include <device3/Camera3StreamInterface.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 #include <mediautils/AImageReaderUtils.h>
+#include "utils/Utils.h"
 
 namespace android::hardware::cameraservice::utils::conversion::aidl {
 
@@ -347,8 +348,8 @@
     // Ensure the vendor ID are the same before attempting
     // anything else. If vendor IDs differ we cannot safely copy the characteristics.
     if (from.getVendorId() != to->getVendorId()) {
-        ALOGE("%s: Incompatible CameraMetadata objects. Vendor IDs differ. From: %lu; To: %lu",
-              __FUNCTION__, from.getVendorId(), to->getVendorId());
+        ALOGE("%s: Incompatible CameraMetadata objects. Vendor IDs differ. From: %" PRIu64
+              "; To: %" PRIu64, __FUNCTION__, from.getVendorId(), to->getVendorId());
         return BAD_VALUE;
     }
 
@@ -364,7 +365,7 @@
     for (size_t i = 0; i < get_camera_metadata_entry_count(src); i++) {
         int ret = get_camera_metadata_ro_entry(src, i, &entry);
         if (ret != OK) {
-            ALOGE("%s: Could not fetch entry at index %lu. Error: %d", __FUNCTION__, i, ret);
+            ALOGE("%s: Could not fetch entry at index %zu. Error: %d", __FUNCTION__, i, ret);
             from.unlock(src);
             return BAD_VALUE;
         }
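
The format-string fixes above replace %lu with portable spellings: uint64_t is
unsigned long only on LP64 targets, so it needs PRIu64 from <cinttypes>, and
size_t takes %zu. A hedged sketch with illustrative names and values:

    #include <cinttypes>
    #include <cstdio>

    void logMetadataIds(uint64_t vendorId, size_t entryIndex) {
        std::printf("vendor=%" PRIu64 " entry=%zu\n", vendorId, entryIndex);
    }
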
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 61577e4..0d89b3e 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -420,7 +420,7 @@
         result << "    none\n";
     }
 
-    std::string resultStr = std::move(result.str());
+    std::string resultStr = result.str();
 
     write(fd, resultStr.c_str(), resultStr.size());
 
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 086a4ce..89e268c 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -40,6 +40,7 @@
 #include "common/CameraDeviceBase.h"
 #include "utils/ExifUtils.h"
 #include "utils/SessionConfigurationUtils.h"
+#include "utils/Utils.h"
 #include "HeicEncoderInfoManager.h"
 #include "HeicCompositeStream.h"
 
@@ -1466,7 +1467,7 @@
     const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(CameraBlob));
     const CameraBlob *blob = (const CameraBlob*)(header);
     if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
-        ALOGE("%s: Invalid EXIF blobId %d", __FUNCTION__, blob->blobId);
+        ALOGE("%s: Invalid EXIF blobId %d", __FUNCTION__, eToI(blob->blobId));
         return 0;
     }
 
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 6416c11..2440c37 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -51,6 +51,7 @@
 
 #include "api2/HeicCompositeStream.h"
 #include "device3/ZoomRatioMapper.h"
+#include "utils/Utils.h"
 
 namespace android {
 
@@ -92,7 +93,7 @@
         case TorchModeStatus::AVAILABLE_ON:
             return "AVAILABLE_ON";
     }
-    ALOGW("Unexpected HAL torch mode status code %d", s);
+    ALOGW("Unexpected HAL torch mode status code %d", eToI(s));
     return "UNKNOWN_STATUS";
 }
 
@@ -105,7 +106,7 @@
         case CameraDeviceStatus::ENUMERATING:
             return "ENUMERATING";
     }
-    ALOGW("Unexpected HAL device status code %d", s);
+    ALOGW("Unexpected HAL device status code %d", eToI(s));
     return "UNKNOWN_STATUS";
 }
 
@@ -2279,7 +2280,7 @@
                         return tryToInitializeAidlProviderLocked(removedAidlProviderName,
                                 providerInfo);
                     default:
-                        ALOGE("%s Unsupported Transport %d", __FUNCTION__, providerTransport);
+                        ALOGE("%s Unsupported Transport %d", __FUNCTION__, eToI(providerTransport));
                 }
             }
         }
@@ -2366,7 +2367,7 @@
             }
             break;
         default:
-            ALOGE("%s Invalid transport %d", __FUNCTION__, transport);
+            ALOGE("%s Invalid transport %d", __FUNCTION__, eToI(transport));
             return BAD_VALUE;
     }
 
@@ -2712,7 +2713,7 @@
         }
         if (!known) {
             ALOGW("Camera provider %s says an unknown camera %s now has torch status %d. Curious.",
-                    mProviderName.c_str(), cameraDeviceName.c_str(), newStatus);
+                mProviderName.c_str(), cameraDeviceName.c_str(), eToI(newStatus));
             return;
         }
         // no lock needed since listener is set up only once during
diff --git a/services/camera/libcameraservice/common/HalConversionsTemplated.h b/services/camera/libcameraservice/common/HalConversionsTemplated.h
index 96a715c..c586062 100644
--- a/services/camera/libcameraservice/common/HalConversionsTemplated.h
+++ b/services/camera/libcameraservice/common/HalConversionsTemplated.h
@@ -19,6 +19,7 @@
 #include "common/CameraProviderManager.h"
 
 #include <device3/Camera3StreamInterface.h>
+#include <utils/Utils.h>
 
 namespace android {
 
@@ -48,7 +49,7 @@
         case HalCameraDeviceStatus::ENUMERATING:
             return CameraDeviceStatus::ENUMERATING;
     }
-    ALOGW("Unexpectedcamera device status code %d", s);
+    ALOGW("Unexpectedcamera device status code %d", eToI(s));
     return CameraDeviceStatus::NOT_PRESENT;
 }
 
@@ -74,7 +75,7 @@
         case HalTorchModeStatus::AVAILABLE_ON:
             return TorchModeStatus::AVAILABLE_ON;
     }
-    ALOGW("Unexpectedcamera torch mode status code %d", s);
+    ALOGW("Unexpectedcamera torch mode status code %d", eToI(s));
     return TorchModeStatus::NOT_AVAILABLE;
 }
 
@@ -88,7 +89,7 @@
         case HalCameraDeviceStatus::ENUMERATING:
             return "ENUMERATING";
     }
-    ALOGW("Unexpected HAL device status code %d", s);
+    ALOGW("Unexpected HAL device status code %d", eToI(s));
     return "UNKNOWN_STATUS";
 }
 
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 1e546fb..40800d9 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -26,6 +26,7 @@
 #include "device3/ZoomRatioMapper.h"
 #include <utils/SessionConfigurationUtilsHidl.h>
 #include <utils/Trace.h>
+#include <utils/Utils.h>
 
 #include <android/hardware/camera/device/3.7/ICameraDevice.h>
 
@@ -69,7 +70,7 @@
         case Status::INTERNAL_ERROR:
             return INVALID_OPERATION;
     }
-    ALOGW("Unexpected HAL status code %d", s);
+    ALOGW("Unexpected HAL status code %d", eToI(s));
     return INVALID_OPERATION;
 }
 
@@ -111,7 +112,7 @@
         case Status::INTERNAL_ERROR:
             return "INTERNAL_ERROR";
     }
-    ALOGW("Unexpected HAL status code %d", s);
+    ALOGW("Unexpected HAL status code %d", eToI(s));
     return "UNKNOWN_ERROR";
 }
 
@@ -591,7 +592,7 @@
     }
     if (status != Status::OK) {
         ALOGE("%s: Unable to get camera characteristics for device %s: %s (%d)",
-                __FUNCTION__, id.c_str(), statusToString(status), status);
+                __FUNCTION__, id.c_str(), statusToString(status), eToI(status));
         return;
     }
 
@@ -770,7 +771,7 @@
             if (status != Status::OK) {
                 ALOGE("%s: Unable to get physical camera %s characteristics for device %s: %s (%d)",
                         __FUNCTION__, id.c_str(), mId.c_str(),
-                        statusToString(status), status);
+                        statusToString(status), eToI(status));
                 return;
             }
 
@@ -928,7 +929,7 @@
                 res = INVALID_OPERATION;
                 break;
             default:
-                ALOGE("%s: Session configuration query failed: %d", __FUNCTION__, callStatus);
+                ALOGE("%s: Session configuration query failed: %d", __FUNCTION__, eToI(callStatus));
                 res = UNKNOWN_ERROR;
         }
     } else {
@@ -1076,7 +1077,7 @@
                         break;
                     default:
                         ALOGE("%s: Session configuration query failed: %d", __FUNCTION__,
-                                  callStatus);
+                                eToI(callStatus));
                         res = UNKNOWN_ERROR;
                 }
             } else {
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
index c42e51a..65fee7d 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
@@ -485,7 +485,7 @@
                     streamId, bufferCount);
         }
     }
-    std::string linesStr = std::move(lines.str());
+    std::string linesStr = lines.str();
     write(fd, linesStr.c_str(), linesStr.size());
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 22d2716..61c5a3b 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -104,7 +104,7 @@
     lines << fmt::sprintf("      Total buffers: %zu, currently dequeued: %zu, "
             "currently cached: %zu\n", mTotalBufferCount, mHandoutTotalBufferCount,
             mCachedOutputBufferCount);
-    std::string linesStr = std::move(lines.str());
+    std::string linesStr = lines.str();
     write(fd, linesStr.c_str(), linesStr.size());
 
     Camera3Stream::dump(fd, args);
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 9dacaf6..09299e6 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -56,6 +56,7 @@
 #include "device3/hidl/HidlCamera3OfflineSession.h"
 #include "utils/SessionConfigurationUtilsHidl.h"
 #include "utils/TraceHFR.h"
+#include "utils/Utils.h"
 
 #include "../../common/hidl/HidlProviderInfo.h"
 #include "HidlCamera3Device.h"
@@ -881,7 +882,7 @@
                     ret = true;
                     break;
                 default:
-                    ALOGV("%s: Reconfiguration query failed: %d", __FUNCTION__, callStatus);
+                    ALOGV("%s: Reconfiguration query failed: %d", __FUNCTION__, eToI(callStatus));
                     ret = true;
             }
         } else {
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-0 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..4c56959
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-1 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..fc0e371
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-10 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-10
new file mode 100644
index 0000000..1266b3e
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-10
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-11 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-11
new file mode 100644
index 0000000..cb1c0e4
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-11
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-12 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-12
new file mode 100644
index 0000000..ab820a4
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-12
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-13 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-13
new file mode 100644
index 0000000..6051e9a
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-13
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-14 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-14
new file mode 100644
index 0000000..596e55b
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-14
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-15 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-15
new file mode 100644
index 0000000..20d7dcb
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-15
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-2 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..5bbfa56
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-3 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..cd148f6
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-4 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-4
new file mode 100644
index 0000000..e4ddb50
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-4
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-5 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-5
new file mode 100644
index 0000000..3be3ce1
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-5
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-6 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-6
new file mode 100644
index 0000000..3b51e41
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-6
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-7 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-7
new file mode 100644
index 0000000..3b929df
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-7
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-8 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-8
new file mode 100644
index 0000000..f92337b
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-8
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-9 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-9
new file mode 100644
index 0000000..0fe0f06
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-9
Binary files differ
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 55e2c9d..d49aad6 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -44,13 +44,25 @@
         "libjpeg",
         "liblog",
         "libutils",
-        "camera_platform_flags_c_lib",
     ],
 
     static_libs: [
         "libgmock",
     ],
 
+    target: {
+        android: {
+            shared_libs: [
+                "camera_platform_flags_c_lib",
+            ],
+        },
+        host: {
+            shared_libs: [
+                "camera_platform_flags_c_lib_for_test",
+            ],
+        },
+    },
+
     cflags: [
         "-Wall",
         "-Wextra",
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index d5e3790..85bca6f 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -268,6 +268,12 @@
             const auto& gbps = config.getGraphicBufferProducers();
             int32_t width = 0, height = 0;
             if (gbps.size() > 0) {
+                if (gbps[0] == nullptr) {
+                    ALOGE("%s: Failed to query size due to abandoned surface.",
+                            __FUNCTION__);
+                    return CameraFeatureCombinationStats::CAMERA_FEATURE_UNKNOWN;
+                }
+
                 sp<Surface> surface = new Surface(gbps[0], /*useAsync*/false);
                 ANativeWindow *anw = surface.get();
 
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index cfa1815..9986a84 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -23,6 +23,7 @@
 #include "device3/aidl/AidlCamera3Device.h"
 #include "device3/hidl/HidlCamera3Device.h"
 #include "device3/Camera3OutputStream.h"
+#include "utils/Utils.h"
 
 using android::camera3::OutputStreamInfo;
 using android::hardware::camera2::ICameraDeviceUser;
@@ -48,16 +49,16 @@
     hidl.streams.resize(aidl.streams.size());
     size_t i = 0;
     for (const auto &stream : aidl.streams) {
-        if (static_cast<int>(stream.dynamicRangeProfile) !=
+        if (eToI(stream.dynamicRangeProfile) !=
                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
             ALOGE("%s Dynamic range profile %" PRId64 " not supported by HIDL", __FUNCTION__,
-                    stream.dynamicRangeProfile);
+                    eToI(stream.dynamicRangeProfile));
             return BAD_VALUE;
         }
 
-        if (static_cast<int>(stream.useCase) != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+        if (eToI(stream.useCase) != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
             ALOGE("%s Stream use case %" PRId64 "not supported by HIDL", __FUNCTION__,
-                    stream.useCase);
+                    eToI(stream.useCase));
             return BAD_VALUE;
         }
 
diff --git a/services/camera/libcameraservice/utils/TagMonitor.cpp b/services/camera/libcameraservice/utils/TagMonitor.cpp
index 38de93a..5258c0e 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.cpp
+++ b/services/camera/libcameraservice/utils/TagMonitor.cpp
@@ -384,7 +384,7 @@
         }
         returnStr << "]\n";
     }
-    return std::move(returnStr.str());
+    return returnStr.str();
 }
 
 template<typename T>
diff --git a/services/camera/libcameraservice/utils/Utils.h b/services/camera/libcameraservice/utils/Utils.h
index 4e90871..0eb5e2c 100644
--- a/services/camera/libcameraservice/utils/Utils.h
+++ b/services/camera/libcameraservice/utils/Utils.h
@@ -19,10 +19,21 @@
 
 #include <sched.h>
 #include <unistd.h>
+#include <type_traits>
 
 namespace android {
 
 /**
+ * Magically convert an enum to its underlying integer type, mostly so they can be
+ * printed with printf-style formatters without warnings.
+ * Backport of C++23 std::to_underlying()
+ */
+template<typename Enum>
+constexpr std::underlying_type_t<Enum> eToI(Enum val) {
+    return static_cast<std::underlying_type_t<Enum>>(val);
+}
+
+/**
  * As of Android V, ro.board.api_level returns the year and month of release (ex. 202404)
  * instead of release SDK version. This function maps year/month format back to release
  * SDK version.
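
The eToI() helper added here underpins the logging changes throughout this patch:
the camera HAL status types are scoped enums, which do not convert implicitly to
int, so printing them with %d makes clang's -Wformat complain. A hedged usage
sketch (the enum and its values are illustrative only):

    #include <cstdint>
    #include <cstdio>
    #include "utils/Utils.h"   // android::eToI(), as added above

    enum class FakeTorchStatus : int32_t { NOT_AVAILABLE, AVAILABLE_OFF, AVAILABLE_ON };

    void logTorch(FakeTorchStatus s) {
        // eToI() yields the underlying int32_t, which matches %d on Android.
        std::printf("torch status %d\n", android::eToI(s));
    }
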
diff --git a/services/camera/virtualcamera/util/JpegUtil.h b/services/camera/virtualcamera/util/JpegUtil.h
index 184dd56..0a8df90 100644
--- a/services/camera/virtualcamera/util/JpegUtil.h
+++ b/services/camera/virtualcamera/util/JpegUtil.h
@@ -18,6 +18,7 @@
 #define ANDROID_COMPANION_VIRTUALCAMERA_JPEGUTIL_H
 
 #include <optional>
+#include <vector>
 
 #include "android/hardware_buffer.h"
 #include "util/Util.h"
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index fdb56e5..7a4c3ad 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -29,6 +29,8 @@
         "packagemanager_aidl-cpp",
     ],
 
+    export_include_dirs: ["."],
+
     cflags: [
         "-Wall",
         "-Werror",
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-0 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..802c2b5
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-1 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..9ee6a15
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-10 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-10
new file mode 100644
index 0000000..95006c8
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-10
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-11 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-11
new file mode 100644
index 0000000..853be96
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-11
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-12 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-12
new file mode 100644
index 0000000..c3e9848
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-12
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-13 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-13
new file mode 100644
index 0000000..08b7f0d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-13
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-14 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-14
new file mode 100644
index 0000000..20e5e80
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-14
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-15 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-15
new file mode 100644
index 0000000..4e54f0b
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-15
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-2 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..2b2495d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-3 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..753594d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-4 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-4
new file mode 100644
index 0000000..0ed2010
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-4
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-5 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-5
new file mode 100644
index 0000000..f6141d1
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-5
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-6 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-6
new file mode 100644
index 0000000..b93f618
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-6
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-7 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-7
new file mode 100644
index 0000000..f8f296d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-7
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-8 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-8
new file mode 100644
index 0000000..29bdbc1
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-8
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-9 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-9
new file mode 100644
index 0000000..315f25e
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-9
Binary files differ
diff --git a/services/mediametrics/fuzzer/mediametrics_aidl_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_aidl_fuzzer.cpp
index c7468c7..572e969 100644
--- a/services/mediametrics/fuzzer/mediametrics_aidl_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_aidl_fuzzer.cpp
@@ -22,6 +22,7 @@
 using ::android::MediaMetricsService;
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    signal(SIGPIPE, SIG_IGN);
     auto service = sp<MediaMetricsService>::make();
     fuzzService(service, FuzzedDataProvider(data, size));
     return 0;
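
The signal(SIGPIPE, SIG_IGN) call added to this fuzzer (and to the
resourcemanager fuzzer below) follows a common libFuzzer pattern: service fuzzers
that push data over binder or sockets ignore SIGPIPE so a peer closing its end
surfaces as an EPIPE error instead of killing the fuzz process. A hedged sketch
of the same idea using libFuzzer's optional init hook rather than the per-input
call used here:

    #include <csignal>

    extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
        std::signal(SIGPIPE, SIG_IGN);   // installed once, before any fuzz iteration
        return 0;
    }
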
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index a8a1de1..8b3711c 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -821,7 +821,7 @@
     metricsLog += getAppsPixelCount(mProcessPixelsMap);
     metricsLog += getAppsCodecUsageMetrics(mProcessConcurrentCodecsMap);
 
-    return std::move(metricsLog);
+    return metricsLog;
 }
 
 } // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 9c2fb7c..f12a5d6 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -310,6 +310,7 @@
     mServiceLog->add(log);
 
     std::scoped_lock lock{mLock};
+    ClientInfoParcel updatedClientInfo = clientInfo;
     if (!mProcessInfo->isPidUidTrusted(pid, uid)) {
         pid_t callingPid = IPCThreadState::self()->getCallingPid();
         uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -317,6 +318,8 @@
                 __FUNCTION__, pid, uid, callingPid, callingUid);
         pid = callingPid;
         uid = callingUid;
+        updatedClientInfo.pid = callingPid;
+        updatedClientInfo.uid = callingUid;
     }
     ResourceInfos& infos = getResourceInfosForEdit(pid, mMap);
     ResourceInfo& info = getResourceInfoForEdit(clientInfo, client, infos);
@@ -342,7 +345,7 @@
     }
     if (info.deathNotifier == nullptr && client != nullptr) {
         info.deathNotifier = DeathNotifier::Create(
-            client, ref<ResourceManagerService>(), clientInfo);
+            client, ref<ResourceManagerService>(), updatedClientInfo);
     }
     if (mObserverService != nullptr && !resourceAdded.empty()) {
         mObserverService->onResourceAdded(uid, pid, resourceAdded);
diff --git a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
index 679ab13..49f68e9 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
+++ b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
@@ -94,7 +94,7 @@
         str.append("\n");
     }
 
-    return std::move(str);
+    return str;
 }
 
 bool ResourceList::operator==(const ResourceList& rhs) const {
diff --git a/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
index 6253df7..1cad482 100644
--- a/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
+++ b/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
@@ -26,6 +26,7 @@
 using ndk::SharedRefBase;
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+   signal(SIGPIPE, SIG_IGN);
    std::shared_ptr<ResourceManagerService> service = ResourceManagerService::Create();
    fuzzService(service->asBinder().get(), FuzzedDataProvider(data, size));
    return 0;
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index e3601a1..67b319f 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -84,6 +84,7 @@
     shared_libs: [
         "aaudio-aidl-cpp",
         "com.android.media.aaudio-aconfig-cc",
+        "com.android.media.aaudio-aconfig-cc",
         "framework-permission-aidl-cpp",
         "libaaudio_internal",
         "libaudioclient",
@@ -157,6 +158,8 @@
         "frameworks/av/media/libnbaio/include_mono",
     ],
 
+    export_include_dirs: ["."],
+
     tidy: true,
     tidy_checks: tidy_errors,
     tidy_checks_as_errors: tidy_errors,