Merge "Force slowJpegMode on certain camera1 apps" into udc-dev
diff --git a/media/audioaidlconversion/AidlConversionNdk.cpp b/media/audioaidlconversion/AidlConversionNdk.cpp
index 71c547c..9b14a5e 100644
--- a/media/audioaidlconversion/AidlConversionNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionNdk.cpp
@@ -79,39 +79,42 @@
 }  // namespace
 
 // buffer_provider_t is not supported thus skipped
-ConversionResult<buffer_config_t> aidl2legacy_AudioConfigBase_buffer_config_t(
-        const media::audio::common::AudioConfigBase& aidl, bool isInput) {
+ConversionResult<buffer_config_t> aidl2legacy_AudioConfig_buffer_config_t(
+        const media::audio::common::AudioConfig& aidl, bool isInput) {
     buffer_config_t legacy;
 
-    legacy.samplingRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+    legacy.samplingRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.base.sampleRate));
     legacy.mask |= EFFECT_CONFIG_SMP_RATE;
 
     legacy.channels = VALUE_OR_RETURN(
-            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.channelMask, isInput));
+            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.base.channelMask, isInput));
     legacy.mask |= EFFECT_CONFIG_CHANNELS;
 
-    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormatDescription_audio_format_t(aidl.format));
+    legacy.format =
+            VALUE_OR_RETURN(aidl2legacy_AudioFormatDescription_audio_format_t(aidl.base.format));
     legacy.mask |= EFFECT_CONFIG_FORMAT;
+    legacy.buffer.frameCount = aidl.frameCount;
 
     // TODO: add accessMode and mask
     return legacy;
 }
 
-ConversionResult<media::audio::common::AudioConfigBase>
-legacy2aidl_buffer_config_t_AudioConfigBase(const buffer_config_t& legacy, bool isInput) {
-    media::audio::common::AudioConfigBase aidl;
+ConversionResult<media::audio::common::AudioConfig>
+legacy2aidl_buffer_config_t_AudioConfig(const buffer_config_t& legacy, bool isInput) {
+    media::audio::common::AudioConfig aidl;
 
     if (legacy.mask & EFFECT_CONFIG_SMP_RATE) {
-        aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.samplingRate));
+        aidl.base.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.samplingRate));
     }
     if (legacy.mask & EFFECT_CONFIG_CHANNELS) {
-        aidl.channelMask = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+        aidl.base.channelMask = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
                 static_cast<audio_channel_mask_t>(legacy.channels), isInput));
     }
     if (legacy.mask & EFFECT_CONFIG_FORMAT) {
-        aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormatDescription(
+        aidl.base.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormatDescription(
                 static_cast<audio_format_t>(legacy.format)));
     }
+    aidl.frameCount = legacy.buffer.frameCount;
 
     // TODO: add accessMode and mask
     return aidl;
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdk.h b/media/audioaidlconversion/include/media/AidlConversionNdk.h
index e92f1a9..813a728 100644
--- a/media/audioaidlconversion/include/media/AidlConversionNdk.h
+++ b/media/audioaidlconversion/include/media/AidlConversionNdk.h
@@ -34,9 +34,9 @@
 namespace aidl {
 namespace android {
 
-ConversionResult<buffer_config_t> aidl2legacy_AudioConfigBase_buffer_config_t(
-        const media::audio::common::AudioConfigBase& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioConfigBase> legacy2aidl_buffer_config_t_AudioConfigBase(
+ConversionResult<buffer_config_t> aidl2legacy_AudioConfig_buffer_config_t(
+        const media::audio::common::AudioConfig& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioConfig> legacy2aidl_buffer_config_t_AudioConfig(
         const buffer_config_t& legacy, bool isInput);
 
 ::android::status_t aidl2legacy_AudioAttributesTags(
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index 5d2856a..9c054f0 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -356,7 +356,7 @@
                 needsUpdate = true;
             }
         }
-        if (!found) {
+        if (!found || me.v.level > LEVEL_AVC_5) {
             // We set to the highest supported level.
             me.set().level = LEVEL_AVC_5;
         }
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index 9c26c02..56e6e8a 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -362,7 +362,7 @@
                 needsUpdate = true;
             }
         }
-        if (!found) {
+        if (!found || me.v.level > LEVEL_HEVC_MAIN_5_2) {
             // We set to the highest supported level.
             me.set().level = LEVEL_HEVC_MAIN_5_2;
         }
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 4955e13..eb1b4b5 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -2557,17 +2557,6 @@
 }
 
 void CCodec::initiateReleaseIfStuck() {
-    std::string name;
-    bool pendingDeadline = false;
-    {
-        Mutexed<NamedTimePoint>::Locked deadline(mDeadline);
-        if (deadline->get() < std::chrono::steady_clock::now()) {
-            name = deadline->getName();
-        }
-        if (deadline->get() != TimePoint::max()) {
-            pendingDeadline = true;
-        }
-    }
     bool tunneled = false;
     bool isMediaTypeKnown = false;
     {
@@ -2605,6 +2594,17 @@
         tunneled = config->mTunneled;
         isMediaTypeKnown = (kKnownMediaTypes.count(config->mCodingMediaType) != 0);
     }
+    std::string name;
+    bool pendingDeadline = false;
+    {
+        Mutexed<NamedTimePoint>::Locked deadline(mDeadline);
+        if (deadline->get() < std::chrono::steady_clock::now()) {
+            name = deadline->getName();
+        }
+        if (deadline->get() != TimePoint::max()) {
+            pendingDeadline = true;
+        }
+    }
     if (!tunneled && isMediaTypeKnown && name.empty()) {
         constexpr std::chrono::steady_clock::duration kWorkDurationThreshold = 3s;
         std::chrono::steady_clock::duration elapsed = mChannel->elapsed();
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 9386b9b..d727758 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1702,13 +1702,21 @@
 
 status_t AudioTrack::setOutputDevice(audio_port_handle_t deviceId) {
     AutoMutex lock(mLock);
-    ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
-            __func__, mPortId, deviceId, mSelectedDeviceId);
+    ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d mRoutedDeviceId %d",
+            __func__, mPortId, deviceId, mSelectedDeviceId, mRoutedDeviceId);
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
-        if (mStatus == NO_ERROR) {
-            android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
-            mProxy->interrupt();
+        if (mStatus == NO_ERROR && mSelectedDeviceId != mRoutedDeviceId) {
+            if (isPlaying_l()) {
+                android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                mProxy->interrupt();
+            } else {
+                // if the track is idle, try to restore now and
+                // defer to next start if not possible
+                if (restoreTrack_l("setOutputDevice") != OK) {
+                    android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                }
+            }
         }
     }
     return NO_ERROR;
@@ -2185,7 +2193,6 @@
         // obtainBuffer() is called with mutex unlocked, so keep extra references to these fields to
         // keep them from going away if another thread re-creates the track during obtainBuffer()
         sp<AudioTrackClientProxy> proxy;
-        sp<IMemory> iMem;
 
         {   // start of lock scope
             AutoMutex lock(mLock);
@@ -2211,8 +2218,9 @@
             }
 
             // Keep the extra references
+            mProxyObtainBufferRef = mProxy;
             proxy = mProxy;
-            iMem = mCblkMemory;
+            mCblkMemoryObtainBufferRef = mCblkMemory;
 
             if (mState == STATE_STOPPING) {
                 status = -EINTR;
@@ -2260,6 +2268,8 @@
     buffer.mFrameCount = stepCount;
     buffer.mRaw = audioBuffer->raw;
 
+    sp<IMemory> tempMemory;
+    sp<AudioTrackClientProxy> tempProxy;
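+    // tempMemory and tempProxy are declared before mLock is taken so that, on return, they are
+    // destroyed only after the lock has been released.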
     AutoMutex lock(mLock);
     if (audioBuffer->sequence != mSequence) {
         // This Buffer came from a different IAudioTrack instance, so ignore the releaseBuffer
@@ -2269,7 +2279,12 @@
     }
     mReleased += stepCount;
     mInUnderrun = false;
-    mProxy->releaseBuffer(&buffer);
+    mProxyObtainBufferRef->releaseBuffer(&buffer);
+    // The extra references to the shared memory and proxy taken in `obtainBuffer` are not needed
+    // after calling `releaseBuffer`. Move them into temporary strong pointers so that they are
+    // cleared outside of `releaseBuffer`.
+    tempMemory = std::move(mCblkMemoryObtainBufferRef);
+    tempProxy = std::move(mProxyObtainBufferRef);
 
     // restart track if it was disabled by audioflinger due to previous underrun
     restartIfDisabled();
diff --git a/media/libaudioclient/aidl/android/media/ISoundDose.aidl b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
index a4c37bc..0e2a5ab 100644
--- a/media/libaudioclient/aidl/android/media/ISoundDose.aidl
+++ b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
@@ -62,6 +62,11 @@
     float getOutputRs2UpperBound();
     /** Get the current CSD from audioserver. */
     float getCsd();
+    /**
+     * Returns true if the HAL supports the ISoundDose interface, either as part
+     * of IModule or as a standalone sound dose HAL.
+     */
+    boolean isSoundDoseHalSupported();
     /** Enables/Disables MEL computations from framework. */
     oneway void forceUseFrameworkMel(boolean useFrameworkMel);
     /** Enables/Disables the computation of CSD on all devices. */
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 31f81be..8f712db 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1126,6 +1126,9 @@
 
             bool isPlaying() {
                 AutoMutex lock(mLock);
+                return isPlaying_l();
+            }
+            bool isPlaying_l() {
                 return mState == STATE_ACTIVE || mState == STATE_STOPPING;
             }
 
@@ -1262,6 +1265,11 @@
     audio_track_cblk_t*     mCblk;                  // re-load after mLock.unlock()
     audio_io_handle_t       mOutput = AUDIO_IO_HANDLE_NONE; // from AudioSystem::getOutputForAttr()
 
+    // Extra references to the shared memory and proxy held between obtainBuffer and releaseBuffer
+    // to keep the shared memory valid while the data is being processed.
+    sp<IMemory>               mCblkMemoryObtainBufferRef GUARDED_BY(mLock);
+    sp<AudioTrackClientProxy> mProxyObtainBufferRef GUARDED_BY(mLock);
+
     sp<AudioTrackThread>    mAudioTrackThread;
     bool                    mThreadCanCallJava;
 
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index f47dd0b..1dbcb86 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -76,3 +76,11 @@
 
     export_include_dirs: ["include"],
 }
+
+cc_library_headers {
+    name: "libaudiohalimpl_headers",
+
+    header_libs: ["libaudiohal_headers"],
+    export_header_lib_headers: ["libaudiohal_headers"],
+    export_include_dirs: ["impl"],
+}
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index ff817d4..15726ff 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -280,6 +280,7 @@
         "EffectsFactoryHalAidl.cpp",
         "EffectsFactoryHalEntry.cpp",
         "StreamHalAidl.cpp",
+        ":audio_effectproxy_src_files"
     ],
     static_libs: [
         "android.hardware.common-V2-ndk",
@@ -303,3 +304,8 @@
         "-DBACKEND_CPP_NDK",
     ],
 }
+
+filegroup {
+    name: "audio_effectproxy_src_files",
+    srcs: ["EffectProxy.cpp"],
+}
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
index 519b871..5ab7c84 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -29,6 +29,7 @@
 #include <utils/Log.h>
 
 #include "EffectConversionHelperAidl.h"
+#include "EffectProxy.h"
 
 namespace android {
 namespace effect {
@@ -37,7 +38,9 @@
 using ::aidl::android::hardware::audio::effect::CommandId;
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::Flags;
+using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::State;
 using ::aidl::android::media::audio::common::AudioDeviceDescription;
 using ::aidl::android::media::audio::common::AudioMode;
 using ::aidl::android::media::audio::common::AudioSource;
@@ -72,7 +75,9 @@
       mIoId(ioId),
       mDesc(desc),
       mEffect(std::move(effect)),
-      mIsInputStream(mDesc.common.flags.type == Flags::Type::PRE_PROC) {
+      mIsInputStream(mDesc.common.flags.type == Flags::Type::PRE_PROC),
+      mIsProxyEffect(mDesc.common.id.proxy.has_value() &&
+                     mDesc.common.id.proxy.value() == mDesc.common.id.uuid) {
     mCommon.session = sessionId;
     mCommon.ioHandle = ioId;
     mCommon.input = mCommon.output = kDefaultAudioConfig;
@@ -96,8 +101,8 @@
         return BAD_VALUE;
     }
 
-    return *(status_t*)pReplyData =
-                   statusTFromBinderStatus(mEffect->open(mCommon, std::nullopt, &mOpenReturn));
+    // Do nothing for EFFECT_CMD_INIT; IEffect::open() happens when handling EFFECT_CMD_SET_CONFIG
+    return *(status_t*)pReplyData = OK;
 }
 
 status_t EffectConversionHelperAidl::handleSetParameter(uint32_t cmdSize, const void* pCmdData,
@@ -154,22 +159,55 @@
     }
 
     effect_config_t* config = (effect_config_t*)pCmdData;
-    Parameter::Common aidlCommon = {
-            .session = mSessionId,
-            .ioHandle = mIoId,
-            .input = {.base = VALUE_OR_RETURN_STATUS(
-                              ::aidl::android::legacy2aidl_buffer_config_t_AudioConfigBase(
-                                      config->inputCfg, mIsInputStream))},
-            .output = {.base = VALUE_OR_RETURN_STATUS(
-                               ::aidl::android::legacy2aidl_buffer_config_t_AudioConfigBase(
-                                       config->outputCfg, mIsInputStream))}};
+    Parameter::Common common = {
+            .input =
+                    VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_buffer_config_t_AudioConfig(
+                            config->inputCfg, mIsInputStream)),
+            .output =
+                    VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_buffer_config_t_AudioConfig(
+                            config->outputCfg, mIsInputStream)),
+            .session = mCommon.session,
+            .ioHandle = mCommon.ioHandle};
 
-    Parameter aidlParam = UNION_MAKE(Parameter, common, aidlCommon);
+    State state;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
+    // if the buffer/ioHandle of an opened effect is re-configured, close it and re-open it
+    if (state != State::INIT && mCommon != common) {
+        ALOGI("%s at state %s, closing effect", __func__,
+              android::internal::ToString(state).c_str());
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->close()));
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
+        mStatusQ.reset();
+        mInputQ.reset();
+        mOutputQ.reset();
+    }
 
-    status_t ret = statusTFromBinderStatus(mEffect->setParameter(aidlParam));
-    EffectParamWriter writer(*(effect_param_t*)pReplyData);
-    writer.setStatus(ret);
-    return ret;
+    if (state == State::INIT) {
+        ALOGI("%s at state %s, opening effect", __func__,
+              android::internal::ToString(state).c_str());
+        IEffect::OpenEffectReturn openReturn;
+        RETURN_STATUS_IF_ERROR(
+                statusTFromBinderStatus(mEffect->open(common, std::nullopt, &openReturn)));
+
+        if (mIsProxyEffect) {
+            const auto& ret =
+                    std::static_pointer_cast<EffectProxy>(mEffect)->getEffectReturnParam();
+            mStatusQ = std::make_shared<StatusMQ>(ret->statusMQ);
+            mInputQ = std::make_shared<DataMQ>(ret->inputDataMQ);
+            mOutputQ = std::make_shared<DataMQ>(ret->outputDataMQ);
+        } else {
+            mStatusQ = std::make_shared<StatusMQ>(openReturn.statusMQ);
+            mInputQ = std::make_shared<DataMQ>(openReturn.inputDataMQ);
+            mOutputQ = std::make_shared<DataMQ>(openReturn.outputDataMQ);
+        }
+        mCommon = common;
+    } else if (mCommon != common) {
+        ALOGI("%s at state %s, setParameter", __func__, android::internal::ToString(state).c_str());
+        Parameter aidlParam = UNION_MAKE(Parameter, common, mCommon);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
+    }
+
+    return *static_cast<int32_t*>(pReplyData) = OK;
 }
 
 status_t EffectConversionHelperAidl::handleGetConfig(uint32_t cmdSize __unused,
@@ -187,11 +225,9 @@
     const auto& common = param.get<Parameter::common>();
     effect_config_t* pConfig = (effect_config_t*)pReplyData;
     pConfig->inputCfg = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::aidl2legacy_AudioConfigBase_buffer_config_t(common.input.base, true));
-    pConfig->outputCfg =
-            VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_AudioConfigBase_buffer_config_t(
-                    common.output.base, false));
-    mCommon = common;
+            ::aidl::android::aidl2legacy_AudioConfig_buffer_config_t(common.input, true));
+    pConfig->outputCfg = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioConfig_buffer_config_t(common.output, false));
     return OK;
 }
 
@@ -294,7 +330,20 @@
               pReplyData);
         return BAD_VALUE;
     }
-    // TODO: handle this after effectproxy implemented in libaudiohal
+    effect_offload_param_t* offload = (effect_offload_param_t*)pCmdData;
+    // send to proxy to update active sub-effect
+    if (mIsProxyEffect) {
+        ALOGI("%s offload param offload %s ioHandle %d", __func__,
+              offload->isOffload ? "true" : "false", offload->ioHandle);
+        mCommon.ioHandle = offload->ioHandle;
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                std::static_pointer_cast<EffectProxy>(mEffect)->setOffloadParam(offload)));
+        // update FMQs
+        const auto& ret = std::static_pointer_cast<EffectProxy>(mEffect)->getEffectReturnParam();
+        mStatusQ = std::make_shared<StatusMQ>(ret->statusMQ);
+        mInputQ = std::make_shared<DataMQ>(ret->inputDataMQ);
+        mOutputQ = std::make_shared<DataMQ>(ret->outputDataMQ);
+    }
     return *static_cast<int32_t*>(pReplyData) = OK;
 }
 
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.h b/media/libaudiohal/impl/EffectConversionHelperAidl.h
index 54df1b8..1200264 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.h
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.h
@@ -19,6 +19,7 @@
 #include <utils/Errors.h>
 
 #include <aidl/android/hardware/audio/effect/BpEffect.h>
+#include <fmq/AidlMessageQueue.h>
 #include <system/audio_effect.h>
 #include <system/audio_effects/audio_effects_utils.h>
 
@@ -30,10 +31,15 @@
     status_t handleCommand(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData, uint32_t* replySize,
                            void* pReplyData);
     virtual ~EffectConversionHelperAidl() {}
-    const ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn&
-    getEffectReturnParam() const {
-        return mOpenReturn;
-    }
+
+    using StatusMQ = ::android::AidlMessageQueue<
+            ::aidl::android::hardware::audio::effect::IEffect::Status,
+            ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
+    using DataMQ = ::android::AidlMessageQueue<
+            float, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
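+    // The FMQs are created when the effect is opened in handleSetConfig() and refreshed after a
+    // successful setOffloadParam() on proxy effects.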
+    std::shared_ptr<StatusMQ> getStatusMQ() { return mStatusQ; }
+    std::shared_ptr<DataMQ> getInputMQ() { return mInputQ; }
+    std::shared_ptr<DataMQ> getOutputMQ() { return mOutputQ; }
 
   protected:
     const int32_t mSessionId;
@@ -42,7 +48,6 @@
     const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> mEffect;
     // whether the effect is instantiated on an input stream
     const bool mIsInputStream;
-    ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn mOpenReturn;
     ::aidl::android::hardware::audio::effect::Parameter::Common mCommon;
 
     EffectConversionHelperAidl(
@@ -59,6 +64,7 @@
     const aidl::android::media::audio::common::AudioFormatDescription kDefaultFormatDescription = {
             .type = aidl::android::media::audio::common::AudioFormatType::PCM,
             .pcm = aidl::android::media::audio::common::PcmType::FLOAT_32_BIT};
+    const bool mIsProxyEffect;
 
     static constexpr int kDefaultframeCount = 0x100;
 
@@ -75,6 +81,9 @@
                                                                    uint32_t* /* replySize */,
                                                                    void* /* pReplyData */);
     static const std::map<uint32_t /* effect_command_e */, CommandHandler> mCommandHandlerMap;
+    // data and status FMQ
+    std::shared_ptr<StatusMQ> mStatusQ = nullptr;
+    std::shared_ptr<DataMQ> mInputQ = nullptr, mOutputQ = nullptr;
 
     status_t handleInit(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
                         void* pReplyData);
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index 0c19ac8..d6135af 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -31,6 +31,7 @@
 #include <utils/Log.h>
 
 #include "EffectHalAidl.h"
+#include "EffectProxy.h"
 
 #include <aidl/android/hardware/audio/effect/IEffect.h>
 
@@ -61,19 +62,22 @@
 
 EffectHalAidl::EffectHalAidl(const std::shared_ptr<IFactory>& factory,
                              const std::shared_ptr<IEffect>& effect, uint64_t effectId,
-                             int32_t sessionId, int32_t ioId, const Descriptor& desc)
+                             int32_t sessionId, int32_t ioId, const Descriptor& desc,
+                             bool isProxyEffect)
     : mFactory(factory),
       mEffect(effect),
       mEffectId(effectId),
       mSessionId(sessionId),
       mIoId(ioId),
-      mDesc(desc) {
+      mDesc(desc),
+      mIsProxyEffect(isProxyEffect) {
     createAidlConversion(effect, sessionId, ioId, desc);
 }
 
 EffectHalAidl::~EffectHalAidl() {
-    if (mFactory) {
-        mFactory->destroyEffect(mEffect);
+    if (mEffect) {
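+        // Proxy effects destroy their sub-effects through EffectProxy; regular effects are
+        // destroyed through the factory.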
+        mIsProxyEffect ? std::static_pointer_cast<EffectProxy>(mEffect)->destroy()
+                       : mFactory->destroyEffect(mEffect);
     }
 }
 
@@ -160,34 +164,49 @@
 
 // write to input FMQ here, wait for statusMQ STATUS_OK, and read from output FMQ
 status_t EffectHalAidl::process() {
-    size_t available = mInputQ->availableToWrite();
+    auto statusQ = mConversion->getStatusMQ();
+    auto inputQ = mConversion->getInputMQ();
+    auto outputQ = mConversion->getOutputMQ();
+    if (!statusQ || !statusQ->isValid() || !inputQ || !inputQ->isValid() || !outputQ ||
+        !outputQ->isValid()) {
+        ALOGE("%s invalid FMQ [Status %d I %d O %d]", __func__, statusQ ? statusQ->isValid() : 0,
+              inputQ ? inputQ->isValid() : 0, outputQ ? outputQ->isValid() : 0);
+        return INVALID_OPERATION;
+    }
+
+    size_t available = inputQ->availableToWrite();
     size_t floatsToWrite = std::min(available, mInBuffer->getSize() / sizeof(float));
     if (floatsToWrite == 0) {
-        ALOGW("%s not able to write, floats in buffer %zu, space in FMQ %zu", __func__,
+        ALOGE("%s not able to write, floats in buffer %zu, space in FMQ %zu", __func__,
               mInBuffer->getSize() / sizeof(float), available);
         return INVALID_OPERATION;
     }
-    if (!mInputQ->write((float*)mInBuffer->ptr(), floatsToWrite)) {
-        ALOGW("%s failed to write %zu into inputQ", __func__, floatsToWrite);
+    if (!mInBuffer->audioBuffer() ||
+        !inputQ->write((float*)mInBuffer->audioBuffer()->f32, floatsToWrite)) {
+        ALOGE("%s failed to write %zu floats from audiobuffer %p to inputQ [avail %zu]", __func__,
+              floatsToWrite, mInBuffer->audioBuffer(), inputQ->availableToWrite());
         return INVALID_OPERATION;
     }
 
     IEffect::Status retStatus{};
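+    // Block on the status FMQ; the effect must consume exactly what was written and produce at
+    // least one frame before the output FMQ is read.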
-    if (!mStatusQ->readBlocking(&retStatus, 1) || retStatus.status != OK ||
+    if (!statusQ->readBlocking(&retStatus, 1) || retStatus.status != OK ||
         (size_t)retStatus.fmqConsumed != floatsToWrite || retStatus.fmqProduced == 0) {
-        ALOGW("%s read status failed: %s", __func__, retStatus.toString().c_str());
+        ALOGE("%s read status failed: %s", __func__, retStatus.toString().c_str());
         return INVALID_OPERATION;
     }
 
-    available = mOutputQ->availableToRead();
+    available = outputQ->availableToRead();
     size_t floatsToRead = std::min(available, mOutBuffer->getSize() / sizeof(float));
     if (floatsToRead == 0) {
-        ALOGW("%s not able to read, buffer space %zu, floats in FMQ %zu", __func__,
+        ALOGE("%s not able to read, buffer space %zu, floats in FMQ %zu", __func__,
               mOutBuffer->getSize() / sizeof(float), available);
         return INVALID_OPERATION;
     }
-    if (!mOutputQ->read((float*)mOutBuffer->ptr(), floatsToRead)) {
-        ALOGW("%s failed to read %zu from outputQ", __func__, floatsToRead);
+    // always read floating point data for AIDL
+    if (!mOutBuffer->audioBuffer() ||
+        !outputQ->read(mOutBuffer->audioBuffer()->f32, floatsToRead)) {
+        ALOGE("%s failed to read %zu from outputQ to audioBuffer %p", __func__, floatsToRead,
+              mOutBuffer->audioBuffer());
         return INVALID_OPERATION;
     }
 
@@ -210,20 +229,7 @@
         return INVALID_OPERATION;
     }
 
-    status_t ret = mConversion->handleCommand(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
-    // update FMQs when effect open successfully
-    if (ret == OK && cmdCode == EFFECT_CMD_INIT) {
-        const auto& retParam = mConversion->getEffectReturnParam();
-        mStatusQ = std::make_unique<StatusMQ>(retParam.statusMQ);
-        mInputQ = std::make_unique<DataMQ>(retParam.inputDataMQ);
-        mOutputQ = std::make_unique<DataMQ>(retParam.outputDataMQ);
-        if (!mStatusQ->isValid() || !mInputQ->isValid() || !mOutputQ->isValid()) {
-            ALOGE("%s return with invalid FMQ", __func__);
-            return NO_INIT;
-        }
-    }
-
-    return ret;
+    return mConversion->handleCommand(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
 }
 
 status_t EffectHalAidl::getDescriptor(effect_descriptor_t* pDescriptor) {
diff --git a/media/libaudiohal/impl/EffectHalAidl.h b/media/libaudiohal/impl/EffectHalAidl.h
index 194150d..8966363 100644
--- a/media/libaudiohal/impl/EffectHalAidl.h
+++ b/media/libaudiohal/impl/EffectHalAidl.h
@@ -31,11 +31,6 @@
 
 class EffectHalAidl : public EffectHalInterface {
   public:
-    using StatusMQ = ::android::AidlMessageQueue<
-            ::aidl::android::hardware::audio::effect::IEffect::Status,
-            ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
-    using DataMQ = ::android::AidlMessageQueue<
-            float, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
 
     // Set the input buffer.
     status_t setInBuffer(const sp<EffectBufferHalInterface>& buffer) override;
@@ -83,12 +78,11 @@
     const int32_t mSessionId;
     const int32_t mIoId;
     const ::aidl::android::hardware::audio::effect::Descriptor mDesc;
+    const bool mIsProxyEffect;
+
     std::unique_ptr<EffectConversionHelperAidl> mConversion;
-    std::unique_ptr<StatusMQ> mStatusQ;
-    std::unique_ptr<DataMQ> mInputQ, mOutputQ;
 
     sp<EffectBufferHalInterface> mInBuffer, mOutBuffer;
-    effect_config_t mConfig;
 
     status_t createAidlConversion(
             std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
@@ -99,8 +93,10 @@
             const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory>& factory,
             const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& effect,
             uint64_t effectId, int32_t sessionId, int32_t ioId,
-            const ::aidl::android::hardware::audio::effect::Descriptor& desc);
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc,
+            bool isProxyEffect);
     bool setEffectReverse(bool reverse);
+    bool needUpdateReturnParam(uint32_t cmdCode);
 
     // The destructor automatically releases the effect.
     virtual ~EffectHalAidl();
diff --git a/media/libaudiohal/impl/EffectProxy.cpp b/media/libaudiohal/impl/EffectProxy.cpp
new file mode 100644
index 0000000..c4d85e5
--- /dev/null
+++ b/media/libaudiohal/impl/EffectProxy.cpp
@@ -0,0 +1,292 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <memory>
+#define LOG_TAG "EffectProxy"
+//#define LOG_NDEBUG 0
+
+#include <fmq/AidlMessageQueue.h>
+#include <utils/Log.h>
+
+#include "EffectProxy.h"
+
+using ::aidl::android::hardware::audio::effect::CommandId;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::Flags;
+using ::aidl::android::hardware::audio::effect::IEffect;
+using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::media::audio::common::AudioUuid;
+
+namespace android {
+namespace effect {
+
+EffectProxy::EffectProxy(const Descriptor::Identity& id, const std::shared_ptr<IFactory>& factory)
+    : mIdentity([](const Descriptor::Identity& subId) {
+          // update EffectProxy implementation UUID to the sub-effect proxy UUID
+          ALOG_ASSERT(subId.proxy.has_value(), "Sub-effect Identity must have valid proxy UUID");
+          Descriptor::Identity tempId = subId;
+          tempId.uuid = subId.proxy.value();
+          return tempId;
+      }(id)),
+      mFactory(factory) {}
+
+EffectProxy::~EffectProxy() {
+    close();
+    destroy();
+    mSubEffects.clear();
+}
+
+// A sub-effect must have the same proxy UUID as the EffectProxy, and its type UUID must match.
+ndk::ScopedAStatus EffectProxy::addSubEffect(const Descriptor& sub) {
+    ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+    if (0 != mSubEffects.count(sub.common.id) || !sub.common.id.proxy.has_value() ||
+        sub.common.id.proxy.value() != mIdentity.uuid) {
+        ALOGE("%s sub effect already exist or mismatch %s", __func__, sub.toString().c_str());
+        return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                "illegalSubEffect");
+    }
+
+    // the sub-effect instance is not created yet
+    std::get<SubEffectTupleIndex::HANDLE>(mSubEffects[sub.common.id]) = nullptr;
+    std::get<SubEffectTupleIndex::DESCRIPTOR>(mSubEffects[sub.common.id]) = sub;
+    // set the last added sub-effect to active before setOffloadParam()
+    mActiveSub = sub.common.id;
+    ALOGI("%s add %s to proxy %s flag %s", __func__, mActiveSub.toString().c_str(),
+          mIdentity.toString().c_str(), sub.common.flags.toString().c_str());
+
+    if (sub.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL) {
+        mSubFlags.hwAcceleratorMode = Flags::HardwareAccelerator::TUNNEL;
+    }
+
+    // initial flag values before we know which sub-effect to activate (with setOffloadParam)
+    // same as HIDL EffectProxy flags
+    mSubFlags.type = Flags::Type::INSERT;
+    mSubFlags.insert = Flags::Insert::LAST;
+    mSubFlags.volume = Flags::Volume::CTRL;
+
+    // set indication if any sub-effect indication was set
+    mSubFlags.offloadIndication |= sub.common.flags.offloadIndication;
+    mSubFlags.deviceIndication |= sub.common.flags.deviceIndication;
+    mSubFlags.audioModeIndication |= sub.common.flags.audioModeIndication;
+    mSubFlags.audioSourceIndication |= sub.common.flags.audioSourceIndication;
+
+    // set bypass when all sub-effects are bypassing
+    mSubFlags.bypass &= sub.common.flags.bypass;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectProxy::create() {
+    ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+    ndk::ScopedAStatus status = ndk::ScopedAStatus::ok();
+
+    for (auto& sub : mSubEffects) {
+        auto& effectHandle = std::get<SubEffectTupleIndex::HANDLE>(sub.second);
+        ALOGI("%s sub-effect %s", __func__, sub.first.uuid.toString().c_str());
+        status = mFactory->createEffect(sub.first.uuid, &effectHandle);
+        if (!status.isOk() || !effectHandle) {
+            ALOGE("%s sub-effect failed %s", __func__, sub.first.uuid.toString().c_str());
+            break;
+        }
+    }
+
+    // destroy all created effects on failure
+    if (!status.isOk()) {
+        destroy();
+    }
+    return status;
+}
+
+ndk::ScopedAStatus EffectProxy::destroy() {
+    ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+    return runWithAllSubEffects([&](std::shared_ptr<IEffect>& effect) {
+        ndk::ScopedAStatus status = mFactory->destroyEffect(effect);
+        if (status.isOk()) {
+            effect.reset();
+        }
+        return status;
+    });
+}
+
+const IEffect::OpenEffectReturn* EffectProxy::getEffectReturnParam() {
+    return &std::get<SubEffectTupleIndex::RETURN>(mSubEffects[mActiveSub]);
+}
+
+ndk::ScopedAStatus EffectProxy::setOffloadParam(const effect_offload_param_t* offload) {
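+    // Select the sub-effect whose hardware accelerator flag matches the requested offload mode:
+    // a TUNNEL sub-effect for offloaded streams, any other sub-effect otherwise.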
+    const auto& itor = std::find_if(mSubEffects.begin(), mSubEffects.end(), [&](const auto& sub) {
+        const auto& desc = std::get<SubEffectTupleIndex::DESCRIPTOR>(sub.second);
+        ALOGI("%s: isOffload %d sub-effect: %s, flags %s", __func__, offload->isOffload,
+              desc.common.id.uuid.toString().c_str(), desc.common.flags.toString().c_str());
+        return offload->isOffload ==
+               (desc.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL);
+    });
+    if (itor == mSubEffects.end()) {
+        ALOGE("%s no %soffload sub-effect found", __func__, offload->isOffload ? "" : "non-");
+        return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_NULL_POINTER,
+                                                                "noActiveEffectFound");
+    }
+
+    mActiveSub = itor->first;
+    ALOGI("%s: active %soffload sub-effect: %s, flags %s", __func__,
+          offload->isOffload ? "" : "non-", mActiveSub.uuid.toString().c_str(),
+          std::get<SubEffectTupleIndex::DESCRIPTOR>(itor->second).common.flags.toString().c_str());
+    return ndk::ScopedAStatus::ok();
+}
+
+// EffectProxy goes over all sub-effects and calls the IEffect interfaces
+ndk::ScopedAStatus EffectProxy::open(const Parameter::Common& common,
+                                     const std::optional<Parameter::Specific>& specific,
+                                     IEffect::OpenEffectReturn* ret __unused) {
+    ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+    ndk::ScopedAStatus status = ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+            EX_ILLEGAL_ARGUMENT, "nullEffectHandle");
+    for (auto& sub : mSubEffects) {
+        auto& effect = std::get<SubEffectTupleIndex::HANDLE>(sub.second);
+        auto& openRet = std::get<SubEffectTupleIndex::RETURN>(sub.second);
+        if (!effect ||
+            !(status = effect->open(common, specific, &openRet)).isOk()) {
+            ALOGE("%s: failed to open UUID %s", __func__, sub.first.uuid.toString().c_str());
+            break;
+        }
+    }
+
+    // close all opened effects if failure
+    if (!status.isOk()) {
+        close();
+    }
+
+    return status;
+}
+
+ndk::ScopedAStatus EffectProxy::close() {
+    ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+    return runWithAllSubEffects([&](std::shared_ptr<IEffect>& effect) {
+        return effect->close();
+    });
+}
+
+ndk::ScopedAStatus EffectProxy::getDescriptor(Descriptor* desc) {
+    if (!desc) {
+        ALOGE("%s: nuull descriptor pointer", __func__);
+        return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_NULL_POINTER, "nullptr");
+    }
+
+    auto& activeSubEffect = std::get<SubEffectTupleIndex::HANDLE>(mSubEffects[mActiveSub]);
+    // return the initial descriptor if no active sub-effect exists
+    if (!activeSubEffect) {
+        desc->common.id = mIdentity;
+        desc->common.flags = mSubFlags;
+        desc->common.name = "Proxy";
+        desc->common.implementor = "AOSP";
+    } else {
+        *desc = std::get<SubEffectTupleIndex::DESCRIPTOR>(mSubEffects[mActiveSub]);
+        desc->common.id = mIdentity;
+    }
+
+    ALOGI("%s with %s", __func__, desc->toString().c_str());
+    return ndk::ScopedAStatus::ok();
+}
+
+// Handle with active sub-effect first, only send to other sub-effects when success
+ndk::ScopedAStatus EffectProxy::command(CommandId id) {
+    ALOGV("%s: %s, command %s", __func__, mIdentity.type.toString().c_str(),
+          android::internal::ToString(id).c_str());
+    return runWithActiveSubEffectThenOthers(
+            [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+                return effect->command(id);
+            });
+}
+
+// Return the active sub-effect state
+ndk::ScopedAStatus EffectProxy::getState(State* state) {
+    return runWithActiveSubEffect(
+            [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+                return effect->getState(state);
+            });
+}
+
+// Handle with active sub-effect first, only send to other sub-effects when success
+ndk::ScopedAStatus EffectProxy::setParameter(const Parameter& param) {
+    return runWithActiveSubEffectThenOthers(
+            [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+                return effect->setParameter(param);
+            });
+}
+
+// Return the active sub-effect parameter
+ndk::ScopedAStatus EffectProxy::getParameter(const Parameter::Id& id, Parameter* param) {
+    return runWithActiveSubEffect(
+            [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+                return effect->getParameter(id, param);
+            });
+}
+
+ndk::ScopedAStatus EffectProxy::runWithActiveSubEffectThenOthers(
+        std::function<ndk::ScopedAStatus(const std::shared_ptr<IEffect>&)> const& func) {
+    ndk::ScopedAStatus status = runWithActiveSubEffect(func);
+    if (!status.isOk()) {
+        return status;
+    }
+
+    // proceed with others if active sub-effect success
+    for (const auto& sub : mSubEffects) {
+        auto& effect = std::get<SubEffectTupleIndex::HANDLE>(sub.second);
+        if (sub.first != mActiveSub) {
+            if (!effect) {
+                ALOGE("%s null sub-effect interface for %s", __func__,
+                      sub.first.toString().c_str());
+                continue;
+            }
+            func(effect);
+        }
+    }
+    return status;
+}
+
+ndk::ScopedAStatus EffectProxy::runWithActiveSubEffect(
+        std::function<ndk::ScopedAStatus(const std::shared_ptr<IEffect>&)> const& func) {
+    auto& effect = std::get<SubEffectTupleIndex::HANDLE>(mSubEffects[mActiveSub]);
+    if (!effect) {
+        ALOGE("%s null active sub-effect interface, active %s", __func__,
+              mActiveSub.toString().c_str());
+        return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_NULL_POINTER,
+                                                                "activeSubEffectNull");
+    }
+    return func(effect);
+}
+
+ndk::ScopedAStatus EffectProxy::runWithAllSubEffects(
+        std::function<ndk::ScopedAStatus(std::shared_ptr<IEffect>&)> const& func) {
+    ndk::ScopedAStatus status = ndk::ScopedAStatus::ok();
+    // run on every sub-effect; keep going and report the last error if any of them fails
+    for (auto& sub : mSubEffects) {
+        auto& effect = std::get<SubEffectTupleIndex::HANDLE>(sub.second);
+        if (!effect) {
+            ALOGW("%s null sub-effect interface for %s", __func__, sub.first.toString().c_str());
+            continue;
+        }
+        ndk::ScopedAStatus temp = func(effect);
+        if (!temp.isOk()) {
+            status = ndk::ScopedAStatus::fromStatus(temp.getStatus());
+        }
+    }
+    return status;
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/EffectProxy.h b/media/libaudiohal/impl/EffectProxy.h
new file mode 100644
index 0000000..ffb8a19
--- /dev/null
+++ b/media/libaudiohal/impl/EffectProxy.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+#include <memory>
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+#include <aidl/android/hardware/audio/effect/BnFactory.h>
+#include <fmq/AidlMessageQueue.h>
+#include <system/audio_effect.h>
+
+namespace android {
+namespace effect {
+
+/**
+ * EffectProxy is the proxy for one or more AIDL effect implementations (sub-effects) of the same
+ * type. The audio framework uses EffectProxy as a composite implementation of all sub-effect
+ * implementations.
+ *
+ * At any given time, only one active sub-effect consumes and produces data for each proxy. All
+ * setter commands (except the legacy EFFECT_CMD_OFFLOAD, which is handled by the audio framework
+ * directly) and parameters are passed through to all sub-effects; the getter commands and
+ * parameters are passed through only to the active sub-effect.
+ *
+ */
+class EffectProxy final : public ::aidl::android::hardware::audio::effect::BnEffect {
+  public:
+    EffectProxy(const ::aidl::android::hardware::audio::effect::Descriptor::Identity& id,
+                const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory>& factory);
+
+    /**
+     * Add a sub-effect to the proxy. The descriptor of the candidate sub-effect needs to have the
+     * same proxy UUID as the proxy UUID (mIdentity.uuid).
+     */
+    ndk::ScopedAStatus addSubEffect(
+            const ::aidl::android::hardware::audio::effect::Descriptor& sub);
+
+    /**
+     * Create all sub-effects via AIDL IFactory; always call create() after all sub-effects have
+     * been added successfully with addSubEffect().
+     */
+    ndk::ScopedAStatus create();
+
+    /**
+     * Destroy all sub-effects via AIDL IFactory; the counterpart of create(). Also called from
+     * the destructor and when create() fails partway through.
+     */
+    ndk::ScopedAStatus destroy();
+
+    /**
+     * Handle offload parameter setting from framework.
+     */
+    ndk::ScopedAStatus setOffloadParam(const effect_offload_param_t* offload);
+
+    /**
+     * Get a pointer to the open return parameters of the active sub-effect.
+     * Always use this interface to get the effect open return parameters (FMQs) after a
+     * successful setOffloadParam() call.
+     */
+    const IEffect::OpenEffectReturn* getEffectReturnParam();
+
+    // IEffect interfaces override
+    ndk::ScopedAStatus open(
+            const ::aidl::android::hardware::audio::effect::Parameter::Common& common,
+            const std::optional<::aidl::android::hardware::audio::effect::Parameter::Specific>&
+                    specific,
+            ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn* ret) override;
+    ndk::ScopedAStatus close() override;
+    ndk::ScopedAStatus getDescriptor(
+            ::aidl::android::hardware::audio::effect::Descriptor* desc) override;
+    ndk::ScopedAStatus command(::aidl::android::hardware::audio::effect::CommandId id) override;
+    ndk::ScopedAStatus getState(::aidl::android::hardware::audio::effect::State* state) override;
+    ndk::ScopedAStatus setParameter(
+            const ::aidl::android::hardware::audio::effect::Parameter& param) override;
+    ndk::ScopedAStatus getParameter(
+            const ::aidl::android::hardware::audio::effect::Parameter::Id& id,
+            ::aidl::android::hardware::audio::effect::Parameter* param) override;
+
+  private:
+    // Proxy identity: copied from one sub-effect, with the implementation UUID set to the proxy UUID
+    const ::aidl::android::hardware::audio::effect::Descriptor::Identity mIdentity;
+    const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory> mFactory;
+
+    // A map from sub-effect identity to its IEffect handle, descriptor, and open return (FMQs)
+    enum SubEffectTupleIndex { HANDLE, DESCRIPTOR, RETURN };
+    using EffectProxySub =
+            std::tuple<std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>,
+                       ::aidl::android::hardware::audio::effect::Descriptor,
+                       ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn>;
+    std::map<const ::aidl::android::hardware::audio::effect::Descriptor::Identity, EffectProxySub>
+            mSubEffects;
+
+    // Identity of the single active sub-effect in the mSubEffects map
+    ::aidl::android::hardware::audio::effect::Descriptor::Identity mActiveSub;
+
+    // combined flags of all sub-effects
+    ::aidl::android::hardware::audio::effect::Flags mSubFlags;
+
+    ndk::ScopedAStatus runWithActiveSubEffectThenOthers(
+            std::function<ndk::ScopedAStatus(
+                    const std::shared_ptr<
+                            ::aidl::android::hardware::audio::effect::IEffect>&)> const& func);
+
+    ndk::ScopedAStatus runWithActiveSubEffect(
+            std::function<ndk::ScopedAStatus(const std::shared_ptr<IEffect>&)> const& func);
+
+    ndk::ScopedAStatus runWithAllSubEffects(
+            std::function<ndk::ScopedAStatus(std::shared_ptr<IEffect>&)> const& func);
+
+    // close and release all sub-effects
+    ~EffectProxy();
+};
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index f289f24..bc05aa0 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -15,7 +15,9 @@
  */
 
 #include <algorithm>
+#include <cstddef>
 #include <cstdint>
+#include <iterator>
 #include <memory>
 #define LOG_TAG "EffectsFactoryHalAidl"
 //#define LOG_NDEBUG 0
@@ -29,10 +31,12 @@
 
 #include "EffectBufferHalAidl.h"
 #include "EffectHalAidl.h"
+#include "EffectProxy.h"
 #include "EffectsFactoryHalAidl.h"
 
 using ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid;
 using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::hardware::audio::effect::Descriptor;
 using aidl::android::hardware::audio::effect::IFactory;
 using aidl::android::media::audio::common::AudioUuid;
 using android::detail::AudioHalVersionInfo;
@@ -42,12 +46,56 @@
 
 EffectsFactoryHalAidl::EffectsFactoryHalAidl(std::shared_ptr<IFactory> effectsFactory)
     : mFactory(effectsFactory),
-      mHalVersion(AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, [this]() {
-          int32_t majorVersion = 0;
-          return (mFactory && mFactory->getInterfaceVersion(&majorVersion).isOk()) ? majorVersion
-                                                                                   : 0;
-      }())) {
-    ALOG_ASSERT(effectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
+      mHalVersion(AudioHalVersionInfo(
+              AudioHalVersionInfo::Type::AIDL,
+              [this]() {
+                  int32_t majorVersion = 0;
+                  return (mFactory && mFactory->getInterfaceVersion(&majorVersion).isOk())
+                                 ? majorVersion
+                                 : 0;
+              }())),
+      mHalDescList([this]() {
+          std::vector<Descriptor> list;
+          if (mFactory) {
+              mFactory->queryEffects(std::nullopt, std::nullopt, std::nullopt, &list).isOk();
+          }
+          return list;
+      }()),
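+      // Group every descriptor that advertises a proxy UUID under a single EffectProxy instance,
+      // keyed by that proxy UUID.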
+      mUuidProxyMap([this]() {
+          std::map<AudioUuid, std::shared_ptr<EffectProxy>> proxyMap;
+          for (const auto& desc : mHalDescList) {
+              // create EffectProxy
+              if (desc.common.id.proxy.has_value()) {
+                  const auto& uuid = desc.common.id.proxy.value();
+                  if (0 == proxyMap.count(uuid)) {
+                      proxyMap.insert({uuid, ndk::SharedRefBase::make<EffectProxy>(desc.common.id,
+                                                                                   mFactory)});
+                  }
+                  proxyMap[uuid]->addSubEffect(desc);
+                  ALOGI("%s addSubEffect %s", __func__, desc.common.toString().c_str());
+              }
+          }
+          return proxyMap;
+      }()),
+      mProxyDescList([this]() {
+          std::vector<Descriptor> list;
+          for (const auto& proxy : mUuidProxyMap) {
+              if (Descriptor desc; proxy.second && proxy.second->getDescriptor(&desc).isOk()) {
+                  list.emplace_back(std::move(desc));
+              }
+          }
+          return list;
+      }()),
+      mNonProxyDescList([this]() {
+          std::vector<Descriptor> list;
+          std::copy_if(mHalDescList.begin(), mHalDescList.end(), std::back_inserter(list),
+                       [](const Descriptor& desc) { return !desc.common.id.proxy.has_value(); });
+          return list;
+      }()),
+      mEffectCount(mNonProxyDescList.size() + mProxyDescList.size()) {
+    ALOG_ASSERT(mFactory != nullptr, "Provided IEffectsFactory service is NULL");
+    ALOGI("%s with %zu nonProxyEffects and %zu proxyEffects", __func__, mNonProxyDescList.size(),
+          mProxyDescList.size());
 }
 
 status_t EffectsFactoryHalAidl::queryNumberEffects(uint32_t *pNumEffects) {
@@ -55,11 +103,7 @@
         return BAD_VALUE;
     }
 
-    {
-        std::lock_guard lg(mLock);
-        RETURN_STATUS_IF_ERROR(queryEffectList_l());
-        *pNumEffects = mDescList->size();
-    }
+    *pNumEffects = mEffectCount;
     ALOGI("%s %d", __func__, *pNumEffects);
     return OK;
 }
@@ -69,42 +113,43 @@
         return BAD_VALUE;
     }
 
-    std::lock_guard lg(mLock);
-    RETURN_STATUS_IF_ERROR(queryEffectList_l());
-
-    auto listSize = mDescList->size();
-    if (index >= listSize) {
-        ALOGE("%s index %d exceed size DescList %zd", __func__, index, listSize);
+    if (index >= mEffectCount) {
+        ALOGE("%s index %d exceed max number %zu", __func__, index, mEffectCount);
         return INVALID_OPERATION;
     }
 
-    *pDescriptor = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::aidl2legacy_Descriptor_effect_descriptor(mDescList->at(index)));
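+    // Indices [0, mNonProxyDescList.size()) map to regular HAL effect descriptors; the remaining
+    // indices map to the proxy descriptors appended after them.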
+    if (index >= mNonProxyDescList.size()) {
+        *pDescriptor =
+                VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_Descriptor_effect_descriptor(
+                        mProxyDescList.at(index - mNonProxyDescList.size())));
+    } else {
+        *pDescriptor =
+                VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_Descriptor_effect_descriptor(
+                        mNonProxyDescList.at(index)));
+    }
     return OK;
 }
 
 status_t EffectsFactoryHalAidl::getDescriptor(const effect_uuid_t* halUuid,
                                               effect_descriptor_t* pDescriptor) {
-    if (halUuid == nullptr || pDescriptor == nullptr) {
+    if (halUuid == nullptr) {
         return BAD_VALUE;
     }
 
-    AudioUuid uuid = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halUuid));
-    std::lock_guard lg(mLock);
-    return getHalDescriptorWithImplUuid_l(uuid, pDescriptor);
+    AudioUuid uuid =
+            VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halUuid));
+    return getHalDescriptorWithImplUuid(uuid, pDescriptor);
 }
 
 status_t EffectsFactoryHalAidl::getDescriptors(const effect_uuid_t* halType,
                                                std::vector<effect_descriptor_t>* descriptors) {
-    if (halType == nullptr || descriptors == nullptr) {
+    if (halType == nullptr) {
         return BAD_VALUE;
     }
 
-    AudioUuid type = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halType));
-    std::lock_guard lg(mLock);
-    return getHalDescriptorWithTypeUuid_l(type, descriptors);
+    AudioUuid type =
+            VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halType));
+    return getHalDescriptorWithTypeUuid(type, descriptors);
 }
 
 status_t EffectsFactoryHalAidl::createEffect(const effect_uuid_t* uuid, int32_t sessionId,
@@ -116,18 +161,25 @@
     if (sessionId == AUDIO_SESSION_DEVICE && ioId == AUDIO_IO_HANDLE_NONE) {
         return INVALID_OPERATION;
     }
-
     ALOGI("%s session %d ioId %d", __func__, sessionId, ioId);
 
-    AudioUuid aidlUuid = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
+    AudioUuid aidlUuid =
+            VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
     std::shared_ptr<IEffect> aidlEffect;
-    Descriptor desc;
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mFactory->createEffect(aidlUuid, &aidlEffect)));
+    // Use the EffectProxy interface instead of IFactory to create proxy effects
+    const bool isProxy = isProxyEffect(aidlUuid);
+    if (isProxy) {
+        aidlEffect = mUuidProxyMap.at(aidlUuid);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mUuidProxyMap.at(aidlUuid)->create()));
+    } else {
+        RETURN_STATUS_IF_ERROR(
+                statusTFromBinderStatus(mFactory->createEffect(aidlUuid, &aidlEffect)));
+    }
     if (aidlEffect == nullptr) {
-        ALOGE("%s IFactory::createFactory failed UUID %s", __func__, aidlUuid.toString().c_str());
+        ALOGE("%s failed to create effect with UUID: %s", __func__, aidlUuid.toString().c_str());
         return NAME_NOT_FOUND;
     }
+    Descriptor desc;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aidlEffect->getDescriptor(&desc)));
 
     uint64_t effectId;
@@ -136,13 +188,23 @@
         effectId = ++mEffectIdCounter;
     }
 
-    *effect = sp<EffectHalAidl>::make(mFactory, aidlEffect, effectId, sessionId, ioId, desc);
+    *effect =
+            sp<EffectHalAidl>::make(mFactory, aidlEffect, effectId, sessionId, ioId, desc, isProxy);
     return OK;
 }
 
 status_t EffectsFactoryHalAidl::dumpEffects(int fd) {
-    // TODO: add proxy dump here because AIDL service EffectFactory doesn't have proxy handle
-    return mFactory->dump(fd, nullptr, 0);
+    status_t ret = OK;
+    // record the error and continue to dump as many effects as possible
+    for (const auto& proxy : mUuidProxyMap) {
+        if (proxy.second) {
+            if (status_t temp = proxy.second->dump(fd, nullptr, 0); temp != OK) {
+                ret = temp;
+            }
+        }
+    }
+    RETURN_STATUS_IF_ERROR(mFactory->dump(fd, nullptr, 0));
+    return ret;
 }
 
 status_t EffectsFactoryHalAidl::allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) {
@@ -160,56 +222,42 @@
     return mHalVersion;
 }
 
-status_t EffectsFactoryHalAidl::queryEffectList_l() {
-    if (!mDescList) {
-        std::vector<Descriptor> list;
-        auto status = mFactory->queryEffects(std::nullopt, std::nullopt, std::nullopt, &list);
-        if (!status.isOk()) {
-            ALOGE("%s IFactory::queryEffects failed %s", __func__, status.getDescription().c_str());
-            return status.getStatus();
-        }
-
-        mDescList = std::make_unique<std::vector<Descriptor>>(list);
-    }
-    return OK;
-}
-
-status_t EffectsFactoryHalAidl::getHalDescriptorWithImplUuid_l(const AudioUuid& uuid,
-                                                               effect_descriptor_t* pDescriptor) {
+status_t EffectsFactoryHalAidl::getHalDescriptorWithImplUuid(const AudioUuid& uuid,
+                                                             effect_descriptor_t* pDescriptor) {
     if (pDescriptor == nullptr) {
         return BAD_VALUE;
     }
-    if (!mDescList) {
-        RETURN_STATUS_IF_ERROR(queryEffectList_l());
-    }
 
-    auto matchIt = std::find_if(mDescList->begin(), mDescList->end(),
-                                 [&](const auto& desc) { return desc.common.id.uuid == uuid; });
-    if (matchIt == mDescList->end()) {
-        ALOGE("%s UUID %s not found", __func__, uuid.toString().c_str());
+    const auto& list = isProxyEffect(uuid) ? mProxyDescList : mNonProxyDescList;
+    auto matchIt = std::find_if(list.begin(), list.end(),
+                                [&](const auto& desc) { return desc.common.id.uuid == uuid; });
+    if (matchIt == list.end()) {
+        ALOGE("%s UUID %s not found in HAL or proxy descriptor list", __func__, uuid.toString().c_str());
         return BAD_VALUE;
     }
+    ALOGI("%s found descriptor for implementation UUID %s", __func__, uuid.toString().c_str());
 
     *pDescriptor = VALUE_OR_RETURN_STATUS(
             ::aidl::android::aidl2legacy_Descriptor_effect_descriptor(*matchIt));
     return OK;
 }
 
-status_t EffectsFactoryHalAidl::getHalDescriptorWithTypeUuid_l(
+status_t EffectsFactoryHalAidl::getHalDescriptorWithTypeUuid(
         const AudioUuid& type, std::vector<effect_descriptor_t>* descriptors) {
     if (descriptors == nullptr) {
         return BAD_VALUE;
     }
-    if (!mDescList) {
-        RETURN_STATUS_IF_ERROR(queryEffectList_l());
-    }
+
     std::vector<Descriptor> result;
-    std::copy_if(mDescList->begin(), mDescList->end(), std::back_inserter(result),
+    std::copy_if(mNonProxyDescList.begin(), mNonProxyDescList.end(), std::back_inserter(result),
                  [&](auto& desc) { return desc.common.id.type == type; });
-    if (result.size() == 0) {
-        ALOGE("%s type UUID %s not found", __func__, type.toString().c_str());
+    std::copy_if(mProxyDescList.begin(), mProxyDescList.end(), std::back_inserter(result),
+                 [&](auto& desc) { return desc.common.id.type == type; });
+    if (result.empty()) {
+        ALOGW("%s type UUID %s not found in HAL or proxy descriptor list", __func__, type.toString().c_str());
         return BAD_VALUE;
     }
+    ALOGI("%s found %zu descriptors for type UUID %s", __func__, result.size(), type.toString().c_str());
 
     *descriptors = VALUE_OR_RETURN_STATUS(
             aidl::android::convertContainer<std::vector<effect_descriptor_t>>(
@@ -217,6 +265,10 @@
     return OK;
 }
 
+bool EffectsFactoryHalAidl::isProxyEffect(const AudioUuid& uuid) const {
+    return 0 != mUuidProxyMap.count(uuid);
+}
+
 } // namespace effect
 
 // When a shared library is built from a static library, even explicit
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.h b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
index 9c3643b..debfacf 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
@@ -25,6 +25,8 @@
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <system/thread_defs.h>
 
+#include "EffectProxy.h"
+
 namespace android {
 namespace effect {
 
@@ -60,24 +62,35 @@
 
     detail::AudioHalVersionInfo getHalVersion() const override;
 
-    // for TIME_CHECK
-    const std::string getClassName() const { return "EffectHalAidl"; }
-
   private:
-    std::mutex mLock;
     const std::shared_ptr<IFactory> mFactory;
-    uint64_t mEffectIdCounter GUARDED_BY(mLock) = 0; // Align with HIDL (0 is INVALID_ID)
-    std::unique_ptr<std::vector<Descriptor>> mDescList GUARDED_BY(mLock) = nullptr;
     const detail::AudioHalVersionInfo mHalVersion;
+    // Full list of HAL effect descriptors
+    const std::vector<Descriptor> mHalDescList;
+    // Map of proxy UUID (key) to the proxy object
+    const std::map<::aidl::android::media::audio::common::AudioUuid /* proxy impl UUID */,
+                   std::shared_ptr<EffectProxy>>
+            mUuidProxyMap;
+    // List of proxy effect descriptors; initialized after mUuidProxyMap because it needs all sub-effects
+    const std::vector<Descriptor> mProxyDescList;
+    // List of non-proxy effects
+    const std::vector<Descriptor> mNonProxyDescList;
+    // Total number of effects, including proxy effects
+    const size_t mEffectCount;
+
+    std::mutex mLock;
+    uint64_t mEffectIdCounter GUARDED_BY(mLock) = 0;  // Align with HIDL (0 is INVALID_ID)
 
     virtual ~EffectsFactoryHalAidl() = default;
-    status_t queryEffectList_l() REQUIRES(mLock);
-    status_t getHalDescriptorWithImplUuid_l(
+    status_t getHalDescriptorWithImplUuid(
             const aidl::android::media::audio::common::AudioUuid& uuid,
-            effect_descriptor_t* pDescriptor) REQUIRES(mLock);
-    status_t getHalDescriptorWithTypeUuid_l(
+            effect_descriptor_t* pDescriptor);
+
+    status_t getHalDescriptorWithTypeUuid(
             const aidl::android::media::audio::common::AudioUuid& type,
-            std::vector<effect_descriptor_t>* descriptors) REQUIRES(mLock);
+            std::vector<effect_descriptor_t>* descriptors);
+
+    bool isProxyEffect(const aidl::android::media::audio::common::AudioUuid& uuid) const;
 };
 
 } // namespace effect
diff --git a/media/libaudiohal/tests/Android.bp b/media/libaudiohal/tests/Android.bp
index 2f78dd0..8210f7d 100644
--- a/media/libaudiohal/tests/Android.bp
+++ b/media/libaudiohal/tests/Android.bp
@@ -20,18 +20,12 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_test {
-    name: "EffectsFactoryHalInterfaceTest",
+cc_defaults {
+    name: "AudioHalTestDefaults",
     test_suites: ["device-tests"],
-
-    srcs: [
-        "EffectsFactoryHalInterface_test.cpp",
-    ],
-
     defaults: [
         "latest_android_media_audio_common_types_ndk_shared",
     ],
-
     cflags: [
         "-Wall",
         "-Wextra",
@@ -48,8 +42,31 @@
         "libutils",
         "libvibrator",
     ],
+}
 
-    header_libs: [
-        "libaudiohal_headers",
+cc_test {
+    name: "EffectsFactoryHalInterfaceTest",
+    srcs: ["EffectsFactoryHalInterface_test.cpp"],
+    defaults: ["AudioHalTestDefaults"],
+    header_libs: ["libaudiohal_headers"],
+}
+
+cc_test {
+    name: "EffectProxyTest",
+    srcs: [
+        "EffectProxy_test.cpp",
+        ":audio_effectproxy_src_files",
     ],
+    defaults: [
+        "AudioHalTestDefaults",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "libaudiohal_default",
+        "use_libaidlvintf_gtest_helper_static",
+    ],
+    shared_libs: [
+        "android.hardware.common.fmq-V1-ndk",
+        "libbinder_ndk",
+        "libfmq",
+    ],
+    header_libs: ["libaudiohalimpl_headers"],
 }
diff --git a/media/libaudiohal/tests/EffectProxy_test.cpp b/media/libaudiohal/tests/EffectProxy_test.cpp
new file mode 100644
index 0000000..92e3dce
--- /dev/null
+++ b/media/libaudiohal/tests/EffectProxy_test.cpp
@@ -0,0 +1,357 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#include <cstddef>
+#include <cstdint>
+#include <memory>
+#include <utility>
+#define LOG_TAG "EffectProxyTest"
+
+#include <aidl/android/media/audio/common/AudioUuid.h>
+#include <aidl/Vintf.h>
+#include <android/binder_manager.h>
+#include <gtest/gtest.h>
+#include <utils/RefBase.h>
+
+#include "EffectProxy.h"
+
+/**
+ * This test suite depends on the audio effect AIDL service.
+ */
+namespace android {
+
+using ::aidl::android::hardware::audio::effect::CommandId;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::Flags;
+using ::aidl::android::hardware::audio::effect::IEffect;
+using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::media::audio::common::AudioChannelLayout;
+using ::aidl::android::media::audio::common::AudioFormatDescription;
+using ::aidl::android::media::audio::common::AudioFormatType;
+using ::aidl::android::media::audio::common::AudioUuid;
+using ::aidl::android::media::audio::common::PcmType;
+using ::android::effect::EffectProxy;
+
+class EffectProxyTest : public testing::Test {
+  public:
+    void SetUp() override {
+        auto serviceName = android::getAidlHalInstanceNames(IFactory::descriptor);
+        // only test against the first instance in case more than one EffectFactory service exists
+        ASSERT_NE(0ul, serviceName.size());
+        mFactory = IFactory::fromBinder(
+                ndk::SpAIBinder(AServiceManager_waitForService(serviceName[0].c_str())));
+        ASSERT_NE(nullptr, mFactory);
+        mFactory->queryEffects(std::nullopt, std::nullopt, std::nullopt, &mDescs);
+        for (const auto& desc : mDescs) {
+            if (desc.common.id.proxy.has_value()) {
+                mProxyDescs.insert({desc.common.id, desc});
+            }
+        }
+    }
+
+    void TearDown() override {}
+
+    const AudioFormatDescription kDefaultFormatDescription = {
+            .type = AudioFormatType::PCM, .pcm = PcmType::FLOAT_32_BIT, .encoding = ""};
+
+    Parameter::Common createParamCommon(
+            int session = 0, int ioHandle = -1, int iSampleRate = 48000, int oSampleRate = 48000,
+            long iFrameCount = 0x100, long oFrameCount = 0x100,
+            AudioChannelLayout inputChannelLayout =
+                    AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+                            AudioChannelLayout::LAYOUT_STEREO),
+            AudioChannelLayout outputChannelLayout =
+                    AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+                            AudioChannelLayout::LAYOUT_STEREO)) {
+        Parameter::Common common;
+        common.session = session;
+        common.ioHandle = ioHandle;
+
+        auto& input = common.input;
+        auto& output = common.output;
+        input.base.sampleRate = iSampleRate;
+        input.base.channelMask = inputChannelLayout;
+        input.base.format = kDefaultFormatDescription;
+        input.frameCount = iFrameCount;
+        output.base.sampleRate = oSampleRate;
+        output.base.channelMask = outputChannelLayout;
+        output.base.format = kDefaultFormatDescription;
+        output.frameCount = oFrameCount;
+        return common;
+    }
+
+    static bool isFlagSet(const ::aidl::android::hardware::audio::effect::Descriptor& desc,
+                          Flags::HardwareAccelerator flag) {
+        return desc.common.flags.hwAcceleratorMode == flag;
+    }
+
+    enum TupleIndex { HANDLE, DESCRIPTOR };
+    using EffectProxyTuple = std::tuple<std::shared_ptr<EffectProxy>, std::vector<Descriptor>>;
+
+    std::map<AudioUuid, EffectProxyTuple> createAllProxies() {
+        std::map<AudioUuid, EffectProxyTuple> proxyMap;
+        for (const auto& itor : mProxyDescs) {
+            const auto& uuid = itor.first.proxy.value();
+            if (proxyMap.end() == proxyMap.find(uuid)) {
+                std::get<TupleIndex::HANDLE>(proxyMap[uuid]) =
+                        ndk::SharedRefBase::make<EffectProxy>(itor.first, mFactory);
+            }
+        }
+        return proxyMap;
+    }
+
+    bool addAllSubEffects(std::map<AudioUuid, EffectProxyTuple>& proxyMap) {
+        for (auto& itor : mProxyDescs) {
+            const auto& uuid = itor.first.proxy.value();
+            if (proxyMap.end() == proxyMap.find(uuid)) {
+                return false;
+            }
+            auto& proxy = std::get<TupleIndex::HANDLE>(proxyMap[uuid]);
+            if (!proxy->addSubEffect(itor.second).isOk()) {
+                return false;
+            }
+            std::get<TupleIndex::DESCRIPTOR>(proxyMap[uuid]).emplace_back(itor.second);
+        }
+        return true;
+    }
+
+    std::shared_ptr<IFactory> mFactory;
+    std::vector<Descriptor> mDescs;
+    std::map<Descriptor::Identity, Descriptor> mProxyDescs;
+};
+
+TEST_F(EffectProxyTest, createProxy) {
+    auto proxyMap = createAllProxies();
+    // if any descriptors define a proxy, then proxyMap cannot be empty
+    EXPECT_EQ(mProxyDescs.size() == 0, proxyMap.size() == 0);
+}
+
+TEST_F(EffectProxyTest, addSubEffectsCreateAndDestroy) {
+    auto proxyMap = createAllProxies();
+    ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->create().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+TEST_F(EffectProxyTest, addSubEffectsCreateOpenCloseDestroy) {
+    auto proxyMap = createAllProxies();
+    EXPECT_TRUE(addAllSubEffects(proxyMap));
+
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->create().isOk());
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+// Add sub-effects, then set the active sub-effect with different offload settings
+TEST_F(EffectProxyTest, setOffloadParam) {
+    auto proxyMap = createAllProxies();
+    EXPECT_TRUE(addAllSubEffects(proxyMap));
+
+    // Setting should succeed whenever a sub-effect with the matching flag exists
+    bool isNoneExist = false, isSimpleExist = false, isTunnelExist = false;
+    for (const auto& itor : mProxyDescs) {
+        isNoneExist = isNoneExist || isFlagSet(itor.second, Flags::HardwareAccelerator::NONE);
+        isSimpleExist = isSimpleExist || isFlagSet(itor.second, Flags::HardwareAccelerator::SIMPLE);
+        isTunnelExist = isTunnelExist || isFlagSet(itor.second, Flags::HardwareAccelerator::TUNNEL);
+    }
+
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->create().isOk());
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+        effect_offload_param_t offloadParam{false, 0};
+        EXPECT_EQ(isNoneExist || isSimpleExist, proxy->setOffloadParam(&offloadParam).isOk());
+        offloadParam.isOffload = true;
+        EXPECT_EQ(isTunnelExist, proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+TEST_F(EffectProxyTest, destroyWithoutCreate) {
+    auto proxyMap = createAllProxies();
+    ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+TEST_F(EffectProxyTest, closeWithoutOpen) {
+    auto proxyMap = createAllProxies();
+    ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->create().isOk());
+
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+// Add sub-effects, set the active sub-effect, then create, open, and send commands,
+// expecting successful handling
+TEST_F(EffectProxyTest, normalSequence) {
+    auto proxyMap = createAllProxies();
+    ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+    bool isTunnelExist = [&]() {
+        for (const auto& itor : mProxyDescs) {
+            if (isFlagSet(itor.second, Flags::HardwareAccelerator::TUNNEL)) {
+                return true;
+            }
+        }
+        return false;
+    }();
+
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    Parameter::VolumeStereo volumeStereo({.left = .1f, .right = -0.8f});
+    Parameter param = Parameter::make<Parameter::volumeStereo>(volumeStereo);
+    Parameter::Id id = Parameter::Id::make<Parameter::Id::commonTag>(Parameter::volumeStereo);
+    State state;
+    for (const auto& itor : proxyMap) {
+        Parameter expect;
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        effect_offload_param_t offloadParam{true, 0};
+        EXPECT_EQ(isTunnelExist, proxy->setOffloadParam(&offloadParam).isOk());
+
+        EXPECT_TRUE(proxy->create().isOk());
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+
+        EXPECT_TRUE(proxy->setParameter(param).isOk());
+        EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+        EXPECT_EQ(expect, param);
+
+        EXPECT_TRUE(proxy->command(CommandId::START).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::PROCESSING, state);
+
+        EXPECT_TRUE(proxy->command(CommandId::STOP).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::IDLE, state);
+
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+// setParameter, change active sub-effect, verify with getParameter
+TEST_F(EffectProxyTest, changeActiveSubAndVerifyParameter) {
+    auto proxyMap = createAllProxies();
+    EXPECT_TRUE(addAllSubEffects(proxyMap));
+
+    bool isNoneExist = false, isSimpleExist = false, isTunnelExist = false;
+    for (const auto& itor : mProxyDescs) {
+        isNoneExist = isNoneExist || isFlagSet(itor.second, Flags::HardwareAccelerator::NONE);
+        isSimpleExist = isSimpleExist || isFlagSet(itor.second, Flags::HardwareAccelerator::SIMPLE);
+        isTunnelExist = isTunnelExist || isFlagSet(itor.second, Flags::HardwareAccelerator::TUNNEL);
+    }
+
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    Parameter::VolumeStereo volumeStereo({.left = .5f, .right = .8f});
+    Parameter param = Parameter::make<Parameter::volumeStereo>(volumeStereo);
+    Parameter::Id id = Parameter::Id::make<Parameter::Id::commonTag>(Parameter::volumeStereo);
+    for (const auto& itor : proxyMap) {
+        Parameter expect;
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->create().isOk());
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+        EXPECT_TRUE(proxy->setParameter(param).isOk());
+        EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+        EXPECT_EQ(expect, param);
+
+        effect_offload_param_t offloadParam{false, 0};
+        EXPECT_EQ(isNoneExist || isSimpleExist, proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+        EXPECT_EQ(expect, param);
+
+        offloadParam.isOffload = true;
+        EXPECT_EQ(isTunnelExist, proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+        EXPECT_EQ(expect, param);
+
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+// send command, change active sub-effect, then verify the state with getState
+TEST_F(EffectProxyTest, changeActiveSubAndVerifyState) {
+    auto proxyMap = createAllProxies();
+    ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+    bool isNoneExist = false, isSimpleExist = false, isTunnelExist = false;
+    for (const auto& itor : mProxyDescs) {
+        isNoneExist = isNoneExist || isFlagSet(itor.second, Flags::HardwareAccelerator::NONE);
+        isSimpleExist = isSimpleExist || isFlagSet(itor.second, Flags::HardwareAccelerator::SIMPLE);
+        isTunnelExist = isTunnelExist || isFlagSet(itor.second, Flags::HardwareAccelerator::TUNNEL);
+    }
+
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    State state;
+    for (const auto& itor : proxyMap) {
+        Parameter expect;
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->create().isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::INIT, state);
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::IDLE, state);
+        EXPECT_TRUE(proxy->command(CommandId::START).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::PROCESSING, state);
+
+        effect_offload_param_t offloadParam{false, 0};
+        EXPECT_EQ(isNoneExist || isSimpleExist, proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::PROCESSING, state);
+
+        offloadParam.isOffload = true;
+        EXPECT_EQ(isTunnelExist, proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::PROCESSING, state);
+
+        EXPECT_TRUE(proxy->command(CommandId::STOP).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::IDLE, state);
+
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::INIT, state);
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+} // namespace android
diff --git a/media/libheadtracking/PosePredictor.cpp b/media/libheadtracking/PosePredictor.cpp
index f67a966..5209d54 100644
--- a/media/libheadtracking/PosePredictor.cpp
+++ b/media/libheadtracking/PosePredictor.cpp
@@ -91,20 +91,23 @@
 
 // Formatting
 static inline std::vector<size_t> createDelimiterIdx(size_t predictors, size_t lookaheads) {
-    if (predictors == 0) return {};
-    --predictors;
-    std::vector<size_t> delimiterIdx(predictors);
-    for (size_t i = 0; i < predictors; ++i) {
-        delimiterIdx[i] = (i + 1) * lookaheads;
+    if (lookaheads == 0) return {};
+    --lookaheads;
+    std::vector<size_t> delimiterIdx(lookaheads);
+    for (size_t i = 0; i < lookaheads; ++i) {
+        delimiterIdx[i] = (i + 1) * predictors;
     }
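+    // Illustrative (assumed counts, not part of this change): with 3 predictors and
+    // 4 lookahead values, three delimiters are produced, after columns 3, 6 and 9.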
     return delimiterIdx;
 }
 
 PosePredictor::PosePredictor()
-    : mPredictors{  // must match switch in getCurrentPredictor()
+    : mPredictors{
+            // The first predictors must match the switch in getCurrentPredictor()
             std::make_shared<LastPredictor>(),
             std::make_shared<TwistPredictor>(),
             std::make_shared<LeastSquaresPredictor>(),
+            // Additional predictors for comparison can be placed after this point, such as
+            // std::make_shared<LeastSquaresPredictor>(0.25),
         }
     , mLookaheadMs(kLookAheadMs.begin(), kLookAheadMs.end())
     , mVerifiers(std::size(mLookaheadMs) * std::size(mPredictors))
@@ -195,7 +198,12 @@
     if constexpr (kEnableVerification) {
         // dump verification
         ss.append(prefixSpace)
-            .append(" Prediction abs error (L1) degrees [ type (last twist least-squares) x ( ");
+            .append(" Prediction abs error (L1) degrees [ type (");
+        for (size_t i = 0; i < mPredictors.size(); ++i) {
+            if (i > 0) ss.append(" , ");
+            ss.append(mPredictors[i]->name());
+        }
+        ss.append(" ) x ( ");
         for (size_t i = 0; i < mLookaheadMs.size(); ++i) {
             if (i > 0) ss.append(" : ");
             ss.append(std::to_string(mLookaheadMs[i]));
diff --git a/media/libheadtracking/PosePredictor.h b/media/libheadtracking/PosePredictor.h
index 06983cc..53211e3 100644
--- a/media/libheadtracking/PosePredictor.h
+++ b/media/libheadtracking/PosePredictor.h
@@ -32,6 +32,7 @@
     virtual void add(int64_t atNs, const Pose3f& pose, const Twist3f& twist) = 0;
     virtual Pose3f predict(int64_t atNs) const = 0;
     virtual void reset() = 0;
+    virtual std::string name() const = 0;
     virtual std::string toString(size_t index) const = 0;
 };
 
@@ -57,6 +58,10 @@
         mLastPose = {};
     }
 
+    std::string name() const override {
+        return "LAST";
+    }
+
     std::string toString(size_t index) const override {
         std::string s(index, ' ');
         s.append("LastPredictor using last pose: ")
@@ -92,6 +97,10 @@
         mLastTwist = {};
     }
 
+    std::string name() const override {
+        return "TWIST";
+    }
+
     std::string toString(size_t index) const override {
         std::string s(index, ' ');
         s.append("TwistPredictor using last pose: ")
@@ -130,13 +139,16 @@
     void add(int64_t atNs, const Pose3f& pose, const Twist3f& twist) override;
     Pose3f predict(int64_t atNs) const override;
     void reset() override;
+    std::string name() const override {
+        return "LEAST_SQUARES(" + std::to_string(mAlpha) + ")";
+    }
     std::string toString(size_t index) const override;
 
 private:
     const double mAlpha;
     int64_t mLastAtNs{};
     Pose3f mLastPose;
-    static constexpr double kDefaultAlphaEstimator = 0.5;
+    static constexpr double kDefaultAlphaEstimator = 0.2;
     static constexpr size_t kMinimumSamplesForPrediction = 4;
     audio_utils::LinearLeastSquaresFit<double> mRw;
     audio_utils::LinearLeastSquaresFit<double> mRx;
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 7c78900..2ba1fc3 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -26,7 +26,6 @@
 #include <binder/IMemory.h>
 #include <binder/MemoryDealer.h>
 #include <drm/drm_framework_common.h>
-#include <log/log.h>
 #include <media/mediametadataretriever.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index bdf1cbc..9e9e9d8 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1837,7 +1837,6 @@
     } else {
         mAttributes = NULL;
     }
-
     setMinBufferCount();
 }
 
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index 5abac81..5e95c87 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -46,6 +46,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libmediaplayerservice",
+        vector: "remote",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 52b2041..8da09c4 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -1104,14 +1104,14 @@
                         static_cast<MediaBufferHolder*>(holder.get())->mediaBuffer() : nullptr;
                 }
                 if (mediaBuf != NULL) {
-                    if (mediaBuf->size() > codecBuffer->capacity()) {
+                    if (mediaBuf->range_length() > codecBuffer->capacity()) {
                         handleError(ERROR_BUFFER_TOO_SMALL);
                         mDequeuedInputBuffers.push_back(bufferIx);
                         return false;
                     }
 
-                    codecBuffer->setRange(0, mediaBuf->size());
-                    memcpy(codecBuffer->data(), mediaBuf->data(), mediaBuf->size());
+                    codecBuffer->setRange(0, mediaBuf->range_length());
+                    memcpy(codecBuffer->data(), mediaBuf->data(), mediaBuf->range_length());
 
                     MetaDataBase &meta_data = mediaBuf->meta_data();
                     cryptInfo = NuPlayerDrm::getSampleCryptoInfo(meta_data);
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 2370a7b..6e97bf7 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -542,7 +542,7 @@
     if (dstFormat() == COLOR_Format32bitABGR2101010) {
         videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
     } else {
-        videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+        videoFormat->setInt32("color-format", COLOR_FormatYUV420Flexible);
     }
 
     // For the thumbnail extraction case, try to allocate single buffer in both
@@ -685,7 +685,6 @@
     if (mCaptureLayer != nullptr) {
         return captureSurface();
     }
-
     ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
 
     uint32_t standard, range, transfer;
@@ -698,8 +697,18 @@
     if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
         transfer = 0;
     }
+    sp<ABuffer> imgObj;
+    if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+        MediaImage2 *imageData = nullptr;
+        imageData = (MediaImage2 *)(imgObj.get()->data());
+        if (imageData != nullptr) {
+            converter.setSrcMediaImage2(*imageData);
+        }
+    }
+    if (srcFormat == COLOR_FormatYUV420Flexible && imgObj.get() == nullptr) {
+        return ERROR_UNSUPPORTED;
+    }
     converter.setSrcColorSpace(standard, range, transfer);
-
     if (converter.isValid()) {
         converter.convert(
                 (const uint8_t *)videoFrameBuffer->data(),
@@ -864,7 +873,7 @@
     if (dstFormat() == COLOR_Format32bitABGR2101010) {
         videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
     } else {
-        videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+        videoFormat->setInt32("color-format", COLOR_FormatYUV420Flexible);
     }
 
     if ((mGridRows == 1) && (mGridCols == 1)) {
@@ -967,6 +976,17 @@
     if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
         transfer = 0;
     }
+    sp<ABuffer> imgObj;
+    if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+        MediaImage2 *imageData = nullptr;
+        imageData = (MediaImage2 *)(imgObj.get()->data());
+        if (imageData != nullptr) {
+            converter.setSrcMediaImage2(*imageData);
+        }
+    }
+    if (srcFormat == COLOR_FormatYUV420Flexible && imgObj.get() == nullptr) {
+        return ERROR_UNSUPPORTED;
+    }
     converter.setSrcColorSpace(standard, range, transfer);
 
     int32_t crop_left, crop_top, crop_right, crop_bottom;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index c4a29c0..e5cc991 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -20,6 +20,7 @@
 #include <utils/Log.h>
 
 #include <set>
+#include <random>
 #include <stdlib.h>
 
 #include <inttypes.h>
@@ -99,6 +100,7 @@
 // These must be kept synchronized with the constants there.
 static const char *kCodecLogSessionId = "android.media.mediacodec.log-session-id";
 static const char *kCodecCodec = "android.media.mediacodec.codec";  /* e.g. OMX.google.aac.decoder */
+static const char *kCodecId = "android.media.mediacodec.id";
 static const char *kCodecMime = "android.media.mediacodec.mime";    /* e.g. audio/mime */
 static const char *kCodecMode = "android.media.mediacodec.mode";    /* audio, video */
 static const char *kCodecModeVideo = "video";            /* values returned for kCodecMode */
@@ -218,7 +220,7 @@
         sp<MediaCodec> codec = mMediaCodec.promote();
         if (codec == NULL) {
             // Codec is already gone, so remove the resources as well
-            ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
+            ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
             std::shared_ptr<IResourceManagerService> service =
                     IResourceManagerService::fromBinder(binder);
             if (service == nullptr) {
@@ -290,6 +292,9 @@
     void removeClient();
     void markClientForPendingRemoval();
     bool reclaimResource(const std::vector<MediaResourceParcel> &resources);
+    void notifyClientCreated();
+    void notifyClientStarted(ClientConfigParcel& clientConfig);
+    void notifyClientStopped(ClientConfigParcel& clientConfig);
 
     inline void setCodecName(const char* name) {
         mCodecName = name;
@@ -331,7 +336,7 @@
 }
 
 status_t MediaCodec::ResourceManagerServiceProxy::init() {
-    ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
+    ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
     mService = IResourceManagerService::fromBinder(binder);
     if (mService == nullptr) {
         ALOGE("Failed to get ResourceManagerService");
@@ -468,6 +473,32 @@
     return status.isOk() && success;
 }
 
+void MediaCodec::ResourceManagerServiceProxy::notifyClientCreated() {
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(mClient),
+                                .name = mCodecName};
+    mService->notifyClientCreated(clientInfo);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::notifyClientStarted(
+    ClientConfigParcel& clientConfig) {
+    clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
+    clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
+    clientConfig.clientInfo.id = getId(mClient);
+    clientConfig.clientInfo.name = mCodecName;
+    mService->notifyClientStarted(clientConfig);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::notifyClientStopped(
+    ClientConfigParcel& clientConfig) {
+    clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
+    clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
+    clientConfig.clientInfo.id = getId(mClient);
+    clientConfig.clientInfo.name = mCodecName;
+    mService->notifyClientStopped(clientConfig);
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 
 MediaCodec::BufferInfo::BufferInfo() : mOwnedByClient(false) {}
@@ -860,6 +891,23 @@
     return new PersistentSurface(bufferProducer, bufferSource);
 }
 
+// GenerateCodecId generates a 64-bit random ID for each codec that is created.
+// The codec ID is generated as:
+//   - A process-unique random high 32 bits
+//   - An atomic sequence low 32 bits
+//
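+// Illustrative example (assumed value, not part of this change): if the random
+// high half for this process happened to be 0x1A2B3C4D, the first codec created
+// would get ID 0x1A2B3C4D00000000, the next 0x1A2B3C4D00000001, and so on.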
+static uint64_t GenerateCodecId() {
+    static std::atomic_uint64_t sId = [] {
+        std::random_device rd;
+        std::mt19937 gen(rd());
+        std::uniform_int_distribution<uint32_t> distrib(0, UINT32_MAX);
+        uint32_t randomID = distrib(gen);
+        uint64_t id = randomID;
+        return id << 32;
+    }();
+    return sId++;
+}
+
 MediaCodec::MediaCodec(
         const sp<ALooper> &looper, pid_t pid, uid_t uid,
         std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
@@ -902,6 +950,7 @@
       mInputBufferCounter(0),
       mGetCodecBase(getCodecBase),
       mGetCodecInfo(getCodecInfo) {
+    mCodecId = GenerateCodecId();
     mResourceManagerProxy = new ResourceManagerServiceProxy(pid, uid,
             ::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid, uid));
     if (!mGetCodecBase) {
@@ -1235,12 +1284,14 @@
     // ensure mutex while we do our own work
     Mutex::Autolock _lock(mMetricsLock);
     if (mMetricsHandle != 0) {
-        if (mediametrics_count(mMetricsHandle) > 0) {
+        if (mMetricsToUpload && mediametrics_count(mMetricsHandle) > 0) {
             mediametrics_selfRecord(mMetricsHandle);
         }
         mediametrics_delete(mMetricsHandle);
         mMetricsHandle = 0;
     }
+    // we no longer have anything pending for upload
+    mMetricsToUpload = false;
 }
 
 void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
@@ -1793,6 +1844,12 @@
             break;
         }
     }
+
+    if (OK == err) {
+        // Notify the ResourceManager that this codec has been created
+        // (initialized) successfully.
+        mResourceManagerProxy->notifyClientCreated();
+    }
     return err;
 }
 
@@ -1839,6 +1896,7 @@
         const sp<ICrypto> &crypto,
         const sp<IDescrambler> &descrambler,
         uint32_t flags) {
+
     sp<AMessage> msg = new AMessage(kWhatConfigure, this);
     mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
 
@@ -1846,6 +1904,7 @@
     format->findString("log-session-id", &mLogSessionId);
 
     if (nextMetricsHandle != 0) {
+        mediametrics_setInt64(nextMetricsHandle, kCodecId, mCodecId);
         int32_t profile = 0;
         if (format->findInt32("profile", &profile)) {
             mediametrics_setInt32(nextMetricsHandle, kCodecProfile, profile);
@@ -3340,6 +3399,17 @@
     return DequeueOutputResult::kRepliedWithError;
 }
 
+
+inline void MediaCodec::initClientConfigParcel(ClientConfigParcel& clientConfig) {
+    clientConfig.codecType = toMediaResourceSubType(mDomain);
+    clientConfig.isEncoder = mFlags & kFlagIsEncoder;
+    clientConfig.isHardware = !MediaCodecList::isSoftwareCodec(mComponentName);
+    clientConfig.width = mWidth;
+    clientConfig.height = mHeight;
+    clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
+    clientConfig.id = mCodecId;
+}
+
 void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatCodecNotify:
@@ -3586,14 +3656,8 @@
                         mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
                     }
 
-                    MediaCodecInfo::Attributes attr = mCodecInfo
-                            ? mCodecInfo->getAttributes()
-                            : MediaCodecInfo::Attributes(0);
-                    if (mDomain == DOMAIN_VIDEO || !(attr & MediaCodecInfo::kFlagIsSoftwareOnly)) {
-                        // software audio codecs are currently ignored.
-                        mResourceManagerProxy->addResource(MediaResource::CodecResource(
+                    mResourceManagerProxy->addResource(MediaResource::CodecResource(
                             mFlags & kFlagIsSecure, toMediaResourceSubType(mDomain)));
-                    }
 
                     postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
                     break;
@@ -3763,6 +3827,11 @@
                         mResourceManagerProxy->addResource(
                                 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
                     }
+                    // Notify the RM that the codec is in use (has been started).
+                    ClientConfigParcel clientConfig;
+                    initClientConfigParcel(clientConfig);
+                    mResourceManagerProxy->notifyClientStarted(clientConfig);
+
                     setState(STARTED);
                     postPendingRepliesAndDeferredMessages("kWhatStartCompleted");
 
@@ -3993,6 +4062,11 @@
                               mState, stateString(mState).c_str());
                         break;
                     }
+                    // Notify the RM that the codec has been stopped.
+                    ClientConfigParcel clientConfig;
+                    initClientConfigParcel(clientConfig);
+                    mResourceManagerProxy->notifyClientStopped(clientConfig);
+
                     setState(INITIALIZED);
                     if (mReplyID) {
                         postPendingRepliesAndDeferredMessages("kWhatStopCompleted");
@@ -4175,6 +4249,10 @@
                 initMediametrics();
             }
 
+            // from this point forward, in this configure/use/release lifecycle, we want to
+            // upload our data
+            mMetricsToUpload = true;
+
             int32_t push;
             if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
                 mFlags |= kFlagPushBlankBuffersOnShutdown;
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 485759c..4ad3276 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -405,7 +405,7 @@
     // a suitable codec.
     //
     int profile = -1;
-    if (matches->empty() && format->findInt32(KEY_PROFILE, &profile)) {
+    if (matches->empty() && format != nullptr && format->findInt32(KEY_PROFILE, &profile)) {
         ALOGV("no matching codec found, retrying without profile");
         sp<AMessage> formatNoProfile = format->dup();
         formatNoProfile->removeEntryByName(KEY_PROFILE);
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 0536f2a..d736734 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -639,9 +639,11 @@
         numPageSamples = -1;
     }
 
+    // insert numPageSamples and account for the space it uses.
     memcpy((uint8_t *)buffer->data() + mbuf->range_length(),
            &numPageSamples,
            sizeof(numPageSamples));
+    buffer->setRange(buffer->offset(), buffer->size() + sizeof(numPageSamples));
 
     uint32_t type;
     const void *data;
@@ -690,6 +692,8 @@
 
     ssize_t minIndex = fetchAllTrackSamples();
 
+    buffer->setRange(0, 0);     // start with an empty buffer
+
     if (minIndex < 0) {
         return ERROR_END_OF_STREAM;
     }
@@ -705,25 +709,25 @@
         sampleSize += sizeof(int32_t);
     }
 
+    // capacity() is ok since we cleared out the buffer
     if (buffer->capacity() < sampleSize) {
         return -ENOMEM;
     }
 
+    const size_t srclen = it->mBuffer->range_length();
     const uint8_t *src =
         (const uint8_t *)it->mBuffer->data()
             + it->mBuffer->range_offset();
 
-    memcpy((uint8_t *)buffer->data(), src, it->mBuffer->range_length());
+    memcpy((uint8_t *)buffer->data(), src, srclen);
+    buffer->setRange(0, srclen);
 
     status_t err = OK;
     if (info->mTrackFlags & kIsVorbis) {
+        // adjusts the range when it appends the extra data
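+        // Illustrative (assumed sizes, not part of this change): for a 100-byte
+        // Vorbis sample the range is [0, 100) after the memcpy above and becomes
+        // [0, 104) once the 4-byte numPageSamples value is appended.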
         err = appendVorbisNumPageSamples(it->mBuffer, buffer);
     }
 
-    if (err == OK) {
-        buffer->setRange(0, sampleSize);
-    }
-
     return err;
 }
 
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 92c5eb3..9d2568e 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -33,10 +33,8 @@
 #include <functional>
 #include <sys/time.h>
 
-#define USE_LIBYUV
 #define PERF_PROFILING 0
 
-
 #if defined(__aarch64__) || defined(__ARM_NEON__)
 #define USE_NEON_Y410 1
 #else
@@ -48,6 +46,48 @@
 #endif
 
 namespace android {
+typedef const struct libyuv::YuvConstants LibyuvConstants;
+
+struct LibyuvConstPair {
+    const LibyuvConstants *yuv;
+    const LibyuvConstants *yvu;
+};
+
+// Function to resolve the YUV matrices defined in libyuv
+static LibyuvConstPair getLibYUVMatrix(
+        const ColorConverter::ColorSpace &colorSpace, bool is10Bit) {
+    LibyuvConstPair matrix = {nullptr, nullptr};
+    const bool isFullRange = (colorSpace.mRange == ColorUtils::kColorRangeFull);
+    if (colorSpace.isI601()) {
+        matrix.yuv = &libyuv::kYuvI601Constants;
+        matrix.yvu = &libyuv::kYvuI601Constants;
+    } else if (colorSpace.isJ601()) {
+        matrix.yuv = &libyuv::kYuvJPEGConstants;
+        matrix.yvu = &libyuv::kYvuJPEGConstants;
+    } else if (colorSpace.isH709()) {
+        matrix.yuv = &libyuv::kYuvH709Constants;
+        matrix.yvu = &libyuv::kYvuH709Constants;
+    } else if (colorSpace.isF709()) {
+        matrix.yuv = &libyuv::kYuvF709Constants;
+        matrix.yvu = &libyuv::kYvuF709Constants;
+    } else if (colorSpace.isBt2020()) {
+        matrix.yuv = &libyuv::kYuv2020Constants;
+        matrix.yvu = &libyuv::kYvu2020Constants;
+    } else if (colorSpace.isBtV2020()) {
+        matrix.yuv = &libyuv::kYuvV2020Constants;
+        matrix.yvu = &libyuv::kYvuV2020Constants;
+    } else {
+        // unspecified
+        if (isFullRange) {
+            matrix.yuv = is10Bit ? &libyuv::kYuvV2020Constants : &libyuv::kYuvJPEGConstants;
+            matrix.yvu = is10Bit ? &libyuv::kYvuV2020Constants : &libyuv::kYvuJPEGConstants;
+        } else {
+            matrix.yuv = is10Bit ? &libyuv::kYuv2020Constants : &libyuv::kYuvI601Constants;
+            matrix.yvu = is10Bit ? &libyuv::kYvu2020Constants : &libyuv::kYvuI601Constants;
+        }
+    }
+    return matrix;
+}
 
 static bool isRGB(OMX_COLOR_FORMATTYPE colorFormat) {
     return colorFormat == OMX_COLOR_Format16bitRGB565
@@ -56,28 +96,234 @@
             || colorFormat == COLOR_Format32bitABGR2101010;
 }
 
-bool ColorConverter::ColorSpace::isBt2020() const {
-    return (mStandard == ColorUtils::kColorStandardBT2020);
+// Check for limited range
+bool ColorConverter::ColorSpace::isLimitedRange() const {
+    return mRange == ColorUtils::kColorRangeLimited;
 }
 
-bool ColorConverter::ColorSpace::isH420() const {
+// BT.2020 limited range YUV to RGB
+bool ColorConverter::ColorSpace::isBt2020() const {
+    return (mStandard == ColorUtils::kColorStandardBT2020
+            && mRange == ColorUtils::kColorRangeLimited);
+}
+
+// BT.2020 full range YUV to RGB
+bool ColorConverter::ColorSpace::isBtV2020() const {
+    return (mStandard == ColorUtils::kColorStandardBT2020
+            && mRange == ColorUtils::kColorRangeFull);
+}
+
+// BT.709 full range YUV to RGB
+bool ColorConverter::ColorSpace::isF709() const {
+    return (mStandard == ColorUtils::kColorStandardBT709
+            && mRange == ColorUtils::kColorRangeFull);
+}
+
+// BT.709 limited range YUV to RGB
+bool ColorConverter::ColorSpace::isH709() const {
     return (mStandard == ColorUtils::kColorStandardBT709)
             && (mRange == ColorUtils::kColorRangeLimited);
 }
 
+// BT.601 limited range YUV to RGB
 // the matrix coefficients are the same for both 601.625 and 601.525 standards
-bool ColorConverter::ColorSpace::isI420() const {
+bool ColorConverter::ColorSpace::isI601() const {
     return ((mStandard == ColorUtils::kColorStandardBT601_625)
             || (mStandard == ColorUtils::kColorStandardBT601_525))
             && (mRange == ColorUtils::kColorRangeLimited);
 }
 
-bool ColorConverter::ColorSpace::isJ420() const {
+// BT.601 full range YUV to RGB
+bool ColorConverter::ColorSpace::isJ601() const {
     return ((mStandard == ColorUtils::kColorStandardBT601_625)
             || (mStandard == ColorUtils::kColorStandardBT601_525))
             && (mRange == ColorUtils::kColorRangeFull);
 }
 
+// Utility functions for MediaImage2
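+// Illustrative layout (assumed dimensions, not part of this change): for a
+// 320x240 8-bit planar image with stride = 320 and vstride = 240, the helper
+// below places Y at offset 0, U at 320 * 240 = 76800 and V at
+// 320 * 240 * 5 / 4 = 96000, with chroma rows advancing by stride / 2 = 160.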
+static MediaImage2 CreateYUV420PlanarMediaImage2(
+        uint32_t width, uint32_t height, uint32_t stride,
+        uint32_t vstride, uint32_t bitDepth) {
+    const uint32_t componentBytes = (bitDepth + 7) / 8;
+    return MediaImage2 {
+        .mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV,
+        .mNumPlanes = 3,
+        .mWidth = width,
+        .mHeight = height,
+        .mBitDepth = bitDepth,
+        .mBitDepthAllocated = componentBytes * 8,
+        .mPlane = {
+            {
+                .mOffset = 0,
+                .mColInc = static_cast<int32_t>(componentBytes),
+                .mRowInc = static_cast<int32_t>(stride),
+                .mHorizSubsampling = 1,
+                .mVertSubsampling = 1,
+            },
+            {
+                .mOffset = stride * vstride,
+                .mColInc = static_cast<int32_t>(componentBytes),
+                .mRowInc = static_cast<int32_t>(stride / 2),
+                .mHorizSubsampling = 2,
+                .mVertSubsampling = 2,
+            },
+            {
+                .mOffset = stride * vstride * 5 / 4,
+                .mColInc = static_cast<int32_t>(componentBytes),
+                .mRowInc = static_cast<int32_t>(stride / 2),
+                .mHorizSubsampling = 2,
+                .mVertSubsampling = 2,
+            }
+        },
+    };
+}
+
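+// Illustrative (assumed geometry as above, not part of this change): in the
+// semi-planar helper below, uv = true (NV12) yields U at offset 76800 and V at
+// 76801, uv = false (NV21) swaps them, and chroma rows advance by the full stride.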
+static MediaImage2 CreateYUV420SemiPlanarMediaImage2(
+        uint32_t width, uint32_t height, uint32_t stride,
+        uint32_t vstride, uint32_t bitDepth, bool uv = true /*nv12 or not*/) {
+    const uint32_t componentBytes = (bitDepth + 7) / 8;
+    return MediaImage2 {
+        .mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV,
+        .mNumPlanes = 3,
+        .mWidth = width,
+        .mHeight = height,
+        .mBitDepth = bitDepth,
+        .mBitDepthAllocated = componentBytes * 8,
+        .mPlane = {
+            {
+                .mOffset = 0,
+                .mColInc = static_cast<int32_t>(componentBytes),
+                .mRowInc = static_cast<int32_t>(stride),
+                .mHorizSubsampling = 1,
+                .mVertSubsampling = 1,
+            },
+            {
+                .mOffset = stride * vstride + (uv ? 0 : componentBytes),
+                .mColInc = static_cast<int32_t>(2 * componentBytes),
+                .mRowInc = static_cast<int32_t>(stride),
+                .mHorizSubsampling = 2,
+                .mVertSubsampling = 2,
+            },
+            {
+                .mOffset = stride * vstride + (uv ? componentBytes : 0),
+                .mColInc = static_cast<int32_t>(2 * componentBytes),
+                .mRowInc = static_cast<int32_t>(stride),
+                .mHorizSubsampling = 2,
+                .mVertSubsampling = 2,
+            }
+        },
+    };
+}
+
+ColorConverter::Image::Image(const MediaImage2& img)
+    :mImage(img),
+    mLayout(ImageLayoutUnknown),
+    mSampling(ImageSamplingUnknown) {
+    const MediaImage2::PlaneInfo &yPlane =
+            img.mPlane[MediaImage2::PlaneIndex::Y];
+    const MediaImage2::PlaneInfo &uPlane =
+            img.mPlane[MediaImage2::PlaneIndex::U];
+    const MediaImage2::PlaneInfo &vPlane =
+            img.mPlane[MediaImage2::PlaneIndex::V];
+
+    if (mImage.mNumPlanes != 3) {
+        ALOGE("Conversion error: MediaImage2 mNumPlanes != 3");
+        mLayout = ImageLayoutUnknown;
+        mSampling = ImageSamplingUnknown;
+        mBitDepth = ImageBitDepthInvalid;
+        return;
+    }
+
+    if (mImage.mBitDepth == 8
+            && yPlane.mColInc == 1
+            && uPlane.mColInc == 1
+            && vPlane.mColInc == 1
+            && yPlane.mVertSubsampling == 1
+            && uPlane.mVertSubsampling == 2
+            && vPlane.mVertSubsampling == 2) {
+        mLayout = ImageLayout420Planar;
+        mSampling = ImageSamplingYUV420;
+    } else if (mImage.mBitDepth == 8
+            && yPlane.mColInc == 1
+            && uPlane.mColInc == 2
+            && vPlane.mColInc == 2
+            && yPlane.mVertSubsampling == 1
+            && uPlane.mVertSubsampling == 2
+            && vPlane.mVertSubsampling == 2
+            && ((vPlane.mOffset == uPlane.mOffset + 1) ||
+            (uPlane.mOffset == vPlane.mOffset + 1))) {
+        mLayout = ImageLayout420SemiPlanar;
+        mSampling = ImageSamplingYUV420;
+    }
+
+    mBitDepth = ImageBitDepthInvalid;
+    switch (img.mBitDepth) {
+        case 8:
+            mBitDepth = ImageBitDepth8;
+            break;
+
+        case 10:
+        case 12:
+        case 16:
+        default:
+            // TODO: Implement 10b, 12b and 16b using MediaImage2
+            mBitDepth = ImageBitDepthInvalid;
+    }
+}
+
+status_t ColorConverter::Image::getYUVPlaneOffsetAndStride(
+        const BitmapParams &src,
+        uint32_t *y_offset,
+        uint32_t *u_offset,
+        uint32_t *v_offset,
+        size_t *y_stride,
+        size_t *u_stride,
+        size_t *v_stride) const {
+
+    if (y_offset == nullptr || u_offset == nullptr || v_offset == nullptr
+            || y_stride == nullptr || u_stride == nullptr || v_stride == nullptr) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    if (mImage.mNumPlanes != 3) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    const MediaImage2::PlaneInfo &yPlane = mImage.mPlane[MediaImage2::PlaneIndex::Y];
+    *y_offset = yPlane.mOffset
+            + src.mCropTop * yPlane.mRowInc
+            + src.mCropLeft * yPlane.mColInc;
+
+    const MediaImage2::PlaneInfo &uPlane = mImage.mPlane[MediaImage2::PlaneIndex::U];
+    *u_offset = uPlane.mOffset
+            + (src.mCropTop / uPlane.mVertSubsampling) * uPlane.mRowInc
+            + (src.mCropLeft / uPlane.mHorizSubsampling) * uPlane.mColInc;
+
+    const MediaImage2::PlaneInfo &vPlane = mImage.mPlane[MediaImage2::PlaneIndex::V];
+    *v_offset = vPlane.mOffset
+            + (src.mCropTop / vPlane.mVertSubsampling) * vPlane.mRowInc
+            + (src.mCropLeft / vPlane.mHorizSubsampling) * vPlane.mColInc;
+
+    *y_stride = yPlane.mRowInc;
+    *u_stride = uPlane.mRowInc;
+    *v_stride = vPlane.mRowInc;
+
+    return OK;
+}
+
+bool ColorConverter::Image::isNV21() const {
+    if (getLayout() == ImageLayout420SemiPlanar) {
+        const MediaImage2::PlaneInfo &uPlane = mImage.mPlane[MediaImage2::PlaneIndex::U];
+        const MediaImage2::PlaneInfo &vPlane = mImage.mPlane[MediaImage2::PlaneIndex::V];
+
+        int componentBytes = (mImage.mBitDepthAllocated) / 8;
+
+        return (vPlane.mOffset + componentBytes) == uPlane.mOffset;
+    }
+    return false;
+}
+
 /**
  * This class approximates the standard YUV to RGB conversions by factoring the matrix
  * coefficients to 1/256th-s (as dividing by 256 is easy to do with right shift). The chosen value
@@ -227,8 +473,42 @@
     mClip10Bit = NULL;
 }
 
+// Set the source MediaImage2 (used for flexible color formats)
+void ColorConverter::setSrcMediaImage2(MediaImage2 img) {
+    mSrcImage = Image(img);
+}
+
+bool ColorConverter::isValidForMediaImage2() const {
+
+    if (!mSrcImage
+            || mSrcImage->getMediaImage2().mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
+        // TODO: support Yonly or RGB etc?
+        return false;
+    }
+    // try to identify the src format
+
+    BitDepth_t srcBitDepth = mSrcImage->getBitDepth();
+
+    // TODO: support 12b and 16b?
+    if (srcBitDepth == ImageBitDepthInvalid) {
+        return false;
+    }
+
+    return ((srcBitDepth == ImageBitDepth8  &&
+            (mDstFormat == OMX_COLOR_Format16bitRGB565
+            || mDstFormat == OMX_COLOR_Format32BitRGBA8888
+            || mDstFormat == OMX_COLOR_Format32bitBGRA8888))
+
+            || (srcBitDepth == ImageBitDepth10
+            && (mDstFormat == COLOR_Format32bitABGR2101010)));
+}
+
 bool ColorConverter::isValid() const {
     switch ((int32_t)mSrcFormat) {
+        case COLOR_FormatYUV420Flexible:
+            return isValidForMediaImage2();
+
         case OMX_COLOR_FormatYUV420Planar16:
             if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
                 return true;
@@ -240,22 +520,23 @@
                     || mDstFormat == OMX_COLOR_Format32bitBGRA8888;
 
         case OMX_COLOR_FormatCbYCrY:
-        case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
-        case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
             return mDstFormat == OMX_COLOR_Format16bitRGB565;
 
         case OMX_COLOR_FormatYUV420SemiPlanar:
-#ifdef USE_LIBYUV
+        case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
+        case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+            if (mSrcImage) {
+                return isValidForMediaImage2();
+            }
             return mDstFormat == OMX_COLOR_Format16bitRGB565
                     || mDstFormat == OMX_COLOR_Format32BitRGBA8888
                     || mDstFormat == OMX_COLOR_Format32bitBGRA8888;
-#else
-            return mDstFormat == OMX_COLOR_Format16bitRGB565;
-#endif
+
         case COLOR_FormatYUVP010:
             return mDstFormat == COLOR_Format32bitABGR2101010;
 
         default:
+            // TODO: Should this be enabled for MediaImage2?
             return false;
     }
 }
@@ -320,6 +601,13 @@
         mStride = mWidth;
         break;
 
+    case COLOR_FormatYUV420Flexible:
+        // The source is expected to be described by a MediaImage2.
+        mBpp = 1;
+        mStride = mWidth;
+        break;
+
     default:
         ALOGE("Unsupported color format %d", mColorFormat);
         mBpp = 1;
@@ -360,7 +648,8 @@
     BitmapParams src(
             const_cast<void *>(srcBits),
             srcWidth, srcHeight, srcStride,
-            srcCropLeft, srcCropTop, srcCropRight, srcCropBottom, mSrcFormat);
+            srcCropLeft, srcCropTop, srcCropRight, srcCropBottom,
+            mSrcFormat);
 
     BitmapParams dst(
             dstBits,
@@ -374,71 +663,68 @@
             && src.cropHeight() == dst.cropHeight())) {
         return ERROR_UNSUPPORTED;
     }
-
-    status_t err;
-
-    switch ((int32_t)mSrcFormat) {
-        case OMX_COLOR_FormatYUV420Planar:
-#ifdef USE_LIBYUV
-            err = convertYUV420PlanarUseLibYUV(src, dst);
-#else
-            err = convertYUV420Planar(src, dst);
+#if PERF_PROFILING
+    int64_t startTimeUs = ALooper::GetNowUs();
 #endif
+    status_t err;
+    switch ((int32_t)mSrcFormat) {
+        case COLOR_FormatYUV420Flexible:
+            err = convertYUVMediaImage(src, dst);
+            break;
+
+        case OMX_COLOR_FormatYUV420Planar:
+            if (!mSrcImage) {
+                mSrcImage = Image(CreateYUV420PlanarMediaImage2(
+                        srcWidth, srcHeight, srcStride, srcHeight, 8 /*bitDepth*/));
+            }
+            err = convertYUVMediaImage(src, dst);
             break;
 
         case OMX_COLOR_FormatYUV420Planar16:
-        {
-#if PERF_PROFILING
-            int64_t startTimeUs = ALooper::GetNowUs();
-#endif
             err = convertYUV420Planar16(src, dst);
-#if PERF_PROFILING
-            int64_t endTimeUs = ALooper::GetNowUs();
-            ALOGD("convertYUV420Planar16 took %lld us", (long long) (endTimeUs - startTimeUs));
-#endif
             break;
-        }
 
         case COLOR_FormatYUVP010:
-        {
-#if PERF_PROFILING
-            int64_t startTimeUs = ALooper::GetNowUs();
-#endif
             err = convertYUVP010(src, dst);
-#if PERF_PROFILING
-            int64_t endTimeUs = ALooper::GetNowUs();
-            ALOGD("convertYUVP010 took %lld us", (long long) (endTimeUs - startTimeUs));
-#endif
             break;
-        }
 
         case OMX_COLOR_FormatCbYCrY:
             err = convertCbYCrY(src, dst);
             break;
 
         case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
-            err = convertQCOMYUV420SemiPlanar(src, dst);
+            if (!mSrcImage) {
+                mSrcImage = Image(CreateYUV420SemiPlanarMediaImage2(
+                    srcWidth, srcHeight, srcStride, srcHeight, 8 /*bitDepth*/, false));
+            }
+            err = convertYUVMediaImage(src, dst);
             break;
 
         case OMX_COLOR_FormatYUV420SemiPlanar:
-#ifdef USE_LIBYUV
-            err = convertYUV420SemiPlanarUseLibYUV(src, dst);
-#else
-            err = convertYUV420SemiPlanar(src, dst);
-#endif
-            break;
-
         case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
-            err = convertTIYUV420PackedSemiPlanar(src, dst);
+            if (!mSrcImage) {
+                mSrcImage = Image(CreateYUV420SemiPlanarMediaImage2(
+                    srcWidth, srcHeight, srcStride, srcHeight, 8 /*bitDepth*/));
+            }
+            err = convertYUVMediaImage(src, dst);
             break;
 
         default:
-        {
             CHECK(!"Should not be here. Unknown color conversion.");
             break;
-        }
     }
 
+#if PERF_PROFILING
+    int64_t endTimeUs = ALooper::GetNowUs();
+    ALOGD("%s image took %lld us", asString_ColorFormat(mSrcFormat, "Unknown"),
+            (long long) (endTimeUs - startTimeUs));
+#endif
+
     return err;
 }
 
@@ -547,67 +833,103 @@
     return OK;
 }
 
+status_t ColorConverter::getSrcYUVPlaneOffsetAndStride(
+        const BitmapParams &src,
+        uint32_t *y_offset, uint32_t *u_offset, uint32_t *v_offset,
+        size_t *y_stride, size_t *u_stride, size_t *v_stride) const {
+    if (y_offset == nullptr || u_offset == nullptr || v_offset == nullptr
+            || y_stride == nullptr || u_stride == nullptr || v_stride == nullptr) {
+        ALOGE("nullptrs given for yuv source offset / stride");
+        return ERROR_MALFORMED;
+    }
+
+    if (mSrcImage) {
+        // If we have a MediaImage2, get the layout info from it.
+        return mSrcImage->getYUVPlaneOffsetAndStride(src, y_offset, u_offset, v_offset,
+                y_stride, u_stride, v_stride);
+    }
+    return ERROR_UNSUPPORTED;
+}
 /*
     libyuv supports the following color spaces:
 
-    I420: BT.601 limited range
-    J420: BT.601 full range (jpeg)
-    H420: BT.709 limited range
+    I601:  BT.601 limited range
+    J601:  BT.601 full range (JPEG)
+    H709:  BT.709 limited range
+    F709:  BT.709 full range
+    2020:  BT.2020 limited range
+    V2020: BT.2020 full range
 
 */
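The LibyuvConstPair/getLibYUVMatrix helpers used below are defined elsewhere in this patch; as a hedged sketch of how such a selection could map the color spaces listed above onto libyuv's published kYuv*/kYvu* constant tables (the struct and function names below are assumptions for illustration, not the patch's actual definitions):

#include <libyuv/convert_argb.h>

// Sketch only: one matrix for U-then-V plane order, one for V-then-U.
struct YuvMatrixPairSketch {
    const libyuv::YuvConstants *yuv;
    const libyuv::YuvConstants *yvu;
};

static YuvMatrixPairSketch pickLibyuvMatrix(bool fullRange, bool bt709, bool bt2020) {
    if (bt2020) {
        return fullRange ? YuvMatrixPairSketch{&libyuv::kYuvV2020Constants, &libyuv::kYvuV2020Constants}
                         : YuvMatrixPairSketch{&libyuv::kYuv2020Constants, &libyuv::kYvu2020Constants};
    }
    if (bt709) {
        return fullRange ? YuvMatrixPairSketch{&libyuv::kYuvF709Constants, &libyuv::kYvuF709Constants}
                         : YuvMatrixPairSketch{&libyuv::kYuvH709Constants, &libyuv::kYvuH709Constants};
    }
    return fullRange ? YuvMatrixPairSketch{&libyuv::kYuvJPEGConstants, &libyuv::kYvuJPEGConstants}
                     : YuvMatrixPairSketch{&libyuv::kYuvI601Constants, &libyuv::kYvuI601Constants};
}

Keeping both the yuv and yvu tables around lets the callers below feed the same matrix family to either U-first or V-first plane orderings without branching at each call site.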
 
-#define DECLARE_YUV2RGBFUNC(func, rgb) int (*func)(     \
-        const uint8_t*, int, const uint8_t*, int,       \
-        const uint8_t*, int, uint8_t*, int, int, int)   \
-        = mSrcColorSpace.isH420() ? libyuv::H420To##rgb \
-        : mSrcColorSpace.isJ420() ? libyuv::J420To##rgb \
-        : libyuv::I420To##rgb
-
 status_t ColorConverter::convertYUV420PlanarUseLibYUV(
         const BitmapParams &src, const BitmapParams &dst) {
-    // Fall back to our conversion if libyuv does not support the color space.
-    // I420 (BT.601 limited) is default, so don't fall back if we end up using it anyway.
-    if (!mSrcColorSpace.isH420() && !mSrcColorSpace.isJ420()
-            // && !mSrcColorSpace.isI420() /* same as line below */
-            && getMatrix() != &BT601_LIMITED) {
-        return convertYUV420Planar(src, dst);
+    LibyuvConstPair yuvConstants =
+            getLibYUVMatrix(mSrcColorSpace, false);
+
+    uint32_t y_offset = 0, u_offset = 0, v_offset = 0;
+    size_t src_stride_y = 0, src_stride_u = 0, src_stride_v = 0;
+    if (getSrcYUVPlaneOffsetAndStride(src, &y_offset, &u_offset, &v_offset,
+                          &src_stride_y, &src_stride_u, &src_stride_v) != OK) {
+        return ERROR_UNSUPPORTED;
     }
 
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
         + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
 
-    const uint8_t *src_y =
-        (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
+    const uint8_t *src_y = (const uint8_t *)src.mBits + y_offset;
 
-    const uint8_t *src_u =
-        (const uint8_t *)src.mBits + src.mStride * src.mHeight
-        + (src.mCropTop / 2) * (src.mStride / 2) + (src.mCropLeft / 2);
+    const uint8_t *src_u = (const uint8_t *)src.mBits + u_offset;
 
-    const uint8_t *src_v =
-        src_u + (src.mStride / 2) * (src.mHeight / 2);
+    const uint8_t *src_v = (const uint8_t *)src.mBits + v_offset;
 
     switch (mDstFormat) {
     case OMX_COLOR_Format16bitRGB565:
     {
-        DECLARE_YUV2RGBFUNC(func, RGB565);
-        (*func)(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
-                (uint8_t *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
-        break;
-    }
+        libyuv::I420ToRGB565Matrix(src_y,
+                src_stride_y,
+                src_u,
+                src_stride_u,
+                src_v,
+                src_stride_v,
+                dst_ptr,
+                dst.mStride,
+                yuvConstants.yuv,
+                src.cropWidth(),
+                src.cropHeight());
 
-    case OMX_COLOR_Format32BitRGBA8888:
-    {
-        DECLARE_YUV2RGBFUNC(func, ABGR);
-        (*func)(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
-                (uint8_t *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
         break;
     }
 
     case OMX_COLOR_Format32bitBGRA8888:
     {
-        DECLARE_YUV2RGBFUNC(func, ARGB);
-        (*func)(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
-                (uint8_t *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
+        libyuv::I420ToARGBMatrix(src_y,
+                src_stride_y,
+                src_u,
+                src_stride_u,
+                src_v,
+                src_stride_v,
+                (uint8_t*)dst_ptr,
+                dst.mStride,
+                yuvConstants.yuv,
+                src.cropWidth(),
+                src.cropHeight());
+        break;
+    }
+
+    case OMX_COLOR_Format32BitRGBA8888:
+    {
+        libyuv::I420ToARGBMatrix(src_y,
+                src_stride_y,
+                src_v,
+                src_stride_v,
+                src_u,
+                src_stride_u,
+                (uint8_t*)dst_ptr,
+                dst.mStride,
+                yuvConstants.yvu,
+                src.cropWidth(),
+                src.cropHeight());
         break;
     }
 
@@ -620,38 +942,90 @@
 
 status_t ColorConverter::convertYUV420SemiPlanarUseLibYUV(
         const BitmapParams &src, const BitmapParams &dst) {
-    // Fall back to our conversion if libyuv does not support the color space.
-    // libyuv only supports BT.601 limited range NV12. Don't fall back if we end up using it anyway.
-    if (// !mSrcColorSpace.isI420() && /* same as below */
-        getMatrix() != &BT601_LIMITED) {
-        return convertYUV420SemiPlanar(src, dst);
-    }
+    LibyuvConstPair yuvConstants =
+            getLibYUVMatrix(mSrcColorSpace, false);
 
+    uint32_t y_offset = 0, u_offset = 0, v_offset = 0;
+    size_t src_stride_y = 0, src_stride_u = 0, src_stride_v = 0;
+    if (getSrcYUVPlaneOffsetAndStride(src, &y_offset, &u_offset, &v_offset,
+                          &src_stride_y, &src_stride_u, &src_stride_v) != OK) {
+        return ERROR_UNSUPPORTED;
+    }
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
         + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
 
-    const uint8_t *src_y =
-        (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
+    const uint8_t *src_y = (const uint8_t *)src.mBits + y_offset;
 
-    const uint8_t *src_u =
-        (const uint8_t *)src.mBits + src.mStride * src.mHeight
-        + (src.mCropTop / 2) * src.mStride + src.mCropLeft;
+    const uint8_t *src_u = (const uint8_t *)src.mBits + u_offset;
+
+    const uint8_t *src_v = (const uint8_t *)src.mBits + v_offset;
+
+    bool isNV21 = (u_offset == (v_offset + 1));
+
+    // libyuv function signature for semi-planar formats.
+    std::function<int(const uint8_t*, int,
+            const uint8_t*, int, uint8_t *, int,
+            LibyuvConstants *, int, int)> libyuvFunc;
 
     switch (mDstFormat) {
     case OMX_COLOR_Format16bitRGB565:
-        libyuv::NV12ToRGB565(src_y, src.mStride, src_u, src.mStride, (uint8_t *)dst_ptr,
-                dst.mStride, src.cropWidth(), src.cropHeight());
+    {
+        // Note: libyuv has no NV21-to-RGB565 matrix variant, so NV12 ordering is assumed here.
+        libyuv::NV12ToRGB565Matrix(src_y,
+                src_stride_y,
+                src_u,
+                src_stride_u,
+                (uint8_t*)dst_ptr,
+                dst.mStride,
+                yuvConstants.yuv,
+                src.cropWidth(),
+                src.cropHeight());
         break;
-
+    }
     case OMX_COLOR_Format32bitBGRA8888:
-        libyuv::NV12ToARGB(src_y, src.mStride, src_u, src.mStride, (uint8_t *)dst_ptr,
-                dst.mStride, src.cropWidth(), src.cropHeight());
+    {
+        if (src_stride_u != src_stride_v) {
+            return ERROR_UNSUPPORTED;
+        }
+
+        libyuvFunc = isNV21 ? libyuv::NV21ToARGBMatrix : libyuv::NV12ToARGBMatrix;
+
+        libyuvFunc(src_y,
+                src_stride_y,
+                isNV21 ? src_v : src_u,
+                // src_stride_v should be equal to src_stride_u
+                // but this is done like this for readability
+                isNV21 ? src_stride_v : src_stride_u,
+                (uint8_t*)dst_ptr,
+                dst.mStride,
+                yuvConstants.yuv,
+                src.cropWidth(),
+                src.cropHeight());
         break;
+    }
 
     case OMX_COLOR_Format32BitRGBA8888:
-        libyuv::NV12ToABGR(src_y, src.mStride, src_u, src.mStride, (uint8_t *)dst_ptr,
-                dst.mStride, src.cropWidth(), src.cropHeight());
+    {
+        if (src_stride_u != src_stride_v) {
+            return ERROR_UNSUPPORTED;
+        }
+
+        libyuvFunc = isNV21 ? libyuv::NV12ToARGBMatrix : libyuv::NV21ToARGBMatrix;
+
+        libyuvFunc(src_y,
+                src_stride_y,
+                isNV21 ? src_v : src_u,
+                // src_stride_v should be equal to src_stride_u
+                isNV21 ? src_stride_v : src_stride_u,
+                (uint8_t*)dst_ptr,
+                dst.mStride,
+                yuvConstants.yvu,
+                src.cropWidth(),
+                src.cropHeight());
         break;
+    }
 
     default:
         return ERROR_UNSUPPORTED;
@@ -661,20 +1035,40 @@
 }
 
 std::function<void (void *, void *, void *, size_t,
-                    signed *, signed *, signed *, signed *)>
-getReadFromSrc(OMX_COLOR_FORMATTYPE srcFormat) {
-    switch(srcFormat) {
-    case OMX_COLOR_FormatYUV420Planar:
-        return [](void *src_y, void *src_u, void *src_v, size_t x,
-                  signed *y1, signed *y2, signed *u, signed *v) {
-            *y1 = ((uint8_t*)src_y)[x];
-            *y2 = ((uint8_t*)src_y)[x + 1];
-            *u = ((uint8_t*)src_u)[x / 2] - 128;
-            *v = ((uint8_t*)src_v)[x / 2] - 128;
-        };
-    // this format stores 10 bits content with 16 bits
-    // converting it to 8 bits src
-    case OMX_COLOR_FormatYUV420Planar16:
+        signed *, signed *, signed *, signed *)>
+getReadFromChromaHorizSubsampled2Image8b(std::optional<MediaImage2> image,
+        OMX_COLOR_FORMATTYPE srcFormat) {
+    // This builds a reader for sources whose chroma planes are both
+    // horizontally subsampled by 2, i.e. two luma samples are returned
+    // for each chroma sample pair.
+    if (image) {
+        uint32_t uColInc =
+                image->mPlane[MediaImage2::PlaneIndex::U].mColInc;
+        uint32_t vColInc =
+                image->mPlane[MediaImage2::PlaneIndex::V].mColInc;
+        uint32_t uHorizSubsampling =
+                image->mPlane[MediaImage2::PlaneIndex::U].mHorizSubsampling;
+        uint32_t vHorizSubsampling =
+                image->mPlane[MediaImage2::PlaneIndex::V].mHorizSubsampling;
+
+        if (!(uHorizSubsampling == 2 && vHorizSubsampling == 2)) {
+            return nullptr;
+        }
+
+        if (image->mBitDepthAllocated == 8) {
+            return [uColInc, vColInc, uHorizSubsampling, vHorizSubsampling]
+                    (void *src_y, void *src_u, void *src_v, size_t x,
+                    signed *y1, signed *y2, signed *u, signed *v) {
+                *y1 = ((uint8_t *)src_y)[x];
+                *y2 = ((uint8_t *)src_y)[x + 1];
+                *u  = ((uint8_t *)src_u)[(x / uHorizSubsampling) * uColInc] - 128;
+                *v  = ((uint8_t *)src_v)[(x / vHorizSubsampling) * vColInc] - 128;
+            };
+        }
+    }
+    if (srcFormat == OMX_COLOR_FormatYUV420Planar16) {
+        // This format stores 10-bit content in 16 bits; convert it down to an 8-bit source.
         return [](void *src_y, void *src_u, void *src_v, size_t x,
                 signed *y1, signed *y2, signed *u, signed *v) {
             *y1 = (uint8_t)(((uint16_t*)src_y)[x] >> 2);
@@ -682,8 +1076,34 @@
             *u = (uint8_t)(((uint16_t*)src_u)[x / 2] >> 2) - 128;
             *v = (uint8_t)(((uint16_t*)src_v)[x / 2] >> 2) - 128;
         };
-    default:
-        TRESPASS();
+    }
+    return nullptr;
+}
+
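A usage sketch (illustrative, not part of the patch) of the reader returned above for an NV12-style 8-bit MediaImage2, where both chroma planes report mColInc == 2 and mHorizSubsampling == 2; the function name readOnePairSketch and the row pointers are assumptions for illustration:

// Illustrative only: reads one luma pair plus its shared chroma sample.
void readOnePairSketch(const MediaImage2 &image,
        uint8_t *rowY, uint8_t *rowU, uint8_t *rowV) {
    auto read = getReadFromChromaHorizSubsampled2Image8b(
            image, OMX_COLOR_FormatYUV420SemiPlanar);
    signed y1, y2, u, v;
    // For x = 4 this yields rowY[4], rowY[5] and the interleaved chroma at
    // rowU[(4 / 2) * 2] / rowV[(4 / 2) * 2], already centered around 0.
    read(rowY, rowU, rowV, /* x */ 4, &y1, &y2, &u, &v);
    (void)y1; (void)y2; (void)u; (void)v;
}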
+std::function<void (void *, void *, void *, size_t,
+        signed *, signed *, signed *)>
+getReadFromImage(std::optional<MediaImage2> image, OMX_COLOR_FORMATTYPE &srcFormat) {
+    (void)srcFormat;
+    if (image) {
+        uint32_t uColInc =
+                image->mPlane[MediaImage2::PlaneIndex::U].mColInc;
+        uint32_t vColInc =
+                image->mPlane[MediaImage2::PlaneIndex::V].mColInc;
+        uint32_t uHorizSubsampling =
+                image->mPlane[MediaImage2::PlaneIndex::U].mHorizSubsampling;
+        uint32_t vHorizSubsampling =
+                image->mPlane[MediaImage2::PlaneIndex::V].mHorizSubsampling;
+
+        if (image->mBitDepthAllocated == 8) {
+            return [uColInc, vColInc, uHorizSubsampling, vHorizSubsampling]
+                    (void *src_y, void *src_u, void *src_v, size_t x,
+                    signed *y1, signed *u, signed *v) {
+                *y1 = ((uint8_t *)src_y)[x];
+                *u  = ((uint8_t *)src_u)[(x / uHorizSubsampling) * uColInc] - 128;
+                *v  = ((uint8_t *)src_v)[(x / vHorizSubsampling) * vColInc] - 128;
+            };
+        }
     }
     return nullptr;
 }
@@ -782,8 +1202,178 @@
     return nullptr;
 }
 
-status_t ColorConverter::convertYUV420Planar(
+status_t ColorConverter::convertYUVMediaImage(
         const BitmapParams &src, const BitmapParams &dst) {
+    // First see if this can be handled as an 8-bit 420 planar or semi-planar conversion.
+    if (!mSrcImage
+            || mSrcImage->getMediaImage2().mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV
+            || mSrcImage->getMediaImage2().mNumPlanes != 3) {
+        ALOGE("Cannot convert without a MediaImage2 describing valid 3-plane YUV");
+        return ERROR_UNSUPPORTED;
+    }
+    if (mSrcImage->getBitDepth() == ImageBitDepth8
+            && mSrcImage->getSampling() == ImageSamplingYUV420) {
+        Layout_t layout = mSrcImage->getLayout();
+        switch (layout) {
+            case Layout_t::ImageLayout420Planar:
+            {
+                return convertYUV420PlanarUseLibYUV(src, dst);
+            }
+
+            case Layout_t::ImageLayout420SemiPlanar:
+            {
+                // Note: libyuv does not support NV21 -> RGB565.
+                if (!(mSrcImage->isNV21() && mDstFormat == OMX_COLOR_Format16bitRGB565)) {
+                    status_t ret = convertYUV420SemiPlanarUseLibYUV(src, dst);
+                    // This call may reject the buffer when semi-planar preconditions
+                    // are not met (e.g. strideU != strideV). It fails before doing any
+                    // conversion, so no extra memcpy is involved; on failure we fall
+                    // through to the per-pixel path below.
+                    if (ret == OK) {
+                        return ret;
+                    }
+                }
+                break;
+            }
+            default:
+                // Handled by the generic per-pixel path below.
+                break;
+        }
+    }
+    const struct Coeffs *matrix = getMatrix();
+    if (!matrix) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    signed _b_u = matrix->_b_u;
+    signed _neg_g_u = -matrix->_g_u;
+    signed _neg_g_v = -matrix->_g_v;
+    signed _r_v = matrix->_r_v;
+    signed _y = matrix->_y;
+
+    uint8_t *dst_ptr = (uint8_t *)dst.mBits
+            + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
+
+    uint32_t y_offset = 0, u_offset = 0, v_offset = 0;
+    size_t src_stride_y = 0, src_stride_u = 0, src_stride_v = 0;
+    if (getSrcYUVPlaneOffsetAndStride(src, &y_offset, &u_offset, &v_offset,
+            &src_stride_y, &src_stride_u, &src_stride_v) != OK) {
+        return ERROR_UNSUPPORTED;
+    }
+    uint32_t uVertSubsampling =
+            mSrcImage->getMediaImage2().mPlane[MediaImage2::PlaneIndex::U].mVertSubsampling;
+    uint32_t vVertSubsampling =
+            mSrcImage->getMediaImage2().mPlane[MediaImage2::PlaneIndex::V].mVertSubsampling;
+
+    // TODO: optimize for chroma subsampling by reading and writing
+    //       multiple pixels within the same loop.
+    if (mSrcImage->getBitDepth() != ImageBitDepth8) {
+        ALOGE("Only 8-bit MediaImage2 sources are supported here");
+        return ERROR_UNSUPPORTED;
+    }
+    signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
+    void *kAdjustedClip = initClip();
+
+    auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip);
+    uint8_t *src_y = (uint8_t *)src.mBits + y_offset;
+    uint8_t *src_u = (uint8_t *)src.mBits + u_offset;
+    uint8_t *src_v = (uint8_t *)src.mBits + v_offset;
+
+    switch (mSrcImage->getSampling()) {
+
+        case ImageSamplingYUV420:
+        {
+            // get read function that can read
+            // chroma sampling 2 with image
+            auto readFromSrcImage = getReadFromChromaHorizSubsampled2Image8b(
+                    mSrcImage->getMediaImage2(), mSrcFormat);
+            if (readFromSrcImage == nullptr) {
+                ALOGE("Cannot get a read function for this MediaImage2");
+                return ERROR_UNSUPPORTED;
+            }
+            for (size_t y = 0; y < src.cropHeight(); ++y) {
+                for (size_t x = 0; x < src.cropWidth(); x += 2) {
+                    signed y1, y2, u, v;
+                    readFromSrcImage(src_y, src_u, src_v, x, &y1, &y2, &u, &v);
+
+                    signed u_b = u * _b_u;
+                    signed u_g = u * _neg_g_u;
+                    signed v_g = v * _neg_g_v;
+                    signed v_r = v * _r_v;
+
+                    y1 = y1 - _c16;
+                    signed tmp1 = y1 * _y + 128;
+                    signed b1 = (tmp1 + u_b) / 256;
+                    signed g1 = (tmp1 + v_g + u_g) / 256;
+                    signed r1 = (tmp1 + v_r) / 256;
+
+                    y2 = y2 - _c16;
+                    signed tmp2 = y2 * _y + 128;
+                    signed b2 = (tmp2 + u_b) / 256;
+                    signed g2 = (tmp2 + v_g + u_g) / 256;
+                    signed r2 = (tmp2 + v_r) / 256;
+
+                    bool uncropped = x + 1 < src.cropWidth();
+                    writeToDst(dst_ptr + x * dst.mBpp, uncropped, r1, g1, b1, r2, g2, b2);
+                }
+                src_y += src_stride_y;
+                src_u += (((y + 1) % uVertSubsampling) == 0) ? src_stride_u : 0;
+                src_v += (((y + 1) % vVertSubsampling) == 0) ? src_stride_v : 0;
+
+                dst_ptr += dst.mStride;
+            }
+            break;
+        }
+
+        default:
+        {
+            // Interleaved or any other formats.
+            auto readFromSrcImage = getReadFromImage(mSrcImage->getMediaImage2(), mSrcFormat);
+            if (readFromSrcImage == nullptr) {
+                ALOGE("Cannot get a read function for this MediaImage2");
+                return ERROR_UNSUPPORTED;
+            }
+            for (size_t y = 0; y < src.cropHeight(); ++y) {
+                for (size_t x = 0; x < src.cropWidth(); x += 1) {
+                    signed y1, u, v;
+                    readFromSrcImage(src_y, src_u, src_v, x, &y1, &u, &v);
+
+                    signed u_b = u * _b_u;
+                    signed u_g = u * _neg_g_u;
+                    signed v_g = v * _neg_g_v;
+                    signed v_r = v * _r_v;
+
+                    y1 = y1 - _c16;
+                    signed tmp1 = y1 * _y + 128;
+                    signed b1 = (tmp1 + u_b) / 256;
+                    signed g1 = (tmp1 + v_g + u_g) / 256;
+                    signed r1 = (tmp1 + v_r) / 256;
+
+                    writeToDst(dst_ptr + x * dst.mBpp, false, r1, g1, b1, 0, 0, 0);
+                }
+                src_y += src_stride_y;
+                src_u += (((y + 1) % uVertSubsampling) == 0) ? src_stride_u : 0;
+                src_v += (((y + 1) % vVertSubsampling) == 0) ? src_stride_v : 0;
+
+                dst_ptr += dst.mStride;
+            }
+        }
+    }
+    return OK;
+}
+
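As a worked instance of the fixed-point math in the loops above, assuming the familiar BT.601 limited-range coefficients scaled by 256 (illustrative values; the actual Coeffs come from getMatrix() and may differ):

// Illustrative coefficients: _y = 298, _r_v = 409, _g_u = 100, _g_v = 208,
// _b_u = 516, _c16 = 16. For an input pixel Y = 180, U = 110, V = 160:
//   y   = 180 - 16 = 164;   tmp = 164 * 298 + 128 = 49000
//   r   = (49000 + (160 - 128) * 409) / 256                      = 242
//   g   = (49000 - (110 - 128) * 100 - (160 - 128) * 208) / 256  = 172
//   b   = (49000 + (110 - 128) * 516) / 256                      = 155
// initClip() then clamps each channel to [0, 255] before writeToDst packs it
// into the destination pixel format.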
+status_t ColorConverter::convertYUV420Planar16(
+        const BitmapParams &src, const BitmapParams &dst) {
+    if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
+        return convertYUV420Planar16ToY410(src, dst);
+    }
+
     const struct Coeffs *matrix = getMatrix();
     if (!matrix) {
         return ERROR_UNSUPPORTED;
@@ -798,7 +1388,7 @@
 
     uint8_t *kAdjustedClip = initClip();
 
-    auto readFromSrc = getReadFromSrc(mSrcFormat);
+    auto readFromSrc = getReadFromChromaHorizSubsampled2Image8b(std::nullopt, mSrcFormat);
     auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip);
 
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
@@ -845,19 +1435,9 @@
 
         dst_ptr += dst.mStride;
     }
-
     return OK;
 }
 
-status_t ColorConverter::convertYUV420Planar16(
-        const BitmapParams &src, const BitmapParams &dst) {
-    if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
-        return convertYUV420Planar16ToY410(src, dst);
-    }
-
-    return convertYUV420Planar(src, dst);
-}
-
 status_t ColorConverter::convertYUVP010(
         const BitmapParams &src, const BitmapParams &dst) {
     if (mDstFormat == COLOR_Format32bitABGR2101010) {
@@ -1136,102 +1716,6 @@
 
 #endif // USE_NEON_Y410
 
-status_t ColorConverter::convertQCOMYUV420SemiPlanar(
-        const BitmapParams &src, const BitmapParams &dst) {
-    /* QCOMYUV420SemiPlanar is NV21, while MediaCodec uses NV12 */
-    return convertYUV420SemiPlanarBase(
-            src, dst, src.mWidth /* row_inc */, true /* isNV21 */);
-}
-
-status_t ColorConverter::convertTIYUV420PackedSemiPlanar(
-        const BitmapParams &src, const BitmapParams &dst) {
-    return convertYUV420SemiPlanarBase(
-            src, dst, src.mWidth /* row_inc */);
-}
-
-status_t ColorConverter::convertYUV420SemiPlanar(
-        const BitmapParams &src, const BitmapParams &dst) {
-    return convertYUV420SemiPlanarBase(
-            src, dst, src.mStride /* row_inc */);
-}
-
-status_t ColorConverter::convertYUV420SemiPlanarBase(const BitmapParams &src,
-        const BitmapParams &dst, size_t row_inc, bool isNV21) {
-    const struct Coeffs *matrix = getMatrix();
-    if (!matrix) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    signed _b_u = matrix->_b_u;
-    signed _neg_g_u = -matrix->_g_u;
-    signed _neg_g_v = -matrix->_g_v;
-    signed _r_v = matrix->_r_v;
-    signed _y = matrix->_y;
-    signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
-
-    uint8_t *kAdjustedClip = initClip();
-
-    uint16_t *dst_ptr = (uint16_t *)((uint8_t *)
-            dst.mBits + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp);
-
-    const uint8_t *src_y =
-        (const uint8_t *)src.mBits + src.mCropTop * row_inc + src.mCropLeft;
-
-    const uint8_t *src_u = (const uint8_t *)src.mBits + src.mHeight * row_inc +
-        (src.mCropTop / 2) * row_inc + src.mCropLeft;
-
-    for (size_t y = 0; y < src.cropHeight(); ++y) {
-        for (size_t x = 0; x < src.cropWidth(); x += 2) {
-            signed y1 = (signed)src_y[x] - _c16;
-            signed y2 = (signed)src_y[x + 1] - _c16;
-
-            signed u = (signed)src_u[(x & ~1) + isNV21] - 128;
-            signed v = (signed)src_u[(x & ~1) + !isNV21] - 128;
-
-            signed u_b = u * _b_u;
-            signed u_g = u * _neg_g_u;
-            signed v_g = v * _neg_g_v;
-            signed v_r = v * _r_v;
-
-            signed tmp1 = y1 * _y + 128;
-            signed b1 = (tmp1 + u_b) / 256;
-            signed g1 = (tmp1 + v_g + u_g) / 256;
-            signed r1 = (tmp1 + v_r) / 256;
-
-            signed tmp2 = y2 * _y + 128;
-            signed b2 = (tmp2 + u_b) / 256;
-            signed g2 = (tmp2 + v_g + u_g) / 256;
-            signed r2 = (tmp2 + v_r) / 256;
-
-            uint32_t rgb1 =
-                ((kAdjustedClip[r1] >> 3) << 11)
-                | ((kAdjustedClip[g1] >> 2) << 5)
-                | (kAdjustedClip[b1] >> 3);
-
-            uint32_t rgb2 =
-                ((kAdjustedClip[r2] >> 3) << 11)
-                | ((kAdjustedClip[g2] >> 2) << 5)
-                | (kAdjustedClip[b2] >> 3);
-
-            if (x + 1 < src.cropWidth()) {
-                *(uint32_t *)(&dst_ptr[x]) = (rgb2 << 16) | rgb1;
-            } else {
-                dst_ptr[x] = rgb1;
-            }
-        }
-
-        src_y += row_inc;
-
-        if (y & 1) {
-            src_u += row_inc;
-        }
-
-        dst_ptr = (uint16_t*)((uint8_t*)dst_ptr + dst.mStride);
-    }
-
-    return OK;
-}
-
 uint8_t *ColorConverter::initClip() {
     if (mClip == NULL) {
         mClip = new uint8_t[CLIP_RANGE_MAX_8BIT - CLIP_RANGE_MIN_8BIT + 1];
diff --git a/media/libstagefright/httplive/fuzzer/Android.bp b/media/libstagefright/httplive/fuzzer/Android.bp
index 85fd8b7..dd49714 100644
--- a/media/libstagefright/httplive/fuzzer/Android.bp
+++ b/media/libstagefright/httplive/fuzzer/Android.bp
@@ -62,5 +62,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libstagefright_httplive",
+        vector: "remote",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/libstagefright/include/media/stagefright/ColorConverter.h b/media/libstagefright/include/media/stagefright/ColorConverter.h
index da3267e..e8b89c7 100644
--- a/media/libstagefright/include/media/stagefright/ColorConverter.h
+++ b/media/libstagefright/include/media/stagefright/ColorConverter.h
@@ -23,7 +23,10 @@
 #include <stdint.h>
 #include <utils/Errors.h>
 
+#include <optional>
+
 #include <OMX_Video.h>
+#include <media/hardware/VideoAPI.h>
 
 namespace android {
 
@@ -35,6 +38,8 @@
 
     bool isDstRGB() const;
 
+    void setSrcMediaImage2(MediaImage2 img);
+
     void setSrcColorSpace(uint32_t standard, uint32_t range, uint32_t transfer);
 
     status_t convert(
@@ -49,18 +54,91 @@
 
     struct Coeffs; // matrix coefficients
 
-private:
     struct ColorSpace {
         uint32_t mStandard;
         uint32_t mRange;
         uint32_t mTransfer;
 
-        bool isBt2020() const;
-
+        bool isLimitedRange() const;
         // libyuv helper methods
-        bool isH420() const;
-        bool isI420() const;
-        bool isJ420() const;
+        // BT.2020 limited range
+        bool isBt2020() const;
+        // BT.2020 full range
+        bool isBtV2020() const;
+        // 709 limited range
+        bool isH709() const;
+        // 709 full range
+        bool isF709() const;
+        // 601 limited range
+        bool isI601() const;
+        // 601 full range
+        // also called "JPEG" in libyuv
+        bool isJ601() const;
+    };
+
+private:
+
+    typedef enum : uint8_t {
+        ImageLayoutUnknown = 0x0,
+        ImageLayout420SemiPlanar = 0x1,
+        ImageLayout420Planar = 0x2
+    } Layout_t;
+
+    typedef enum : uint8_t {
+        ImageSamplingUnknown = 0x0,
+        ImageSamplingYUV420 = 0x1,
+    } Sampling_t;
+
+    // This is the actual usable bit depth.
+    typedef enum : uint8_t {
+        ImageBitDepthInvalid = 0x0,
+        ImageBitDepth8 = 0x1,
+        ImageBitDepth10 = 0x2,
+        ImageBitDepth12 = 0x3,
+        ImageBitDepth16 = 0x4
+    } BitDepth_t;
+
+    struct BitmapParams;
+
+
+    class Image {
+    public:
+        Image(const MediaImage2& img);
+        virtual ~Image() {}
+
+        const MediaImage2 getMediaImage2() const {
+            return mImage;
+        }
+
+        Layout_t getLayout() const {
+            return mLayout;
+        }
+        Sampling_t getSampling() const {
+            return mSampling;
+        }
+        BitDepth_t getBitDepth() const {
+            return mBitDepth;
+        }
+
+        // Returns the plane offsets and strides for this image,
+        // after accounting for the source crop offsets.
+        status_t getYUVPlaneOffsetAndStride(
+                const BitmapParams &src,
+                uint32_t *y_offset,
+                uint32_t *u_offset,
+                uint32_t *v_offset,
+                size_t *y_stride,
+                size_t *u_stride,
+                size_t *v_stride
+                ) const;
+
+        bool isNV21() const;
+
+    private:
+        MediaImage2 mImage;
+        Layout_t mLayout;
+        Sampling_t mSampling;
+        BitDepth_t mBitDepth;
     };
 
     struct BitmapParams {
@@ -84,6 +162,7 @@
     };
 
     OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat;
+    std::optional<Image> mSrcImage;
     ColorSpace mSrcColorSpace;
     uint8_t *mClip;
     uint16_t *mClip10Bit;
@@ -91,14 +170,30 @@
     uint8_t *initClip();
     uint16_t *initClip10Bit();
 
+    // Check whether the source MediaImage2 (e.g. from YUV420Flexible) can be converted.
+    bool isValidForMediaImage2() const;
+
+    // Get the source plane offsets and strides for the configured format.
+    status_t getSrcYUVPlaneOffsetAndStride(
+            const BitmapParams &src,
+            uint32_t *y_offset,
+            uint32_t *u_offset,
+            uint32_t *v_offset,
+            size_t *y_stride,
+            size_t *u_stride,
+            size_t *v_stride) const;
+
+    status_t convertYUVMediaImage(
+            const BitmapParams &src, const BitmapParams &dst);
+
     // returns the YUV2RGB matrix coefficients according to the color aspects and bit depth
     const struct Coeffs *getMatrix() const;
 
     status_t convertCbYCrY(
             const BitmapParams &src, const BitmapParams &dst);
 
-    status_t convertYUV420Planar(
-            const BitmapParams &src, const BitmapParams &dst);
 
     status_t convertYUV420PlanarUseLibYUV(
             const BitmapParams &src, const BitmapParams &dst);
@@ -115,19 +210,6 @@
     status_t convertYUV420Planar16ToRGB(
             const BitmapParams &src, const BitmapParams &dst);
 
-    status_t convertQCOMYUV420SemiPlanar(
-            const BitmapParams &src, const BitmapParams &dst);
-
-    status_t convertYUV420SemiPlanar(
-            const BitmapParams &src, const BitmapParams &dst);
-
-    status_t convertYUV420SemiPlanarBase(
-            const BitmapParams &src, const BitmapParams &dst,
-            size_t row_inc, bool isNV21 = false);
-
-    status_t convertTIYUV420PackedSemiPlanar(
-            const BitmapParams &src, const BitmapParams &dst);
-
     status_t convertYUVP010(
                 const BitmapParams &src, const BitmapParams &dst);
 
@@ -135,6 +217,7 @@
                 const BitmapParams &src, const BitmapParams &dst);
 
     ColorConverter(const ColorConverter &);
     ColorConverter &operator=(const ColorConverter &);
 };
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index ad04b1f..77394d5 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -39,6 +39,7 @@
 namespace android {
 namespace media {
 class MediaResourceParcel;
+class ClientConfigParcel;
 } // media
 } // android
 } // aidl
@@ -71,6 +72,7 @@
 
 using hardware::cas::native::V1_0::IDescrambler;
 using aidl::android::media::MediaResourceParcel;
+using aidl::android::media::ClientConfigParcel;
 
 struct MediaCodec : public AHandler {
     enum Domain {
@@ -442,6 +444,7 @@
 
     Mutex mMetricsLock;
     mediametrics_handle_t mMetricsHandle = 0;
+    bool mMetricsToUpload = false;
     nsecs_t mLifetimeStartNs = 0;
     void initMediametrics();
     void updateMediametrics();
@@ -453,6 +456,8 @@
     void updateTunnelPeek(const sp<AMessage> &msg);
     void updatePlaybackDuration(const sp<AMessage> &msg);
 
+    inline void initClientConfigParcel(ClientConfigParcel& clientConfig);
+
     sp<AMessage> mOutputFormat;
     sp<AMessage> mInputFormat;
     sp<AMessage> mCallback;
@@ -705,6 +710,8 @@
     };
 
     Histogram mLatencyHist;
+    // A unique ID for the codec, used by metrics.
+    uint64_t mCodecId = 0;
 
     std::function<sp<CodecBase>(const AString &, const char *)> mGetCodecBase;
     std::function<status_t(const AString &, sp<MediaCodecInfo> *)> mGetCodecInfo;
diff --git a/media/libstagefright/tests/HEVC/Android.bp b/media/libstagefright/tests/HEVC/Android.bp
index 7a0ba52..7f2ff12 100644
--- a/media/libstagefright/tests/HEVC/Android.bp
+++ b/media/libstagefright/tests/HEVC/Android.bp
@@ -56,4 +56,7 @@
             "signed-integer-overflow",
         ],
     },
+    data: [
+        ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/HEVCUtils/HEVCUtilsUnitTest.zip?unzip=true",
+    ],
 }
diff --git a/media/libstagefright/tests/extractorFactory/Android.bp b/media/libstagefright/tests/extractorFactory/Android.bp
index a067284..20ebe44 100644
--- a/media/libstagefright/tests/extractorFactory/Android.bp
+++ b/media/libstagefright/tests/extractorFactory/Android.bp
@@ -66,4 +66,7 @@
             "signed-integer-overflow",
         ],
     },
+    data: [
+        ":https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor.zip?unzip=true",
+    ],
 }
diff --git a/media/libstagefright/timedtext/test/Android.bp b/media/libstagefright/timedtext/test/Android.bp
index ae97c50..953da79 100644
--- a/media/libstagefright/timedtext/test/Android.bp
+++ b/media/libstagefright/timedtext/test/Android.bp
@@ -62,4 +62,7 @@
             "signed-integer-overflow",
         ],
     },
+    data: [
+        ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/timedtext/test/TimedTextUnitTest.zip?unzip=true",
+    ],
 }
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
index cdbd745..7d1442b 100644
--- a/media/libstagefright/webm/WebmFrameThread.cpp
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -336,7 +336,6 @@
 }
 
 void WebmFrameMediaSourceThread::run() {
-    int32_t count = 0;
     int64_t timestampUs = 0xdeadbeef;
     int64_t lastTimestampUs = 0; // Previous sample time stamp
     int64_t lastDurationUs = 0; // Previous sample duration
@@ -367,7 +366,6 @@
             buffer = NULL;
             continue;
         }
-        ++count;
 
         // adjust time-stamps after pause/resume
         if (mResumed) {
diff --git a/media/module/codecserviceregistrant/fuzzer/Android.bp b/media/module/codecserviceregistrant/fuzzer/Android.bp
index 0b9affd..1cb8c2b 100644
--- a/media/module/codecserviceregistrant/fuzzer/Android.bp
+++ b/media/module/codecserviceregistrant/fuzzer/Android.bp
@@ -41,5 +41,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libmedia_codecserviceregistrant",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/module/foundation/AMessage.cpp b/media/module/foundation/AMessage.cpp
index b61dc47..7cc7c41 100644
--- a/media/module/foundation/AMessage.cpp
+++ b/media/module/foundation/AMessage.cpp
@@ -961,6 +961,11 @@
     return mItems.size();
 }
 
+/* static */
+size_t AMessage::maxAllowedEntries() {
+    return kMaxNumItems;
+}
+
 const char *AMessage::getEntryNameAt(size_t index, Type *type) const {
     if (index >= mItems.size()) {
         *type = kTypeInt32;
diff --git a/media/module/foundation/include/media/stagefright/foundation/AMessage.h b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
index 6f73597..7594565 100644
--- a/media/module/foundation/include/media/stagefright/foundation/AMessage.h
+++ b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
@@ -199,6 +199,7 @@
     };
 
     size_t countEntries() const;
+    static size_t maxAllowedEntries();
     const char *getEntryNameAt(size_t index, Type *type) const;
 
     /**
diff --git a/media/module/foundation/tests/AMessage_test.cpp b/media/module/foundation/tests/AMessage_test.cpp
index e08ed77..08062e5 100644
--- a/media/module/foundation/tests/AMessage_test.cpp
+++ b/media/module/foundation/tests/AMessage_test.cpp
@@ -53,6 +53,28 @@
     MOCK_METHOD(void, onMessageReceived, (const sp<AMessage>&), (override));
 };
 
+TEST(AMessage_tests, countsAndLimits) {
+  sp<AMessage> m1 = new AMessage();
+
+  // clear, countEntries, maxAllowedEntries
+
+  EXPECT_EQ(0, m1->countEntries());
+
+  m1->setInt32("smaller", INT32_MAX - 2);
+  m1->setInt64("big", INT64_MAX);
+  m1->setString("bigBallOfString", "whatever");
+  EXPECT_EQ(3, m1->countEntries());
+
+  m1->clear();
+  EXPECT_EQ(0, m1->countEntries());
+
+  EXPECT_TRUE(m1->maxAllowedEntries() > 0);
+  EXPECT_TRUE(AMessage::maxAllowedEntries() > 0);
+
+  // static function, make sure we get a consistent answer
+  EXPECT_EQ(m1->maxAllowedEntries(), AMessage::maxAllowedEntries());
+}
+
 TEST(AMessage_tests, settersAndGetters) {
   sp<AMessage> m1 = new AMessage();
 
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index 13f16b1..6296351 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -30,10 +30,64 @@
 static constexpr int32_t INVALID_ADJ = -10000;
 static constexpr int32_t NATIVE_ADJ = -1000;
 
+/* Must match ActivityManager::PROCESS_STATE_NONEXISTENT
+ * (declared in <binder/ActivityManager.h> as
+ * ActivityManager::PROCESS_STATE_NONEXISTENT).
+ */
+static constexpr int32_t PROCESS_STATE_NONEXISTENT = 20;
+
 ProcessInfo::ProcessInfo() {}
 
+/*
+ * Checks whether the processes with the given pids still exist.
+ *
+ * Arguments:
+ *  - pids (input): list of pids to check.
+ *  - existent (output): boolean vector with one entry per pid.
+ *
+ * On successful return:
+ *     - existent[i] == true means pids[i] is still active, and
+ *     - existent[i] == false means pids[i] has already terminated (nonexistent).
+ * On unsuccessful return, the output argument existent is invalid.
+ */
+bool ProcessInfo::checkProcessExistent(const std::vector<int32_t>& pids,
+                                       std::vector<bool>* existent) {
+    sp<IBinder> binder = defaultServiceManager()->waitForService(String16("processinfo"));
+    sp<IProcessInfoService> service = interface_cast<IProcessInfoService>(binder);
+
+    // Get the process state of the applications managed/tracked by ActivityManagerService.
+    // Native processes do not need to be checked here; if the state of a native
+    // process is ever needed, it can be obtained from mOverrideMap.
+    size_t count = pids.size();
+    std::vector<int32_t> states(count, PROCESS_STATE_NONEXISTENT);
+    status_t err = service->getProcessStatesFromPids(count,
+                                                     const_cast<int32_t*>(pids.data()),
+                                                     states.data());
+    if (err != OK) {
+        ALOGE("%s: IProcessInfoService::getProcessStatesFromPids failed with %d",
+              __func__, err);
+        return false;
+    }
+
+    existent->clear();
+    for (size_t index = 0; index < states.size(); index++) {
+        // If this process is not tracked by ActivityManagerService, look for overrides.
+        if (states[index] == PROCESS_STATE_NONEXISTENT) {
+            std::scoped_lock lock{mOverrideLock};
+            std::map<int, ProcessInfoOverride>::iterator it =
+                mOverrideMap.find(pids[index]);
+            if (it != mOverrideMap.end()) {
+                states[index] = it->second.procState;
+            }
+        }
+        existent->push_back(states[index] != PROCESS_STATE_NONEXISTENT);
+    }
+
+    return true;
+}
+
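A hypothetical caller-side sketch (not part of the patch) of how a service might use checkProcessExistent() to drop state for clients whose processes have exited; the function name and the resource-release step are assumptions for illustration:

void reclaimDeadClients(const sp<ProcessInfo> &info, const std::vector<int32_t> &pids) {
    std::vector<bool> existent;
    if (!info->checkProcessExistent(pids, &existent)) {
        return;  // 'existent' is not valid when the call fails
    }
    for (size_t i = 0; i < existent.size(); ++i) {
        if (!existent[i]) {
            // pids[i] has terminated; release any resources held on its behalf.
        }
    }
}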
 bool ProcessInfo::getPriority(int pid, int* priority) {
-    sp<IBinder> binder = defaultServiceManager()->getService(String16("processinfo"));
+    sp<IBinder> binder = defaultServiceManager()->waitForService(String16("processinfo"));
     sp<IProcessInfoService> service = interface_cast<IProcessInfoService>(binder);
 
     size_t length = 1;
diff --git a/media/utils/include/mediautils/ProcessInfo.h b/media/utils/include/mediautils/ProcessInfo.h
index 9afa3df..c27c939 100644
--- a/media/utils/include/mediautils/ProcessInfo.h
+++ b/media/utils/include/mediautils/ProcessInfo.h
@@ -33,6 +33,8 @@
     virtual bool isPidUidTrusted(int pid, int uid);
     virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
     virtual void removeProcessInfoOverride(int pid);
+    bool checkProcessExistent(const std::vector<int32_t>& pids,
+                              std::vector<bool>* existent) override;
 
 protected:
     virtual ~ProcessInfo();
diff --git a/media/utils/include/mediautils/ProcessInfoInterface.h b/media/utils/include/mediautils/ProcessInfoInterface.h
index b6529fc..e3384ba 100644
--- a/media/utils/include/mediautils/ProcessInfoInterface.h
+++ b/media/utils/include/mediautils/ProcessInfoInterface.h
@@ -17,16 +17,73 @@
 #ifndef PROCESS_INFO_INTERFACE_H_
 #define PROCESS_INFO_INTERFACE_H_
 
+#include <vector>
 #include <utils/RefBase.h>
 
 namespace android {
 
 struct ProcessInfoInterface : public RefBase {
+    /*
+     * Gets the priority (oom score) of the process with the given pid.
+     *
+     * @param[in] pid pid of the process.
+     * @param[out] priority oom score of the process.
+     *
+     * @return true on success and false otherwise.
+     */
     virtual bool getPriority(int pid, int* priority) = 0;
+    /*
+     * Check whether the given pid is trusted or not.
+     *
+     * @param[in] pid pid of the process.
+     *
+     * @return true for trusted process and false otherwise.
+     */
     virtual bool isPidTrusted(int pid) = 0;
+    /*
+     * Check whether the given pid and uid combination is trusted.
+     *
+     * @param[in] pid pid of the process.
+     * @param[in] uid uid of the process.
+     *
+     * @return true for trusted process and false otherwise.
+     */
     virtual bool isPidUidTrusted(int pid, int uid) = 0;
+    /*
+     * Override process state and oom score of the pid.
+     *
+     * @param[in] pid pid of the process.
+     * @param[in] procState new state of the process to override with.
+     * @param[in] oomScore new oom score of the process to override with.
+     *
+     * @return true upon success and false otherwise.
+     */
     virtual bool overrideProcessInfo(int pid, int procState, int oomScore) = 0;
+    /*
+     * Remove the override info of the given process.
+     *
+     * @param[in] pid pid of the process.
+     */
     virtual void removeProcessInfoOverride(int pid) = 0;
+    /*
+     * Checks whether the processes with the given pids still exist.
+     *
+     * @param[in] pids list of pids to check.
+     * @param[out] existent boolean vector with one entry per pid.
+     *
+     * @return true on success and false otherwise.
+     * On successful return:
+     *     - existent[i] == true means pids[i] is still active, and
+     *     - existent[i] == false means pids[i] has already terminated (nonexistent).
+     * On unsuccessful return, the output argument existent is invalid.
+     */
+    virtual bool checkProcessExistent(const std::vector<int32_t>& pids,
+                                      std::vector<bool>* existent) {
+        // A default implementation.
+        (void)pids;
+        (void)existent;
+        return false;
+    }
 
 protected:
     virtual ~ProcessInfoInterface() {}
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index e113efb..d9fdc3e 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -2901,6 +2901,9 @@
     if (!trackActive) {
         removeTrack_l(track);
     } else if (track->isFastTrack() || track->isOffloaded() || track->isDirect()) {
+        if (track->isPausePending()) {
+            track->pauseAck();
+        }
         track->mState = TrackBase::STOPPING_1;
     }
 
@@ -6483,9 +6486,13 @@
         if (right > GAIN_FLOAT_UNITY) {
             right = GAIN_FLOAT_UNITY;
         }
-
-        left *= v * mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
-        right *= v * mMasterBalanceRight;
+        left *= v;
+        right *= v;
+        if (mAudioFlinger->getMode() != AUDIO_MODE_IN_COMMUNICATION
+                || audio_channel_count_from_out_mask(mChannelMask) > 1) {
+            left *= mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
+            right *= mMasterBalanceRight;
+        }
     }
 
     track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index dd2d80a..21252d6 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -334,6 +334,16 @@
     return binder::Status::ok();
 }
 
+binder::Status SoundDoseManager::SoundDose::isSoundDoseHalSupported(bool* value) {
+    ALOGV("%s", __func__);
+    *value = false;
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        *value = soundDoseManager->isSoundDoseHalSupported();
+    }
+    return binder::Status::ok();
+}
+
 void SoundDoseManager::updateAttenuation(float attenuationDB, audio_devices_t deviceType) {
     std::lock_guard _l(mLock);
     ALOGV("%s: updating MEL processor attenuation for device type %d to %f",
@@ -397,6 +407,19 @@
     return mComputeCsdOnAllDevices;
 }
 
+bool SoundDoseManager::isSoundDoseHalSupported() const {
+    if (mDisableCsd) {
+        return false;
+    }
+
+    std::shared_ptr<ISoundDose> halSoundDose;
+    getHalSoundDose(&halSoundDose);
+    return halSoundDose != nullptr;
+}
+
 void SoundDoseManager::getHalSoundDose(std::shared_ptr<ISoundDose>* halSoundDose) const {
     std::lock_guard _l(mLock);
     *halSoundDose = mHalSoundDose;
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
index d7a686a..5081ce4 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.h
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -142,6 +142,7 @@
         binder::Status getCsd(float* value) override;
         binder::Status forceUseFrameworkMel(bool useFrameworkMel) override;
         binder::Status forceComputeCsdOnAllDevices(bool computeCsdOnAllDevices) override;
+        binder::Status isSoundDoseHalSupported(bool* value) override;
 
         wp<SoundDoseManager> mSoundDoseManager;
         const sp<media::ISoundDoseCallback> mSoundDoseCallback;
@@ -172,6 +173,7 @@
     void disableCsd();
     void setUseFrameworkMel(bool useFrameworkMel);
     void setComputeCsdOnAllDevices(bool computeCsdOnAllDevices);
+    bool isSoundDoseHalSupported() const;
     /** Returns the HAL sound dose interface or null if internal MEL computation is used. */
     void getHalSoundDose(std::shared_ptr<ISoundDose>* halSoundDose) const;
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 52a000f..876911d 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -301,6 +301,10 @@
         return mActiveClients;
     }
 
+    // Returns the number of active clients when they all share the same exclusive
+    // preferred device, and 0 otherwise.
+    size_t sameExclusivePreferredDevicesCount() const;
+
     bool useHwGain() const
     {
         return !devices().isEmpty() ? devices().itemAt(0)->hasGainController() : false;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index a46186b..7ee6566 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -238,6 +238,27 @@
     return clients;
 }
 
+size_t AudioOutputDescriptor::sameExclusivePreferredDevicesCount() const
+{
+    audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
+    size_t count = 0;
+    for (const auto &client : getClientIterable()) {
+        if (client->active()) {
+            if (!(client->hasPreferredDevice() &&
+                    client->isPreferredDeviceForExclusiveUse())) {
+                return 0;
+            }
+            if (deviceId == AUDIO_PORT_HANDLE_NONE) {
+                deviceId = client->preferredDeviceId();
+            } else if (deviceId != client->preferredDeviceId()) {
+                return 0;
+            }
+            count++;
+        }
+    }
+    return count;
+}
+
 bool AudioOutputDescriptor::isAnyActive(VolumeSource volumeSourceToIgnore) const
 {
     return std::find_if(begin(mActiveClients), end(mActiveClients),
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index a6821c7..c7e2103 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -166,8 +166,12 @@
         //   - cannot route from voice call RX OR
         //   - audio HAL version is < 3.0 and TX device is on the primary HW module
         if (getPhoneState() == AUDIO_MODE_IN_CALL) {
-            audio_devices_t txDevice = getDeviceForInputSource(
-                    AUDIO_SOURCE_VOICE_COMMUNICATION)->type();
+            audio_devices_t txDevice = AUDIO_DEVICE_NONE;
+            sp<DeviceDescriptor> txDeviceDesc =
+                    getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
+            if (txDeviceDesc != nullptr) {
+                txDevice = txDeviceDesc->type();
+            }
             sp<AudioOutputDescriptor> primaryOutput = outputs.getPrimaryOutput();
             LOG_ALWAYS_FATAL_IF(primaryOutput == nullptr, "Primary output not found");
             DeviceVector availPrimaryInputDevices =
@@ -594,22 +598,26 @@
             }
         }
         switch (commDeviceType) {
-        case AUDIO_DEVICE_OUT_BLE_HEADSET:
-            device = availableDevices.getDevice(
-                    AUDIO_DEVICE_IN_BLE_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
-            break;
         case AUDIO_DEVICE_OUT_SPEAKER:
             device = availableDevices.getFirstExistingDevice({
                     AUDIO_DEVICE_IN_BACK_MIC, AUDIO_DEVICE_IN_BUILTIN_MIC,
                     AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_USB_HEADSET});
             break;
+        case AUDIO_DEVICE_OUT_BLE_HEADSET:
+            device = availableDevices.getDevice(
+                    AUDIO_DEVICE_IN_BLE_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
+            if (device != nullptr) {
+                break;
+            }
+            ALOGE("%s LE Audio selected for communication but input device not available",
+                    __func__);
+            FALLTHROUGH_INTENDED;
         default:    // FORCE_NONE
             device = availableDevices.getFirstExistingDevice({
                     AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
                     AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BLUETOOTH_BLE,
                     AUDIO_DEVICE_IN_BUILTIN_MIC});
             break;
-
         }
         break;
 
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 22e4686..42f0d25 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -671,7 +671,10 @@
 
     audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
     auto txSourceDevice = mEngine->getInputDeviceForAttributes(attr);
-    ALOG_ASSERT(txSourceDevice != 0, "%s() input selected device not available", __func__);
+    if (txSourceDevice == nullptr) {
+        ALOGE("%s() selected input device not available", __func__);
+        return INVALID_OPERATION;
+    }
 
     ALOGV("%s device rxDevice %s txDevice %s", __func__,
           rxDevices.itemAt(0)->toString().c_str(), txSourceDevice->toString().c_str());
@@ -2225,8 +2228,7 @@
     outputDesc->setClientActive(client, true);
 
     if (client->hasPreferredDevice(true)) {
-        if (outputDesc->clientsList(true /*activeOnly*/).size() == 1 &&
-                client->isPreferredDeviceForExclusiveUse()) {
+        if (outputDesc->sameExclusivePreferredDevicesCount() > 0) {
             // Preferred device may be exclusive, use only if no other active clients on this output
             devices = DeviceVector(
                         mAvailableOutputDevices.getDeviceFromId(client->preferredDeviceId()));
@@ -2458,7 +2460,8 @@
             }
         }
         bool forceDeviceUpdate = false;
-        if (client->hasPreferredDevice(true)) {
+        if (client->hasPreferredDevice(true) &&
+                outputDesc->sameExclusivePreferredDevicesCount() < 2) {
             checkStrategyRoute(client->strategy(), AUDIO_IO_HANDLE_NONE);
             forceDeviceUpdate = true;
         }
@@ -5327,7 +5330,11 @@
     *session = (audio_session_t)mpClientInterface->newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
     *ioHandle = (audio_io_handle_t)mpClientInterface->newAudioUniqueId(AUDIO_UNIQUE_ID_USE_INPUT);
     audio_attributes_t attr = { .source = AUDIO_SOURCE_HOTWORD };
-    *device = mEngine->getInputDeviceForAttributes(attr)->type();
+    sp<DeviceDescriptor> deviceDesc = mEngine->getInputDeviceForAttributes(attr);
+    if (deviceDesc == nullptr) {
+        return INVALID_OPERATION;
+    }
+    *device = deviceDesc->type();
 
     return mSoundTriggerSessions.acquireSession(*session, *ioHandle);
 }
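
The AudioPolicyManager hunks above replace an assertion or an unchecked dereference on the device descriptor returned by the engine with an explicit null check that propagates INVALID_OPERATION. A small sketch of that defensive pattern, using simplified stand-in types rather than the real policy-manager classes:

// Illustrative sketch only; DeviceDescriptor and the status codes are stand-ins.
#include <memory>

struct DeviceDescriptor {
    int type() const { return 0x4; }           // some audio_devices_t-like value
};

enum Status { OK = 0, INVALID_OPERATION = -38 };

// The engine may legitimately return null (no suitable input device connected),
// so the caller reports an error instead of asserting or dereferencing.
static Status resolveInputDeviceType(const std::shared_ptr<DeviceDescriptor>& device,
                                     int* outType) {
    if (device == nullptr) {
        return INVALID_OPERATION;
    }
    *outType = device->type();
    return OK;
}
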
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 35411f9..b559318 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -301,7 +301,8 @@
     audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
 
-    if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
+    if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT
+          && stream != AUDIO_STREAM_ASSISTANT && stream != AUDIO_STREAM_CALL_ASSISTANT) {
         *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
             legacy2aidl_audio_io_handle_t_int32_t(AUDIO_IO_HANDLE_NONE));
         return Status::ok();
diff --git a/services/audiopolicy/service/SpatializerPoseController.cpp b/services/audiopolicy/service/SpatializerPoseController.cpp
index d325509..874bde4 100644
--- a/services/audiopolicy/service/SpatializerPoseController.cpp
+++ b/services/audiopolicy/service/SpatializerPoseController.cpp
@@ -108,8 +108,8 @@
               .freshnessTimeout = Ticks(kFreshnessTimeout).count(),
               .predictionDuration = []() -> float {
                   const int duration_ms =
-                          property_get_int32("audio.spatializer.prediction_duration_ms", 0);
-                  if (duration_ms > 0) {
+                          property_get_int32("audio.spatializer.prediction_duration_ms", -1);
+                  if (duration_ms >= 0) {
                       return duration_ms * 1'000'000LL;
                   } else {
                       return Ticks(kPredictionDuration).count();
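
The SpatializerPoseController change flips the property default from 0 to -1 and the test from > 0 to >= 0, so an explicitly configured prediction duration of 0 ms is honored while an unset property still falls back to the compiled-in default. A standalone sketch of that distinction; fakeGetInt32 is a hypothetical stub, not the libcutils property call:

// Illustrative sketch only; fakeGetInt32 stands in for a system-property read.
#include <cstdint>
#include <cstdio>

static int32_t fakeGetInt32(const int32_t* storedValue, int32_t defaultValue) {
    return storedValue != nullptr ? *storedValue : defaultValue;   // unset -> default
}

static int64_t predictionDurationNs(const int32_t* propertyMs, int64_t fallbackNs) {
    const int durationMs = fakeGetInt32(propertyMs, /*defaultValue=*/-1);
    if (durationMs >= 0) {               // an explicit 0 now disables prediction
        return durationMs * 1'000'000LL; // ms -> ns
    }
    return fallbackNs;                   // property unset: keep the default
}

int main() {
    const int32_t zero = 0;
    std::printf("unset -> %lld ns\n", (long long) predictionDurationNs(nullptr, 50'000'000));
    std::printf("zero  -> %lld ns\n", (long long) predictionDurationNs(&zero, 50'000'000));
    return 0;
}
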
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 1c922ce..e818759 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -120,6 +120,7 @@
     ],
 
     shared_libs: [
+        "libactivitymanager_aidl",
         "libbase",
         "libdl",
         "libexif",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 06adb05..1564ff3 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -69,6 +69,8 @@
 #include <private/android_filesystem_config.h>
 #include <system/camera_vendor_tags.h>
 #include <system/camera_metadata.h>
+#include <binder/IServiceManager.h>
+#include <binder/IActivityManager.h>
 
 #include <system/camera.h>
 
@@ -137,6 +139,8 @@
         "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
 static const String16
         sCameraInjectExternalCameraPermission("android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+// Constant integer for FGS Logging, used to denote the API type for logger
+static const int LOG_FGS_CAMERA_API = 1;
 const char *sFileName = "lastOpenSessionDumpFile";
 static constexpr int32_t kSystemNativeClientScore = resource_policy::PERCEPTIBLE_APP_ADJ;
 static constexpr int32_t kSystemNativeClientState =
@@ -1705,6 +1709,15 @@
     }
 
     *device = client;
+
+    const auto& mActivityManager = getActivityManager();
+    if (mActivityManager) {
+        mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
+            CameraThreadState::getCallingUid(),
+            CameraThreadState::getCallingPid());
+    }
+
     return ret;
 }
 
@@ -1851,6 +1864,13 @@
             ALOGE("%s: Error while creating the file: %s", __FUNCTION__, sFileName);
         }
     }
+    const auto& mActivityManager = getActivityManager();
+    if (mActivityManager) {
+        mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
+            CameraThreadState::getCallingUid(),
+            CameraThreadState::getCallingPid());
+    }
     return ret;
 }
 
@@ -3517,6 +3537,13 @@
     // client shouldn't be able to call into us anymore
     mClientPid = 0;
 
+    const auto& mActivityManager = getActivityManager();
+    if (mActivityManager) {
+        mActivityManager->logFgsApiEnd(LOG_FGS_CAMERA_API,
+            CameraThreadState::getCallingUid(),
+            CameraThreadState::getCallingPid());
+    }
+
     return res;
 }
 
@@ -3829,8 +3856,7 @@
 // ----------------------------------------------------------------------------
 
 void CameraService::Client::notifyError(int32_t errorCode,
-        const CaptureResultExtras& resultExtras) {
-    (void) resultExtras;
+        [[maybe_unused]] const CaptureResultExtras& resultExtras) {
     if (mRemoteCallback != NULL) {
         int32_t api1ErrorCode = CAMERA_ERROR_RELEASED;
         if (errorCode == hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISABLED) {
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 502ffb8..d8b14d7 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -29,6 +29,8 @@
 #include <binder/ActivityManager.h>
 #include <binder/AppOpsManager.h>
 #include <binder/BinderService.h>
+#include <binder/IServiceManager.h>
+#include <binder/IActivityManager.h>
 #include <binder/IAppOpsCallback.h>
 #include <binder/IUidObserver.h>
 #include <hardware/camera.h>
@@ -596,6 +598,20 @@
 
 private:
 
+    // TODO: b/263304156 update this to make use of a death callback for more
+    // robust/fault tolerant logging
+    static const sp<IActivityManager>& getActivityManager() {
+        static const char* kActivityService = "activity";
+        static const auto activityManager = []() -> sp<IActivityManager> {
+            const sp<IServiceManager> sm(defaultServiceManager());
+            if (sm != nullptr) {
+                 return interface_cast<IActivityManager>(sm->checkService(String16(kActivityService)));
+            }
+            return nullptr;
+        }();
+        return activityManager;
+    }
+
     /**
      * Typesafe version of device status, containing both the HAL-layer and the service interface-
      * layer values.
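
getActivityManager() above caches the binder handle in a function-local static initialized by an immediately invoked lambda, so the service-manager lookup runs at most once and later calls reuse the cached result (the TODO notes the downside: without a death callback a stale handle is never refreshed). A generic sketch of that idiom with illustrative types, not the Binder API:

// Illustrative sketch only; Service/lookupService are stand-ins, not Binder APIs.
#include <memory>
#include <string>

struct Service {
    std::string name;
};

static std::shared_ptr<Service> lookupService(const std::string& name) {
    // Stand-in for a service-manager query that may return null.
    return std::make_shared<Service>(Service{name});
}

static const std::shared_ptr<Service>& getCachedService() {
    // The lambda runs exactly once, on the first call; the initialization of a
    // function-local static is thread-safe since C++11. Later calls return the
    // cached handle without another lookup.
    static const auto service = []() -> std::shared_ptr<Service> {
        return lookupService("activity");
    }();
    return service;
}
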
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index ee2ea6c..d71462f 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -1363,21 +1363,18 @@
             || l.mParameters.state == Parameters::VIDEO_SNAPSHOT);
 }
 
-void Camera2Client::releaseRecordingFrame(const sp<IMemory>& mem) {
-    (void)mem;
+void Camera2Client::releaseRecordingFrame([[maybe_unused]] const sp<IMemory>& mem) {
     ATRACE_CALL();
     ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
 }
 
-void Camera2Client::releaseRecordingFrameHandle(native_handle_t *handle) {
-    (void)handle;
+void Camera2Client::releaseRecordingFrameHandle([[maybe_unused]] native_handle_t *handle) {
     ATRACE_CALL();
     ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
 }
 
 void Camera2Client::releaseRecordingFrameHandleBatch(
-        const std::vector<native_handle_t*>& handles) {
-    (void)handles;
+        [[maybe_unused]] const std::vector<native_handle_t*>& handles) {
     ATRACE_CALL();
     ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
 }
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 3f5696d..216c9be 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -61,7 +61,7 @@
         bool systemNativeClient,
         const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
-        int api1CameraId,
+        [[maybe_unused]] int api1CameraId,
         int cameraFacing,
         int sensorOrientation,
         int clientPid,
@@ -81,8 +81,6 @@
             servicePid,
             overrideToPortrait),
     mRemoteCallback(remoteCallback) {
-    // We don't need it for API2 clients, but Camera2ClientBase requires it.
-    (void) api1CameraId;
 }
 
 // Interface used by CameraService
@@ -1073,7 +1071,7 @@
             outputConfiguration.getSensorPixelModesUsed();
     if (SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
             sensorPixelModesUsed, format, width, height, getStaticInfo(cameraIdUsed),
-            /*allowRounding*/ false, &overriddenSensorPixelModesUsed) != OK) {
+            &overriddenSensorPixelModesUsed) != OK) {
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "sensor pixel modes used not valid for deferred stream");
     }
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 0d1d019..29d7e6f 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -324,26 +324,20 @@
     finishCameraStreamingOps();
 }
 
-void CameraOfflineSessionClient::notifyAutoFocus(uint8_t newState, int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoFocus([[maybe_unused]] uint8_t newState,
+                [[maybe_unused]] int triggerId) {
     ALOGV("%s: Autofocus state now %d, last trigger %d",
           __FUNCTION__, newState, triggerId);
 }
 
-void CameraOfflineSessionClient::notifyAutoExposure(uint8_t newState, int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoExposure([[maybe_unused]] uint8_t newState,
+                [[maybe_unused]] int triggerId) {
     ALOGV("%s: Autoexposure state now %d, last trigger %d",
             __FUNCTION__, newState, triggerId);
 }
 
-void CameraOfflineSessionClient::notifyAutoWhitebalance(uint8_t newState, int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoWhitebalance([[maybe_unused]] uint8_t newState,
+                [[maybe_unused]] int triggerId) {
     ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", __FUNCTION__, newState,
             triggerId);
 }
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 0a2819c..f1fc815 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -420,50 +420,38 @@
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyShutter(const CaptureResultExtras& resultExtras,
-                                                   nsecs_t timestamp) {
-    (void)resultExtras;
-    (void)timestamp;
-
+void Camera2ClientBase<TClientBase>::notifyShutter(
+                [[maybe_unused]] const CaptureResultExtras& resultExtras,
+                [[maybe_unused]] nsecs_t timestamp) {
     ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
             __FUNCTION__, resultExtras.requestId, timestamp);
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoFocus(uint8_t newState,
-                                                     int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoFocus([[maybe_unused]] uint8_t newState,
+                                                     [[maybe_unused]] int triggerId) {
     ALOGV("%s: Autofocus state now %d, last trigger %d",
           __FUNCTION__, newState, triggerId);
 
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoExposure(uint8_t newState,
-                                                        int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoExposure([[maybe_unused]] uint8_t newState,
+                                                        [[maybe_unused]] int triggerId) {
     ALOGV("%s: Autoexposure state now %d, last trigger %d",
             __FUNCTION__, newState, triggerId);
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoWhitebalance(uint8_t newState,
-                                                            int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoWhitebalance(
+                [[maybe_unused]] uint8_t newState,
+                [[maybe_unused]] int triggerId) {
     ALOGV("%s: Auto-whitebalance state now %d, last trigger %d",
             __FUNCTION__, newState, triggerId);
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyPrepared(int streamId) {
-    (void)streamId;
-
+void Camera2ClientBase<TClientBase>::notifyPrepared([[maybe_unused]] int streamId) {
     ALOGV("%s: Stream %d now prepared",
             __FUNCTION__, streamId);
 }
@@ -475,9 +463,8 @@
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyRepeatingRequestError(long lastFrameNumber) {
-    (void)lastFrameNumber;
-
+void Camera2ClientBase<TClientBase>::notifyRepeatingRequestError(
+            [[maybe_unused]] long lastFrameNumber) {
     ALOGV("%s: Repeating request was stopped. Last frame number is %ld",
             __FUNCTION__, lastFrameNumber);
 }
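
The camera-side hunks in this change repeatedly replace (void)param; statements with the C++17 [[maybe_unused]] attribute on parameters that are either unused or only consumed by verbose logging that may be compiled out. A minimal sketch of the idiom; LOGV below is a stand-in macro, not ALOGV:

// Illustrative sketch only; LOGV stands in for a verbose-log macro.
#include <cstdio>

#if defined(VERBOSE_LOGGING)
#define LOGV(...) std::printf(__VA_ARGS__)
#else
#define LOGV(...) ((void)0)   // logging disabled: the parameter is otherwise unused
#endif

// [[maybe_unused]] documents the intent on the declaration itself and keeps
// -Wunused-parameter quiet in builds where LOGV expands to nothing, without a
// separate (void) cast in the body.
void notifyPrepared([[maybe_unused]] int streamId) {
    LOGV("Stream %d now prepared\n", streamId);
}
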
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 3b40da9..d2d1e38 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -2074,13 +2074,12 @@
 CameraProviderManager::ProviderInfo::ProviderInfo(
         const std::string &providerName,
         const std::string &providerInstance,
-        CameraProviderManager *manager) :
+        [[maybe_unused]] CameraProviderManager *manager) :
         mProviderName(providerName),
         mProviderInstance(providerInstance),
         mProviderTagid(generateVendorTagId(providerName)),
         mUniqueDeviceCount(0),
         mManager(manager) {
-    (void) mManager;
 }
 
 const std::string& CameraProviderManager::ProviderInfo::getType() const {
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index acf511b..aab6fd5 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -576,17 +576,15 @@
                     hardware::CameraInfo *info) const = 0;
             virtual bool isAPI1Compatible() const = 0;
             virtual status_t dumpState(int fd) = 0;
-            virtual status_t getCameraCharacteristics(bool overrideForPerfClass,
-                    CameraMetadata *characteristics, bool overrideToPortrait) {
-                (void) overrideForPerfClass;
-                (void) characteristics;
-                (void) overrideToPortrait;
+            virtual status_t getCameraCharacteristics(
+                    [[maybe_unused]] bool overrideForPerfClass,
+                    [[maybe_unused]] CameraMetadata *characteristics,
+                    [[maybe_unused]] bool overrideToPortrait) {
                 return INVALID_OPERATION;
             }
-            virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
-                    CameraMetadata *characteristics) const {
-                (void) physicalCameraId;
-                (void) characteristics;
+            virtual status_t getPhysicalCameraCharacteristics(
+                    [[maybe_unused]] const std::string& physicalCameraId,
+                    [[maybe_unused]] CameraMetadata *characteristics) const {
                 return INVALID_OPERATION;
             }
 
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index ae0cf51..a13b937 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -442,8 +442,7 @@
 }
 
 void HidlProviderInfo::serviceDied(uint64_t cookie,
-        const wp<hidl::base::V1_0::IBase>& who) {
-    (void) who;
+        [[maybe_unused]] const wp<hidl::base::V1_0::IBase>& who) {
     ALOGI("Camera provider '%s' has died; removing it", mProviderInstance.c_str());
     if (cookie != mId) {
         ALOGW("%s: Unexpected serviceDied cookie %" PRIu64 ", expected %" PRIu32,
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
index a556200..2ac38d5 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
@@ -451,10 +451,9 @@
     return OK;
 }
 
-void Camera3BufferManager::dump(int fd, const Vector<String16>& args) const {
+void Camera3BufferManager::dump(int fd, [[maybe_unused]] const Vector<String16>& args) const {
     Mutex::Autolock l(mLock);
 
-    (void) args;
     String8 lines;
     lines.appendFormat("      Total stream sets: %zu\n", mStreamSetMap.size());
     for (size_t i = 0; i < mStreamSetMap.size(); i++) {
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index a8e64de..1da0743 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -98,6 +98,9 @@
         mNeedFixupMonochromeTags(false),
         mOverrideForPerfClass(overrideForPerfClass),
         mOverrideToPortrait(overrideToPortrait),
+        mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
+        mComposerOutput(false),
+        mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
         mActivePhysicalId("")
 {
     ATRACE_CALL();
@@ -499,9 +502,8 @@
     return BAD_VALUE;
 }
 
-status_t Camera3Device::dump(int fd, const Vector<String16> &args) {
+status_t Camera3Device::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
     ATRACE_CALL();
-    (void)args;
 
     // Try to lock, but continue in case of failure (to avoid blocking in
     // deadlocks)
@@ -1364,12 +1366,54 @@
     set_camera_metadata_vendor_id(meta, mVendorTagId);
     filteredParams.unlock(meta);
     if (availableSessionKeys.count > 0) {
+        bool rotateAndCropSessionKey = false;
+        bool autoframingSessionKey = false;
         for (size_t i = 0; i < availableSessionKeys.count; i++) {
             camera_metadata_ro_entry entry = params.find(
                     availableSessionKeys.data.i32[i]);
             if (entry.count > 0) {
                 filteredParams.update(entry);
             }
+            if (ANDROID_SCALER_ROTATE_AND_CROP == availableSessionKeys.data.i32[i]) {
+                rotateAndCropSessionKey = true;
+            }
+            if (ANDROID_CONTROL_AUTOFRAMING == availableSessionKeys.data.i32[i]) {
+                autoframingSessionKey = true;
+            }
+        }
+
+        if (rotateAndCropSessionKey || autoframingSessionKey) {
+            sp<CaptureRequest> request = new CaptureRequest();
+            PhysicalCameraSettings settingsList;
+            settingsList.metadata = filteredParams;
+            request->mSettingsList.push_back(settingsList);
+
+            if (rotateAndCropSessionKey) {
+                auto rotateAndCropEntry = filteredParams.find(ANDROID_SCALER_ROTATE_AND_CROP);
+                if (rotateAndCropEntry.count > 0 &&
+                        rotateAndCropEntry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
+                    request->mRotateAndCropAuto = true;
+                } else {
+                    request->mRotateAndCropAuto = false;
+                }
+
+                overrideAutoRotateAndCrop(request, mOverrideToPortrait, mRotateAndCropOverride);
+            }
+
+            if (autoframingSessionKey) {
+                auto autoframingEntry = filteredParams.find(ANDROID_CONTROL_AUTOFRAMING);
+                if (autoframingEntry.count > 0 &&
+                        autoframingEntry.data.u8[0] == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+                    overrideAutoframing(request, mAutoframingOverride);
+                }
+            }
+
+            filteredParams = request->mSettingsList.begin()->metadata;
         }
     }
 
@@ -2398,7 +2442,7 @@
     }
 
     mGroupIdPhysicalCameraMap.clear();
-    bool composerSurfacePresent = false;
+    mComposerOutput = false;
     for (size_t i = 0; i < mOutputStreams.size(); i++) {
 
         // Don't configure bidi streams twice, nor add them twice to the list
@@ -2441,7 +2485,7 @@
         }
 
         if (outputStream->usage & GraphicBuffer::USAGE_HW_COMPOSER) {
-            composerSurfacePresent = true;
+            mComposerOutput = true;
         }
     }
 
@@ -2511,7 +2555,7 @@
         }
     }
 
-    mRequestThread->setComposerSurface(composerSurfacePresent);
+    mRequestThread->setComposerSurface(mComposerOutput);
 
     // Request thread needs to know to avoid using repeat-last-settings protocol
     // across configure_streams() calls
@@ -3466,6 +3510,17 @@
         latestRequestId = NAME_NOT_FOUND;
     }
 
+    for (size_t i = 0; i < mNextRequests.size(); i++) {
+        auto& nextRequest = mNextRequests.editItemAt(i);
+        sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
+        // Do not override rotate&crop for stream configurations that include
+        // SurfaceViews(HW_COMPOSER) output, unless mOverrideToPortrait is set.
+        // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
+        captureRequest->mRotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
+            overrideAutoRotateAndCrop(captureRequest);
+        captureRequest->mAutoframingChanged = overrideAutoframing(captureRequest);
+    }
+
     // 'mNextRequests' will at this point contain either a set of HFR batched requests
     //  or a single request from streaming or burst. In either case the first element
     //  should contain the latest camera settings that we need to check for any session
@@ -3615,19 +3670,15 @@
         bool triggersMixedIn = (triggerCount > 0 || mPrevTriggers > 0);
         mPrevTriggers = triggerCount;
 
-        // Do not override rotate&crop for stream configurations that include
-        // SurfaceViews(HW_COMPOSER) output, unless mOverrideToPortrait is set.
-        // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
-        bool rotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
-            overrideAutoRotateAndCrop(captureRequest);
-        bool autoframingChanged = overrideAutoframing(captureRequest);
         bool testPatternChanged = overrideTestPattern(captureRequest);
 
         // If the request is the same as last, or we had triggers now or last time or
         // changing overrides this time
         bool newRequest =
-                (mPrevRequest != captureRequest || triggersMixedIn || rotateAndCropChanged ||
-                         autoframingChanged || testPatternChanged) &&
+                (mPrevRequest != captureRequest || triggersMixedIn ||
+                         captureRequest->mRotateAndCropChanged ||
+                         captureRequest->mAutoframingChanged ||
+                         testPatternChanged) &&
                 // Request settings are all the same within one batch, so only treat the first
                 // request in a batch as new
                 !(batchedRequest && i > 0);
@@ -4102,9 +4153,6 @@
         camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) {
     ATRACE_CALL();
     Mutex::Autolock l(mTriggerMutex);
-    if (rotateAndCropValue == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
-        return BAD_VALUE;
-    }
     mRotateAndCropOverride = rotateAndCropValue;
     return OK;
 }
@@ -4113,9 +4161,6 @@
         camera_metadata_enum_android_control_autoframing_t autoframingValue) {
     ATRACE_CALL();
     Mutex::Autolock l(mTriggerMutex);
-    if (autoframingValue == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
-        return BAD_VALUE;
-    }
     mAutoframingOverride = autoframingValue;
     return OK;
 }
@@ -4703,13 +4748,20 @@
     return OK;
 }
 
-bool Camera3Device::RequestThread::overrideAutoRotateAndCrop(
-        const sp<CaptureRequest> &request) {
+bool Camera3Device::RequestThread::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mTriggerMutex);
+    return Camera3Device::overrideAutoRotateAndCrop(request, this->mOverrideToPortrait,
+            this->mRotateAndCropOverride);
+}
+
+bool Camera3Device::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request,
+        bool overrideToPortrait,
+        camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride) {
     ATRACE_CALL();
 
-    if (mOverrideToPortrait) {
-        Mutex::Autolock l(mTriggerMutex);
-        uint8_t rotateAndCrop_u8 = mRotateAndCropOverride;
+    if (overrideToPortrait) {
+        uint8_t rotateAndCrop_u8 = rotateAndCropOverride;
         CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
         metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
                 &rotateAndCrop_u8, 1);
@@ -4717,24 +4769,44 @@
     }
 
     if (request->mRotateAndCropAuto) {
-        Mutex::Autolock l(mTriggerMutex);
         CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
 
         auto rotateAndCropEntry = metadata.find(ANDROID_SCALER_ROTATE_AND_CROP);
         if (rotateAndCropEntry.count > 0) {
-            if (rotateAndCropEntry.data.u8[0] == mRotateAndCropOverride) {
+            if (rotateAndCropEntry.data.u8[0] == rotateAndCropOverride) {
                 return false;
             } else {
-                rotateAndCropEntry.data.u8[0] = mRotateAndCropOverride;
+                rotateAndCropEntry.data.u8[0] = rotateAndCropOverride;
                 return true;
             }
         } else {
-            uint8_t rotateAndCrop_u8 = mRotateAndCropOverride;
-            metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
-                    &rotateAndCrop_u8, 1);
+            uint8_t rotateAndCrop_u8 = rotateAndCropOverride;
+            metadata.update(ANDROID_SCALER_ROTATE_AND_CROP, &rotateAndCrop_u8, 1);
             return true;
         }
     }
+
+    return false;
+}
+
+bool Camera3Device::overrideAutoframing(const sp<CaptureRequest> &request /*out*/,
+        camera_metadata_enum_android_control_autoframing_t autoframingOverride) {
+    CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+    auto autoframingEntry = metadata.find(ANDROID_CONTROL_AUTOFRAMING);
+    if (autoframingEntry.count > 0) {
+        if (autoframingEntry.data.u8[0] == autoframingOverride) {
+            return false;
+        } else {
+            autoframingEntry.data.u8[0] = autoframingOverride;
+            return true;
+        }
+    } else {
+        uint8_t autoframing_u8 = autoframingOverride;
+        metadata.update(ANDROID_CONTROL_AUTOFRAMING,
+                &autoframing_u8, 1);
+        return true;
+    }
+
     return false;
 }
 
@@ -4743,23 +4815,9 @@
 
     if (request->mAutoframingAuto) {
         Mutex::Autolock l(mTriggerMutex);
-        CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
-
-        auto autoframingEntry = metadata.find(ANDROID_CONTROL_AUTOFRAMING);
-        if (autoframingEntry.count > 0) {
-            if (autoframingEntry.data.u8[0] == mAutoframingOverride) {
-                return false;
-            } else {
-                autoframingEntry.data.u8[0] = mAutoframingOverride;
-                return true;
-            }
-        } else {
-            uint8_t autoframing_u8 = mAutoframingOverride;
-            metadata.update(ANDROID_CONTROL_AUTOFRAMING,
-                    &autoframing_u8, 1);
-            return true;
-        }
+        return Camera3Device::overrideAutoframing(request, mAutoframingOverride);
     }
+
     return false;
 }
 
@@ -5247,6 +5305,10 @@
     if (mRequestThread == nullptr) {
         return INVALID_OPERATION;
     }
+    if (rotateAndCropValue == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
+        return BAD_VALUE;
+    }
+    mRotateAndCropOverride = rotateAndCropValue;
     return mRequestThread->setRotateAndCropAutoBehavior(rotateAndCropValue);
 }
 
@@ -5258,6 +5320,10 @@
     if (mRequestThread == nullptr) {
         return INVALID_OPERATION;
     }
+    if (autoframingValue == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+        return BAD_VALUE;
+    }
+    mAutoframingOverride = autoframingValue;
     return mRequestThread->setAutoframingAutoBehaviour(autoframingValue);
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 6b98d9f..f3ffed2 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -625,9 +625,14 @@
         // overriding of ROTATE_AND_CROP value and adjustment of coordinates
         // in several other controls in both the request and the result
         bool                                mRotateAndCropAuto;
+        // Indicates that the ROTATE_AND_CROP value within 'mSettingsList' was modified
+        // irrespective of the original value.
+        bool                                mRotateAndCropChanged = false;
         // Whether this request has AUTOFRAMING_AUTO set, so need to override the AUTOFRAMING value
         // in the capture request.
         bool                                mAutoframingAuto;
+        // Indicates that the auto framing value within 'mSettingsList' was modified
+        bool                                mAutoframingChanged = false;
 
         // Whether this capture request has its zoom ratio set to 1.0x before
         // the framework overrides it for camera HAL consumption.
@@ -816,6 +821,15 @@
      */
     static nsecs_t getMonoToBoottimeOffset();
 
+    // Override rotate_and_crop control if needed
+    static bool    overrideAutoRotateAndCrop(const sp<CaptureRequest> &request /*out*/,
+            bool overrideToPortrait,
+            camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride);
+
+    // Override auto framing control if needed
+    static bool    overrideAutoframing(const sp<CaptureRequest> &request /*out*/,
+            camera_metadata_enum_android_control_autoframing_t autoframingOverride);
+
     struct RequestTrigger {
         // Metadata tag number, e.g. android.control.aePrecaptureTrigger
         uint32_t metadataTag;
@@ -973,7 +987,7 @@
         status_t           addFakeTriggerIds(const sp<CaptureRequest> &request);
 
         // Override rotate_and_crop control if needed; returns true if the current value was changed
-        bool               overrideAutoRotateAndCrop(const sp<CaptureRequest> &request);
+        bool               overrideAutoRotateAndCrop(const sp<CaptureRequest> &request /*out*/);
 
         // Override autoframing control if needed; returns true if the current value was changed
         bool               overrideAutoframing(const sp<CaptureRequest> &request);
@@ -1417,6 +1431,11 @@
     // Whether the camera framework overrides the device characteristics for
     // app compatibility reasons.
     bool mOverrideToPortrait;
+    camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
+    bool mComposerOutput;
+
+    // Auto framing override value
+    camera_metadata_enum_android_control_autoframing mAutoframingOverride;
 
     // Current active physical id of the logical multi-camera, if any
     std::string mActivePhysicalId;
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
index 19afd69..8c0ac71 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -67,8 +67,7 @@
     return INVALID_OPERATION;
 }
 
-void Camera3FakeStream::dump(int fd, const Vector<String16> &args) const {
-    (void) args;
+void Camera3FakeStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
     String8 lines;
     lines.appendFormat("    Stream[%d]: Fake\n", mId);
     write(fd, lines.string(), lines.size());
@@ -82,9 +81,8 @@
     return OK;
 }
 
-status_t Camera3FakeStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
-    (void) buffer;
-    (void) fenceFd;
+status_t Camera3FakeStream::detachBuffer([[maybe_unused]] sp<GraphicBuffer>* buffer,
+                [[maybe_unused]] int* fenceFd) {
     // Do nothing
     return OK;
 }
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index a78d01e..fbaaf7b 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -74,8 +74,7 @@
     return false;
 }
 
-void Camera3IOStreamBase::dump(int fd, const Vector<String16> &args) const {
-    (void) args;
+void Camera3IOStreamBase::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
     String8 lines;
 
     uint64_t consumerUsage = 0;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 9a3f7ed..631bb43 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -104,17 +104,14 @@
 
 status_t Camera3InputStream::returnBufferCheckedLocked(
             const camera_stream_buffer &buffer,
-            nsecs_t timestamp,
-            nsecs_t readoutTimestamp,
-            bool output,
+            [[maybe_unused]] nsecs_t timestamp,
+            [[maybe_unused]] nsecs_t readoutTimestamp,
+            [[maybe_unused]] bool output,
             int32_t /*transform*/,
             const std::vector<size_t>&,
             /*out*/
             sp<Fence> *releaseFenceOut) {
 
-    (void)timestamp;
-    (void)readoutTimestamp;
-    (void)output;
     ALOG_ASSERT(!output, "Expected output to be false");
 
     status_t res;
@@ -218,8 +215,7 @@
     return OK;
 }
 
-void Camera3InputStream::dump(int fd, const Vector<String16> &args) const {
-    (void) args;
+void Camera3InputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
     String8 lines;
     lines.appendFormat("    Stream[%d]: Input\n", mId);
     write(fd, lines.string(), lines.size());
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 2227232..58db57a 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -394,13 +394,12 @@
             const camera_stream_buffer &buffer,
             nsecs_t timestamp,
             nsecs_t readoutTimestamp,
-            bool output,
+            [[maybe_unused]] bool output,
             int32_t transform,
             const std::vector<size_t>& surface_ids,
             /*out*/
             sp<Fence> *releaseFenceOut) {
 
-    (void)output;
     ALOG_ASSERT(output, "Expected output to be true");
 
     status_t res;
@@ -519,8 +518,7 @@
     return res;
 }
 
-void Camera3OutputStream::dump(int fd, const Vector<String16> &args) const {
-    (void) args;
+void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
     String8 lines;
     lines.appendFormat("    Stream[%d]: Output\n", mId);
     lines.appendFormat("      Consumer name: %s\n", mConsumerName.string());
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 4d8495f..4395455 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -955,9 +955,8 @@
     }
 }
 
-void Camera3Stream::dump(int fd, const Vector<String16> &args) const
+void Camera3Stream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const
 {
-    (void)args;
     mBufferLimitLatency.dump(fd,
             "      Latency histogram for wait on max_buffers");
 }
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index 2f55def..1a6b2e0 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -185,9 +185,8 @@
     using getCameraDeviceInterface_V1_x_cb = std::function<void(Status status,
             const sp<device::V1_0::ICameraDevice>& device)>;
     virtual hardware::Return<void> getCameraDeviceInterface_V1_x(
-            const hardware::hidl_string& cameraDeviceName,
+            [[maybe_unused]] const hardware::hidl_string& cameraDeviceName,
             getCameraDeviceInterface_V1_x_cb _hidl_cb) override {
-        (void) cameraDeviceName;
         _hidl_cb(Status::OK, nullptr); //TODO: impl. of ver. 1.0 device interface
                                        //      otherwise enumeration will fail.
         return hardware::Void();
@@ -261,9 +260,8 @@
     virtual ~TestInteractionProxy() {}
 
     virtual bool registerForNotifications(
-            const std::string &serviceName,
+            [[maybe_unused]] const std::string &serviceName,
             const sp<hidl::manager::V1_0::IServiceNotification> &notification) override {
-        (void) serviceName;
         mManagerNotificationInterface = notification;
         return true;
     }
diff --git a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
index 8331136..b367571 100644
--- a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
+++ b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
@@ -355,8 +355,6 @@
 #include "DistortionMapperTest_OpenCvData.h"
 
 TEST(DistortionMapperTest, CompareToOpenCV) {
-    status_t res;
-
     float bigDistortion[] = {0.1, -0.003, 0.004, 0.02, 0.01};
 
     // Expect to match within sqrt(2) radius pixels
@@ -370,7 +368,7 @@
     using namespace openCvData;
 
     DistortionMapperInfo *mapperInfo = m.getMapperInfo();
-    res = m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, mapperInfo, /*clamp*/false,
+    m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, mapperInfo, /*clamp*/false,
             /*simple*/false);
 
     for (size_t i = 0; i < rawCoords.size(); i+=2) {
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 48b27be..a00b0be 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -483,7 +483,7 @@
     }
     std::unordered_set<int32_t> overriddenSensorPixelModes;
     if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
-            physicalCameraMetadata, flexibleConsumer, &overriddenSensorPixelModes) != OK) {
+            physicalCameraMetadata, &overriddenSensorPixelModes) != OK) {
         String8 msg = String8::format("Camera %s: sensor pixel modes for stream with "
                 "format %#x are not valid",logicalCameraId.string(), format);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
@@ -757,7 +757,7 @@
             streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
             if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
                     streamInfo.format, streamInfo.width,
-                    streamInfo.height, metadataChosen, false /*flexibleConsumer*/,
+                    streamInfo.height, metadataChosen,
                     &streamInfo.sensorPixelModesUsed) != OK) {
                         ALOGE("%s: Deferred surface sensor pixel modes not valid",
                                 __FUNCTION__);
@@ -934,7 +934,7 @@
 
 status_t checkAndOverrideSensorPixelModesUsed(
         const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
-        const CameraMetadata &staticInfo, bool flexibleConsumer,
+        const CameraMetadata &staticInfo,
         std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {
 
     const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
@@ -943,6 +943,8 @@
         if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
                 sensorPixelModesUsedSet.end()) {
             // invalid value for non ultra high res sensors
+            ALOGE("%s ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION used on a device which doesn't "
+                    "support ultra high resolution capture", __FUNCTION__);
             return BAD_VALUE;
         }
         overriddenSensorPixelModesUsed->clear();
@@ -973,27 +975,30 @@
             overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
             return OK;
         }
-        // We don't allow flexible consumer for max resolution mode.
         if (isInMaximumResolutionStreamConfigurationMap) {
-            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
-            return OK;
-        }
-        if (isInDefaultStreamConfigurationMap || (flexibleConsumer && width < ROUNDING_WIDTH_CAP)) {
+            overriddenSensorPixelModesUsed->insert(
+                    ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+        } else {
             overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
-            return OK;
         }
-        return BAD_VALUE;
+        return OK;
     }
 
     // Case2: The app has set sensorPixelModesUsed, we need to verify that they
     // are valid / err out.
     if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
             sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
+        ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_DEFAULT set by client, but stream f: %d size %d x %d"
+                " isn't present in default stream configuration map", __FUNCTION__, format, width,
+                height);
         return BAD_VALUE;
     }
 
    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
             sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
+        ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION set by client, but stream f: "
+                "%d size %d x %d isn't present in maximum resolution stream configuration map",
+                __FUNCTION__, format, width, height);
         return BAD_VALUE;
     }
     *overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index b5654ac..2e35de0 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -145,7 +145,7 @@
 
 status_t checkAndOverrideSensorPixelModesUsed(
         const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
-        const CameraMetadata &staticInfo, bool flexibleConsumer,
+        const CameraMetadata &staticInfo,
         std::unordered_set<int32_t> *overriddenSensorPixelModesUsed);
 
 bool targetPerfClassPrimaryCamera(
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
index 8b33f10..20a6378 100644
--- a/services/mediametrics/fuzzer/Android.bp
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -68,5 +68,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libmediametricsservice",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index c5957e9..cb5e783 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -444,6 +444,12 @@
     }
     AStatsEvent_writeInt32(event, hdrFormat);
 
+    int64_t codecId = 0;
+    if (item->getInt64("android.media.mediacodec.id", &codecId)) {
+        metrics_proto.set_codec_id(codecId);
+    }
+    AStatsEvent_writeInt64(event, codecId);
+
     int err = AStatsEvent_write(event);
     if (err < 0) {
       ALOGE("Failed to write codec metrics to statsd (%d)", err);
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index 2b8245e..a2bd5e1 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -17,6 +17,7 @@
         "aidl/android/media/MediaResourceParcel.aidl",
         "aidl/android/media/MediaResourcePolicyParcel.aidl",
         "aidl/android/media/ClientInfoParcel.aidl",
+        "aidl/android/media/ClientConfigParcel.aidl",
     ],
     path: "aidl",
 }
@@ -73,9 +74,11 @@
     name: "libresourcemanagerservice",
 
     srcs: [
+        "ResourceManagerMetrics.cpp",
         "ResourceManagerService.cpp",
         "ResourceObserverService.cpp",
         "ServiceLog.cpp",
+        "UidObserver.cpp",
 
         // TODO: convert to AIDL?
         "IMediaResourceMonitor.cpp",
@@ -92,6 +95,7 @@
         "libstatspull",
         "libstatssocket",
         "libprotobuf-cpp-lite",
+        "libactivitymanager_aidl",
     ],
 
     static_libs: [
diff --git a/services/mediaresourcemanager/OWNERS b/services/mediaresourcemanager/OWNERS
index 82abf8f..4fc3728 100644
--- a/services/mediaresourcemanager/OWNERS
+++ b/services/mediaresourcemanager/OWNERS
@@ -1 +1,3 @@
-dwkang@google.com
+girishshetty@google.com
+lajos@google.com
+wonsik@google.com
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
new file mode 100644
index 0000000..8d591df
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -0,0 +1,564 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceManagerMetrics"
+#include <utils/Log.h>
+#include <mediautils/ProcessInfo.h>
+
+#include <stats_media_metrics.h>
+
+#include "UidObserver.h"
+#include "ResourceManagerMetrics.h"
+
+#include <cmath>
+#include <sstream>
+
+namespace android {
+
+using stats::media_metrics::stats_write;
+using stats::media_metrics::MEDIA_CODEC_STARTED;
+using stats::media_metrics::MEDIA_CODEC_STOPPED;
+// Disabling this for now.
+#ifdef ENABLE_MEDIA_CODEC_CONCURRENT_USAGE_REPORTED
+using stats::media_metrics::MEDIA_CODEC_CONCURRENT_USAGE_REPORTED;
+#endif
+using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED;
+using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
+using stats::media_metrics::\
+    MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
+using stats::media_metrics::\
+    MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
+
+inline const char* getCodecType(MediaResourceSubType codecType) {
+    switch (codecType) {
+        case MediaResourceSubType::kAudioCodec:         return "Audio";
+        case MediaResourceSubType::kVideoCodec:         return "Video";
+        case MediaResourceSubType::kImageCodec:         return "Image";
+        case MediaResourceSubType::kUnspecifiedSubType:
+        default:
+                                                        return "Unspecified";
+    }
+    return "Unspecified";
+}
+
+static CodecBucket getCodecBucket(bool isHardware,
+                                  bool isEncoder,
+                                  MediaResourceSubType codecType) {
+    if (isHardware) {
+        switch (codecType) {
+            case MediaResourceSubType::kAudioCodec:
+                if (isEncoder) return HwAudioEncoder;
+                return HwAudioDecoder;
+            case MediaResourceSubType::kVideoCodec:
+                if (isEncoder) return HwVideoEncoder;
+                return HwVideoDecoder;
+            case MediaResourceSubType::kImageCodec:
+                if (isEncoder) return HwImageEncoder;
+                return HwImageDecoder;
+            case MediaResourceSubType::kUnspecifiedSubType:
+            default:
+                return CodecBucketUnspecified;
+        }
+    } else {
+        switch (codecType) {
+            case MediaResourceSubType::kAudioCodec:
+                if (isEncoder) return SwAudioEncoder;
+                return SwAudioDecoder;
+            case MediaResourceSubType::kVideoCodec:
+                if (isEncoder) return SwVideoEncoder;
+                return SwVideoDecoder;
+            case MediaResourceSubType::kImageCodec:
+                if (isEncoder) return SwImageEncoder;
+                return SwImageDecoder;
+            case MediaResourceSubType::kUnspecifiedSubType:
+            default:
+                return CodecBucketUnspecified;
+        }
+    }
+
+    return CodecBucketUnspecified;
+}
+
+static bool getLogMessage(int hwCount, int swCount, std::stringstream& logMsg) {
+    bool update = false;
+    logMsg.clear();
+
+    if (hwCount > 0) {
+        logMsg << " HW: " << hwCount;
+        update = true;
+    }
+    if (swCount > 0) {
+        logMsg << " SW: " << swCount;
+        update = true;
+    }
+
+    if (update) {
+        logMsg << " ] ";
+    }
+    return update;
+}
+
+ResourceManagerMetrics::ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo) {
+    // Create a process termination watcher with a 5 second polling interval.
+    mUidObserver = sp<UidObserver>::make(processInfo,
+        [this] (int32_t pid, uid_t uid) {
+            onProcessTerminated(pid, uid);
+        });
+    mUidObserver->start();
+}
+
+ResourceManagerMetrics::~ResourceManagerMetrics() {
+    mUidObserver->stop();
+}
+
+void ResourceManagerMetrics::addPid(int pid, uid_t uid) {
+    if (uid != 0) {
+        std::scoped_lock lock(mLock);
+        mUidObserver->add(pid, uid);
+    }
+}
+
+void ResourceManagerMetrics::notifyClientCreated(const ClientInfoParcel& clientInfo) {
+    std::scoped_lock lock(mLock);
+    // Update the resource instance count.
+    std::map<std::string, int>::iterator found = mConcurrentResourceCountMap.find(clientInfo.name);
+    if (found == mConcurrentResourceCountMap.end()) {
+        mConcurrentResourceCountMap[clientInfo.name] = 1;
+    } else {
+        found->second++;
+    }
+}
+
+void ResourceManagerMetrics::notifyClientReleased(const ClientInfoParcel& clientInfo) {
+    bool stopCalled = true;
+    ClientConfigParcel clientConfig;
+    {
+        std::scoped_lock lock(mLock);
+        ClientConfigMap::iterator found = mClientConfigMap.find(clientInfo.id);
+        if (found != mClientConfigMap.end()) {
+            // Release is called without Stop!
+            stopCalled = false;
+            clientConfig = found->second;
+            // Update the timestamp for stopping the codec.
+            clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
+        }
+    }
+    if (!stopCalled) {
+        // call Stop to update the metrics.
+        notifyClientStopped(clientConfig);
+    }
+    {
+        std::scoped_lock lock(mLock);
+        // Update the resource instance count also.
+        std::map<std::string, int>::iterator found =
+            mConcurrentResourceCountMap.find(clientInfo.name);
+        if (found != mConcurrentResourceCountMap.end()) {
+            if (found->second > 0) {
+                found->second--;
+            }
+        }
+    }
+}
+
+void ResourceManagerMetrics::notifyClientStarted(const ClientConfigParcel& clientConfig) {
+    std::scoped_lock lock(mLock);
+    int pid = clientConfig.clientInfo.pid;
+    // We need to observe this process.
+    mUidObserver->add(pid, clientConfig.clientInfo.uid);
+
+    // Update the client config for this client.
+    mClientConfigMap[clientConfig.clientInfo.id] = clientConfig;
+
+    // Update the concurrent codec count for this process.
+    CodecBucket codecBucket = getCodecBucket(clientConfig.isHardware,
+                                             clientConfig.isEncoder,
+                                             clientConfig.codecType);
+    increaseConcurrentCodecs(pid, codecBucket);
+
+    if (clientConfig.codecType == MediaResourceSubType::kVideoCodec ||
+        clientConfig.codecType == MediaResourceSubType::kImageCodec) {
+        // Update the pixel count for this process
+        increasePixelCount(pid, clientConfig.width * (long)clientConfig.height);
+    }
+
+    // System concurrent codec usage
+    int systemConcurrentCodecCount = mConcurrentCodecsMap[codecBucket];
+    // Process/Application concurrent codec usage for this type of codec
+    int appConcurrentCodecCount = mProcessConcurrentCodecsMap[pid].mCurrent[codecBucket];
+    // Process/Application's current pixel count.
+    long pixelCount = 0;
+    std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
+    if (it != mProcessPixelsMap.end()) {
+        pixelCount = it->second.mCurrent;
+    }
+
+    int result = stats_write(
+         MEDIA_CODEC_STARTED,
+         clientConfig.clientInfo.uid,
+         clientConfig.id,
+         clientConfig.clientInfo.name.c_str(),
+         static_cast<int32_t>(clientConfig.codecType),
+         clientConfig.isEncoder,
+         clientConfig.isHardware,
+         clientConfig.width, clientConfig.height,
+         systemConcurrentCodecCount,
+         appConcurrentCodecCount,
+         pixelCount);
+
+    ALOGV("%s: Pushed MEDIA_CODEC_STARTED atom: "
+          "Process[pid(%d): uid(%d)] "
+          "Codec: [%s: %ju] is %s %s %s "
+          "Timestamp: %jd "
+          "Resolution: %d x %d "
+          "ConcurrentCodec[%d]={System: %d App: %d} "
+          "result: %d",
+          __func__,
+          pid, clientConfig.clientInfo.uid,
+          clientConfig.clientInfo.name.c_str(),
+          clientConfig.id,
+          clientConfig.isHardware? "hardware" : "software",
+          getCodecType(clientConfig.codecType),
+          clientConfig.isEncoder? "encoder" : "decoder",
+          clientConfig.timeStamp,
+          clientConfig.width, clientConfig.height,
+          codecBucket, systemConcurrentCodecCount, appConcurrentCodecCount,
+          result);
+}
+
+void ResourceManagerMetrics::notifyClientStopped(const ClientConfigParcel& clientConfig) {
+    std::scoped_lock lock(mLock);
+    int pid = clientConfig.clientInfo.pid;
+    // Update the concurrent codec count for this process.
+    CodecBucket codecBucket = getCodecBucket(clientConfig.isHardware,
+                                             clientConfig.isEncoder,
+                                             clientConfig.codecType);
+    decreaseConcurrentCodecs(pid, codecBucket);
+
+    if (clientConfig.codecType == MediaResourceSubType::kVideoCodec ||
+        clientConfig.codecType == MediaResourceSubType::kImageCodec) {
+        // Update the pixel count for this process
+        decreasePixelCount(pid, clientConfig.width * (long)clientConfig.height);
+    }
+
+    // System concurrent codec usage
+    int systemConcurrentCodecCount = mConcurrentCodecsMap[codecBucket];
+    // Process/Application concurrent codec usage for this type of codec
+    int appConcurrentCodecCount = 0;
+    std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+    if (found != mProcessConcurrentCodecsMap.end()) {
+        appConcurrentCodecCount = found->second.mCurrent[codecBucket];
+    }
+    // Process/Application's current pixel count.
+    long pixelCount = 0;
+    std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
+    if (it != mProcessPixelsMap.end()) {
+        pixelCount = it->second.mCurrent;
+    }
+
+    // calculate the usageTime as:
+    //  MediaCodecStopped.clientConfig.timeStamp -
+    //  MediaCodecStarted.clientConfig.timeStamp
+    int64_t usageTime = 0;
+    ClientConfigMap::iterator entry = mClientConfigMap.find(clientConfig.clientInfo.id);
+    if (entry != mClientConfigMap.end()) {
+        usageTime = clientConfig.timeStamp - entry->second.timeStamp;
+        // And we can erase this config now.
+        mClientConfigMap.erase(entry);
+    } else {
+        ALOGW("%s: Start Config is missing!", __func__);
+    }
+
+    int result = stats_write(
+         MEDIA_CODEC_STOPPED,
+         clientConfig.clientInfo.uid,
+         clientConfig.id,
+         clientConfig.clientInfo.name.c_str(),
+         static_cast<int32_t>(clientConfig.codecType),
+         clientConfig.isEncoder,
+         clientConfig.isHardware,
+         clientConfig.width, clientConfig.height,
+         systemConcurrentCodecCount,
+         appConcurrentCodecCount,
+         pixelCount,
+         usageTime);
+    ALOGV("%s: Pushed MEDIA_CODEC_STOPPED atom: "
+          "Process[pid(%d): uid(%d)] "
+          "Codec: [%s: %ju] is %s %s %s "
+          "Timestamp: %jd Usage time: %jd "
+          "Resolution: %d x %d "
+          "ConcurrentCodec[%d]={System: %d App: %d} "
+          "result: %d",
+          __func__,
+          pid, clientConfig.clientInfo.uid,
+          clientConfig.clientInfo.name.c_str(),
+          clientConfig.id,
+          clientConfig.isHardware? "hardware" : "software",
+          getCodecType(clientConfig.codecType),
+          clientConfig.isEncoder? "encoder" : "decoder",
+          clientConfig.timeStamp, usageTime,
+          clientConfig.width, clientConfig.height,
+          codecBucket, systemConcurrentCodecCount, appConcurrentCodecCount,
+          result);
+}
+
+void ResourceManagerMetrics::onProcessTerminated(int32_t pid, uid_t uid) {
+    std::scoped_lock lock(mLock);
+    // post MediaCodecConcurrentUsageReported for this terminated pid.
+    pushConcurrentUsageReport(pid, uid);
+}
+
+void ResourceManagerMetrics::pushConcurrentUsageReport(int32_t pid, uid_t uid) {
+    // Process/Application peak concurrent codec usage
+    std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+    if (found == mProcessConcurrentCodecsMap.end()) {
+        ALOGI("%s: No MEDIA_CODEC_CONCURRENT_USAGE_REPORTED atom Entry for: "
+              "Application[pid(%d): uid(%d)]", __func__, pid, uid);
+        return;
+    }
+    const ConcurrentCodecsMap& codecsMap = found->second.mPeak;
+    int peakHwAudioEncoderCount = codecsMap[HwAudioEncoder];
+    int peakHwAudioDecoderCount = codecsMap[HwAudioDecoder];
+    int peakHwVideoEncoderCount = codecsMap[HwVideoEncoder];
+    int peakHwVideoDecoderCount = codecsMap[HwVideoDecoder];
+    int peakHwImageEncoderCount = codecsMap[HwImageEncoder];
+    int peakHwImageDecoderCount = codecsMap[HwImageDecoder];
+    int peakSwAudioEncoderCount = codecsMap[SwAudioEncoder];
+    int peakSwAudioDecoderCount = codecsMap[SwAudioDecoder];
+    int peakSwVideoEncoderCount = codecsMap[SwVideoEncoder];
+    int peakSwVideoDecoderCount = codecsMap[SwVideoDecoder];
+    int peakSwImageEncoderCount = codecsMap[SwImageEncoder];
+    int peakSwImageDecoderCount = codecsMap[SwImageDecoder];
+
+    long peakPixels = 0;
+    std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
+    if (it == mProcessPixelsMap.end()) {
+        ALOGI("%s: No Video Codec Entry for Application[pid(%d): uid(%d)]",
+              __func__, pid, uid);
+    } else {
+        peakPixels = it->second.mPeak;
+    }
+    std::string peakPixelsLog("Peak Pixels: " + std::to_string(peakPixels));
+
+    std::stringstream peakCodecLog;
+    peakCodecLog << "Peak { ";
+    std::stringstream logMsg;
+    if (getLogMessage(peakHwAudioEncoderCount, peakSwAudioEncoderCount, logMsg)) {
+        peakCodecLog << "AudioEnc[" << logMsg.str();
+    }
+    if (getLogMessage(peakHwAudioDecoderCount, peakSwAudioDecoderCount, logMsg)) {
+        peakCodecLog << "AudioDec[" << logMsg.str();
+    }
+    if (getLogMessage(peakHwVideoEncoderCount, peakSwVideoEncoderCount, logMsg)) {
+        peakCodecLog << "VideoEnc[" << logMsg.str();
+    }
+    if (getLogMessage(peakHwVideoDecoderCount, peakSwVideoDecoderCount, logMsg)) {
+        peakCodecLog << "VideoDec[" << logMsg.str();
+    }
+    if (getLogMessage(peakHwImageEncoderCount, peakSwImageEncoderCount, logMsg)) {
+        peakCodecLog << "ImageEnc[" << logMsg.str();
+    }
+    if (getLogMessage(peakHwImageDecoderCount, peakSwImageDecoderCount, logMsg)) {
+        peakCodecLog << "ImageDec[" << logMsg.str();
+    }
+    peakCodecLog << "}";
+
+#ifdef ENABLE_MEDIA_CODEC_CONCURRENT_USAGE_REPORTED
+    int result = stats_write(
+        MEDIA_CODEC_CONCURRENT_USAGE_REPORTED,
+        uid,
+        peakHwVideoDecoderCount,
+        peakHwVideoEncoderCount,
+        peakSwVideoDecoderCount,
+        peakSwVideoEncoderCount,
+        peakHwAudioDecoderCount,
+        peakHwAudioEncoderCount,
+        peakSwAudioDecoderCount,
+        peakSwAudioEncoderCount,
+        peakHwImageDecoderCount,
+        peakHwImageEncoderCount,
+        peakSwImageDecoderCount,
+        peakSwImageEncoderCount,
+        peakPixels);
+    ALOGI("%s: Pushed MEDIA_CODEC_CONCURRENT_USAGE_REPORTED atom: "
+          "Process[pid(%d): uid(%d)] %s %s result: %d",
+          __func__, pid, uid, peakCodecLog.str().c_str(), peakPixelsLog.c_str(), result);
+#else
+    ALOGI("%s: Concurrent Codec Usage Report for the Process[pid(%d): uid(%d)] is %s %s",
+          __func__, pid, uid, peakCodecLog.str().c_str(), peakPixelsLog.c_str());
+#endif
+}
+
+void ResourceManagerMetrics::pushReclaimAtom(const ClientInfoParcel& clientInfo,
+                        const std::vector<int>& priorities,
+                        const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
+                        const PidUidVector& idList, bool reclaimed) {
+    // Construct the metrics for codec reclaim as a pushed atom.
+    // 1. Information about the requester.
+    //  - UID and the priority (oom score)
+    int32_t callingPid = clientInfo.pid;
+    int32_t requesterUid = clientInfo.uid;
+    std::string clientName = clientInfo.name;
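+    // priorities[0] is the requester's priority (oom score); priorities[1..] line
+    // up, in order, with the entries of idList (one per reclaim target).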
+    int requesterPriority = priorities[0];
+
+    //  2. Information about the codec.
+    //  - Name of the codec requested
+    //  - Number of concurrent codecs running.
+    int32_t noOfConcurrentCodecs = 0;
+    std::map<std::string, int>::iterator found = mConcurrentResourceCountMap.find(clientName);
+    if (found != mConcurrentResourceCountMap.end()) {
+        noOfConcurrentCodecs = found->second;
+    }
+
+    // 3. Information about the Reclaim:
+    // - Status of reclaim request
+    // - How many codecs are reclaimed
+    // - For each codecs reclaimed, information of the process that it belonged to:
+    //    - UID and the Priority (oom score)
+    int32_t reclaimStatus = MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
+    if (!reclaimed) {
+      if (clients.size() == 0) {
+        // No clients to reclaim from
+        reclaimStatus =
+            MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
+      } else {
+        // Couldn't reclaim resources from the clients
+        reclaimStatus =
+            MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
+      }
+    }
+    int32_t noOfCodecsReclaimed = clients.size();
+    int32_t targetIndex = 1;
+    for (PidUidVector::const_reference id : idList) {
+        int32_t targetUid = id.second;
+        int targetPriority = priorities[targetIndex];
+        // Post the pushed atom
+        int result = stats_write(
+            MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
+            requesterUid,
+            requesterPriority,
+            clientName.c_str(),
+            noOfConcurrentCodecs,
+            reclaimStatus,
+            noOfCodecsReclaimed,
+            targetIndex,
+            targetUid,
+            targetPriority);
+        ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
+              "Requester[pid(%d): uid(%d): priority(%d)] "
+              "Codec: [%s] "
+              "No of concurrent codecs: %d "
+              "Reclaim Status: %d "
+              "No of codecs reclaimed: %d "
+              "Target[%d][pid(%d): uid(%d): priority(%d)] result: %d",
+              __func__, callingPid, requesterUid, requesterPriority,
+              clientName.c_str(), noOfConcurrentCodecs,
+              reclaimStatus, noOfCodecsReclaimed,
+              targetIndex, id.first, targetUid, targetPriority, result);
+        targetIndex++;
+    }
+}
+
+void ResourceManagerMetrics::increaseConcurrentCodecs(int32_t pid,
+                                                      CodecBucket codecBucket) {
+    // Increase the codec usage across the system.
+    mConcurrentCodecsMap[codecBucket]++;
+
+    // Now update the codec usage for this (pid) process.
+    std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+    if (found == mProcessConcurrentCodecsMap.end()) {
+        ConcurrentCodecs codecs;
+        codecs.mCurrent[codecBucket] = 1;
+        codecs.mPeak[codecBucket] = 1;
+        mProcessConcurrentCodecsMap.emplace(pid, codecs);
+    } else {
+        found->second.mCurrent[codecBucket]++;
+        // Check if it's the peak count for this slot.
+        if (found->second.mPeak[codecBucket] < found->second.mCurrent[codecBucket]) {
+            found->second.mPeak[codecBucket] = found->second.mCurrent[codecBucket];
+        }
+    }
+}
+
+void ResourceManagerMetrics::decreaseConcurrentCodecs(int32_t pid,
+                                                      CodecBucket codecBucket) {
+    // Decrease the codec usage across the system.
+    if (mConcurrentCodecsMap[codecBucket] > 0) {
+        mConcurrentCodecsMap[codecBucket]--;
+    }
+
+    // Now update the codec usage for this (pid) process.
+    std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+    if (found != mProcessConcurrentCodecsMap.end()) {
+        if (found->second.mCurrent[codecBucket] > 0) {
+            found->second.mCurrent[codecBucket]--;
+        }
+    }
+}
+
+void ResourceManagerMetrics::increasePixelCount(int32_t pid, long pixels) {
+    // Now update the current pixel usage for this (pid) process.
+    std::map<int32_t, PixelCount>::iterator found = mProcessPixelsMap.find(pid);
+    if (found == mProcessPixelsMap.end()) {
+        PixelCount pixelCount {pixels, pixels};
+        mProcessPixelsMap.emplace(pid, pixelCount);
+    } else {
+        if (__builtin_add_overflow(found->second.mCurrent, pixels, &found->second.mCurrent)) {
+            ALOGI("Pixel Count overflow");
+            return;
+        }
+        // Check if it's the peak count for this slot.
+        if (found->second.mPeak < found->second.mCurrent) {
+            found->second.mPeak = found->second.mCurrent;
+        }
+    }
+}
+
+void ResourceManagerMetrics::decreasePixelCount(int32_t pid, long pixels) {
+    // Now update the current pixel usage for this (pid) process.
+    std::map<int32_t, PixelCount>::iterator found = mProcessPixelsMap.find(pid);
+    if (found != mProcessPixelsMap.end()) {
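+        // Clamp at zero rather than letting the count go negative if more pixels
+        // are removed than were ever added.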
+        if (found->second.mCurrent < pixels) {
+            found->second.mCurrent = 0;
+        } else {
+            if (__builtin_sub_overflow(found->second.mCurrent, pixels, &found->second.mCurrent)) {
+                ALOGI("Pixel Count overflow");
+                return;
+            }
+        }
+    }
+}
+
+long ResourceManagerMetrics::getPeakConcurrentPixelCount(int pid) const {
+    std::map<int32_t, PixelCount>::const_iterator found = mProcessPixelsMap.find(pid);
+    if (found != mProcessPixelsMap.end()) {
+        return found->second.mPeak;
+    }
+
+    return 0;
+}
+
+long ResourceManagerMetrics::getCurrentConcurrentPixelCount(int pid) const {
+    std::map<int32_t, PixelCount>::const_iterator found = mProcessPixelsMap.find(pid);
+    if (found != mProcessPixelsMap.end()) {
+        return found->second.mCurrent;
+    }
+
+    return 0;
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
new file mode 100644
index 0000000..b7810e5
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -0,0 +1,179 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_RESOURCEMANAGERMETRICS_H_
+#define ANDROID_MEDIA_RESOURCEMANAGERMETRICS_H_
+
+#include "ResourceManagerService.h"
+
+namespace android {
+
+using ::aidl::android::media::ClientInfoParcel;
+using ::aidl::android::media::ClientConfigParcel;
+using ::aidl::android::media::IResourceManagerClient;
+
+struct ProcessInfoInterface;
+
+class UidObserver;
+
+//
+// Enumeration for Codec bucket based on:
+//   - Encoder or Decoder
+//   - hardware implementation or not
+//   - Audio/Video/Image codec
+//
+enum CodecBucket {
+    CodecBucketUnspecified = 0,
+    HwAudioEncoder = 1,
+    HwAudioDecoder = 2,
+    HwVideoEncoder = 3,
+    HwVideoDecoder = 4,
+    HwImageEncoder = 5,
+    HwImageDecoder = 6,
+    SwAudioEncoder = 7,
+    SwAudioDecoder = 8,
+    SwVideoEncoder = 9,
+    SwVideoDecoder = 10,
+    SwImageEncoder = 11,
+    SwImageDecoder = 12,
+    CodecBucketMaxSize = 13,
+};
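+// Note: the bucket for a client is derived (by the getCodecBucket() helper in
+// ResourceManagerMetrics.cpp) from its (isHardware, isEncoder, codecType) triple;
+// for example a hardware video decoder falls into HwVideoDecoder and a software
+// audio encoder into SwAudioEncoder.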
+
+// Map of client id to the client configuration captured when it was last started.
+typedef std::map<int64_t, ClientConfigParcel> ClientConfigMap;
+
+// Map of pid and the uid.
+typedef std::map<int32_t, uid_t> PidUidMap;
+
+// Map of concurrent codecs by codec type bucket.
+struct ConcurrentCodecsMap {
+    int& operator[](CodecBucket index) {
+        return mCodec[index];
+    }
+
+    const int& operator[](CodecBucket index) const {
+        return mCodec[index];
+    }
+
+private:
+    int mCodec[CodecBucketMaxSize] = {0};
+};
+
+// Current and peak ConcurrentCodecsMap for a process.
+struct ConcurrentCodecs {
+    ConcurrentCodecsMap mCurrent;
+    ConcurrentCodecsMap mPeak;
+};
+
+// Current and Peak pixel count for a process.
+struct PixelCount {
+    long mCurrent = 0;
+    long mPeak = 0;
+};
+
+//
+// ResourceManagerMetrics class that maintains concurrent codec counts based on:
+//
+//  1. # of concurrent active codecs (initialized, but not yet released) of a given
+//     implementation (by codec name) across the system.
+//
+//  2. # of concurrent codec usage (started, but not stopped yet), which is
+//  measured using codec type bucket (CodecBucket) for:
+//   - each process/application.
+//   - across the system.
+//  Also the peak count of the same for each process/application is maintained.
+//
+//  3. # of Peak Concurrent Pixels for each process/application.
+//  This should help with understanding the (video) memory usage per
+//  application.
+//
+//
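+// Expected call sequence (as wired up from ResourceManagerService in this change):
+//   notifyClientCreated()  - when a client (codec) is created,
+//   notifyClientStarted()  - when the codec starts (pushes the MediaCodecStarted atom),
+//   notifyClientStopped()  - when the codec stops (pushes the MediaCodecStopped atom),
+//   notifyClientReleased() - when the client is removed; treated as an implicit
+//                            stop if no stop was reported.
+//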
+class ResourceManagerMetrics {
+public:
+    ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo);
+    ~ResourceManagerMetrics();
+
+    // To be called when a client is created.
+    void notifyClientCreated(const ClientInfoParcel& clientInfo);
+
+    // To be called when a client is released.
+    void notifyClientReleased(const ClientInfoParcel& clientInfo);
+
+    // To be called when a client is started.
+    void notifyClientStarted(const ClientConfigParcel& clientConfig);
+
+    // To be called when a client is stopped.
+    void notifyClientStopped(const ClientConfigParcel& clientConfig);
+
+    // To be called after a reclaim event.
+    void pushReclaimAtom(const ClientInfoParcel& clientInfo,
+                         const std::vector<int>& priorities,
+                         const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
+                         const PidUidVector& idList, bool reclaimed);
+
+    // Add this pid/uid pair to the set monitored for process termination.
+    void addPid(int pid, uid_t uid = 0);
+
+    // Get the peak concurrent pixel count (associated with the video codecs) for the process.
+    long getPeakConcurrentPixelCount(int pid) const;
+    // Get the current concurrent pixel count (associated with the video codecs) for the process.
+    long getCurrentConcurrentPixelCount(int pid) const;
+
+private:
+    ResourceManagerMetrics(const ResourceManagerMetrics&) = delete;
+    ResourceManagerMetrics(ResourceManagerMetrics&&) = delete;
+    ResourceManagerMetrics& operator=(const ResourceManagerMetrics&) = delete;
+    ResourceManagerMetrics& operator=(ResourceManagerMetrics&&) = delete;
+
+    // To increase/decrease the concurrent codec usage for a given CodecBucket.
+    void increaseConcurrentCodecs(int32_t pid, CodecBucket codecBucket);
+    void decreaseConcurrentCodecs(int32_t pid, CodecBucket codecBucket);
+
+    // To increase/decrease the concurrent pixels usage for a process.
+    void increasePixelCount(int32_t pid, long pixels);
+    void decreasePixelCount(int32_t pid, long pixels);
+
+    // Issued when the process/application with given pid/uid is terminated.
+    void onProcessTerminated(int32_t pid, uid_t uid);
+
+    // To push concurrent codec usage of a process/application.
+    void pushConcurrentUsageReport(int32_t pid, uid_t uid);
+
+private:
+    std::mutex mLock;
+
+    // Map of client id and the configuration.
+    ClientConfigMap mClientConfigMap;
+
+    // Current and peak concurrent pixel count for each process/application.
+    std::map<int32_t, PixelCount> mProcessPixelsMap;
+
+    // Map of resources (name) and number of concurrent instances
+    std::map<std::string, int> mConcurrentResourceCountMap;
+
+    // Map of concurrent codecs by CodecBucket across the system.
+    ConcurrentCodecsMap mConcurrentCodecsMap;
+    // Map of concurrent and peak codecs by CodecBucket for each process/application.
+    std::map<int32_t, ConcurrentCodecs> mProcessConcurrentCodecsMap;
+
+    // Uid Observer to monitor the application termination.
+    sp<UidObserver> mUidObserver;
+};
+
+} // namespace android
+
+#endif  // ANDROID_MEDIA_RESOURCEMANAGERMETRICS_H_
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 1cef9d5..6822b06 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -35,23 +35,15 @@
 #include <sys/stat.h>
 #include <sys/time.h>
 #include <unistd.h>
-#include <stats_media_metrics.h>
 
 #include "IMediaResourceMonitor.h"
+#include "ResourceManagerMetrics.h"
 #include "ResourceManagerService.h"
 #include "ResourceObserverService.h"
 #include "ServiceLog.h"
 
 namespace android {
 
-using stats::media_metrics::stats_write;
-using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED;
-using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
-using stats::media_metrics::\
-    MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
-using stats::media_metrics::\
-    MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
-
 //static
 std::mutex ResourceManagerService::sCookieLock;
 //static
@@ -61,8 +53,8 @@
 
 class DeathNotifier : public RefBase {
 public:
-    DeathNotifier(const std::shared_ptr<ResourceManagerService> &service, int pid,
-            int64_t clientId);
+    DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
+                  const ClientInfoParcel& clientInfo);
 
     virtual ~DeathNotifier() {}
 
@@ -72,13 +64,12 @@
 
 protected:
     std::weak_ptr<ResourceManagerService> mService;
-    int mPid;
-    int64_t mClientId;
+    const ClientInfoParcel mClientInfo;
 };
 
 DeathNotifier::DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
-        int pid, int64_t clientId)
-    : mService(service), mPid(pid), mClientId(clientId) {}
+                             const ClientInfoParcel& clientInfo)
+    : mService(service), mClientInfo(clientInfo) {}
 
 //static
 void DeathNotifier::BinderDiedCallback(void* cookie) {
@@ -105,16 +96,16 @@
         return;
     }
 
-    service->overridePid(mPid, -1);
+    service->overridePid(mClientInfo.pid, -1);
     // thiz is freed in the call below, so it must be last call referring thiz
-    ClientInfoParcel clientInfo{.pid = mPid, .id = mClientId};
-    service->removeResource(clientInfo, false /*checkValid*/);
+    service->removeResource(mClientInfo, false /*checkValid*/);
 }
 
 class OverrideProcessInfoDeathNotifier : public DeathNotifier {
 public:
     OverrideProcessInfoDeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
-            int pid) : DeathNotifier(service, pid, 0) {}
+                                     const ClientInfoParcel& clientInfo)
+            : DeathNotifier(service, clientInfo) {}
 
     virtual ~OverrideProcessInfoDeathNotifier() {}
 
@@ -129,7 +120,7 @@
         return;
     }
 
-    service->removeProcessInfoOverride(mPid);
+    service->removeProcessInfoOverride(mClientInfo.pid);
 }
 
 template <typename T>
@@ -202,7 +193,11 @@
         ResourceInfo info;
         info.uid = uid;
         info.clientId = clientId;
-        info.name = name;
+        if (name.empty()) {
+            info.name = "<unknown client>";
+        } else {
+            info.name = name;
+        }
         info.client = client;
         info.cookie = 0;
         info.pendingRemoval = false;
@@ -292,10 +287,7 @@
             snprintf(buffer, SIZE, "        Id: %lld\n", (long long)infos[j].clientId);
             result.append(buffer);
 
-            std::string clientName = "<unknown client>";
-            if (infos[j].client != nullptr) {
-                clientName = infos[j].name;
-            }
+            std::string clientName = infos[j].name;
             snprintf(buffer, SIZE, "        Name: %s\n", clientName.c_str());
             result.append(buffer);
 
@@ -357,6 +349,8 @@
       mCpuBoostCount(0),
       mDeathRecipient(AIBinder_DeathRecipient_new(DeathNotifier::BinderDiedCallback)) {
     mSystemCB->noteResetVideo();
+    // Create ResourceManagerMetrics that handles all the metrics.
+    mResourceManagerMetrics = std::make_unique<ResourceManagerMetrics>(mProcessInfo);
 }
 
 //static
@@ -510,49 +504,16 @@
     }
     if (info.cookie == 0 && client != nullptr) {
         info.cookie = addCookieAndLink_l(client,
-                new DeathNotifier(ref<ResourceManagerService>(), pid, clientId));
+                new DeathNotifier(ref<ResourceManagerService>(), clientInfo));
     }
     if (mObserverService != nullptr && !resourceAdded.empty()) {
         mObserverService->onResourceAdded(uid, pid, resourceAdded);
     }
     notifyResourceGranted(pid, resources);
 
-    // Increase the instance count of the resource associated with this client.
-    increaseResourceInstanceCount(clientId, name);
-
     return Status::ok();
 }
 
-void ResourceManagerService::increaseResourceInstanceCount(int64_t clientId,
-                                                           const std::string& name) {
-    // Check whether this client has been looked into already.
-    if (mClientIdSet.find(clientId) == mClientIdSet.end()) {
-        mClientIdSet.insert(clientId);
-        // Update the resource instance count.
-        auto found = mConcurrentResourceCountMap.find(name);
-        if (found == mConcurrentResourceCountMap.end()) {
-            mConcurrentResourceCountMap[name] = 1;
-        } else {
-            found->second++;
-        }
-    }
-}
-
-void ResourceManagerService::decreaseResourceInstanceCount(int64_t clientId,
-                                                           const std::string& name) {
-    // Since this client has been removed, remove it from mClientIdSet
-    mClientIdSet.erase(clientId);
-    // Update the resource instance count also.
-    auto found = mConcurrentResourceCountMap.find(name);
-    if (found != mConcurrentResourceCountMap.end()) {
-        if (found->second == 1) {
-            mConcurrentResourceCountMap.erase(found);
-        } else {
-            found->second--;
-        }
-    }
-}
-
 Status ResourceManagerService::removeResource(const ClientInfoParcel& clientInfo,
         const std::vector<MediaResourceParcel>& resources) {
     int32_t pid = clientInfo.pid;
@@ -657,9 +618,8 @@
         onLastRemoved(it->second, info);
     }
 
-    // Since this client has been removed, decrease the corresponding
-    // resources instance count.
-    decreaseResourceInstanceCount(clientId, info.name);
+    // Since this client has been removed, update the metrics collector.
+    mResourceManagerMetrics->notifyClientReleased(clientInfo);
 
     removeCookieAndUnlink_l(info.client, info.cookie);
 
@@ -791,73 +751,19 @@
 void ResourceManagerService::pushReclaimAtom(const ClientInfoParcel& clientInfo,
                         const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
                         const PidUidVector& idVector, bool reclaimed) {
-    // Construct the metrics for codec reclaim as a pushed atom.
-    // 1. Information about the requester.
-    //  - UID and the priority (oom score)
     int32_t callingPid = clientInfo.pid;
-    int32_t requesterUid = clientInfo.uid;
-    std::string clientName = clientInfo.name;
     int requesterPriority = -1;
     getPriority_l(callingPid, &requesterPriority);
+    std::vector<int> priorities;
+    priorities.push_back(requesterPriority);
 
-    //  2. Information about the codec.
-    //  - Name of the codec requested
-    //  - Number of concurrent codecs running.
-    int32_t noOfConcurrentCodecs = 0;
-    auto found = mConcurrentResourceCountMap.find(clientName);
-    if (found != mConcurrentResourceCountMap.end()) {
-        noOfConcurrentCodecs = found->second;
-    }
-
-    // 3. Information about the Reclaim:
-    // - Status of reclaim request
-    // - How many codecs are reclaimed
-    // - For each codecs reclaimed, information of the process that it belonged to:
-    //    - UID and the Priority (oom score)
-    int32_t reclaimStatus = MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
-    if (!reclaimed) {
-      if (clients.size() == 0) {
-        // No clients to reclaim from
-        reclaimStatus =
-            MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
-      } else {
-        // Couldn't reclaim resources from the clients
-        reclaimStatus =
-            MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
-      }
-    }
-    int32_t noOfCodecsReclaimed = clients.size();
-    int32_t targetIndex = 1;
-    for (const auto& id : idVector) {
-        int32_t targetUid = id.second;
+    for (PidUidVector::const_reference id : idVector) {
         int targetPriority = -1;
         getPriority_l(id.first, &targetPriority);
-        // Post the pushed atom
-        int result = stats_write(
-            MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
-            requesterUid,
-            requesterPriority,
-            clientName.c_str(),
-            noOfConcurrentCodecs,
-            reclaimStatus,
-            noOfCodecsReclaimed,
-            targetIndex,
-            targetUid,
-            targetPriority);
-        ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
-              "Requester[pid(%d): uid(%d): priority(%d)] "
-              "Codec: [%s] "
-              "No of concurrent codecs: %d "
-              "Reclaim Status: %d "
-              "No of codecs reclaimed: %d "
-              "Target[%d][pid(%d): uid(%d): priority(%d)] "
-              "Atom Size: %d",
-              __func__, callingPid, requesterUid, requesterPriority,
-              clientName.c_str(), noOfConcurrentCodecs,
-              reclaimStatus, noOfCodecsReclaimed,
-              targetIndex, id.first, targetUid, targetPriority, result);
-        targetIndex++;
+        priorities.push_back(targetPriority);
     }
+    mResourceManagerMetrics->pushReclaimAtom(clientInfo, priorities, clients,
+                                             idVector, reclaimed);
 }
 
 bool ResourceManagerService::reclaimUnconditionallyFrom(
@@ -933,6 +839,7 @@
         mOverridePidMap.erase(originalPid);
         if (newPid != -1) {
             mOverridePidMap.emplace(originalPid, newPid);
+            mResourceManagerMetrics->addPid(newPid);
         }
     }
 
@@ -966,8 +873,12 @@
         return Status::fromServiceSpecificError(BAD_VALUE);
     }
 
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(pid),
+                                .uid = 0,
+                                .id = 0,
+                                .name = "<unknown client>"};
     uintptr_t cookie = addCookieAndLink_l(client,
-            new OverrideProcessInfoDeathNotifier(ref<ResourceManagerService>(), pid));
+            new OverrideProcessInfoDeathNotifier(ref<ResourceManagerService>(), clientInfo));
 
     mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{cookie, client});
 
@@ -1282,4 +1193,27 @@
     return true;
 }
 
+Status ResourceManagerService::notifyClientCreated(const ClientInfoParcel& clientInfo) {
+    mResourceManagerMetrics->notifyClientCreated(clientInfo);
+    return Status::ok();
+}
+
+Status ResourceManagerService::notifyClientStarted(const ClientConfigParcel& clientConfig) {
+    mResourceManagerMetrics->notifyClientStarted(clientConfig);
+    return Status::ok();
+}
+
+Status ResourceManagerService::notifyClientStopped(const ClientConfigParcel& clientConfig) {
+    mResourceManagerMetrics->notifyClientStopped(clientConfig);
+    return Status::ok();
+}
+
+long ResourceManagerService::getPeakConcurrentPixelCount(int pid) const {
+    return mResourceManagerMetrics->getPeakConcurrentPixelCount(pid);
+}
+
+long ResourceManagerService::getCurrentConcurrentPixelCount(int pid) const {
+    return mResourceManagerMetrics->getCurrentConcurrentPixelCount(pid);
+}
+
 } // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index 0016a19..b9756ae 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -39,6 +39,7 @@
 class ResourceObserverService;
 class ServiceLog;
 struct ProcessInfoInterface;
+class ResourceManagerMetrics;
 
 using Status = ::ndk::ScopedAStatus;
 using ::aidl::android::media::IResourceManagerClient;
@@ -46,6 +47,7 @@
 using ::aidl::android::media::MediaResourceParcel;
 using ::aidl::android::media::MediaResourcePolicyParcel;
 using ::aidl::android::media::ClientInfoParcel;
+using ::aidl::android::media::ClientConfigParcel;
 
 typedef std::map<std::tuple<
         MediaResource::Type, MediaResource::SubType, std::vector<uint8_t>>,
@@ -61,6 +63,7 @@
     bool pendingRemoval{false};
 };
 
+// vector of <PID, UID>
 typedef std::vector<std::pair<int32_t, uid_t>> PidUidVector;
 
 // TODO: convert these to std::map
@@ -118,6 +121,12 @@
 
     Status removeResource(const ClientInfoParcel& clientInfo, bool checkValid);
 
+    Status notifyClientCreated(const ClientInfoParcel& clientInfo) override;
+
+    Status notifyClientStarted(const ClientConfigParcel& clientConfig) override;
+
+    Status notifyClientStopped(const ClientConfigParcel& clientConfig) override;
+
 private:
     friend class ResourceManagerServiceTest;
     friend class DeathNotifier;
@@ -182,15 +191,15 @@
     void removeCookieAndUnlink_l(const std::shared_ptr<IResourceManagerClient>& client,
                                  uintptr_t cookie);
 
-    // To increase/decrease the number of instances of a given resource
-    // associated with a client.
-    void increaseResourceInstanceCount(int64_t clientId, const std::string& name);
-    void decreaseResourceInstanceCount(int64_t clientId, const std::string& name);
-
     void pushReclaimAtom(const ClientInfoParcel& clientInfo,
                          const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
                          const PidUidVector& idList, bool reclaimed);
 
+    // Get the peak concurrent pixel count (associated with the video codecs) for the process.
+    long getPeakConcurrentPixelCount(int pid) const;
+    // Get the current concurrent pixel count (associated with the video codecs) for the process.
+    long getCurrentConcurrentPixelCount(int pid) const;
+
     mutable Mutex mLock;
     sp<ProcessInfoInterface> mProcessInfo;
     sp<SystemCallbackInterface> mSystemCB;
@@ -211,11 +220,7 @@
     static std::map<uintptr_t, sp<DeathNotifier> > sCookieToDeathNotifierMap
             GUARDED_BY(sCookieLock);
     std::shared_ptr<ResourceObserverService> mObserverService;
-
-    // List of active clients
-    std::set<int64_t> mClientIdSet;
-    // Map of resources (name) and number of concurrent instances
-    std::map<std::string, int> mConcurrentResourceCountMap;
+    std::unique_ptr<ResourceManagerMetrics> mResourceManagerMetrics;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/services/mediaresourcemanager/UidObserver.cpp b/services/mediaresourcemanager/UidObserver.cpp
new file mode 100644
index 0000000..f321ebc
--- /dev/null
+++ b/services/mediaresourcemanager/UidObserver.cpp
@@ -0,0 +1,182 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceManagerMetrics"
+
+#include <android/binder_process.h>
+#include <mediautils/ProcessInfo.h>
+#include "UidObserver.h"
+
+namespace {
+const char* kActivityServiceName = "activity";
+}; // namespace anonymous
+
+namespace android {
+
+UidObserver::UidObserver(const sp<ProcessInfoInterface>& processInfo,
+                         OnProcessTerminated onProcessTerminated) :
+     mRegistered(false),
+     mOnProcessTerminated(std::move(onProcessTerminated)),
+     mProcessInfo(processInfo) {
+}
+
+UidObserver::~UidObserver() {
+    stop();
+}
+
+void UidObserver::start() {
+    // Use checkService() to see whether the activity service is available.
+    // If it is not available, register for notifications instead of blocking
+    // until the service is ready.
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder = sm->checkService(String16(kActivityServiceName));
+    if (!binder) {
+        sm->registerForNotifications(String16(kActivityServiceName), this);
+    } else {
+        registerWithActivityManager();
+    }
+}
+
+void UidObserver::stop() {
+    std::scoped_lock lock{mLock};
+
+    if (mRegistered) {
+        // Unregister from ActivityManager.
+        mAm.unregisterUidObserver(this);
+        mAm.unlinkToDeath(this);
+        mRegistered = false;
+    }
+}
+
+void UidObserver::add(int pid, uid_t uid) {
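+    // Record the pid under its uid. If we are not yet registered with
+    // ActivityManager, retry registration outside the lock, since start() may end
+    // up in registerWithActivityManager(), which acquires mLock itself.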
+    bool needToRegister = false;
+    {
+        std::scoped_lock lock(mLock);
+        std::map<uid_t, std::set<int32_t>>::iterator found = mUids.find(uid);
+        if (found != mUids.end()) {
+            found->second.insert(pid);
+        } else {
+            std::set<int32_t> pids{pid};
+            mUids.emplace(uid, std::move(pids));
+        }
+        needToRegister = !mRegistered;
+    }
+    if (needToRegister) {
+        start();
+    }
+}
+
+void UidObserver::registerWithActivityManager() {
+    std::scoped_lock lock{mLock};
+
+    if (mRegistered) {
+        return;
+    }
+    status_t res = mAm.linkToDeath(this);
+    // Register for UID gone.
+    mAm.registerUidObserver(this, ActivityManager::UID_OBSERVER_GONE,
+                            ActivityManager::PROCESS_STATE_UNKNOWN,
+                            String16("mediaserver"));
+    if (res == OK) {
+        mRegistered = true;
+        ALOGV("UidObserver: Registered with ActivityManager");
+    }
+}
+
+void UidObserver::onServiceRegistration(const String16& name, const sp<IBinder>&) {
+    if (name != String16(kActivityServiceName)) {
+        return;
+    }
+
+    registerWithActivityManager();
+}
+
+void UidObserver::getTerminatedProcesses(const std::vector<int32_t>& pids,
+                                         std::vector<int32_t>& terminatedPids) {
+    std::vector<bool> existent;
+    terminatedPids.clear();
+    if (mProcessInfo->checkProcessExistent(pids, &existent)) {
+        for (size_t index = 0; index < existent.size(); index++) {
+            if (!existent[index]) {
+                // This process has been terminated already.
+                terminatedPids.push_back(pids[index]);
+            }
+        }
+    }
+}
+
+// This callback will be issued for every UID that is gone/terminated.
+// Since one UID could have multiple PIDs, this callback can be issued
+// multiple times with that same UID for each activity/pid.
+// So, we need to check which one among the PIDs (that share the same UID)
+// is gone.
+void UidObserver::onUidGone(uid_t uid, bool /*disabled*/) {
+    std::vector<int32_t> terminatedPids;
+    {
+        std::scoped_lock lock{mLock};
+        std::map<uid_t, std::set<int32_t>>::iterator found = mUids.find(uid);
+        if (found != mUids.end()) {
+            if (found->second.size() == 1) {
+                terminatedPids.push_back(*(found->second.begin()));
+                // Only one PID. So we can remove this UID entry.
+                mUids.erase(found);
+            } else {
+                // There are multiple PIDs with the same UID.
+                // Get the list of all terminated PIDs (with the same UID)
+                std::vector<int32_t> pids;
+                std::copy(found->second.begin(), found->second.end(), std::back_inserter(pids));
+                getTerminatedProcesses(pids, terminatedPids);
+                for (int32_t pid : terminatedPids) {
+                    // Remove all the terminated PIDs
+                    found->second.erase(pid);
+                }
+                // If all PIDs under this UID have terminated, remove this UID entry.
+                if (found->second.size() == 0) {
+                    mUids.erase(uid);
+                }
+            }
+        }
+    }
+
+    for (int32_t pid : terminatedPids) {
+        mOnProcessTerminated(pid, uid);
+    }
+}
+
+void UidObserver::onUidActive(uid_t /*uid*/) {
+}
+
+void UidObserver::onUidIdle(uid_t /*uid*/, bool /*disabled*/) {
+}
+
+void UidObserver::onUidStateChanged(uid_t /*uid*/,
+                                    int32_t /*procState*/,
+                                    int64_t /*procStateSeq*/,
+                                    int32_t /*capability*/) {
+}
+
+void UidObserver::onUidProcAdjChanged(uid_t /*uid*/) {
+}
+
+void UidObserver::binderDied(const wp<IBinder>& /*who*/) {
+    std::scoped_lock lock{mLock};
+    ALOGE("UidObserver: ActivityManager has died");
+    mRegistered = false;
+}
+
+}  // namespace android
diff --git a/services/mediaresourcemanager/UidObserver.h b/services/mediaresourcemanager/UidObserver.h
new file mode 100644
index 0000000..ed76839
--- /dev/null
+++ b/services/mediaresourcemanager/UidObserver.h
@@ -0,0 +1,116 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_UIDOBSERVER_H_
+#define ANDROID_MEDIA_UIDOBSERVER_H_
+
+#include <map>
+#include <set>
+#include <mutex>
+#include <functional>
+#include <binder/ActivityManager.h>
+#include <binder/IUidObserver.h>
+#include <binder/BinderService.h>
+
+namespace android {
+
+using OnProcessTerminated = std::function<void(int32_t pid, uid_t)>;
+
+struct ProcessInfoInterface;
+
+//
+// UidObserver class
+//
+// This class implements a callback mechanism to notify the termination of the
+// process/applications that are registered with this class.
+//
+// It uses ActivityManager to get notified when a UID no longer exists.
+// Since one UID could have multiple PIDs, it uses ActivityManager
+// (through ProcessInfoInterface) to query for the process/application
+// state for the pids.
+//
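+// A minimal usage sketch (for illustration; ResourceManagerMetrics wires it up
+// roughly like this):
+//
+//   sp<UidObserver> observer = new UidObserver(processInfo,
+//           [](int32_t pid, uid_t uid) { /* process terminated */ });
+//   observer->start();
+//   observer->add(pid, uid);   // watch this pid/uid
+//   ...
+//   observer->stop();
+//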
+class UidObserver :
+        public BnUidObserver,
+        public virtual IBinder::DeathRecipient,
+        public virtual IServiceManager::LocalRegistrationCallback {
+public:
+    explicit UidObserver(const sp<ProcessInfoInterface>& processInfo,
+                         OnProcessTerminated onProcessTerminated);
+    virtual ~UidObserver();
+
+    // Start registration (with ActivityManager).
+    void start();
+    // Stop registration (with ActivityManager).
+    void stop();
+
+    // Add this pid/uid pair to the set of UIDs to be observed.
+    void add(int pid, uid_t uid);
+
+private:
+    UidObserver() = delete;
+    UidObserver(const UidObserver&) = delete;
+    UidObserver(UidObserver&&) = delete;
+    UidObserver& operator=(const UidObserver&) = delete;
+    UidObserver& operator=(UidObserver&&) = delete;
+
+    // IUidObserver implementation.
+    void onUidGone(uid_t uid, bool disabled) override;
+    void onUidActive(uid_t uid) override;
+    void onUidIdle(uid_t uid, bool disabled) override;
+    void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
+            int32_t capability) override;
+    void onUidProcAdjChanged(uid_t uid) override;
+
+    // IServiceManager::LocalRegistrationCallback implementation.
+    void onServiceRegistration(const String16& name,
+                    const sp<IBinder>& binder) override;
+
+    // IBinder::DeathRecipient implementation.
+    void binderDied(const wp<IBinder> &who) override;
+
+    // Registers with ActivityManager for the UID gone event
+    // to track the termination of applications.
+    void registerWithActivityManager();
+
+    /*
+     * For a list of input pids, it will check whether the corresponding
+     * processes are already terminated or not.
+     *
+     * @param[in] pids List of pids to check whether they are terminated.
+     * @param[out] terminatedPids List of pid of terminated processes.
+     *
+     * Upon return, terminatedPids contains all the terminated pids,
+     * which will be a subset of the input pids (in that order).
+     * If none of the input pids have terminated, terminatedPids will be empty.
+     */
+    void getTerminatedProcesses(const std::vector<int32_t>& pids,
+                                std::vector<int32_t>& terminatedPids);
+
+    bool mRegistered = false;
+    std::mutex mLock;
+    ActivityManager mAm;
+    // map of UID and all the PIDs associated with it
+    // as one UID could have multiple PIDs.
+    std::map<uid_t, std::set<int32_t>> mUids;
+    OnProcessTerminated mOnProcessTerminated;
+    sp<ProcessInfoInterface> mProcessInfo;
+};
+
+}  // namespace android
+
+#endif  //ANDROID_MEDIA_UIDOBSERVER_H_
diff --git a/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl b/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl
new file mode 100644
index 0000000..3c9c8c7
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl
@@ -0,0 +1,65 @@
+/**
+ * Copyright (c) 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.ClientInfoParcel;
+import android.media.MediaResourceSubType;
+
+/**
+ * Description of a Client(codec) configuration.
+ *
+ * {@hide}
+ */
+parcelable ClientConfigParcel {
+    /**
+     * Client info.
+     */
+    ClientInfoParcel clientInfo;
+
+    /**
+     * Type of codec (Audio/Video/Image).
+     */
+    MediaResourceSubType codecType;
+
+    /**
+     * true if this is an encoder, false if this is a decoder.
+     */
+    boolean isEncoder;
+
+    /**
+     * true if this is a hardware codec, false otherwise.
+     */
+    boolean isHardware;
+
+    /*
+     * Video Resolution of the codec when it was configured, as width and height (in pixels).
+     */
+    int width;
+    int height;
+
+    /*
+     * Timestamp (in microseconds) when this configuration is created.
+     */
+    long timeStamp;
+    /*
+     * ID associated with the Codec.
+     * This will be used by the metrics:
+     * - Associate MediaCodecStarted with MediaCodecStopped Atom.
+     * - Correlate MediaCodecReported Atom for codec configuration parameters.
+     */
+    long id;
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
index 30ad41b..fcade38 100644
--- a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
@@ -20,6 +20,7 @@
 import android.media.MediaResourceParcel;
 import android.media.MediaResourcePolicyParcel;
 import android.media.ClientInfoParcel;
+import android.media.ClientConfigParcel;
 
 /**
  * ResourceManagerService interface that keeps track of media resource
@@ -125,4 +126,34 @@
      * @param pid pid from which resources will be reclaimed.
      */
     void reclaimResourcesFromClientsPendingRemoval(int pid);
+
+    /**
+     * Notify that the client has been created.
+     *
+     * This call is made to collect the (concurrent) metrics about the
+     * resources associated with the Codec (and also DRM sessions).
+     *
+     * @param clientInfo Information of the client.
+     */
+    void notifyClientCreated(in ClientInfoParcel clientInfo);
+
+    /**
+     * Notify that the client has been started.
+     *
+     * This call is made to collect the (concurrent) metrics about the
+     * resources associated with the Codec (and also DRM sessions).
+     *
+     * @param clientConfig Configuration information of the client.
+     */
+    void notifyClientStarted(in ClientConfigParcel clientConfig);
+
+    /**
+     * Notify that the client has been stopped.
+     *
+     * This call is made to collect the (concurrent) metrics about the
+     * resources associated with the Codec (and also DRM sessions).
+     *
+     * @param clientConfig Configuration information of the client.
+     */
+    void notifyClientStopped(in ClientConfigParcel clientConfig);
 }
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index 27d45d5..d98974f 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -45,6 +45,7 @@
         "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
+        "libactivitymanager_aidl",
     ],
     fuzz_config: {
         cc: [
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 16c5a4c..f903c62 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -23,6 +23,7 @@
         "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
+        "libactivitymanager_aidl",
     ],
     include_dirs: [
         "frameworks/av/include",
@@ -72,6 +73,7 @@
         "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
+        "libactivitymanager_aidl",
     ],
     include_dirs: [
         "frameworks/av/include",
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
index 8fe2505..474ff0f 100644
--- a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -15,6 +15,7 @@
  */
 
 #include <gtest/gtest.h>
+#include <android/binder_process.h>
 
 #include "ResourceManagerService.h"
 #include <aidl/android/media/BnResourceManagerClient.h>
@@ -197,13 +198,20 @@
         return static_cast<TestClient*>(testClient.get());
     }
 
-    ResourceManagerServiceTestBase()
-        : mSystemCB(new TestSystemCallback()),
-          mService(::ndk::SharedRefBase::make<ResourceManagerService>(
-                  new TestProcessInfo, mSystemCB)),
-          mTestClient1(::ndk::SharedRefBase::make<TestClient>(kTestPid1, kTestUid1, mService)),
-          mTestClient2(::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService)),
-          mTestClient3(::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService)) {
+    ResourceManagerServiceTestBase() {
+        ALOGI("ResourceManagerServiceTestBase created");
+    }
+
+    void SetUp() override {
+        // Need thread pool to receive callbacks, otherwise oneway callbacks are
+        // silently ignored.
+        ABinderProcess_startThreadPool();
+        mSystemCB = new TestSystemCallback();
+        mService = ::ndk::SharedRefBase::make<ResourceManagerService>(
+            new TestProcessInfo, mSystemCB);
+        mTestClient1 = ::ndk::SharedRefBase::make<TestClient>(kTestPid1, kTestUid1, mService);
+        mTestClient2 = ::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService);
+        mTestClient3 = ::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService);
     }
 
     std::shared_ptr<IResourceManagerClient> createTestClient(int pid, int uid) {
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index 41cccb8..4e575f0 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -1367,6 +1367,143 @@
         // CPU boost is not expected to be reclaimed when marked as pending removal
         EXPECT_FALSE(toTestClient(cpuBoostMarkedClient)->checkIfReclaimedAndReset());
     }
+
+    inline void initClientConfigParcel(bool encoder, bool hw,
+                                       int32_t width, int32_t height,
+                                       int64_t id,
+                                       const ClientInfoParcel& clientInfo,
+                                       ClientConfigParcel& clientConfig) {
+        clientConfig.codecType = MediaResource::SubType::kVideoCodec;
+        clientConfig.isEncoder = encoder;
+        clientConfig.isHardware = hw;
+        clientConfig.width = width;
+        clientConfig.height = height;
+        clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
+        clientConfig.id = id;
+        clientConfig.clientInfo = clientInfo;
+    }
+
+    void testConcurrentCodecs() {
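+        // Exercises the peak/current concurrent pixel count bookkeeping in
+        // ResourceManagerMetrics through the notifyClientStarted()/Stopped() path,
+        // across two processes (kTestPid1, kTestPid2) with two clients each.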
+        std::shared_ptr<IResourceManagerClient> testClient4 =
+            createTestClient(kTestPid1, kTestUid1);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient2),
+                                     .name = "none"};
+        ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient3),
+                                     .name = "none"};
+        ClientInfoParcel client4Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(testClient4),
+                                     .name = "none"};
+        ClientConfigParcel client1Config;
+        ClientConfigParcel client2Config;
+        ClientConfigParcel client3Config;
+        ClientConfigParcel client4Config;
+
+        // HW Video Encoder @ 1080P.
+        initClientConfigParcel(true, true, 1920, 1080, 11111111,
+                               client1Info, client1Config);
+        // HW Video Decoder @ 4K.
+        initClientConfigParcel(false, true, 2160, 3840, 22222222,
+                               client2Info, client2Config);
+        // SW Video Encoder @ 1080P.
+        initClientConfigParcel(true, true, 1920, 1080, 33333333,
+                               client3Info, client3Config);
+        // SW Video Decoder @ 4K.
+        initClientConfigParcel(true, true, 2160, 3840, 44444444,
+                               client4Info, client4Config);
+
+        // Start client1 at 1080P.
+        mService->notifyClientStarted(client1Config);
+        long peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+        long currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+        EXPECT_TRUE(peakPixelCountP1 == client1Config.width * client1Config.height);
+        EXPECT_TRUE(currentPixelCountP1 == client1Config.width * client1Config.height);
+
+        // Stop client1.
+        mService->notifyClientStopped(client1Config);
+        peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+        currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+        EXPECT_TRUE(peakPixelCountP1 == client1Config.width * client1Config.height);
+        EXPECT_TRUE(currentPixelCountP1 == 0);
+
+        // Start client1 at 1080P.
+        mService->notifyClientStarted(client1Config);
+        // Start client2 at 4K.
+        mService->notifyClientStarted(client2Config);
+
+        // Verify the peak and current concurrent pixel counts for both processes
+        // (kTestPid1, kTestPid2).
+        peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+        currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+        long peakPixelCountP2 = mService->getPeakConcurrentPixelCount(kTestPid2);
+        long currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+        EXPECT_TRUE(peakPixelCountP1 == client1Config.width * client1Config.height);
+        EXPECT_TRUE(currentPixelCountP1 == client1Config.width * client1Config.height);
+        EXPECT_TRUE(peakPixelCountP2 == client2Config.width * client2Config.height);
+        EXPECT_TRUE(currentPixelCountP2 == client2Config.width * client2Config.height);
+
+        // Start client3 at 1080P.
+        mService->notifyClientStarted(client3Config);
+        // Start client4 at 4K.
+        mService->notifyClientStarted(client4Config);
+
+        // Verify the peak and current concurrent pixel counts for both processes
+        // (kTestPid1, kTestPid2).
+        peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+        currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+        peakPixelCountP2 = mService->getPeakConcurrentPixelCount(kTestPid2);
+        currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+        EXPECT_TRUE(peakPixelCountP1 ==
+            (client1Config.width * client1Config.height +
+             client4Config.width * client4Config.height));
+        EXPECT_TRUE(currentPixelCountP1 ==
+            (client1Config.width * client1Config.height +
+             client4Config.width * client4Config.height));
+        EXPECT_TRUE(peakPixelCountP2 ==
+            (client2Config.width * client2Config.height +
+             client3Config.width * client3Config.height));
+        EXPECT_TRUE(currentPixelCountP2 ==
+            (client2Config.width * client2Config.height +
+             client3Config.width * client3Config.height));
+
+        // Stop client4
+        mService->notifyClientStopped(client4Config);
+        currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+        EXPECT_TRUE(currentPixelCountP1 == client1Config.width * client1Config.height);
+
+        // Stop client1
+        mService->notifyClientStopped(client1Config);
+
+        // Stop client2
+        mService->notifyClientStopped(client2Config);
+        currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+        EXPECT_TRUE(currentPixelCountP2 == client3Config.width * client3Config.height);
+        // Stop client3
+        mService->notifyClientStopped(client3Config);
+
+        // Verify the peak and current concurrent pixel counts for both processes
+        // (kTestPid1, kTestPid2).
+        peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+        currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+        peakPixelCountP2 = mService->getPeakConcurrentPixelCount(kTestPid2);
+        currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+        EXPECT_TRUE(peakPixelCountP1 ==
+            (client1Config.width * client1Config.height +
+             client4Config.width * client4Config.height));
+        EXPECT_TRUE(currentPixelCountP1 == 0);
+        EXPECT_TRUE(peakPixelCountP2 ==
+            (client2Config.width * client2Config.height +
+             client3Config.width * client3Config.height));
+        EXPECT_TRUE(currentPixelCountP2 == 0);
+    }
 };
 
 TEST_F(ResourceManagerServiceTest, config) {
@@ -1451,4 +1588,8 @@
     testReclaimResourcesFromMarkedClients_removesBiggestMarkedClientForSomeResources();
 }
 
+TEST_F(ResourceManagerServiceTest, concurrentCodecs) {
+    testConcurrentCodecs();
+}
+
 } // namespace android
diff --git a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
index a0d728c..85769d5 100644
--- a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
@@ -166,11 +166,14 @@
 
 class ResourceObserverServiceTest : public ResourceManagerServiceTestBase {
 public:
-    ResourceObserverServiceTest() : ResourceManagerServiceTestBase(),
-        mObserverService(::ndk::SharedRefBase::make<ResourceObserverService>()),
-        mTestObserver1(::ndk::SharedRefBase::make<TestObserver>("observer1")),
-        mTestObserver2(::ndk::SharedRefBase::make<TestObserver>("observer2")),
-        mTestObserver3(::ndk::SharedRefBase::make<TestObserver>("observer3")) {
+    ResourceObserverServiceTest() : ResourceManagerServiceTestBase() {}
+
+    void SetUp() override {
+        ResourceManagerServiceTestBase::SetUp();
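+        // The base class SetUp() starts the binder thread pool and creates mService,
+        // so the observer service and observers can safely be created here.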
+        mObserverService = ::ndk::SharedRefBase::make<ResourceObserverService>();
+        mTestObserver1 = ::ndk::SharedRefBase::make<TestObserver>("observer1");
+        mTestObserver2 = ::ndk::SharedRefBase::make<TestObserver>("observer2");
+        mTestObserver3 = ::ndk::SharedRefBase::make<TestObserver>("observer3");
         mService->setObserverService(mObserverService);
     }
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 51ef2d9..65854c8 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -418,7 +418,16 @@
     while (mThreadEnabled.load()) {
         loopCount++;
         int64_t timeoutNanos = -1;
-        if (isRunning() || (isIdle_l() && !isStandby_l())) {
+        if (isDisconnected_l()) {
+            if (!isStandby_l()) {
+                // If the stream is disconnected but not in standby mode, wait until standby time.
+                timeoutNanos = standbyTime - AudioClock::getNanoseconds();
+                timeoutNanos = std::max<int64_t>(0, timeoutNanos);
+            } // else {
+                // If the stream is disconnected and in standby mode, keep `timeoutNanos` as
+                // -1 so the thread waits indefinitely for the next command, since the
+                // stream can only be closed.
+            // }
+        } else if (isRunning() || (isIdle_l() && !isStandby_l())) {
             timeoutNanos = (isRunning() ? std::min(nextTimestampReportTime, nextDataReportTime)
                                         : standbyTime) - AudioClock::getNanoseconds();
             timeoutNanos = std::max<int64_t>(0, timeoutNanos);
@@ -430,7 +439,7 @@
             break;
         }
 
-        if (isRunning()) {
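+        // Only report data and timestamps while the stream is running and still connected.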
+        if (isRunning() && !isDisconnected_l()) {
             auto currentTimestamp = AudioClock::getNanoseconds();
             if (currentTimestamp >= nextDataReportTime) {
                 reportData_l();
@@ -441,12 +450,11 @@
                 if (sendCurrentTimestamp_l() != AAUDIO_OK) {
                     ALOGE("Failed to send current timestamp, stop updating timestamp");
                     disconnect_l();
-                } else {
-                    nextTimestampReportTime = timestampScheduler.nextAbsoluteTime();
                 }
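+                // Schedule the next timestamp report regardless of whether the send
+                // succeeded, so the loop's wait timeout stays well-defined.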
+                nextTimestampReportTime = timestampScheduler.nextAbsoluteTime();
             }
         }
-        if (isIdle_l() && AudioClock::getNanoseconds() >= standbyTime) {
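+        // Disconnected streams also enter standby once the standby time has passed.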
+        if ((isIdle_l() || isDisconnected_l()) && AudioClock::getNanoseconds() >= standbyTime) {
             aaudio_result_t result = standby_l();
             if (result != AAUDIO_OK) {
                 // If standby failed because of the function is not implemented, there is no