Merge "Effect AIDL: relax dynamics processing effect parameter validations" into main
diff --git a/media/OWNERS b/media/OWNERS
index 976fb9e..b926075 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -14,5 +14,8 @@
 taklee@google.com
 wonsik@google.com
 
+# For TEST_MAPPING tv-presubmit and tv-postsubmit configurations:
+per-file TEST_MAPPING = blindahl@google.com
+
 # go/android-fwk-media-solutions for info on areas of ownership.
 include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index cd5d354..d388596 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -44,5 +44,11 @@
             ],
             "file_patterns": ["(?i)drm|crypto"]
         }
+    ],
+    // Postsubmit tests for TV devices
+    "tv-postsubmit": [
+       {
+           "name": "DecoderRenderTest"
+       }
     ]
 }
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index a2b6a82..b3c02eb 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -88,6 +88,26 @@
 }
 
 flag {
+  name: "set_callback_stall"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for setCallback stall"
+  bug: "326010604"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
+  name: "set_state_early"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for setting state early to avoid a race condition"
+  bug: "298613711"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
   name: "teamfood"
   namespace: "codec_fwk"
   description: "Feature flag to track teamfood population"
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index 256bcb8..722b13a 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -29,6 +29,12 @@
 
 #include "C2SoftAomEnc.h"
 
+/* Quantization param values defined by the spec */
+#define AOM_QP_MIN 0
+#define AOM_QP_MAX 63
+#define AOM_QP_DEFAULT_MIN AOM_QP_MIN
+#define AOM_QP_DEFAULT_MAX AOM_QP_MAX
+
 namespace android {
 
 constexpr char COMPONENT_NAME[] = "c2.android.av1.encoder";
@@ -175,6 +181,19 @@
                                      .inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
                     .withSetter(CodedColorAspectsSetter, mColorAspects)
                     .build());
+
+    addParameter(
+            DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+            .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+                    0 /* flexCount */, 0u /* stream */))
+            .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+                            {C2Config::I_FRAME, C2Config::P_FRAME}),
+                         C2F(mPictureQuantization, m.values[0].min).inRange(
+                            AOM_QP_DEFAULT_MIN, AOM_QP_DEFAULT_MAX),
+                         C2F(mPictureQuantization, m.values[0].max).inRange(
+                            AOM_QP_DEFAULT_MIN, AOM_QP_DEFAULT_MAX)})
+            .withSetter(PictureQuantizationSetter)
+            .build());
 }
 
 C2R C2SoftAomEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me) {
@@ -307,6 +326,54 @@
     return C2R::Ok();
 }
 
+C2R C2SoftAomEnc::IntfImpl::PictureQuantizationSetter(
+        bool mayBlock, C2P<C2StreamPictureQuantizationTuning::output>& me) {
+    (void)mayBlock;
+    int32_t iMin = AOM_QP_DEFAULT_MIN, pMin = AOM_QP_DEFAULT_MIN;
+    int32_t iMax = AOM_QP_DEFAULT_MAX, pMax = AOM_QP_DEFAULT_MAX;
+    for (size_t i = 0; i < me.v.flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+        // layerMin is clamped to [AOM_QP_MIN, layerMax] to avoid error
+        // cases where layer.min > layer.max
+        int32_t layerMax = std::clamp(layer.max, AOM_QP_MIN, AOM_QP_MAX);
+        int32_t layerMin = std::clamp(layer.min, AOM_QP_MIN, layerMax);
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+            iMax = layerMax;
+            iMin = layerMin;
+            ALOGV("iMin %d iMax %d", iMin, iMax);
+        } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+            pMax = layerMax;
+            pMin = layerMin;
+            ALOGV("pMin %d pMax %d", pMin, pMax);
+        }
+    }
+    ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d",
+          iMin, iMax, pMin, pMax);
+
+    // the aom library takes a single QP range for both I and P pictures
+    int32_t maxFrameQP = std::min(iMax, pMax);
+    int32_t minFrameQP = std::max(iMin, pMin);
+    if (minFrameQP > maxFrameQP) {
+        minFrameQP = maxFrameQP;
+    }
+    // put them back into the structure
+    for (size_t i = 0; i < me.v.flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+            me.set().m.values[i].max = maxFrameQP;
+            me.set().m.values[i].min = minFrameQP;
+        }
+        else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+            me.set().m.values[i].max = maxFrameQP;
+            me.set().m.values[i].min = minFrameQP;
+        }
+    }
+    ALOGV("PictureQuantizationSetter(exit): minFrameQP = %d maxFrameQP = %d",
+          minFrameQP, maxFrameQP);
+    return C2R::Ok();
+}
+
 uint32_t C2SoftAomEnc::IntfImpl::getLevel_l() const {
         return mProfileLevel->level - LEVEL_AV1_2;
 }
@@ -558,6 +625,7 @@
         mQuality = mIntf->getQuality_l();
         mComplexity = mIntf->getComplexity_l();
         mAV1EncLevel = mIntf->getLevel_l();
+        mQpBounds = mIntf->getPictureQuantization_l();
     }
 
 
@@ -575,6 +643,18 @@
             break;
     }
 
+    if (mQpBounds->flexCount() > 0) {
+        // read min max qp for sequence
+        for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
+            const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
+            if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+                mMaxQuantizer = layer.max;
+                mMinQuantizer = layer.min;
+                break;
+            }
+        }
+    }
+
     mCodecInterface = aom_codec_av1_cx();
     if (!mCodecInterface) goto CleanUp;
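
For illustration only, not part of the change: the new PictureQuantizationSetter clamps each requested layer into the AV1 spec range [0, 63] and then intersects the I- and P-frame ranges, because the aom encoder accepts a single QP range. A minimal standalone C++ sketch of that rule, with hypothetical layer values:

#include <algorithm>
#include <cstdio>

struct QpLayer { bool isIFrame; int min; int max; };

// Mirrors the merge in PictureQuantizationSetter: clamp each layer into the spec
// range, then collapse the I and P ranges into the single range the encoder uses.
static void mergeQpBounds(const QpLayer* layers, int count, int* outMin, int* outMax) {
    int iMin = 0, iMax = 63, pMin = 0, pMax = 63;                     // spec defaults
    for (int i = 0; i < count; ++i) {
        const int layerMax = std::clamp(layers[i].max, 0, 63);
        const int layerMin = std::clamp(layers[i].min, 0, layerMax);  // avoid min > max
        (layers[i].isIFrame ? iMin : pMin) = layerMin;
        (layers[i].isIFrame ? iMax : pMax) = layerMax;
    }
    *outMax = std::min(iMax, pMax);
    *outMin = std::min(std::max(iMin, pMin), *outMax);
}

int main() {
    const QpLayer layers[] = {{true, 10, 40}, {false, 70, 5}};  // second layer is malformed
    int minQp = 0, maxQp = 0;
    mergeQpBounds(layers, 2, &minQp, &maxQp);
    std::printf("merged QP range: %d-%d\n", minQp, maxQp);      // prints 5-5
    return 0;
}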
 
diff --git a/media/codec2/components/aom/C2SoftAomEnc.h b/media/codec2/components/aom/C2SoftAomEnc.h
index 7e5ea63..067b04f 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.h
+++ b/media/codec2/components/aom/C2SoftAomEnc.h
@@ -109,6 +109,7 @@
     std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mQpBounds;
 
     aom_codec_err_t setupCodecParameters();
 };
@@ -126,6 +127,8 @@
                                   const C2P<C2StreamPictureSizeInfo::input>& size,
                                   const C2P<C2StreamFrameRateInfo::output>& frameRate,
                                   const C2P<C2StreamBitrateInfo::output>& bitrate);
+    static C2R PictureQuantizationSetter(bool mayBlock,
+                                         C2P<C2StreamPictureQuantizationTuning::output> &me);
 
     // unsafe getters
     std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
@@ -150,6 +153,9 @@
     std::shared_ptr<C2StreamPixelFormatInfo::input> getPixelFormat_l() const {
         return mPixelFormat;
     }
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+        return mPictureQuantization;
+    }
     uint32_t getSyncFramePeriod() const;
     static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me);
     static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
@@ -171,6 +177,7 @@
     std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
     std::shared_ptr<C2StreamPixelFormatInfo::input> mPixelFormat;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
 
 };
 
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 591d56d..7b63e75 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -21,6 +21,7 @@
 #include <audio_utils/primitives.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 
+#include <C2Debug.h>
 #include <C2PlatformSupport.h>
 #include <SimpleC2Interface.h>
 
@@ -81,10 +82,6 @@
                     FLAC_COMPRESSION_LEVEL_MIN, FLAC_COMPRESSION_LEVEL_MAX)})
                 .withSetter(Setter<decltype(*mComplexity)>::NonStrictValueWithNoDeps)
                 .build());
-        addParameter(
-                DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
-                .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 4608))
-                .build());
 
         addParameter(
                 DefineParam(mPcmEncodingInfo, C2_PARAMKEY_PCM_ENCODING)
@@ -96,6 +93,26 @@
                 })
                 .withSetter((Setter<decltype(*mPcmEncodingInfo)>::StrictValueWithNoDeps))
                 .build());
+
+        addParameter(
+                DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+                .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMaxBlockSize))
+                .withFields({
+                    C2F(mInputMaxBufSize, value).any(),
+                })
+                .withSetter(MaxInputSizeSetter, mChannelCount, mPcmEncodingInfo)
+                .build());
+    }
+
+    static C2R MaxInputSizeSetter(bool mayBlock,
+            C2P<C2StreamMaxBufferSizeInfo::input> &me,
+            const C2P<C2StreamChannelCountInfo::input> &channelCount,
+            const C2P<C2StreamPcmEncodingInfo::input> &pcmEncoding) {
+        (void)mayBlock;
+        C2R res = C2R::Ok();
+        int bytesPerSample = pcmEncoding.v.value == C2Config::PCM_FLOAT ? 4 : 2;
+        me.set().value = kMaxBlockSize * bytesPerSample * channelCount.v.value;
+        return res;
     }
 
     uint32_t getSampleRate() const { return mSampleRate->value; }
@@ -446,6 +463,9 @@
 
     mBlockSize = FLAC__stream_encoder_get_blocksize(mFlacStreamEncoder);
 
+    // kMaxBlockSize must cover the maximum block size used by the encoder
+    CHECK(mBlockSize <= kMaxBlockSize);
+
     ALOGV("encoder successfully configured");
     return OK;
 }
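
For illustration only, not part of the change: the new MaxInputSizeSetter replaces the fixed 4608-byte constant with kMaxBlockSize frames times bytes-per-sample times channel count, e.g. 4608 * 2 * 2 = 18432 bytes for 16-bit stereo PCM. A standalone sketch of that sizing rule:

#include <cstdio>

static constexpr unsigned kMaxBlockSize = 4608;  // frames, as in C2SoftFlacEnc.h

// Same rule as MaxInputSizeSetter: float PCM uses 4 bytes per sample, otherwise 2.
static unsigned maxInputBufferSize(unsigned channelCount, bool isFloatPcm) {
    const unsigned bytesPerSample = isFloatPcm ? 4u : 2u;
    return kMaxBlockSize * bytesPerSample * channelCount;
}

int main() {
    std::printf("stereo int16: %u bytes\n", maxInputBufferSize(2, false));  // 18432
    std::printf("stereo float: %u bytes\n", maxInputBufferSize(2, true));   // 36864
    return 0;
}
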
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
index a971ab5..1f3be3c 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.h
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -63,7 +63,8 @@
 
     std::shared_ptr<IntfImpl> mIntf;
     const unsigned int kInBlockSize = 1152;
-    const unsigned int kMaxNumChannels = 2;
+    static constexpr unsigned int kMaxNumChannels = 2;
+    static constexpr unsigned int kMaxBlockSize = 4608;
     FLAC__StreamEncoder* mFlacStreamEncoder;
     FLAC__int32* mInputBufferPcm32;
     std::shared_ptr<C2LinearBlock> mOutputBlock;
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 2137964..fd9488b 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -469,11 +469,12 @@
         mInitialized = false;
     }
 
+    bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
+
     if (!mInitialized) {
         uint8_t *vol_data[1]{};
         int32_t vol_size = 0;
 
-        bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
         if (codecConfig || volHeader) {
             vol_data[0] = bitstream;
             vol_size = inSize;
@@ -512,10 +513,11 @@
                 return;
             }
         }
-        if (codecConfig) {
-            fillEmptyWork(work);
-            return;
-        }
+    }
+
+    if (codecConfig) {
+        fillEmptyWork(work);
+        return;
     }
 
     size_t inPos = 0;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index cdc3be0..40bb26e 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -29,7 +29,6 @@
     #include <opus_multistream.h>
 }
 
-#define DEFAULT_FRAME_DURATION_MS 20
 namespace android {
 
 namespace {
@@ -38,7 +37,6 @@
 
 }  // namespace
 
-static const int kMaxNumChannelsSupported = 2;
 
 class C2SoftOpusEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
@@ -248,10 +246,11 @@
     mAnchorTimeStamp = 0;
     mProcessedSamples = 0;
     mFilledLen = 0;
-    mFrameDurationMs = DEFAULT_FRAME_DURATION_MS;
+    mFrameDurationMs = kDefaultFrameDurationMs;
     if (!mInputBufferPcm16) {
+        size_t frameSize = (mFrameDurationMs * kMaxSampleRateSupported) / 1000;
         mInputBufferPcm16 =
-            (int16_t*)malloc(kFrameSize * kMaxNumChannels * sizeof(int16_t));
+            (int16_t*)malloc(frameSize * kMaxNumChannelsSupported * sizeof(int16_t));
     }
     if (!mInputBufferPcm16) return C2_NO_MEMORY;
 
@@ -368,7 +367,9 @@
     }
 
     C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
-    err = pool->fetchLinearBlock(kMaxPayload, usage, &mOutputBlock);
+    int outCapacity =
+        kMaxPayload * ((inSize + mNumPcmBytesPerInputFrame) / mNumPcmBytesPerInputFrame);
+    err = pool->fetchLinearBlock(outCapacity, usage, &mOutputBlock);
     if (err != C2_OK) {
         ALOGE("fetchLinearBlock for Output failed with status %d", err);
         work->result = C2_NO_MEMORY;
@@ -497,11 +498,11 @@
         uint8_t* outPtr = wView.data() + mBytesEncoded;
         int encodedBytes =
             opus_multistream_encode(mEncoder, mInputBufferPcm16,
-                                    mNumSamplesPerFrame, outPtr, kMaxPayload - mBytesEncoded);
+                                    mNumSamplesPerFrame, outPtr, outCapacity - mBytesEncoded);
         ALOGV("encoded %i Opus bytes from %zu PCM bytes", encodedBytes,
               processSize);
 
-        if (encodedBytes < 0 || encodedBytes > (kMaxPayload - mBytesEncoded)) {
+        if (encodedBytes < 0 || encodedBytes > (outCapacity - mBytesEncoded)) {
             ALOGE("opus_encode failed, encodedBytes : %d", encodedBytes);
             mSignalledError = true;
             work->result = C2_CORRUPTED;
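
For illustration only, not part of the change: instead of fetching a fixed kMaxPayload-sized output block, the encoder now scales the block with the amount of queued input, reserving roughly one kMaxPayload slot per input frame plus one extra. A standalone sketch of that scaling, using a hypothetical 20 ms stereo frame at 48 kHz:

#include <cstdint>
#include <cstdio>

static constexpr int kMaxPayload = 1500;  // max encoded bytes per 20 ms frame (see header comment)

// Same scaling rule as the fetchLinearBlock() change above.
static int outputCapacity(int inSize, int pcmBytesPerInputFrame) {
    return kMaxPayload * ((inSize + pcmBytesPerInputFrame) / pcmBytesPerInputFrame);
}

int main() {
    const int pcmBytesPerFrame = 960 * 2 * static_cast<int>(sizeof(int16_t));  // 20 ms stereo at 48 kHz
    std::printf("capacity for 3 queued frames: %d bytes\n",
                outputCapacity(3 * pcmBytesPerFrame, pcmBytesPerFrame));       // 4 * 1500 = 6000
    return 0;
}
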
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.h b/media/codec2/components/opus/C2SoftOpusEnc.h
index 733a6bc..2c9f5e5 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.h
+++ b/media/codec2/components/opus/C2SoftOpusEnc.h
@@ -45,12 +45,13 @@
             uint32_t drainMode,
             const std::shared_ptr<C2BlockPool> &pool) override;
 private:
-    /* OPUS_FRAMESIZE_20_MS */
-    const int kFrameSize = 960;
-    const int kMaxSampleRate = 48000;
-    const int kMinSampleRate = 8000;
-    const int kMaxPayload = (4000 * kMaxSampleRate) / kMinSampleRate;
-    const int kMaxNumChannels = 8;
+    static const int kMaxNumChannelsSupported = 2;
+    static const int kMaxSampleRateSupported = 48000;
+    static const int kDefaultFrameDurationMs = 20;
+    // For a frame duration of 20 ms, the recommended payload size is 1276 bytes, as per
+    // https://www.opus-codec.org/docs/html_api/group__opusencoder.html.
+    // For 40 ms, 60 ms, ..., the payload size scales proportionally: 1276 x 2, 1276 x 3, ...
+    static const int kMaxPayload = 1500; // from tests/test_opus_encode.c
 
     std::shared_ptr<IntfImpl> mIntf;
     std::shared_ptr<C2LinearBlock> mOutputBlock;
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index dab7b89..318f093 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -446,6 +446,7 @@
     {
         IntfImpl::Lock lock = mIntf->lock();
         mPixelFormatInfo = mIntf->getPixelFormat_l();
+        mColorAspects = mIntf->getDefaultColorAspects_l();
     }
 
     mWidth = 320;
@@ -591,6 +592,41 @@
         return;
     }
 
+    // handle dynamic config parameters
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects =
+            mIntf->getDefaultColorAspects_l();
+        lock.unlock();
+
+        if (mColorAspects->range != defaultColorAspects->range ||
+            mColorAspects->primaries != defaultColorAspects->primaries ||
+            mColorAspects->matrix != defaultColorAspects->matrix ||
+            mColorAspects->transfer != defaultColorAspects->transfer) {
+
+            mColorAspects->range = defaultColorAspects->range;
+            mColorAspects->primaries = defaultColorAspects->primaries;
+            mColorAspects->matrix = defaultColorAspects->matrix;
+            mColorAspects->transfer = defaultColorAspects->transfer;
+
+            C2StreamColorAspectsTuning::output colorAspect(0u, defaultColorAspects->range,
+                defaultColorAspects->primaries, defaultColorAspects->transfer,
+                defaultColorAspects->matrix);
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            c2_status_t err = mIntf->config({&colorAspect}, C2_MAY_BLOCK, &failures);
+            if (err == C2_OK) {
+                work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(colorAspect));
+            } else {
+                ALOGE("Config update colorAspect failed");
+                mSignalledError = true;
+                work->workletsProcessed = 1u;
+                work->result = C2_CORRUPTED;
+                return;
+            }
+        }
+    }
+
     size_t inOffset = 0u;
     size_t inSize = 0u;
     C2ReadView rView = mDummyReadView;
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index e9d6dc9..93cc213 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -66,6 +66,7 @@
     // configurations used by component in process
     // (TODO: keep this in intf but make them internal only)
     std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+    std::shared_ptr<C2StreamColorAspectsTuning::output> mColorAspects;
 
     std::shared_ptr<IntfImpl> mIntf;
     vpx_codec_ctx_t *mCodecCtx;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 66ce5ea..1c5772f 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -354,12 +354,9 @@
     return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
 }
 
-C2R C2SoftVpxEnc::IntfImpl::PictureQuantizationSetter(bool mayBlock,
-                                                     C2P<C2StreamPictureQuantizationTuning::output>
-                                                     &me) {
+C2R C2SoftVpxEnc::IntfImpl::PictureQuantizationSetter(
+        bool mayBlock, C2P<C2StreamPictureQuantizationTuning::output>& me) {
     (void)mayBlock;
-    // these are the ones we're going to set, so want them to default
-    // to the DEFAULT values for the codec
     int32_t iMin = VPX_QP_DEFAULT_MIN, pMin = VPX_QP_DEFAULT_MIN;
     int32_t iMax = VPX_QP_DEFAULT_MAX, pMax = VPX_QP_DEFAULT_MAX;
     for (size_t i = 0; i < me.v.flexCount(); ++i) {
@@ -382,8 +379,8 @@
           iMin, iMax, pMin, pMax);
 
     // vpx library takes same range for I/P picture type
-    int32_t maxFrameQP = std::min({iMax, pMax});
-    int32_t minFrameQP = std::max({iMin, pMin});
+    int32_t maxFrameQP = std::min(iMax, pMax);
+    int32_t minFrameQP = std::max(iMin, pMin);
     if (minFrameQP > maxFrameQP) {
         minFrameQP = maxFrameQP;
     }
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 01b0678..1c2a0fb 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -563,6 +563,8 @@
         auto mapRet = mDequeued.emplace(bid, *pBuffer);
         CHECK(mapRet.second);
     } else {
+        ALOGD("allocate error(%d): Dequeued(%zu), Dequeuable(%d)",
+              (int)res, mDequeued.size(), mDequeueable + 1);
         if (adjustDequeueConfLocked(updateDequeue)) {
             return;
         }
@@ -629,7 +631,11 @@
     ::android::status_t status = igbp->dequeueBuffer(
             &slotId, &fence, width, height, format, usage, &outBufferAge, &outTimestamps);
     if (status < ::android::OK) {
-        ALOGE("dequeueBuffer() error %d", (int)status);
+        if (status == ::android::TIMED_OUT || status == ::android::WOULD_BLOCK) {
+            ALOGW("BQ might not be ready for dequeueBuffer()");
+            return C2_BLOCKING;
+        }
+        ALOGE("BQ is in an inconsistent state. dequeueBuffer() error %d", (int)status);
         return C2_CORRUPTED;
     }
     cache->waitOnSlot(slotId);
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 2fa89e7..3eec0f3 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -1550,19 +1550,23 @@
 
 sp<Codec2Buffer> LinearOutputBuffers::wrap(const std::shared_ptr<C2Buffer> &buffer) {
     if (buffer == nullptr) {
-        ALOGV("[%s] using a dummy buffer", mName);
+        ALOGD("[%s] received null buffer", mName);
         return new LocalLinearBuffer(mFormat, new ABuffer(0));
     }
     if (buffer->data().type() != C2BufferData::LINEAR) {
-        ALOGV("[%s] non-linear buffer %d", mName, buffer->data().type());
+        ALOGW("[%s] non-linear buffer %d", mName, buffer->data().type());
         // We expect linear output buffers from the component.
         return nullptr;
     }
     if (buffer->data().linearBlocks().size() != 1u) {
-        ALOGV("[%s] no linear buffers", mName);
+        ALOGW("[%s] no linear buffers", mName);
         // We expect one and only one linear block from the component.
         return nullptr;
     }
+    if (buffer->data().linearBlocks().front().size() == 0) {
+        ALOGD("[%s] received 0-sized buffer", mName);
+        return new LocalLinearBuffer(mFormat, new ABuffer(0));
+    }
     sp<Codec2Buffer> clientBuffer = ConstLinearBlockBuffer::Allocate(mFormat, buffer);
     if (clientBuffer == nullptr) {
         ALOGD("[%s] ConstLinearBlockBuffer::Allocate failed", mName);
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 9c514f2..2550dcf 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -179,10 +179,17 @@
     if (!buffer
             || buffer->data().type() != C2BufferData::LINEAR
             || buffer->data().linearBlocks().size() != 1u) {
+        if (!buffer) {
+            ALOGD("ConstLinearBlockBuffer::Allocate: null buffer");
+        } else {
+            ALOGW("ConstLinearBlockBuffer::Allocate: type=%d # linear blocks=%zu",
+                  buffer->data().type(), buffer->data().linearBlocks().size());
+        }
         return nullptr;
     }
     C2ReadView readView(buffer->data().linearBlocks()[0].map().get());
     if (readView.error() != C2_OK) {
+        ALOGW("ConstLinearBlockBuffer::Allocate: readView.error()=%d", readView.error());
         return nullptr;
     }
     return new ConstLinearBlockBuffer(format, std::move(readView), buffer);
@@ -1137,7 +1144,7 @@
 
         std::optional<Smpte2086> smpte2086;
         status_t status = mapper.getSmpte2086(buffer.get(), &smpte2086);
-        if (status != OK) {
+        if (status != OK || !smpte2086) {
             err = C2_CORRUPTED;
         } else {
             if (smpte2086) {
@@ -1157,7 +1164,7 @@
 
         std::optional<Cta861_3> cta861_3;
         status = mapper.getCta861_3(buffer.get(), &cta861_3);
-        if (status != OK) {
+        if (status != OK || !cta861_3) {
             err = C2_CORRUPTED;
         } else {
             if (cta861_3) {
@@ -1176,7 +1183,7 @@
         dynamicInfo->reset();
         std::optional<std::vector<uint8_t>> vec;
         status_t status = mapper.getSmpte2094_40(buffer.get(), &vec);
-        if (status != OK) {
+        if (status != OK || !vec) {
             dynamicInfo->reset();
             err = C2_CORRUPTED;
         } else {
diff --git a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
index 1d2da6a..c7a04da 100644
--- a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
+++ b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
@@ -36,6 +36,7 @@
 
 [[clang::no_destroy]] static std::once_flag gSmOnce;
 sp<FakeServiceManager> gFakeServiceManager;
+sp<AudioFlingerServerAdapter> gAudioFlingerServerAdapter;
 
 bool addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
                 FuzzedDataProvider& fdp) {
@@ -47,22 +48,10 @@
     return true;
 }
 
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
-    FuzzedDataProvider fdp(data, size);
-
-    std::call_once(gSmOnce, [&] {
-        /* Create a FakeServiceManager instance and add required services */
-        gFakeServiceManager = sp<FakeServiceManager>::make();
-        setDefaultServiceManager(gFakeServiceManager);
-    });
-    gFakeServiceManager->clear();
-
-    for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
-                                "batterystats", "media.metrics"}) {
-        if (!addService(String16(service), gFakeServiceManager, fdp)) {
-            return 0;
-        }
-    }
+extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
+    /* Create a FakeServiceManager instance and add required services */
+    gFakeServiceManager = sp<FakeServiceManager>::make();
+    setDefaultServiceManager(gFakeServiceManager);
 
     auto configService = ndk::SharedRefBase::make<ConfigMock>();
     CHECK_EQ(NO_ERROR, AServiceManager_addService(configService.get()->asBinder().get(),
@@ -80,22 +69,37 @@
     // Disable creating thread pool for fuzzer instance of audio flinger and audio policy services
     AudioSystem::disableThreadPool();
 
-    const auto audioFlinger = sp<AudioFlinger>::make();
-    const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+    return 0;
+}
 
-    CHECK_EQ(NO_ERROR,
-             gFakeServiceManager->addService(
-                     String16(IAudioFlinger::DEFAULT_SERVICE_NAME), IInterface::asBinder(afAdapter),
-                     false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
 
-    const auto audioPolicyService = sp<AudioPolicyService>::make();
+    for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
+                                "batterystats", "media.metrics"}) {
+        if (!addService(String16(service), gFakeServiceManager, fdp)) {
+            return 0;
+        }
+    }
 
-    CHECK_EQ(NO_ERROR,
-             gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
-                                             false /* allowIsolated */,
-                                             IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+    // TODO(330882064) : Initialise Audio Flinger and Audio Policy services every time
+    std::call_once(gSmOnce, [&] {
+        const auto audioFlinger = sp<AudioFlinger>::make();
+        gAudioFlingerServerAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+        CHECK_EQ(NO_ERROR,
+                 gFakeServiceManager->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+                                                 IInterface::asBinder(gAudioFlingerServerAdapter),
+                                                 false /* allowIsolated */,
+                                                 IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
 
-    fuzzService(media::IAudioFlingerService::asBinder(afAdapter), std::move(fdp));
+        const auto audioPolicyService = sp<AudioPolicyService>::make();
+        CHECK_EQ(NO_ERROR,
+                 gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
+                                                 false /* allowIsolated */,
+                                                 IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+    });
+
+    fuzzService(media::IAudioFlingerService::asBinder(gAudioFlingerServerAdapter), std::move(fdp));
 
     return 0;
 }
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 3fd0911..97c9659 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -366,24 +366,26 @@
     if (mIsInput) {
         return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), reply);
     } else {
-        if (mContext.isAsynchronous()) {
+        if (const auto state = getState(); state == StreamDescriptor::State::IDLE) {
             // Handle pause-flush-resume sequence. 'flush' from PAUSED goes to
             // IDLE. We move here from IDLE to ACTIVE (same as 'start' from PAUSED).
-            const auto state = getState();
-            if (state == StreamDescriptor::State::IDLE) {
-                StreamDescriptor::Reply localReply{};
-                StreamDescriptor::Reply* innerReply = reply ?: &localReply;
-                RETURN_STATUS_IF_ERROR(
-                        sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply));
-                if (innerReply->state != StreamDescriptor::State::ACTIVE) {
-                    ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
-                            __func__, toString(innerReply->state).c_str());
-                    return INVALID_OPERATION;
-                }
-                return OK;
+            StreamDescriptor::Reply localReply{};
+            StreamDescriptor::Reply* innerReply = reply ?: &localReply;
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply));
+            if (innerReply->state != StreamDescriptor::State::ACTIVE) {
+                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+                        __func__, toString(innerReply->state).c_str());
+                return INVALID_OPERATION;
             }
+            return OK;
+        } else if (state == StreamDescriptor::State::PAUSED) {
+            return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
+        } else {
+            ALOGE("%s: unexpected stream state: %s (expected IDLE or PAUSED)",
+                        __func__, toString(state).c_str());
+            return INVALID_OPERATION;
         }
-        return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
     }
 }
 
@@ -415,8 +417,10 @@
 
 void StreamHalAidl::onAsyncTransferReady() {
     if (auto state = getState(); state == StreamDescriptor::State::TRANSFERRING) {
-        // Retrieve the current state together with position counters.
-        updateCountersIfNeeded();
+        // Retrieve the current state together with position counters unconditionally
+        // to ensure that the state on our side gets updated.
+        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
+                nullptr, true /*safeFromNonWorkerThread */);
     } else {
         ALOGW("%s: unexpected onTransferReady in the state %s", __func__, toString(state).c_str());
     }
@@ -424,8 +428,10 @@
 
 void StreamHalAidl::onAsyncDrainReady() {
     if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
-        // Retrieve the current state together with position counters.
-        updateCountersIfNeeded();
+        // Retrieve the current state together with position counters unconditionally
+        // to ensure that the state on our side gets updated.
+        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
+                nullptr, true /*safeFromNonWorkerThread */);
     } else {
         ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
     }
@@ -502,27 +508,31 @@
                 "%s %s: must be invoked from the worker thread (%d)",
                 __func__, command.toString().c_str(), workerTid);
     }
-    if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
-        ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
-        return NOT_ENOUGH_DATA;
-    }
     StreamDescriptor::Reply localReply{};
-    if (reply == nullptr) {
-        reply = &localReply;
-    }
-    if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
-        ALOGE("%s: failed to read from reply MQ, command %s", __func__, command.toString().c_str());
-        return NOT_ENOUGH_DATA;
-    }
     {
-        std::lock_guard l(mLock);
-        // Not every command replies with 'latencyMs' field filled out, substitute the last
-        // returned value in that case.
-        if (reply->latencyMs <= 0) {
-            reply->latencyMs = mLastReply.latencyMs;
+        std::lock_guard l(mCommandReplyLock);
+        if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
+            ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
+            return NOT_ENOUGH_DATA;
         }
-        mLastReply = *reply;
-        mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
+        if (reply == nullptr) {
+            reply = &localReply;
+        }
+        if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
+            ALOGE("%s: failed to read from reply MQ, command %s",
+                    __func__, command.toString().c_str());
+            return NOT_ENOUGH_DATA;
+        }
+        {
+            std::lock_guard l(mLock);
+            // Not every command replies with 'latencyMs' field filled out, substitute the last
+            // returned value in that case.
+            if (reply->latencyMs <= 0) {
+                reply->latencyMs = mLastReply.latencyMs;
+            }
+            mLastReply = *reply;
+            mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
+        }
     }
     switch (reply->status) {
         case STATUS_OK: return OK;
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index 53d46e5..b20eb00 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -243,6 +243,15 @@
     const bool mIsInput;
     const audio_config_base_t mConfig;
     const StreamContextAidl mContext;
+    // This lock is used to make sending of a command and receiving a reply an atomic
+    // operation. Otherwise, when two threads are trying to send a command, they may both advance to
+    // reading of the reply once the HAL has consumed the command from the MQ, and that creates a
+    // race condition between them.
+    //
+    // Note that only access to command and reply MQs needs to be protected because the data MQ is
+    // only accessed by the I/O thread. Also, there is no need to protect lookup operations on the
+    // queues as they are thread-safe; only the send/receive operations must be protected.
+    std::mutex mCommandReplyLock;
 
   private:
     static audio_config_base_t configToBase(const audio_config& config) {
@@ -256,6 +265,8 @@
         std::lock_guard l(mLock);
         return mLastReply.state;
     }
+    // Note: `sendCommand` acquires mLock while holding mCommandReplyLock, so never call it
+    // while `mLock` is held.
     status_t sendCommand(
             const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
             ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
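
For illustration only, not part of the change: the new mCommandReplyLock makes the write-command / read-reply pair atomic, so a concurrent caller cannot consume a reply that belongs to another thread's command. A standalone sketch of that pattern, with a toy blocking queue standing in for the HAL command/reply MQs:

#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <queue>
#include <thread>

// Toy blocking queue standing in for the AIDL command/reply message queues.
template <typename T>
class BlockingQueue {
  public:
    void write(T v) {
        { std::lock_guard<std::mutex> l(mMutex); mItems.push(std::move(v)); }
        mCv.notify_one();
    }
    T read() {
        std::unique_lock<std::mutex> l(mMutex);
        mCv.wait(l, [this] { return !mItems.empty(); });
        T v = std::move(mItems.front());
        mItems.pop();
        return v;
    }
  private:
    std::mutex mMutex;
    std::condition_variable mCv;
    std::queue<T> mItems;
};

BlockingQueue<int> gCommandMq;  // carries a command id
BlockingQueue<int> gReplyMq;    // carries the id of the command it answers
std::mutex gCommandReplyLock;

// Holding one lock across the write and the read keeps another sender from stealing
// this command's reply; this is the invariant mCommandReplyLock establishes above.
int sendCommand(int commandId) {
    std::lock_guard<std::mutex> l(gCommandReplyLock);
    gCommandMq.write(commandId);
    return gReplyMq.read();
}

int main() {
    std::thread hal([] {  // fake HAL: answers each command with its own id
        for (int i = 0; i < 4; ++i) gReplyMq.write(gCommandMq.read());
    });
    std::thread a([] {
        for (int id : {1, 2}) std::printf("A sent %d, got %d\n", id, sendCommand(id));
    });
    std::thread b([] {
        for (int id : {11, 12}) std::printf("B sent %d, got %d\n", id, sendCommand(id));
    });
    a.join(); b.join(); hal.join();
    return 0;
}

Without the lock, both caller threads could write their commands back to back and then race to read the replies in arbitrary order, which is the interleaving the header comment describes.
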
diff --git a/media/libaudioprocessing/AudioMixerBase.cpp b/media/libaudioprocessing/AudioMixerBase.cpp
index 3d11d92..7e362f7 100644
--- a/media/libaudioprocessing/AudioMixerBase.cpp
+++ b/media/libaudioprocessing/AudioMixerBase.cpp
@@ -1122,7 +1122,7 @@
                     aux = t->auxBuffer + numFrames;
                 }
                 for (int outFrames = frameCount; outFrames > 0; ) {
-                    // t->in == nullptr can happen if the track was flushed just after having
+                    // t->mIn == nullptr can happen if the track was flushed just after having
                     // been enabled for mixing.
                     if (t->mIn == nullptr) {
                         break;
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 67518af..44ea2a4 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -353,45 +353,62 @@
         return status;
     }
 
-    std::vector<float> inFrames(samples);
-    std::vector<float> outFrames(frameCount * FCC_2);
+    std::vector<float> inputSamples;
+    std::vector<float> outputSamples(frameCount * FCC_2);
 
     if (isPreset() && mNextPreset != mPreset) {
         loadPreset();
     }
 
     if (isAuxiliary()) {
-        inFrames.assign(in, in + samples);
+        inputSamples.resize(samples);
+        inputSamples.assign(in, in + samples);
     } else {
-        // mono input is duplicated
+        // Resizing to stereo is required to duplicate mono input
+        inputSamples.resize(frameCount * FCC_2);
         if (channels >= FCC_2) {
             for (int i = 0; i < frameCount; i++) {
-                inFrames[FCC_2 * i] = in[channels * i] * kSendLevel;
-                inFrames[FCC_2 * i + 1] = in[channels * i + 1] * kSendLevel;
+                inputSamples[FCC_2 * i] = in[channels * i] * kSendLevel;
+                inputSamples[FCC_2 * i + 1] = in[channels * i + 1] * kSendLevel;
             }
         } else {
             for (int i = 0; i < frameCount; i++) {
-                inFrames[FCC_2 * i] = inFrames[FCC_2 * i + 1] = in[i] * kSendLevel;
+                inputSamples[FCC_2 * i] = inputSamples[FCC_2 * i + 1] = in[i] * kSendLevel;
             }
         }
     }
 
     if (isPreset() && mPreset == PresetReverb::Presets::NONE) {
-        std::fill(outFrames.begin(), outFrames.end(), 0);  // always stereo here
+        std::fill(outputSamples.begin(), outputSamples.end(), 0);  // always stereo here
     } else {
         if (!mEnabled && mSamplesToExitCount > 0) {
-            std::fill(outFrames.begin(), outFrames.end(), 0);
+            std::fill(outputSamples.begin(), outputSamples.end(), 0);
         }
+        int inputBufferIndex = 0;
+        int outputBufferIndex = 0;
+
+        // The LVREV library can process at most INT16_MAX frames per call
+        constexpr int kMaxBlockFrames = std::numeric_limits<int16_t>::max();
+        const auto inputFrameSize = getInputFrameSize();
+        const auto outputFrameSize = getOutputFrameSize();
 
         /* Process the samples, producing a stereo output */
-        LVREV_ReturnStatus_en lvrevStatus =
-                LVREV_Process(mInstance,        /* Instance handle */
-                              inFrames.data(),  /* Input buffer */
-                              outFrames.data(), /* Output buffer */
-                              frameCount);      /* Number of samples to read */
-        if (lvrevStatus != LVREV_SUCCESS) {
-            LOG(ERROR) << __func__ << " LVREV_Process error: " << lvrevStatus;
-            return {EX_UNSUPPORTED_OPERATION, 0, 0};
+        for (int fc = frameCount; fc > 0;) {
+            int processFrames = std::min(fc, kMaxBlockFrames);
+            LVREV_ReturnStatus_en lvrevStatus =
+                    LVREV_Process(mInstance,                            /* Instance handle */
+                                  inputSamples.data() + inputBufferIndex,   /* Input buffer */
+                                  outputSamples.data() + outputBufferIndex, /* Output buffer */
+                                  processFrames); /* Number of samples to process */
+            if (lvrevStatus != LVREV_SUCCESS) {
+                LOG(ERROR) << __func__ << " LVREV_Process error: " << lvrevStatus;
+                return {EX_UNSUPPORTED_OPERATION, 0, 0};
+            }
+
+            fc -= processFrames;
+
+            inputBufferIndex += processFrames * inputFrameSize / sizeof(float);
+            outputBufferIndex += processFrames * outputFrameSize / sizeof(float);
         }
     }
     // Convert to 16 bits
@@ -401,14 +418,14 @@
         if (channels >= FCC_2) {
             for (int i = 0; i < frameCount; i++) {
                 // Mix with dry input
-                outFrames[FCC_2 * i] += in[channels * i];
-                outFrames[FCC_2 * i + 1] += in[channels * i + 1];
+                outputSamples[FCC_2 * i] += in[channels * i];
+                outputSamples[FCC_2 * i + 1] += in[channels * i + 1];
             }
         } else {
             for (int i = 0; i < frameCount; i++) {
                 // Mix with dry input
-                outFrames[FCC_2 * i] += in[i];
-                outFrames[FCC_2 * i + 1] += in[i];
+                outputSamples[FCC_2 * i] += in[i];
+                outputSamples[FCC_2 * i + 1] += in[i];
             }
         }
 
@@ -420,8 +437,8 @@
             float incr = (mVolume.right - vr) / frameCount;
 
             for (int i = 0; i < frameCount; i++) {
-                outFrames[FCC_2 * i] *= vl;
-                outFrames[FCC_2 * i + 1] *= vr;
+                outputSamples[FCC_2 * i] *= vl;
+                outputSamples[FCC_2 * i + 1] *= vr;
 
                 vl += incl;
                 vr += incr;
@@ -430,8 +447,8 @@
         } else if (volumeMode != VOLUME_OFF) {
             if (mVolume.left != kUnitVolume || mVolume.right != kUnitVolume) {
                 for (int i = 0; i < frameCount; i++) {
-                    outFrames[FCC_2 * i] *= mVolume.left;
-                    outFrames[FCC_2 * i + 1] *= mVolume.right;
+                    outputSamples[FCC_2 * i] *= mVolume.left;
+                    outputSamples[FCC_2 * i + 1] *= mVolume.right;
                 }
             }
             mPrevVolume = mVolume;
@@ -441,8 +458,8 @@
 
     if (outChannels > 2) {
         for (int i = 0; i < frameCount; i++) {
-            out[outChannels * i] = outFrames[FCC_2 * i];
-            out[outChannels * i + 1] = outFrames[FCC_2 * i + 1];
+            out[outChannels * i] = outputSamples[FCC_2 * i];
+            out[outChannels * i + 1] = outputSamples[FCC_2 * i + 1];
         }
         if (!isAuxiliary()) {
             for (int i = 0; i < frameCount; i++) {
@@ -454,10 +471,10 @@
         }
     } else {
         if (outChannels == FCC_1) {
-            From2iToMono_Float(outFrames.data(), out, frameCount);
+            From2iToMono_Float(outputSamples.data(), out, frameCount);
         } else {
             for (int i = 0; i < frameCount * FCC_2; i++) {
-                out[i] = outFrames[i];
+                out[i] = outputSamples[i];
             }
         }
     }
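
For illustration only, not part of the change: the loop added above splits processing into blocks no larger than the LVREV per-call limit of INT16_MAX frames and advances the input/output offsets by the frames already processed. A standalone sketch of that chunking pattern, with a hypothetical process() callback standing in for LVREV_Process:

#include <algorithm>
#include <cstdio>
#include <functional>
#include <limits>

static bool processInBlocks(int frameCount, int maxBlockFrames,
                            const std::function<bool(int offsetFrames, int frames)>& process) {
    int offset = 0;
    for (int fc = frameCount; fc > 0;) {
        const int blockFrames = std::min(fc, maxBlockFrames);
        if (!process(offset, blockFrames)) return false;  // propagate library errors
        fc -= blockFrames;
        offset += blockFrames;
    }
    return true;
}

int main() {
    constexpr int kMaxBlockFrames = std::numeric_limits<int16_t>::max();  // 32767
    processInBlocks(100000, kMaxBlockFrames, [](int offset, int frames) {
        std::printf("process %d frames at frame offset %d\n", frames, offset);
        return true;
    });
    return 0;
}
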
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index bb49b5a..bd43fe2 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -2098,9 +2098,12 @@
              displayHeight,
              cropLeft, cropTop);
     } else {
-        CHECK(inputFormat->findInt32("width", &displayWidth));
-        CHECK(inputFormat->findInt32("height", &displayHeight));
-
+        if (!inputFormat->findInt32("width", &displayWidth)
+            || !inputFormat->findInt32("height", &displayHeight)) {
+            ALOGW("Either video width or video height missing, reporting 0x0!");
+            notifyListener(MEDIA_SET_VIDEO_SIZE, 0, 0);
+            return;
+        }
         ALOGV("Video input format %d x %d", displayWidth, displayHeight);
     }
 
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index cc6f5e8..0401e82 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -3975,6 +3975,15 @@
                     switch (mState) {
                         case INITIALIZING:
                         {
+                            // A resource error in the INITIALIZING state is logged through
+                            // metrics so that such occurrences can be tracked.
+                            if (isResourceError(err)) {
+                                mediametrics_setInt32(mMetricsHandle, kCodecError, err);
+                                mediametrics_setCString(mMetricsHandle, kCodecErrorState,
+                                                        stateString(mState).c_str());
+                                flushMediametrics();
+                                initMediametrics();
+                            }
                             setState(UNINITIALIZED);
                             break;
                         }
@@ -4912,8 +4921,8 @@
                 if (flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
                     mFlags |= kFlagUseCryptoAsync;
                     if ((mFlags & kFlagUseBlockModel)) {
-                        ALOGW("CrytoAsync not yet enabled for block model,\
-                                falling back to normal");
+                        ALOGW("CryptoAsync not yet enabled for block model, "
+                                "falling back to normal");
                     }
                 }
             }
@@ -4970,8 +4979,7 @@
 
             mDescrambler = static_cast<IDescrambler *>(descrambler);
             mBufferChannel->setDescrambler(mDescrambler);
-            if ((mFlags & kFlagUseCryptoAsync) &&
-                mCrypto  && (mDomain == DOMAIN_VIDEO)) {
+            if ((mFlags & kFlagUseCryptoAsync) && mCrypto) {
                 // set kFlagUseCryptoAsync but do-not use this for block model
                 // this is to propagate the error in onCryptoError()
                 // TODO (b/274628160): Enable Use of CONFIG_FLAG_USE_CRYPTO_ASYNC
@@ -6052,6 +6060,10 @@
         mErrorLog.clear();
     }
 
+    if (android::media::codec::provider_->set_state_early()) {
+        mState = newState;
+    }
+
     if (newState == UNINITIALIZED) {
         // return any straggling buffers, e.g. if we got here on an error
         returnBuffersToCodec();
@@ -6062,7 +6074,9 @@
         mFlags &= ~kFlagSawMediaServerDie;
     }
 
-    mState = newState;
+    if (!android::media::codec::provider_->set_state_early()) {
+        mState = newState;
+    }
 
     if (mBatteryChecker != nullptr) {
         mBatteryChecker->setExecuting(isExecuting());
@@ -6263,15 +6277,8 @@
         cryptoInfo->setInt32("secure", mFlags & kFlagIsSecure);
         sp<RefBase> obj;
         if (msg->findObject("cryptoInfos", &obj)) {
-            sp<CryptoInfosWrapper> infos{(CryptoInfosWrapper*)obj.get()};
-            sp<CryptoInfosWrapper> asyncInfos{
-                    new CryptoInfosWrapper(std::vector<std::unique_ptr<CodecCryptoInfo>>())};
-            for (std::unique_ptr<CodecCryptoInfo> &info : infos->value) {
-                if (info) {
-                    asyncInfos->value.emplace_back(new CryptoAsync::CryptoAsyncInfo(info));
-                }
-            }
-            buffer->meta()->setObject("cryptoInfos", asyncInfos);
+            // the object is created standalone, so no copy is required here
+            buffer->meta()->setObject("cryptoInfos", obj);
         } else {
             size_t key_len = (key != nullptr)? 16 : 0;
             size_t iv_len = (iv != nullptr)? 16 : 0;
@@ -6410,7 +6417,6 @@
             }
         }
         if (mCryptoAsync) {
-            // TODO b/316565675 - enable async path for audio
             // prepare a message and enqueue
             sp<AMessage> cryptoInfo = new AMessage();
             buildCryptoInfoAMessage(cryptoInfo, CryptoAsync::kActionDecrypt);
@@ -6646,9 +6652,9 @@
     CHECK_EQ(info, &mPortBuffers[portIndex][index]);
     availBuffers->erase(availBuffers->begin());
 
-    CHECK(!info->mOwnedByClient);
     {
         Mutex::Autolock al(mBufferLock);
+        CHECK(!info->mOwnedByClient);
         info->mOwnedByClient = true;
 
         // set image-data
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index 0e89521..bfa361c 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -216,6 +216,7 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="low-latency" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
@@ -234,6 +235,7 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="low-latency" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.mpeg2.decoder" type="video/mpeg2" domain="tv">
@@ -351,6 +353,7 @@
                 <Limit name="bitrate" range="1-20000000" />
             </Variant>
             <Feature name="bitrate-modes" value="VBR,CBR" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.hevc.encoder" type="video/hevc" variant="!slow-cpu">
@@ -365,6 +368,7 @@
             <Limit name="complexity" range="0-10"  default="0" />
             <Limit name="quality" range="0-100"  default="80" />
             <Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp9.encoder" type="video/x-vnd.on2.vp9" variant="!slow-cpu">
@@ -377,6 +381,7 @@
             <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="bitrate-modes" value="VBR,CBR" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.av1.encoder" type="video/av01" enabled="false" minsdk="34" variant="slow-cpu,!slow-cpu">
@@ -395,6 +400,7 @@
             <Limit name="quality" range="0-100"  default="80" />
             <Limit name="complexity" range="0-5"  default="0" />
             <Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
     </Encoders>
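
For illustration only, not part of the change, and assuming a Codec2 client that already holds the encoder's C2ComponentInterface and uses its config_vb() entry point: the new "qp-bounds" feature entries advertise the C2_PARAMKEY_PICTURE_QUANTIZATION tuning wired into the encoders above, which a client could set roughly as follows (the 10-40 and 10-45 bounds are arbitrary example values):

#include <memory>
#include <vector>

#include <C2Component.h>
#include <C2Config.h>

void setQpBounds(const std::shared_ptr<C2ComponentInterface>& intf) {
    std::shared_ptr<C2StreamPictureQuantizationTuning::output> qp =
            C2StreamPictureQuantizationTuning::output::AllocShared(2 /* flexCount */, 0u /* stream */);
    qp->m.values[0].type_ = C2Config::I_FRAME;
    qp->m.values[0].min = 10;
    qp->m.values[0].max = 40;
    qp->m.values[1].type_ = C2Config::P_FRAME;
    qp->m.values[1].min = 10;
    qp->m.values[1].max = 45;
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    intf->config_vb({qp.get()}, C2_MAY_BLOCK, &failures);
}

Whatever the client requests, the setters added in this change clamp the values into each codec's spec range before they reach the encoder.
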
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
index 4218d2d..3f850c2 100644
--- a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
@@ -24,61 +24,64 @@
 
 namespace android {
 
-#define MAX_MEDIA_BUFFER_SIZE 2048
+static const android_pixel_format_t kColorFormats[] = {
+        HAL_PIXEL_FORMAT_RGBA_8888,
+        HAL_PIXEL_FORMAT_RGB_565,
+        HAL_PIXEL_FORMAT_BGRA_8888,
+        HAL_PIXEL_FORMAT_RGBA_1010102,
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, /* To cover the default case */
+};
 
-// Fuzzer entry point.
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
-    // Init our wrapper
+static const MediaSource::ReadOptions::SeekMode kSeekModes[] = {
+        MediaSource::ReadOptions::SeekMode::SEEK_PREVIOUS_SYNC,
+        MediaSource::ReadOptions::SeekMode::SEEK_NEXT_SYNC,
+        MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST_SYNC,
+        MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST,
+        MediaSource::ReadOptions::SeekMode::SEEK_FRAME_INDEX,
+};
+
+static const std::string kComponentNames[] = {
+        "c2.android.avc.decoder",  "c2.android.hevc.decoder", "c2.android.vp8.decoder",
+        "c2.android.vp9.decoder",  "c2.android.av1.decoder",  "c2.android.mpeg4.decoder",
+        "c2.android.h263.decoder",
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     FuzzedDataProvider fdp(data, size);
+    std::string component = fdp.PickValueInArray(kComponentNames);
+    AString componentName(component.c_str());
+    sp<MetaData> trackMeta = generateMetaData(&fdp, component);
+    sp<IMediaSource> source = sp<IMediaSourceFuzzImpl>::make(&fdp, gMaxMediaBufferSize);
 
-    std::string name = fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
-    AString componentName(name.c_str());
-    sp<MetaData> trackMeta = generateMetaData(&fdp);
-    sp<IMediaSource> source = new IMediaSourceFuzzImpl(&fdp, MAX_MEDIA_BUFFER_SIZE);
-
-    // Image or video Decoder?
-    sp<FrameDecoder> decoder;
-    bool isVideoDecoder = fdp.ConsumeBool();
-    if (isVideoDecoder) {
-        decoder = new VideoFrameDecoder(componentName, trackMeta, source);
+    sp<FrameDecoder> decoder = nullptr;
+    if (fdp.ConsumeBool()) {
+        decoder = sp<MediaImageDecoder>::make(componentName, trackMeta, source);
     } else {
-        decoder = new MediaImageDecoder(componentName, trackMeta, source);
+        decoder = sp<VideoFrameDecoder>::make(componentName, trackMeta, source);
     }
 
-    while (fdp.remaining_bytes()) {
-        uint8_t switchCase = fdp.ConsumeIntegralInRange<uint8_t>(0, 3);
-        switch (switchCase) {
-            case 0: {
-                int64_t frameTimeUs = fdp.ConsumeIntegral<int64_t>();
-                int option = fdp.ConsumeIntegral<int>();
-                int colorFormat = fdp.ConsumeIntegral<int>();
-                decoder->init(frameTimeUs, option, colorFormat);
-                break;
-            }
-            case 1:
-                decoder->extractFrame();
-                break;
-            case 2: {
-                FrameRect rect;
-                rect.left = fdp.ConsumeIntegral<int32_t>();
-                rect.top = fdp.ConsumeIntegral<int32_t>();
-                rect.right = fdp.ConsumeIntegral<int32_t>();
-                rect.bottom = fdp.ConsumeIntegral<int32_t>();
-                decoder->extractFrame(&rect);
-                break;
-            }
-            case 3: {
-                sp<MetaData> trackMeta = generateMetaData(&fdp);
-                decoder->getMetadataOnly(trackMeta,
-                                         /*colorFormat*/ fdp.ConsumeIntegral<int>(),
-                                         /*thumbnail*/ fdp.ConsumeBool());
-                break;
-            }
-        }
+    if (decoder.get() &&
+        decoder->init(fdp.ConsumeIntegral<uint64_t>() /* frameTimeUs */,
+                      fdp.PickValueInArray(kSeekModes) /* option */,
+                      fdp.PickValueInArray(kColorFormats) /* colorFormat */) == OK) {
+        auto frameDecoderAPI = fdp.PickValueInArray<const std::function<void()>>({
+                [&]() { decoder->extractFrame(); },
+                [&]() {
+                    FrameRect rect(fdp.ConsumeIntegral<int32_t>() /* left */,
+                                   fdp.ConsumeIntegral<int32_t>() /* top */,
+                                   fdp.ConsumeIntegral<int32_t>() /* right */,
+                                   fdp.ConsumeIntegral<int32_t>() /* bottom */
+                    );
+                    decoder->extractFrame(&rect);
+                },
+                [&]() {
+                    FrameDecoder::getMetadataOnly(
+                            trackMeta, fdp.PickValueInArray(kColorFormats) /* colorFormat */,
+                            fdp.ConsumeBool() /* thumbnail */);
+                },
+        });
+        frameDecoderAPI();
     }
-
-    generated_mime_types.clear();
-
     return 0;
 }
 
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h b/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
index 228c04a..5430530 100644
--- a/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
@@ -20,69 +20,100 @@
 #include <media/stagefright/MetaData.h>
 #include "MediaMimeTypes.h"
 
-#define MAX_METADATA_BUF_SIZE 512
-
 namespace android {
 
 std::vector<std::shared_ptr<char>> generated_mime_types;
+constexpr uint8_t kMinKeyHeight = 32;
+constexpr uint8_t kMinKeyWidth = 32;
+constexpr uint16_t kMaxKeyHeight = 2160;
+constexpr uint16_t kMaxKeyWidth = 3840;
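+// Set by generateMetaData() to the generated frame's height * width.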
+size_t gMaxMediaBufferSize = 0;
 
-sp<MetaData> generateMetaData(FuzzedDataProvider *fdp) {
-    sp<MetaData> newMeta = new MetaData();
+sp<MetaData> generateMetaData(FuzzedDataProvider* fdp, std::string componentName = std::string()) {
+    sp<MetaData> newMeta = sp<MetaData>::make();
 
-    // random MIME Type
-    const char *mime_type;
-    size_t index = fdp->ConsumeIntegralInRange<size_t>(0, kMimeTypes.size());
-    // Let there be a chance of a true random string
-    if (index == kMimeTypes.size()) {
-        std::string mime_str = fdp->ConsumeRandomLengthString(64);
-        std::shared_ptr<char> mime_cstr(new char[mime_str.length()+1]);
-        generated_mime_types.push_back(mime_cstr);
-        strncpy(mime_cstr.get(), mime_str.c_str(), mime_str.length()+1);
-        mime_type = mime_cstr.get();
-    } else {
-        mime_type = kMimeTypes[index];
+    const char* mime;
+    if (!componentName.empty()) {
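+        // componentName, when provided, must be a key in decoderToMediaType (see MediaMimeTypes.h).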
+        auto it = decoderToMediaType.find(componentName);
+        mime = it->second;
     }
-    newMeta->setCString(kKeyMIMEType, mime_type);
+    else {
+        size_t index = fdp->ConsumeIntegralInRange<size_t>(0, kMimeTypes.size());
+        // Let there be a chance of a true random string
+        if (index == kMimeTypes.size()) {
+            std::string mime_str = fdp->ConsumeRandomLengthString(64);
+            std::shared_ptr<char> mime_cstr(new char[mime_str.length() + 1],
+                                            std::default_delete<char[]>());
+            generated_mime_types.push_back(mime_cstr);
+            strncpy(mime_cstr.get(), mime_str.c_str(), mime_str.length()+1);
+            mime = mime_cstr.get();
+        } else {
+            mime = kMimeTypes[index];
+        }
+    }
+    newMeta->setCString(kKeyMIMEType, mime);
 
-    // Thumbnail time
-    newMeta->setInt64(kKeyThumbnailTime, fdp->ConsumeIntegral<int64_t>());
+    auto height = fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyHeight, kMaxKeyHeight);
+    auto width = fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyWidth, kMaxKeyWidth);
+    newMeta->setInt32(kKeyHeight, height);
+    newMeta->setInt32(kKeyWidth, width);
 
-    // Values used by allocVideoFrame
-    newMeta->setInt32(kKeyRotation, fdp->ConsumeIntegral<int32_t>());
-    size_t profile_size =
-        fdp->ConsumeIntegralInRange<size_t>(0, MAX_METADATA_BUF_SIZE);
-    std::vector<uint8_t> profile_bytes =
-        fdp->ConsumeBytes<uint8_t>(profile_size);
-    newMeta->setData(kKeyIccProfile,
-                     fdp->ConsumeIntegral<int32_t>(),
-                     profile_bytes.empty() ? nullptr : profile_bytes.data(),
-                     profile_bytes.size());
-    newMeta->setInt32(kKeySARWidth, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeySARHeight, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeyDisplayWidth, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeyDisplayHeight, fdp->ConsumeIntegral<int32_t>());
+    gMaxMediaBufferSize = height * width;
 
-    // Values used by findThumbnailInfo
-    newMeta->setInt32(kKeyThumbnailWidth, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeyThumbnailHeight, fdp->ConsumeIntegral<int32_t>());
-    size_t thumbnail_size =
-        fdp->ConsumeIntegralInRange<size_t>(0, MAX_METADATA_BUF_SIZE);
-    std::vector<uint8_t> thumb_bytes =
-        fdp->ConsumeBytes<uint8_t>(thumbnail_size);
-    newMeta->setData(kKeyThumbnailHVCC,
-                     fdp->ConsumeIntegral<int32_t>(),
-                     thumb_bytes.empty() ? nullptr : thumb_bytes.data(),
-                     thumb_bytes.size());
+    if (fdp->ConsumeBool()) {
+        newMeta->setInt32(kKeyTileHeight,
+                          fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyHeight, height));
+        newMeta->setInt32(kKeyTileWidth,
+                          fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyWidth, width));
+        newMeta->setInt32(kKeyGridRows, fdp->ConsumeIntegral<uint8_t>());
+        newMeta->setInt32(kKeyGridCols, fdp->ConsumeIntegral<uint8_t>());
+    }
 
-    // Values used by findGridInfo
-    newMeta->setInt32(kKeyTileWidth, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeyTileHeight, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeyGridRows, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeyGridCols, fdp->ConsumeIntegral<int32_t>());
+    if (fdp->ConsumeBool()) {
+        newMeta->setInt32(kKeySARHeight, fdp->ConsumeIntegral<uint8_t>());
+        newMeta->setInt32(kKeySARWidth, fdp->ConsumeIntegral<uint8_t>());
+    }
 
-    // A few functions perform a CHECK() that height/width are set
-    newMeta->setInt32(kKeyHeight, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeyWidth, fdp->ConsumeIntegral<int32_t>());
+    if (fdp->ConsumeBool()) {
+        newMeta->setInt32(kKeyDisplayHeight,
+                          fdp->ConsumeIntegralInRange<uint16_t>(height, UINT16_MAX));
+        newMeta->setInt32(kKeyDisplayWidth,
+                          fdp->ConsumeIntegralInRange<uint16_t>(width, UINT16_MAX));
+    }
+
+    if (fdp->ConsumeBool()) {
+        newMeta->setRect(kKeyCropRect, fdp->ConsumeIntegral<int32_t>() /* left */,
+                         fdp->ConsumeIntegral<int32_t>() /* top */,
+                         fdp->ConsumeIntegral<int32_t>() /* right */,
+                         fdp->ConsumeIntegral<int32_t>() /* bottom */);
+    }
+
+    if (fdp->ConsumeBool()) {
+        newMeta->setInt32(kKeyRotation, fdp->ConsumeIntegralInRange<uint8_t>(0, 3) * 90);
+    }
+
+    if (fdp->ConsumeBool()) {
+        newMeta->setInt64(kKeyThumbnailTime, fdp->ConsumeIntegral<uint64_t>());
+        newMeta->setInt32(kKeyThumbnailHeight, fdp->ConsumeIntegral<uint8_t>());
+        newMeta->setInt32(kKeyThumbnailWidth, fdp->ConsumeIntegral<uint8_t>());
+
+        size_t thumbnailSize = fdp->ConsumeIntegral<size_t>();
+        std::vector<uint8_t> thumbnailData = fdp->ConsumeBytes<uint8_t>(thumbnailSize);
+        if (!strcmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)) {
+            newMeta->setData(kKeyThumbnailAV1C, fdp->ConsumeIntegral<int32_t>() /* type */,
+                             thumbnailData.data(), thumbnailData.size());
+        } else {
+            newMeta->setData(kKeyThumbnailHVCC, fdp->ConsumeIntegral<int32_t>() /* type */,
+                             thumbnailData.data(), thumbnailData.size());
+        }
+    }
+
+    if (fdp->ConsumeBool()) {
+        size_t profileSize = fdp->ConsumeIntegral<size_t>();
+        std::vector<uint8_t> profileData = fdp->ConsumeBytes<uint8_t>(profileSize);
+        newMeta->setData(kKeyIccProfile, fdp->ConsumeIntegral<int32_t>() /* type */,
+                         profileData.data(), profileData.size());
+    }
 
     return newMeta;
 }
diff --git a/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h b/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
index e769950..7e6f662 100644
--- a/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
+++ b/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
@@ -19,31 +19,33 @@
 
 #include <media/stagefright/MediaSource.h>
 
+#define MAX_FRAMES 5
+
 namespace android {
 
 class IMediaSourceFuzzImpl : public IMediaSource {
  public:
-    IMediaSourceFuzzImpl(FuzzedDataProvider *_fdp, size_t _max_buffer_size) :
-        fdp(_fdp),
-        max_buffer_size(_max_buffer_size) {}
-    status_t start(MetaData*) override { return 0; }
-    status_t stop() override { return 0; }
-    sp<MetaData> getFormat() override { return nullptr; }
-    status_t read(MediaBufferBase**,
-        const MediaSource::ReadOptions*) override;
-    status_t readMultiple(Vector<MediaBufferBase*>*, uint32_t,
-        const MediaSource::ReadOptions*) override;
-    bool supportReadMultiple() override { return true; }
-    bool supportNonblockingRead() override { return true; }
-    status_t pause() override { return 0; }
+    IMediaSourceFuzzImpl(FuzzedDataProvider* _fdp, size_t _max_buffer_size)
+        : frames_read(0), fdp(_fdp), min_buffer_size(32 * 32), max_buffer_size(_max_buffer_size) {}
+    status_t start(MetaData*) override { return 0; }
+    status_t stop() override { return 0; }
+    sp<MetaData> getFormat() override { return nullptr; }
+    status_t read(MediaBufferBase**, const MediaSource::ReadOptions*) override;
+    status_t readMultiple(Vector<MediaBufferBase*>*, uint32_t,
+                          const MediaSource::ReadOptions*) override;
+    bool supportReadMultiple() override { return true; }
+    bool supportNonblockingRead() override { return true; }
+    status_t pause() override { return 0; }
 
  protected:
     IBinder* onAsBinder() { return nullptr; }
 
  private:
-    FuzzedDataProvider *fdp;
-    std::vector<std::shared_ptr<MediaBufferBase>> buffer_bases;
-    const size_t max_buffer_size;
+    uint8_t frames_read;
+    FuzzedDataProvider* fdp;
+    const size_t min_buffer_size;
+    const size_t max_buffer_size;
+    std::vector<uint8_t> buf;
 };
 
 // This class is simply to expose the destructor
@@ -53,32 +55,41 @@
     ~MediaBufferFuzzImpl() {}
 };
 
-status_t IMediaSourceFuzzImpl::read(MediaBufferBase **buffer,
-        const MediaSource::ReadOptions *options) {
+status_t IMediaSourceFuzzImpl::read(MediaBufferBase** buffer, const MediaSource::ReadOptions*) {
     Vector<MediaBufferBase*> buffers;
-    status_t ret = readMultiple(&buffers, 1, options);
+    status_t ret = readMultiple(&buffers, 1, nullptr);
     *buffer = buffers.empty() ? nullptr : buffers[0];
 
     return ret;
 }
 
-status_t IMediaSourceFuzzImpl::readMultiple(Vector<MediaBufferBase*>* buffers,
-        uint32_t maxNumBuffers, const MediaSource::ReadOptions*) {
-    uint32_t num_buffers =
-        fdp->ConsumeIntegralInRange<uint32_t>(0, maxNumBuffers);
-    for(uint32_t i = 0; i < num_buffers; i++) {
-        std::vector<uint8_t> buf = fdp->ConsumeBytes<uint8_t>(
-            fdp->ConsumeIntegralInRange<size_t>(0, max_buffer_size));
+status_t IMediaSourceFuzzImpl::readMultiple(Vector<MediaBufferBase*>* buffers, uint32_t,
+                                            const MediaSource::ReadOptions*) {
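+    // On the MAX_FRAMES-th read, return one last buffer and signal end of stream.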
+    if (++frames_read == MAX_FRAMES) {
+        auto size = fdp->ConsumeIntegralInRange<size_t>(min_buffer_size, max_buffer_size);
+        buf = fdp->ConsumeBytes<uint8_t>(size);
+        if (buf.size() < size) {
+            buf.resize(size, 0);
+        }
 
-        std::shared_ptr<MediaBufferBase> mbb(
-            new MediaBufferFuzzImpl(buf.data(), buf.size()));
+        MediaBufferBase* mbb = new MediaBufferFuzzImpl(buf.data(), buf.size());
+        mbb->meta_data().setInt64(kKeyTime, fdp->ConsumeIntegral<uint64_t>());
+        buffers->push_back(mbb);
 
-        buffer_bases.push_back(mbb);
-        buffers->push_back(mbb.get());
+        return ERROR_END_OF_STREAM;
     }
 
-    // STATUS_OK
-    return 0;
+    auto size = fdp->ConsumeIntegralInRange<size_t>(min_buffer_size, max_buffer_size);
+    buf = fdp->ConsumeBytes<uint8_t>(size);
+    if (buf.size() < size) {
+        buf.resize(size, 0);
+    }
+
+    MediaBufferBase* mbb = new MediaBufferFuzzImpl(buf.data(), buf.size());
+    mbb->meta_data().setInt64(kKeyTime, fdp->ConsumeIntegral<uint64_t>());
+    buffers->push_back(mbb);
+
+    return OK;
 }
 
 } // namespace android
diff --git a/media/libstagefright/tests/fuzzers/MediaMimeTypes.h b/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
index 9f337ac..de7814e 100644
--- a/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
+++ b/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
@@ -18,6 +18,7 @@
 #define FUZZER_MEDIAMIMETYPES_H_
 
 #include <media/stagefright/foundation/MediaDefs.h>
+#include <unordered_map>
 
 namespace android {
 
@@ -80,6 +81,15 @@
     MEDIA_MIMETYPE_DATA_TIMED_ID3
 };
 
+static const std::unordered_map<std::string, const char*> decoderToMediaType = {
+        {"c2.android.vp8.decoder", MEDIA_MIMETYPE_VIDEO_VP8},
+        {"c2.android.vp9.decoder", MEDIA_MIMETYPE_VIDEO_VP9},
+        {"c2.android.av1.decoder", MEDIA_MIMETYPE_VIDEO_AV1},
+        {"c2.android.avc.decoder", MEDIA_MIMETYPE_VIDEO_AVC},
+        {"c2.android.hevc.decoder", MEDIA_MIMETYPE_VIDEO_HEVC},
+        {"c2.android.mpeg4.decoder", MEDIA_MIMETYPE_VIDEO_MPEG4},
+        {"c2.android.h263.decoder", MEDIA_MIMETYPE_VIDEO_H263}};
+
 }  // namespace android
 
 #endif  // FUZZER_MEDIAMIMETYPES_H_
diff --git a/media/libstagefright/tests/fuzzers/corpus/0ef67b8a074fed50b8875df345ab2e62175c34c9 b/media/libstagefright/tests/fuzzers/corpus/0ef67b8a074fed50b8875df345ab2e62175c34c9
new file mode 100644
index 0000000..652581f
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/0ef67b8a074fed50b8875df345ab2e62175c34c9
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/60eb43c963545c0b2676dad3e4c38cfe87136bbc b/media/libstagefright/tests/fuzzers/corpus/60eb43c963545c0b2676dad3e4c38cfe87136bbc
new file mode 100644
index 0000000..60ca169
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/60eb43c963545c0b2676dad3e4c38cfe87136bbc
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/8c7cb9439f81a8e00b651b3658fe24116f37df7e b/media/libstagefright/tests/fuzzers/corpus/8c7cb9439f81a8e00b651b3658fe24116f37df7e
new file mode 100644
index 0000000..c03bcad
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/8c7cb9439f81a8e00b651b3658fe24116f37df7e
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/c624e73c16c59dfbc3c563416cfc962e3c3a96a0 b/media/libstagefright/tests/fuzzers/corpus/c624e73c16c59dfbc3c563416cfc962e3c3a96a0
new file mode 100644
index 0000000..52f2d5a
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/c624e73c16c59dfbc3c563416cfc962e3c3a96a0
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/c6aff0d7ccaf58a1964a6bcc51777bf1786503ca b/media/libstagefright/tests/fuzzers/corpus/c6aff0d7ccaf58a1964a6bcc51777bf1786503ca
new file mode 100644
index 0000000..83c522f
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/c6aff0d7ccaf58a1964a6bcc51777bf1786503ca
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/ec6bd6069f74a2f6e92442f88efb29288ad6f456 b/media/libstagefright/tests/fuzzers/corpus/ec6bd6069f74a2f6e92442f88efb29288ad6f456
new file mode 100644
index 0000000..62d259b
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/ec6bd6069f74a2f6e92442f88efb29288ad6f456
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/fbf47d9a9173df0a39285c94d89fcbc767d5e774 b/media/libstagefright/tests/fuzzers/corpus/fbf47d9a9173df0a39285c94d89fcbc767d5e774
new file mode 100644
index 0000000..db78b75
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/fbf47d9a9173df0a39285c94d89fcbc767d5e774
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
index d582062..f5e72f5 100644
--- a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
@@ -38,6 +38,7 @@
 
 [[clang::no_destroy]] static std::once_flag gSmOnce;
 sp<FakeServiceManager> gFakeServiceManager;
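+// Created on the first fuzzer iteration and reused across subsequent runs.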
+sp<AudioPolicyService> gAudioPolicyService;
 
 bool addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
                 FuzzedDataProvider& fdp) {
@@ -49,22 +50,10 @@
     return true;
 }
 
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
-    FuzzedDataProvider fdp(data, size);
-
-    std::call_once(gSmOnce, [&] {
-        /* Create a FakeServiceManager instance and add required services */
-        gFakeServiceManager = sp<FakeServiceManager>::make();
-        setDefaultServiceManager(gFakeServiceManager);
-    });
-    gFakeServiceManager->clear();
-
-    for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
-                                "batterystats", "media.metrics"}) {
-        if (!addService(String16(service), gFakeServiceManager, fdp)) {
-            return 0;
-        }
-    }
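+// Called once by libFuzzer before fuzzing starts; installs the fake service manager and mock services.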
+extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
+    /* Create a FakeServiceManager instance and add required services */
+    gFakeServiceManager = sp<FakeServiceManager>::make();
+    setDefaultServiceManager(gFakeServiceManager);
 
     auto configService = ndk::SharedRefBase::make<ConfigMock>();
     CHECK_EQ(NO_ERROR, AServiceManager_addService(configService.get()->asBinder().get(),
@@ -82,22 +71,37 @@
     // Disable creating thread pool for fuzzer instance of audio flinger and audio policy services
     AudioSystem::disableThreadPool();
 
-    const auto audioFlinger = sp<AudioFlinger>::make();
-    const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+    return 0;
+}
 
-    CHECK_EQ(NO_ERROR,
-             gFakeServiceManager->addService(
-                     String16(IAudioFlinger::DEFAULT_SERVICE_NAME), IInterface::asBinder(afAdapter),
-                     false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
 
-    const auto audioPolicyService = sp<AudioPolicyService>::make();
+    for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
+                                "batterystats", "media.metrics"}) {
+        if (!addService(String16(service), gFakeServiceManager, fdp)) {
+            return 0;
+        }
+    }
 
-    CHECK_EQ(NO_ERROR,
-             gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
-                                             false /* allowIsolated */,
-                                             IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+    // TODO(330882064): Initialize AudioFlinger and AudioPolicyService on every fuzzer iteration.
+    std::call_once(gSmOnce, [&] {
+        const auto audioFlinger = sp<AudioFlinger>::make();
+        const auto audioFlingerServerAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+        CHECK_EQ(NO_ERROR,
+                 gFakeServiceManager->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+                                                 IInterface::asBinder(audioFlingerServerAdapter),
+                                                 false /* allowIsolated */,
+                                                 IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
 
-    fuzzService(media::IAudioPolicyService::asBinder(audioPolicyService), std::move(fdp));
+        gAudioPolicyService = sp<AudioPolicyService>::make();
+        CHECK_EQ(NO_ERROR,
+                 gFakeServiceManager->addService(String16("media.audio_policy"),
+                                                 gAudioPolicyService, false /* allowIsolated */,
+                                                 IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+    });
+
+    fuzzService(media::IAudioPolicyService::asBinder(gAudioPolicyService), std::move(fdp));
 
     return 0;
 }
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 42afa1e..315e08d 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -123,8 +123,8 @@
     device->toAudioPort(&devicePort);
     if (status_t status = mpClientInterface->setDeviceConnectedState(&devicePort, state);
             status != OK) {
-        ALOGE("Error %d while setting connected state for device %s",
-                static_cast<int>(state),
+        ALOGE("Error %d while setting connected state %d for device %s",
+                status, static_cast<int>(state),
                 device->getDeviceTypeAddr().toString(false).c_str());
     }
 }
@@ -212,9 +212,9 @@
             if (checkOutputsForDevice(device, state, outputs) != NO_ERROR) {
                 mAvailableOutputDevices.remove(device);
 
-                mHwModules.cleanUpForDevice(device);
-
                 broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);
+
+                mHwModules.cleanUpForDevice(device);
                 return INVALID_OPERATION;
             }
 
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 74d3474..8642fd4 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -2373,6 +2373,116 @@
                 )
         );
 
+namespace {
+
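+// Test client whose openOutput()/openInput() can be forced to fail, to exercise the device
+// connection error path.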
+class AudioPolicyManagerTestClientOpenFails : public AudioPolicyManagerTestClient {
+  public:
+    status_t openOutput(audio_module_handle_t module,
+                        audio_io_handle_t *output,
+                        audio_config_t * halConfig,
+                        audio_config_base_t * mixerConfig,
+                        const sp<DeviceDescriptorBase>& device,
+                        uint32_t * latencyMs,
+                        audio_output_flags_t flags) override {
+        return mSimulateFailure ? BAD_VALUE :
+                AudioPolicyManagerTestClient::openOutput(
+                        module, output, halConfig, mixerConfig, device, latencyMs, flags);
+    }
+
+    status_t openInput(audio_module_handle_t module,
+                       audio_io_handle_t *input,
+                       audio_config_t * config,
+                       audio_devices_t * device,
+                       const String8 & address,
+                       audio_source_t source,
+                       audio_input_flags_t flags) override {
+        return mSimulateFailure ? BAD_VALUE :
+                AudioPolicyManagerTestClient::openInput(
+                        module, input, config, device, address, source, flags);
+    }
+
+    void setSimulateFailure(bool simulateFailure) { mSimulateFailure = simulateFailure; }
+
+  private:
+    bool mSimulateFailure = false;
+};
+
+}  // namespace
+
+using DeviceConnectionWithFormatTestParams =
+        std::tuple<audio_devices_t /*type*/, std::string /*name*/, std::string /*address*/,
+        audio_format_t /*format*/>;
+
+class AudioPolicyManagerTestDeviceConnectionFailed :
+        public AudioPolicyManagerTestWithConfigurationFile,
+        public testing::WithParamInterface<DeviceConnectionWithFormatTestParams> {
+  protected:
+    std::string getConfigFile() override { return sBluetoothConfig; }
+    AudioPolicyManagerTestClient* getClient() override {
+        mFullClient = new AudioPolicyManagerTestClientOpenFails;
+        return mFullClient;
+    }
+    void setSimulateOpenFailure(bool simulateFailure) {
+        mFullClient->setSimulateFailure(simulateFailure);
+    }
+
+    static const std::string sBluetoothConfig;
+
+  private:
+    AudioPolicyManagerTestClientOpenFails* mFullClient;
+};
+
+const std::string AudioPolicyManagerTestDeviceConnectionFailed::sBluetoothConfig =
+        AudioPolicyManagerTestDeviceConnectionFailed::sExecutableDir +
+        "test_audio_policy_configuration_bluetooth.xml";
+
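+// A failed openOutput()/openInput() must still report the device as connected and then
+// disconnected, with the full address present in both port reports.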
+TEST_P(AudioPolicyManagerTestDeviceConnectionFailed, SetDeviceConnectedStateHasAddress) {
+    const audio_devices_t type = std::get<0>(GetParam());
+    const std::string name = std::get<1>(GetParam());
+    const std::string address = std::get<2>(GetParam());
+    const audio_format_t format = std::get<3>(GetParam());
+
+    EXPECT_EQ(0, mClient->getConnectedDevicePortCount());
+    EXPECT_EQ(0, mClient->getDisconnectedDevicePortCount());
+
+    setSimulateOpenFailure(true);
+    ASSERT_EQ(INVALID_OPERATION, mManager->setDeviceConnectionState(
+            type, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+            address.c_str(), name.c_str(), format));
+
+    // Since the failure happens when opening input/output, the device must be connected
+    // first and then disconnected.
+    EXPECT_EQ(1, mClient->getConnectedDevicePortCount());
+    EXPECT_EQ(1, mClient->getDisconnectedDevicePortCount());
+
+    if (mClient->getConnectedDevicePortCount() > 0) {
+        auto port = mClient->getLastConnectedDevicePort();
+        EXPECT_EQ(type, port->ext.device.type);
+        EXPECT_EQ(0, strncmp(port->ext.device.address, address.c_str(),
+                        AUDIO_DEVICE_MAX_ADDRESS_LEN)) << "\"" << port->ext.device.address << "\"";
+    }
+    if (mClient->getDisconnectedDevicePortCount() > 0) {
+        auto port = mClient->getLastDisconnectedDevicePort();
+        EXPECT_EQ(type, port->ext.device.type);
+        EXPECT_EQ(0, strncmp(port->ext.device.address, address.c_str(),
+                        AUDIO_DEVICE_MAX_ADDRESS_LEN)) << "\"" << port->ext.device.address << "\"";
+    }
+}
+
+INSTANTIATE_TEST_CASE_P(
+        DeviceConnectionFailure,
+        AudioPolicyManagerTestDeviceConnectionFailed,
+        testing::Values(
+                DeviceConnectionWithFormatTestParams({AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET,
+                            "bt_hfp_in", "00:11:22:33:44:55", AUDIO_FORMAT_DEFAULT}),
+                DeviceConnectionWithFormatTestParams({AUDIO_DEVICE_OUT_BLUETOOTH_SCO,
+                            "bt_hfp_out", "00:11:22:33:44:55", AUDIO_FORMAT_DEFAULT}),
+                DeviceConnectionWithFormatTestParams({AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+                            "bt_a2dp_out", "00:11:22:33:44:55", AUDIO_FORMAT_DEFAULT}),
+                DeviceConnectionWithFormatTestParams({AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+                            "bt_a2dp_out", "00:11:22:33:44:66", AUDIO_FORMAT_LDAC})
+                )
+        );
+
 class AudioPolicyManagerCarTest : public AudioPolicyManagerTestDynamicPolicy {
 protected:
     std::string getConfigFile() override { return sCarConfig; }
@@ -3283,4 +3393,4 @@
 
     // unregister effect should succeed since effect shall have been restore on the client session
     ASSERT_EQ(NO_ERROR, mManager->unregisterEffect(effectId));
-}
\ No newline at end of file
+}
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index 3f6a6e3..535dd7a 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -11,6 +11,7 @@
     name: "audiopolicytest_configuration_files",
     srcs: [
         "test_audio_policy_configuration.xml",
+        "test_audio_policy_configuration_bluetooth.xml",
         "test_audio_policy_primary_only_configuration.xml",
         "test_car_ap_atmos_offload_configuration.xml",
         "test_invalid_audio_policy_configuration.xml",
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration_bluetooth.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration_bluetooth.xml
new file mode 100644
index 0000000..0cf1688
--- /dev/null
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration_bluetooth.xml
@@ -0,0 +1,157 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<audioPolicyConfiguration version="7.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="true"/>
+
+    <modules>
+        <!-- Primary module -->
+        <module name="primary" halVersion="2.0">
+            <attachedDevices>
+                <item>Speaker</item>
+                <item>Built-In Mic</item>
+            </attachedDevices>
+            <defaultOutputDevice>Speaker</defaultOutputDevice>
+            <mixPorts>
+                <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="primary input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bt_hfp_output" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bt_hfp_input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 11025 16000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_MONO"/>
+                </mixPort>
+                <mixPort name="voip_tx" role="sink"
+                         flags="AUDIO_INPUT_FLAG_VOIP_TX">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000 32000 48000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+                </mixPort>
+                <mixPort name="voip_rx" role="source"
+                         flags="AUDIO_OUTPUT_FLAG_VOIP_RX">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                           samplingRates="8000 16000 32000 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+            </mixPorts>
+            <devicePorts>
+                <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
+                </devicePort>
+                <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+                </devicePort>
+                <devicePort tagName="Hdmi" type="AUDIO_DEVICE_OUT_HDMI" role="sink"
+                            encodedFormats="AUDIO_FORMAT_AC3">
+                </devicePort>
+                <devicePort tagName="Hdmi-In Mic" type="AUDIO_DEVICE_IN_HDMI" role="source">
+                </devicePort>
+                <devicePort tagName="BT SCO" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO" role="sink" />
+                <devicePort tagName="BT SCO Headset Mic" type="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET"
+                            role="source" />
+                <devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink"
+                            encodedFormats="AUDIO_FORMAT_SBC">
+                    <profile name="" format="AUDIO_FORMAT_PCM_8_BIT"
+                             samplingRates="44100 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </devicePort>
+                <devicePort tagName="USB Device Out" type="AUDIO_DEVICE_OUT_USB_DEVICE" role="sink">
+                </devicePort>
+                <devicePort tagName="USB Device In" type="AUDIO_DEVICE_IN_USB_DEVICE" role="source">
+                </devicePort>
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="Speaker"
+                       sources="primary output,voip_rx"/>
+                <route type="mix" sink="primary input"
+                       sources="Built-In Mic,Hdmi-In Mic,USB Device In"/>
+                <route type="mix" sink="voip_tx"
+                       sources="Built-In Mic"/>
+                <route type="mix" sink="Hdmi"
+                       sources="primary output"/>
+                <route type="mix" sink="BT SCO"
+                       sources="mixport_bt_hfp_output"/>
+                <route type="mix" sink="mixport_bt_hfp_input"
+                       sources="BT SCO Headset Mic"/>
+                <route type="mix" sink="BT A2DP Out"
+                       sources="primary output"/>
+                <route type="mix" sink="USB Device Out"
+                       sources="primary output"/>
+            </routes>
+        </module>
+
+        <!-- Remote Submix module -->
+        <module name="r_submix" halVersion="2.0">
+            <attachedDevices>
+                <item>Remote Submix In</item>
+            </attachedDevices>
+            <mixPorts>
+                <mixPort name="r_submix output" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="r_submix input" role="sink">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+           </mixPorts>
+           <devicePorts>
+               <devicePort tagName="Remote Submix Out" type="AUDIO_DEVICE_OUT_REMOTE_SUBMIX"  role="sink">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+               </devicePort>
+               <devicePort tagName="Remote Submix In" type="AUDIO_DEVICE_IN_REMOTE_SUBMIX"  role="source">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </devicePort>
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="Remote Submix Out"
+                       sources="r_submix output"/>
+                <route type="mix" sink="r_submix input"
+                       sources="Remote Submix In"/>
+            </routes>
+        </module>
+
+        <!-- Software Bluetooth Module -->
+        <module name="bluetooth" halVersion="2.0">
+          <mixPorts>
+            <mixPort name="a2dp_sw_output" role="source">
+              <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                       samplingRates="44100 48000 88200 96000"
+                       channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+            </mixPort>
+          </mixPorts>
+          <devicePorts>
+            <devicePort tagName="BTS A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink">
+              <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                       samplingRates="44100 48000 88200 96000"
+                       channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+            </devicePort>
+          </devicePorts>
+          <routes>
+            <route type="mix" sink="BTS A2DP Out"
+                   sources="a2dp_sw_output"/>
+          </routes>
+        </module>
+
+      </modules>
+</audioPolicyConfiguration>
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
index 34aeb12..84a1ce6 100644
--- a/services/medialog/fuzzer/Android.bp
+++ b/services/medialog/fuzzer/Android.bp
@@ -35,7 +35,7 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-audio-fuzzing-reports@google.com",
         ],
         componentid: 155276,
         hotlists: [
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
index 433332c..c6793a9 100644
--- a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -33,6 +33,8 @@
 constexpr size_t kLogItemsLowWater = 1;
 // high water mark
 constexpr size_t kLogItemsHighWater = 2;
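+// Caps on the generated item name length and on the number of items processed per fuzzer input.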
+constexpr size_t kMaxItemLength = 16;
+constexpr size_t kMaxApis = 64;
 
 class MediaMetricsServiceFuzzer {
    public:
@@ -304,10 +306,11 @@
     }
 
     FuzzedDataProvider fdp2 = FuzzedDataProvider(data, size);
-
-    while (fdp2.remaining_bytes()) {
+    size_t apiCount = 0;
+    while (fdp2.remaining_bytes() && ++apiCount <= kMaxApis) {
         // make a test item
-        auto item = std::make_shared<mediametrics::Item>(fdp2.ConsumeRandomLengthString().c_str());
+        auto item = std::make_shared<mediametrics::Item>(
+                fdp2.ConsumeRandomLengthString(kMaxItemLength).c_str());
         (*item).set("event", fdp2.ConsumeRandomLengthString().c_str());
 
         // get the actions and execute them
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index cd00937..a8a1de1 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -109,23 +109,17 @@
     return CodecBucketUnspecified;
 }
 
-static bool getLogMessage(int hwCount, int swCount, std::stringstream& logMsg) {
-    bool update = false;
-    logMsg.clear();
+static std::string getLogMessage(const std::string& firstKey, const long& firstValue,
+                                 const std::string& secondKey, const long& secondValue) {
 
-    if (hwCount > 0) {
-        logMsg << " HW: " << hwCount;
-        update = true;
+    std::stringstream logMsg;
+    if (firstValue > 0) {
+        logMsg << firstKey << firstValue;
     }
-    if (swCount > 0) {
-        logMsg << " SW: " << swCount;
-        update = true;
+    if (secondValue > 0) {
+        logMsg << secondKey << secondValue;
     }
-
-    if (update) {
-        logMsg << " ] ";
-    }
-    return update;
+    return logMsg.str();
 }
 
 ResourceManagerMetrics::ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo) {
@@ -364,6 +358,15 @@
     std::scoped_lock lock(mLock);
     // post MediaCodecConcurrentUsageReported for this terminated pid.
     pushConcurrentUsageReport(pid, uid);
+    // Remove all the metrics associated with this process.
+    std::map<int32_t, ConcurrentCodecs>::iterator it1 = mProcessConcurrentCodecsMap.find(pid);
+    if (it1 != mProcessConcurrentCodecsMap.end()) {
+        mProcessConcurrentCodecsMap.erase(it1);
+    }
+    std::map<int32_t, PixelCount>::iterator it2 = mProcessPixelsMap.find(pid);
+    if (it2 != mProcessPixelsMap.end()) {
+        mProcessPixelsMap.erase(it2);
+    }
 }
 
 void ResourceManagerMetrics::pushConcurrentUsageReport(int32_t pid, uid_t uid) {
@@ -400,24 +403,30 @@
 
     std::stringstream peakCodecLog;
     peakCodecLog << "Peak { ";
-    std::stringstream logMsg;
-    if (getLogMessage(peakHwAudioEncoderCount, peakSwAudioEncoderCount, logMsg)) {
-        peakCodecLog << "AudioEnc[" << logMsg.str();
+    std::string logMsg;
+    logMsg = getLogMessage(" HW: ", peakHwAudioEncoderCount, " SW: ", peakSwAudioEncoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "AudioEnc[ " << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwAudioDecoderCount, peakSwAudioDecoderCount, logMsg)) {
-        peakCodecLog << "AudioDec[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwAudioDecoderCount, " SW: ", peakSwAudioDecoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "AudioDec[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwVideoEncoderCount, peakSwVideoEncoderCount, logMsg)) {
-        peakCodecLog << "VideoEnc[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwVideoEncoderCount, " SW: ", peakSwVideoEncoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "VideoEnc[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwVideoDecoderCount, peakSwVideoDecoderCount, logMsg)) {
-        peakCodecLog << "VideoDec[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwVideoDecoderCount, " SW: ", peakSwVideoDecoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "VideoDec[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwImageEncoderCount, peakSwImageEncoderCount, logMsg)) {
-        peakCodecLog << "ImageEnc[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwImageEncoderCount, " SW: ", peakSwImageEncoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "ImageEnc[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwImageDecoderCount, peakSwImageDecoderCount, logMsg)) {
-        peakCodecLog << "ImageDec[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwImageDecoderCount, " SW: ", peakSwImageDecoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "ImageDec[" << logMsg << " ] ";
     }
     peakCodecLog << "}";
 
@@ -705,4 +714,114 @@
     return 0;
 }
 
+static std::string getConcurrentInstanceCount(const std::map<std::string, int>& resourceMap) {
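+    // Builds the "Current Concurrent Codec Instances" section of the metrics dump.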
+    if (resourceMap.empty()) {
+        return "";
+    }
+    std::stringstream concurrentInstanceInfo;
+    for (const auto& [name, count] : resourceMap) {
+        if (count > 0) {
+            concurrentInstanceInfo << "      Name: " << name << " Instances: " << count << "\n";
+        }
+    }
+
+    std::string info = concurrentInstanceInfo.str();
+    if (info.empty()) {
+        return "";
+    }
+    return "    Current Concurrent Codec Instances:\n" + info;
+}
+
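+// Builds the "Applications Pixel Usage" section: current and peak pixel counts per process.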
+static std::string getAppsPixelCount(const std::map<int32_t, PixelCount>& pixelMap) {
+    if (pixelMap.empty()) {
+        return "";
+    }
+    std::stringstream pixelInfo;
+    for (const auto& [pid, pixelCount] : pixelMap) {
+        std::string logMsg = getLogMessage(" Current Pixels: ", pixelCount.mCurrent,
+                                           " Peak Pixels: ", pixelCount.mPeak);
+        if (!logMsg.empty()) {
+            pixelInfo << "      PID[" << pid << "]: {" << logMsg << " }\n";
+        }
+    }
+
+    return "    Applications Pixel Usage:\n" + pixelInfo.str();
+}
+
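+// Formats the HW/SW codec counts of a ConcurrentCodecsMap (current or peak) as a single line.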
+static std::string getCodecUsageMetrics(const ConcurrentCodecsMap& codecsMap) {
+    int hwAudioEncoderCount = codecsMap[HwAudioEncoder];
+    int hwAudioDecoderCount = codecsMap[HwAudioDecoder];
+    int hwVideoEncoderCount = codecsMap[HwVideoEncoder];
+    int hwVideoDecoderCount = codecsMap[HwVideoDecoder];
+    int hwImageEncoderCount = codecsMap[HwImageEncoder];
+    int hwImageDecoderCount = codecsMap[HwImageDecoder];
+    int swAudioEncoderCount = codecsMap[SwAudioEncoder];
+    int swAudioDecoderCount = codecsMap[SwAudioDecoder];
+    int swVideoEncoderCount = codecsMap[SwVideoEncoder];
+    int swVideoDecoderCount = codecsMap[SwVideoDecoder];
+    int swImageEncoderCount = codecsMap[SwImageEncoder];
+    int swImageDecoderCount = codecsMap[SwImageDecoder];
+    std::stringstream usageMetrics;
+    std::string logMsg;
+    logMsg = getLogMessage(" HW: ", peakHwAudioEncoderCount, " SW: ", peakSwAudioEncoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "AudioEnc[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwAudioDecoderCount, " SW: ", peakSwAudioDecoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "AudioDec[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwVideoEncoderCount, " SW: ", peakSwVideoEncoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "VideoEnc[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwVideoDecoderCount, " SW: ", peakSwVideoDecoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "VideoDec[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwImageEncoderCount, " SW: ", peakSwImageEncoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "ImageEnc[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwImageDecoderCount, " SW: ", peakSwImageDecoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "ImageDec[" << logMsg << " ] ";
+    }
+
+    return usageMetrics.str();
+}
+
+static std::string getAppsCodecUsageMetrics(
+        const std::map<int32_t, ConcurrentCodecs>& processCodecsMap) {
+    if (processCodecsMap.empty()) {
+        return "";
+    }
+    std::stringstream codecUsage;
+    std::string info;
+    for (const auto& [pid, codecMap] : processCodecsMap) {
+        codecUsage << "      PID[" << pid << "]: ";
+        info = getCodecUsageMetrics(codecMap.mCurrent);
+        if (!info.empty()) {
+            codecUsage << "Current Codec Usage: { " << info << "} ";
+        }
+        info = getCodecUsageMetrics(codecMap.mPeak);
+        if (!info.empty()) {
+            codecUsage << "Peak Codec Usage: { " << info << "}";
+        }
+        codecUsage << "\n";
+    }
+
+    return "    Applications Codec Usage:\n" + codecUsage.str();
+}
+
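+// Collects the metrics sections above into the text block appended to the resource manager dump.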
+std::string ResourceManagerMetrics::dump() const {
+    std::string metricsLog("  Metrics logs:\n");
+    metricsLog += getConcurrentInstanceCount(mConcurrentResourceCountMap);
+    metricsLog += getAppsPixelCount(mProcessPixelsMap);
+    metricsLog += getAppsCodecUsageMetrics(mProcessConcurrentCodecsMap);
+
+    return metricsLog;
+}
+
 } // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
index 7a5a89f..9904f7d 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.h
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -171,6 +171,9 @@
     // Get the current concurrent pixel count (associated with the video codecs) for the process.
     long getCurrentConcurrentPixelCount(int pid) const;
 
+    // Retrieves the metrics log as a formatted string.
+    std::string dump() const;
+
 private:
     ResourceManagerMetrics(const ResourceManagerMetrics&) = delete;
     ResourceManagerMetrics(ResourceManagerMetrics&&) = delete;
@@ -204,9 +207,9 @@
     // Map of resources (name) and number of concurrent instances
     std::map<std::string, int> mConcurrentResourceCountMap;
 
-    // Map of concurrent codes by CodecBucket across the system.
+    // Map of concurrent codecs by CodecBucket across the system.
     ConcurrentCodecsMap mConcurrentCodecsMap;
-    // Map of concurrent and peak codes by CodecBucket for each process/application.
+    // Map of concurrent and peak codecs by CodecBucket for each process/application.
     std::map<int32_t, ConcurrentCodecs> mProcessConcurrentCodecsMap;
 
     // Uid Observer to monitor the application termination.
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index d37d893..9c2fb7c 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -108,10 +108,17 @@
         serviceLog = mServiceLog->toString("    " /* linePrefix */);
     }
 
-    // Get all the resource (and overload pid) logs
+    // Get the resource (and overload pid) log.
     std::string resourceLog;
     getResourceDump(resourceLog);
 
+    // Get the metrics log.
+    std::string metricsLog;
+    {
+        std::scoped_lock lock{mLock};
+        metricsLog = mResourceManagerMetrics->dump();
+    }
+
     const size_t SIZE = 256;
     char buffer[SIZE];
     snprintf(buffer, SIZE, "ResourceManagerService: %p\n", this);
@@ -123,11 +130,16 @@
             supportsSecureWithNonSecureCodec);
     result.append(buffer);
 
+    // Add resource log.
     result.append(resourceLog.c_str());
 
+    // Add service log.
     result.append("  Events logs (most recent at top):\n");
     result.append(serviceLog);
 
+    // Add metrics log.
+    result.append(metricsLog.c_str());
+
     write(fd, result.c_str(), result.size());
     return OK;
 }