Merge "Add audio aconfig OWNERS" into main
diff --git a/Android.bp b/Android.bp
index 72b8721..afb1341 100644
--- a/Android.bp
+++ b/Android.bp
@@ -133,3 +133,19 @@
     frozen: true,
 
 }
+
+latest_av_audio_types_aidl = "av-audio-types-aidl-V1"
+
+cc_defaults {
+    name: "latest_av_audio_types_aidl_ndk_shared",
+    shared_libs: [
+        latest_av_audio_types_aidl + "-ndk",
+    ],
+}
+
+cc_defaults {
+    name: "latest_av_audio_types_aidl_ndk_static",
+    static_libs: [
+        latest_av_audio_types_aidl + "-ndk",
+    ],
+}
diff --git a/camera/Android.bp b/camera/Android.bp
index 22f1633..4c5b160 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -46,6 +46,7 @@
 aconfig_declarations {
     name: "camera_platform_flags",
     package: "com.android.internal.camera.flags",
+    container: "system",
     srcs: ["camera_platform.aconfig"],
 }
 
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index 5d2a263..46a4cf2 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -1,4 +1,5 @@
 package: "com.android.internal.camera.flags"
+container: "system"
 
 flag {
      namespace: "camera_platform"
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index c2a7549..9912fbe 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -88,6 +88,7 @@
     bool initCamera();
     void invokeCamera();
     void invokeSetParameters();
+    native_handle_t* createNativeHandle();
     sp<Camera> mCamera = nullptr;
     FuzzedDataProvider* mFDP = nullptr;
 
@@ -102,6 +103,18 @@
     };
 };
 
+native_handle_t* CameraFuzzer::createNativeHandle() {
+    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kMinElements, kMaxElements);
+    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+    native_handle_t* handle = native_handle_create(numFds, numInts);
+    for (int32_t i = 0; i < numFds; ++i) {
+        std::string filename = mFDP->ConsumeRandomLengthString(kMaxBytes);
+        int32_t fd = open(filename.c_str(), O_RDWR | O_CREAT | O_TRUNC, 0600);
+        handle->data[i] = fd;
+    }
+    return handle;
+}
+
 bool CameraFuzzer::initCamera() {
     ProcessState::self()->startThreadPool();
     sp<IServiceManager> sm = defaultServiceManager();
@@ -288,15 +301,11 @@
                 },
                 [&]() {
                     int64_t timestamp = mFDP->ConsumeIntegral<int64_t>();
-                    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    native_handle_t* handle = native_handle_create(numFds, numInts);
+                    native_handle_t* handle = createNativeHandle();
                     mCamera->recordingFrameHandleCallbackTimestamp(timestamp, handle);
                 },
                 [&]() {
-                    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    native_handle_t* handle = native_handle_create(numFds, numInts);
+                    native_handle_t* handle = createNativeHandle();
                     mCamera->releaseRecordingFrameHandle(handle);
                 },
                 [&]() { mCamera->releaseRecordingFrame(iMem); },
@@ -305,9 +314,7 @@
                     for (int8_t i = 0;
                          i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
                          ++i) {
-                        int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        native_handle_t* handle = native_handle_create(numFds, numInts);
+                        native_handle_t* handle = createNativeHandle();
                         handles.push_back(handle);
                     }
                     mCamera->releaseRecordingFrameHandleBatch(handles);
@@ -317,9 +324,7 @@
                     for (int8_t i = 0;
                          i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
                          ++i) {
-                        int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        native_handle_t* handle = native_handle_create(numFds, numInts);
+                        native_handle_t* handle = createNativeHandle();
                         handles.push_back(handle);
                     }
                     std::vector<nsecs_t> timestamps;
diff --git a/media/OWNERS b/media/OWNERS
index 976fb9e..b926075 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -14,5 +14,8 @@
 taklee@google.com
 wonsik@google.com
 
+# For TEST_MAPPING tv-presubmit and tv-postsubmit configurations:
+per-file TEST_MAPPING = blindahl@google.com
+
 # go/android-fwk-media-solutions for info on areas of ownership.
 include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index cd5d354..1a637ac 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -44,5 +44,16 @@
             ],
             "file_patterns": ["(?i)drm|crypto"]
         }
+    ],
+    // Postsubmit tests for TV devices
+    "tv-postsubmit": [
+        {
+            "name": "CtsMediaDecoderTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.decoder.cts.DecoderRenderTest"
+                }
+            ]
+        }
     ]
 }
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index b3c02eb..d662585 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -44,6 +44,16 @@
 }
 
 flag {
+  name: "input_surface_throttle"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for input surface throttle"
+  bug: "342269852"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
   name: "large_audio_frame_finish"
   namespace: "codec_fwk"
   description: "Implementation flag for large audio frame finishing tasks"
@@ -101,7 +111,17 @@
   name: "set_state_early"
   namespace: "codec_fwk"
   description: "Bugfix flag for setting state early to avoid a race condition"
-  bug: "298613711"
+  bug: "298613712"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
+  name: "stop_hal_before_surface"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for setting state early to avoid a race condition"
+  bug: "339247977"
   metadata {
     purpose: PURPOSE_BUGFIX
   }
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index b1d4ad4..2f659a2 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -8,18 +8,21 @@
 aconfig_declarations {
     name: "com.android.media.audioserver-aconfig",
     package: "com.android.media.audioserver",
+    container: "system",
     srcs: ["audioserver.aconfig"],
 }
 
 aconfig_declarations {
     name: "com.android.media.audio-aconfig",
     package: "com.android.media.audio",
+    container: "system",
     srcs: ["audio.aconfig"],
 }
 
 aconfig_declarations {
     name: "com.android.media.aaudio-aconfig",
     package: "com.android.media.aaudio",
+    container: "system",
     srcs: ["aaudio.aconfig"],
 }
 
@@ -63,6 +66,7 @@
 aconfig_declarations {
     name: "android.media.audio-aconfig",
     package: "android.media.audio",
+    container: "system",
     srcs: ["audio_framework.aconfig"],
     visibility: ["//visibility:private"],
 }
@@ -70,6 +74,7 @@
 aconfig_declarations {
     name: "android.media.audiopolicy-aconfig",
     package: "android.media.audiopolicy",
+    container: "system",
     srcs: ["audiopolicy_framework.aconfig"],
     visibility: ["//visibility:private"],
 }
@@ -77,6 +82,7 @@
 aconfig_declarations {
     name: "android.media.midi-aconfig",
     package: "android.media.midi",
+    container: "system",
     srcs: ["midi_flags.aconfig"],
     visibility: ["//visibility:private"],
 }
diff --git a/media/audio/aconfig/aaudio.aconfig b/media/audio/aconfig/aaudio.aconfig
index 7196525..c160109 100644
--- a/media/audio/aconfig/aaudio.aconfig
+++ b/media/audio/aconfig/aaudio.aconfig
@@ -3,6 +3,7 @@
 # Please add flags in alphabetical order.
 
 package: "com.android.media.aaudio"
+container: "system"
 
 flag {
     name: "sample_rate_conversion"
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index 73cb8ca..8ca4f9e 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -3,6 +3,7 @@
 # Please add flags in alphabetical order.
 
 package: "com.android.media.audio"
+container: "system"
 
 flag {
     name: "alarm_min_volume_zero"
diff --git a/media/audio/aconfig/audiopolicy_framework.aconfig b/media/audio/aconfig/audiopolicy_framework.aconfig
index 833730a..80e64ad 100644
--- a/media/audio/aconfig/audiopolicy_framework.aconfig
+++ b/media/audio/aconfig/audiopolicy_framework.aconfig
@@ -4,6 +4,7 @@
 # Please add flags in alphabetical order.
 
 package: "android.media.audiopolicy"
+container: "system"
 
 flag {
     name: "audio_policy_update_mixing_rules_api"
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
index 21ea1a2..5c6504f 100644
--- a/media/audio/aconfig/audioserver.aconfig
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -3,6 +3,7 @@
 # Please add flags in alphabetical order.
 
 package: "com.android.media.audioserver"
+container: "system"
 
 flag {
     name: "direct_track_reprioritization"
diff --git a/media/audio/aconfig/midi_flags.aconfig b/media/audio/aconfig/midi_flags.aconfig
index ff9238a..efb643f 100644
--- a/media/audio/aconfig/midi_flags.aconfig
+++ b/media/audio/aconfig/midi_flags.aconfig
@@ -4,6 +4,7 @@
 # Please add flags in alphabetical order.
 
 package: "android.media.midi"
+container: "system"
 
 flag {
     name: "virtual_ump"
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 7b63e75..780660e 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -155,7 +155,7 @@
     mSignalledError = false;
     mSignalledOutputEos = false;
     mIsFirstFrame = true;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0u;
     mEncoderWriteData = false;
     mEncoderReturnedNbBytes = 0;
@@ -186,7 +186,7 @@
     mSignalledError = false;
     mSignalledOutputEos = false;
     mIsFirstFrame = true;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0u;
     mEncoderWriteData = false;
     mEncoderReturnedNbBytes = 0;
@@ -236,7 +236,7 @@
               inSize, (int)work->input.ordinal.timestamp.peeku(),
               (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
     if (mIsFirstFrame && inSize) {
-        mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+        mAnchorTimeStamp = work->input.ordinal.timestamp.peekll();
         mIsFirstFrame = false;
     }
 
@@ -405,7 +405,7 @@
     C2WriteView wView = mOutputBlock->map().get();
     uint8_t* outData = wView.data();
     const uint32_t sampleRate = mIntf->getSampleRate();
-    const uint64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
+    const int64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
     ALOGV("writing %zu bytes of encoded data on output", bytes);
     // increment mProcessedSamples to maintain audio synchronization during
     // play back
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
index 1f3be3c..ed9c298 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.h
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -72,7 +72,7 @@
     bool mSignalledOutputEos;
     uint32_t mBlockSize;
     bool mIsFirstFrame;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
     // should the data received by the callback be written to the output port
     bool mEncoderWriteData;
diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp
index a58044c..e644ee3 100644
--- a/media/codec2/components/mpeg2/Android.bp
+++ b/media/codec2/components/mpeg2/Android.bp
@@ -14,6 +14,10 @@
         "libcodec2_soft_sanitize_signed-defaults",
     ],
 
+    cflags: [
+        "-DKEEP_THREADS_ACTIVE=0",
+    ],
+
     srcs: ["C2SoftMpeg2Dec.cpp"],
 
     static_libs: ["libmpeg2dec"],
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 491098d..562dcf5 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -16,6 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2SoftMpeg2Dec"
+#ifndef KEEP_THREADS_ACTIVE
+#define KEEP_THREADS_ACTIVE 0
+#endif
 #include <log/log.h>
 
 #include <media/stagefright/foundation/MediaDefs.h>
@@ -433,7 +436,7 @@
 
     s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = sizeof(ivdext_fill_mem_rec_ip_t);
     s_fill_mem_ip.u4_share_disp_buf = 0;
-    s_fill_mem_ip.u4_keep_threads_active = 1;
+    s_fill_mem_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
     s_fill_mem_ip.e_output_format = mIvColorformat;
     s_fill_mem_ip.u4_deinterlace = 1;
     s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
@@ -475,7 +478,7 @@
     s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight;
     s_init_ip.u4_share_disp_buf = 0;
     s_init_ip.u4_deinterlace = 1;
-    s_init_ip.u4_keep_threads_active = 1;
+    s_init_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
     s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
     s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorformat;
     s_init_op.s_ivd_init_op_t.u4_size = sizeof(ivdext_init_op_t);
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index dab7b89..318f093 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -446,6 +446,7 @@
     {
         IntfImpl::Lock lock = mIntf->lock();
         mPixelFormatInfo = mIntf->getPixelFormat_l();
+        mColorAspects = mIntf->getDefaultColorAspects_l();
     }
 
     mWidth = 320;
@@ -591,6 +592,41 @@
         return;
     }
 
+    // handle dynamic config parameters
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects =
+            mIntf->getDefaultColorAspects_l();
+        lock.unlock();
+
+        if (mColorAspects->range != defaultColorAspects->range ||
+            mColorAspects->primaries != defaultColorAspects->primaries ||
+            mColorAspects->matrix != defaultColorAspects->matrix ||
+            mColorAspects->transfer != defaultColorAspects->transfer) {
+
+            mColorAspects->range = defaultColorAspects->range;
+            mColorAspects->primaries = defaultColorAspects->primaries;
+            mColorAspects->matrix = defaultColorAspects->matrix;
+            mColorAspects->transfer = defaultColorAspects->transfer;
+
+            C2StreamColorAspectsTuning::output colorAspect(0u, defaultColorAspects->range,
+                defaultColorAspects->primaries, defaultColorAspects->transfer,
+                defaultColorAspects->matrix);
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            c2_status_t err = mIntf->config({&colorAspect}, C2_MAY_BLOCK, &failures);
+            if (err == C2_OK) {
+                work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(colorAspect));
+            } else {
+                ALOGE("Config update colorAspect failed");
+                mSignalledError = true;
+                work->workletsProcessed = 1u;
+                work->result = C2_CORRUPTED;
+                return;
+            }
+        }
+    }
+
     size_t inOffset = 0u;
     size_t inSize = 0u;
     C2ReadView rView = mDummyReadView;
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index e9d6dc9..93cc213 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -66,6 +66,7 @@
     // configurations used by component in process
     // (TODO: keep this in intf but make them internal only)
     std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+    std::shared_ptr<C2StreamColorAspectsTuning::output> mColorAspects;
 
     std::shared_ptr<IntfImpl> mIntf;
     vpx_codec_ctx_t *mCodecCtx;
diff --git a/media/codec2/core/Android.bp b/media/codec2/core/Android.bp
index 7d5740b..c205dcd 100644
--- a/media/codec2/core/Android.bp
+++ b/media/codec2/core/Android.bp
@@ -26,9 +26,6 @@
         "//apex_available:platform",
         "com.android.media.swcodec",
     ],
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: ["C2.cpp"],
diff --git a/media/codec2/hal/aidl/Android.bp b/media/codec2/hal/aidl/Android.bp
index 48b6e21..e16e2b1 100644
--- a/media/codec2/hal/aidl/Android.bp
+++ b/media/codec2/hal/aidl/Android.bp
@@ -8,6 +8,7 @@
     name: "libcodec2_aidl_client",
 
     defaults: [
+        "aconfig_lib_cc_static_link.defaults",
         "libcodec2_hal_selection",
     ],
 
@@ -65,6 +66,7 @@
     ],
 
     defaults: [
+        "aconfig_lib_cc_static_link.defaults",
         "libcodec2_hal_selection",
     ],
 
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 1c2a0fb..dbbabfe 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -173,7 +173,7 @@
 }
 
 GraphicsTracker::GraphicsTracker(int maxDequeueCount)
-    : mBufferCache(new BufferCache()), mMaxDequeue{maxDequeueCount},
+    : mBufferCache(new BufferCache()), mNumDequeueing{0}, mMaxDequeue{maxDequeueCount},
     mMaxDequeueCommitted{maxDequeueCount},
     mDequeueable{maxDequeueCount},
     mTotalDequeued{0}, mTotalCancelled{0}, mTotalDropped{0}, mTotalReleased{0},
@@ -235,6 +235,7 @@
         const sp<IGraphicBufferProducer>& igbp, uint32_t generation) {
     // TODO: wait until operations to previous IGBP is completed.
     std::shared_ptr<BufferCache> prevCache;
+    int prevDequeueRequested = 0;
     int prevDequeueCommitted;
 
     std::unique_lock<std::mutex> cl(mConfigLock);
@@ -243,6 +244,9 @@
         mInConfig = true;
         prevCache = mBufferCache;
         prevDequeueCommitted = mMaxDequeueCommitted;
+        if (mMaxDequeueRequested.has_value()) {
+            prevDequeueRequested = mMaxDequeueRequested.value();
+        }
     }
     // NOTE: Switching to the same surface is blocked from MediaCodec.
     // Switching to the same surface might not work if tried, since disconnect()
@@ -263,6 +267,11 @@
         mInConfig = false;
         return C2_BAD_VALUE;
     }
+    ALOGD("new surface in configuration: maxDequeueRequested(%d), maxDequeueCommitted(%d)",
+          prevDequeueRequested, prevDequeueCommitted);
+    if (prevDequeueRequested > 0 && prevDequeueRequested > prevDequeueCommitted) {
+        prevDequeueCommitted = prevDequeueRequested;
+    }
     if (igbp) {
         ret = igbp->setMaxDequeuedBufferCount(prevDequeueCommitted);
         if (ret != ::android::OK) {
@@ -280,6 +289,34 @@
         std::unique_lock<std::mutex> l(mLock);
         mInConfig = false;
         mBufferCache = newCache;
+        // {@code dequeued} is the number of currently dequeued buffers.
+        // {@code prevDequeueCommitted} is the maximum number of dequeued
+        //  buffers allowed at any moment for the new surface.
+        // {@code newDequeueable} is therefore the current number of
+        //  dequeueable buffers if no further change occurs.
+        int dequeued = mDequeued.size() + mNumDequeueing;
+        int newDequeueable = prevDequeueCommitted - dequeued;
+        if (newDequeueable < 0) {
+            // This should not happen.
+            // But if it does, we respect the value and try to continue.
+            ALOGE("calculated new dequeueable is negative: %d max(%d), dequeued(%d)",
+                  newDequeueable, prevDequeueCommitted, dequeued);
+        }
+
+        if (mMaxDequeueRequested.has_value() && mMaxDequeueRequested == prevDequeueCommitted) {
+            mMaxDequeueRequested.reset();
+        }
+        mMaxDequeue = mMaxDequeueCommitted = prevDequeueCommitted;
+
+        int delta = newDequeueable - mDequeueable;
+        if (delta > 0) {
+            writeIncDequeueableLocked(delta);
+        } else if (delta < 0) {
+            drainDequeueableLocked(-delta);
+        }
+        ALOGV("new surfcace dequeueable %d(delta %d), maxDequeue %d",
+              newDequeueable, delta, mMaxDequeue);
+        mDequeueable = newDequeueable;
     }
     return C2_OK;
 }
@@ -529,6 +566,7 @@
             ALOGE("writing end for the waitable object seems to be closed");
             return C2_BAD_STATE;
         }
+        mNumDequeueing++;
         mDequeueable--;
         *cache = mBufferCache;
         return C2_OK;
@@ -543,6 +581,7 @@
                     bool cached, int slot, const sp<Fence> &fence,
                     std::shared_ptr<BufferItem> *pBuffer, bool *updateDequeue) {
     std::unique_lock<std::mutex> l(mLock);
+    mNumDequeueing--;
     if (res == C2_OK) {
         if (cached) {
             auto it = cache->mBuffers.find(slot);
@@ -655,7 +694,8 @@
             ALOGE("allocate by dequeueBuffer() successful, but requestBuffer() failed %d",
                   status);
             igbp->cancelBuffer(slotId, fence);
-            return C2_CORRUPTED;
+            // This might be due to life-cycle end and/or surface switching.
+            return C2_BLOCKING;
         }
         *buffer = std::make_shared<BufferItem>(generation, slotId, realloced, fence);
         if (!*buffer) {
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
index dd6c869..762030b 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
@@ -234,6 +234,7 @@
     // Maps bufferId to buffer
     std::map<uint64_t, std::shared_ptr<BufferItem>> mDequeued;
     std::set<uint64_t> mDeallocating;
+    int mNumDequeueing;
 
     // These member variables are read and modified accessed as follows.
     // 1. mConfigLock being held
diff --git a/media/codec2/hal/common/Android.bp b/media/codec2/hal/common/Android.bp
index 7d7b285..4c9da33 100644
--- a/media/codec2/hal/common/Android.bp
+++ b/media/codec2/hal/common/Android.bp
@@ -31,6 +31,10 @@
     ],
 
     static_libs: ["aconfig_mediacodec_flags_c_lib"],
+
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
 }
 
 cc_library_static {
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
index 8086ef2..b1fa82f 100644
--- a/media/codec2/hal/common/MultiAccessUnitHelper.cpp
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -27,6 +27,7 @@
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
+static inline constexpr uint32_t MAX_SUPPORTED_SIZE = (10 * 512000 * 8 * 2u);
 namespace android {
 
 static C2R MultiAccessUnitParamsSetter(
@@ -39,8 +40,6 @@
         res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.thresholdSize)));
     } else if (me.v.maxSize < me.v.thresholdSize) {
         me.set().maxSize = me.v.thresholdSize;
-    } else if (me.v.thresholdSize == 0 && me.v.maxSize > 0) {
-        me.set().thresholdSize = me.v.maxSize;
     }
     std::vector<std::unique_ptr<C2SettingResult>> failures;
     res.retrieveFailures(&failures);
@@ -61,9 +60,9 @@
             .withDefault(new C2LargeFrame::output(0u, 0, 0))
             .withFields({
                 C2F(mLargeFrameParams, maxSize).inRange(
-                        0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u)),
+                        0, c2_min(UINT_MAX, MAX_SUPPORTED_SIZE)),
                 C2F(mLargeFrameParams, thresholdSize).inRange(
-                        0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u))
+                        0, c2_min(UINT_MAX, MAX_SUPPORTED_SIZE))
             })
             .withSetter(MultiAccessUnitParamsSetter)
             .build());
@@ -115,6 +114,18 @@
     return false;
 }
 
+bool MultiAccessUnitInterface::getMaxInputSize(
+        C2StreamMaxBufferSizeInfo::input* const maxInputSize) const {
+    if (maxInputSize == nullptr || mC2ComponentIntf == nullptr) {
+        return false;
+    }
+    c2_status_t err = mC2ComponentIntf->query_vb({maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+    if (err != OK) {
+        return false;
+    }
+    return true;
+}
+
 //C2MultiAccessUnitBuffer
 class C2MultiAccessUnitBuffer : public C2Buffer {
     public:
@@ -128,6 +139,7 @@
 MultiAccessUnitHelper::MultiAccessUnitHelper(
         const std::shared_ptr<MultiAccessUnitInterface>& intf,
         std::shared_ptr<C2BlockPool>& linearPool):
+        mMultiAccessOnOffAllowed(true),
         mInit(false),
         mInterface(intf),
         mLinearPool(linearPool) {
@@ -152,6 +164,63 @@
     return result;
 }
 
+bool MultiAccessUnitHelper::tryReconfigure(const std::unique_ptr<C2Param> &param) {
+    C2LargeFrame::output *lfp = C2LargeFrame::output::From(param.get());
+    if (lfp == nullptr) {
+        return false;
+    }
+    bool isDecoder = (mInterface->kind() == C2Component::KIND_DECODER) ? true : false;
+    if (!isDecoder) {
+        C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+        if (!mInterface->getMaxInputSize(&maxInputSize)) {
+            LOG(ERROR) << "Error in reconfigure: "
+                    << "Encoder failed to respond with a valid max input size";
+            return false;
+        }
+        // This assumes a worst-case compression ratio of 1:1.
+        // The encoder should never produce more output than the input
+        // provided to it in a single call.
+        if (lfp->maxSize < maxInputSize.value) {
+            lfp->maxSize = maxInputSize.value;
+        }
+    }
+    lfp->maxSize =
+            (lfp->maxSize > MAX_SUPPORTED_SIZE) ? MAX_SUPPORTED_SIZE :
+                    (lfp->maxSize < 0) ? 0 : lfp->maxSize;
+    lfp->thresholdSize =
+            (lfp->thresholdSize > MAX_SUPPORTED_SIZE) ? MAX_SUPPORTED_SIZE :
+                    (lfp->thresholdSize < 0) ? 0 : lfp->thresholdSize;
+    C2LargeFrame::output currentConfig = mInterface->getLargeFrameParam();
+    if ((currentConfig.maxSize == lfp->maxSize)
+            && (currentConfig.thresholdSize == lfp->thresholdSize)) {
+        // no need to update
+        return false;
+    }
+    if (isDecoder) {
+        bool isOnOffTransition =
+                (currentConfig.maxSize == 0 && lfp->maxSize != 0)
+                || (currentConfig.maxSize != 0 && lfp->maxSize == 0);
+        if (isOnOffTransition && !mMultiAccessOnOffAllowed) {
+            LOG(ERROR) << "Setting new configs not allowed"
+                    << " MaxSize: " << lfp->maxSize
+                    << " ThresholdSize: " << lfp->thresholdSize;
+            return false;
+        }
+    }
+    std::vector<C2Param*> config{lfp};
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+    if (C2_OK != mInterface->config(config, C2_MAY_BLOCK, &failures)) {
+        LOG(ERROR) << "Dynamic config not applied for"
+                << " MaxSize: " << lfp->maxSize
+                << " ThresholdSize: " << lfp->thresholdSize;
+        return false;
+    }
+    LOG(DEBUG) << "Updated from param maxSize "
+            << lfp->maxSize
+            << " ThresholdSize " << lfp->thresholdSize;
+    return true;
+}
+
 std::shared_ptr<MultiAccessUnitInterface> MultiAccessUnitHelper::getInterface() {
     return mInterface;
 }
@@ -163,6 +232,7 @@
 void MultiAccessUnitHelper::reset() {
     std::lock_guard<std::mutex> l(mLock);
     mFrameHolder.clear();
+    mMultiAccessOnOffAllowed = true;
 }
 
 c2_status_t MultiAccessUnitHelper::error(
@@ -181,6 +251,7 @@
         }
     }
     mFrameHolder.clear();
+    mMultiAccessOnOffAllowed = true;
     return C2_OK;
 }
 
@@ -232,16 +303,23 @@
         uint64_t newFrameIdx = mFrameIndex++;
         // TODO: Do not split buffers if component inherantly supports MultipleFrames.
         // if thats case, only replace frameindex.
-        auto cloneInputWork = [&newFrameIdx](std::unique_ptr<C2Work>& inWork, uint32_t flags) {
+        auto cloneInputWork = [&frameInfo, &newFrameIdx, this]
+                (std::unique_ptr<C2Work>& inWork, uint32_t flags) -> std::unique_ptr<C2Work> {
             std::unique_ptr<C2Work> newWork(new C2Work);
             newWork->input.flags = (C2FrameData::flags_t)flags;
             newWork->input.ordinal = inWork->input.ordinal;
             newWork->input.ordinal.frameIndex = newFrameIdx;
             if (!inWork->input.configUpdate.empty()) {
                 for (std::unique_ptr<C2Param>& param : inWork->input.configUpdate) {
-                    newWork->input.configUpdate.push_back(
-                            std::move(C2Param::Copy(*(param.get()))));
+                    if (param->index() == C2LargeFrame::output::PARAM_TYPE) {
+                        if (tryReconfigure(param)) {
+                            frameInfo.mConfigUpdate.push_back(std::move(param));
+                        }
+                    } else {
+                        newWork->input.configUpdate.push_back(std::move(param));
+                    }
                 }
+                inWork->input.configUpdate.clear();
             }
             newWork->input.infoBuffers = (inWork->input.infoBuffers);
             if (!inWork->worklets.empty() && inWork->worklets.front() != nullptr) {
@@ -331,6 +409,7 @@
             frameInfo.mLargeFrameTuning = multiAccessParams;
             std::lock_guard<std::mutex> l(mLock);
             mFrameHolder.push_back(std::move(frameInfo));
+            mMultiAccessOnOffAllowed = false;
         }
     }
     return C2_OK;
@@ -360,6 +439,7 @@
             std::list<MultiAccessUnitInfo>::iterator frame =
                     mFrameHolder.begin();
             while (!foundFrame && frame != mFrameHolder.end()) {
+                c2_status_t res = C2_OK;
                 auto it = frame->mComponentFrameIds.find(thisFrameIndex);
                 if (it != frame->mComponentFrameIds.end()) {
                     foundFrame = true;
@@ -369,8 +449,7 @@
                     if (work->result != C2_OK
                             || work->worklets.empty()
                             || !work->worklets.front()
-                            || (frame->mLargeFrameTuning.thresholdSize == 0
-                            || frame->mLargeFrameTuning.maxSize == 0)) {
+                            || frame->mLargeFrameTuning.maxSize == 0) {
                         if (removeEntry) {
                             frame->mComponentFrameIds.erase(it);
                             removeEntry = false;
@@ -388,10 +467,27 @@
                         addOutWork(frame->mLargeWork);
                         frame->reset();
                         if (workResult != C2_OK) {
-                            frame->mAccessUnitInfos.clear();
+                            frame->mComponentFrameIds.clear();
+                            removeEntry = false;
                         }
-                    } else if (C2_OK != processWorklets(*frame, work, addOutWork)) {
-                        LOG(DEBUG) << "Error while processing work";
+                    } else if (C2_OK != (res = processWorklets(*frame, work, addOutWork))) {
+                        // Upon an error while processing worklets, we return the work
+                        // with its result set to the error. This indicates the error
+                        // to the framework so that it can take whatever action is
+                        // needed to handle it.
+                        LOG(DEBUG) << "Error while processing worklets";
+                        if (frame->mLargeWork == nullptr) {
+                            frame->mLargeWork.reset(new C2Work);
+                            frame->mLargeWork->input.ordinal = frame->inOrdinal;
+                            frame->mLargeWork->input.ordinal.frameIndex =
+                                    frame->inOrdinal.frameIndex;
+                        }
+                        frame->mLargeWork->result = res;
+                        finalizeWork(*frame);
+                        addOutWork(frame->mLargeWork);
+                        frame->reset();
+                        frame->mComponentFrameIds.clear();
+                        removeEntry = false;
                     }
                     if (removeEntry) {
                         LOG(DEBUG) << "Removing entry: " << thisFrameIndex
@@ -528,9 +624,6 @@
 
         LOG(DEBUG) << "maxOutSize " << frame.mLargeFrameTuning.maxSize
                 << " threshold " << frame.mLargeFrameTuning.thresholdSize;
-        if ((*worklet)->output.buffers.size() > 0) {
-            allocateWork(frame, true, true);
-        }
         LOG(DEBUG) << "This worklet has " << (*worklet)->output.buffers.size() << " buffers"
                 << " ts: " << (*worklet)->output.ordinal.timestamp.peekull();
         int64_t workletTimestamp = (*worklet)->output.ordinal.timestamp.peekull();
@@ -552,43 +645,39 @@
                     inputSize -= (inputSize % frameSize);
                 }
                 while (inputOffset < inputSize) {
-                    if (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize) {
+                    if ((frame.mWview != nullptr)
+                            && (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize)) {
                         frame.mLargeWork->result = C2_OK;
                         finalizeWork(frame, flagsForCopy);
                         addWork(frame.mLargeWork);
                         frame.reset();
-                        allocateWork(frame, true, true);
                     }
                     if (mInterface->kind() == C2Component::KIND_ENCODER) {
                         if (inputSize > frame.mLargeFrameTuning.maxSize) {
-                            LOG(ERROR) << "Enc: Output buffer too small for AU, configured with "
-                                    << frame.mLargeFrameTuning.maxSize
-                                    << " block size: " << blocks.front().size()
-                                    << "alloc size " << frame.mWview->size();
-                            if (frame.mLargeWork
-                                    && frame.mWview && frame.mWview->offset() > 0) {
+                            LOG(WARNING) << "WARNING Encoder:"
+                                    << " Output buffer too small for configuration"
+                                    << " configured max size " << frame.mLargeFrameTuning.maxSize
+                                    << " access unit size " << inputSize;
+                            if (frame.mLargeWork && (frame.mWview && frame.mWview->offset() > 0)) {
+                                frame.mLargeWork->result = C2_OK;
                                 finalizeWork(frame, flagsForCopy);
                                 addWork(frame.mLargeWork);
                                 frame.reset();
-                                allocateWork(frame, true, false);
                             }
-                            frame.mLargeWork->result = C2_NO_MEMORY;
-                            finalizeWork(frame, 0, true);
-                            addWork(frame.mLargeWork);
-                            frame.reset();
-                            return C2_NO_MEMORY;
-                        } else if (inputSize > frame.mWview->size()) {
+                            frame.mLargeFrameTuning.maxSize = inputSize;
+                        } else if ((frame.mWview != nullptr)
+                                && (inputSize > frame.mWview->size())) {
                             LOG(DEBUG) << "Enc: Large frame hitting bufer limit, current size "
                                 << frame.mWview->offset();
-                            if (frame.mLargeWork
-                                    && frame.mWview && frame.mWview->offset() > 0) {
+                            if (frame.mWview->offset() > 0) {
+                                frame.mLargeWork->result = C2_OK;
                                 finalizeWork(frame, flagsForCopy);
                                 addWork(frame.mLargeWork);
                                 frame.reset();
-                                allocateWork(frame, true, true);
                             }
                         }
                     }
+                    allocateWork(frame, true, true);
                     C2ReadView rView = blocks.front().map().get();
                     if (rView.error()) {
                         LOG(ERROR) << "Buffer read view error";
@@ -683,26 +772,39 @@
             frame.mWview->setOffset(0);
             std::shared_ptr<C2Buffer> c2Buffer = C2Buffer::CreateLinearBuffer(
                     frame.mBlock->share(0, size, ::C2Fence()));
-            if (frame.mAccessUnitInfos.size() > 0) {
-                if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
-                    frame.mAccessUnitInfos.back().flags |=
-                            C2FrameData::FLAG_END_OF_STREAM;
-                }
-                std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
-                        C2AccessUnitInfos::output::AllocShared(
-                        frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
-                frame.mInfos.push_back(largeFrame);
-                frame.mAccessUnitInfos.clear();
-            }
-            for (auto &info : frame.mInfos) {
-                c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
-            }
             frame.mLargeWork->worklets.front()->output.buffers.push_back(std::move(c2Buffer));
-            frame.mInfos.clear();
-            frame.mBlock.reset();
-            frame.mWview.reset();
+        }
+        if (frame.mLargeWork->worklets.front()->output.buffers.size() > 0) {
+            std::shared_ptr<C2Buffer>& c2Buffer =
+                frame.mLargeWork->worklets.front()->output.buffers.front();
+            if (c2Buffer != nullptr) {
+                if (frame.mAccessUnitInfos.size() > 0) {
+                    if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
+                        frame.mAccessUnitInfos.back().flags |= C2FrameData::FLAG_END_OF_STREAM;
+                    }
+                    std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
+                            C2AccessUnitInfos::output::AllocShared(
+                                    frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
+                    frame.mInfos.push_back(largeFrame);
+                    frame.mAccessUnitInfos.clear();
+                }
+                for (auto &info : frame.mInfos) {
+                    c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
+                }
+            }
+        }
+        if (frame.mConfigUpdate.size() > 0) {
+            outFrameData.configUpdate.insert(
+                    outFrameData.configUpdate.end(),
+                    make_move_iterator(frame.mConfigUpdate.begin()),
+                    make_move_iterator(frame.mConfigUpdate.end()));
         }
     }
+    frame.mConfigUpdate.clear();
+    frame.mInfos.clear();
+    frame.mBlock.reset();
+    frame.mWview.reset();
+
     LOG(DEBUG) << "Multi access-unitflag setting as " << finalFlags;
     return C2_OK;
 }
@@ -735,6 +837,7 @@
     mBlock.reset();
     mWview.reset();
     mInfos.clear();
+    mConfigUpdate.clear();
     mAccessUnitInfos.clear();
     mLargeWork.reset();
 }
diff --git a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
index bb4464c..070a1f5 100644
--- a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
+++ b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
@@ -46,6 +46,7 @@
 protected:
     bool getDecoderSampleRateAndChannelCount(
             uint32_t * const sampleRate_, uint32_t * const channelCount_) const;
+    bool getMaxInputSize(C2StreamMaxBufferSizeInfo::input* const maxInputSize) const;
     const std::shared_ptr<C2ComponentInterface> mC2ComponentIntf;
     std::shared_ptr<C2LargeFrame::output> mLargeFrameParams;
     C2ComponentKindSetting mKind;
@@ -140,6 +141,11 @@
         std::vector<std::shared_ptr<const C2Info>> mInfos;
 
         /*
+         * Vector for holding config updates from the wrapper
+         */
+        std::vector<std::unique_ptr<C2Param>> mConfigUpdate;
+
+        /*
          * C2AccessUnitInfos for the current buffer
          */
         std::vector<C2AccessUnitInfosStruct> mAccessUnitInfos;
@@ -170,6 +176,11 @@
     };
 
     /*
+     * Reconfigure helper
+     */
+    bool tryReconfigure(const std::unique_ptr<C2Param> &p);
+
+    /*
      * Creates a linear block to be used with work
      */
     c2_status_t createLinearBlock(MultiAccessUnitInfo &frame);
@@ -195,6 +206,14 @@
             uint32_t size,
             int64_t timestamp);
 
+    // Flag that controls whether dynamic on/off of this helper is allowed.
+    // Once enabled and buffers are in transit, the module can no longer be
+    // turned off by setting the max output value to 0, because the skip-cut
+    // buffer expects the metadata to always be present along with a valid
+    // buffer.
+    // This flag tracks that state of the module.
+    bool mMultiAccessOnOffAllowed;
+
     bool mInit;
 
     // Interface of this module
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index ab47b7c..36907e1 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -263,9 +263,6 @@
     ALOGV("mComponent->reset() timeConsumed=%" PRId64 " us", timeConsumed);
     ASSERT_EQ(err, C2_OK);
 
-    err = mComponent->start();
-    ASSERT_EQ(err, C2_OK);
-
     // Query supported params by the component
     std::vector<std::shared_ptr<C2ParamDescriptor>> params;
     startTime = getNowUs();
@@ -298,6 +295,9 @@
               timeConsumed);
     }
 
+    err = mComponent->start();
+    ASSERT_EQ(err, C2_OK);
+
     std::list<std::unique_ptr<C2Work>> workList;
     startTime = getNowUs();
     err = mComponent->queue(&workList);
diff --git a/media/codec2/hal/plugin/FilterWrapper.cpp b/media/codec2/hal/plugin/FilterWrapper.cpp
index 197d6e7..b926150 100644
--- a/media/codec2/hal/plugin/FilterWrapper.cpp
+++ b/media/codec2/hal/plugin/FilterWrapper.cpp
@@ -49,11 +49,6 @@
             std::weak_ptr<FilterWrapper> filterWrapper)
         : mIntf(intf), mFilterWrapper(filterWrapper) {
         takeFilters(std::move(filters));
-        for (size_t i = 0; i < mFilters.size(); ++i) {
-            mControlParamTypes.insert(
-                    mFilters[i].desc.controlParams.begin(),
-                    mFilters[i].desc.controlParams.end());
-        }
     }
 
     ~WrappedDecoderInterface() override = default;
@@ -91,6 +86,12 @@
 
         // TODO: documentation
         mFilters = std::move(filters);
+        mControlParamTypes.clear();
+        for (size_t i = 0; i < mFilters.size(); ++i) {
+            mControlParamTypes.insert(
+                    mFilters[i].desc.controlParams.begin(),
+                    mFilters[i].desc.controlParams.end());
+        }
         mTypeToIndexForQuery.clear();
         mTypeToIndexForConfig.clear();
         for (size_t i = 0; i < mFilters.size(); ++i) {
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 362373e..7076bac 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -91,6 +91,10 @@
         "libcodec2_client",
     ],
 
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
+
     sanitize: {
         cfi: true,
         misc_undefined: [
diff --git a/media/codec2/sfplugin/C2AidlNode.cpp b/media/codec2/sfplugin/C2AidlNode.cpp
index 93c9d8b..0f23205 100644
--- a/media/codec2/sfplugin/C2AidlNode.cpp
+++ b/media/codec2/sfplugin/C2AidlNode.cpp
@@ -105,6 +105,10 @@
     return mImpl->onInputBufferDone(index);
 }
 
+void C2AidlNode::onInputBufferEmptied() {
+    return mImpl->onInputBufferEmptied();
+}
+
 android_dataspace C2AidlNode::getDataspace() {
     return mImpl->getDataspace();
 }
diff --git a/media/codec2/sfplugin/C2AidlNode.h b/media/codec2/sfplugin/C2AidlNode.h
index 365a41d..9dd3504 100644
--- a/media/codec2/sfplugin/C2AidlNode.h
+++ b/media/codec2/sfplugin/C2AidlNode.h
@@ -68,13 +68,19 @@
     void setFrameSize(uint32_t width, uint32_t height);
 
     /**
-     * Clean up work item reference.
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
      *
      * \param index input work index
      */
     void onInputBufferDone(c2_cntr64_t index);
 
     /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     android_dataspace getDataspace();
diff --git a/media/codec2/sfplugin/C2NodeImpl.cpp b/media/codec2/sfplugin/C2NodeImpl.cpp
index 6f53e0f..585072d 100644
--- a/media/codec2/sfplugin/C2NodeImpl.cpp
+++ b/media/codec2/sfplugin/C2NodeImpl.cpp
@@ -25,6 +25,7 @@
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
+#include <android_media_codec.h>
 #include <android/fdsan.h>
 #include <media/stagefright/foundation/ColorUtils.h>
 #include <ui/Fence.h>
@@ -373,7 +374,10 @@
     }
     work->worklets.clear();
     work->worklets.emplace_back(new C2Worklet);
-    mBufferIdsInUse.lock()->emplace(work->input.ordinal.frameIndex.peeku(), buffer);
+    {
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        buffers->mIdsInUse.emplace(work->input.ordinal.frameIndex.peeku(), buffer);
+    }
     mQueueThread->queue(comp, fenceFd, std::move(work), std::move(fd0), std::move(fd1));
 
     return OK;
@@ -405,29 +409,74 @@
 }
 
 void C2NodeImpl::onInputBufferDone(c2_cntr64_t index) {
-    if (mAidlHal) {
-        if (!mAidlBufferSource) {
-            ALOGD("Buffer source not set (index=%llu)", index.peekull());
-            return;
-        }
-    } else {
-        if (!mBufferSource) {
-            ALOGD("Buffer source not set (index=%llu)", index.peekull());
-            return;
-        }
-    }
-
-    int32_t bufferId = 0;
-    {
-        decltype(mBufferIdsInUse)::Locked bufferIds(mBufferIdsInUse);
-        auto it = bufferIds->find(index.peeku());
-        if (it == bufferIds->end()) {
+    if (android::media::codec::provider_->input_surface_throttle()) {
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        auto it = buffers->mIdsInUse.find(index.peeku());
+        if (it == buffers->mIdsInUse.end()) {
             ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
             return;
         }
-        bufferId = it->second;
-        (void)bufferIds->erase(it);
+        int32_t bufferId = it->second;
+        (void)buffers->mIdsInUse.erase(it);
+        buffers->mAvailableIds.push_back(bufferId);
+    } else {
+        if (!hasBufferSource()) {
+            return;
+        }
+        int32_t bufferId = 0;
+        {
+            Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+            auto it = buffers->mIdsInUse.find(index.peeku());
+            if (it == buffers->mIdsInUse.end()) {
+                ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
+                return;
+            }
+            bufferId = it->second;
+            (void)buffers->mIdsInUse.erase(it);
+        }
+        notifyInputBufferEmptied(bufferId);
     }
+}
+
+void C2NodeImpl::onInputBufferEmptied() {
+    if (!android::media::codec::provider_->input_surface_throttle()) {
+        ALOGE("onInputBufferEmptied should not be called "
+              "when input_surface_throttle is false");
+        return;
+    }
+    if (!hasBufferSource()) {
+        return;
+    }
+    int32_t bufferId = 0;
+    {
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        if (buffers->mAvailableIds.empty()) {
+            ALOGV("The codec is ready to take more input buffers "
+                    "but no input buffers are ready yet.");
+            return;
+        }
+        bufferId = buffers->mAvailableIds.front();
+        buffers->mAvailableIds.pop_front();
+    }
+    notifyInputBufferEmptied(bufferId);
+}
+
+bool C2NodeImpl::hasBufferSource() {
+    if (mAidlHal) {
+        if (!mAidlBufferSource) {
+            ALOGD("Buffer source not set");
+            return false;
+        }
+    } else {
+        if (!mBufferSource) {
+            ALOGD("Buffer source not set");
+            return false;
+        }
+    }
+    return true;
+}
+
+void C2NodeImpl::notifyInputBufferEmptied(int32_t bufferId) {
     if (mAidlHal) {
         ::ndk::ScopedFileDescriptor nullFence;
         (void)mAidlBufferSource->onInputBufferEmptied(bufferId, nullFence);
diff --git a/media/codec2/sfplugin/C2NodeImpl.h b/media/codec2/sfplugin/C2NodeImpl.h
index e060fd8..cc826b4 100644
--- a/media/codec2/sfplugin/C2NodeImpl.h
+++ b/media/codec2/sfplugin/C2NodeImpl.h
@@ -73,13 +73,19 @@
     void setFrameSize(uint32_t width, uint32_t height);
 
     /**
-     * Clean up work item reference.
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
      *
      * \param index input work index
      */
     void onInputBufferDone(c2_cntr64_t index);
 
     /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     android_dataspace getDataspace();
@@ -118,12 +124,24 @@
     c2_cntr64_t mPrevInputTimestamp; // input timestamp for previous frame
     c2_cntr64_t mPrevCodecTimestamp; // adjusted (codec) timestamp for previous frame
 
-    Mutexed<std::map<uint64_t, uint32_t>> mBufferIdsInUse;
+    // Tracks the status of buffers
+    struct BuffersTracker {
+        BuffersTracker() = default;
+
+        // Keeps track of buffers that are used by the component. Maps frame index -> buffer ID
+        std::map<uint64_t, uint32_t> mIdsInUse;
+        // Keeps track of the buffer IDs that are available after being released from the component.
+        std::list<uint32_t> mAvailableIds;
+    };
+    Mutexed<BuffersTracker> mBuffersTracker;
 
     class QueueThread;
     sp<QueueThread> mQueueThread;
 
     bool mAidlHal;
+
+    bool hasBufferSource();
+    void notifyInputBufferEmptied(int32_t bufferId);
 };
 
 }  // namespace android
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index ce02c88..98e25e2 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -291,6 +291,10 @@
     return mImpl->onInputBufferDone(index);
 }
 
+void C2OMXNode::onInputBufferEmptied() {
+    return mImpl->onInputBufferEmptied();
+}
+
 android_dataspace C2OMXNode::getDataspace() {
     return mImpl->getDataspace();
 }
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index d077202..5549b88 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -86,13 +86,19 @@
     void setFrameSize(uint32_t width, uint32_t height);
 
     /**
-     * Clean up work item reference.
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
      *
      * \param index input work index
      */
     void onInputBufferDone(c2_cntr64_t index);
 
     /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     android_dataspace getDataspace();
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 463b63f..ca0aabb 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -444,6 +444,10 @@
         mNode->onInputBufferDone(index);
     }
 
+    void onInputBufferEmptied() override {
+        mNode->onInputBufferEmptied();
+    }
+
     android_dataspace getDataspace() override {
         return mNode->getDataspace();
     }
@@ -663,6 +667,10 @@
         mNode->onInputBufferDone(index);
     }
 
+    void onInputBufferEmptied() override {
+        mNode->onInputBufferEmptied();
+    }
+
     android_dataspace getDataspace() override {
         return mNode->getDataspace();
     }
@@ -2227,8 +2235,23 @@
     // So we reverse their order for stopUseOutputSurface() to notify C2Fence waiters
     // prior to comp->stop().
     // See also b/300350761.
-    mChannel->stopUseOutputSurface(pushBlankBuffer);
-    status_t err = comp->stop();
+    //
+    // The workaround is no longer needed with the fetchGraphicBlock & C2Fence
+    // changes, so we revert to the logical sequence of operations when AIDL
+    // HALs are selected.
+    // When HIDL HALs are selected, we retain the workaround (the reversed
+    // order) by default in order to keep the legacy behavior.
+    bool stopHalBeforeSurface =
+            Codec2Client::IsAidlSelected() ||
+            property_get_bool("debug.codec2.stop_hal_before_surface", false);
+    status_t err = C2_OK;
+    if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) {
+        err = comp->stop();
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+    } else {
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+        err = comp->stop();
+    }
     if (err != C2_OK) {
         // TODO: convert err into status_t
         mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
@@ -2323,8 +2346,22 @@
     // So we reverse their order for stopUseOutputSurface() to notify C2Fence waiters
     // prior to comp->release().
     // See also b/300350761.
-    mChannel->stopUseOutputSurface(pushBlankBuffer);
-    comp->release();
+    //
+    // The workaround is no longer needed with the fetchGraphicBlock & C2Fence
+    // changes, so we revert to the logical sequence of operations when AIDL
+    // HALs are selected.
+    // When HIDL HALs are selected, we retain the workaround (the reversed
+    // order) by default in order to keep the legacy behavior.
+    bool stopHalBeforeSurface =
+            Codec2Client::IsAidlSelected() ||
+            property_get_bool("debug.codec2.stop_hal_before_surface", false);
+    if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) {
+        comp->release();
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+    } else {
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+        comp->release();
+    }
 
     {
         Mutexed<State>::Locked state(mState);
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 3984b83..f0a4180 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1069,6 +1069,10 @@
             return;
         }
     }
+    if (android::media::codec::provider_->input_surface_throttle()
+            && mInputSurface != nullptr) {
+        mInputSurface->onInputBufferEmptied();
+    }
     size_t numActiveSlots = 0;
     while (!mPipelineWatcher.lock()->pipelineFull()) {
         sp<MediaCodecBuffer> inBuffer;
@@ -2784,7 +2788,16 @@
 }
 
 void CCodecBufferChannel::setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer) {
-    mInfoBuffers.push_back(buffer);
+    if (mInputSurface == nullptr) {
+        mInfoBuffers.push_back(buffer);
+    } else {
+        std::list<std::unique_ptr<C2Work>> items;
+        std::unique_ptr<C2Work> work(new C2Work);
+        work->input.infoBuffers.emplace_back(*buffer);
+        work->worklets.emplace_back(new C2Worklet);
+        items.push_back(std::move(work));
+        c2_status_t err = mComponent->queue(&items);
+        ALOGW_IF(err != C2_OK, "%s: queuing the info buffer failed: %d", __func__, err);
+    }
 }
 
 status_t toStatusT(c2_status_t c2s, c2_operation_t c2op) {
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index 4bf6cd0..c158c5b 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -102,6 +102,7 @@
     }
 
     /**
+     * Notify that the input buffer reference is no longer needed.
      * Clean up C2Work related references if necessary. No-op by default.
      *
      * \param index index of input work.
@@ -109,6 +110,12 @@
     virtual void onInputBufferDone(c2_cntr64_t /* index */) {}
 
     /**
+     * Signal one input buffer as emptied.
+     * No-op by default.
+     */
+    virtual void onInputBufferEmptied() {}
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     virtual android_dataspace getDataspace() { return mDataSpace; }
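As a usage illustration of the new hook, a hypothetical wrapper (with InputSurfaceWrapper reduced to the two callbacks shown in this header) could count emptied buffers so the surface knows when it may submit more frames.

#include <cstdint>
#include <cstdio>

// Simplified stand-in for InputSurfaceWrapper, reduced to the two callbacks
// shown in this header; the real interface has more methods and state.
struct InputSurfaceWrapperSketch {
    virtual ~InputSurfaceWrapperSketch() = default;
    virtual void onInputBufferDone(uint64_t /*index*/) {}
    virtual void onInputBufferEmptied() {}
};

// Hypothetical wrapper that counts emptied buffers so the surface can throttle
// how many frames it submits while the codec pipeline is full.
struct ThrottlingSurfaceSketch : InputSurfaceWrapperSketch {
    int availableSlots = 0;
    void onInputBufferEmptied() override { ++availableSlots; }
};

int main() {
    ThrottlingSurfaceSketch surface;
    // The buffer channel calls this when input_surface_throttle is enabled and
    // an input buffer has been consumed (see CCodecBufferChannel.cpp above).
    surface.onInputBufferEmptied();
    std::printf("available slots: %d\n", surface.availableSlots);
    return 0;
}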
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index 77a76e8..7a33af4 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -32,10 +32,15 @@
 namespace android {
 
 
-static bool isAtLeast(int version, const char *codeName) {
-    char deviceCodeName[PROP_VALUE_MAX];
-    __system_property_get("ro.build.version.codename", deviceCodeName);
-    return android_get_device_api_level() >= version || !strcmp(deviceCodeName, codeName);
+static bool isAtLeast(int version, const std::string& codeName) {
+    static std::once_flag sCheckOnce;
+    static std::string sDeviceCodeName;
+    static int sDeviceApiLevel;
+    std::call_once(sCheckOnce, [&](){
+        sDeviceCodeName = base::GetProperty("ro.build.version.codename", "");
+        sDeviceApiLevel = android_get_device_api_level();
+    });
+    return sDeviceApiLevel >= version || sDeviceCodeName == codeName;
 }
 
 bool isAtLeastT() {
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 9f57bfd..dc06ee6 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -53,6 +53,7 @@
     ],
 
     defaults: [
+        "aconfig_lib_cc_static_link.defaults",
         "libcodec2_hal_selection",
     ],
 
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 2afe80c..d6b1163 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1707,14 +1707,14 @@
         mSelectedDeviceId = deviceId;
         if (mStatus == NO_ERROR) {
             if (isOffloadedOrDirect_l()) {
-                if (mState == STATE_STOPPED || mState == STATE_FLUSHED) {
-                    ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
-                    result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
-                } else {
+                if (isPlaying_l()) {
                     ALOGW("%s(%d). Offloaded or Direct track is not STOPPED or FLUSHED. "
                           "State: %s.",
                             __func__, mPortId, stateToString(mState));
                     result = INVALID_OPERATION;
+                } else {
+                    ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
+                    result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
                 }
             } else {
                 // allow track invalidation when track is not playing to propagate
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index 02c865d..1071beb 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -74,7 +74,7 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-audio-fuzzing-reports@google.com",
         ],
         componentid: 155276,
         hotlists: ["4593311"],
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index dd8f021..1a6b949 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -227,11 +227,11 @@
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_effect_ndk_shared",
         "latest_android_media_audio_common_types_ndk_shared",
+        "latest_av_audio_types_aidl_ndk_shared",
     ],
     shared_libs: [
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
-        "av-audio-types-aidl-V1-ndk",
         "libaudio_aidl_conversion_common_cpp",
         "libaudio_aidl_conversion_common_ndk",
         "libaudio_aidl_conversion_common_ndk_cpp",
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 2447b18..9f21404 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -386,7 +386,7 @@
         return runCb([](CbRef cb) { cb->onWriteReady(); });
     }
     ndk::ScopedAStatus onError() override {
-        return runCb([](CbRef cb) { cb->onError(); });
+        return runCb([](CbRef cb) { cb->onError(true /*isHardError*/); });
     }
     ndk::ScopedAStatus onDrainReady() override {
         return runCb([](CbRef cb) { cb->onDrainReady(); });
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index c35a60e..3fe2046 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -57,7 +57,9 @@
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::kEventFlagDataMqNotEmpty;
 using ::aidl::android::hardware::audio::effect::kEventFlagDataMqUpdate;
+using ::aidl::android::hardware::audio::effect::kEventFlagNotEmpty;
 using ::aidl::android::hardware::audio::effect::kReopenSupportedVersion;
 using ::aidl::android::hardware::audio::effect::State;
 
@@ -199,6 +201,7 @@
                               efState & kEventFlagDataMqUpdate) {
         ALOGV("%s %s V%d receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str(),
               halVersion);
+
         mConversion->reopen();
     }
     auto statusQ = mConversion->getStatusMQ();
@@ -224,12 +227,22 @@
               floatsToWrite, mInBuffer->audioBuffer(), inputQ->availableToWrite());
         return INVALID_OPERATION;
     }
-    efGroup->wake(aidl::android::hardware::audio::effect::kEventFlagNotEmpty);
+
+    // For the V2 audio effect HAL, use a different EventFlag to avoid a bit conflict
+    // with FMQ_NOT_EMPTY.
+    efGroup->wake(halVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty
+                                                        : kEventFlagNotEmpty);
 
     IEffect::Status retStatus{};
-    if (!statusQ->readBlocking(&retStatus, 1) || retStatus.status != OK ||
-        (size_t)retStatus.fmqConsumed != floatsToWrite || retStatus.fmqProduced == 0) {
-        ALOGE("%s read status failed: %s", __func__, retStatus.toString().c_str());
+    if (!statusQ->readBlocking(&retStatus, 1)) {
+        ALOGE("%s %s V%d read status from status FMQ failed", __func__, effectName.c_str(),
+              halVersion);
+        return INVALID_OPERATION;
+    }
+    if (retStatus.status != OK || (size_t)retStatus.fmqConsumed != floatsToWrite ||
+        retStatus.fmqProduced == 0) {
+        ALOGE("%s read status failed: %s, consumed %d (of %zu) produced %d", __func__,
+              retStatus.toString().c_str(), retStatus.fmqConsumed, floatsToWrite,
+              retStatus.fmqProduced);
         return INVALID_OPERATION;
     }
 
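A standalone sketch of the wake-flag selection above; the kEventFlag* bit values and the version constant below are placeholders for illustration, not the real AIDL definitions.

#include <cstdint>
#include <cstdio>

// Placeholder bit values; the real constants come from the audio effect AIDL types.
constexpr uint32_t kEventFlagNotEmptySketch = 1u << 0;        // conflicts with FMQ_NOT_EMPTY
constexpr uint32_t kEventFlagDataMqNotEmptySketch = 1u << 10; // V2: separate bit, no conflict
constexpr int kReopenSupportedVersionSketch = 2;

static uint32_t wakeFlagFor(int halVersion) {
    // V2+ effect HALs expect the dedicated "data MQ not empty" bit so the wake
    // does not collide with the FMQ's own FMQ_NOT_EMPTY signalling.
    return halVersion >= kReopenSupportedVersionSketch ? kEventFlagDataMqNotEmptySketch
                                                       : kEventFlagNotEmptySketch;
}

int main() {
    std::printf("V1 wake flag: 0x%x\n", (unsigned)wakeFlagFor(1));
    std::printf("V2 wake flag: 0x%x\n", (unsigned)wakeFlagFor(2));
    return 0;
}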
diff --git a/media/libaudiohal/impl/EffectProxy.cpp b/media/libaudiohal/impl/EffectProxy.cpp
index 9aa02e2..fb4658f 100644
--- a/media/libaudiohal/impl/EffectProxy.cpp
+++ b/media/libaudiohal/impl/EffectProxy.cpp
@@ -82,8 +82,7 @@
 ndk::ScopedAStatus EffectProxy::setOffloadParam(const effect_offload_param_t* offload) {
     const auto& itor = std::find_if(mSubEffects.begin(), mSubEffects.end(), [&](const auto& sub) {
         const auto& desc = sub.descriptor;
-        return offload->isOffload ==
-               (desc.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL);
+        return offload->isOffload == desc.common.flags.offloadIndication;
     });
     if (itor == mSubEffects.end()) {
         ALOGE("%s no %soffload sub-effect found", __func__, offload->isOffload ? "" : "non-");
@@ -93,7 +92,7 @@
     }
 
     mActiveSubIdx = std::distance(mSubEffects.begin(), itor);
-    ALOGI("%s: active %soffload sub-effect %zu descriptor: %s", __func__,
+    ALOGI("%s: active %soffload sub-effect %zu: %s", __func__,
           offload->isOffload ? "" : "non-", mActiveSubIdx,
           ::android::audio::utils::toString(mSubEffects[mActiveSubIdx].descriptor.common.id.uuid)
                   .c_str());
@@ -163,7 +162,7 @@
 
 ndk::ScopedAStatus EffectProxy::getDescriptor(Descriptor* desc) {
     *desc = mSubEffects[mActiveSubIdx].descriptor;
-    desc->common.id.uuid = desc->common.id.proxy.value();
+    desc->common = mDescriptorCommon;
     return ndk::ScopedAStatus::ok();
 }
 
@@ -185,42 +184,35 @@
     return ndk::ScopedAStatus::ok();
 }
 
+// Sub-effects are required to have identical capabilities, so we return the SW sub-effect
+// descriptor with the implementation UUID replaced by the proxy UUID, and with the flags
+// reflecting all sub-effects.
 Descriptor::Common EffectProxy::buildDescriptorCommon(
         const AudioUuid& uuid, const std::vector<Descriptor>& subEffectDescs) {
-    // initial flag values before we know which sub-effect to active (with setOffloadParam)
-    // align to HIDL EffectProxy flags
-    Descriptor::Common common = {.flags = {.type = Flags::Type::INSERT,
-                                           .insert = Flags::Insert::LAST,
-                                           .volume = Flags::Volume::CTRL}};
-
+    Descriptor::Common swCommon;
+    const Flags& firstFlag = subEffectDescs[0].common.flags;
+    bool offloadExist = false;
     for (const auto& desc : subEffectDescs) {
-        if (desc.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL) {
-            common.flags.hwAcceleratorMode = Flags::HardwareAccelerator::TUNNEL;
+        if (desc.common.flags.offloadIndication) {
+            offloadExist = true;
+        } else {
+            swCommon = desc.common;
         }
-
-        // set indication if any sub-effect indication was set
-        common.flags.offloadIndication |= desc.common.flags.offloadIndication;
-        common.flags.deviceIndication |= desc.common.flags.deviceIndication;
-        common.flags.audioModeIndication |= desc.common.flags.audioModeIndication;
-        common.flags.audioSourceIndication |= desc.common.flags.audioSourceIndication;
-        // Set to NONE if any sub-effect not supporting any Volume command
-        if (desc.common.flags.volume == Flags::Volume::NONE) {
-            common.flags.volume = Flags::Volume::NONE;
-        }
-        // set to AUXILIARY if any sub-effect is of AUXILIARY type
-        if (desc.common.flags.type == Flags::Type::AUXILIARY) {
-            common.flags.type = Flags::Type::AUXILIARY;
+        if (desc.common.flags.audioModeIndication != firstFlag.audioModeIndication ||
+            desc.common.flags.audioSourceIndication != firstFlag.audioSourceIndication ||
+            desc.common.flags.sinkMetadataIndication != firstFlag.sinkMetadataIndication ||
+            desc.common.flags.sourceMetadataIndication != firstFlag.sourceMetadataIndication ||
+            desc.common.flags.deviceIndication != firstFlag.deviceIndication) {
+            ALOGW("Inconsistent flags %s vs %s", desc.common.flags.toString().c_str(),
+                  firstFlag.toString().c_str());
         }
     }
 
-    // copy type UUID from any of sub-effects, all sub-effects should have same type
-    common.id.type = subEffectDescs[0].common.id.type;
+    swCommon.flags.offloadIndication = offloadExist;
     // replace implementation UUID with proxy UUID.
-    common.id.uuid = uuid;
-    common.id.proxy = std::nullopt;
-    common.name = "Proxy";
-    common.implementor = "AOSP";
-    return common;
+    swCommon.id.uuid = uuid;
+    swCommon.id.proxy = std::nullopt;
+    return swCommon;
 }
 
 // Handle with active sub-effect first, only send to other sub-effects when success
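A standalone sketch of the merge described in the comment above, using simplified stand-in structs that keep only the fields relevant here: take the software sub-effect's common block, OR-in the offload indication, and substitute the proxy UUID.

#include <string>
#include <vector>
#include <cstdio>

// Simplified stand-ins for Descriptor::Common and its flags.
struct FlagsSketch { bool offloadIndication = false; };
struct CommonSketch {
    std::string uuid;   // implementation UUID (proxy UUID after the merge)
    FlagsSketch flags;
};

static CommonSketch buildProxyCommon(const std::string& proxyUuid,
                                     const std::vector<CommonSketch>& subEffects) {
    CommonSketch swCommon;          // taken from the (non-offload) SW sub-effect
    bool offloadExists = false;
    for (const auto& sub : subEffects) {
        if (sub.flags.offloadIndication) {
            offloadExists = true;
        } else {
            swCommon = sub;
        }
    }
    swCommon.flags.offloadIndication = offloadExists;  // reflect all sub-effects
    swCommon.uuid = proxyUuid;                         // replace implementation UUID
    return swCommon;
}

int main() {
    std::vector<CommonSketch> subs = {
        {"sw-impl-uuid", {false}},
        {"offload-impl-uuid", {true}},
    };
    CommonSketch proxy = buildProxyCommon("proxy-uuid", subs);
    std::printf("uuid=%s offload=%d\n", proxy.uuid.c_str(), proxy.flags.offloadIndication);
    return 0;
}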
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 3b2f344..64cc7ed 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -188,7 +188,6 @@
         aidlEffect = ndk::SharedRefBase::make<EffectProxy>(
                 aidlUuid, mProxyUuidDescriptorMap.at(aidlUuid) /* sub-effect descriptor list */,
                 mFactory);
-        mProxyList.emplace_back(std::static_pointer_cast<EffectProxy>(aidlEffect));
     } else {
         RETURN_STATUS_IF_ERROR(
                 statusTFromBinderStatus(mFactory->createEffect(aidlUuid, &aidlEffect)));
@@ -205,15 +204,9 @@
 }
 
 status_t EffectsFactoryHalAidl::dumpEffects(int fd) {
-    status_t ret = OK;
-    // record the error ret and continue dump as many effects as possible
-    for (const auto& proxy : mProxyList) {
-        if (status_t temp = BAD_VALUE; proxy && (temp = proxy->dump(fd, nullptr, 0)) != OK) {
-            ret = temp;
-        }
-    }
+    // TODO: b/333803769 improve the effect dump implementation
     RETURN_STATUS_IF_ERROR(mFactory->dump(fd, nullptr, 0));
-    return ret;
+    return OK;
 }
 
 status_t EffectsFactoryHalAidl::allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) {
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.h b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
index 73089b0..3b8628c 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
@@ -84,9 +84,6 @@
     // Query result of pre and post processing from effect factory
     const std::vector<Processing> mAidlProcessings;
 
-    // list of the EffectProxy instances
-    std::list<std::shared_ptr<EffectProxy>> mProxyList;
-
     virtual ~EffectsFactoryHalAidl() = default;
     status_t getHalDescriptorWithImplUuid(
             const ::aidl::android::media::audio::common::AudioUuid& uuid,
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index 263e3e9..cbade70 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -136,8 +136,8 @@
     // 'sinks' will not be updated because 'setAudioPatch' only needs IDs. Here we log
     // the source arguments, where only the audio configuration and device specifications
     // are relevant.
-    ALOGD("%s: [disregard IDs] sources: %s, sinks: %s",
-            __func__, ::android::internal::ToString(sources).c_str(),
+    ALOGD("%s: patch ID: %d, [disregard IDs] sources: %s, sinks: %s",
+            __func__, *patchId, ::android::internal::ToString(sources).c_str(),
             ::android::internal::ToString(sinks).c_str());
     auto fillPortConfigs = [&](
             const std::vector<AudioPortConfig>& configs,
@@ -181,7 +181,9 @@
     };
     // When looking up port configs, the destinationPortId is only used for mix ports.
     // Thus, we process device port configs first, and look up the destination port ID from them.
-    bool sourceIsDevice = std::any_of(sources.begin(), sources.end(),
+    const bool sourceIsDevice = std::any_of(sources.begin(), sources.end(),
+            [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
+    const bool sinkIsDevice = std::any_of(sinks.begin(), sinks.end(),
             [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
     const std::vector<AudioPortConfig>& devicePortConfigs =
             sourceIsDevice ? sources : sinks;
@@ -202,10 +204,29 @@
         existingPatchIt->second = patch;
     } else {
         bool created = false;
-        RETURN_STATUS_IF_ERROR(findOrCreatePatch(patch, &patch, &created));
+        // When the framework does not specify a patch ID, only the mix port config
+        // is used for finding an existing patch. That's because the framework assumes
+        // that there can only be one patch for an I/O thread.
+        PatchMatch match = sourceIsDevice && sinkIsDevice ?
+                MATCH_BOTH : (sourceIsDevice ? MATCH_SINKS : MATCH_SOURCES);
+        auto requestedPatch = patch;
+        RETURN_STATUS_IF_ERROR(findOrCreatePatch(patch, match, &patch, &created));
         // No cleanup of the patch is needed, it is managed by the framework.
         *patchId = patch.id;
         if (!created) {
+            requestedPatch.id = patch.id;
+            if (patch != requestedPatch) {
+                ALOGI("%s: Updating transient patch. Current: %s, new: %s",
+                        __func__, patch.toString().c_str(), requestedPatch.toString().c_str());
+                // Since matching may be done by mix port only, update the patch if the device port
+                // config has changed.
+                patch = requestedPatch;
+                RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                                mModule->setAudioPatch(patch, &patch)));
+                existingPatchIt = mPatches.find(patch.id);
+                existingPatchIt->second = patch;
+            }
             // The framework might have "created" a patch which already existed due to
             // stream creation. Need to release the ownership from the stream.
             for (auto& s : mStreams) {
@@ -274,18 +295,18 @@
 }
 
 status_t Hal2AidlMapper::findOrCreatePatch(
-        const AudioPatch& requestedPatch, AudioPatch* patch, bool* created) {
+        const AudioPatch& requestedPatch, PatchMatch match, AudioPatch* patch, bool* created) {
     std::set<int32_t> sourcePortConfigIds(requestedPatch.sourcePortConfigIds.begin(),
             requestedPatch.sourcePortConfigIds.end());
     std::set<int32_t> sinkPortConfigIds(requestedPatch.sinkPortConfigIds.begin(),
             requestedPatch.sinkPortConfigIds.end());
-    return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, patch, created);
+    return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, match, patch, created);
 }
 
 status_t Hal2AidlMapper::findOrCreatePatch(
         const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
-        AudioPatch* patch, bool* created) {
-    auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds);
+        PatchMatch match, AudioPatch* patch, bool* created) {
+    auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds, match);
     if (patchIt == mPatches.end()) {
         AudioPatch requestedPatch, appliedPatch;
         requestedPatch.sourcePortConfigIds.insert(requestedPatch.sourcePortConfigIds.end(),
@@ -456,7 +477,8 @@
 }
 
 Hal2AidlMapper::Patches::iterator Hal2AidlMapper::findPatch(
-        const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds) {
+        const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
+        PatchMatch match) {
     return std::find_if(mPatches.begin(), mPatches.end(),
             [&](const auto& pair) {
                 const auto& p = pair.second;
@@ -464,7 +486,15 @@
                         p.sourcePortConfigIds.begin(), p.sourcePortConfigIds.end());
                 std::set<int32_t> patchSinks(
                         p.sinkPortConfigIds.begin(), p.sinkPortConfigIds.end());
-                return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks; });
+                switch (match) {
+                    case MATCH_SOURCES:
+                        return sourcePortConfigIds == patchSrcs;
+                    case MATCH_SINKS:
+                        return sinkPortConfigIds == patchSinks;
+                    case MATCH_BOTH:
+                        return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks;
+                }
+                return false;  // unreachable: all PatchMatch values are handled above
+            });
 }
 
 Hal2AidlMapper::Ports::iterator Hal2AidlMapper::findPort(const AudioDevice& device) {
@@ -816,10 +846,10 @@
     }
     if (isInput) {
         RETURN_STATUS_IF_ERROR(findOrCreatePatch(
-                        {devicePortConfigId}, {mixPortConfig->id}, patch, &created));
+                        {devicePortConfigId}, {mixPortConfig->id}, MATCH_BOTH, patch, &created));
     } else {
         RETURN_STATUS_IF_ERROR(findOrCreatePatch(
-                        {mixPortConfig->id}, {devicePortConfigId}, patch, &created));
+                        {mixPortConfig->id}, {devicePortConfigId}, MATCH_BOTH, patch, &created));
     }
     if (created) {
         cleanups->add(&Hal2AidlMapper::resetPatch, patch->id);
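The PatchMatch modes introduced above can be illustrated with plain integer sets (the real code compares port config IDs of AudioPatch objects); matching by mix port only is what lets a transient patch be updated in place when the framework re-requests it with a different device port.

#include <set>
#include <cstdio>

enum PatchMatch { MATCH_SOURCES, MATCH_SINKS, MATCH_BOTH };

static bool matches(const std::set<int>& reqSources, const std::set<int>& reqSinks,
                    const std::set<int>& patchSources, const std::set<int>& patchSinks,
                    PatchMatch match) {
    switch (match) {
        case MATCH_SOURCES: return reqSources == patchSources;
        case MATCH_SINKS: return reqSinks == patchSinks;
        case MATCH_BOTH: return reqSources == patchSources && reqSinks == patchSinks;
    }
    return false;
}

int main() {
    // A transient patch created for an output stream: mix port 10 -> device port 20.
    std::set<int> patchSources{10}, patchSinks{20};
    // The framework later requests the same mix port but a different device port.
    std::set<int> reqSources{10}, reqSinks{21};
    // Matching by sources only (the mix port) still finds the existing patch, so it
    // can be updated in place instead of creating a second patch for the same thread.
    const bool bySources = matches(reqSources, reqSinks, patchSources, patchSinks, MATCH_SOURCES);
    const bool byBoth = matches(reqSources, reqSinks, patchSources, patchSinks, MATCH_BOTH);
    std::printf("MATCH_SOURCES: %d, MATCH_BOTH: %d\n", bySources, byBoth);
    return 0;
}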
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.h b/media/libaudiohal/impl/Hal2AidlMapper.h
index f302c23..c70c8af 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.h
+++ b/media/libaudiohal/impl/Hal2AidlMapper.h
@@ -133,6 +133,8 @@
     using Streams = std::map<wp<StreamHalInterface>,
             std::pair<int32_t /*mix port config ID*/, int32_t /*patch ID*/>>;
 
+    enum PatchMatch { MATCH_SOURCES, MATCH_SINKS, MATCH_BOTH };
+
     const std::string mInstance;
     const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
 
@@ -150,11 +152,13 @@
             ::aidl::android::media::audio::common::AudioPortConfig* result, bool *created);
     void eraseConnectedPort(int32_t portId);
     status_t findOrCreatePatch(
-        const std::set<int32_t>& sourcePortConfigIds,
-        const std::set<int32_t>& sinkPortConfigIds,
+            const std::set<int32_t>& sourcePortConfigIds,
+            const std::set<int32_t>& sinkPortConfigIds,
+            PatchMatch match,
         ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
     status_t findOrCreatePatch(
         const ::aidl::android::hardware::audio::core::AudioPatch& requestedPatch,
+        PatchMatch match,
         ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
     status_t findOrCreateDevicePortConfig(
             const ::aidl::android::media::audio::common::AudioDevice& device,
@@ -175,7 +179,7 @@
         const std::set<int32_t>& destinationPortIds,
         ::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
     Patches::iterator findPatch(const std::set<int32_t>& sourcePortConfigIds,
-            const std::set<int32_t>& sinkPortConfigIds);
+            const std::set<int32_t>& sinkPortConfigIds, PatchMatch match);
     Ports::iterator findPort(const ::aidl::android::media::audio::common::AudioDevice& device);
     Ports::iterator findPort(
             const ::aidl::android::media::audio::common::AudioConfig& config,
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 97c9659..6c0dc76 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -200,8 +200,12 @@
     StreamDescriptor::Reply reply;
     switch (state) {
         case StreamDescriptor::State::ACTIVE:
+        case StreamDescriptor::State::DRAINING:
+        case StreamDescriptor::State::TRANSFERRING:
             RETURN_STATUS_IF_ERROR(pause(&reply));
-            if (reply.state != StreamDescriptor::State::PAUSED) {
+            if (reply.state != StreamDescriptor::State::PAUSED &&
+                    reply.state != StreamDescriptor::State::DRAIN_PAUSED &&
+                    reply.state != StreamDescriptor::State::TRANSFER_PAUSED) {
                 ALOGE("%s: unexpected stream state: %s (expected PAUSED)",
                         __func__, toString(reply.state).c_str());
                 return INVALID_OPERATION;
@@ -209,6 +213,7 @@
             FALLTHROUGH_INTENDED;
         case StreamDescriptor::State::PAUSED:
         case StreamDescriptor::State::DRAIN_PAUSED:
+        case StreamDescriptor::State::TRANSFER_PAUSED:
             if (mIsInput) return flush();
             RETURN_STATUS_IF_ERROR(flush(&reply));
             if (reply.state != StreamDescriptor::State::IDLE) {
@@ -248,20 +253,71 @@
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    const auto state = getState();
-    StreamDescriptor::Reply reply;
-    if (state == StreamDescriptor::State::STANDBY) {
-        RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
-        return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true);
+    if (!mContext.isMmapped()) {
+        return BAD_VALUE;
     }
-
-    return INVALID_OPERATION;
+    StreamDescriptor::Reply reply;
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    switch (reply.state) {
+        case StreamDescriptor::State::STANDBY:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
+            if (reply.state != StreamDescriptor::State::IDLE) {
+                ALOGE("%s: unexpected stream state: %s (expected IDLE)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            FALLTHROUGH_INTENDED;
+        case StreamDescriptor::State::IDLE:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true));
+            if (reply.state != StreamDescriptor::State::ACTIVE) {
+                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            FALLTHROUGH_INTENDED;
+        case StreamDescriptor::State::ACTIVE:
+            return OK;
+        case StreamDescriptor::State::DRAINING:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
+            if (reply.state != StreamDescriptor::State::ACTIVE) {
+                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            return OK;
+        default:
+            ALOGE("%s: not supported from %s stream state %s",
+                    __func__, mIsInput ? "input" : "output", toString(reply.state).c_str());
+            return INVALID_OPERATION;
+    }
 }
 
 status_t StreamHalAidl::stop() {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return standby();
+    if (!mContext.isMmapped()) {
+        return BAD_VALUE;
+    }
+    StreamDescriptor::Reply reply;
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    if (const auto state = reply.state; state == StreamDescriptor::State::ACTIVE) {
+        return drain(false /*earlyNotify*/, nullptr);
+    } else if (state == StreamDescriptor::State::DRAINING) {
+        RETURN_STATUS_IF_ERROR(pause());
+        return flush();
+    } else if (state == StreamDescriptor::State::PAUSED) {
+        return flush();
+    } else if (state != StreamDescriptor::State::IDLE &&
+            state != StreamDescriptor::State::STANDBY) {
+        ALOGE("%s: not supported from %s stream state %s",
+                __func__, mIsInput ? "input" : "output", toString(state).c_str());
+        return INVALID_OPERATION;
+    }
+    return OK;
 }
 
 status_t StreamHalAidl::getLatency(uint32_t *latency) {
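A simplified model of the start() ladder above for MMAP streams, with the stream replaced by a plain state variable and the HAL commands by stand-in functions that always succeed (illustrative only).

#include <cstdio>

enum class State { STANDBY, IDLE, ACTIVE, DRAINING, ERROR };

// Stand-ins for sendCommand(start) and sendCommand(burst(0)) on a healthy HAL.
static State sendStart(State) { return State::IDLE; }
static State sendBurstZero(State) { return State::ACTIVE; }

// Mirrors the fallthrough ladder: STANDBY -> start -> IDLE -> burst(0) -> ACTIVE.
static bool startMmapStream(State& s) {
    switch (s) {
        case State::STANDBY:
            s = sendStart(s);
            if (s != State::IDLE) return false;
            [[fallthrough]];
        case State::IDLE:
            s = sendBurstZero(s);
            if (s != State::ACTIVE) return false;
            [[fallthrough]];
        case State::ACTIVE:
            return true;
        case State::DRAINING:
            s = State::ACTIVE;  // 'start' resumes a draining stream
            return true;
        default:
            return false;       // not supported from this state
    }
}

int main() {
    State s = State::STANDBY;
    std::printf("started: %d, active: %d\n", startMmapStream(s), s == State::ACTIVE);
    return 0;
}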
@@ -276,11 +332,12 @@
     return OK;
 }
 
-status_t StreamHalAidl::getObservablePosition(int64_t *frames, int64_t *timestamp) {
+status_t StreamHalAidl::getObservablePosition(int64_t* frames, int64_t* timestamp,
+        StatePositions* statePositions) {
     ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
-    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions));
     *frames = std::max<int64_t>(0, reply.observable.frames);
     *timestamp = std::max<int64_t>(0, reply.observable.timeNs);
     return OK;
@@ -323,8 +380,11 @@
             return INVALID_OPERATION;
         }
     }
+    StreamContextAidl::DataMQ::Error fmqError = StreamContextAidl::DataMQ::Error::NONE;
+    std::string fmqErrorMsg;
     if (!mIsInput) {
-        bytes = std::min(bytes, mContext.getDataMQ()->availableToWrite());
+        bytes = std::min(bytes,
+                mContext.getDataMQ()->availableToWrite(&fmqError, &fmqErrorMsg));
     }
     StreamDescriptor::Command burst =
             StreamDescriptor::Command::make<StreamDescriptor::Command::Tag::burst>(bytes);
@@ -341,12 +401,14 @@
         LOG_ALWAYS_FATAL_IF(*transferred > bytes,
                 "%s: HAL module read %zu bytes, which exceeds requested count %zu",
                 __func__, *transferred, bytes);
-        if (auto toRead = mContext.getDataMQ()->availableToRead();
+        if (auto toRead = mContext.getDataMQ()->availableToRead(&fmqError, &fmqErrorMsg);
                 toRead != 0 && !mContext.getDataMQ()->read(static_cast<int8_t*>(buffer), toRead)) {
             ALOGE("%s: failed to read %zu bytes to data MQ", __func__, toRead);
             return NOT_ENOUGH_DATA;
         }
     }
+    LOG_ALWAYS_FATAL_IF(fmqError != StreamContextAidl::DataMQ::Error::NONE,
+            "%s", fmqErrorMsg.c_str());
     mStreamPowerLog.log(buffer, *transferred);
     return OK;
 }
@@ -379,10 +441,12 @@
                 return INVALID_OPERATION;
             }
             return OK;
-        } else if (state == StreamDescriptor::State::PAUSED) {
+        } else if (state == StreamDescriptor::State::PAUSED ||
+                   state == StreamDescriptor::State::TRANSFER_PAUSED ||
+                   state == StreamDescriptor::State::DRAIN_PAUSED) {
             return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
         } else {
-            ALOGE("%s: unexpected stream state: %s (expected IDLE or PAUSED)",
+            ALOGE("%s: unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
                         __func__, toString(state).c_str());
             return INVALID_OPERATION;
         }
@@ -430,8 +494,12 @@
     if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
         // Retrieve the current state together with position counters unconditionally
         // to ensure that the state on our side gets updated.
-        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
-                nullptr, true /*safeFromNonWorkerThread */);
+        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), nullptr,
+                    true /*safeFromNonWorkerThread */);
+        // For compatibility with HIDL behavior, apply a "soft" position reset
+        // after receiving the "drain ready" callback.
+        std::lock_guard l(mLock);
+        mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
     } else {
         ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
     }
@@ -439,15 +507,8 @@
 
 void StreamHalAidl::onAsyncError() {
     std::lock_guard l(mLock);
-    if (mLastReply.state == StreamDescriptor::State::IDLE ||
-        mLastReply.state == StreamDescriptor::State::DRAINING ||
-        mLastReply.state == StreamDescriptor::State::TRANSFERRING) {
-        mLastReply.state = StreamDescriptor::State::ERROR;
-        ALOGW("%s: onError received", __func__);
-    } else {
-        ALOGW("%s: unexpected onError in the state %s", __func__,
-                toString(mLastReply.state).c_str());
-    }
+    ALOGW("%s: received in the state %s", __func__, toString(mLastReply.state).c_str());
+    mLastReply.state = StreamDescriptor::State::ERROR;
 }
 
 status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
@@ -498,9 +559,9 @@
 }
 
 status_t StreamHalAidl::sendCommand(
-        const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+        const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
         ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
-        bool safeFromNonWorkerThread) {
+        bool safeFromNonWorkerThread, StatePositions* statePositions) {
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (!safeFromNonWorkerThread) {
         const pid_t workerTid = mWorkerTid.load(std::memory_order_acquire);
@@ -532,6 +593,23 @@
             }
             mLastReply = *reply;
             mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
+            if (!mIsInput && reply->status == STATUS_OK) {
+                if (command.getTag() == StreamDescriptor::Command::standby &&
+                        reply->state == StreamDescriptor::State::STANDBY) {
+                    mStatePositions.framesAtStandby = reply->observable.frames;
+                } else if (command.getTag() == StreamDescriptor::Command::flush &&
+                           reply->state == StreamDescriptor::State::IDLE) {
+                    mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+                } else if (!mContext.isAsynchronous() &&
+                        command.getTag() == StreamDescriptor::Command::drain &&
+                        (reply->state == StreamDescriptor::State::IDLE ||
+                                reply->state == StreamDescriptor::State::DRAINING)) {
+                    mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+                } // for asynchronous drain, the frame count is saved in 'onAsyncDrainReady'
+            }
+            if (statePositions != nullptr) {
+                *statePositions = mStatePositions;
+            }
         }
     }
     switch (reply->status) {
@@ -547,7 +625,8 @@
 }
 
 status_t StreamHalAidl::updateCountersIfNeeded(
-        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply) {
+        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
+        StatePositions* statePositions) {
     bool doUpdate = false;
     {
         std::lock_guard l(mLock);
@@ -557,10 +636,13 @@
         // Since updates are paced, it is OK to perform them from any thread, they should
         // not interfere with I/O operations of the worker.
         return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
-                reply, true /*safeFromNonWorkerThread */);
+                reply, true /*safeFromNonWorkerThread */, statePositions);
     } else if (reply != nullptr) {  // provide cached reply
         std::lock_guard l(mLock);
         *reply = mLastReply;
+        if (statePositions != nullptr) {
+            *statePositions = mStatePositions;
+        }
     }
     return OK;
 }
@@ -618,7 +700,7 @@
 status_t StreamOutHalAidl::setVolume(float left, float right) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    size_t channelCount = audio_channel_out_mask_from_count(mConfig.channel_mask);
+    size_t channelCount = audio_channel_count_from_out_mask(mConfig.channel_mask);
     if (channelCount == 0) channelCount = 2;
     std::vector<float> volumes(channelCount);
     if (channelCount == 1) {
@@ -647,21 +729,27 @@
     return transfer(const_cast<void*>(buffer), bytes, written);
 }
 
-status_t StreamOutHalAidl::getRenderPosition(uint32_t *dspFrames) {
+status_t StreamOutHalAidl::getRenderPosition(uint64_t *dspFrames) {
     if (dspFrames == nullptr) {
         return BAD_VALUE;
     }
     int64_t aidlFrames = 0, aidlTimestamp = 0;
-    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
-    *dspFrames = static_cast<uint32_t>(aidlFrames);
+    StatePositions statePositions{};
+    RETURN_STATUS_IF_ERROR(
+            getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
+    // Number of audio frames since the stream has exited standby.
+    // See the table at the start of 'StreamOutHalInterface' for when it needs to reset.
+    int64_t mostRecentResetPoint;
+    if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
+        mostRecentResetPoint = statePositions.framesAtStandby;
+    } else {
+        mostRecentResetPoint =
+                std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+    }
+    *dspFrames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
     return OK;
 }
 
-status_t StreamOutHalAidl::getNextWriteTimestamp(int64_t *timestamp __unused) {
-    // Obsolete, use getPresentationPosition.
-    return INVALID_OPERATION;
-}
-
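A worked sketch of the reset-point arithmetic in getRenderPosition() above, with positions as plain integers (the values are hypothetical).

#include <algorithm>
#include <cstdint>
#include <cstdio>

struct StatePositionsSketch {
    int64_t framesAtFlushOrDrain = 0;
    int64_t framesAtStandby = 0;
};

// Frames rendered since the most recent reset point, clamped at zero.
static uint64_t renderPosition(int64_t observableFrames, const StatePositionsSketch& p,
                               bool proportionalAndSync) {
    const int64_t resetPoint = proportionalAndSync
            ? p.framesAtStandby
            : std::max(p.framesAtStandby, p.framesAtFlushOrDrain);
    return observableFrames <= resetPoint ? 0
                                          : static_cast<uint64_t>(observableFrames - resetPoint);
}

int main() {
    StatePositionsSketch p{/*framesAtFlushOrDrain=*/96000, /*framesAtStandby=*/48000};
    // Offloaded bitstream: both flush/drain and standby reset the count -> 4000.
    std::printf("offload: %llu\n", (unsigned long long)renderPosition(100000, p, false));
    // Mixed PCM played synchronously: only standby resets it -> 52000.
    std::printf("pcm: %llu\n", (unsigned long long)renderPosition(100000, p, true));
    return 0;
}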
 status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
     ALOGD("%p %s", this, __func__);
     TIME_CHECK();
@@ -715,13 +803,26 @@
         return BAD_VALUE;
     }
     int64_t aidlFrames = 0, aidlTimestamp = 0;
-    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
-    *frames = aidlFrames;
+    StatePositions statePositions{};
+    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
+    // See the table at the start of 'StreamOutHalInterface'.
+    if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
+        *frames = aidlFrames;
+    } else {
+        const int64_t mostRecentResetPoint =
+                std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+        *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
+    }
     timestamp->tv_sec = aidlTimestamp / NANOS_PER_SECOND;
     timestamp->tv_nsec = aidlTimestamp - timestamp->tv_sec * NANOS_PER_SECOND;
     return OK;
 }
 
+status_t StreamOutHalAidl::presentationComplete() {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    return OK;
+}
+
 status_t StreamOutHalAidl::updateSourceMetadata(
         const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
     TIME_CHECK();
@@ -851,10 +952,10 @@
     }
 }
 
-void StreamOutHalAidl::onError() {
+void StreamOutHalAidl::onError(bool isHardError) {
     onAsyncError();
     if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
-        clientCb->onError();
+        clientCb->onError(isHardError);
     }
 }
 
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index b20eb00..9cb2cff 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -95,7 +95,8 @@
     size_t getBufferSizeBytes() const { return mFrameSizeBytes * mBufferSizeFrames; }
     size_t getBufferSizeFrames() const { return mBufferSizeFrames; }
     size_t getBufferDurationMs(int32_t sampleRate) const {
-        return sampleRate != 0 ? mBufferSizeFrames * MILLIS_PER_SECOND / sampleRate : 0;
+        auto bufferSize = mIsMmapped ? getMmapBurstSize() : mBufferSizeFrames;
+        return sampleRate != 0 ? bufferSize * MILLIS_PER_SECOND / sampleRate : 0;
     }
     CommandMQ* getCommandMQ() const { return mCommandMQ.get(); }
     DataMQ* getDataMQ() const { return mDataMQ.get(); }
@@ -104,7 +105,7 @@
     bool isAsynchronous() const { return mIsAsynchronous; }
     bool isMmapped() const { return mIsMmapped; }
     const MmapBufferDescriptor& getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
-
+    size_t getMmapBurstSize() const { return mMmapBufferDescriptor.burstSizeFrames; }
   private:
     static std::unique_ptr<DataMQ> maybeCreateDataMQ(
             const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
@@ -194,6 +195,11 @@
     // For tests.
     friend class sp<StreamHalAidl>;
 
+    struct StatePositions {
+        int64_t framesAtFlushOrDrain;
+        int64_t framesAtStandby;
+    };
+
     template<class T>
     static std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> getStreamCommon(
             const std::shared_ptr<T>& stream);
@@ -212,7 +218,8 @@
     status_t getLatency(uint32_t *latency);
 
     // Always returns non-negative values.
-    status_t getObservablePosition(int64_t *frames, int64_t *timestamp);
+    status_t getObservablePosition(int64_t* frames, int64_t* timestamp,
+            StatePositions* statePositions = nullptr);
 
     // Always returns non-negative values.
     status_t getHardwarePosition(int64_t *frames, int64_t *timestamp);
@@ -268,11 +275,13 @@
     // Note: Since `sendCommand` takes mLock while holding mCommandReplyLock, never call
     // it with `mLock` being held.
     status_t sendCommand(
-            const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
             ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
-            bool safeFromNonWorkerThread = false);
+            bool safeFromNonWorkerThread = false,
+            StatePositions* statePositions = nullptr);
     status_t updateCountersIfNeeded(
-            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr);
+            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
+            StatePositions* statePositions = nullptr);
 
     const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> mStream;
     const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
@@ -280,6 +289,9 @@
     std::mutex mLock;
     ::aidl::android::hardware::audio::core::StreamDescriptor::Reply mLastReply GUARDED_BY(mLock);
     int64_t mLastReplyExpirationNs GUARDED_BY(mLock) = 0;
+    // Cached values of observable positions when the stream last entered certain state.
+    // Updated for output streams only.
+    StatePositions mStatePositions GUARDED_BY(mLock) = {};
     // mStreamPowerLog is used for audio signal power logging.
     StreamPowerLog mStreamPowerLog;
     std::atomic<pid_t> mWorkerTid = -1;
@@ -308,10 +320,7 @@
 
     // Return the number of audio frames written by the audio dsp to DAC since
     // the output has exited standby.
-    status_t getRenderPosition(uint32_t *dspFrames) override;
-
-    // Get the local time at which the next write to the audio driver will be presented.
-    status_t getNextWriteTimestamp(int64_t *timestamp) override;
+    status_t getRenderPosition(uint64_t *dspFrames) override;
 
     // Set the callback for notifying completion of non-blocking write and drain.
     status_t setCallback(wp<StreamOutHalInterfaceCallback> callback) override;
@@ -331,12 +340,19 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     status_t drain(bool earlyNotify) override;
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies to the audio driver to flush (that is, drop) the queued data. Stream must
+    // already be paused before calling 'flush'.
     status_t flush() override;
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) override;
 
+    // Notifies the HAL layer that the framework considers the current playback as completed.
+    status_t presentationComplete() override;
+
     // Called when the metadata of the stream's source has been changed.
     status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
 
@@ -370,7 +386,7 @@
     // StreamOutHalInterfaceCallback
     void onWriteReady() override;
     void onDrainReady() override;
-    void onError() override;
+    void onError(bool isHardError) override;
 
   private:
     friend class sp<StreamOutHalAidl>;
@@ -413,6 +429,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     status_t getCapturePosition(int64_t *frames, int64_t *time) override;
 
     // Get active microphones
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 77c75db..a931fdd 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "StreamHalHidl"
 //#define LOG_NDEBUG 0
 
+#include <cinttypes>
+
 #include <android/hidl/manager/1.0/IServiceManager.h>
 #include <hwbinder/IPCThreadState.h>
 #include <media/AudioParameter.h>
@@ -589,32 +591,39 @@
     return OK;
 }
 
-status_t StreamOutHalHidl::getRenderPosition(uint32_t *dspFrames) {
+status_t StreamOutHalHidl::getRenderPosition(uint64_t *dspFrames) {
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
     Result retval;
+    uint32_t halPosition = 0;
     Return<void> ret = mStream->getRenderPosition(
             [&](Result r, uint32_t d) {
                 retval = r;
                 if (retval == Result::OK) {
-                    *dspFrames = d;
+                    halPosition = d;
                 }
             });
-    return processReturn("getRenderPosition", ret, retval);
-}
+    status_t status = processReturn("getRenderPosition", ret, retval);
+    if (status != OK) {
+        return status;
+    }
+    // Maintain a 64-bit render position using the 32-bit result from the HAL.
+    // This delta calculation relies on the arithmetic overflow behavior
+    // of integers. For example (100 - 0xFFFFFFF0) = 116.
+    std::lock_guard l(mPositionMutex);
+    const auto truncatedPosition = (uint32_t)mRenderPosition;
+    int32_t deltaHalPosition; // initialization not needed, overwritten by __builtin_sub_overflow()
+    (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
 
-status_t StreamOutHalHidl::getNextWriteTimestamp(int64_t *timestamp) {
-    TIME_CHECK();
-    if (mStream == 0) return NO_INIT;
-    Result retval;
-    Return<void> ret = mStream->getNextWriteTimestamp(
-            [&](Result r, int64_t t) {
-                retval = r;
-                if (retval == Result::OK) {
-                    *timestamp = t;
-                }
-            });
-    return processReturn("getRenderPosition", ret, retval);
+    if (deltaHalPosition >= 0) {
+        mRenderPosition += deltaHalPosition;
+    } else if (mExpectRetrograde) {
+        mExpectRetrograde = false;
+        mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
+        ALOGW("Retrograde motion of %" PRId32 " frames", -deltaHalPosition);
+    }
+    *dspFrames = mRenderPosition;
+    return OK;
 }
 
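The 32-bit to 64-bit position extension above can be exercised in isolation; the sketch below reuses the same wraparound-aware delta idea (__builtin_sub_overflow) and the mExpectRetrograde escape hatch, with simplified members and no locking.

#include <cstdint>
#include <cstdio>

// Maintains a monotonically increasing 64-bit render position from a HAL that
// reports a 32-bit counter which can wrap around.
struct PositionTracker {
    uint64_t renderPosition = 0;
    bool expectRetrograde = false;  // set when the HAL is allowed to jump backwards

    uint64_t update(uint32_t halPosition) {
        const uint32_t truncated = static_cast<uint32_t>(renderPosition);
        int32_t delta;
        // Wraparound-aware difference, e.g. 100 - 0xFFFFFFF0 yields +116.
        (void)__builtin_sub_overflow(halPosition, truncated, &delta);
        if (delta >= 0) {
            renderPosition += delta;
        } else if (expectRetrograde) {
            expectRetrograde = false;
            renderPosition -= static_cast<uint64_t>(-static_cast<int64_t>(delta));
        }
        return renderPosition;
    }
};

int main() {
    PositionTracker t;
    t.renderPosition = 0xFFFFFFF0u;  // just below the 32-bit wrap point
    // The HAL wraps to 100; the 64-bit position keeps growing: 0x100000064 (4294967396).
    std::printf("%llu\n", (unsigned long long)t.update(100));
    return 0;
}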
 status_t StreamOutHalHidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
@@ -667,9 +676,23 @@
 status_t StreamOutHalHidl::flush() {
     TIME_CHECK();
     if (mStream == 0) return NO_INIT;
+    {
+        std::lock_guard l(mPositionMutex);
+        mRenderPosition = 0;
+        mExpectRetrograde = false;
+    }
-    return processReturn("pause", mStream->flush());
+    return processReturn("flush", mStream->flush());
 }
 
+status_t StreamOutHalHidl::standby() {
+    {
+        std::lock_guard l(mPositionMutex);
+        mRenderPosition = 0;
+        mExpectRetrograde = false;
+    }
+    return StreamHalHidl::standby();
+}
+
 status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
@@ -696,6 +719,16 @@
     }
 }
 
+status_t StreamOutHalHidl::presentationComplete() {
+    // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
+    // transitioning between tracks.
+    // The HAL resets the frame position without flush/stop being called, but calls back prior to
+    // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
+    // mRenderPosition.
+    std::lock_guard l(mPositionMutex);  // mExpectRetrograde is guarded by mPositionMutex
+    mExpectRetrograde = true;
+    return OK;
+}
+
 #if MAJOR_VERSION == 2
 status_t StreamOutHalHidl::updateSourceMetadata(
         const StreamOutHalInterface::SourceMetadata& /* sourceMetadata */) {
@@ -964,7 +997,7 @@
     sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
     if (callback == 0) return;
     ALOGV("asyncCallback onError");
-    callback->onError();
+    callback->onError(false /*isHardError*/);
 }
 
 void StreamOutHalHidl::onCodecFormatChanged(const std::vector<uint8_t>& metadataBs) {
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 48da633..433e0a3 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -18,10 +18,12 @@
 #define ANDROID_HARDWARE_STREAM_HAL_HIDL_H
 
 #include <atomic>
+#include <mutex>
 
 #include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStream.h)
 #include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamIn.h)
 #include PATH(android/hardware/audio/FILE_VERSION/IStreamOut.h)
+#include <android-base/thread_annotations.h>
 #include <fmq/EventFlag.h>
 #include <fmq/MessageQueue.h>
 #include <media/audiohal/EffectHalInterface.h>
@@ -119,6 +121,9 @@
 
 class StreamOutHalHidl : public StreamOutHalInterface, public StreamHalHidl {
   public:
+    // Put the audio hardware input/output into standby mode (from StreamHalInterface).
+    status_t standby() override;
+
     // Return the frame size (number of bytes per sample) of a stream.
     virtual status_t getFrameSize(size_t *size);
 
@@ -136,10 +141,7 @@
 
     // Return the number of audio frames written by the audio dsp to DAC since
     // the output has exited standby.
-    virtual status_t getRenderPosition(uint32_t *dspFrames);
-
-    // Get the local time at which the next write to the audio driver will be presented.
-    virtual status_t getNextWriteTimestamp(int64_t *timestamp);
+    virtual status_t getRenderPosition(uint64_t *dspFrames);
 
     // Set the callback for notifying completion of non-blocking write and drain.
     virtual status_t setCallback(wp<StreamOutHalInterfaceCallback> callback);
@@ -159,12 +161,19 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     virtual status_t drain(bool earlyNotify);
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies to the audio driver to flush (that is, drop) the queued data. Stream must
+    // already be paused before calling 'flush'.
     virtual status_t flush();
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
 
+    // Notifies the HAL layer that the framework considers the current playback as completed.
+    status_t presentationComplete() override;
+
     // Called when the metadata of the stream's source has been changed.
     status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
 
@@ -221,6 +230,10 @@
     std::unique_ptr<StatusMQ> mStatusMQ;
     std::atomic<pid_t> mWriterClient;
     EventFlag* mEfGroup;
+    std::mutex mPositionMutex;
+    // Used to expand correctly the 32-bit position from the HAL.
+    uint64_t mRenderPosition GUARDED_BY(mPositionMutex) = 0;
+    bool mExpectRetrograde GUARDED_BY(mPositionMutex) = false; // See 'presentationComplete'.
 
     // Can not be constructed directly by clients.
     StreamOutHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream);
@@ -250,6 +263,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     virtual status_t getCapturePosition(int64_t *frames, int64_t *time);
 
     // Get active microphones
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 37615af..4bd7e3d 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -107,7 +107,7 @@
   public:
     virtual void onWriteReady() {}
     virtual void onDrainReady() {}
-    virtual void onError() {}
+    virtual void onError(bool /*isHardError*/) {}
 
   protected:
     StreamOutHalInterfaceCallback() = default;
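A hypothetical client of the updated callback, with StreamOutHalInterfaceCallback reduced to the error hook, showing how hard and soft errors might be handled differently.

#include <cstdio>

// Simplified stand-in for StreamOutHalInterfaceCallback, reduced to the error hook.
struct CallbackSketch {
    virtual ~CallbackSketch() = default;
    virtual void onError(bool /*isHardError*/) {}
};

// Hypothetical client: a soft error lets the stream be retried or recreated,
// a hard error tears the output down.
struct LoggingCallback : CallbackSketch {
    void onError(bool isHardError) override {
        if (isHardError) {
            std::puts("hard error: closing the stream");
        } else {
            std::puts("soft error: attempting recovery");
        }
    }
};

int main() {
    LoggingCallback cb;
    cb.onError(false);  // the HIDL path above reports onError(false /*isHardError*/)
    cb.onError(true);   // DeviceHalAidl above reports onError(true /*isHardError*/)
    return 0;
}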
@@ -135,6 +135,38 @@
     virtual ~StreamOutHalInterfaceLatencyModeCallback() = default;
 };
 
+/**
+ * On position reporting. There are two methods: 'getRenderPosition' and
+ * 'getPresentationPosition'. The first difference is that they may have a
+ * time offset because "render" position relates to what happens between
+ * ADSP and DAC, while "observable" position is relative to the external
+ * observer. The second difference is that 'getRenderPosition' always
+ * resets on standby (for all types of stream data) according to its
+ * definition. Since the original C definition of 'getRenderPosition' used
+ * 32-bit frame counters, and also because in complex playback chains that
+ * include wireless devices the "observable" position has more practical
+ * meaning, 'getRenderPosition' does not exist in the AIDL HAL interface.
+ * The table below summarizes frame count behavior for 'getPresentationPosition':
+ *
+ *               | Mixed      | Direct       | Direct
+ *               |            | non-offload  | offload
+ * ==============|============|==============|==============
+ *  PCM and      | Continuous |              |
+ *  encapsulated |            |              |
+ *  bitstream    |            |              |
+ * --------------|------------| Continuous†  |
+ *  Bitstream    |            |              | Reset on
+ *  encapsulated |            |              | flush, drain
+ *  into PCM     |            |              | and standby
+ *               | Not        |              |
+ * --------------| supported  |--------------|
+ *  Bitstream    |            | Reset on     |
+ *               |            | flush, drain |
+ *               |            | and standby  |
+ *               |            |              |
+ *
+ * † - on standby, reset of the frame count happens at the framework level.
+ */
 class StreamOutHalInterface : public virtual StreamHalInterface {
   public:
     // Return the audio hardware driver estimated latency in milliseconds.
@@ -151,10 +183,7 @@
 
     // Return the number of audio frames written by the audio dsp to DAC since
     // the output has exited standby.
-    virtual status_t getRenderPosition(uint32_t *dspFrames) = 0;
-
-    // Get the local time at which the next write to the audio driver will be presented.
-    virtual status_t getNextWriteTimestamp(int64_t *timestamp) = 0;
+    virtual status_t getRenderPosition(uint64_t *dspFrames) = 0;
 
     // Set the callback for notifying completion of non-blocking write and drain.
     // The callback must be owned by someone else. The output stream does not own it
@@ -176,12 +205,19 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     virtual status_t drain(bool earlyNotify) = 0;
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies to the audio driver to flush (that is, drop) the queued data. Stream must
+    // already be paused before calling 'flush'.
     virtual status_t flush() = 0;
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) = 0;
 
+    // Notifies the HAL layer that the framework considers the current playback completed.
+    virtual status_t presentationComplete() = 0;
+
     struct SourceMetadata {
         std::vector<playback_track_metadata_v7_t> tracks;
     };
@@ -270,6 +306,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     virtual status_t getCapturePosition(int64_t *frames, int64_t *time) = 0;
 
     // Get active microphones
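
A minimal sketch, assuming the libaudiohal headers above, of how a client of 'StreamOutHalInterface' might keep a monotonic presented-frame count for a direct offload stream, where the HAL-side count resets on flush, drain and standby per the table above; the struct and member names are hypothetical and not part of this change.

    // Hypothetical client-side tracker; illustration only.
    struct PresentedFrameTracker {
        sp<StreamOutHalInterface> stream;
        uint64_t base = 0;          // frames presented before the last HAL-side reset
        uint64_t lastHalFrames = 0; // last count reported by getPresentationPosition()

        // Call after flush(), after drain completion, or when a direct offload stream
        // enters standby, i.e. at the points where the HAL-side count resets.
        void onHalPositionReset() {
            base += lastHalFrames;
            lastHalFrames = 0;
        }

        status_t getTotalFrames(uint64_t* totalFrames, struct timespec* timestamp) {
            uint64_t halFrames = 0;
            status_t status = stream->getPresentationPosition(&halFrames, timestamp);
            if (status != OK) return status;
            lastHalFrames = halFrames;
            *totalFrames = base + halFrames;
            return OK;
        }
    };
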
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 5106874..0bd6fb0 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -39,6 +39,7 @@
 using ::aidl::android::hardware::audio::core::VendorParameter;
 using ::aidl::android::media::audio::common::AudioChannelLayout;
 using ::aidl::android::media::audio::common::AudioConfig;
+using ::aidl::android::media::audio::common::AudioDevice;
 using ::aidl::android::media::audio::common::AudioDeviceDescription;
 using ::aidl::android::media::audio::common::AudioDeviceType;
 using ::aidl::android::media::audio::common::AudioFormatDescription;
@@ -160,6 +161,24 @@
             createProfile(PcmType::INT_16_BIT, {AudioChannelLayout::LAYOUT_STEREO}, {48000})};
     Configuration c;
 
+    AudioPort micInDevice =
+            createPort(c.nextPortId++, "Built-In Mic", 0, true,
+                       createPortDeviceExt(AudioDeviceType::IN_MICROPHONE,
+                                           1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE));
+    micInDevice.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(micInDevice);
+
+    AudioPort micInBackDevice =
+            createPort(c.nextPortId++, "Built-In Back Mic", 0, true,
+                       createPortDeviceExt(AudioDeviceType::IN_MICROPHONE_BACK, 0));
+    micInBackDevice.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(micInBackDevice);
+
+    AudioPort primaryInMix =
+            createPort(c.nextPortId++, "primary input", 0, true, createPortMixExt(0, 1));
+    primaryInMix.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(primaryInMix);
+
     AudioPort btOutDevice =
             createPort(c.nextPortId++, "BT A2DP Out", 0, false,
                        createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
@@ -172,6 +191,7 @@
     btOutMix.profiles = standardPcmAudioProfiles;
     c.ports.push_back(btOutMix);
 
+    c.routes.push_back(createRoute({micInDevice, micInBackDevice}, primaryInMix));
     c.routes.push_back(createRoute({btOutMix}, btOutDevice));
 
     return c;
@@ -184,6 +204,11 @@
     explicit ModuleMock(const Configuration& config) : mConfig(config) {}
     bool isScreenTurnedOn() const { return mIsScreenTurnedOn; }
     ScreenRotation getScreenRotation() const { return mScreenRotation; }
+    std::vector<AudioPatch> getPatches() {
+        std::vector<AudioPatch> result;
+        getAudioPatches(&result);
+        return result;
+    }
 
   private:
     ndk::ScopedAStatus setModuleDebug(
@@ -1141,3 +1166,51 @@
     EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
     EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
 }
+
+TEST_F(Hal2AidlMapperTest, ChangeTransientPatchDevice) {
+    std::mutex mutex;  // Only needed for cleanups.
+    auto mapperAccessor = std::make_unique<LockedAccessor<Hal2AidlMapper>>(*mMapper, mutex);
+    Hal2AidlMapper::Cleanups cleanups(*mapperAccessor);
+    AudioConfig config;
+    config.base.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+            AudioChannelLayout::LAYOUT_STEREO);
+    config.base.format =
+            AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT};
+    config.base.sampleRate = 48000;
+    AudioDevice defaultDevice;
+    defaultDevice.type.type = AudioDeviceType::IN_DEFAULT;
+    AudioPortConfig mixPortConfig;
+    AudioPatch transientPatch;
+    ASSERT_EQ(OK, mMapper->prepareToOpenStream(43 /*ioHandle*/, defaultDevice,
+                                               AudioIoFlags::make<AudioIoFlags::input>(0),
+                                               AudioSource::DEFAULT, &cleanups, &config,
+                                               &mixPortConfig, &transientPatch));
+    cleanups.disarmAll();
+    ASSERT_NE(0, transientPatch.id);
+    ASSERT_NE(0, mixPortConfig.id);
+    sp<StreamHalInterface> stream = sp<StreamHalMock>::make();
+    mMapper->addStream(stream, mixPortConfig.id, transientPatch.id);
+
+    AudioPatch patch{};
+    int32_t patchId;
+    AudioPortConfig backMicPortConfig;
+    backMicPortConfig.channelMask = config.base.channelMask;
+    backMicPortConfig.format = config.base.format;
+    backMicPortConfig.sampleRate = aidl::android::media::audio::common::Int{config.base.sampleRate};
+    backMicPortConfig.flags = AudioIoFlags::make<AudioIoFlags::input>(0);
+    backMicPortConfig.ext = createPortDeviceExt(AudioDeviceType::IN_MICROPHONE_BACK, 0);
+    ASSERT_EQ(OK, mMapper->createOrUpdatePatch({backMicPortConfig}, {mixPortConfig}, &patchId,
+                                               &cleanups));
+    cleanups.disarmAll();
+    ASSERT_EQ(android::OK,
+              mMapper->findPortConfig(backMicPortConfig.ext.get<AudioPortExt::device>().device,
+                                      &backMicPortConfig));
+    EXPECT_NE(0, backMicPortConfig.id);
+
+    EXPECT_EQ(transientPatch.id, patchId);
+    auto patches = mModule->getPatches();
+    auto patchIt = findById(patches, patchId);
+    ASSERT_NE(patchIt, patches.end());
+    EXPECT_EQ(std::vector<int32_t>{backMicPortConfig.id}, patchIt->sourcePortConfigIds);
+    EXPECT_EQ(std::vector<int32_t>{mixPortConfig.id}, patchIt->sinkPortConfigIds);
+}
diff --git a/media/libeffects/data/Android.bp b/media/libeffects/data/Android.bp
new file mode 100644
index 0000000..2acf229
--- /dev/null
+++ b/media/libeffects/data/Android.bp
@@ -0,0 +1,19 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+prebuilt_etc {
+    name: "framework-audio_effects.xml",
+    src: "audio_effects.xml",
+    filename: "audio_effects.xml",
+}
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
index de60ca4..883d41d 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.cpp
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -177,7 +177,10 @@
      * in the life cycle of workerThread (threadLoop).
      */
     uint32_t efState = 0;
-    if (!mEventFlag || ::android::OK != mEventFlag->wait(kEventFlagNotEmpty, &efState)) {
+    if (!mEventFlag ||
+        ::android::OK != mEventFlag->wait(mDataMqNotEmptyEf, &efState, 0 /* no timeout */,
+                                          true /* retry */) ||
+        !(efState & mDataMqNotEmptyEf)) {
         LOG(ERROR) << getEffectName() << __func__ << ": StatusEventFlag invalid";
     }
 
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index fdc16e3..836e034 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -213,11 +213,12 @@
     RETURN_OK_IF(mState != State::INIT);
     mImplContext = createContext(common);
     RETURN_IF(!mContext || !mImplContext, EX_NULL_POINTER, "createContextFailed");
-    int version = 0;
-    RETURN_IF(!getInterfaceVersion(&version).isOk(), EX_UNSUPPORTED_OPERATION,
+    RETURN_IF(!getInterfaceVersion(&mVersion).isOk(), EX_UNSUPPORTED_OPERATION,
               "FailedToGetInterfaceVersion");
-    mImplContext->setVersion(version);
+    mImplContext->setVersion(mVersion);
     mEventFlag = mImplContext->getStatusEventFlag();
+    mDataMqNotEmptyEf =
+            mVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty : kEventFlagNotEmpty;
 
     if (specific.has_value()) {
         RETURN_IF_ASTATUS_NOT_OK(setParameterSpecific(specific.value()), "setSpecParamErr");
@@ -231,8 +232,9 @@
 
     mState = State::IDLE;
     mContext->dupeFmq(ret);
-    RETURN_IF(createThread(getEffectName()) != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION,
-              "FailedToCreateWorker");
+    RETURN_IF(createThread(getEffectNameWithVersion()) != RetCode::SUCCESS,
+              EX_UNSUPPORTED_OPERATION, "FailedToCreateWorker");
+    LOG(INFO) << getEffectNameWithVersion() << __func__;
     return ndk::ScopedAStatus::ok();
 }
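
A minimal sketch of the event-flag pattern these AIDL effects now share, assuming the effect utility constants referenced in this change ('kReopenSupportedVersion', 'kEventFlagDataMqNotEmpty', 'kEventFlagNotEmpty'); the local variable names are illustrative. The bit selected from the interface version at open() time is the same bit the worker thread waits on and checks in the returned state, since wait() with retry can return with other bits set.

    // Illustrative only; mirrors the selection in open() and the wait in threadLoop().
    const uint32_t dataMqNotEmptyEf =
            version >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty : kEventFlagNotEmpty;

    uint32_t efState = 0;
    if (!eventFlag ||
        ::android::OK != eventFlag->wait(dataMqNotEmptyEf, &efState, 0 /* no timeout */,
                                         true /* retry */) ||
        !(efState & dataMqNotEmptyEf)) {
        LOG(ERROR) << "data MQ event flag invalid or expected bit not set";
    }
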
 
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index 9c440df..ada301b 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -118,26 +118,21 @@
 
 RetCode DynamicsProcessingContext::setPreEq(
         const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
-    return setDpChannels_l<dp_fx::DPEq>(channels, mEngineArchitecture.preEqStage.inUse,
-                                        StageType::PREEQ);
+    return setDpChannels_l<dp_fx::DPEq>(channels, StageType::PREEQ);
 }
 
 RetCode DynamicsProcessingContext::setPostEq(
         const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
-    return setDpChannels_l<dp_fx::DPEq>(channels, mEngineArchitecture.postEqStage.inUse,
-                                        StageType::POSTEQ);
+    return setDpChannels_l<dp_fx::DPEq>(channels, StageType::POSTEQ);
 }
 
 RetCode DynamicsProcessingContext::setMbc(
         const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
-    return setDpChannels_l<dp_fx::DPMbc>(channels, mEngineArchitecture.mbcStage.inUse,
-                                         StageType::MBC);
+    return setDpChannels_l<dp_fx::DPMbc>(channels, StageType::MBC);
 }
 
 RetCode DynamicsProcessingContext::setPreEqBand(
         const std::vector<DynamicsProcessing::EqBandConfig>& bands) {
-    RETURN_VALUE_IF(!mEngineArchitecture.preEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
-                    "preEqNotInUse");
     RETURN_VALUE_IF(
             !validateBandConfig(bands, mChannelCount, mEngineArchitecture.preEqStage.bandCount),
             RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
@@ -146,8 +141,6 @@
 
 RetCode DynamicsProcessingContext::setPostEqBand(
         const std::vector<DynamicsProcessing::EqBandConfig>& bands) {
-    RETURN_VALUE_IF(!mEngineArchitecture.postEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
-                    "postEqNotInUse");
     RETURN_VALUE_IF(
             !validateBandConfig(bands, mChannelCount, mEngineArchitecture.postEqStage.bandCount),
             RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
@@ -156,8 +149,6 @@
 
 RetCode DynamicsProcessingContext::setMbcBand(
         const std::vector<DynamicsProcessing::MbcBandConfig>& bands) {
-    RETURN_VALUE_IF(!mEngineArchitecture.mbcStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
-                    "mbcNotInUse");
     RETURN_VALUE_IF(
             !validateBandConfig(bands, mChannelCount, mEngineArchitecture.mbcStage.bandCount),
             RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
@@ -166,8 +157,6 @@
 
 RetCode DynamicsProcessingContext::setLimiter(
         const std::vector<DynamicsProcessing::LimiterConfig>& limiters) {
-    RETURN_VALUE_IF(!mEngineArchitecture.limiterInUse, RetCode::ERROR_ILLEGAL_PARAMETER,
-                    "limiterNotInUse");
     RETURN_VALUE_IF(!validateLimiterConfig(limiters, mChannelCount),
                     RetCode::ERROR_ILLEGAL_PARAMETER, "limiterConfigNotValid");
     return setBands_l<DynamicsProcessing::LimiterConfig>(limiters, StageType::LIMITER);
@@ -419,9 +408,7 @@
         }
         freqs[band.band] = band.cutoffFrequencyHz;
     }
-    return std::is_sorted(freqs.begin(), freqs.end(), [](const auto& a, const auto& b) {
-        return a.second <= b.second; //index is already sorted as map key
-    });
+    return true;
 }
 
 bool DynamicsProcessingContext::validateLimiterConfig(
@@ -442,17 +429,10 @@
 
 template <typename D>
 RetCode DynamicsProcessingContext::setDpChannels_l(
-        const std::vector<DynamicsProcessing::ChannelConfig>& channels, bool stageInUse,
-        StageType type) {
+        const std::vector<DynamicsProcessing::ChannelConfig>& channels, StageType type) {
     RetCode ret = RetCode::SUCCESS;
     std::unordered_set<int> channelSet;
 
-    if (!stageInUse) {
-        LOG(WARNING) << __func__ << " not in use " << ::android::internal::ToString(channels);
-        return RetCode::ERROR_ILLEGAL_PARAMETER;
-    }
-
-    RETURN_VALUE_IF(!stageInUse, RetCode::ERROR_ILLEGAL_PARAMETER, "stageNotInUse");
     for (auto& it : channels) {
         if (0 != channelSet.count(it.channel)) {
             LOG(WARNING) << __func__ << " duplicated channel " << it.channel;
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
index a059dd0..ce657db 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
@@ -99,7 +99,7 @@
     dp_fx::DPEq* getEqWithType_l(StageType type, int ch);
     template <typename D>
     RetCode setDpChannels_l(const std::vector<DynamicsProcessing::ChannelConfig>& channels,
-                            bool stageInUse, StageType type);
+                            StageType type);
     template <typename T /* BandConfig */>
     RetCode setBands_l(const std::vector<T>& bands, StageType type);
     RetCode setDpChannelBand_l(const std::any& anyConfig, StageType type,
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 44ea2a4..3ae3edc 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -19,6 +19,7 @@
 #define LOG_TAG "ReverbContext"
 #include <android-base/logging.h>
 #include <Utils.h>
+#include <audio_utils/primitives.h>
 
 #include "ReverbContext.h"
 #include "VectorArithmetic.h"
@@ -347,6 +348,15 @@
             mCommon.output.base.channelMask);
     int frameCount = mCommon.input.frameCount;
 
+    if (mBypass) {
+        if (isAuxiliary()) {
+            memset(out, 0, getOutputFrameSize() * frameCount);
+        } else {
+            memcpy_to_float_from_float_with_clamping(out, in, samples, 1);
+        }
+        return {STATUS_OK, samples, outChannels * frameCount};
+    }
+
     // Reverb only effects the stereo channels in multichannel source.
     if (channels < 1 || channels > LVM_MAX_CHANNELS) {
         LOG(ERROR) << __func__ << " process invalid PCM channels " << channels;
diff --git a/media/libeffects/visualizer/aidl/Visualizer.cpp b/media/libeffects/visualizer/aidl/Visualizer.cpp
index 9c2b71e..9b493d4 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.cpp
+++ b/media/libeffects/visualizer/aidl/Visualizer.cpp
@@ -72,7 +72,7 @@
                           .uuid = getEffectImplUuidVisualizer(),
                           .proxy = std::nullopt},
                    .flags = {.type = Flags::Type::INSERT,
-                             .insert = Flags::Insert::LAST,
+                             .insert = Flags::Insert::FIRST,
                              .volume = Flags::Volume::NONE},
                    .name = VisualizerImpl::kEffectName,
                    .implementor = "The Android Open Source Project"},
diff --git a/media/libheif/OWNERS b/media/libheif/OWNERS
new file mode 100644
index 0000000..a61ad21
--- /dev/null
+++ b/media/libheif/OWNERS
@@ -0,0 +1,2 @@
+include platform/frameworks/av:/media/janitors/avic_OWNERS
+include platform/frameworks/av:/media/janitors/codec_OWNERS
\ No newline at end of file
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 840897f..9cd0e6e 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -81,9 +81,6 @@
 cc_library_shared {
     name: "libmedia_omx",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index 649f813..b5867a6 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -30,9 +30,6 @@
     name: "libmedia_helper",
     vendor_available: true,
     min_sdk_version: "29",
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
     srcs: [
         "AudioParameter.cpp",
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index 507da29..5ff5a33 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -44,7 +44,7 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-playback+bugs@google.com",
         ],
         componentid: 155276,
         hotlists: [
@@ -130,7 +130,6 @@
         "libplayerservice_datasource",
     ],
     shared_libs: [
-        "libmediaplayerservice",
         "libdatasource",
         "libdrmframework",
         "libstagefright_httplive",
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 1593aa0..e06efac 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -7575,6 +7575,22 @@
             return true;
         }
 
+        // When ACodec receives an error event in LoadedToIdleState, it does not release the
+        // allocated buffers, which causes a gralloc buffer leak. Release these buffers first,
+        // then process the error event.
+        case OMX_EventError:
+        {
+            if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
+                mCodec->freeBuffersOnPort(kPortIndexInput);
+            }
+
+            if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
+                mCodec->freeBuffersOnPort(kPortIndexOutput);
+            }
+
+            return BaseState::onOMXEvent(event, data1, data2);
+        }
+
         default:
             return BaseState::onOMXEvent(event, data1, data2);
     }
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 886285e..c9a2eea 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -318,6 +318,10 @@
         "aconfig_mediacodec_flags_c_lib",
     ],
 
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
+
     static_libs: [
         "android.media.codec-aconfig-cc",
         "libstagefright_esds",
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 4e378d4..0401e82 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -3975,6 +3975,15 @@
                     switch (mState) {
                         case INITIALIZING:
                         {
+                            // Resource error during INITIALIZING state needs to be logged
+                            // through metrics, to be able to track such occurrences.
+                            if (isResourceError(err)) {
+                                mediametrics_setInt32(mMetricsHandle, kCodecError, err);
+                                mediametrics_setCString(mMetricsHandle, kCodecErrorState,
+                                                        stateString(mState).c_str());
+                                flushMediametrics();
+                                initMediametrics();
+                            }
                             setState(UNINITIALIZED);
                             break;
                         }
@@ -4912,8 +4921,8 @@
                 if (flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
                     mFlags |= kFlagUseCryptoAsync;
                     if ((mFlags & kFlagUseBlockModel)) {
-                        ALOGW("CrytoAsync not yet enabled for block model,\
-                                falling back to normal");
+                        ALOGW("CrytoAsync not yet enabled for block model, "
+                                "falling back to normal");
                     }
                 }
             }
@@ -4970,8 +4979,7 @@
 
             mDescrambler = static_cast<IDescrambler *>(descrambler);
             mBufferChannel->setDescrambler(mDescrambler);
-            if ((mFlags & kFlagUseCryptoAsync) &&
-                mCrypto  && (mDomain == DOMAIN_VIDEO)) {
+            if ((mFlags & kFlagUseCryptoAsync) && mCrypto) {
                 // set kFlagUseCryptoAsync but do-not use this for block model
                 // this is to propagate the error in onCryptoError()
                 // TODO (b/274628160): Enable Use of CONFIG_FLAG_USE_CRYPTO_ASYNC
@@ -6269,15 +6277,8 @@
         cryptoInfo->setInt32("secure", mFlags & kFlagIsSecure);
         sp<RefBase> obj;
         if (msg->findObject("cryptoInfos", &obj)) {
-            sp<CryptoInfosWrapper> infos{(CryptoInfosWrapper*)obj.get()};
-            sp<CryptoInfosWrapper> asyncInfos{
-                    new CryptoInfosWrapper(std::vector<std::unique_ptr<CodecCryptoInfo>>())};
-            for (std::unique_ptr<CodecCryptoInfo> &info : infos->value) {
-                if (info) {
-                    asyncInfos->value.emplace_back(new CryptoAsync::CryptoAsyncInfo(info));
-                }
-            }
-            buffer->meta()->setObject("cryptoInfos", asyncInfos);
+            // this object is a standalone object when created (no copy required here)
+            buffer->meta()->setObject("cryptoInfos", obj);
         } else {
             size_t key_len = (key != nullptr)? 16 : 0;
             size_t iv_len = (iv != nullptr)? 16 : 0;
@@ -6416,7 +6417,6 @@
             }
         }
         if (mCryptoAsync) {
-            // TODO b/316565675 - enable async path for audio
             // prepare a message and enqueue
             sp<AMessage> cryptoInfo = new AMessage();
             buildCryptoInfoAMessage(cryptoInfo, CryptoAsync::kActionDecrypt);
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index aaf7465..1008445 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -249,6 +249,11 @@
         sampleMetaData.setInt32(kKeyIsMuxerData, 1);
     }
 
+    if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
+        sampleMetaData.setInt32(kKeyIsCodecConfig, true);
+        ALOGV("BUFFER_FLAG_CODEC_CONFIG");
+    }
+
     if (flags & MediaCodec::BUFFER_FLAG_EOS) {
         sampleMetaData.setInt32(kKeyIsEndOfStream, 1);
         ALOGV("BUFFER_FLAG_EOS");
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 604dcb0..714e312 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -111,8 +111,9 @@
         }
     }
 
-    int finalUsage = usage | consumerUsage;
-    ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = %#x", usage, consumerUsage, finalUsage);
+    uint64_t finalUsage = (uint32_t) usage | (uint32_t) consumerUsage;
+    ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = 0x%" PRIx64,
+            usage, consumerUsage, finalUsage);
     err = native_window_set_usage(nativeWindow, finalUsage);
     if (err != NO_ERROR) {
         ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
@@ -126,7 +127,7 @@
         return err;
     }
 
-    ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage %#x",
+    ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage 0x%" PRIx64,
             nativeWindow, width, height, format, rotation, finalUsage);
     return NO_ERROR;
 }
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
index eb9ac0f..bf29b1d 100644
--- a/media/libstagefright/VideoRenderQualityTracker.cpp
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -302,13 +302,6 @@
         mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
     }
 
-    // Now that a frame has been rendered, the previously skipped frames can be processed as skipped
-    // frames since the app is not skipping them to terminate playback.
-    for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
-        processMetricsForSkippedFrame(contentTimeUs);
-    }
-    mPendingSkippedFrameContentTimeUsList = {};
-
     // We can render a pending queued frame if it's the last frame of the video, so release it
     // immediately.
     if (contentTimeUs == mTunnelFrameQueuedContentTimeUs && mTunnelFrameQueuedContentTimeUs != -1) {
@@ -332,9 +325,25 @@
                   (long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs);
             break;
         }
+        // Process all skipped frames before the dropped frame.
+        while (!mPendingSkippedFrameContentTimeUsList.empty()) {
+            if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
+                break;
+            }
+            processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
+            mPendingSkippedFrameContentTimeUsList.pop_front();
+        }
         processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs,
                                       nextExpectedFrame.desiredRenderTimeUs);
     }
+    // Process all skipped frames before the rendered frame.
+    while (!mPendingSkippedFrameContentTimeUsList.empty()) {
+        if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
+            break;
+        }
+        processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
+        mPendingSkippedFrameContentTimeUsList.pop_front();
+    }
     processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
                                    nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs,
                                    freezeEventOut, judderEventOut);
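
The same drain loop now appears twice above, once before the dropped-frame path and once before the rendered-frame path; a helper along these lines (hypothetical name, not in the patch) captures the intent: account for every pending skipped frame whose content time precedes the frame about to be processed, so skip metrics are emitted in content order.

    // Hypothetical refactoring of the duplicated loop; behavior matches the code above.
    void VideoRenderQualityTracker::processPendingSkippedFramesBefore(int64_t contentTimeUs) {
        while (!mPendingSkippedFrameContentTimeUsList.empty() &&
               mPendingSkippedFrameContentTimeUsList.front() < contentTimeUs) {
            processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
            mPendingSkippedFrameContentTimeUsList.pop_front();
        }
    }
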
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index b3be24b..d50bc1e 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -139,6 +139,7 @@
                 <Limit name="bitrate" range="1-40000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.hevc.decoder" type="video/hevc" variant="slow-cpu,!slow-cpu">
@@ -160,6 +161,7 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp8.decoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
@@ -178,6 +180,7 @@
                 <Limit name="bitrate" range="1-40000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp9.decoder" type="video/x-vnd.on2.vp9" variant="slow-cpu,!slow-cpu">
@@ -197,6 +200,7 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
@@ -216,6 +220,7 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Feature name="low-latency" />
             <Attribute name="software-codec" />
         </MediaCodec>
@@ -235,6 +240,7 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Feature name="low-latency" />
             <Attribute name="software-codec" />
         </MediaCodec>
@@ -353,6 +359,7 @@
                 <Limit name="bitrate" range="1-20000000" />
             </Variant>
             <Feature name="bitrate-modes" value="VBR,CBR" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.hevc.encoder" type="video/hevc" variant="!slow-cpu">
@@ -367,6 +374,7 @@
             <Limit name="complexity" range="0-10"  default="0" />
             <Limit name="quality" range="0-100"  default="80" />
             <Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp9.encoder" type="video/x-vnd.on2.vp9" variant="!slow-cpu">
@@ -379,6 +387,7 @@
             <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="bitrate-modes" value="VBR,CBR" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.av1.encoder" type="video/av01" enabled="false" minsdk="34" variant="slow-cpu,!slow-cpu">
@@ -397,6 +406,7 @@
             <Limit name="quality" range="0-100"  default="80" />
             <Limit name="complexity" range="0-5"  default="0" />
             <Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
     </Encoders>
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 79ab009..630817c 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -20,9 +20,6 @@
 cc_library_shared {
     name: "libstagefright_omx",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
@@ -218,9 +215,6 @@
 cc_library_shared {
     name: "libstagefright_omx_utils",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
     srcs: ["OMXUtils.cpp"],
     export_include_dirs: [
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 22b13f6..bb850ca 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -84,9 +84,6 @@
     name: "librenderfright",
     defaults: ["librenderfright_defaults"],
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     cflags: [
diff --git a/media/libstagefright/rtsp/fuzzer/Android.bp b/media/libstagefright/rtsp/fuzzer/Android.bp
index a2791ba..ff64af5 100644
--- a/media/libstagefright/rtsp/fuzzer/Android.bp
+++ b/media/libstagefright/rtsp/fuzzer/Android.bp
@@ -29,11 +29,19 @@
     header_libs: [
         "libstagefright_rtsp_headers",
     ],
-    fuzz_config:{
+    fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-playback@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "This fuzzer targets the APIs of libstagefright_rtsp",
+        vector: "local_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -44,7 +52,7 @@
     ],
     defaults: [
         "libstagefright_rtsp_fuzzer_defaults",
-    ]
+    ],
 }
 
 cc_fuzz {
@@ -55,7 +63,7 @@
     defaults: [
         "libstagefright_rtsp_fuzzer_defaults",
     ],
-    shared_libs:[
+    shared_libs: [
         "libandroid_net",
         "libbase",
         "libstagefright",
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
index 2bcfd67..43542c5 100644
--- a/media/libstagefright/tests/fuzzers/Android.bp
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -32,6 +32,15 @@
         "liblog",
         "media_permission-aidl-cpp",
     ],
+    fuzz_config: {
+        componentid: 42195,
+        hotlists: ["4593311"],
+        description: "The fuzzer targets the APIs of libstagefright",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
+    },
 }
 
 cc_fuzz {
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
index 70d73c8..5ac2a54 100644
--- a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
@@ -13,94 +13,221 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-// Authors: corbin.souffrant@leviathansecurity.com
-//          dylan.katz@leviathansecurity.com
 
-#include <MediaMuxerFuzzer.h>
-#include <cutils/ashmem.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <media/stagefright/MediaMuxer.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 
 namespace android {
+const uint8_t kMinSize = 0;
+const uint8_t kMinTrackCount = 0;
 
-// Can't seem to get setBuffer or setString working. It always segfaults on a
-// null pointer read or memleaks. So that functionality is missing.
-void createMessage(AMessage *msg, FuzzedDataProvider *fdp) {
-  size_t count = fdp->ConsumeIntegralInRange<size_t>(0, 32);
-  while (fdp->remaining_bytes() > 0 && count > 0) {
-    uint8_t function_id =
-        fdp->ConsumeIntegralInRange<uint8_t>(0, amessage_setvals.size() - 1);
-    amessage_setvals[function_id](msg, fdp);
-    count--;
-  }
+enum kBufferFlags { BUFFER_FLAG_SYNCFRAME = 1, BUFFER_FLAG_CODECCONFIG = 2, BUFFER_FLAG_EOS = 4 };
+
+constexpr char kMuxerFile[] = "MediaMuxer";
+
+const std::string kAudioMimeTypes[] = {
+        MEDIA_MIMETYPE_AUDIO_AMR_NB,
+        MEDIA_MIMETYPE_AUDIO_AMR_WB,
+        MEDIA_MIMETYPE_AUDIO_MPEG,
+        MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+        MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+        MEDIA_MIMETYPE_AUDIO_MIDI,
+        MEDIA_MIMETYPE_AUDIO_AAC,
+        MEDIA_MIMETYPE_AUDIO_QCELP,
+        MEDIA_MIMETYPE_AUDIO_VORBIS,
+        MEDIA_MIMETYPE_AUDIO_OPUS,
+        MEDIA_MIMETYPE_AUDIO_G711_ALAW,
+        MEDIA_MIMETYPE_AUDIO_G711_MLAW,
+        MEDIA_MIMETYPE_AUDIO_RAW,
+        MEDIA_MIMETYPE_AUDIO_FLAC,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS,
+        MEDIA_MIMETYPE_AUDIO_MSGSM,
+        MEDIA_MIMETYPE_AUDIO_AC3,
+        MEDIA_MIMETYPE_AUDIO_EAC3,
+        MEDIA_MIMETYPE_AUDIO_EAC3_JOC,
+        MEDIA_MIMETYPE_AUDIO_AC4,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L3,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L4,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L3,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L4,
+        MEDIA_MIMETYPE_AUDIO_SCRAMBLED,
+        MEDIA_MIMETYPE_AUDIO_ALAC,
+        MEDIA_MIMETYPE_AUDIO_WMA,
+        MEDIA_MIMETYPE_AUDIO_MS_ADPCM,
+        MEDIA_MIMETYPE_AUDIO_DVI_IMA_ADPCM,
+        MEDIA_MIMETYPE_AUDIO_DTS,
+        MEDIA_MIMETYPE_AUDIO_DTS_HD,
+        MEDIA_MIMETYPE_AUDIO_DTS_HD_MA,
+        MEDIA_MIMETYPE_AUDIO_DTS_UHD,
+        MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1,
+        MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2,
+        MEDIA_MIMETYPE_AUDIO_EVRC,
+        MEDIA_MIMETYPE_AUDIO_EVRCB,
+        MEDIA_MIMETYPE_AUDIO_EVRCWB,
+        MEDIA_MIMETYPE_AUDIO_EVRCNW,
+        MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS,
+        MEDIA_MIMETYPE_AUDIO_APTX,
+        MEDIA_MIMETYPE_AUDIO_DRA,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_1_0,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_0,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_1,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD,
+        MEDIA_MIMETYPE_AUDIO_AAC_MP4,
+        MEDIA_MIMETYPE_AUDIO_AAC_MAIN,
+        MEDIA_MIMETYPE_AUDIO_AAC_LC,
+        MEDIA_MIMETYPE_AUDIO_AAC_SSR,
+        MEDIA_MIMETYPE_AUDIO_AAC_LTP,
+        MEDIA_MIMETYPE_AUDIO_AAC_HE_V1,
+        MEDIA_MIMETYPE_AUDIO_AAC_SCALABLE,
+        MEDIA_MIMETYPE_AUDIO_AAC_ERLC,
+        MEDIA_MIMETYPE_AUDIO_AAC_LD,
+        MEDIA_MIMETYPE_AUDIO_AAC_HE_V2,
+        MEDIA_MIMETYPE_AUDIO_AAC_ELD,
+        MEDIA_MIMETYPE_AUDIO_AAC_XHE,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADIF,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_MAIN,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SSR,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LTP,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SCALABLE,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ERLC,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LD,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ELD,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_XHE,
+        MEDIA_MIMETYPE_AUDIO_AAC_LATM_LC,
+        MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V1,
+        MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V2,
+        MEDIA_MIMETYPE_AUDIO_IEC61937,
+        MEDIA_MIMETYPE_AUDIO_IEC60958,
+};
+
+const std::string kVideoMimeTypes[] = {
+        MEDIA_MIMETYPE_VIDEO_VP8,       MEDIA_MIMETYPE_VIDEO_VP9,
+        MEDIA_MIMETYPE_VIDEO_AV1,       MEDIA_MIMETYPE_VIDEO_AVC,
+        MEDIA_MIMETYPE_VIDEO_HEVC,      MEDIA_MIMETYPE_VIDEO_MPEG4,
+        MEDIA_MIMETYPE_VIDEO_H263,      MEDIA_MIMETYPE_VIDEO_MPEG2,
+        MEDIA_MIMETYPE_VIDEO_RAW,       MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
+        MEDIA_MIMETYPE_VIDEO_SCRAMBLED, MEDIA_MIMETYPE_VIDEO_DIVX,
+        MEDIA_MIMETYPE_VIDEO_DIVX3,     MEDIA_MIMETYPE_VIDEO_XVID,
+        MEDIA_MIMETYPE_VIDEO_MJPEG,
+};
+
+void getSampleAudioFormat(FuzzedDataProvider& fdp, AMessage* format) {
+    std::string mimeType = fdp.PickValueInArray(kAudioMimeTypes);
+    format->setString("mime", mimeType.c_str(), mimeType.length());
+    format->setInt32("sample-rate", fdp.ConsumeIntegral<int32_t>());
+    format->setInt32("channel-count", fdp.ConsumeIntegral<int32_t>());
+}
+
+void getSampleVideoFormat(FuzzedDataProvider& fdp, AMessage* format) {
+    std::string mimeType = fdp.PickValueInArray(kVideoMimeTypes);
+    format->setString("mime", mimeType.c_str(), mimeType.length());
+    format->setInt32("height", fdp.ConsumeIntegral<int32_t>());
+    format->setInt32("width", fdp.ConsumeIntegral<int32_t>());
+    format->setInt32("time-lapse-fps", fdp.ConsumeIntegral<int32_t>());
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
-  FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    FuzzedDataProvider fdp(data, size);
 
-  size_t data_size = fdp.ConsumeIntegralInRange<size_t>(0, size);
-  int fd = ashmem_create_region("mediamuxer_fuzz_region", data_size);
-  if (fd < 0)
+    // memfd_create() creates an anonymous file and returns a file
+    // descriptor that refers to it. MFD_ALLOW_SEALING allows sealing
+    // operations on this file.
+    int32_t fd = memfd_create(kMuxerFile, MFD_ALLOW_SEALING);
+    if (fd == -1) {
+        ALOGE("memfd_create failed: %s", strerror(errno));
+        return 0;
+    }
+
+    auto outputFormat = (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(
+            MediaMuxer::OutputFormat::OUTPUT_FORMAT_MPEG_4,
+            MediaMuxer::OutputFormat::OUTPUT_FORMAT_LIST_END);
+
+    sp<MediaMuxer> mMuxer = MediaMuxer::create(fd, outputFormat);
+    if (mMuxer == nullptr) {
+        close(fd);
+        return 0;
+    }
+
+    // Consume at most 80% of the input data for the buffers passed to writeSampleData(),
+    // so that the data is not completely exhausted and the remaining 20% is left for
+    // fuzzing the other APIs.
+    const size_t kMaxSize = (size * 80) / 100;
+    while (fdp.remaining_bytes()) {
+        auto invokeMediaMuxerAPI = fdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    // Using 'return' here due to a timeout bug present in OGGWriter.cpp
+                    // (b/310316183).
+                    if (outputFormat == MediaMuxer::OutputFormat::OUTPUT_FORMAT_OGG) {
+                        return;
+                    }
+
+                    sp<AMessage> format = sp<AMessage>::make();
+                    fdp.ConsumeBool() ? getSampleAudioFormat(fdp, format.get())
+                                      : getSampleVideoFormat(fdp, format.get());
+
+                    mMuxer->addTrack(fdp.ConsumeBool() ? format : nullptr);
+                },
+                [&]() {
+                    mMuxer->setLocation(fdp.ConsumeIntegral<int32_t>() /* latitude */,
+                                        fdp.ConsumeIntegral<int32_t>() /* longitude */);
+                },
+                [&]() { mMuxer->setOrientationHint(fdp.ConsumeIntegral<int32_t>() /* degrees */); },
+                [&]() { mMuxer->start(); },
+                [&]() {
+                    std::vector<uint8_t> sample = fdp.ConsumeBytes<uint8_t>(
+                            fdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize));
+                    sp<ABuffer> buffer = sp<ABuffer>::make(sample.data(), sample.size());
+
+                    size_t offset = fdp.ConsumeIntegralInRange<size_t>(kMinSize, sample.size());
+                    size_t length =
+                            fdp.ConsumeIntegralInRange<size_t>(kMinSize, buffer->size() - offset);
+                    buffer->setRange(offset, length);
+
+                    sp<AMessage> meta = buffer->meta();
+                    meta->setInt64("sample-file-offset", fdp.ConsumeIntegral<int64_t>());
+                    meta->setInt64("last-sample-index-in-chunk", fdp.ConsumeIntegral<int64_t>());
+
+                    uint32_t flags = 0;
+                    if (fdp.ConsumeBool()) {
+                        flags |= kBufferFlags::BUFFER_FLAG_SYNCFRAME;
+                    }
+                    if (fdp.ConsumeBool()) {
+                        flags |= kBufferFlags::BUFFER_FLAG_CODECCONFIG;
+                    }
+                    if (fdp.ConsumeBool()) {
+                        flags |= kBufferFlags::BUFFER_FLAG_EOS;
+                    }
+
+                    size_t trackIndex = fdp.ConsumeBool()
+                                                ? fdp.ConsumeIntegralInRange<size_t>(
+                                                          kMinTrackCount, mMuxer->getTrackCount())
+                                                : fdp.ConsumeIntegral<size_t>();
+                    int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
+                    mMuxer->writeSampleData(fdp.ConsumeBool() ? buffer : nullptr, trackIndex,
+                                            timeUs, flags);
+                },
+                [&]() {
+                    mMuxer->getTrackFormat(
+                            fdp.ConsumeBool() ? fdp.ConsumeIntegralInRange<size_t>(
+                                                        kMinTrackCount, mMuxer->getTrackCount())
+                                              : fdp.ConsumeIntegral<size_t>() /* idx */);
+                },
+                [&]() { mMuxer->stop(); },
+        });
+
+        invokeMediaMuxerAPI();
+    }
+
+    close(fd);
     return 0;
-
-  uint8_t *sh_data = static_cast<uint8_t *>(
-      mmap(NULL, data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
-  if (sh_data == MAP_FAILED)
-    return 0;
-
-  MediaMuxer::OutputFormat format =
-      (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(0, 4);
-  sp<MediaMuxer> mMuxer = MediaMuxer::create(fd, format);
-  if (mMuxer == nullptr) {
-    return 0;
-  }
-
-  while (fdp.remaining_bytes() > 1) {
-    switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 4)) {
-    case 0: {
-      // For some reason it only likes mp4s here...
-      if (format == 1 || format == 4)
-        break;
-
-      sp<AMessage> a_format(new AMessage);
-      createMessage(a_format.get(), &fdp);
-      mMuxer->addTrack(a_format);
-      break;
-    }
-    case 1: {
-      mMuxer->start();
-      break;
-    }
-    case 2: {
-      int degrees = fdp.ConsumeIntegral<int>();
-      mMuxer->setOrientationHint(degrees);
-      break;
-    }
-    case 3: {
-      int latitude = fdp.ConsumeIntegral<int>();
-      int longitude = fdp.ConsumeIntegral<int>();
-      mMuxer->setLocation(latitude, longitude);
-      break;
-    }
-    case 4: {
-      size_t buf_size = fdp.ConsumeIntegralInRange<size_t>(0, data_size);
-      sp<ABuffer> a_buffer(new ABuffer(buf_size));
-
-      size_t trackIndex = fdp.ConsumeIntegral<size_t>();
-      int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
-      uint32_t flags = fdp.ConsumeIntegral<uint32_t>();
-      mMuxer->writeSampleData(a_buffer, trackIndex, timeUs, flags);
-    }
-    }
-  }
-
-  if (fdp.ConsumeBool())
-    mMuxer->stop();
-
-  munmap(sh_data, data_size);
-  close(fd);
-  return 0;
 }
 } // namespace android
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
deleted file mode 100644
index 7d4421d..0000000
--- a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-// Authors: corbin.souffrant@leviathansecurity.com
-//          dylan.katz@leviathansecurity.com
-
-#pragma once
-
-#include <fuzzer/FuzzedDataProvider.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-namespace android {
-
-// Mappings vectors are the list of attributes that the MediaMuxer
-// class looks for in the message.
-static std::vector<const char *> floatMappings{
-    "capture-rate",
-    "time-lapse-fps",
-    "frame-rate",
-};
-
-static std::vector<const char *> int64Mappings{
-    "exif-offset",    "exif-size", "target-time",
-    "thumbnail-time", "timeUs",    "durationUs",
-};
-
-static std::vector<const char *> int32Mappings{"loop",
-                                               "time-scale",
-                                               "crypto-mode",
-                                               "crypto-default-iv-size",
-                                               "crypto-encrypted-byte-block",
-                                               "crypto-skip-byte-block",
-                                               "frame-count",
-                                               "max-bitrate",
-                                               "pcm-big-endian",
-                                               "temporal-layer-count",
-                                               "temporal-layer-id",
-                                               "thumbnail-width",
-                                               "thumbnail-height",
-                                               "track-id",
-                                               "valid-samples",
-                                               "color-format",
-                                               "ca-system-id",
-                                               "is-sync-frame",
-                                               "bitrate",
-                                               "max-bitrate",
-                                               "width",
-                                               "height",
-                                               "sar-width",
-                                               "sar-height",
-                                               "display-width",
-                                               "display-height",
-                                               "is-default",
-                                               "tile-width",
-                                               "tile-height",
-                                               "grid-rows",
-                                               "grid-cols",
-                                               "rotation-degrees",
-                                               "channel-count",
-                                               "sample-rate",
-                                               "bits-per-sample",
-                                               "channel-mask",
-                                               "encoder-delay",
-                                               "encoder-padding",
-                                               "is-adts",
-                                               "frame-rate",
-                                               "max-height",
-                                               "max-width",
-                                               "max-input-size",
-                                               "haptic-channel-count",
-                                               "pcm-encoding",
-                                               "aac-profile"};
-
-static const std::vector<std::function<void(AMessage *, FuzzedDataProvider *)>>
-    amessage_setvals = {
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setRect("crop", fdp->ConsumeIntegral<int32_t>(),
-                       fdp->ConsumeIntegral<int32_t>(),
-                       fdp->ConsumeIntegral<int32_t>(),
-                       fdp->ConsumeIntegral<int32_t>());
-        },
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setFloat(floatMappings[fdp->ConsumeIntegralInRange<size_t>(
-                            0, floatMappings.size() - 1)],
-                        fdp->ConsumeFloatingPoint<float>());
-        },
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setInt64(int64Mappings[fdp->ConsumeIntegralInRange<size_t>(
-                            0, int64Mappings.size() - 1)],
-                        fdp->ConsumeIntegral<int64_t>());
-        },
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setInt32(int32Mappings[fdp->ConsumeIntegralInRange<size_t>(
-                            0, int32Mappings.size() - 1)],
-                        fdp->ConsumeIntegral<int32_t>());
-        }};
-} // namespace android
diff --git a/media/libstagefright/timedtext/test/fuzzer/Android.bp b/media/libstagefright/timedtext/test/fuzzer/Android.bp
index 6590ebb..8724d51 100644
--- a/media/libstagefright/timedtext/test/fuzzer/Android.bp
+++ b/media/libstagefright/timedtext/test/fuzzer/Android.bp
@@ -48,8 +48,16 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-playback@google.com",
         ],
-        componentid: 155276,
+        componentid: 42195,
+        hotlists: [
+            "4593311",
+        ],
+        description: "This fuzzer targets the APIs of libstagefright_timedtext",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
index 7d1442b..e20a08d 100644
--- a/media/libstagefright/webm/WebmFrameThread.cpp
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -354,6 +354,17 @@
         }
 
         MetaDataBase &md = buffer->meta_data();
+
+        if (mType == kVideoType) {
+            int32_t isCodecConfig = 0;
+            if (md.findInt32(kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig) {
+                ALOGI("ignoring CSD for video track");
+                buffer->release();
+                buffer = NULL;
+                continue;
+            }
+        }
+
         CHECK(md.findInt64(kKeyTime, &timestampUs));
         if (mStartTimeUs == kUninitialized) {
             mStartTimeUs = timestampUs;
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 2f204f9..2c5e81a 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -16,9 +16,6 @@
 cc_library_shared {
     name: "libstagefright_xmlparser",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
diff --git a/media/module/bqhelper/Android.bp b/media/module/bqhelper/Android.bp
index c4dadd0..f9b7dea 100644
--- a/media/module/bqhelper/Android.bp
+++ b/media/module/bqhelper/Android.bp
@@ -69,9 +69,6 @@
     name: "libstagefright_bufferqueue_helper",
     defaults: ["libstagefright_bufferqueue-defaults"],
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     min_sdk_version: "29",
 
     shared_libs: [ "libgui" ],
diff --git a/media/module/bufferpool/2.0/AccessorImpl.cpp b/media/module/bufferpool/2.0/AccessorImpl.cpp
index 1d2562e..202d803 100644
--- a/media/module/bufferpool/2.0/AccessorImpl.cpp
+++ b/media/module/bufferpool/2.0/AccessorImpl.cpp
@@ -609,7 +609,7 @@
         }
         if (ret == false) {
             ALOGW("buffer status message processing failure - message : %d connection : %lld",
-                  message.newStatus, (long long)message.connectionId);
+                  (int)message.newStatus, (long long)message.connectionId);
         }
     }
     messages.clear();
diff --git a/media/module/bufferpool/2.0/Android.bp b/media/module/bufferpool/2.0/Android.bp
index 930b026..bdab103 100644
--- a/media/module/bufferpool/2.0/Android.bp
+++ b/media/module/bufferpool/2.0/Android.bp
@@ -60,7 +60,4 @@
     vendor_available: true,
     // TODO: b/147147992
     double_loadable: true,
-    vndk: {
-        enabled: true,
-    },
 }
diff --git a/media/module/foundation/Android.bp b/media/module/foundation/Android.bp
index dc8384d..edf4cb5 100644
--- a/media/module/foundation/Android.bp
+++ b/media/module/foundation/Android.bp
@@ -33,9 +33,6 @@
 cc_defaults {
     name: "libstagefright_foundation_defaults",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     host_supported: true,
     double_loadable: true,
 
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 9ec7700..3d873df 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -192,7 +192,6 @@
     header_libs: [
         "libstagefright_headers",
         "libmedia_headers",
-        "libstagefright_headers",
     ],
 
     shared_libs: [
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 5b7319a..e340b40 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -113,6 +113,7 @@
 
     export_shared_lib_headers: [
         "libpermission",
+        "packagemanager_aidl-cpp",
     ],
 
     required: [
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 97c80a8..a6783c7 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1922,10 +1922,11 @@
     if (mPrimaryHardwareDev == nullptr) {
         return 0;
     }
+    if (mInputBufferSizeOrderedDevs.empty()) {
+        return 0;
+    }
     mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE;
 
-    sp<DeviceHalInterface> dev = mPrimaryHardwareDev.load()->hwDevice();
-
     std::vector<audio_channel_mask_t> channelMasks = {channelMask};
     if (channelMask != AUDIO_CHANNEL_IN_MONO) {
         channelMasks.push_back(AUDIO_CHANNEL_IN_MONO);
@@ -1955,6 +1956,22 @@
 
     mHardwareStatus = AUDIO_HW_IDLE;
 
+    auto getInputBufferSize = [](const sp<DeviceHalInterface>& dev, audio_config_t config,
+                                 size_t* bytes) -> status_t {
+        if (!dev) {
+            return BAD_VALUE;
+        }
+        status_t result = dev->getInputBufferSize(&config, bytes);
+        if (result == BAD_VALUE) {
+            // Retry with the config suggested by the HAL.
+            result = dev->getInputBufferSize(&config, bytes);
+        }
+        if (result != OK || *bytes == 0) {
+            return BAD_VALUE;
+        }
+        return result;
+    };
+
     // Change parameters of the configuration each iteration until we find a
     // configuration that the device will support, or HAL suggests what it supports.
     audio_config_t config = AUDIO_CONFIG_INITIALIZER;
@@ -1966,16 +1983,15 @@
                 config.sample_rate = testSampleRate;
 
                 size_t bytes = 0;
-                audio_config_t loopConfig = config;
-                status_t result = dev->getInputBufferSize(&config, &bytes);
-                if (result == BAD_VALUE) {
-                    // Retry with the config suggested by the HAL.
-                    result = dev->getInputBufferSize(&config, &bytes);
+                ret = BAD_VALUE;
+                for (const AudioHwDevice* dev : mInputBufferSizeOrderedDevs) {
+                    ret = getInputBufferSize(dev->hwDevice(), config, &bytes);
+                    if (ret == OK) {
+                        break;
+                    }
                 }
-                if (result != OK || bytes == 0) {
-                    config = loopConfig;
-                    continue;
-                }
+                if (ret == BAD_VALUE) continue;
+
                 if (config.sample_rate != sampleRate || config.channel_mask != channelMask ||
                     config.format != format) {
                     uint32_t dstChannelCount = audio_channel_count_from_in_mask(channelMask);
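
The two hunks above make getInputBufferSize() query each HAL module in priority order, retrying a module once with the config it suggests before falling through to the next one. A minimal standalone sketch of that retry-then-fallback pattern, using a simplified stand-in for the device HAL interface rather than the real DeviceHalInterface:

    // Sketch of the retry-then-fallback query used by getInputBufferSize() above.
    // DeviceHal, Config and the status values are simplified stand-ins.
    #include <cstddef>
    #include <functional>
    #include <vector>

    enum Status { OK = 0, BAD_VALUE = -1 };

    struct Config { unsigned sampleRate; unsigned channelMask; };

    using DeviceHal = std::function<Status(Config* config, size_t* bytes)>;

    // Ask one device; if it rejects the config but writes back a suggestion,
    // retry once with that suggestion, then validate the returned size.
    static Status queryOne(const DeviceHal& dev, Config config, size_t* bytes) {
        Status result = dev(&config, bytes);
        if (result == BAD_VALUE) {
            result = dev(&config, bytes);  // retry with the HAL-suggested config
        }
        return (result != OK || *bytes == 0) ? BAD_VALUE : OK;
    }

    // Walk the modules in priority order and stop at the first usable answer.
    static Status queryAll(const std::vector<DeviceHal>& devs, Config config, size_t* bytes) {
        for (const auto& dev : devs) {
            if (queryOne(dev, config, bytes) == OK) return OK;
        }
        return BAD_VALUE;
    }
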
@@ -2160,6 +2176,13 @@
     }
 }
 
+void AudioFlinger::onHardError(std::set<audio_port_handle_t>& trackPortIds) {
+    ALOGI("releasing tracks because a hard error occurred on an I/O thread");
+    for (const auto portId : trackPortIds) {
+        AudioSystem::releaseOutput(portId);
+    }
+}
+
 // removeClient_l() must be called with AudioFlinger::clientMutex() held
 void AudioFlinger::removeClient_l(pid_t pid)
 {
@@ -2603,12 +2626,43 @@
     }
 
     mAudioHwDevs.add(handle, audioDevice);
+    if (strcmp(name, AUDIO_HARDWARE_MODULE_ID_STUB) != 0) {
+        mInputBufferSizeOrderedDevs.insert(audioDevice);
+    }
 
     ALOGI("loadHwModule() Loaded %s audio interface, handle %d", name, handle);
 
     return audioDevice;
 }
 
+// Sort AudioHwDevices so that getInputBufferSize() traverses them in the following order:
+// Primary, Usb, Bluetooth, A2DP, other modules, remote submix.
+/* static */
+bool AudioFlinger::inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs) {
+    static const std::map<std::string_view, int> kPriorities = {
+        { AUDIO_HARDWARE_MODULE_ID_PRIMARY, 0 }, { AUDIO_HARDWARE_MODULE_ID_USB, 1 },
+        { AUDIO_HARDWARE_MODULE_ID_BLUETOOTH, 2 }, { AUDIO_HARDWARE_MODULE_ID_A2DP, 3 },
+        { AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX, std::numeric_limits<int>::max() }
+    };
+
+    const std::string_view lhsName = lhs->moduleName();
+    const std::string_view rhsName = rhs->moduleName();
+
+    auto lhsPriority = std::numeric_limits<int>::max() - 1;
+    if (const auto lhsIt = kPriorities.find(lhsName); lhsIt != kPriorities.end()) {
+        lhsPriority = lhsIt->second;
+    }
+    auto rhsPriority = std::numeric_limits<int>::max() - 1;
+    if (const auto rhsIt = kPriorities.find(rhsName); rhsIt != kPriorities.end()) {
+        rhsPriority = rhsIt->second;
+    }
+
+    if (lhsPriority != rhsPriority) {
+        return lhsPriority < rhsPriority;
+    }
+    return lhsName < rhsName;
+}
+
 // ----------------------------------------------------------------------------
 
 uint32_t AudioFlinger::getPrimaryOutputSamplingRate() const
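
The ordering that inputBufferSizeDevsCmp() enforces can be exercised on its own; the sketch below substitutes plain module-name strings for AudioHwDevice pointers and assumes the usual literal values of the AUDIO_HARDWARE_MODULE_ID_* constants:

    // Standalone sketch of the module-priority ordering used above.
    #include <initializer_list>
    #include <iostream>
    #include <limits>
    #include <map>
    #include <set>
    #include <string>
    #include <string_view>

    static int priorityOf(std::string_view name) {
        static const std::map<std::string_view, int> kPriorities = {
            {"primary", 0}, {"usb", 1}, {"bluetooth", 2}, {"a2dp", 3},
            {"r_submix", std::numeric_limits<int>::max()},
        };
        const auto it = kPriorities.find(name);
        // Unknown modules sort between A2DP and remote submix.
        return it != kPriorities.end() ? it->second : std::numeric_limits<int>::max() - 1;
    }

    static bool cmp(const std::string& lhs, const std::string& rhs) {
        const int lp = priorityOf(lhs), rp = priorityOf(rhs);
        return lp != rp ? lp < rp : lhs < rhs;  // ties broken by name
    }

    int main() {
        std::set<std::string, decltype(&cmp)> devs(cmp);
        for (const char* name : {"r_submix", "msd", "a2dp", "usb", "primary"}) devs.insert(name);
        for (const auto& name : devs) std::cout << name << ' ';  // primary usb a2dp msd r_submix
        std::cout << '\n';
    }
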
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 0f75d6e..719ff39 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -396,6 +396,8 @@
     void onSupportedLatencyModesChanged(
             audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes) final
             EXCLUDES_AudioFlinger_ClientMutex;
+    void onHardError(std::set<audio_port_handle_t>& trackPortIds) final
+            EXCLUDES_AudioFlinger_ClientMutex;
 
     // ---- end of IAfThreadCallback interface
 
@@ -629,6 +631,10 @@
     DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*> mAudioHwDevs
             GUARDED_BY(hardwareMutex()) {nullptr /* defValue */};
 
+    static bool inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs);
+    std::set<AudioHwDevice*, decltype(&inputBufferSizeDevsCmp)>
+            mInputBufferSizeOrderedDevs GUARDED_BY(hardwareMutex()) {inputBufferSizeDevsCmp};
+
      const sp<DevicesFactoryHalInterface> mDevicesFactoryHal =
              DevicesFactoryHalInterface::create();
      /* const */ sp<DevicesFactoryHalCallback> mDevicesFactoryHalCallback;  // set onFirstRef().
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 46a67e8..c2a58c6 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -115,9 +115,11 @@
             const sp<AudioIoDescriptor>& ioDesc,
             pid_t pid = 0) EXCLUDES_AudioFlinger_ClientMutex = 0;
     virtual void onNonOffloadableGlobalEffectEnable() EXCLUDES_AudioFlinger_Mutex = 0;
-    virtual void onSupportedLatencyModesChanged(
-            audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes)
+    virtual void onSupportedLatencyModesChanged(audio_io_handle_t output,
+                                                const std::vector<audio_latency_mode_t>& modes)
             EXCLUDES_AudioFlinger_ClientMutex = 0;
+
+    virtual void onHardError(std::set<audio_port_handle_t>& trackPortIds) = 0;
 };
 
 class IAfThreadBase : public virtual RefBase {
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 1d7c356..9bbddf6 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -2690,14 +2690,17 @@
             }
         }
 
-        // Set DIRECT flag if current thread is DirectOutputThread. This can
-        // happen when the playback is rerouted to direct output thread by
+        // Set DIRECT/OFFLOAD flag if current thread is DirectOutputThread/OffloadThread.
+        // This can happen when the playback is rerouted to direct output/offload thread by
         // dynamic audio policy.
         // Do NOT report the flag changes back to client, since the client
-        // doesn't explicitly request a direct flag.
+        // doesn't explicitly request a direct/offload flag.
         audio_output_flags_t trackFlags = *flags;
         if (mType == DIRECT) {
             trackFlags = static_cast<audio_output_flags_t>(trackFlags | AUDIO_OUTPUT_FLAG_DIRECT);
+        } else if (mType == OFFLOAD) {
+            trackFlags = static_cast<audio_output_flags_t>(trackFlags |
+                                   AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT);
         }
         *afTrackFlags = trackFlags;
 
@@ -2997,6 +3000,23 @@
     }
 }
 
+std::set<audio_port_handle_t> PlaybackThread::getTrackPortIds_l()
+{
+    std::set<int32_t> result;
+    for (const auto& t : mTracks) {
+        if (t->isExternalTrack()) {
+            result.insert(t->portId());
+        }
+    }
+    return result;
+}
+
+std::set<audio_port_handle_t> PlaybackThread::getTrackPortIds()
+{
+    audio_utils::lock_guard _l(mutex());
+    return getTrackPortIds_l();
+}
+
 String8 PlaybackThread::getParameters(const String8& keys)
 {
     audio_utils::lock_guard _l(mutex());
@@ -3050,9 +3070,9 @@
     mCallbackThread->resetDraining();
 }
 
-void PlaybackThread::onError()
+void PlaybackThread::onError(bool isHardError)
 {
-    mCallbackThread->setAsyncError();
+    mCallbackThread->setAsyncError(isHardError);
 }
 
 void PlaybackThread::onCodecFormatChanged(
@@ -3357,9 +3377,9 @@
         return NO_ERROR;
     } else {
         status_t status;
-        uint32_t frames;
+        uint64_t frames = 0;
         status = mOutput->getRenderPosition(&frames);
-        *dspFrames = (size_t)frames;
+        *dspFrames = (uint32_t)frames;
         return status;
     }
 }
@@ -5357,11 +5377,15 @@
     broadcast_l();
 }
 
-void PlaybackThread::onAsyncError()
+void PlaybackThread::onAsyncError(bool isHardError)
 {
+    auto allTrackPortIds = getTrackPortIds();
     for (int i = AUDIO_STREAM_SYSTEM; i < (int)AUDIO_STREAM_CNT; i++) {
         invalidateTracks((audio_stream_type_t)i);
     }
+    if (isHardError) {
+        mAfThreadCallback->onHardError(allTrackPortIds);
+    }
 }
 
 void MixerThread::threadLoop_mix()
@@ -5903,7 +5927,7 @@
                 vaf = v * sendLevel * (1. / MAX_GAIN_INT);
             }
 
-            track->setFinalVolume(vrf, vlf);
+            track->setFinalVolume(vlf, vrf);
 
             // Delegate volume control to effect in track effect chain if needed
             if (chain != 0 && chain->setVolume_l(&vl, &vr)) {
@@ -7099,7 +7123,7 @@
         mPlaybackThread(playbackThread),
         mWriteAckSequence(0),
         mDrainSequence(0),
-        mAsyncError(false)
+        mAsyncError(ASYNC_ERROR_NONE)
 {
 }
 
@@ -7113,7 +7137,7 @@
     while (!exitPending()) {
         uint32_t writeAckSequence;
         uint32_t drainSequence;
-        bool asyncError;
+        AsyncError asyncError;
 
         {
             audio_utils::unique_lock _l(mutex());
@@ -7134,7 +7158,7 @@
             drainSequence = mDrainSequence;
             mDrainSequence &= ~1;
             asyncError = mAsyncError;
-            mAsyncError = false;
+            mAsyncError = ASYNC_ERROR_NONE;
         }
         {
             const sp<PlaybackThread> playbackThread = mPlaybackThread.promote();
@@ -7145,8 +7169,8 @@
                 if (drainSequence & 1) {
                     playbackThread->resetDraining(drainSequence >> 1);
                 }
-                if (asyncError) {
-                    playbackThread->onAsyncError();
+                if (asyncError != ASYNC_ERROR_NONE) {
+                    playbackThread->onAsyncError(asyncError == ASYNC_ERROR_HARD);
                 }
             }
         }
@@ -7196,10 +7220,10 @@
     }
 }
 
-void AsyncCallbackThread::setAsyncError()
+void AsyncCallbackThread::setAsyncError(bool isHardError)
 {
     audio_utils::lock_guard _l(mutex());
-    mAsyncError = true;
+    mAsyncError = isHardError ? ASYNC_ERROR_HARD : ASYNC_ERROR_SOFT;
     mWaitWorkCV.notify_one();
 }
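
Taken together, the changes above let the stream callback distinguish soft from hard errors: setAsyncError() latches the severity, the callback thread consumes it and calls onAsyncError(isHardError), and a hard error additionally hands the affected ports to onHardError(). A condensed, self-contained sketch of that latch, with plain std::mutex/std::condition_variable in place of the audio_utils wrappers:

    // Sketch of the tri-state async-error latch used by AsyncCallbackThread above.
    #include <condition_variable>
    #include <cstdio>
    #include <mutex>
    #include <thread>

    enum AsyncError { ASYNC_ERROR_NONE, ASYNC_ERROR_SOFT, ASYNC_ERROR_HARD };

    class ErrorLatch {
    public:
        // Producer side: the stream callback reports an error, hard or soft.
        void setAsyncError(bool isHardError) {
            std::lock_guard<std::mutex> l(mMutex);
            mAsyncError = isHardError ? ASYNC_ERROR_HARD : ASYNC_ERROR_SOFT;
            mCond.notify_one();
        }

        // Consumer side: the callback thread picks up and clears the latched value.
        AsyncError take() {
            std::unique_lock<std::mutex> l(mMutex);
            mCond.wait(l, [this] { return mAsyncError != ASYNC_ERROR_NONE; });
            AsyncError e = mAsyncError;
            mAsyncError = ASYNC_ERROR_NONE;
            return e;
        }

    private:
        std::mutex mMutex;
        std::condition_variable mCond;
        AsyncError mAsyncError = ASYNC_ERROR_NONE;
    };

    int main() {
        ErrorLatch latch;
        std::thread consumer([&] {
            const AsyncError e = latch.take();
            std::printf("got %s error\n", e == ASYNC_ERROR_HARD ? "hard" : "soft");
        });
        latch.setAsyncError(/*isHardError=*/true);  // prints "got hard error"
        consumer.join();
    }
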
 
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index ddf0669..98e3298 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -946,7 +946,7 @@
     // StreamOutHalInterfaceCallback implementation
     virtual     void        onWriteReady();
     virtual     void        onDrainReady();
-    virtual     void        onError();
+    virtual     void        onError(bool /*isHardError*/);
 
 public: // AsyncCallbackThread
                 void        resetWriteBlocked(uint32_t sequence);
@@ -958,7 +958,7 @@
     virtual bool shouldStandby_l() REQUIRES(mutex(), ThreadBase_ThreadLoop);
     virtual void onAddNewTrack_l() REQUIRES(mutex());
 public:  // AsyncCallbackThread
-                void        onAsyncError(); // error reported by AsyncCallbackThread
+                void        onAsyncError(bool isHardError); // error reported by AsyncCallbackThread
 protected:
     // StreamHalInterfaceCodecFormatCallback implementation
                 void        onCodecFormatChanged(
@@ -1371,6 +1371,8 @@
     bool destroyTrack_l(const sp<IAfTrack>& track) final REQUIRES(mutex());
 
     void removeTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex());
+    std::set<audio_port_handle_t> getTrackPortIds_l() REQUIRES(mutex());
+    std::set<audio_port_handle_t> getTrackPortIds();
 
     void readOutputParameters_l() REQUIRES(mutex());
     MetadataUpdate updateMetadata_l() final REQUIRES(mutex());
@@ -1834,7 +1836,7 @@
             void        resetWriteBlocked();
             void        setDraining(uint32_t sequence);
             void        resetDraining();
-            void        setAsyncError();
+            void        setAsyncError(bool isHardError);
 
 private:
     const wp<PlaybackThread>   mPlaybackThread;
@@ -1848,7 +1850,8 @@
     uint32_t                   mDrainSequence;
     audio_utils::condition_variable mWaitWorkCV;
     mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kAsyncCallbackThread_Mutex};
-    bool                       mAsyncError;
+    enum AsyncError { ASYNC_ERROR_NONE, ASYNC_ERROR_SOFT, ASYNC_ERROR_HARD };
+    AsyncError                 mAsyncError;
 
     audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::AsyncCallbackThread_Mutex) {
         return mMutex;
diff --git a/services/audioflinger/datapath/AudioStreamIn.cpp b/services/audioflinger/datapath/AudioStreamIn.cpp
index 76618f4..165ac25 100644
--- a/services/audioflinger/datapath/AudioStreamIn.cpp
+++ b/services/audioflinger/datapath/AudioStreamIn.cpp
@@ -58,7 +58,7 @@
 
     if (mHalFormatHasProportionalFrames &&
             (flags & AUDIO_INPUT_FLAG_DIRECT) == AUDIO_INPUT_FLAG_DIRECT) {
-        // For DirectRecord reset timestamp to 0 on standby.
+        // For DirectRecord reset position to 0 on standby.
         const uint64_t adjustedPosition = (halPosition <= mFramesReadAtStandby) ?
                 0 : (halPosition - mFramesReadAtStandby);
         // Scale from HAL sample rate to application rate.
diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
index 9851f3a..a686ff6 100644
--- a/services/audioflinger/datapath/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -51,42 +51,17 @@
         return NO_INIT;
     }
 
-    uint32_t halPosition = 0;
+    uint64_t halPosition = 0;
     const status_t status = stream->getRenderPosition(&halPosition);
     if (status != NO_ERROR) {
         return status;
     }
-
-    // Maintain a 64-bit render position using the 32-bit result from the HAL.
-    // This delta calculation relies on the arithmetic overflow behavior
-    // of integers. For example (100 - 0xFFFFFFF0) = 116.
-    const auto truncatedPosition = (uint32_t)mRenderPosition;
-    int32_t deltaHalPosition; // initialization not needed, overwitten by __builtin_sub_overflow()
-    (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
-
-    if (deltaHalPosition > 0) {
-        mRenderPosition += deltaHalPosition;
-    } else if (mExpectRetrograde) {
-        mExpectRetrograde = false;
-        mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
-    }
     // Scale from HAL sample rate to application rate.
-    *frames = mRenderPosition / mRateMultiplier;
+    *frames = halPosition / mRateMultiplier;
 
     return status;
 }
 
-// return bottom 32-bits of the render position
-status_t AudioStreamOut::getRenderPosition(uint32_t *frames)
-{
-    uint64_t position64 = 0;
-    const status_t status = getRenderPosition(&position64);
-    if (status == NO_ERROR) {
-        *frames = (uint32_t)position64;
-    }
-    return status;
-}
-
 status_t AudioStreamOut::getPresentationPosition(uint64_t *frames, struct timespec *timestamp)
 {
     if (stream == nullptr) {
@@ -101,7 +76,7 @@
 
     if (mHalFormatHasProportionalFrames &&
             (flags & AUDIO_OUTPUT_FLAG_DIRECT) == AUDIO_OUTPUT_FLAG_DIRECT) {
-        // For DirectTrack reset timestamp to 0 on standby.
+        // For DirectTrack reset position to 0 on standby.
         const uint64_t adjustedPosition = (halPosition <= mFramesWrittenAtStandby) ?
                 0 : (halPosition - mFramesWrittenAtStandby);
         // Scale from HAL sample rate to application rate.
@@ -179,8 +154,6 @@
 
 int AudioStreamOut::flush()
 {
-    mRenderPosition = 0;
-    mExpectRetrograde = false;
     mFramesWritten = 0;
     mFramesWrittenAtStandby = 0;
     const status_t result = stream->flush();
@@ -189,12 +162,14 @@
 
 int AudioStreamOut::standby()
 {
-    mRenderPosition = 0;
-    mExpectRetrograde = false;
     mFramesWrittenAtStandby = mFramesWritten;
     return stream->standby();
 }
 
+void AudioStreamOut::presentationComplete() {
+    stream->presentationComplete();
+}
+
 ssize_t AudioStreamOut::write(const void *buffer, size_t numBytes)
 {
     size_t bytesWritten;
diff --git a/services/audioflinger/datapath/AudioStreamOut.h b/services/audioflinger/datapath/AudioStreamOut.h
index ea41bba..2c9fb3e 100644
--- a/services/audioflinger/datapath/AudioStreamOut.h
+++ b/services/audioflinger/datapath/AudioStreamOut.h
@@ -51,9 +51,6 @@
 
     virtual ~AudioStreamOut();
 
-    // Get the bottom 32-bits of the 64-bit render position.
-    status_t getRenderPosition(uint32_t *frames);
-
     virtual status_t getRenderPosition(uint64_t *frames);
 
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
@@ -91,21 +88,14 @@
     virtual status_t flush();
     virtual status_t standby();
 
-    // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
-    // transitioning between tracks.
-    // The HAL resets the frame position without flush/stop being called, but calls back prior to
-    // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
-    // mRenderPosition.
-    virtual void presentationComplete() { mExpectRetrograde = true; }
+    virtual void presentationComplete();
 
 protected:
     uint64_t mFramesWritten = 0; // reset by flush
     uint64_t mFramesWrittenAtStandby = 0;
-    uint64_t mRenderPosition = 0; // reset by flush, standby, or presentation complete
     int mRateMultiplier = 1;
     bool mHalFormatHasProportionalFrames = false;
     size_t mHalFrameSize = 0;
-    bool mExpectRetrograde = false; // see presentationComplete
 };
 
 } // namespace android
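
For context, the bookkeeping removed above extended a 32-bit HAL render counter to 64 bits by relying on wrap-around subtraction (the removed comment's example: 100 - 0xFFFFFFF0 = 116); it becomes unnecessary once getRenderPosition() reports a 64-bit value. A small illustration of that delta computation:

    // Illustration of the 32-bit -> 64-bit extension the removed code performed.
    #include <cstdint>
    #include <cstdio>

    static void advance(uint64_t* renderPosition, uint32_t halPosition) {
        const uint32_t truncated = static_cast<uint32_t>(*renderPosition);
        int32_t delta;
        // Wrapping subtraction: the difference is taken modulo 2^32.
        (void)__builtin_sub_overflow(halPosition, truncated, &delta);
        if (delta > 0) {
            *renderPosition += delta;
        }
    }

    int main() {
        uint64_t pos = 0xFFFFFFF0;  // just below the 32-bit wrap point
        advance(&pos, 100);         // the HAL counter has wrapped around to 100
        std::printf("%llu\n", (unsigned long long)pos);  // prints 4294967396
    }
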
diff --git a/services/audioparameterparser/Android.bp b/services/audioparameterparser/Android.bp
index f5feece..1c1c1e1 100644
--- a/services/audioparameterparser/Android.bp
+++ b/services/audioparameterparser/Android.bp
@@ -35,10 +35,10 @@
     name: "android.hardware.audio.parameter_parser.example_defaults",
     defaults: [
         "latest_android_hardware_audio_core_ndk_shared",
+        "latest_av_audio_types_aidl_ndk_shared",
     ],
 
     shared_libs: [
-        "av-audio-types-aidl-V1-ndk",
         "libbase",
         "libbinder_ndk",
     ],
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index d027564..747af4a 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -561,6 +561,7 @@
             audio_port_config config = {};
             devicePort->toAudioPortConfig(&config);
             config.config_mask = AUDIO_PORT_CONFIG_GAIN;
+            config.gain.mode = gains[0]->getMode();
             config.gain.values[0] = gainValueMb;
             return mClientInterface->setAudioPortConfig(&config, 0) == NO_ERROR;
         }
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
index d7eb2c8..878e0e9 100644
--- a/services/audiopolicy/engine/common/Android.bp
+++ b/services/audiopolicy/engine/common/Android.bp
@@ -61,4 +61,7 @@
         "com.android.media.audio-aconfig-cc",
         "server_configurable_flags",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
 }
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index aaf89a0..2c3c4be 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -53,4 +53,7 @@
         "libutils",
         "libxml2",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
 }
diff --git a/services/audiopolicy/enginedefault/Android.bp b/services/audiopolicy/enginedefault/Android.bp
index 1563d5f..f5958ba 100644
--- a/services/audiopolicy/enginedefault/Android.bp
+++ b/services/audiopolicy/enginedefault/Android.bp
@@ -40,4 +40,7 @@
         "libutils",
         "libxml2",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
 }
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 315e08d..5c4ab7b 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -2086,7 +2086,14 @@
 
         // sampling rate match
         if (samplingRate > SAMPLE_RATE_HZ_DEFAULT) {
-            currentMatchCriteria[4] = outputDesc->getSamplingRate();
+            int diff;  // avoid unsigned integer overflow.
+            __builtin_sub_overflow(outputDesc->getSamplingRate(), samplingRate, &diff);
+
+            // Prefer the closest output sampling rate greater than or equal to the target;
+            // if none exists, prefer the closest output sampling rate below the target.
+            //
+            // The criterion is offset to keep it non-negative.
+            currentMatchCriteria[4] = diff >= 0 ? -diff + 200'000'000 : diff + 100'000'000;
         }
 
         // performance flags match
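
A short worked example (not part of the patch) of how the offset criterion above ranks candidate output rates against a 48 kHz target: rates at or above the target always outscore rates below it, and within each group the closest rate wins.

    // Worked example of the sampling-rate match criterion above.
    #include <cstdio>
    #include <initializer_list>

    static int rateCriterion(int outputRate, int targetRate) {
        int diff = outputRate - targetRate;
        return diff >= 0 ? -diff + 200'000'000 : diff + 100'000'000;
    }

    int main() {
        const int target = 48000;
        for (int rate : {48000, 96000, 44100, 8000}) {
            std::printf("%6d Hz -> %d\n", rate, rateCriterion(rate, target));
        }
        // 48000 Hz -> 200000000  (exact match, highest score)
        // 96000 Hz -> 199952000  (closest rate above the target)
        // 44100 Hz ->  99996100  (rates below the target score lower)
        //  8000 Hz ->  99960000
    }
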
@@ -6390,6 +6397,15 @@
         if ((desc->mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0
                 && !isOutputOnlyAvailableRouteToSomeDevice(desc)) {
             outputsClosed.push_back(desc->mIoHandle);
+            nextAudioPortGeneration();
+            ssize_t index = mAudioPatches.indexOfKey(desc->getPatchHandle());
+            if (index >= 0) {
+                sp<AudioPatch> patchDesc = mAudioPatches.valueAt(index);
+                (void) /*status_t status*/ mpClientInterface->releaseAudioPatch(
+                            patchDesc->getAfHandle(), 0);
+                mAudioPatches.removeItemsAt(index);
+                mpClientInterface->onAudioPatchListUpdate();
+            }
             desc->close();
         }
     }
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 2718604..58b3e51 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -3649,19 +3649,18 @@
         cleanUpFailedRequests(/*sendRequestError*/ true);
         // Check if any stream is abandoned.
         checkAndStopRepeatingRequest();
+        // Inform waitUntilRequestProcessed thread of a failed request ID
+        wakeupLatestRequest(/*latestRequestFailed*/true, latestRequestId);
         return true;
     } else if (res != OK) {
         cleanUpFailedRequests(/*sendRequestError*/ false);
+        // Inform waitUntilRequestProcessed thread of a failed request ID
+        wakeupLatestRequest(/*latestRequestFailed*/true, latestRequestId);
         return false;
     }
 
     // Inform waitUntilRequestProcessed thread of a new request ID
-    {
-        Mutex::Autolock al(mLatestRequestMutex);
-
-        mLatestRequestId = latestRequestId;
-        mLatestRequestSignal.signal();
-    }
+    wakeupLatestRequest(/*latestRequestFailed*/false, latestRequestId);
 
     // Submit a batch of requests to HAL.
     // Use flush lock only when submitting multiple requests in a batch.
@@ -4393,12 +4392,7 @@
                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
                         captureRequest->mResultExtras);
             }
-            {
-                Mutex::Autolock al(mLatestRequestMutex);
-
-                mLatestFailedRequestId = captureRequest->mResultExtras.requestId;
-                mLatestRequestSignal.signal();
-            }
+            wakeupLatestRequest(/*latestRequestFailed*/true,
+                    captureRequest->mResultExtras.requestId);
         }
 
         // Remove yet-to-be submitted inflight request from inflightMap
@@ -5060,6 +5054,20 @@
     return OK;
 }
 
+void  Camera3Device::RequestThread::wakeupLatestRequest(
+        bool latestRequestFailed,
+        int32_t latestRequestId) {
+    Mutex::Autolock al(mLatestRequestMutex);
+
+    if (latestRequestFailed) {
+        mLatestFailedRequestId = latestRequestId;
+    } else {
+        mLatestRequestId = latestRequestId;
+    }
+    mLatestRequestSignal.signal();
+}
+
 /**
  * PreparerThread inner class methods
  */
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 9a2f2b1..1820702 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -1026,6 +1026,11 @@
             const sp<CaptureRequest> &request,
             const CameraMetadata& injectedSessionParams);
 
+        /**
+         * Update the latest (or latest failed) request id and signal mLatestRequestSignal.
+         **/
+        void wakeupLatestRequest(bool latestRequestFailed, int32_t latestRequestId);
+
       protected:
 
         virtual bool threadLoop();
diff --git a/services/camera/virtualcamera/VirtualCameraStream.cc b/services/camera/virtualcamera/VirtualCameraStream.cc
index 03da171..fad6cac 100644
--- a/services/camera/virtualcamera/VirtualCameraStream.cc
+++ b/services/camera/virtualcamera/VirtualCameraStream.cc
@@ -26,8 +26,6 @@
 
 #include "EGL/egl.h"
 #include "aidl/android/hardware/camera/device/Stream.h"
-#include "aidl/android/hardware/camera/device/StreamBuffer.h"
-#include "aidl/android/hardware/graphics/common/PixelFormat.h"
 #include "aidlcommonsupport/NativeHandle.h"
 #include "android/hardware_buffer.h"
 #include "cutils/native_handle.h"
@@ -39,52 +37,33 @@
 namespace virtualcamera {
 
 using ::aidl::android::hardware::camera::device::Stream;
-using ::aidl::android::hardware::camera::device::StreamBuffer;
 using ::aidl::android::hardware::common::NativeHandle;
-using ::aidl::android::hardware::graphics::common::PixelFormat;
 
 namespace {
 
-sp<GraphicBuffer> createBlobGraphicBuffer(GraphicBufferMapper& mapper,
-                                          buffer_handle_t bufferHandle) {
-  uint64_t allocationSize;
-  uint64_t usage;
-  uint64_t layerCount;
-  if (mapper.getAllocationSize(bufferHandle, &allocationSize) != NO_ERROR ||
-      mapper.getUsage(bufferHandle, &usage) != NO_ERROR ||
-      mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR) {
-    ALOGE("Error fetching metadata for the imported BLOB buffer handle.");
-    return nullptr;
-  }
-
-  return sp<GraphicBuffer>::make(
-      bufferHandle, GraphicBuffer::HandleWrapMethod::TAKE_HANDLE,
-      allocationSize, /*height=*/1, static_cast<int>(ui::PixelFormat::BLOB),
-      layerCount, usage, 0);
-}
-
-sp<GraphicBuffer> createYCbCr420GraphicBuffer(GraphicBufferMapper& mapper,
-                                              buffer_handle_t bufferHandle) {
+sp<GraphicBuffer> createGraphicBuffer(GraphicBufferMapper& mapper,
+                                      const buffer_handle_t bufferHandle) {
   uint64_t width;
   uint64_t height;
   uint64_t usage;
   uint64_t layerCount;
+  ui::PixelFormat pixelFormat;
   if (mapper.getWidth(bufferHandle, &width) != NO_ERROR ||
       mapper.getHeight(bufferHandle, &height) != NO_ERROR ||
       mapper.getUsage(bufferHandle, &usage) != NO_ERROR ||
-      mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR) {
+      mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR ||
+      mapper.getPixelFormatRequested(bufferHandle, &pixelFormat) != NO_ERROR) {
     ALOGE("Error fetching metadata for the imported YCbCr420 buffer handle.");
     return nullptr;
   }
 
   return sp<GraphicBuffer>::make(
       bufferHandle, GraphicBuffer::HandleWrapMethod::TAKE_HANDLE, width, height,
-      static_cast<int>(ui::PixelFormat::YCBCR_420_888), /*layers=*/1, usage,
-      width);
+      static_cast<int>(pixelFormat), layerCount, usage, width);
 }
 
 std::shared_ptr<AHardwareBuffer> importBufferInternal(
-    const NativeHandle& aidlHandle, const Stream& streamConfig) {
+    const NativeHandle& aidlHandle) {
   if (aidlHandle.fds.empty()) {
     ALOGE("Empty handle - nothing to import");
     return nullptr;
@@ -103,12 +82,9 @@
     return nullptr;
   }
 
-  sp<GraphicBuffer> buf =
-      streamConfig.format == PixelFormat::BLOB
-          ? createBlobGraphicBuffer(mapper, bufferHandle)
-          : createYCbCr420GraphicBuffer(mapper, bufferHandle);
+  sp<GraphicBuffer> buf = createGraphicBuffer(mapper, bufferHandle);
 
-  if (buf->initCheck() != NO_ERROR) {
+  if (buf == nullptr || buf->initCheck() != NO_ERROR) {
     ALOGE("Imported graphic buffer is not correcly initialized.");
     return nullptr;
   }
@@ -128,7 +104,7 @@
 
 std::shared_ptr<AHardwareBuffer> VirtualCameraStream::importBuffer(
     const ::aidl::android::hardware::camera::device::StreamBuffer& buffer) {
-  auto hwBufferPtr = importBufferInternal(buffer.buffer, mStreamConfig);
+  auto hwBufferPtr = importBufferInternal(buffer.buffer);
   if (hwBufferPtr != nullptr) {
     std::lock_guard<std::mutex> lock(mLock);
     mBuffers.emplace(std::piecewise_construct,
diff --git a/services/camera/virtualcamera/util/EglProgram.cc b/services/camera/virtualcamera/util/EglProgram.cc
index 510fd33..85ff735 100644
--- a/services/camera/virtualcamera/util/EglProgram.cc
+++ b/services/camera/virtualcamera/util/EglProgram.cc
@@ -88,7 +88,7 @@
     })";
 
 constexpr char kExternalRgbaTextureFragmentShader[] = R"(#version 300 es
-    #extension GL_OES_EGL_image_external : require
+    #extension GL_OES_EGL_image_external_essl3 : require
     #extension GL_EXT_YUV_target : require
     precision mediump float;
     in vec2 vTextureCoord;
diff --git a/services/mediametrics/include/mediametricsservice/AudioTypes.h b/services/mediametrics/include/mediametricsservice/AudioTypes.h
index b5fe28b..59654bf 100644
--- a/services/mediametrics/include/mediametricsservice/AudioTypes.h
+++ b/services/mediametrics/include/mediametricsservice/AudioTypes.h
@@ -18,6 +18,7 @@
 
 #include <string>
 #include <unordered_map>
+#include <vector>
 
 namespace android::mediametrics::types {
 
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index cd00937..a8a1de1 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -109,23 +109,17 @@
     return CodecBucketUnspecified;
 }
 
-static bool getLogMessage(int hwCount, int swCount, std::stringstream& logMsg) {
-    bool update = false;
-    logMsg.clear();
+static std::string getLogMessage(const std::string& firstKey, const long& firstValue,
+                                 const std::string& secondKey, const long& secondValue) {
 
-    if (hwCount > 0) {
-        logMsg << " HW: " << hwCount;
-        update = true;
+    std::stringstream logMsg;
+    if (firstValue > 0) {
+        logMsg << firstKey << firstValue;
     }
-    if (swCount > 0) {
-        logMsg << " SW: " << swCount;
-        update = true;
+    if (secondValue > 0) {
+        logMsg << secondKey << secondValue;
     }
-
-    if (update) {
-        logMsg << " ] ";
-    }
-    return update;
+    return logMsg.str();
 }
 
 ResourceManagerMetrics::ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo) {
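
The refactor above turns the HW/SW-specific helper into a generic two key/value builder that returns an empty string when there is nothing to log; a quick usage sketch of that behavior:

    // Illustrative use of the generalized getLogMessage() helper above.
    #include <iostream>
    #include <sstream>
    #include <string>

    static std::string getLogMessage(const std::string& firstKey, const long& firstValue,
                                     const std::string& secondKey, const long& secondValue) {
        std::stringstream logMsg;
        if (firstValue > 0) logMsg << firstKey << firstValue;
        if (secondValue > 0) logMsg << secondKey << secondValue;
        return logMsg.str();
    }

    int main() {
        std::cout << '"' << getLogMessage(" HW: ", 2, " SW: ", 0) << "\"\n";  // " HW: 2"
        std::cout << '"' << getLogMessage(" HW: ", 0, " SW: ", 0) << "\"\n";  // "" (caller skips it)
        std::cout << '"' << getLogMessage(" Current Pixels: ", 100, " Peak Pixels: ", 200)
                  << "\"\n";  // " Current Pixels: 100 Peak Pixels: 200"
    }
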
@@ -364,6 +358,15 @@
     std::scoped_lock lock(mLock);
     // post MediaCodecConcurrentUsageReported for this terminated pid.
     pushConcurrentUsageReport(pid, uid);
+    // Remove all the metrics associated with this process.
+    std::map<int32_t, ConcurrentCodecs>::iterator it1 = mProcessConcurrentCodecsMap.find(pid);
+    if (it1 != mProcessConcurrentCodecsMap.end()) {
+        mProcessConcurrentCodecsMap.erase(it1);
+    }
+    std::map<int32_t, PixelCount>::iterator it2 = mProcessPixelsMap.find(pid);
+    if (it2 != mProcessPixelsMap.end()) {
+        mProcessPixelsMap.erase(it2);
+    }
 }
 
 void ResourceManagerMetrics::pushConcurrentUsageReport(int32_t pid, uid_t uid) {
@@ -400,24 +403,30 @@
 
     std::stringstream peakCodecLog;
     peakCodecLog << "Peak { ";
-    std::stringstream logMsg;
-    if (getLogMessage(peakHwAudioEncoderCount, peakSwAudioEncoderCount, logMsg)) {
-        peakCodecLog << "AudioEnc[" << logMsg.str();
+    std::string logMsg;
+    logMsg = getLogMessage(" HW: ", peakHwAudioEncoderCount, " SW: ", peakSwAudioEncoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "AudioEnc[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwAudioDecoderCount, peakSwAudioDecoderCount, logMsg)) {
-        peakCodecLog << "AudioDec[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwAudioDecoderCount, " SW: ", peakSwAudioDecoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "AudioDec[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwVideoEncoderCount, peakSwVideoEncoderCount, logMsg)) {
-        peakCodecLog << "VideoEnc[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwVideoEncoderCount, " SW: ", peakSwVideoEncoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "VideoEnc[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwVideoDecoderCount, peakSwVideoDecoderCount, logMsg)) {
-        peakCodecLog << "VideoDec[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwVideoDecoderCount, " SW: ", peakSwVideoDecoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "VideoDec[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwImageEncoderCount, peakSwImageEncoderCount, logMsg)) {
-        peakCodecLog << "ImageEnc[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwImageEncoderCount, " SW: ", peakSwImageEncoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "ImageEnc[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwImageDecoderCount, peakSwImageDecoderCount, logMsg)) {
-        peakCodecLog << "ImageDec[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwImageDecoderCount, " SW: ", peakSwImageDecoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "ImageDec[" << logMsg << " ] ";
     }
     peakCodecLog << "}";
 
@@ -705,4 +714,114 @@
     return 0;
 }
 
+static std::string getConcurrentInstanceCount(const std::map<std::string, int>& resourceMap) {
+    if (resourceMap.empty()) {
+        return "";
+    }
+    std::stringstream concurrentInstanceInfo;
+    for (const auto& [name, count] : resourceMap) {
+        if (count > 0) {
+            concurrentInstanceInfo << "      Name: " << name << " Instances: " << count << "\n";
+        }
+    }
+
+    std::string info = concurrentInstanceInfo.str();
+    if (info.empty()) {
+        return "";
+    }
+    return "    Current Concurrent Codec Instances:\n" + info;
+}
+
+static std::string getAppsPixelCount(const std::map<int32_t, PixelCount>& pixelMap) {
+    if (pixelMap.empty()) {
+        return "";
+    }
+    std::stringstream pixelInfo;
+    for (const auto& [pid, pixelCount] : pixelMap) {
+        std::string logMsg = getLogMessage(" Current Pixels: ", pixelCount.mCurrent,
+                                           " Peak Pixels: ", pixelCount.mPeak);
+        if (!logMsg.empty()) {
+            pixelInfo  << "      PID[" << pid << "]: {" << logMsg << " }\n";
+        }
+    }
+
+    return "    Applications Pixel Usage:\n" + pixelInfo.str();
+}
+
+static std::string getCodecUsageMetrics(const ConcurrentCodecsMap& codecsMap) {
+    int peakHwAudioEncoderCount = codecsMap[HwAudioEncoder];
+    int peakHwAudioDecoderCount = codecsMap[HwAudioDecoder];
+    int peakHwVideoEncoderCount = codecsMap[HwVideoEncoder];
+    int peakHwVideoDecoderCount = codecsMap[HwVideoDecoder];
+    int peakHwImageEncoderCount = codecsMap[HwImageEncoder];
+    int peakHwImageDecoderCount = codecsMap[HwImageDecoder];
+    int peakSwAudioEncoderCount = codecsMap[SwAudioEncoder];
+    int peakSwAudioDecoderCount = codecsMap[SwAudioDecoder];
+    int peakSwVideoEncoderCount = codecsMap[SwVideoEncoder];
+    int peakSwVideoDecoderCount = codecsMap[SwVideoDecoder];
+    int peakSwImageEncoderCount = codecsMap[SwImageEncoder];
+    int peakSwImageDecoderCount = codecsMap[SwImageDecoder];
+    std::stringstream usageMetrics;
+    std::string logMsg;
+    logMsg = getLogMessage(" HW: ", peakHwAudioEncoderCount, " SW: ", peakSwAudioEncoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "AudioEnc[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwAudioDecoderCount, " SW: ", peakSwAudioDecoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "AudioDec[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwVideoEncoderCount, " SW: ", peakSwVideoEncoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "VideoEnc[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwVideoDecoderCount, " SW: ", peakSwVideoDecoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "VideoDec[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwImageEncoderCount, " SW: ", peakSwImageEncoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "ImageEnc[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwImageDecoderCount, " SW: ", peakSwImageDecoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "ImageDec[" << logMsg << " ] ";
+    }
+
+    return usageMetrics.str();
+}
+
+static std::string getAppsCodecUsageMetrics(
+        const std::map<int32_t, ConcurrentCodecs>& processCodecsMap) {
+    if (processCodecsMap.empty()) {
+        return "";
+    }
+    std::stringstream codecUsage;
+    std::string info;
+    for (const auto& [pid, codecMap] : processCodecsMap) {
+        codecUsage << "      PID[" << pid << "]: ";
+        info = getCodecUsageMetrics(codecMap.mCurrent);
+        if (!info.empty()) {
+            codecUsage << "Current Codec Usage: { " << info << "} ";
+        }
+        info = getCodecUsageMetrics(codecMap.mPeak);
+        if (!info.empty()) {
+            codecUsage << "Peak Codec Usage: { " << info << "}";
+        }
+        codecUsage << "\n";
+    }
+
+    return "    Applications Codec Usage:\n" + codecUsage.str();
+}
+
+std::string ResourceManagerMetrics::dump() const {
+    std::string metricsLog("  Metrics logs:\n");
+    metricsLog += getConcurrentInstanceCount(mConcurrentResourceCountMap);
+    metricsLog += getAppsPixelCount(mProcessPixelsMap);
+    metricsLog += getAppsCodecUsageMetrics(mProcessConcurrentCodecsMap);
+
+    return metricsLog;
+}
+
 } // namespace android
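
Assembled from the format strings above, the output of the new dump() method looks roughly like the following; the resource name, PID, and counts are purely illustrative:

      Metrics logs:
        Current Concurrent Codec Instances:
          Name: video/avc Instances: 2
        Applications Pixel Usage:
          PID[1234]: { Current Pixels: 2073600 Peak Pixels: 8294400 }
        Applications Codec Usage:
          PID[1234]: Current Codec Usage: { VideoDec[ HW: 1 ] } Peak Codec Usage: { VideoDec[ HW: 2 ] }
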
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
index 7a5a89f..9904f7d 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.h
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -171,6 +171,9 @@
     // Get the current concurrent pixel count (associated with the video codecs) for the process.
     long getCurrentConcurrentPixelCount(int pid) const;
 
+    // Retrieves the metrics log as a formatted string.
+    std::string dump() const;
+
 private:
     ResourceManagerMetrics(const ResourceManagerMetrics&) = delete;
     ResourceManagerMetrics(ResourceManagerMetrics&&) = delete;
@@ -204,9 +207,9 @@
     // Map of resources (name) and number of concurrent instances
     std::map<std::string, int> mConcurrentResourceCountMap;
 
-    // Map of concurrent codes by CodecBucket across the system.
+    // Map of concurrent codecs by CodecBucket across the system.
     ConcurrentCodecsMap mConcurrentCodecsMap;
-    // Map of concurrent and peak codes by CodecBucket for each process/application.
+    // Map of concurrent and peak codecs by CodecBucket for each process/application.
     std::map<int32_t, ConcurrentCodecs> mProcessConcurrentCodecsMap;
 
     // Uid Observer to monitor the application termination.
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index d37d893..9c2fb7c 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -108,10 +108,17 @@
         serviceLog = mServiceLog->toString("    " /* linePrefix */);
     }
 
-    // Get all the resource (and overload pid) logs
+    // Get the full resource (and overload pid) log.
     std::string resourceLog;
     getResourceDump(resourceLog);
 
+    // Get the metrics log.
+    std::string metricsLog;
+    {
+        std::scoped_lock lock{mLock};
+        metricsLog = mResourceManagerMetrics->dump();
+    }
+
     const size_t SIZE = 256;
     char buffer[SIZE];
     snprintf(buffer, SIZE, "ResourceManagerService: %p\n", this);
@@ -123,11 +130,16 @@
             supportsSecureWithNonSecureCodec);
     result.append(buffer);
 
+    // Add resource log.
     result.append(resourceLog.c_str());
 
+    // Add service log.
     result.append("  Events logs (most recent at top):\n");
     result.append(serviceLog);
 
+    // Add metrics log.
+    result.append(metricsLog.c_str());
+
     write(fd, result.c_str(), result.size());
     return OK;
 }
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index 5bac062..3f04f69 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -47,7 +47,7 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "girishshetty@google.com",
         ],
         componentid: 155276,
         hotlists: [
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 6a64823..5dfec30 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -29,6 +29,9 @@
         "libactivitymanager_aidl",
         "server_configurable_flags",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
     include_dirs: [
         "frameworks/av/include",
         "frameworks/av/services/mediaresourcemanager",