Merge "Audio Fuzzers : Initialise audio services one time" into main
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index 1cf63b0..e9b757b 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -4,9 +4,30 @@
 hidden_api_txt_checksorted_hook = ${REPO_ROOT}/tools/platform-compat/hiddenapi/checksorted_sha.sh ${PREUPLOAD_COMMIT} ${REPO_ROOT}
 
 [Builtin Hooks]
+bpfmt = true
 clang_format = true
 
 [Builtin Hooks Options]
+# Enable sort and limit subfolder checks
+bpfmt = -s
+    media/audio/
+    media/audioserver/
+    media/libaaudio/
+    media/libaudioclient/
+    media/libaudiofoundation/
+    media/libaudiohal/
+    media/libaudioprocessing/
+    media/libaudiousecasevalidation/
+    media/libeffects/
+    media/libmediametrics/
+    media/libnbaio/
+    media/libnblog/
+    services/audioflinger/
+    services/audioparameterparser/
+    services/audiopolicy/
+    services/medialog/
+    services/oboeservice/
+
 # Only turn on clang-format check for the following subfolders.
 clang_format = --commit ${PREUPLOAD_COMMIT} --style file --extensions c,h,cc,cpp
                media/libaudioclient/tests/
diff --git a/drm/libmediadrmrkp/Android.bp b/drm/libmediadrmrkp/Android.bp
index f13eb62..b1a01e4 100644
--- a/drm/libmediadrmrkp/Android.bp
+++ b/drm/libmediadrmrkp/Android.bp
@@ -5,7 +5,7 @@
         "src/**/*.cpp",
     ],
     export_include_dirs: [
-        "include"
+        "include",
     ],
     shared_libs: [
         "libbinder_ndk",
@@ -17,7 +17,7 @@
         "android.hardware.drm-V1-ndk",
         "android.hardware.security.rkp-V3-ndk",
         "libbase",
-        "libcppbor_external",
+        "libcppbor",
     ],
     defaults: [
         "keymint_use_latest_hal_aidl_ndk_shared",
@@ -42,7 +42,7 @@
         "android.hardware.drm-V1-ndk",
         "android.hardware.security.rkp-V3-ndk",
         "libbase",
-        "libcppbor_external",
+        "libcppbor",
         "libmediadrmrkp",
     ],
     vendor: true,
@@ -50,4 +50,4 @@
         "-Wall",
         "-Werror",
     ],
-}
\ No newline at end of file
+}
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 591d56d..7b63e75 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -21,6 +21,7 @@
 #include <audio_utils/primitives.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 
+#include <C2Debug.h>
 #include <C2PlatformSupport.h>
 #include <SimpleC2Interface.h>
 
@@ -81,10 +82,6 @@
                     FLAC_COMPRESSION_LEVEL_MIN, FLAC_COMPRESSION_LEVEL_MAX)})
                 .withSetter(Setter<decltype(*mComplexity)>::NonStrictValueWithNoDeps)
                 .build());
-        addParameter(
-                DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
-                .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 4608))
-                .build());
 
         addParameter(
                 DefineParam(mPcmEncodingInfo, C2_PARAMKEY_PCM_ENCODING)
@@ -96,6 +93,26 @@
                 })
                 .withSetter((Setter<decltype(*mPcmEncodingInfo)>::StrictValueWithNoDeps))
                 .build());
+
+        addParameter(
+                DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+                .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMaxBlockSize))
+                .withFields({
+                    C2F(mInputMaxBufSize, value).any(),
+                })
+                .withSetter(MaxInputSizeSetter, mChannelCount, mPcmEncodingInfo)
+                .build());
+    }
+
+    static C2R MaxInputSizeSetter(bool mayBlock,
+            C2P<C2StreamMaxBufferSizeInfo::input> &me,
+            const C2P<C2StreamChannelCountInfo::input> &channelCount,
+            const C2P<C2StreamPcmEncodingInfo::input> &pcmEncoding) {
+        (void)mayBlock;
+        C2R res = C2R::Ok();
+        int bytesPerSample = pcmEncoding.v.value == C2Config::PCM_FLOAT ? 4 : 2;
+        me.set().value = kMaxBlockSize * bytesPerSample * channelCount.v.value;
+        return res;
     }
 
     uint32_t getSampleRate() const { return mSampleRate->value; }
@@ -446,6 +463,9 @@
 
     mBlockSize = FLAC__stream_encoder_get_blocksize(mFlacStreamEncoder);
 
+    // Update kMaxBlockSize to match maximum size used by the encoder
+    CHECK(mBlockSize <= kMaxBlockSize);
+
     ALOGV("encoder successfully configured");
     return OK;
 }
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
index a971ab5..1f3be3c 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.h
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -63,7 +63,8 @@
 
     std::shared_ptr<IntfImpl> mIntf;
     const unsigned int kInBlockSize = 1152;
-    const unsigned int kMaxNumChannels = 2;
+    static constexpr unsigned int kMaxNumChannels = 2;
+    static constexpr unsigned int kMaxBlockSize = 4608;
     FLAC__StreamEncoder* mFlacStreamEncoder;
     FLAC__int32* mInputBufferPcm32;
     std::shared_ptr<C2LinearBlock> mOutputBlock;
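
The FLAC change above replaces the fixed 4608-byte input limit with a value derived from the channel count and PCM encoding. A standalone sketch of that sizing arithmetic, reusing the constants from C2SoftFlacEnc.h (the helper and the main() driver are illustrative, not part of Codec2):

    #include <cstddef>
    #include <cstdio>

    // Constants mirrored from C2SoftFlacEnc.h above.
    constexpr unsigned int kMaxBlockSize = 4608;   // largest block size the encoder may use
    constexpr unsigned int kMaxNumChannels = 2;

    // Worst-case input buffer size for one block:
    // samples per block * bytes per sample * channels.
    constexpr std::size_t maxInputSizeBytes(bool pcmFloat, unsigned int channels) {
        const std::size_t bytesPerSample = pcmFloat ? 4 : 2;  // PCM_FLOAT vs 16-bit PCM
        return static_cast<std::size_t>(kMaxBlockSize) * bytesPerSample * channels;
    }

    int main() {
        std::printf("16-bit stereo: %zu bytes\n", maxInputSizeBytes(false, kMaxNumChannels));
        std::printf("float stereo:  %zu bytes\n", maxInputSizeBytes(true, kMaxNumChannels));
        return 0;
    }
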
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 2137964..fd9488b 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -469,11 +469,12 @@
         mInitialized = false;
     }
 
+    bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
+
     if (!mInitialized) {
         uint8_t *vol_data[1]{};
         int32_t vol_size = 0;
 
-        bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
         if (codecConfig || volHeader) {
             vol_data[0] = bitstream;
             vol_size = inSize;
@@ -512,10 +513,11 @@
                 return;
             }
         }
-        if (codecConfig) {
-            fillEmptyWork(work);
-            return;
-        }
+    }
+
+    if (codecConfig) {
+        fillEmptyWork(work);
+        return;
     }
 
     size_t inPos = 0;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index cdc3be0..40bb26e 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -29,7 +29,6 @@
     #include <opus_multistream.h>
 }
 
-#define DEFAULT_FRAME_DURATION_MS 20
 namespace android {
 
 namespace {
@@ -38,7 +37,6 @@
 
 }  // namespace
 
-static const int kMaxNumChannelsSupported = 2;
 
 class C2SoftOpusEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
@@ -248,10 +246,11 @@
     mAnchorTimeStamp = 0;
     mProcessedSamples = 0;
     mFilledLen = 0;
-    mFrameDurationMs = DEFAULT_FRAME_DURATION_MS;
+    mFrameDurationMs = kDefaultFrameDurationMs;
     if (!mInputBufferPcm16) {
+        size_t frameSize = (mFrameDurationMs * kMaxSampleRateSupported) / 1000;
         mInputBufferPcm16 =
-            (int16_t*)malloc(kFrameSize * kMaxNumChannels * sizeof(int16_t));
+            (int16_t*)malloc(frameSize * kMaxNumChannelsSupported * sizeof(int16_t));
     }
     if (!mInputBufferPcm16) return C2_NO_MEMORY;
 
@@ -368,7 +367,9 @@
     }
 
     C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
-    err = pool->fetchLinearBlock(kMaxPayload, usage, &mOutputBlock);
+    int outCapacity =
+        kMaxPayload * ((inSize + mNumPcmBytesPerInputFrame) / mNumPcmBytesPerInputFrame);
+    err = pool->fetchLinearBlock(outCapacity, usage, &mOutputBlock);
     if (err != C2_OK) {
         ALOGE("fetchLinearBlock for Output failed with status %d", err);
         work->result = C2_NO_MEMORY;
@@ -497,11 +498,11 @@
         uint8_t* outPtr = wView.data() + mBytesEncoded;
         int encodedBytes =
             opus_multistream_encode(mEncoder, mInputBufferPcm16,
-                                    mNumSamplesPerFrame, outPtr, kMaxPayload - mBytesEncoded);
+                                    mNumSamplesPerFrame, outPtr, outCapacity - mBytesEncoded);
         ALOGV("encoded %i Opus bytes from %zu PCM bytes", encodedBytes,
               processSize);
 
-        if (encodedBytes < 0 || encodedBytes > (kMaxPayload - mBytesEncoded)) {
+        if (encodedBytes < 0 || encodedBytes > (outCapacity - mBytesEncoded)) {
             ALOGE("opus_encode failed, encodedBytes : %d", encodedBytes);
             mSignalledError = true;
             work->result = C2_CORRUPTED;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.h b/media/codec2/components/opus/C2SoftOpusEnc.h
index 733a6bc..2c9f5e5 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.h
+++ b/media/codec2/components/opus/C2SoftOpusEnc.h
@@ -45,12 +45,13 @@
             uint32_t drainMode,
             const std::shared_ptr<C2BlockPool> &pool) override;
 private:
-    /* OPUS_FRAMESIZE_20_MS */
-    const int kFrameSize = 960;
-    const int kMaxSampleRate = 48000;
-    const int kMinSampleRate = 8000;
-    const int kMaxPayload = (4000 * kMaxSampleRate) / kMinSampleRate;
-    const int kMaxNumChannels = 8;
+    static const int kMaxNumChannelsSupported = 2;
+    static const int kMaxSampleRateSupported = 48000;
+    static const int kDefaultFrameDurationMs = 20;
+    // For a frame duration of 20ms, payload recommended size is 1276 as per
+    // https://www.opus-codec.org/docs/html_api/group__opusencoder.html.
+    // For 40ms, 60ms, .. payload size will change proportionately, 1276 x 2, 1276 x 3, ..
+    static const int kMaxPayload = 1500; // from tests/test_opus_encode.c
 
     std::shared_ptr<IntfImpl> mIntf;
     std::shared_ptr<C2LinearBlock> mOutputBlock;
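
The Opus change above sizes the output block from kMaxPayload and the number of input frames covered by the work item rather than a single fixed worst case. A standalone sketch of that capacity formula, with a 20 ms / 48 kHz stereo 16-bit frame size filled in as an illustrative assumption:

    #include <cstdio>

    // From C2SoftOpusEnc.h above: worst-case payload for one 20 ms frame.
    constexpr int kMaxPayload = 1500;

    // Mirrors the expression in C2SoftOpusEnc.cpp above: one kMaxPayload per
    // (possibly partial) encoder frame contained in the input.
    constexpr int outputCapacity(int inSize, int pcmBytesPerFrame) {
        return kMaxPayload * ((inSize + pcmBytesPerFrame) / pcmBytesPerFrame);
    }

    int main() {
        // Illustrative frame size: 20 ms of 48 kHz stereo 16-bit PCM = 960 * 2 * 2 bytes.
        const int pcmBytesPerFrame = 960 * 2 * 2;
        std::printf("one frame of input:  %d bytes of capacity\n",
                    outputCapacity(3840, pcmBytesPerFrame));
        std::printf("two frames of input: %d bytes of capacity\n",
                    outputCapacity(7680, pcmBytesPerFrame));
        return 0;
    }
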
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 01b0678..1c2a0fb 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -563,6 +563,8 @@
         auto mapRet = mDequeued.emplace(bid, *pBuffer);
         CHECK(mapRet.second);
     } else {
+        ALOGD("allocate error(%d): Dequeued(%zu), Dequeuable(%d)",
+              (int)res, mDequeued.size(), mDequeueable + 1);
         if (adjustDequeueConfLocked(updateDequeue)) {
             return;
         }
@@ -629,7 +631,11 @@
     ::android::status_t status = igbp->dequeueBuffer(
             &slotId, &fence, width, height, format, usage, &outBufferAge, &outTimestamps);
     if (status < ::android::OK) {
-        ALOGE("dequeueBuffer() error %d", (int)status);
+        if (status == ::android::TIMED_OUT || status == ::android::WOULD_BLOCK) {
+            ALOGW("BQ might not be ready for dequeueBuffer()");
+            return C2_BLOCKING;
+        }
+        ALOGE("BQ in inconsistent status. dequeueBuffer() error %d", (int)status);
         return C2_CORRUPTED;
     }
     cache->waitOnSlot(slotId);
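
The GraphicsTracker change above maps TIMED_OUT and WOULD_BLOCK from dequeueBuffer() to C2_BLOCKING so the caller can retry, reserving C2_CORRUPTED for a genuinely inconsistent BufferQueue state. A minimal sketch of that status mapping, with stand-in enums instead of the real status_t / c2_status_t values:

    #include <cstdio>

    // Stand-ins for the real codes; the mapping logic is what matters here.
    enum class BqStatus { Ok, TimedOut, WouldBlock, BadValue, DeadObject };
    enum class C2Status { Ok, Blocking, Corrupted };

    C2Status mapDequeueStatus(BqStatus status) {
        switch (status) {
            case BqStatus::Ok:
                return C2Status::Ok;
            case BqStatus::TimedOut:
            case BqStatus::WouldBlock:
                // The queue may simply not be ready yet; let the caller retry.
                return C2Status::Blocking;
            default:
                // Anything else indicates an inconsistent BufferQueue state.
                return C2Status::Corrupted;
        }
    }

    int main() {
        std::printf("%d %d %d\n",
                    static_cast<int>(mapDequeueStatus(BqStatus::Ok)),
                    static_cast<int>(mapDequeueStatus(BqStatus::WouldBlock)),
                    static_cast<int>(mapDequeueStatus(BqStatus::DeadObject)));
        return 0;
    }
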
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
index d1f0fb5..4c2ef9c 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
@@ -83,6 +83,7 @@
     }
 }
 
+// @VsrTest = 3.2-001.003
 TEST_P(Codec2MasterHalTest, MustUseAidlBeyond202404) {
     static int sVendorApiLevel = android::base::GetIntProperty("ro.vendor.api_level", 0);
     if (sVendorApiLevel < 202404) {
diff --git a/media/codec2/sfplugin/C2AidlNode.cpp b/media/codec2/sfplugin/C2AidlNode.cpp
index c20f50d..93c9d8b 100644
--- a/media/codec2/sfplugin/C2AidlNode.cpp
+++ b/media/codec2/sfplugin/C2AidlNode.cpp
@@ -29,9 +29,6 @@
 using ::aidl::android::media::IAidlBufferSource;
 using ::aidl::android::media::IAidlNode;
 
-using ::android::media::CommandStateSet;
-using ::android::media::NodeStatusLoaded;
-
 // Conversion
 using ::android::media::aidl_conversion::toAidlStatus;
 
@@ -43,13 +40,6 @@
     return toAidlStatus(mImpl->freeNode());
 }
 
-::ndk::ScopedAStatus C2AidlNode::sendCommand(int32_t cmd, int32_t param) {
-    if (cmd == CommandStateSet && param == NodeStatusLoaded) {
-        mImpl->onFirstInputFrame();
-    }
-    return toAidlStatus(ERROR_UNSUPPORTED);
-}
-
 ::ndk::ScopedAStatus C2AidlNode::getConsumerUsage(int64_t* _aidl_return) {
     uint64_t usage;
     mImpl->getConsumerUsageBits(&usage);
diff --git a/media/codec2/sfplugin/C2AidlNode.h b/media/codec2/sfplugin/C2AidlNode.h
index a7c328e..365a41d 100644
--- a/media/codec2/sfplugin/C2AidlNode.h
+++ b/media/codec2/sfplugin/C2AidlNode.h
@@ -34,8 +34,6 @@
     // IAidlNode
     ::ndk::ScopedAStatus freeNode() override;
 
-    ::ndk::ScopedAStatus sendCommand(int32_t cmd, int32_t param) override;
-
     ::ndk::ScopedAStatus getConsumerUsage(int64_t *_aidl_return) override;
 
     ::ndk::ScopedAStatus getInputBufferParams(
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 2fa89e7..3eec0f3 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -1550,19 +1550,23 @@
 
 sp<Codec2Buffer> LinearOutputBuffers::wrap(const std::shared_ptr<C2Buffer> &buffer) {
     if (buffer == nullptr) {
-        ALOGV("[%s] using a dummy buffer", mName);
+        ALOGD("[%s] received null buffer", mName);
         return new LocalLinearBuffer(mFormat, new ABuffer(0));
     }
     if (buffer->data().type() != C2BufferData::LINEAR) {
-        ALOGV("[%s] non-linear buffer %d", mName, buffer->data().type());
+        ALOGW("[%s] non-linear buffer %d", mName, buffer->data().type());
         // We expect linear output buffers from the component.
         return nullptr;
     }
     if (buffer->data().linearBlocks().size() != 1u) {
-        ALOGV("[%s] no linear buffers", mName);
+        ALOGW("[%s] no linear buffers", mName);
         // We expect one and only one linear block from the component.
         return nullptr;
     }
+    if (buffer->data().linearBlocks().front().size() == 0) {
+        ALOGD("[%s] received 0-sized buffer", mName);
+        return new LocalLinearBuffer(mFormat, new ABuffer(0));
+    }
     sp<Codec2Buffer> clientBuffer = ConstLinearBlockBuffer::Allocate(mFormat, buffer);
     if (clientBuffer == nullptr) {
         ALOGD("[%s] ConstLinearBlockBuffer::Allocate failed", mName);
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 9c514f2..2550dcf 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -179,10 +179,17 @@
     if (!buffer
             || buffer->data().type() != C2BufferData::LINEAR
             || buffer->data().linearBlocks().size() != 1u) {
+        if (!buffer) {
+            ALOGD("ConstLinearBlockBuffer::Allocate: null buffer");
+        } else {
+            ALOGW("ConstLinearBlockBuffer::Allocate: type=%d # linear blocks=%zu",
+                  buffer->data().type(), buffer->data().linearBlocks().size());
+        }
         return nullptr;
     }
     C2ReadView readView(buffer->data().linearBlocks()[0].map().get());
     if (readView.error() != C2_OK) {
+        ALOGW("ConstLinearBlockBuffer::Allocate: readView.error()=%d", readView.error());
         return nullptr;
     }
     return new ConstLinearBlockBuffer(format, std::move(readView), buffer);
@@ -1137,7 +1144,7 @@
 
         std::optional<Smpte2086> smpte2086;
         status_t status = mapper.getSmpte2086(buffer.get(), &smpte2086);
-        if (status != OK) {
+        if (status != OK || !smpte2086) {
             err = C2_CORRUPTED;
         } else {
             if (smpte2086) {
@@ -1157,7 +1164,7 @@
 
         std::optional<Cta861_3> cta861_3;
         status = mapper.getCta861_3(buffer.get(), &cta861_3);
-        if (status != OK) {
+        if (status != OK || !cta861_3) {
             err = C2_CORRUPTED;
         } else {
             if (cta861_3) {
@@ -1176,7 +1183,7 @@
         dynamicInfo->reset();
         std::optional<std::vector<uint8_t>> vec;
         status_t status = mapper.getSmpte2094_40(buffer.get(), &vec);
-        if (status != OK) {
+        if (status != OK || !vec) {
             dynamicInfo->reset();
             err = C2_CORRUPTED;
         } else {
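
The Codec2Buffer change above treats an empty std::optional from the gralloc mapper the same as an error status instead of silently skipping the metadata. A small standalone sketch of that pattern; getSmpte2086() here is a stand-in, not the real GraphicBufferMapper API:

    #include <cstdio>
    #include <optional>

    struct Smpte2086 { float maxLuminance; };

    // Stand-in for a mapper call that can succeed yet return no metadata.
    int getSmpte2086(bool present, std::optional<Smpte2086>* out) {
        if (present) {
            *out = Smpte2086{1000.f};
        } else {
            out->reset();
        }
        return 0;  // OK
    }

    bool readStaticInfo(bool present) {
        std::optional<Smpte2086> smpte2086;
        const int status = getSmpte2086(present, &smpte2086);
        if (status != 0 || !smpte2086) {
            // Missing metadata is reported the same way as a failed call,
            // instead of being silently accepted.
            return false;
        }
        std::printf("maxLuminance=%g\n", smpte2086->maxLuminance);
        return true;
    }

    int main() {
        const bool ok1 = readStaticInfo(true);
        const bool ok2 = readStaticInfo(false);
        std::printf("present: %d, absent: %d\n", ok1, ok2);
        return 0;
    }
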
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 37a7a4f..692f700 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -687,6 +687,11 @@
                 const MediaCodecsXmlParser::AttributeMap &attrMap = typeIt->second;
                 std::unique_ptr<MediaCodecInfo::CapabilitiesWriter> caps =
                     codecInfo->addMediaType(mediaType.c_str());
+
+                // we could detect tunneled playback via the playback interface, but we never did
+                // that for the advertised feature, so for now use only the advertised feature.
+                bool canDoTunneledPlayback = false;
+
                 for (const auto &v : attrMap) {
                     std::string key = v.first;
                     std::string value = v.second;
@@ -707,6 +712,11 @@
                         // Ignore trailing bad characters and default to 0.
                         (void)sscanf(value.c_str(), "%d", &intValue);
                         caps->addDetail(key.c_str(), intValue);
+
+                        if (key.compare(
+                                MediaCodecInfo::Capabilities::FEATURE_TUNNELED_PLAYBACK) == 0) {
+                            canDoTunneledPlayback = true;
+                        }
                     } else {
                         caps->addDetail(key.c_str(), value.c_str());
                     }
@@ -774,6 +784,17 @@
                         }
                     }
                 }
+
+                if (android::media::codec::provider_->null_output_surface_support() &&
+                        android::media::codec::provider_->null_output_surface()) {
+                    // all non-tunneled video decoders support detached surface mode
+                    if (trait.kind == C2Component::KIND_DECODER &&
+                            trait.domain == C2Component::DOMAIN_VIDEO &&
+                            !canDoTunneledPlayback) {
+                        caps->addDetail(
+                                MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE, 0);
+                    }
+                }
             }
         }
     }
diff --git a/media/libaaudio/examples/input_monitor/Android.bp b/media/libaaudio/examples/input_monitor/Android.bp
index 72adfd7..fc55290 100644
--- a/media/libaaudio/examples/input_monitor/Android.bp
+++ b/media/libaaudio/examples/input_monitor/Android.bp
@@ -11,7 +11,10 @@
     name: "input_monitor",
     gtest: false,
     srcs: ["src/input_monitor.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
@@ -20,7 +23,10 @@
     name: "input_monitor_callback",
     gtest: false,
     srcs: ["src/input_monitor_callback.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
diff --git a/media/libaaudio/examples/loopback/Android.bp b/media/libaaudio/examples/loopback/Android.bp
index b18aeec..bde1024 100644
--- a/media/libaaudio/examples/loopback/Android.bp
+++ b/media/libaaudio/examples/loopback/Android.bp
@@ -11,13 +11,16 @@
     name: "aaudio_loopback",
     gtest: false,
     srcs: ["src/loopback.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     static_libs: ["libsndfile"],
     include_dirs: ["external/oboe/apps/OboeTester/app/src/main/cpp"],
     shared_libs: [
         "libaaudio",
         "libaudioutils",
-        "liblog"
-        ],
+        "liblog",
+    ],
     header_libs: ["libaaudio_example_utils"],
 }
diff --git a/media/libaaudio/examples/write_sine/Android.bp b/media/libaaudio/examples/write_sine/Android.bp
index 1c7e0f1..70b1764 100644
--- a/media/libaaudio/examples/write_sine/Android.bp
+++ b/media/libaaudio/examples/write_sine/Android.bp
@@ -10,7 +10,10 @@
 cc_test {
     name: "write_sine",
     srcs: ["src/write_sine.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
@@ -18,7 +21,10 @@
 cc_test {
     name: "write_sine_callback",
     srcs: ["src/write_sine_callback.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index 1f6f950..8aaa4a0 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -91,13 +91,10 @@
     cflags: [
         "-Wall",
         "-Werror",
-        // By default, all symbols are hidden.
-
-        // "-fvisibility=hidden",
-        // AAUDIO_API is used to explicitly export a function or a variable as a visible symbol.
         "-Wno-unused-parameter",
         "-Wthread-safety",
 
+        // AAUDIO_API is used to explicitly export a function or a variable as a visible symbol.
         "-DAAUDIO_API=__attribute__((visibility(\"default\")))",
     ],
 
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index f865603..2447b18 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -478,15 +478,21 @@
                 __func__, ret.desc.toString().c_str());
         return NO_INIT;
     }
-    *outStream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
+    auto stream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
             std::move(ret.stream), mVendorExt, this /*callbackBroker*/);
-    void* cbCookie = (*outStream).get();
+    *outStream = stream;
+    /* StreamOutHalInterface* */ void* cbCookie = (*outStream).get();
     {
         std::lock_guard l(mLock);
         mCallbacks.emplace(cbCookie, Callbacks{});
         mMapper.addStream(*outStream, mixPortConfig.id, aidlPatch.id);
     }
-    if (streamCb) streamCb->setCookie(cbCookie);
+    if (streamCb) {
+        streamCb->setCookie(cbCookie);
+        // Although StreamOutHalAidl implements StreamOutHalInterfaceCallback,
+        // we always go via the CallbackBroker for consistency.
+        setStreamOutCallback(cbCookie, stream);
+    }
     eventCb->setCookie(cbCookie);
     cleanups.disarmAll();
     return OK;
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index ce56d87..0af164f 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -82,7 +82,12 @@
           mConfig(configToBase(config)),
           mContext(std::move(context)),
           mStream(stream),
-          mVendorExt(vext) {
+          mVendorExt(vext),
+          mLastReplyLifeTimeNs(
+                  std::min(static_cast<size_t>(100),
+                          2 * mContext.getBufferDurationMs(mConfig.sample_rate))
+                  * NANOS_PER_MILLISECOND)
+{
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     {
         std::lock_guard l(mLock);
@@ -285,8 +290,7 @@
     ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
-    // TODO: switch to updateCountersIfNeeded once we sort out mWorkerTid initialization
-    RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), &reply, true));
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
     *frames = std::max<int64_t>(0, reply.hardware.frames);
     *timestamp = std::max<int64_t>(0, reply.hardware.timeNs);
     return OK;
@@ -409,6 +413,41 @@
     return statusTFromBinderStatus(mStream->prepareToClose());
 }
 
+void StreamHalAidl::onAsyncTransferReady() {
+    if (auto state = getState(); state == StreamDescriptor::State::TRANSFERRING) {
+        // Retrieve the current state together with position counters unconditionally
+        // to ensure that the state on our side gets updated.
+        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
+                nullptr, true /*safeFromNonWorkerThread */);
+    } else {
+        ALOGW("%s: unexpected onTransferReady in the state %s", __func__, toString(state).c_str());
+    }
+}
+
+void StreamHalAidl::onAsyncDrainReady() {
+    if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
+        // Retrieve the current state together with position counters unconditionally
+        // to ensure that the state on our side gets updated.
+        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
+                nullptr, true /*safeFromNonWorkerThread */);
+    } else {
+        ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
+    }
+}
+
+void StreamHalAidl::onAsyncError() {
+    std::lock_guard l(mLock);
+    if (mLastReply.state == StreamDescriptor::State::IDLE ||
+        mLastReply.state == StreamDescriptor::State::DRAINING ||
+        mLastReply.state == StreamDescriptor::State::TRANSFERRING) {
+        mLastReply.state = StreamDescriptor::State::ERROR;
+        ALOGW("%s: onError received", __func__);
+    } else {
+        ALOGW("%s: unexpected onError in the state %s", __func__,
+                toString(mLastReply.state).c_str());
+    }
+}
+
 status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
                                          struct audio_mmap_buffer_info *info) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
@@ -467,26 +506,31 @@
                 "%s %s: must be invoked from the worker thread (%d)",
                 __func__, command.toString().c_str(), workerTid);
     }
-    if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
-        ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
-        return NOT_ENOUGH_DATA;
-    }
     StreamDescriptor::Reply localReply{};
-    if (reply == nullptr) {
-        reply = &localReply;
-    }
-    if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
-        ALOGE("%s: failed to read from reply MQ, command %s", __func__, command.toString().c_str());
-        return NOT_ENOUGH_DATA;
-    }
     {
-        std::lock_guard l(mLock);
-        // Not every command replies with 'latencyMs' field filled out, substitute the last
-        // returned value in that case.
-        if (reply->latencyMs <= 0) {
-            reply->latencyMs = mLastReply.latencyMs;
+        std::lock_guard l(mCommandReplyLock);
+        if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
+            ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
+            return NOT_ENOUGH_DATA;
         }
-        mLastReply = *reply;
+        if (reply == nullptr) {
+            reply = &localReply;
+        }
+        if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
+            ALOGE("%s: failed to read from reply MQ, command %s",
+                    __func__, command.toString().c_str());
+            return NOT_ENOUGH_DATA;
+        }
+        {
+            std::lock_guard l(mLock);
+            // Not every command replies with 'latencyMs' field filled out, substitute the last
+            // returned value in that case.
+            if (reply->latencyMs <= 0) {
+                reply->latencyMs = mLastReply.latencyMs;
+            }
+            mLastReply = *reply;
+            mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
+        }
     }
     switch (reply->status) {
         case STATUS_OK: return OK;
@@ -502,14 +546,17 @@
 
 status_t StreamHalAidl::updateCountersIfNeeded(
         ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply) {
-    if (mWorkerTid.load(std::memory_order_acquire) == gettid()) {
-        if (const auto state = getState(); state != StreamDescriptor::State::ACTIVE &&
-                state != StreamDescriptor::State::DRAINING &&
-                state != StreamDescriptor::State::TRANSFERRING) {
-            return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), reply);
-        }
+    bool doUpdate = false;
+    {
+        std::lock_guard l(mLock);
+        doUpdate = uptimeNanos() > mLastReplyExpirationNs;
     }
-    if (reply != nullptr) {
+    if (doUpdate) {
+        // Since updates are paced, it is OK to perform them from any thread, they should
+        // not interfere with I/O operations of the worker.
+        return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
+                reply, true /*safeFromNonWorkerThread */);
+    } else if (reply != nullptr) {  // provide cached reply
         std::lock_guard l(mLock);
         *reply = mLastReply;
     }
@@ -545,7 +592,7 @@
 
 StreamOutHalAidl::~StreamOutHalAidl() {
     if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
-        broker->clearCallbacks(this);
+        broker->clearCallbacks(static_cast<StreamOutHalInterface*>(this));
     }
 }
 
@@ -614,21 +661,14 @@
 }
 
 status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
+    ALOGD("%p %s", this, __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (!mContext.isAsynchronous()) {
         ALOGE("%s: the callback is intended for asynchronous streams only", __func__);
         return INVALID_OPERATION;
     }
-    if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
-        if (auto cb = callback.promote(); cb != nullptr) {
-            broker->setStreamOutCallback(this, cb);
-        } else {
-            // It is expected that the framework never passes a null pointer.
-            // In the AIDL model callbacks can't be "unregistered".
-            LOG_ALWAYS_FATAL("%s: received an expired or null callback pointer", __func__);
-        }
-    }
+    mClientCallback = callback;
     return OK;
 }
 
@@ -751,7 +791,7 @@
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
-        broker->setStreamOutEventCallback(this, callback);
+        broker->setStreamOutEventCallback(static_cast<StreamOutHalInterface*>(this), callback);
     }
     return OK;
 }
@@ -785,7 +825,8 @@
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
-        broker->setStreamOutLatencyModeCallback(this, callback);
+        broker->setStreamOutLatencyModeCallback(
+                static_cast<StreamOutHalInterface*>(this), callback);
     }
     return OK;
 };
@@ -794,6 +835,27 @@
     return StreamHalAidl::exit();
 }
 
+void StreamOutHalAidl::onWriteReady() {
+    onAsyncTransferReady();
+    if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
+        clientCb->onWriteReady();
+    }
+}
+
+void StreamOutHalAidl::onDrainReady() {
+    onAsyncDrainReady();
+    if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
+        clientCb->onDrainReady();
+    }
+}
+
+void StreamOutHalAidl::onError() {
+    onAsyncError();
+    if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
+        clientCb->onError();
+    }
+}
+
 status_t StreamOutHalAidl::filterAndUpdateOffloadMetadata(AudioParameter &parameters) {
     TIME_CHECK();
     bool updateMetadata = false;
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index 4acc6ac..b20eb00 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -32,6 +32,7 @@
 #include <media/audiohal/StreamHalInterface.h>
 #include <media/AidlConversionUtil.h>
 #include <media/AudioParameter.h>
+#include <mediautils/Synchronization.h>
 
 #include "ConversionHelperAidl.h"
 #include "StreamPowerLog.h"
@@ -93,6 +94,9 @@
     }
     size_t getBufferSizeBytes() const { return mFrameSizeBytes * mBufferSizeFrames; }
     size_t getBufferSizeFrames() const { return mBufferSizeFrames; }
+    size_t getBufferDurationMs(int32_t sampleRate) const {
+        return sampleRate != 0 ? mBufferSizeFrames * MILLIS_PER_SECOND / sampleRate : 0;
+    }
     CommandMQ* getCommandMQ() const { return mCommandMQ.get(); }
     DataMQ* getDataMQ() const { return mDataMQ.get(); }
     size_t getFrameSizeBytes() const { return mFrameSizeBytes; }
@@ -232,9 +236,22 @@
 
     status_t exit();
 
+    void onAsyncTransferReady();
+    void onAsyncDrainReady();
+    void onAsyncError();
+
     const bool mIsInput;
     const audio_config_base_t mConfig;
     const StreamContextAidl mContext;
+    // This lock is used to make sending of a command and receiving a reply an atomic
+    // operation. Otherwise, when two threads are trying to send a command, they may both advance to
+    // reading of the reply once the HAL has consumed the command from the MQ, and that creates a
+    // race condition between them.
+    //
+    // Note that only access to command and reply MQs needs to be protected because the data MQ is
+    // only accessed by the I/O thread. Also, there is no need to protect lookup operations on the
+    // queues as they are thread-safe, only send/receive operation must be protected.
+    std::mutex mCommandReplyLock;
 
   private:
     static audio_config_base_t configToBase(const audio_config& config) {
@@ -248,6 +265,8 @@
         std::lock_guard l(mLock);
         return mLastReply.state;
     }
+    // Note: Since `sendCommand` takes mLock while holding mCommandReplyLock, never call
+    // it with `mLock` being held.
     status_t sendCommand(
             const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
             ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
@@ -257,8 +276,10 @@
 
     const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> mStream;
     const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
+    const int64_t mLastReplyLifeTimeNs;
     std::mutex mLock;
     ::aidl::android::hardware::audio::core::StreamDescriptor::Reply mLastReply GUARDED_BY(mLock);
+    int64_t mLastReplyExpirationNs GUARDED_BY(mLock) = 0;
     // mStreamPowerLog is used for audio signal power logging.
     StreamPowerLog mStreamPowerLog;
     std::atomic<pid_t> mWorkerTid = -1;
@@ -266,7 +287,9 @@
 
 class CallbackBroker;
 
-class StreamOutHalAidl : public StreamOutHalInterface, public StreamHalAidl {
+class StreamOutHalAidl : public virtual StreamOutHalInterface,
+                         public virtual StreamOutHalInterfaceCallback,
+                         public StreamHalAidl {
   public:
     // Extract the output stream parameters and set by AIDL APIs.
     status_t setParameters(const String8& kvPairs) override;
@@ -344,6 +367,11 @@
 
     status_t exit() override;
 
+    // StreamOutHalInterfaceCallback
+    void onWriteReady() override;
+    void onDrainReady() override;
+    void onError() override;
+
   private:
     friend class sp<StreamOutHalAidl>;
 
@@ -352,6 +380,7 @@
 
     const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut> mStream;
     const wp<CallbackBroker> mCallbackBroker;
+    mediautils::atomic_wp<StreamOutHalInterfaceCallback> mClientCallback;
 
     AudioOffloadMetadata mOffloadMetadata;
 
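
The StreamHalAidl changes above pace status queries: the last reply is cached and refreshed only after a lifetime of min(100 ms, twice the buffer duration), so position reads from arbitrary threads do not flood the HAL. A simplified standalone sketch of that pacing, using std::chrono in place of uptimeNanos() and a counter in place of the getStatus command:

    #include <algorithm>
    #include <chrono>
    #include <cstdio>
    #include <mutex>
    #include <thread>

    using namespace std::chrono;

    class PacedStatus {
      public:
        explicit PacedStatus(int bufferDurationMs)
            : mLifeTime(milliseconds(std::min(100, 2 * bufferDurationMs))) {}

        // Returns the cached value unless its lifetime has expired.
        int get() {
            std::lock_guard<std::mutex> l(mLock);
            const auto now = steady_clock::now();
            if (now > mExpiration) {
                mCached = queryHal();            // stand-in for the getStatus command
                mExpiration = now + mLifeTime;
            }
            return mCached;
        }

      private:
        int queryHal() { return ++mQueries; }    // counts real queries for the demo

        const steady_clock::duration mLifeTime;
        std::mutex mLock;
        steady_clock::time_point mExpiration{};  // epoch, so the first get() queries
        int mCached = 0;
        int mQueries = 0;
    };

    int main() {
        PacedStatus status(20 /* ms of buffer -> lifetime = min(100, 40) = 40 ms */);
        for (int i = 0; i < 5; ++i) {
            std::printf("status=%d\n", status.get());  // only the first call hits the "HAL"
            std::this_thread::sleep_for(milliseconds(5));
        }
        return 0;
    }
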
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
index da28204..c2aa278 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
@@ -147,8 +147,12 @@
                 return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
             }
             default: {
-                ALOGE("%s %d invalid command %u", __func__, __LINE__, command);
-                return BAD_VALUE;
+                // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+                VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                        aidl::android::legacy2aidl_EffectParameterReader_VendorExtension(param));
+                aidlParam =
+                        MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer, vendor, ext);
+                break;
             }
         }
     } else {
@@ -175,17 +179,19 @@
                 if (!range) {
                     return BAD_VALUE;
                 }
+                std::vector<Spatialization::Level> levels;
                 for (const auto level : ::ndk::enum_range<Spatialization::Level>()) {
                     const auto spatializer =
                             Spatializer::make<Spatializer::spatializationLevel>(level);
                     if (spatializer >= range->min && spatializer <= range->max) {
-                        if (status_t status = param.writeToValue(&level); status != OK) {
-                            ALOGW("%s %d: write level %s to value failed %d", __func__, __LINE__,
-                                  toString(level).c_str(), status);
-                            return status;
-                        }
+                        levels.emplace_back(level);
                     }
                 }
+                const uint8_t num = levels.size();
+                RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+                for (const auto level : levels) {
+                    RETURN_STATUS_IF_ERROR(param.writeToValue(&level));
+                }
                 return OK;
             }
             case SPATIALIZER_PARAM_LEVEL: {
@@ -234,15 +240,14 @@
                 const auto& supportedLayouts = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
                         aidlParam, Spatializer, spatializer, Spatializer::supportedChannelLayout,
                         std::vector<AudioChannelLayout>));
+                // audio_channel_mask_t is uint32_t enum, write number in 32bit
+                const uint32_t num = supportedLayouts.size();
+                RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
                 for (const auto& layout : supportedLayouts) {
                     audio_channel_mask_t mask = VALUE_OR_RETURN_STATUS(
                             ::aidl::android::aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
                                     layout, false /* isInput */));
-                    if (status_t status = param.writeToValue(&mask); status != OK) {
-                        ALOGW("%s %d: write mask %s to value failed %d", __func__, __LINE__,
-                              layout.toString().c_str(), status);
-                        return status;
-                    }
+                    RETURN_STATUS_IF_ERROR(param.writeToValue(&mask));
                 }
                 return OK;
             }
@@ -252,17 +257,19 @@
                 if (!range) {
                     return BAD_VALUE;
                 }
+                std::vector<Spatialization::Mode> modes;
                 for (const auto mode : ::ndk::enum_range<Spatialization::Mode>()) {
                     if (const auto spatializer =
                                 Spatializer::make<Spatializer::spatializationMode>(mode);
                         spatializer >= range->min && spatializer <= range->max) {
-                        if (status_t status = param.writeToValue(&mode); status != OK) {
-                            ALOGW("%s %d: write mode %s to value failed %d", __func__, __LINE__,
-                                  toString(mode).c_str(), status);
-                            return status;
-                        }
+                        modes.emplace_back(mode);
                     }
                 }
+                const uint8_t num = modes.size();
+                RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+                for (const auto mode : modes) {
+                    RETURN_STATUS_IF_ERROR(param.writeToValue(&mode));
+                }
                 return OK;
             }
             case SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION: {
@@ -271,17 +278,19 @@
                 if (!range) {
                     return BAD_VALUE;
                 }
+                std::vector<HeadTracking::ConnectionMode> modes;
                 for (const auto mode : ::ndk::enum_range<HeadTracking::ConnectionMode>()) {
                     if (const auto spatializer =
                                 Spatializer::make<Spatializer::headTrackingConnectionMode>(mode);
                         spatializer < range->min || spatializer > range->max) {
                         continue;
                     }
-                    if (status_t status = param.writeToValue(&mode); status != OK) {
-                        ALOGW("%s %d: write mode %s to value failed %d", __func__, __LINE__,
-                              toString(mode).c_str(), status);
-                        return status;
-                    }
+                    modes.emplace_back(mode);
+                }
+                const uint8_t num = modes.size();
+                RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+                for (const auto mode : modes) {
+                    RETURN_STATUS_IF_ERROR(param.writeToValue(&mode));
                 }
                 return OK;
             }
@@ -317,8 +325,7 @@
                 return OK;
             }
             default: {
-                ALOGE("%s %d invalid command %u", __func__, __LINE__, command);
-                return BAD_VALUE;
+                VENDOR_EXTENSION_GET_AND_RETURN(Spatializer, spatializer, param);
             }
         }
     } else {
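
The spatializer conversion above now collects the supported enum values first and writes a count followed by the values, instead of writing matches one by one. A standalone sketch of that count-then-values layout using a plain byte vector in place of EffectParamWriter (names here are illustrative):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Append a trivially copyable value to a byte buffer.
    template <typename T>
    void writeToValue(std::vector<uint8_t>* buf, const T& v) {
        const auto* p = reinterpret_cast<const uint8_t*>(&v);
        buf->insert(buf->end(), p, p + sizeof(T));
    }

    int main() {
        // Pretend these are the spatialization levels that fell inside the
        // capability range reported by the effect.
        const std::vector<uint8_t> supportedLevels = {0 /* NONE */, 1 /* MULTICHANNEL */};

        std::vector<uint8_t> reply;
        const uint8_t num = static_cast<uint8_t>(supportedLevels.size());
        writeToValue(&reply, num);                  // count first...
        for (const auto level : supportedLevels) {
            writeToValue(&reply, level);            // ...then each supported value
        }

        std::printf("reply has %zu bytes, first byte (count) = %d\n",
                    reply.size(), static_cast<int>(reply[0]));
        return 0;
    }
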
diff --git a/media/libaudioprocessing/tests/fuzzer/Android.bp b/media/libaudioprocessing/tests/fuzzer/Android.bp
index 8fb6fff..e4780cf 100644
--- a/media/libaudioprocessing/tests/fuzzer/Android.bp
+++ b/media/libaudioprocessing/tests/fuzzer/Android.bp
@@ -8,23 +8,23 @@
 }
 
 cc_fuzz {
-  name: "libaudioprocessing_resampler_fuzzer",
-  srcs: [
-    "libaudioprocessing_resampler_fuzzer.cpp",
-  ],
-  defaults: ["libaudioprocessing_test_defaults"],
-  static_libs: [
-    "libsndfile",
-  ],
+    name: "libaudioprocessing_resampler_fuzzer",
+    srcs: [
+        "libaudioprocessing_resampler_fuzzer.cpp",
+    ],
+    defaults: ["libaudioprocessing_test_defaults"],
+    static_libs: [
+        "libsndfile",
+    ],
 }
 
 cc_fuzz {
-  name: "libaudioprocessing_record_buffer_converter_fuzzer",
-  srcs: [
-    "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
-  ],
-  defaults: ["libaudioprocessing_test_defaults"],
-  static_libs: [
-    "libsndfile",
-  ],
+    name: "libaudioprocessing_record_buffer_converter_fuzzer",
+    srcs: [
+        "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
+    ],
+    defaults: ["libaudioprocessing_test_defaults"],
+    static_libs: [
+        "libsndfile",
+    ],
 }
diff --git a/media/libeffects/downmix/tests/Android.bp b/media/libeffects/downmix/tests/Android.bp
index 392a6fa..8cecbe2 100644
--- a/media/libeffects/downmix/tests/Android.bp
+++ b/media/libeffects/downmix/tests/Android.bp
@@ -14,7 +14,7 @@
 //
 // Use "atest downmix_tests" to run.
 cc_test {
-    name:"downmix_tests",
+    name: "downmix_tests",
     gtest: true,
     host_supported: true,
     vendor: true,
@@ -45,7 +45,7 @@
 // test application and outputs then compares files in a local directory
 // on device (/data/local/tmp/downmixtest/).
 cc_test {
-    name:"downmixtest",
+    name: "downmixtest",
     host_supported: false,
     proprietary: true,
 
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 0568fbd..9bb3264 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -12,7 +12,7 @@
 cc_test {
     name: "EffectReverbTest",
     defaults: [
-      "libeffects-test-defaults",
+        "libeffects-test-defaults",
     ],
     srcs: [
         "EffectReverbTest.cpp",
@@ -29,7 +29,7 @@
 cc_test {
     name: "EffectBundleTest",
     defaults: [
-      "libeffects-test-defaults",
+        "libeffects-test-defaults",
     ],
     srcs: [
         "EffectBundleTest.cpp",
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index 90406e9..fff2feb 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -564,10 +564,40 @@
 
 RetCode BundleContext::setForcedDevice(
         const ::aidl::android::media::audio::common::AudioDeviceDescription& device) {
-    RETURN_VALUE_IF(true != isDeviceSupportedVirtualizer({device}), RetCode::ERROR_EFFECT_LIB_ERROR,
-                    " deviceNotSupportVirtualizer");
-    mForceDevice = device;
-    return RetCode::SUCCESS;
+    RetCode ret = RetCode::SUCCESS;
+    bool enableVirtualizer = mType == lvm::BundleEffectType::VIRTUALIZER && mEnabled;
+
+    if (isDeviceSupportedVirtualizer({device})) {
+        mVirtualizerForcedDevice = device;
+    } else {
+        // disabling forced virtualization mode
+        AudioDeviceDescription noneDevice;
+        if (device != noneDevice) {
+            // device is not supported, make it behave as a reset of forced mode but return an error
+            ret = RetCode::ERROR_ILLEGAL_PARAMETER;
+        }
+        // verify whether the virtualization should be enabled or disabled
+        if (!isDeviceSupportedVirtualizer(mOutputDevice)) {
+            enableVirtualizer = false;
+        }
+        mVirtualizerForcedDevice = noneDevice;
+    }
+
+    if (enableVirtualizer) {
+        if (mVirtualizerTempDisabled) {
+            LOG(VERBOSE) << __func__ << " re-enable virtualizer";
+            enableOperatingMode();
+            mVirtualizerTempDisabled = false;
+        }
+    } else {
+        if (!mVirtualizerTempDisabled) {
+            LOG(VERBOSE) << __func__ << " disable virtualizer";
+            disableOperatingMode();
+            mVirtualizerTempDisabled = true;
+        }
+    }
+
+    return ret;
 }
 
 RetCode BundleContext::initControlParameter(LVM_ControlParams_t& params) const {
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index ba3997a..044c8dd 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -76,7 +76,7 @@
     RetCode setForcedDevice(
             const ::aidl::android::media::audio::common::AudioDeviceDescription& device);
     aidl::android::media::audio::common::AudioDeviceDescription getForcedDevice() const {
-        return mForceDevice;
+        return mVirtualizerForcedDevice;
     }
     std::vector<Virtualizer::ChannelAngle> getSpeakerAngles(
             const Virtualizer::SpeakerAnglesPayload payload);
@@ -93,8 +93,6 @@
     bool mEnabled = false;
     LVM_Handle_t mInstance;
 
-    aidl::android::media::audio::common::AudioDeviceDescription mVirtualizerForcedDevice;
-
     int mSamplesPerSecond = 0;
     int mSamplesToExitCountEq = 0;
     int mSamplesToExitCountBb = 0;
@@ -120,7 +118,7 @@
     // Virtualizer
     int mVirtStrengthSaved = 0; /* Conversion between Get/Set */
     bool mVirtualizerTempDisabled = false;
-    ::aidl::android::media::audio::common::AudioDeviceDescription mForceDevice;
+    ::aidl::android::media::audio::common::AudioDeviceDescription mVirtualizerForcedDevice;
     // Volume
     float mLevelSaveddB = 0; /* for when mute is set, level must be saved */
     float mVolumedB = 0;
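
The BundleContext change above makes setForcedDevice() treat an unsupported device as a reset of the forced mode (while still returning an error) and then enables or temporarily disables the virtualizer based on what the routed output device supports. A condensed sketch of that decision flow, with booleans standing in for the device checks:

    #include <cstdio>

    struct Decision {
        bool virtualizerOn;
        bool error;
    };

    // forcedSupported: the requested forced device supports virtualization.
    // forcedIsNone:    the request is "no forced device" (a reset).
    // outputSupported: the currently routed output device supports virtualization.
    // effectEnabled:   the virtualizer effect itself is enabled.
    Decision applyForcedDevice(bool forcedSupported, bool forcedIsNone,
                               bool outputSupported, bool effectEnabled) {
        Decision d{effectEnabled, false};
        if (forcedSupported) {
            // Forced device accepted: keep the virtualizer running if enabled.
            return d;
        }
        if (!forcedIsNone) {
            // An unsupported device behaves like a reset of the forced mode,
            // but the caller is told the parameter was illegal.
            d.error = true;
        }
        // No forced device any more: fall back to what the output device allows.
        d.virtualizerOn = effectEnabled && outputSupported;
        return d;
    }

    int main() {
        const Decision d = applyForcedDevice(/*forcedSupported=*/false, /*forcedIsNone=*/false,
                                             /*outputSupported=*/false, /*effectEnabled=*/true);
        std::printf("virtualizerOn=%d error=%d\n", d.virtualizerOn, d.error);
        return 0;
    }
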
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 67518af..44ea2a4 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -353,45 +353,62 @@
         return status;
     }
 
-    std::vector<float> inFrames(samples);
-    std::vector<float> outFrames(frameCount * FCC_2);
+    std::vector<float> inputSamples;
+    std::vector<float> outputSamples(frameCount * FCC_2);
 
     if (isPreset() && mNextPreset != mPreset) {
         loadPreset();
     }
 
     if (isAuxiliary()) {
-        inFrames.assign(in, in + samples);
+        inputSamples.resize(samples);
+        inputSamples.assign(in, in + samples);
     } else {
-        // mono input is duplicated
+        // Resizing to stereo is required to duplicate mono input
+        inputSamples.resize(frameCount * FCC_2);
         if (channels >= FCC_2) {
             for (int i = 0; i < frameCount; i++) {
-                inFrames[FCC_2 * i] = in[channels * i] * kSendLevel;
-                inFrames[FCC_2 * i + 1] = in[channels * i + 1] * kSendLevel;
+                inputSamples[FCC_2 * i] = in[channels * i] * kSendLevel;
+                inputSamples[FCC_2 * i + 1] = in[channels * i + 1] * kSendLevel;
             }
         } else {
             for (int i = 0; i < frameCount; i++) {
-                inFrames[FCC_2 * i] = inFrames[FCC_2 * i + 1] = in[i] * kSendLevel;
+                inputSamples[FCC_2 * i] = inputSamples[FCC_2 * i + 1] = in[i] * kSendLevel;
             }
         }
     }
 
     if (isPreset() && mPreset == PresetReverb::Presets::NONE) {
-        std::fill(outFrames.begin(), outFrames.end(), 0);  // always stereo here
+        std::fill(outputSamples.begin(), outputSamples.end(), 0);  // always stereo here
     } else {
         if (!mEnabled && mSamplesToExitCount > 0) {
-            std::fill(outFrames.begin(), outFrames.end(), 0);
+            std::fill(outputSamples.begin(), outputSamples.end(), 0);
         }
+        int inputBufferIndex = 0;
+        int outputBufferIndex = 0;
+
+        // LVREV library supports max of int16_t frames at a time
+        constexpr int kMaxBlockFrames = std::numeric_limits<int16_t>::max();
+        const auto inputFrameSize = getInputFrameSize();
+        const auto outputFrameSize = getOutputFrameSize();
 
         /* Process the samples, producing a stereo output */
-        LVREV_ReturnStatus_en lvrevStatus =
-                LVREV_Process(mInstance,        /* Instance handle */
-                              inFrames.data(),  /* Input buffer */
-                              outFrames.data(), /* Output buffer */
-                              frameCount);      /* Number of samples to read */
-        if (lvrevStatus != LVREV_SUCCESS) {
-            LOG(ERROR) << __func__ << " LVREV_Process error: " << lvrevStatus;
-            return {EX_UNSUPPORTED_OPERATION, 0, 0};
+        for (int fc = frameCount; fc > 0;) {
+            int processFrames = std::min(fc, kMaxBlockFrames);
+            LVREV_ReturnStatus_en lvrevStatus =
+                    LVREV_Process(mInstance,                            /* Instance handle */
+                                  inputSamples.data() + inputBufferIndex,   /* Input buffer */
+                                  outputSamples.data() + outputBufferIndex, /* Output buffer */
+                                  processFrames); /* Number of samples to process */
+            if (lvrevStatus != LVREV_SUCCESS) {
+                LOG(ERROR) << __func__ << " LVREV_Process error: " << lvrevStatus;
+                return {EX_UNSUPPORTED_OPERATION, 0, 0};
+            }
+
+            fc -= processFrames;
+
+            inputBufferIndex += processFrames * inputFrameSize / sizeof(float);
+            outputBufferIndex += processFrames * outputFrameSize / sizeof(float);
         }
     }
     // Convert to 16 bits
@@ -401,14 +418,14 @@
         if (channels >= FCC_2) {
             for (int i = 0; i < frameCount; i++) {
                 // Mix with dry input
-                outFrames[FCC_2 * i] += in[channels * i];
-                outFrames[FCC_2 * i + 1] += in[channels * i + 1];
+                outputSamples[FCC_2 * i] += in[channels * i];
+                outputSamples[FCC_2 * i + 1] += in[channels * i + 1];
             }
         } else {
             for (int i = 0; i < frameCount; i++) {
                 // Mix with dry input
-                outFrames[FCC_2 * i] += in[i];
-                outFrames[FCC_2 * i + 1] += in[i];
+                outputSamples[FCC_2 * i] += in[i];
+                outputSamples[FCC_2 * i + 1] += in[i];
             }
         }
 
@@ -420,8 +437,8 @@
             float incr = (mVolume.right - vr) / frameCount;
 
             for (int i = 0; i < frameCount; i++) {
-                outFrames[FCC_2 * i] *= vl;
-                outFrames[FCC_2 * i + 1] *= vr;
+                outputSamples[FCC_2 * i] *= vl;
+                outputSamples[FCC_2 * i + 1] *= vr;
 
                 vl += incl;
                 vr += incr;
@@ -430,8 +447,8 @@
         } else if (volumeMode != VOLUME_OFF) {
             if (mVolume.left != kUnitVolume || mVolume.right != kUnitVolume) {
                 for (int i = 0; i < frameCount; i++) {
-                    outFrames[FCC_2 * i] *= mVolume.left;
-                    outFrames[FCC_2 * i + 1] *= mVolume.right;
+                    outputSamples[FCC_2 * i] *= mVolume.left;
+                    outputSamples[FCC_2 * i + 1] *= mVolume.right;
                 }
             }
             mPrevVolume = mVolume;
@@ -441,8 +458,8 @@
 
     if (outChannels > 2) {
         for (int i = 0; i < frameCount; i++) {
-            out[outChannels * i] = outFrames[FCC_2 * i];
-            out[outChannels * i + 1] = outFrames[FCC_2 * i + 1];
+            out[outChannels * i] = outputSamples[FCC_2 * i];
+            out[outChannels * i + 1] = outputSamples[FCC_2 * i + 1];
         }
         if (!isAuxiliary()) {
             for (int i = 0; i < frameCount; i++) {
@@ -454,10 +471,10 @@
         }
     } else {
         if (outChannels == FCC_1) {
-            From2iToMono_Float(outFrames.data(), out, frameCount);
+            From2iToMono_Float(outputSamples.data(), out, frameCount);
         } else {
             for (int i = 0; i < frameCount * FCC_2; i++) {
-                out[i] = outFrames[i];
+                out[i] = outputSamples[i];
             }
         }
     }
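
The reverb change above splits processing into blocks of at most INT16_MAX frames because LVREV_Process() takes a 16-bit frame count. A standalone sketch of that chunking loop, with a copy stub in place of LVREV_Process and a stereo float layout assumed for both buffers:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <limits>
    #include <vector>

    // Stand-in for LVREV_Process: copies `frames` stereo float frames.
    void processBlock(const float* in, float* out, int frames) {
        std::copy(in, in + frames * 2, out);
    }

    void processAll(const std::vector<float>& in, std::vector<float>& out, int frameCount) {
        constexpr int kMaxBlockFrames = std::numeric_limits<int16_t>::max();
        constexpr int kFloatsPerFrame = 2;  // stereo
        int inIndex = 0;
        int outIndex = 0;
        for (int fc = frameCount; fc > 0;) {
            const int processFrames = std::min(fc, kMaxBlockFrames);
            processBlock(in.data() + inIndex, out.data() + outIndex, processFrames);
            fc -= processFrames;
            inIndex += processFrames * kFloatsPerFrame;
            outIndex += processFrames * kFloatsPerFrame;
        }
    }

    int main() {
        const int frameCount = 40000;  // > INT16_MAX, so two blocks are needed
        std::vector<float> in(frameCount * 2, 0.5f), out(frameCount * 2, 0.f);
        processAll(in, out, frameCount);
        std::printf("last sample = %g\n", out.back());
        return 0;
    }
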
diff --git a/media/libeffects/spatializer/tests/Android.bp b/media/libeffects/spatializer/tests/Android.bp
index 704e873..818e094 100644
--- a/media/libeffects/spatializer/tests/Android.bp
+++ b/media/libeffects/spatializer/tests/Android.bp
@@ -12,7 +12,7 @@
 cc_test {
     name: "SpatializerTest",
     defaults: [
-      "libeffects-test-defaults",
+        "libeffects-test-defaults",
     ],
     host_supported: false,
     srcs: [
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 590a7b7..840897f 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -223,7 +223,6 @@
         "com.android.media",
     ],
 
-
     srcs: ["MidiIoWrapper.cpp"],
 
     static_libs: [
@@ -278,6 +277,10 @@
         "libutils",
     ],
 
+    static_libs: [
+        "android.media.codec-aconfig-cc",
+    ],
+
     include_dirs: [
         "system/libhidl/transport/token/1.0/utils/include",
     ],
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
index 86ad997..c45c5c3 100644
--- a/media/libmedia/MediaCodecInfo.cpp
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -36,6 +36,7 @@
 constexpr char MediaCodecInfo::Capabilities::FEATURE_MULTIPLE_FRAMES[];
 constexpr char MediaCodecInfo::Capabilities::FEATURE_SECURE_PLAYBACK[];
 constexpr char MediaCodecInfo::Capabilities::FEATURE_TUNNELED_PLAYBACK[];
+constexpr char MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE[];
 
 void MediaCodecInfo::Capabilities::getSupportedProfileLevels(
         Vector<ProfileLevel> *profileLevels) const {
diff --git a/media/libmedia/include/media/MediaCodecInfo.h b/media/libmedia/include/media/MediaCodecInfo.h
index 54f565a..88a2dc4 100644
--- a/media/libmedia/include/media/MediaCodecInfo.h
+++ b/media/libmedia/include/media/MediaCodecInfo.h
@@ -69,6 +69,7 @@
         constexpr static char FEATURE_MULTIPLE_FRAMES[] = "feature-multiple-frames";
         constexpr static char FEATURE_SECURE_PLAYBACK[] = "feature-secure-playback";
         constexpr static char FEATURE_TUNNELED_PLAYBACK[] = "feature-tunneled-playback";
+        constexpr static char FEATURE_DETACHED_SURFACE[] = "feature-detached-surface";
 
         /**
          * Returns the supported levels for each supported profile in a target array.
diff --git a/media/libmediametrics/Android.bp b/media/libmediametrics/Android.bp
index 8a38dd7..5214dfe 100644
--- a/media/libmediametrics/Android.bp
+++ b/media/libmediametrics/Android.bp
@@ -16,8 +16,8 @@
     name: "libmediametrics",
 
     srcs: [
-        "MediaMetricsItem.cpp",
         "MediaMetrics.cpp",
+        "MediaMetricsItem.cpp",
     ],
 
     shared_libs: [
@@ -40,8 +40,8 @@
 
     sanitize: {
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
         cfi: true,
     },
@@ -50,8 +50,8 @@
     stubs: {
         symbol_file: "libmediametrics.map.txt",
         versions: [
-            "1" ,
-        ]
+            "1",
+        ],
     },
 
     header_abi_checker: {
@@ -65,7 +65,7 @@
         "//frameworks/base/apex/media/framework",
         "//frameworks/base/core/jni",
         "//frameworks/base/media/jni",
-	"//packages/modules/Media/apex/framework",
+        "//packages/modules/Media/apex/framework",
     ],
 }
 
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index f9ceef2..1593aa0 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -21,6 +21,8 @@
 #define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
 #endif
 
+#include <android_media_codec.h>
+
 #include <inttypes.h>
 #include <utils/Trace.h>
 
@@ -9314,6 +9316,12 @@
                 // adaptive playback is not supported
                 caps->removeDetail(MediaCodecInfo::Capabilities::FEATURE_ADAPTIVE_PLAYBACK);
             }
+
+            // all non-tunneled video decoders support detached surface mode
+            if (android::media::codec::provider_->null_output_surface_support() &&
+                    android::media::codec::provider_->null_output_surface()) {
+                caps->addDetail(MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE, 0);
+            }
         }
     }
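
For context, a minimal hedged sketch (not part of this change) of how a framework-side caller could check for the detached-surface capability advertised above. It assumes an sp<MediaCodecInfo> obtained from the codec list and that Capabilities::getDetails() exposes the details message populated by addDetail().

#include <media/MediaCodecInfo.h>
#include <media/stagefright/foundation/AMessage.h>

// Sketch only: returns true if |info| advertises FEATURE_DETACHED_SURFACE for |mediaType|.
// Presence of the detail key indicates support; the value 0 stored by addDetail() above
// only means the feature is not required.
static bool advertisesDetachedSurface(const android::sp<android::MediaCodecInfo>& info,
                                      const char* mediaType) {
    using android::MediaCodecInfo;
    const android::sp<MediaCodecInfo::Capabilities> caps = info->getCapabilitiesFor(mediaType);
    return caps != nullptr && caps->getDetails()->contains(
            MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE);
}
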
 
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 896e021..886285e 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -319,6 +319,7 @@
     ],
 
     static_libs: [
+        "android.media.codec-aconfig-cc",
         "libstagefright_esds",
         "libstagefright_color_conversion",
         "libyuv_static",
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index a18dbfe..e918b5e 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -2436,47 +2436,6 @@
     return OK;
 }
 
-bool MPEG4Writer::isSampleMetadataValid(size_t trackIndex, int64_t timeUs) {
-    // Track Index starts from zero, so it should be at least 1 less than size.
-    if (trackIndex >= mTracks.size()) {
-        ALOGE("Incorrect trackIndex %zu, mTracks->size() %zu", trackIndex, mTracks.size());
-        return false;
-    }
-
-    List<Track *>::iterator it = mTracks.begin();
-
-    // (*it) is already pointing to trackIndex 0.
-    for (int i = 1; i <= trackIndex; i++) {
-        it++;
-    }
-
-    return (*it)->isTimestampValid(timeUs);
-}
-
-bool MPEG4Writer::Track::isTimestampValid(int64_t timeUs) {
-    // No timescale if HEIF
-    if (mIsHeif) {
-        return true;
-    }
-
-    // Make sure abs(timeUs) does not overflow
-    if (timeUs == INT64_MIN) {
-       return false;
-    }
-
-    // Ensure that the timeUs value does not have extremely low or high values
-    // that would cause an underflow or overflow, like in the calculation -
-    // mdhdDuration = (trakDurationUs * mTimeScale + 5E5) / 1E6
-    if (abs(timeUs) >= (INT64_MAX - 5E5) / mTimeScale) {
-        return false;
-    }
-    // Limit check for calculations in ctts box
-    if (abs(timeUs) + kMaxCttsOffsetTimeUs >= INT64_MAX / mTimeScale) {
-        return false;
-    }
-    return true;
-}
-
 bool MPEG4Writer::Track::isExifData(
         MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const {
     if (!mIsHeif) {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e4f3b83..19e51fd 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -30,6 +30,8 @@
 
 #include "include/SoftwareRenderer.h"
 
+#include <android_media_codec.h>
+
 #include <android/api-level.h>
 #include <android/content/pm/IPackageManagerNative.h>
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
@@ -3017,6 +3019,13 @@
     return PostAndAwaitResponse(msg, &response);
 }
 
+status_t MediaCodec::detachOutputSurface() {
+    sp<AMessage> msg = new AMessage(kWhatDetachSurface, this);
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
 status_t MediaCodec::setSurface(const sp<Surface> &surface) {
     sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
     msg->setObject("surface", surface);
@@ -4676,7 +4685,7 @@
                     }
 
                     mResourceManagerProxy->removeClient();
-                    mReleaseSurface.reset();
+                    mDetachedSurface.reset();
 
                     if (mReplyID != nullptr) {
                         postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
@@ -4849,6 +4858,23 @@
                 mFlags |= kFlagPushBlankBuffersOnShutdown;
             }
 
+            uint32_t flags;
+            CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+            if (android::media::codec::provider_->null_output_surface_support()) {
+                if (obj == nullptr
+                        && (flags & CONFIGURE_FLAG_DETACHED_SURFACE)
+                        && !(flags & CONFIGURE_FLAG_ENCODE)) {
+                    sp<Surface> surface = getOrCreateDetachedSurface();
+                    if (surface == nullptr) {
+                        mErrorLog.log(
+                                LOG_TAG, "Detached surface mode is not supported by this codec");
+                        PostReplyWithError(replyID, INVALID_OPERATION);
+                    }
+                    obj = surface;
+                }
+            }
+
             if (obj != NULL) {
                 if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
                     // allow frame dropping by surface by default
@@ -4872,8 +4898,6 @@
 
             mApiUsageMetrics.isUsingOutputSurface = true;
 
-            uint32_t flags;
-            CHECK(msg->findInt32("flags", (int32_t *)&flags));
             if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL ||
                 flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
                 if (!(mFlags & kFlagIsAsync)) {
@@ -4994,6 +5018,23 @@
             break;
         }
 
+        case kWhatDetachSurface:
+        {
+            // detach surface is equivalent to setSurface(mDetachedSurface)
+            sp<Surface> surface = getOrCreateDetachedSurface();
+
+            if (surface == nullptr) {
+                sp<AReplyToken> replyID;
+                CHECK(msg->senderAwaitsResponse(&replyID));
+                mErrorLog.log(LOG_TAG, "Detaching surface is not supported by the codec.");
+                PostReplyWithError(replyID, INVALID_OPERATION);
+                break;
+            }
+
+            msg->setObject("surface", surface);
+        }
+        [[fallthrough]];
+
         case kWhatSetSurface:
         {
             sp<AReplyToken> replyID;
@@ -5011,14 +5052,17 @@
                     sp<Surface> surface = static_cast<Surface *>(obj.get());
                     if (mSurface == NULL) {
                         // do not support setting surface if it was not set
-                        mErrorLog.log(LOG_TAG,
-                                      "Cannot set surface if the codec is not configured with "
-                                      "a surface already");
+                        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                                      "Cannot %s surface if the codec is not configured with "
+                                      "a surface already",
+                                      msg->what() == kWhatDetachSurface ? "detach" : "set"));
                         err = INVALID_OPERATION;
                     } else if (obj == NULL) {
                         // do not support unsetting surface
                         mErrorLog.log(LOG_TAG, "Unsetting surface is not supported");
                         err = BAD_VALUE;
+                    } else if (android::media::codec::provider_->null_output_surface_support()) {
+                        err = handleSetSurface(surface, true /* callCodec */);
                     } else {
                         uint32_t generation;
                         err = connectToSurface(surface, &generation);
@@ -5052,7 +5096,8 @@
 
                 default:
                     mErrorLog.log(LOG_TAG, base::StringPrintf(
-                            "setSurface() is valid only at Executing states; currently %s",
+                            "%sSurface() is valid only at Executing states; currently %s",
+                            msg->what() == kWhatDetachSurface ? "detach" : "set",
                             apiStateString().c_str()));
                     err = INVALID_OPERATION;
                     break;
@@ -5273,30 +5318,40 @@
 
             bool forceSync = false;
             if (asyncNotify != nullptr && mSurface != NULL) {
-                if (!mReleaseSurface) {
-                    uint64_t usage = 0;
-                    if (mSurface->getConsumerUsage(&usage) != OK) {
-                        usage = 0;
-                    }
-                    mReleaseSurface.reset(new ReleaseSurface(usage));
-                }
-                if (mSurface != mReleaseSurface->getSurface()) {
-                    uint32_t generation;
-                    status_t err = connectToSurface(mReleaseSurface->getSurface(), &generation);
-                    ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
-                    if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
-                        err = mCodec->setSurface(mReleaseSurface->getSurface(), generation);
-                        ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
-                    }
-                    if (err == OK) {
-                        (void)disconnectFromSurface();
-                        mSurface = mReleaseSurface->getSurface();
-                        mSurfaceGeneration = generation;
-                    } else {
-                        // We were not able to switch the surface, so force
+                if (android::media::codec::provider_->null_output_surface_support()) {
+                    if (handleSetSurface(getOrCreateDetachedSurface(), true /* callCodec */,
+                                         true /* onShutDown */) != OK) {
+                        // We were not able to detach the surface, so force
                         // synchronous release.
                         forceSync = true;
                     }
+                } else {
+                    if (!mDetachedSurface) {
+                        uint64_t usage = 0;
+                        if (mSurface->getConsumerUsage(&usage) != OK) {
+                            usage = 0;
+                        }
+                        mDetachedSurface.reset(new ReleaseSurface(usage));
+                    }
+                    if (mSurface != mDetachedSurface->getSurface()) {
+                        uint32_t generation;
+                        status_t err =
+                            connectToSurface(mDetachedSurface->getSurface(), &generation);
+                        ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
+                        if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
+                            err = mCodec->setSurface(mDetachedSurface->getSurface(), generation);
+                            ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
+                        }
+                        if (err == OK) {
+                            (void)disconnectFromSurface();
+                            mSurface = mDetachedSurface->getSurface();
+                            mSurfaceGeneration = generation;
+                        } else {
+                            // We were not able to switch the surface, so force
+                            // synchronous release.
+                            forceSync = true;
+                        }
+                    }
                 }
             }
 
@@ -6591,9 +6646,9 @@
     CHECK_EQ(info, &mPortBuffers[portIndex][index]);
     availBuffers->erase(availBuffers->begin());
 
-    CHECK(!info->mOwnedByClient);
     {
         Mutex::Autolock al(mBufferLock);
+        CHECK(!info->mOwnedByClient);
         info->mOwnedByClient = true;
 
         // set image-data
@@ -6612,6 +6667,23 @@
     return index;
 }
 
+sp<Surface> MediaCodec::getOrCreateDetachedSurface() {
+    if (mDomain != DOMAIN_VIDEO || (mFlags & kFlagIsEncoder)) {
+        return nullptr;
+    }
+
+    if (!mDetachedSurface) {
+        uint64_t usage = 0;
+        if (!mSurface || mSurface->getConsumerUsage(&usage) != OK) {
+            // TODO: should we use a/the default consumer usage?
+            usage = 0;
+        }
+        mDetachedSurface.reset(new ReleaseSurface(usage));
+    }
+
+    return mDetachedSurface->getSurface();
+}
+
 status_t MediaCodec::connectToSurface(const sp<Surface> &surface, uint32_t *generation) {
     status_t err = OK;
     if (surface != NULL) {
@@ -6685,7 +6757,56 @@
     return err;
 }
 
+status_t MediaCodec::handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutDown) {
+    uint32_t generation;
+    status_t err = OK;
+    if (surface != nullptr) {
+        err = connectToSurface(surface, &generation);
+        if (err == ALREADY_EXISTS) {
+            // reconnecting to same surface
+            return OK;
+        }
+
+        if (err == OK && callCodec) {
+            if (mFlags & kFlagUsesSoftwareRenderer) {
+                if (mSoftRenderer != NULL
+                        && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
+                    pushBlankBuffersToNativeWindow(mSurface.get());
+                }
+                // do not create a new software renderer on shutdown (release)
+                // as it will not be used anyway
+                if (!onShutDown) {
+                    surface->setDequeueTimeout(-1);
+                    mSoftRenderer = new SoftwareRenderer(surface);
+                    // TODO: check if this was successful
+                }
+            } else {
+                err = mCodec->setSurface(surface, generation);
+            }
+
+            mReliabilityContextMetrics.setOutputSurfaceCount++;
+        }
+    }
+
+    if (err == OK) {
+        if (mSurface != NULL) {
+            (void)disconnectFromSurface();
+        }
+
+        if (surface != NULL) {
+            mSurface = surface;
+            mSurfaceGeneration = generation;
+        }
+    }
+
+    return err;
+}
+
 status_t MediaCodec::handleSetSurface(const sp<Surface> &surface) {
+    if (android::media::codec::provider_->null_output_surface_support()) {
+        return handleSetSurface(surface, false /* callCodec */);
+    }
+
     status_t err = OK;
     if (mSurface != NULL) {
         (void)disconnectFromSurface();
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 1ff8acf..054a4b8 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -77,9 +77,6 @@
     virtual void setStartTimeOffsetMs(int ms) { mStartTimeOffsetMs = ms; }
     virtual int32_t getStartTimeOffsetMs() const { return mStartTimeOffsetMs; }
     virtual status_t setNextFd(int fd);
-    // Returns true if the timestamp is valid which is compatible with the Mpeg4.
-    // Note that this overloads that method in the base class.
-    bool isSampleMetadataValid(size_t trackIndex, int64_t timeUs) override;
 
 protected:
     virtual ~MPEG4Writer();
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 9ecb12e..7169b1e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -96,6 +96,7 @@
         CONFIGURE_FLAG_ENCODE           = 1,
         CONFIGURE_FLAG_USE_BLOCK_MODEL  = 2,
         CONFIGURE_FLAG_USE_CRYPTO_ASYNC = 4,
+        CONFIGURE_FLAG_DETACHED_SURFACE = 8,
     };
 
     enum BufferFlags {
@@ -274,6 +275,8 @@
 
     status_t setSurface(const sp<Surface> &nativeWindow);
 
+    status_t detachOutputSurface();
+
     status_t requestIDRFrame();
 
     // Notification will be posted once there "is something to do", i.e.
@@ -368,6 +371,7 @@
         kWhatInit                           = 'init',
         kWhatConfigure                      = 'conf',
         kWhatSetSurface                     = 'sSur',
+        kWhatDetachSurface                  = 'dSur',
         kWhatCreateInputSurface             = 'cisf',
         kWhatSetInputSurface                = 'sisf',
         kWhatStart                          = 'strt',
@@ -474,6 +478,10 @@
     uint32_t mSurfaceGeneration = 0;
     SoftwareRenderer *mSoftRenderer;
 
+    // Get the detached BufferQueue surface for a video decoder, creating it
+    // if it does not yet exist.
+    sp<Surface> getOrCreateDetachedSurface();
+
     Mutex mMetricsLock;
     mediametrics_handle_t mMetricsHandle = 0;
     bool mMetricsToUpload = false;
@@ -642,6 +650,13 @@
     status_t queueCSDInputBuffer(size_t bufferIndex);
 
     status_t handleSetSurface(const sp<Surface> &surface);
+
+    // Common implementation for changing the output surface, shared by set and detach.
+    // Handles setting a null surface, which is used during configure and init.
+    // Set |callCodec| to true if the codec needs to be notified (e.g. in the Executing state).
+    // Setting |onShutdown| to true avoids extra work when this is used to detach the
+    // surface on delayed release.
+    status_t handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutdown = false);
     status_t connectToSurface(const sp<Surface> &surface, uint32_t *generation);
     status_t disconnectFromSurface();
 
@@ -714,7 +729,7 @@
     sp<AMessage> mMsgPollForRenderedBuffers;
 
     class ReleaseSurface;
-    std::unique_ptr<ReleaseSurface> mReleaseSurface;
+    std::unique_ptr<ReleaseSurface> mDetachedSurface;
 
     std::list<sp<AMessage>> mLeftover;
     status_t handleLeftover(size_t index);
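
To show how the new API surface fits together, the following is a hedged usage sketch for a framework-side caller; it is not taken from this change. |looper| and |displaySurface| are assumed to be supplied by the caller, and error handling is abbreviated.

#include <gui/Surface.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

static status_t runDetachedSurfaceDecoder(const sp<ALooper>& looper,
                                          const sp<Surface>& displaySurface) {
    status_t err = OK;
    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, "video/avc", false /* encoder */, &err);
    if (codec == nullptr || err != OK) {
        return err != OK ? err : UNKNOWN_ERROR;
    }

    sp<AMessage> format = new AMessage;
    format->setString("mime", "video/avc");
    format->setInt32("width", 1280);
    format->setInt32("height", 720);

    // Configure in surface mode with no surface attached; MediaCodec substitutes its
    // internal detached surface (see getOrCreateDetachedSurface() above).
    err = codec->configure(format, nullptr /* surface */, nullptr /* crypto */,
                           MediaCodec::CONFIGURE_FLAG_DETACHED_SURFACE);
    if (err != OK) return err;
    err = codec->start();
    if (err != OK) return err;

    // Attach a real surface once rendering should begin, then detach it again
    // without reconfiguring or releasing the codec.
    err = codec->setSurface(displaySurface);
    if (err != OK) return err;
    return codec->detachOutputSurface();
}
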
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl
index 0679e41..d428e99 100644
--- a/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl
@@ -52,7 +52,7 @@
     void onInputBufferAdded(int bufferID);
 
     /**
-     * Called from OnEmptyBufferDone. If we have a BQ buffer available,
+     * If we have a BQ buffer available,
      * fill it with a new frame of data; otherwise, just mark it as available.
      *
      * fence contains the fence's fd that the callee should wait on before
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
index 6cac19e..cf880c2 100644
--- a/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
@@ -37,7 +37,6 @@
     }
 
     void freeNode();
-    void sendCommand(int cmd, int param);
     long getConsumerUsage();
     InputBufferParams getInputBufferParams();
     void setConsumerUsage(long usage);
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h
index 4b38294..85de688 100644
--- a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h
@@ -28,7 +28,7 @@
 namespace android::media {
 
 /*
- * This class is used to emulate feed OMX codecs from a Surface via BufferQueue or
+ * This class is used to feed codec encoders from a Surface via BufferQueue or
  * HW producer using AIDL binder interfaces.
  *
  * See media/stagefright/bqhelper/GraphicBufferSource.h for documentation.
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h
index 64de7ae..364efe2 100644
--- a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h
@@ -29,12 +29,4 @@
     BUFFERFLAG_ENDOFFRAME = (1 << 4)
 };
 
-enum C2NodeCommand : int32_t {
-    CommandStateSet = 1
-};
-
-enum C2NodeStatus : int32_t {
-    NodeStatusLoaded = 1
-};
-
 }  // namespace android::media
diff --git a/media/module/bufferpool/1.0/vts/multi.cpp b/media/module/bufferpool/1.0/vts/multi.cpp
index d8cc285..21f47d3 100644
--- a/media/module/bufferpool/1.0/vts/multi.cpp
+++ b/media/module/bufferpool/1.0/vts/multi.cpp
@@ -24,6 +24,7 @@
 #include <hidl/HidlSupport.h>
 #include <hidl/HidlTransportSupport.h>
 #include <hidl/LegacySupport.h>
+#include <hidl/ServiceManagement.h>
 #include <hidl/Status.h>
 #include <signal.h>
 #include <sys/types.h>
@@ -36,6 +37,7 @@
 
 using android::hardware::configureRpcThreadpool;
 using android::hardware::hidl_handle;
+using android::hardware::isHidlSupported;
 using android::hardware::media::bufferpool::V1_0::IClientManager;
 using android::hardware::media::bufferpool::V1_0::ResultStatus;
 using android::hardware::media::bufferpool::V1_0::implementation::BufferId;
@@ -178,6 +180,7 @@
   ResultStatus status;
   PipeMessage message;
 
+  if (!isHidlSupported()) GTEST_SKIP() << "HIDL is not supported on this device";
   ASSERT_TRUE(receiveMessage(mResultPipeFds, &message));
 
   android::sp<IClientManager> receiver = IClientManager::getService();
diff --git a/media/module/codecs/amrwb/enc/Android.bp b/media/module/codecs/amrwb/enc/Android.bp
index 8780136..04f36b5 100644
--- a/media/module/codecs/amrwb/enc/Android.bp
+++ b/media/module/codecs/amrwb/enc/Android.bp
@@ -79,67 +79,31 @@
     arch: {
         arm: {
             srcs: [
-                "src/asm/ARMV5E/convolve_opt.s",
-                "src/asm/ARMV5E/cor_h_vec_opt.s",
-                "src/asm/ARMV5E/Deemph_32_opt.s",
-                "src/asm/ARMV5E/Dot_p_opt.s",
-                "src/asm/ARMV5E/Filt_6k_7k_opt.s",
-                "src/asm/ARMV5E/Norm_Corr_opt.s",
-                "src/asm/ARMV5E/pred_lt4_1_opt.s",
-                "src/asm/ARMV5E/residu_asm_opt.s",
-                "src/asm/ARMV5E/scale_sig_opt.s",
-                "src/asm/ARMV5E/Syn_filt_32_opt.s",
-                "src/asm/ARMV5E/syn_filt_opt.s",
+                "src/asm/ARMV7/convolve_neon.s",
+                "src/asm/ARMV7/cor_h_vec_neon.s",
+                "src/asm/ARMV7/Deemph_32_neon.s",
+                "src/asm/ARMV7/Dot_p_neon.s",
+                "src/asm/ARMV7/Filt_6k_7k_neon.s",
+                "src/asm/ARMV7/Norm_Corr_neon.s",
+                "src/asm/ARMV7/pred_lt4_1_neon.s",
+                "src/asm/ARMV7/residu_asm_neon.s",
+                "src/asm/ARMV7/scale_sig_neon.s",
+                "src/asm/ARMV7/Syn_filt_32_neon.s",
+                "src/asm/ARMV7/syn_filt_neon.s",
             ],
 
             cflags: [
                 "-DARM",
+                "-DARMV7",
                 "-DASM_OPT",
+                // don't actually generate neon instructions, see bug 26932980
+                "-mfpu=vfpv3",
             ],
-            local_include_dirs: ["src/asm/ARMV5E"],
+            local_include_dirs: [
+                "src/asm/ARMV7",
+            ],
 
             instruction_set: "arm",
-
-            neon: {
-                exclude_srcs: [
-                    "src/asm/ARMV5E/convolve_opt.s",
-                    "src/asm/ARMV5E/cor_h_vec_opt.s",
-                    "src/asm/ARMV5E/Deemph_32_opt.s",
-                    "src/asm/ARMV5E/Dot_p_opt.s",
-                    "src/asm/ARMV5E/Filt_6k_7k_opt.s",
-                    "src/asm/ARMV5E/Norm_Corr_opt.s",
-                    "src/asm/ARMV5E/pred_lt4_1_opt.s",
-                    "src/asm/ARMV5E/residu_asm_opt.s",
-                    "src/asm/ARMV5E/scale_sig_opt.s",
-                    "src/asm/ARMV5E/Syn_filt_32_opt.s",
-                    "src/asm/ARMV5E/syn_filt_opt.s",
-                ],
-
-                srcs: [
-                    "src/asm/ARMV7/convolve_neon.s",
-                    "src/asm/ARMV7/cor_h_vec_neon.s",
-                    "src/asm/ARMV7/Deemph_32_neon.s",
-                    "src/asm/ARMV7/Dot_p_neon.s",
-                    "src/asm/ARMV7/Filt_6k_7k_neon.s",
-                    "src/asm/ARMV7/Norm_Corr_neon.s",
-                    "src/asm/ARMV7/pred_lt4_1_neon.s",
-                    "src/asm/ARMV7/residu_asm_neon.s",
-                    "src/asm/ARMV7/scale_sig_neon.s",
-                    "src/asm/ARMV7/Syn_filt_32_neon.s",
-                    "src/asm/ARMV7/syn_filt_neon.s",
-                ],
-
-                // don't actually generate neon instructions, see bug 26932980
-                cflags: [
-                    "-DARMV7",
-                    "-mfpu=vfpv3",
-                ],
-                local_include_dirs: [
-                    "src/asm/ARMV5E",
-                    "src/asm/ARMV7",
-                ],
-            },
-
         },
     },
 
diff --git a/media/module/extractors/mkv/MatroskaExtractor.cpp b/media/module/extractors/mkv/MatroskaExtractor.cpp
index 6900341..f326db1 100644
--- a/media/module/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/module/extractors/mkv/MatroskaExtractor.cpp
@@ -1769,6 +1769,30 @@
 
 }
 
+status_t MatroskaExtractor::synthesizeVP9(TrackInfo* trackInfo, size_t index) {
+    BlockIterator iter(this, trackInfo->mTrackNum, index);
+    if (iter.eos()) {
+        return ERROR_MALFORMED;
+    }
+
+    const mkvparser::Block* block = iter.block();
+    if (block->GetFrameCount() <= 0) {
+        return ERROR_MALFORMED;
+    }
+
+    const mkvparser::Block::Frame& frame = block->GetFrame(0);
+    auto tmpData = heapbuffer<unsigned char>(frame.len);
+    long n = frame.Read(mReader, tmpData.get());
+    if (n != 0) {
+        return ERROR_MALFORMED;
+    }
+
+    if (!MakeVP9CodecSpecificData(trackInfo->mMeta, tmpData.get(), frame.len)) {
+        return ERROR_MALFORMED;
+    }
+
+    return OK;
+}
 
 static inline bool isValidInt32ColourValue(long long value) {
     return value != mkvparser::Colour::kValueNotPresent
@@ -2002,6 +2026,8 @@
                       // specified in http://www.webmproject.org/vp9/profiles/.
                       AMediaFormat_setBuffer(meta,
                              AMEDIAFORMAT_KEY_CSD_0, codecPrivate, codecPrivateSize);
+                    } else {
+                        isSetCsdFrom1stFrame = true;
                     }
                 } else if (!strcmp("V_AV1", codecID)) {
                     AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_VIDEO_AV1);
@@ -2254,6 +2280,13 @@
                 mTracks.pop();
                 continue;
             }
+        } else if ((!strcmp("V_VP9", codecID) && codecPrivateSize == 0) ||
+                   (!strcmp(mimetype, MEDIA_MIMETYPE_VIDEO_VP9) && isSetCsdFrom1stFrame)) {
+            // Attempt to recover from VP9 track without codec private data
+            err = synthesizeVP9(trackInfo, n);
+            if (err != OK) {
+                ALOGW("ignoring error %d in synthesizeVP9", err);
+            }
         }
         // the TrackInfo owns the metadata now
         meta = nullptr;
@@ -2279,6 +2312,8 @@
         int64_t thumbnailTimeUs = 0;
         size_t maxBlockSize = 0;
         while (!iter.eos() && j < 20) {
+            int64_t blockTimeUs = iter.blockTimeUs();
+
             if (iter.block()->IsKey()) {
                 ++j;
 
@@ -2289,9 +2324,13 @@
 
                 if (blockSize > maxBlockSize) {
                     maxBlockSize = blockSize;
-                    thumbnailTimeUs = iter.blockTimeUs();
+                    thumbnailTimeUs = blockTimeUs;
                 }
             }
+            // Exit after 20s if we've already found at least one key frame.
+            if (blockTimeUs > 20000000 && maxBlockSize > 0) {
+                break;
+            }
             iter.advance();
         }
         AMediaFormat_setInt64(info->mMeta,
diff --git a/media/module/extractors/mkv/include/MatroskaExtractor.h b/media/module/extractors/mkv/include/MatroskaExtractor.h
index 99fad17..2e4d955 100644
--- a/media/module/extractors/mkv/include/MatroskaExtractor.h
+++ b/media/module/extractors/mkv/include/MatroskaExtractor.h
@@ -95,6 +95,7 @@
     status_t synthesizeAVCC(TrackInfo *trackInfo, size_t index);
     status_t synthesizeMPEG2(TrackInfo *trackInfo, size_t index);
     status_t synthesizeMPEG4(TrackInfo *trackInfo, size_t index);
+    status_t synthesizeVP9(TrackInfo* trackInfo, size_t index);
     status_t initTrackInfo(
             const mkvparser::Track *track,
             AMediaFormat *meta,
diff --git a/media/module/metadatautils/MetaDataUtils.cpp b/media/module/metadatautils/MetaDataUtils.cpp
index db60f04..0895bb5 100644
--- a/media/module/metadatautils/MetaDataUtils.cpp
+++ b/media/module/metadatautils/MetaDataUtils.cpp
@@ -81,6 +81,177 @@
     return true;
 }
 
+// Check if the next 24 bits are VP9 SYNC_CODE
+static bool isVp9SyncCode(ABitReader &bits) {
+    if (bits.numBitsLeft() < 24) {
+        return false;
+    }
+    return bits.getBits(24) == 0x498342;
+}
+
+// This parses bitdepth and subsampling in a VP9 uncompressed header
+// (refer to bitdepth_colorspace_sampling in section 6.2 of the VP9 bitstream spec).
+static bool getVp9BitdepthChromaSubSampling(ABitReader &bits,
+        int32_t profile,
+        int32_t *bitDepth,
+        int32_t *chromaSubsampling) {
+    if (profile >= 2) {
+        if (bits.numBitsLeft() < 1) {
+            return false;
+        }
+        *bitDepth = bits.getBits(1) ? 12 : 10;
+    } else {
+        *bitDepth = 8;
+    }
+
+    uint32_t colorspace;
+    if (!bits.getBitsGraceful(3, &colorspace)) {
+        return false;
+    }
+
+    *chromaSubsampling = -1;
+    if (colorspace != 7 /*SRGB*/) {
+        // Skip yuv_range_flag
+        if (!bits.skipBits(1)) {
+            return false;
+        }
+        // Check for subsampling only for profiles 1 and 3.
+        if (profile == 1 || profile == 3) {
+            uint32_t ss_x;
+            uint32_t ss_y;
+            if (bits.getBitsGraceful(1, &ss_x) && bits.getBitsGraceful(1, &ss_y)) {
+                *chromaSubsampling = (ss_x << 1) | ss_y;
+            } else {
+                return false;
+            }
+        } else {
+            *chromaSubsampling = 3;
+        }
+    } else {
+        if (profile == 1 || profile == 3) {
+            *chromaSubsampling = 0;
+        }
+    }
+    return true;
+}
+// The param |data| contains the first frame's data, starting with the uncompressed frame
+// header. This uncompressed header (refer to section 6.2 of the VP9 bitstream spec) is
+// used to parse the profile, bitdepth and subsampling.
+bool MakeVP9CodecSpecificData(AMediaFormat* meta, const uint8_t* data, size_t size) {
+    if (meta == nullptr || data == nullptr || size == 0) {
+        return false;
+    }
+
+    ABitReader bits(data, size);
+
+    // First 2 bits of the uncompressed header should be the frame_marker.
+    if (bits.getBits(2) != 0b10) {
+        return false;
+    }
+
+    int32_t profileLowBit = bits.getBits(1);
+    int32_t profileHighBit = bits.getBits(1);
+    int32_t profile = profileHighBit * 2 + profileLowBit;
+
+    // One reserved '0' bit if profile is 3.
+    if (profile == 3 && bits.getBits(1) != 0) {
+        return false;
+    }
+
+    // If show_existing_frame is set, we get no more data. Since this is
+    // expected to be the first frame, we can return false which will cascade
+    // into ERROR_MALFORMED.
+    if (bits.getBits(1)) {
+        return false;
+    }
+
+    int32_t frame_type = bits.getBits(1);
+
+    // Up to 7 bits have been read so far; they were guaranteed to be available since
+    // size > 0. From here on, check that bits are available before reading them.
+    if (bits.numBitsLeft() < 2) {
+        return false;
+    }
+
+    int32_t show_frame = bits.getBits(1);
+    int32_t error_resilient_mode = bits.getBits(1);
+    int32_t bitDepth = 8;
+    int32_t chromaSubsampling = -1;
+
+    if (frame_type == 0 /* KEY_FRAME */) {
+        // Check for sync code.
+        if (!isVp9SyncCode(bits)) {
+            return false;
+        }
+
+        if (!getVp9BitdepthChromaSubSampling(bits, profile, &bitDepth, &chromaSubsampling)) {
+            return false;
+        }
+    } else {
+        int32_t intra_only = 0;
+        if (!show_frame) {
+            if (bits.numBitsLeft() < 1) {
+                return false;
+            }
+            intra_only = bits.getBits(1);
+        }
+
+        if (!error_resilient_mode) {
+            if (bits.numBitsLeft() < 2) {
+                return false;
+            }
+            // ignore reset_frame_context
+            bits.skipBits(2);
+        }
+
+        if (!intra_only) {
+            // Require first frame to be either KEY_FRAME or INTER_FRAME with intra_only set to true
+            return false;
+        }
+
+        // Check for sync code.
+        if (!isVp9SyncCode(bits)) {
+            return false;
+        }
+
+        if (profile > 0) {
+            if (!getVp9BitdepthChromaSubSampling(bits, profile, &bitDepth, &chromaSubsampling)) {
+                return false;
+            }
+        } else {
+            bitDepth = 8;
+            chromaSubsampling = 3;
+        }
+    }
+    int32_t csdSize = 6;
+    if (chromaSubsampling != -1) {
+        csdSize += 3;
+    }
+
+    // Create VP9 Codec Feature Metadata (CodecPrivate) that can be parsed
+    // https://www.webmproject.org/docs/container/#vp9-codec-feature-metadata-codecprivate
+    sp<ABuffer> csd = sp<ABuffer>::make(csdSize);
+    uint8_t* csdData = csd->data();
+
+    *csdData++ = 0x01 /* FEATURE PROFILE */;
+    *csdData++ = 0x01 /* length */;
+    *csdData++ = profile;
+
+    *csdData++ = 0x03 /* FEATURE BITDEPTH */;
+    *csdData++ = 0x01 /* length */;
+    *csdData++ = bitDepth;
+
+    // A csdSize greater than 6 means chroma subsampling data was found.
+    if (csdSize > 6) {
+        *csdData++ = 0x04 /* FEATURE SUBSAMPLING */;
+        *csdData++ = 0x01 /* length */;
+        *csdData++ = chromaSubsampling;
+    }
+
+    AMediaFormat_setBuffer(meta, AMEDIAFORMAT_KEY_CSD_0, csd->data(), csd->size());
+    return true;
+}
+
 bool MakeAACCodecSpecificData(MetaDataBase &meta, const uint8_t *data, size_t size) {
     if (data == nullptr || size < 7) {
         return false;
diff --git a/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
index dcaf27f..69cf21a 100644
--- a/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
+++ b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
@@ -38,6 +38,8 @@
 void parseVorbisComment(
         AMediaFormat *fileMeta, const char *comment, size_t commentLength);
 
+bool MakeVP9CodecSpecificData(AMediaFormat* meta, const uint8_t* data, size_t size);
+
 }  // namespace android
 
 #endif  // META_DATA_UTILS_H_
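
As a concrete illustration of the CodecPrivate layout built by MakeVP9CodecSpecificData above: for a profile-0, 8-bit key frame the code derives chromaSubsampling = 3, so csdSize is 9 and the buffer set as AMEDIAFORMAT_KEY_CSD_0 consists of three {id, length, value} triples. This is a worked example, not data taken from the change:

// Expected CSD bytes for a profile-0, 8-bit VP9 key frame (illustrative only).
const uint8_t kExpectedVp9CodecPrivate[9] = {
    0x01, 0x01, 0x00,  // FEATURE PROFILE,     length 1, profile 0
    0x03, 0x01, 0x08,  // FEATURE BITDEPTH,    length 1, 8 bits
    0x04, 0x01, 0x03,  // FEATURE SUBSAMPLING, length 1, value 3
};
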
diff --git a/media/tests/benchmark/src/native/decoder/C2Decoder.cpp b/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
index 6539f24..f9a6b1c 100644
--- a/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
+++ b/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
@@ -106,7 +106,7 @@
         work->input.ordinal.frameIndex = mNumInputFrame;
         work->input.buffers.clear();
         int size = frameInfo[mNumInputFrame].size;
-        int alignedSize = ALIGN(size, PAGE_SIZE);
+        int alignedSize = ALIGN(size, getpagesize());
         if (size) {
             std::shared_ptr<C2LinearBlock> block;
             status = mLinearPool->fetchLinearBlock(
diff --git a/services/audioflinger/timing/tests/Android.bp b/services/audioflinger/timing/tests/Android.bp
index d1e5563..94eaa6a 100644
--- a/services/audioflinger/timing/tests/Android.bp
+++ b/services/audioflinger/timing/tests/Android.bp
@@ -13,7 +13,7 @@
     host_supported: true,
 
     srcs: [
-        "mediasyncevent_tests.cpp"
+        "mediasyncevent_tests.cpp",
     ],
 
     header_libs: [
@@ -38,7 +38,7 @@
     host_supported: true,
 
     srcs: [
-        "monotonicframecounter_tests.cpp"
+        "monotonicframecounter_tests.cpp",
     ],
 
     static_libs: [
@@ -54,26 +54,26 @@
 }
 
 cc_test {
-     name: "synchronizedrecordstate_tests",
+    name: "synchronizedrecordstate_tests",
 
-     host_supported: true,
+    host_supported: true,
 
-     srcs: [
-         "synchronizedrecordstate_tests.cpp"
-     ],
+    srcs: [
+        "synchronizedrecordstate_tests.cpp",
+    ],
 
-     header_libs: [
-         "libaudioclient_headers",
-     ],
+    header_libs: [
+        "libaudioclient_headers",
+    ],
 
-     static_libs: [
-         "liblog",
-         "libutils", // RefBase
-     ],
+    static_libs: [
+        "liblog",
+        "libutils", // RefBase
+    ],
 
-     cflags: [
-         "-Wall",
-         "-Werror",
-         "-Wextra",
-     ],
- }
\ No newline at end of file
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
diff --git a/services/audiopolicy/config/Android.bp b/services/audiopolicy/config/Android.bp
index 86600f4..321181d 100644
--- a/services/audiopolicy/config/Android.bp
+++ b/services/audiopolicy/config/Android.bp
@@ -31,41 +31,49 @@
     vendor: true,
     src: ":a2dp_in_audio_policy_configuration",
 }
+
 prebuilt_etc {
     name: "a2dp_audio_policy_configuration.xml",
     vendor: true,
     src: ":a2dp_audio_policy_configuration",
 }
+
 prebuilt_etc {
     name: "audio_policy_configuration.xml",
     vendor: true,
     src: ":audio_policy_configuration_generic",
 }
+
 prebuilt_etc {
     name: "r_submix_audio_policy_configuration.xml",
     vendor: true,
     src: ":r_submix_audio_policy_configuration",
 }
+
 prebuilt_etc {
     name: "audio_policy_volumes.xml",
     vendor: true,
     src: ":audio_policy_volumes",
 }
+
 prebuilt_etc {
     name: "default_volume_tables.xml",
     vendor: true,
     src: ":default_volume_tables",
 }
+
 prebuilt_etc {
     name: "surround_sound_configuration_5_0.xml",
     vendor: true,
     src: ":surround_sound_configuration_5_0",
 }
+
 prebuilt_etc {
     name: "usb_audio_policy_configuration.xml",
     vendor: true,
     src: ":usb_audio_policy_configuration",
 }
+
 prebuilt_etc {
     name: "primary_audio_policy_configuration.xml",
     src: ":primary_audio_policy_configuration",
@@ -76,50 +84,62 @@
     name: "a2dp_in_audio_policy_configuration",
     srcs: ["a2dp_in_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "a2dp_audio_policy_configuration",
     srcs: ["a2dp_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "primary_audio_policy_configuration",
     srcs: ["primary_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "surround_sound_configuration_5_0",
     srcs: ["surround_sound_configuration_5_0.xml"],
 }
+
 filegroup {
     name: "default_volume_tables",
     srcs: ["default_volume_tables.xml"],
 }
+
 filegroup {
     name: "audio_policy_volumes",
     srcs: ["audio_policy_volumes.xml"],
 }
+
 filegroup {
     name: "audio_policy_configuration_generic",
     srcs: ["audio_policy_configuration_generic.xml"],
 }
+
 filegroup {
     name: "audio_policy_configuration_generic_configurable",
     srcs: ["audio_policy_configuration_generic_configurable.xml"],
 }
+
 filegroup {
     name: "usb_audio_policy_configuration",
     srcs: ["usb_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "r_submix_audio_policy_configuration",
     srcs: ["r_submix_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "bluetooth_audio_policy_configuration_7_0",
     srcs: ["bluetooth_audio_policy_configuration_7_0.xml"],
 }
+
 filegroup {
     name: "bluetooth_with_le_audio_policy_configuration_7_0",
     srcs: ["bluetooth_with_le_audio_policy_configuration_7_0.xml"],
 }
+
 filegroup {
     name: "hearing_aid_audio_policy_configuration_7_0",
     srcs: ["hearing_aid_audio_policy_configuration_7_0.xml"],
diff --git a/services/audiopolicy/engineconfigurable/config/Android.bp b/services/audiopolicy/engineconfigurable/config/Android.bp
index b3d1f97..a733c3f 100644
--- a/services/audiopolicy/engineconfigurable/config/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/Android.bp
@@ -30,10 +30,12 @@
     vendor: true,
     src: ":audio_policy_engine_criteria",
 }
+
 filegroup {
     name: "audio_policy_engine_criterion_types_template",
     srcs: ["example/common/audio_policy_engine_criterion_types.xml.in"],
 }
+
 filegroup {
     name: "audio_policy_engine_criteria",
     srcs: ["example/common/audio_policy_engine_criteria.xml"],
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
index ee62d5e..98b8e78 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
@@ -31,18 +31,21 @@
     src: ":PolicyClass",
     sub_dir: "parameter-framework/Structure/Policy",
 }
+
 prebuilt_etc {
     name: "PolicySubsystem.xml",
     vendor: true,
     src: ":PolicySubsystem",
     sub_dir: "parameter-framework/Structure/Policy",
 }
+
 prebuilt_etc {
     name: "PolicySubsystem-CommonTypes.xml",
     vendor: true,
     src: ":buildcommontypesstructure_gen",
     sub_dir: "parameter-framework/Structure/Policy",
 }
+
 genrule {
     name: "buildcommontypesstructure_gen",
     defaults: ["buildcommontypesstructurerule"],
@@ -52,34 +55,42 @@
     name: "product_strategies_structure_template",
     srcs: ["examples/common/Structure/ProductStrategies.xml.in"],
 }
+
 filegroup {
     name: "PolicySubsystem",
     srcs: ["examples/common/Structure/PolicySubsystem.xml"],
 }
+
 filegroup {
     name: "PolicySubsystem-no-strategy",
     srcs: ["examples/common/Structure/PolicySubsystem-no-strategy.xml"],
 }
+
 filegroup {
     name: "common_types_structure_template",
     srcs: ["examples/common/Structure/PolicySubsystem-CommonTypes.xml.in"],
 }
+
 filegroup {
     name: "PolicyClass",
     srcs: ["examples/common/Structure/PolicyClass.xml"],
 }
+
 filegroup {
     name: "volumes.pfw",
     srcs: ["examples/Settings/volumes.pfw"],
 }
+
 filegroup {
     name: "device_for_input_source.pfw",
     srcs: ["examples/Settings/device_for_input_source.pfw"],
 }
+
 filegroup {
     name: "ParameterFrameworkConfigurationPolicy.userdebug.xml",
     srcs: ["examples/ParameterFrameworkConfigurationPolicy.userdebug.xml"],
 }
+
 filegroup {
     name: "ParameterFrameworkConfigurationPolicy.user.xml",
     srcs: ["examples/ParameterFrameworkConfigurationPolicy.user.xml"],
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index cc191ca..b6b9720 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -49,12 +49,12 @@
 using aidl_utils::statusTFromBinderStatus;
 using android::content::AttributionSourceState;
 using binder::Status;
+using internal::ToString;
 using media::HeadTrackingMode;
 using media::Pose3f;
 using media::SensorPoseProvider;
 using media::audio::common::HeadTracking;
 using media::audio::common::Spatialization;
-using ::android::internal::ToString;
 
 using namespace std::chrono_literals;
 
@@ -348,7 +348,8 @@
     bool activeLevelFound = false;
     for (const auto spatializationLevel : spatializationLevels) {
         if (!aidl_utils::isValidEnum(spatializationLevel)) {
-            ALOGW("%s: ignoring spatializationLevel:%d", __func__, (int)spatializationLevel);
+            ALOGW("%s: ignoring spatializationLevel:%s", __func__,
+                  ToString(spatializationLevel).c_str());
             continue;
         }
         if (spatializationLevel == Spatialization::Level::NONE) {
@@ -375,7 +376,8 @@
 
     for (const auto spatializationMode : spatializationModes) {
         if (!aidl_utils::isValidEnum(spatializationMode)) {
-            ALOGW("%s: ignoring spatializationMode:%d", __func__, (int)spatializationMode);
+            ALOGW("%s: ignoring spatializationMode:%s", __func__,
+                  ToString(spatializationMode).c_str());
             continue;
         }
         // we don't detect duplicates.
@@ -406,27 +408,26 @@
         return BAD_VALUE;
     }
 
-    //TODO b/273373363: use AIDL enum when available
     if (com::android::media::audio::dsa_over_bt_le_audio()
             && mSupportsHeadTracking) {
-        mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED;
-        std::vector<uint8_t> headtrackingConnectionModes;
+        mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
+        std::vector<HeadTracking::ConnectionMode> headtrackingConnectionModes;
         status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION,
                 &headtrackingConnectionModes);
         if (status == NO_ERROR) {
             for (const auto htConnectionMode : headtrackingConnectionModes) {
-                if (htConnectionMode < HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED ||
-                        htConnectionMode > HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL) {
-                    ALOGW("%s: ignoring HT connection mode:%d", __func__, (int)htConnectionMode);
+                if (htConnectionMode < HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED ||
+                    htConnectionMode > HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL) {
+                    ALOGW("%s: ignoring HT connection mode:%s", __func__,
+                          ToString(htConnectionMode).c_str());
                     continue;
                 }
-                mSupportedHeadtrackingConnectionModes.insert(
-                        static_cast<headtracking_connection_t> (htConnectionMode));
+                mSupportedHeadtrackingConnectionModes.insert(htConnectionMode);
             }
             ALOGW_IF(mSupportedHeadtrackingConnectionModes.find(
-                    HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED)
-                        == mSupportedHeadtrackingConnectionModes.end(),
-                    "%s: HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED not reported", __func__);
+                    HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED) ==
+                        mSupportedHeadtrackingConnectionModes.end(),
+                    "%s: Headtracking FRAMEWORK_PROCESSED not reported", __func__);
         }
     }
 
@@ -553,12 +554,12 @@
     }
     audio_utils::lock_guard lock(mMutex);
     *level = mLevel;
-    ALOGV("%s level %d", __func__, (int)*level);
+    ALOGV("%s level %s", __func__, ToString(*level).c_str());
     return Status::ok();
 }
 
 Status Spatializer::isHeadTrackingSupported(bool *supports) {
-    ALOGV("%s mSupportsHeadTracking %d", __func__, mSupportsHeadTracking);
+    ALOGV("%s mSupportsHeadTracking %s", __func__, ToString(mSupportsHeadTracking).c_str());
     if (supports == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
@@ -853,7 +854,7 @@
 }
 
 void Spatializer::onActualModeChange(HeadTrackingMode mode) {
-    std::string modeStr = media::toString(mode);
+    std::string modeStr = ToString(mode);
     ALOGV("%s(%s)", __func__, modeStr.c_str());
     sp<AMessage> msg = new AMessage(EngineCallbackHandler::kWhatOnActualModeChange, mHandler);
     msg->setInt32(EngineCallbackHandler::kModeKey, static_cast<int>(mode));
@@ -861,7 +862,7 @@
 }
 
 void Spatializer::onActualModeChangeMsg(HeadTrackingMode mode) {
-    ALOGV("%s(%d)", __func__, (int) mode);
+    ALOGV("%s(%s)", __func__, ToString(mode).c_str());
     sp<media::ISpatializerHeadTrackingCallback> callback;
     HeadTracking::Mode spatializerMode;
     {
@@ -880,7 +881,7 @@
                     spatializerMode = HeadTracking::Mode::RELATIVE_SCREEN;
                     break;
                 default:
-                    LOG_ALWAYS_FATAL("Unknown mode: %d", static_cast<int>(mode));
+                    LOG_ALWAYS_FATAL("Unknown mode: %s", ToString(mode).c_str());
             }
         }
         mActualHeadTrackingMode = spatializerMode;
@@ -894,7 +895,7 @@
             }
         }
         callback = mHeadTrackingCallback;
-        mLocalLog.log("%s: updating mode to %s", __func__, media::toString(mode).c_str());
+        mLocalLog.log("%s: updating mode to %s", __func__, ToString(mode).c_str());
     }
     if (callback != nullptr) {
         callback->onHeadTrackingModeChanged(spatializerMode);
@@ -1052,24 +1053,23 @@
     }
 }
 
-//TODO b/273373363: use AIDL enum when available
 audio_latency_mode_t Spatializer::selectHeadtrackingConnectionMode_l() {
     if (!com::android::media::audio::dsa_over_bt_le_audio()) {
         return AUDIO_LATENCY_MODE_LOW;
     }
     // mSupportedLatencyModes is ordered according to system preferences loaded in
     // mOrderedLowLatencyModes
-    mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED;
+    mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
     audio_latency_mode_t requestedLatencyMode = mSupportedLatencyModes[0];
     if (requestedLatencyMode == AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE) {
         if (mSupportedHeadtrackingConnectionModes.find(
-                HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL)
+                HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL)
                     != mSupportedHeadtrackingConnectionModes.end()) {
-            mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL;
+            mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL;
         } else if (mSupportedHeadtrackingConnectionModes.find(
-                HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_SW)
+                HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW)
                     != mSupportedHeadtrackingConnectionModes.end()) {
-            mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_SW;
+            mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW;
         } else {
             // if the engine does not support direct reading of IMU data, do not allow
             // DYNAMIC_SPATIAL_AUDIO_HARDWARE mode and fallback to next mode
@@ -1213,7 +1213,7 @@
         base::StringAppendF(&ss, " %s", ToString(mode).c_str());
     }
     base::StringAppendF(&ss, "], Desired: %s, Actual %s\n",
-                        media::toString(mDesiredHeadTrackingMode).c_str(),
+                        ToString(mDesiredHeadTrackingMode).c_str(),
                         ToString(mActualHeadTrackingMode).c_str());
 
     base::StringAppendF(&ss, "%smSpatializationModes: [", prefixSpace.c_str());
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 24788dc..355df18 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -486,11 +486,13 @@
     bool mSupportsHeadTracking;
     /** List of supported headtracking connection modes reported by the spatializer.
      * If the list is empty, the spatializer does not support any optional connection
-     * mode and mode HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED is assumed.
+     * mode and mode HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED is assumed.
      */
-    std::unordered_set<headtracking_connection_t> mSupportedHeadtrackingConnectionModes;
+    std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
+            mSupportedHeadtrackingConnectionModes;
     /** Selected HT connection mode when several modes are supported by the spatializer */
-    headtracking_connection_t mHeadtrackingConnectionMode;
+    media::audio::common::HeadTracking::ConnectionMode mHeadtrackingConnectionMode =
+            media::audio::common::HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
 
     // Looper thread for mEngine callbacks
     class EngineCallbackHandler;
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index 8088ef0..fdb56e5 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -30,7 +30,7 @@
     ],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 }
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
index c96c37b..84a1ce6 100644
--- a/services/medialog/fuzzer/Android.bp
+++ b/services/medialog/fuzzer/Android.bp
@@ -30,12 +30,12 @@
         "frameworks/av/services/medialog",
     ],
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-audio-fuzzing-reports@google.com",
         ],
         componentid: 155276,
         hotlists: [