Merge "Add OWNERS for VTS module vts_mediaProfiles_validate_test"
diff --git a/.clang-format b/.clang-format
new file mode 100644
index 0000000..a7614d2
--- /dev/null
+++ b/.clang-format
@@ -0,0 +1,13 @@
+BasedOnStyle: Google
+Standard: Cpp11
+AccessModifierOffset: -2
+AllowShortFunctionsOnASingleLine: Inline
+ColumnLimit: 100
+CommentPragmas: NOLINT:.*
+DerivePointerAlignment: false
+IncludeBlocks: Preserve
+IndentWidth: 4
+ContinuationIndentWidth: 8
+PointerAlignment: Left
+TabWidth: 4
+UseTab: Never
diff --git a/OWNERS b/OWNERS
index 7f523a2..0be1196 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,10 +1,8 @@
-chz@google.com
+# Bug component: 1344
 elaurent@google.com
 etalvala@google.com
 hkuang@google.com
 lajos@google.com
-marcone@google.com
 
-# LON
-olly@google.com
-andrewlewis@google.com
+# go/android-fwk-media-solutions for info on areas of ownership.
+include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index ae920c0..b1c81bf 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -1,2 +1,5 @@
 [Hook Scripts]
 mainline_hook = ${REPO_ROOT}/frameworks/av/tools/mainline_hook_partial.sh ${REPO_ROOT} ${PREUPLOAD_FILES}
+
+hidden_api_txt_checksorted_hook = ${REPO_ROOT}/tools/platform-compat/hiddenapi/checksorted_sha.sh ${PREUPLOAD_COMMIT} ${REPO_ROOT}
+
diff --git a/apex/Android.bp b/apex/Android.bp
index 5a88d24..85fc7c9 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -90,6 +90,35 @@
     name: "com.android.media-bootclasspath-fragment",
     contents: ["updatable-media"],
     apex_available: ["com.android.media"],
+
+    api: {
+        stub_libs: [
+            // Stubs for the APIs provided by updatable-media. This has to be
+            // specified explicitly because updatable-media is not a
+            // java_sdk_library.
+            "framework-media",
+        ],
+    },
+
+    // The bootclasspath_fragments that provide APIs on which this depends.
+    fragments: [
+        {
+            apex: "com.android.art",
+            module: "art-bootclasspath-fragment",
+        },
+    ],
+
+    // Additional stubs libraries that this fragment's contents use which are
+    // not provided by another bootclasspath_fragment.
+    additional_stubs: [
+        "android-non-updatable",
+    ],
+
+    // Additional hidden API flag files to override the defaults. This must only be
+    // modified by the Soong or platform compat team.
+    hidden_api: {
+        max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+    },
 }
 
 filegroup {
diff --git a/apex/OWNERS b/apex/OWNERS
index a60d04b..54802d4 100644
--- a/apex/OWNERS
+++ b/apex/OWNERS
@@ -1,9 +1,7 @@
-chz@google.com
-dwkang@google.com
+essick@google.com
 jiyong@google.com
 lajos@google.com
-marcone@google.com
-wjia@google.com
+nchalko@google.com
 
 include platform/packages/modules/common:/MODULES_OWNERS
 
diff --git a/apex/hiddenapi/OWNERS b/apex/hiddenapi/OWNERS
new file mode 100644
index 0000000..ac8a2b6
--- /dev/null
+++ b/apex/hiddenapi/OWNERS
@@ -0,0 +1,5 @@
+# soong-team@ as the hiddenapi files are tightly coupled with Soong
+file:platform/build/soong:/OWNERS
+
+# compat-team@ for changes to hiddenapi files
+file:tools/platform-compat:/OWNERS
diff --git a/apex/hiddenapi/hiddenapi-max-target-o-low-priority.txt b/apex/hiddenapi/hiddenapi-max-target-o-low-priority.txt
new file mode 100644
index 0000000..32bbb10
--- /dev/null
+++ b/apex/hiddenapi/hiddenapi-max-target-o-low-priority.txt
@@ -0,0 +1,6 @@
+Landroid/media/MediaSession2$ControllerInfo;-><init>(Landroid/content/Context;IILjava/lang/String;Landroid/os/IInterface;)V
+Landroid/media/MediaSession2$ControllerInfo;->getPackageName()Ljava/lang/String;
+Landroid/media/MediaSession2$ControllerInfo;->getProvider()Landroid/media/update/MediaSession2Provider$ControllerInfoProvider;
+Landroid/media/MediaSession2$ControllerInfo;->getUid()I
+Landroid/media/MediaSession2$ControllerInfo;->isTrusted()Z
+Landroid/media/MediaSession2$ControllerInfo;->mProvider:Landroid/media/update/MediaSession2Provider$ControllerInfoProvider;
diff --git a/camera/OWNERS b/camera/OWNERS
index d6b95da..47d1d19 100644
--- a/camera/OWNERS
+++ b/camera/OWNERS
@@ -2,7 +2,7 @@
 etalvala@google.com
 jchowdhary@google.com
 shuzhenwang@google.com
-yinchiayeh@google.com
+ruchamk@google.com
 # backup owner
 cychen@google.com
 zhijunhe@google.com
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index ccbfaa9..da887a2 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -139,6 +139,8 @@
             return !(*this == other);
         }
         bool operator < (const Callback& other) const {
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wordered-compare-function-pointers"
             if (*this == other) return false;
             if (mContext != other.mContext) return mContext < other.mContext;
             if (mPhysicalCamAvailable != other.mPhysicalCamAvailable) {
@@ -152,6 +154,7 @@
             }
             if (mAvailable != other.mAvailable) return mAvailable < other.mAvailable;
             return mUnavailable < other.mUnavailable;
+#pragma GCC diagnostic pop
         }
         bool operator > (const Callback& other) const {
             return (*this != other && !(*this < other));
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 85da3e9..f25f5cb 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -135,6 +135,8 @@
             return !(*this == other);
         }
         bool operator < (const Callback& other) const {
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wordered-compare-function-pointers"
             if (*this == other) return false;
             if (mContext != other.mContext) return mContext < other.mContext;
             if (mAvailable != other.mAvailable) return mAvailable < other.mAvailable;
@@ -145,6 +147,7 @@
             if (mPhysicalCamUnavailable != other.mPhysicalCamUnavailable)
                     return mPhysicalCamUnavailable < other.mPhysicalCamUnavailable;
             return mUnavailable < other.mUnavailable;
+#pragma GCC diagnostic pop
         }
         bool operator > (const Callback& other) const {
             return (*this != other && !(*this < other));
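
The pragma pair added in both ACameraManager.h variants scopes the suppression of clang's -Wordered-compare-function-pointers to just the ordering logic inside operator<. A minimal standalone sketch of the same pattern, where the callback typedef is illustrative rather than the real NDK type:

    // Ordering two function pointers with '<' is unspecified for unrelated
    // functions, so clang warns; push/pop limits the suppression to this spot.
    typedef void (*AvailabilityCallback)(void* context, const char* cameraId);

    static bool callbackLess(AvailabilityCallback a, AvailabilityCallback b) {
    #pragma GCC diagnostic push
    #pragma GCC diagnostic ignored "-Wordered-compare-function-pointers"
        return a < b;
    #pragma GCC diagnostic pop
    }
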
diff --git a/drm/libmediadrm/Android.bp b/drm/libmediadrm/Android.bp
index a82a61d..5ce7241 100644
--- a/drm/libmediadrm/Android.bp
+++ b/drm/libmediadrm/Android.bp
@@ -65,7 +65,7 @@
     ],
 
     static_libs: [
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
     ],
 
     export_shared_lib_headers: [
diff --git a/drm/libmediadrm/fuzzer/Android.bp b/drm/libmediadrm/fuzzer/Android.bp
index 2b04702..9faed3a 100644
--- a/drm/libmediadrm/fuzzer/Android.bp
+++ b/drm/libmediadrm/fuzzer/Android.bp
@@ -35,7 +35,7 @@
     static_libs: [
         "libmediadrm",
         "liblog",
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
     ],
     header_libs: [
         "libmedia_headers",
diff --git a/media/OWNERS b/media/OWNERS
index 3e194f0..099729f 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -1,24 +1,21 @@
-andrewlewis@google.com
-chz@google.com
-dwkang@google.com
+# Bug component: 1344
 elaurent@google.com
 essick@google.com
-gkasten@google.com
 hkuang@google.com
 hunga@google.com
 jiabin@google.com
 jmtrivi@google.com
 lajos@google.com
-marcone@google.com
 mnaganov@google.com
 nchalko@google.com
-pawin@google.com
 philburk@google.com
 pmclean@google.com
 quxiangfang@google.com
-rachad@google.com
 rago@google.com
 robertshih@google.com
 taklee@google.com
-wjia@google.com
 wonsik@google.com
+ytai@google.com
+
+# go/android-fwk-media-solutions for info on areas of ownership.
+include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 332696d..342d771 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -289,13 +289,14 @@
     mOutputDelayRingBufferFilled = 0;
     mBuffersInfo.clear();
 
-    // To make the codec behave the same before and after a reset, we need to invalidate the
-    // streaminfo struct. This does that:
-    mStreamInfo->sampleRate = 0; // TODO: mStreamInfo is read only
-
+    status_t status = UNKNOWN_ERROR;
+    if (mAACDecoder) {
+        aacDecoder_Close(mAACDecoder);
+        status = initDecoder();
+    }
     mSignalledError = false;
 
-    return C2_OK;
+    return status == OK ? C2_OK : C2_CORRUPTED;
 }
 
 void C2SoftAacDec::onReset() {
@@ -514,8 +515,8 @@
 
                 // TODO: error handling, proper usage, etc.
                 C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
-                c2_status_t err = pool->fetchLinearBlock(
-                        numSamples * sizeof(int16_t), usage, &block);
+                size_t bufferSize = numSamples * sizeof(int16_t);
+                c2_status_t err = pool->fetchLinearBlock(bufferSize, usage, &block);
                 if (err != C2_OK) {
                     ALOGD("failed to fetch a linear block (%d)", err);
                     return std::bind(fillEmptyWork, _1, C2_NO_MEMORY);
@@ -529,7 +530,7 @@
                     mSignalledError = true;
                     return std::bind(fillEmptyWork, _1, C2_CORRUPTED);
                 }
-                return [buffer = createLinearBuffer(block)](
+                return [buffer = createLinearBuffer(block, 0, bufferSize)](
                         const std::unique_ptr<C2Work> &work) {
                     work->result = C2_OK;
                     C2FrameData &output = work->worklets.front()->output;
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
index c08e02b..e92d38d 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
@@ -143,7 +143,7 @@
     if (!mIsWide) {
         Speech_Decode_Frame_reset(mAmrHandle);
     } else {
-        pvDecoder_AmrWb_Reset(mAmrHandle, 0 /* reset_all */);
+        pvDecoder_AmrWb_Reset(mAmrHandle, 1 /* reset_all */);
     }
     mSignalledError = false;
     mSignalledOutputEos = false;
@@ -361,7 +361,13 @@
 
     work->worklets.front()->output.flags = work->input.flags;
     work->worklets.front()->output.buffers.clear();
-    work->worklets.front()->output.buffers.push_back(createLinearBuffer(block));
+    // the number of bytes written to the output buffer is (intptr_t)output - (intptr_t)wView.data();
+    // use calOutSize as that is the expected number of output bytes
+    ALOGD_IF(calOutSize != ((intptr_t)output - (intptr_t)wView.data()),
+            "Expected %zu output bytes, but filled %lld",
+             calOutSize, (long long)((intptr_t)output - (intptr_t)wView.data()));
+    work->worklets.front()->output.buffers.push_back(
+            createLinearBuffer(block, 0, calOutSize));
     work->worklets.front()->output.ordinal = work->input.ordinal;
     if (eos) {
         mSignalledOutputEos = true;
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index dfad226..6c4b7d9 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -592,21 +592,11 @@
 }
 
 std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
-        const std::shared_ptr<C2LinearBlock> &block) {
-    return createLinearBuffer(block, block->offset(), block->size());
-}
-
-std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
         const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size) {
     return C2Buffer::CreateLinearBuffer(block->share(offset, size, ::C2Fence()));
 }
 
 std::shared_ptr<C2Buffer> SimpleC2Component::createGraphicBuffer(
-        const std::shared_ptr<C2GraphicBlock> &block) {
-    return createGraphicBuffer(block, C2Rect(block->width(), block->height()));
-}
-
-std::shared_ptr<C2Buffer> SimpleC2Component::createGraphicBuffer(
         const std::shared_ptr<C2GraphicBlock> &block, const C2Rect &crop) {
     return C2Buffer::CreateGraphicBuffer(block->share(crop, ::C2Fence()));
 }
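
With the zero-argument convenience overloads removed, every caller must state the offset and the number of valid bytes (or the crop) explicitly, as the per-codec changes later in this patch do. A self-contained stand-in, with plain structs invented here in place of the C2 block and buffer types, illustrates the resulting call shape:

    #include <cstddef>
    #include <memory>
    #include <vector>

    struct Block {
        std::vector<unsigned char> data;   // the whole allocation
    };

    struct Buffer {
        std::shared_ptr<Block> block;
        size_t offset;
        size_t size;                       // only these bytes carry payload
    };

    static Buffer createLinearBuffer(const std::shared_ptr<Block>& block,
                                     size_t offset, size_t size) {
        return Buffer{block, offset, size};
    }

    int main() {
        auto block = std::make_shared<Block>();
        block->data.resize(4096);                       // fetched capacity
        size_t written = 1024;                          // bytes actually produced
        Buffer out = createLinearBuffer(block, 0, written);
        return out.size == written ? 0 : 1;
    }
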
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index 22d5714..e5e16d8 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -140,15 +140,9 @@
 
 
     std::shared_ptr<C2Buffer> createLinearBuffer(
-            const std::shared_ptr<C2LinearBlock> &block);
-
-    std::shared_ptr<C2Buffer> createLinearBuffer(
             const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size);
 
     std::shared_ptr<C2Buffer> createGraphicBuffer(
-            const std::shared_ptr<C2GraphicBlock> &block);
-
-    std::shared_ptr<C2Buffer> createGraphicBuffer(
             const std::shared_ptr<C2GraphicBlock> &block,
             const C2Rect &crop);
 
diff --git a/media/codec2/components/g711/C2SoftG711Dec.cpp b/media/codec2/components/g711/C2SoftG711Dec.cpp
index f9299af..f952f22 100644
--- a/media/codec2/components/g711/C2SoftG711Dec.cpp
+++ b/media/codec2/components/g711/C2SoftG711Dec.cpp
@@ -199,7 +199,7 @@
 
     work->worklets.front()->output.flags = work->input.flags;
     work->worklets.front()->output.buffers.clear();
-    work->worklets.front()->output.buffers.push_back(createLinearBuffer(block));
+    work->worklets.front()->output.buffers.push_back(createLinearBuffer(block, 0, outSize));
     work->worklets.front()->output.ordinal = work->input.ordinal;
 
     if (eos) {
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 2fa4f25..f857e87 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -26,6 +26,11 @@
 #include <media/stagefright/foundation/MediaDefs.h>
 
 namespace android {
+namespace {
+
+constexpr uint8_t NEUTRAL_UV_VALUE = 128;
+
+}  // namespace
 
 // codecname set and passed in as a compile flag from Android.bp
 constexpr char COMPONENT_NAME[] = CODECNAME;
@@ -51,8 +56,8 @@
         DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
             .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
             .withFields({
-                C2F(mSize, width).inRange(2, 2048, 2),
-                C2F(mSize, height).inRange(2, 2048, 2),
+                C2F(mSize, width).inRange(2, 4096, 2),
+                C2F(mSize, height).inRange(2, 4096, 2),
             })
             .withSetter(SizeSetter)
             .build());
@@ -464,7 +469,8 @@
                                         const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
                                         size_t srcYStride, size_t srcUStride, size_t srcVStride,
                                         size_t dstYStride, size_t dstUVStride,
-                                        uint32_t width, uint32_t height) {
+                                        uint32_t width, uint32_t height,
+                                        bool isMonochrome) {
 
   for (size_t i = 0; i < height; ++i) {
     memcpy(dstY, srcY, width);
@@ -472,6 +478,17 @@
     dstY += dstYStride;
   }
 
+  if (isMonochrome) {
+    // Fill with neutral U/V values.
+    for (size_t i = 0; i < height / 2; ++i) {
+      memset(dstV, NEUTRAL_UV_VALUE, width / 2);
+      memset(dstU, NEUTRAL_UV_VALUE, width / 2);
+      dstV += dstUVStride;
+      dstU += dstUVStride;
+    }
+    return;
+  }
+
   for (size_t i = 0; i < height / 2; ++i) {
     memcpy(dstV, srcV, width / 2);
     srcV += srcVStride;
@@ -557,7 +574,7 @@
     const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
     size_t srcYStride, size_t srcUStride, size_t srcVStride,
     size_t dstYStride, size_t dstUVStride,
-    size_t width, size_t height) {
+    size_t width, size_t height, bool isMonochrome) {
 
   for (size_t y = 0; y < height; ++y) {
     for (size_t x = 0; x < width; ++x) {
@@ -568,6 +585,17 @@
     dstY += dstYStride;
   }
 
+  if (isMonochrome) {
+    // Fill with neutral U/V values.
+    for (size_t y = 0; y < (height + 1) / 2; ++y) {
+      memset(dstV, NEUTRAL_UV_VALUE, (width + 1) / 2);
+      memset(dstU, NEUTRAL_UV_VALUE, (width + 1) / 2);
+      dstV += dstUVStride;
+      dstU += dstUVStride;
+    }
+    return;
+  }
+
   for (size_t y = 0; y < (height + 1) / 2; ++y) {
     for (size_t x = 0; x < (width + 1) / 2; ++x) {
       dstU[x] = (uint8_t)(srcU[x] >> 2);
@@ -623,8 +651,16 @@
     }
   }
 
-  // TODO(vigneshv): Add support for monochrome videos since AV1 supports it.
-  CHECK(buffer->image_format == libgav1::kImageFormatYuv420);
+  if (!(buffer->image_format == libgav1::kImageFormatYuv420 ||
+        buffer->image_format == libgav1::kImageFormatMonochrome400)) {
+    ALOGE("image_format %d not supported", buffer->image_format);
+    mSignalledError = true;
+    work->workletsProcessed = 1u;
+    work->result = C2_CORRUPTED;
+    return false;
+  }
+  const bool isMonochrome =
+      buffer->image_format == libgav1::kImageFormatMonochrome400;
 
   std::shared_ptr<C2GraphicBlock> block;
   uint32_t format = HAL_PIXEL_FORMAT_YV12;
@@ -636,6 +672,13 @@
     if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
         defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
         defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+      if (buffer->image_format != libgav1::kImageFormatYuv420) {
+        ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
+        mSignalledError = true;
+        work->result = C2_OMITTED;
+        work->workletsProcessed = 1u;
+        return false;
+      }
       format = HAL_PIXEL_FORMAT_RGBA_1010102;
     }
   }
@@ -682,21 +725,18 @@
           (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
           srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight);
     } else {
-      convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
-                                          srcY, srcU, srcV,
-                                          srcYStride / 2, srcUStride / 2, srcVStride / 2,
-                                          dstYStride, dstUVStride,
-                                          mWidth, mHeight);
+      convertYUV420Planar16ToYUV420Planar(
+          dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
+          srcVStride / 2, dstYStride, dstUVStride, mWidth, mHeight,
+          isMonochrome);
     }
   } else {
     const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
     const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
     const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
-    copyOutputBufferToYV12Frame(dstY, dstU, dstV,
-                                srcY, srcU, srcV,
-                                srcYStride, srcUStride, srcVStride,
-                                dstYStride, dstUVStride,
-                                mWidth, mHeight);
+    copyOutputBufferToYV12Frame(
+        dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+        dstYStride, dstUVStride, mWidth, mHeight, isMonochrome);
   }
   finishWork(buffer->user_private_data, work, std::move(block));
   block = nullptr;
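
For monochrome (YUV 4:0:0) output the decoder now copies the Y plane and writes the neutral value 128 into both chroma planes, so the frame renders as proper grayscale through a YUV420 path. A small self-contained sketch of that chroma fill, with simplified stride assumptions and a made-up helper name:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Fill sub-sampled U/V planes with the neutral value so a luma-only frame
    // shows as grayscale; rounding mirrors the 16-bit path above.
    static void fillNeutralChroma(uint8_t* dstU, uint8_t* dstV, size_t dstUVStride,
                                  size_t width, size_t height) {
        constexpr uint8_t kNeutralUV = 128;
        for (size_t i = 0; i < (height + 1) / 2; ++i) {
            std::memset(dstU, kNeutralUV, (width + 1) / 2);
            std::memset(dstV, kNeutralUV, (width + 1) / 2);
            dstU += dstUVStride;
            dstV += dstUVStride;
        }
    }
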
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index 4bc1777..b7a5686 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -245,6 +245,19 @@
                 })
                 .withSetter(CodedColorAspectsSetter, mColorAspects)
                 .build());
+
+        addParameter(
+                DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+                .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+                        0 /* flexCount */, 0u /* stream */))
+                .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+                                {C2Config::picture_type_t(I_FRAME),
+                                  C2Config::picture_type_t(P_FRAME),
+                                  C2Config::picture_type_t(B_FRAME)}),
+                             C2F(mPictureQuantization, m.values[0].min).any(),
+                             C2F(mPictureQuantization, m.values[0].max).any()})
+                .withSetter(PictureQuantizationSetter)
+                .build());
     }
 
     static C2R InputDelaySetter(
@@ -464,9 +477,69 @@
         me.set().matrix = coded.v.matrix;
         return C2R::Ok();
     }
+    static C2R PictureQuantizationSetter(bool mayBlock,
+                                         C2P<C2StreamPictureQuantizationTuning::output> &me) {
+        (void)mayBlock;
+
+        // these are the ones we're going to set, so we want them to default
+        // to the codec's DEFAULT values
+        int32_t iMin = HEVC_QP_MIN, pMin = HEVC_QP_MIN, bMin = HEVC_QP_MIN;
+        int32_t iMax = HEVC_QP_MAX, pMax = HEVC_QP_MAX, bMax = HEVC_QP_MAX;
+
+        for (size_t i = 0; i < me.v.flexCount(); ++i) {
+            const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+            // layerMin is clamped to [HEVC_QP_MIN, layerMax] to avoid error
+            // cases where layer.min > layer.max
+            int32_t layerMax = std::clamp(layer.max, HEVC_QP_MIN, HEVC_QP_MAX);
+            int32_t layerMin = std::clamp(layer.min, HEVC_QP_MIN, layerMax);
+            if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+                iMax = layerMax;
+                iMin = layerMin;
+                ALOGV("iMin %d iMax %d", iMin, iMax);
+            } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+                pMax = layerMax;
+                pMin = layerMin;
+                ALOGV("pMin %d pMax %d", pMin, pMax);
+            } else if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+                bMax = layerMax;
+                bMin = layerMin;
+                ALOGV("bMin %d bMax %d", bMin, bMax);
+            }
+        }
+
+        ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d b %d-%d",
+              iMin, iMax, pMin, pMax, bMin, bMax);
+
+        int32_t maxFrameQP = std::min(std::min(iMax, pMax), bMax);
+        int32_t minFrameQP = std::max(std::max(iMin, pMin), bMin);
+        if (minFrameQP > maxFrameQP) {
+            minFrameQP = maxFrameQP;
+        }
+
+        // put them back into the structure
+        for (size_t i = 0; i < me.v.flexCount(); ++i) {
+            const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+            if (layer.type_ == C2Config::picture_type_t(I_FRAME) ||
+                layer.type_ == C2Config::picture_type_t(P_FRAME) ||
+                layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+                me.set().m.values[i].max = maxFrameQP;
+                me.set().m.values[i].min = minFrameQP;
+            }
+        }
+
+        ALOGV("PictureQuantizationSetter(exit): i = p = b = %d-%d",
+              minFrameQP, maxFrameQP);
+
+        return C2R::Ok();
+    }
     std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() {
         return mCodedColorAspects;
     }
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+        return mPictureQuantization;
+    }
 
    private:
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -482,6 +555,7 @@
     std::shared_ptr<C2StreamGopTuning::output> mGop;
     std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
 };
 
 static size_t GetCPUCoreCount() {
@@ -654,12 +728,41 @@
         mEncParams.s_coding_tools_prms.i4_max_temporal_layers = 3;
     }
 
-    switch (mBitrateMode->value) {
-        case C2Config::BITRATE_IGNORE:
-            mEncParams.s_config_prms.i4_rate_control_mode = 3;
-            mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_frame_qp[0] =
-                getQpFromQuality(mQuality->value);
+    // we resolved out-of-bound and unspecified values in PictureQuantizationSetter()
+    // so we can start with defaults that are overridden as needed.
+    int32_t maxFrameQP = mEncParams.s_config_prms.i4_max_frame_qp;
+    int32_t minFrameQP = mEncParams.s_config_prms.i4_min_frame_qp;
+
+    for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
+
+        // no need to keep looping: the hevc library takes the same range for I/P/B picture types
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME) ||
+            layer.type_ == C2Config::picture_type_t(P_FRAME) ||
+            layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+
+            maxFrameQP = layer.max;
+            minFrameQP = layer.min;
             break;
+        }
+    }
+    mEncParams.s_config_prms.i4_max_frame_qp = maxFrameQP;
+    mEncParams.s_config_prms.i4_min_frame_qp = minFrameQP;
+
+    ALOGV("MaxFrameQp: %d MinFrameQp: %d", maxFrameQP, minFrameQP);
+
+    mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_frame_qp[0] =
+        std::clamp(kDefaultInitQP, minFrameQP, maxFrameQP);
+
+    switch (mBitrateMode->value) {
+        case C2Config::BITRATE_IGNORE: {
+            mEncParams.s_config_prms.i4_rate_control_mode = 3;
+            // ensure initial qp values are within our newly configured bounds
+            int32_t frameQp = getQpFromQuality(mQuality->value);
+            mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_frame_qp[0] =
+                std::clamp(frameQp, minFrameQP, maxFrameQP);
+            break;
+        }
         case C2Config::BITRATE_CONST:
             mEncParams.s_config_prms.i4_rate_control_mode = 5;
             break;
@@ -723,6 +826,7 @@
         mGop = mIntf->getGop_l();
         mRequestSync = mIntf->getRequestSync_l();
         mColorAspects = mIntf->getCodedColorAspects_l();
+        mQpBounds = mIntf->getPictureQuantization_l();
     }
 
     c2_status_t status = initEncParams();
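
The setter collapses any per-picture-type bounds into a single frame-QP range, since the encoder library accepts one range for I, P, and B frames: the usable maximum is the smallest of the per-type maxima and the usable minimum is the largest of the per-type minima. A tiny self-contained arithmetic sketch with made-up inputs:

    #include <algorithm>
    #include <cstdio>

    int main() {
        int iMin = 10, iMax = 40;   // I-frame bounds (made up)
        int pMin = 12, pMax = 45;   // P-frame bounds
        int bMin = 14, bMax = 51;   // B-frame bounds
        int maxFrameQP = std::min(std::min(iMax, pMax), bMax);   // 40
        int minFrameQP = std::max(std::max(iMin, pMin), bMin);   // 14
        if (minFrameQP > maxFrameQP) minFrameQP = maxFrameQP;
        std::printf("frame QP range: %d-%d\n", minFrameQP, maxFrameQP);
        return 0;
    }
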
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.h b/media/codec2/components/hevc/C2SoftHevcEnc.h
index 9dbf682..4217a8b 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.h
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.h
@@ -42,6 +42,11 @@
 #define DEFAULT_B_FRAMES     0
 #define DEFAULT_RC_LOOKAHEAD 0
 
+#define HEVC_QP_MIN 1
+#define HEVC_QP_MAX 51
+
+constexpr int32_t kDefaultInitQP = 32;
+
 struct C2SoftHevcEnc : public SimpleC2Component {
     class IntfImpl;
 
@@ -90,6 +95,7 @@
     std::shared_ptr<C2StreamGopTuning::output> mGop;
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mQpBounds;
 #ifdef FILE_DUMP_ENABLE
     char mInFile[200];
     char mOutFile[200];
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.cpp b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
index 7137767..50690af 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.cpp
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
@@ -16,6 +16,7 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2SoftMp3Dec"
+#include <inttypes.h>
 #include <log/log.h>
 
 #include <numeric>
@@ -320,6 +321,13 @@
     return C2_OK;
 }
 
+static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+    work->worklets.front()->output.flags = work->input.flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->workletsProcessed = 1u;
+}
+
 // TODO: Can overall error checking be improved? As in the check for validity of
 //       work, pool ptr, work->input.buffers.size() == 1, ...
 // TODO: Blind removal of 529 samples from the output may not work. Because
@@ -485,17 +493,17 @@
         }
     }
 
-    uint64_t outTimeStamp = mProcessedSamples * 1000000ll / samplingRate;
-    mProcessedSamples += ((outSize - outOffset) / (numChannels * sizeof(int16_t)));
-    ALOGV("out buffer attr. offset %d size %d timestamp %u", outOffset, outSize - outOffset,
-          (uint32_t)(mAnchorTimeStamp + outTimeStamp));
-    decodedSizes.clear();
-    work->worklets.front()->output.flags = work->input.flags;
-    work->worklets.front()->output.buffers.clear();
-    work->worklets.front()->output.buffers.push_back(
-            createLinearBuffer(block, outOffset, outSize - outOffset));
-    work->worklets.front()->output.ordinal = work->input.ordinal;
-    work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+    fillEmptyWork(work);
+    if (samplingRate && numChannels) {
+        int64_t outTimeStamp = mProcessedSamples * 1000000ll / samplingRate;
+        mProcessedSamples += ((outSize - outOffset) / (numChannels * sizeof(int16_t)));
+        ALOGV("out buffer attr. offset %d size %d timestamp %" PRId64 " ", outOffset,
+               outSize - outOffset, mAnchorTimeStamp + outTimeStamp);
+        decodedSizes.clear();
+        work->worklets.front()->output.buffers.push_back(
+                createLinearBuffer(block, outOffset, outSize - outOffset));
+        work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+    }
     if (eos) {
         mSignalledOutputEos = true;
         ALOGV("signalled EOS");
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.h b/media/codec2/components/mp3/C2SoftMp3Dec.h
index 402bdc4..e2dfcf3 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.h
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.h
@@ -63,7 +63,7 @@
     bool mSignalledError;
     bool mSignalledOutputEos;
     bool mGaplessBytes;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
 
     status_t initDecoder();
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index ddd312f..81f4679 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -228,7 +228,6 @@
         const std::shared_ptr<IntfImpl> &intfImpl)
     : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
       mIntf(intfImpl),
-      mDecHandle(nullptr),
       mOutputBuffer{},
       mInitialized(false) {
 }
@@ -244,9 +243,7 @@
 
 c2_status_t C2SoftMpeg4Dec::onStop() {
     if (mInitialized) {
-        if (mDecHandle) {
-            PVCleanUpVideoDecoder(mDecHandle);
-        }
+        PVCleanUpVideoDecoder(&mVideoDecControls);
         mInitialized = false;
     }
     for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
@@ -269,28 +266,15 @@
 }
 
 void C2SoftMpeg4Dec::onRelease() {
-    if (mInitialized) {
-        if (mDecHandle) {
-            PVCleanUpVideoDecoder(mDecHandle);
-            delete mDecHandle;
-            mDecHandle = nullptr;
-        }
-        mInitialized = false;
-    }
+    (void)onStop();
     if (mOutBlock) {
         mOutBlock.reset();
     }
-    for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
-        if (mOutputBuffer[i]) {
-            free(mOutputBuffer[i]);
-            mOutputBuffer[i] = nullptr;
-        }
-    }
 }
 
 c2_status_t C2SoftMpeg4Dec::onFlush_sm() {
     if (mInitialized) {
-        if (PV_TRUE != PVResetVideoDecoder(mDecHandle)) {
+        if (PV_TRUE != PVResetVideoDecoder(&mVideoDecControls)) {
             return C2_CORRUPTED;
         }
     }
@@ -305,14 +289,8 @@
 #else
     mIsMpeg4 = false;
 #endif
-    if (!mDecHandle) {
-        mDecHandle = new tagvideoDecControls;
-    }
-    if (!mDecHandle) {
-        ALOGE("mDecHandle is null");
-        return NO_MEMORY;
-    }
-    memset(mDecHandle, 0, sizeof(tagvideoDecControls));
+
+    memset(&mVideoDecControls, 0, sizeof(tagvideoDecControls));
 
     /* TODO: bring these values to 352 and 288. It cannot be done as of now
      * because, h263 doesn't seem to allow port reconfiguration. In OMX, the
@@ -368,10 +346,6 @@
 }
 
 c2_status_t C2SoftMpeg4Dec::ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool) {
-    if (!mDecHandle) {
-        ALOGE("not supposed to be here, invalid decoder context");
-        return C2_CORRUPTED;
-    }
 
     mOutputBufferSize = align(mIntf->getMaxWidth(), 16) * align(mIntf->getMaxHeight(), 16) * 3 / 2;
     for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
@@ -402,10 +376,10 @@
 
 bool C2SoftMpeg4Dec::handleResChange(const std::unique_ptr<C2Work> &work) {
     uint32_t disp_width, disp_height;
-    PVGetVideoDimensions(mDecHandle, (int32 *)&disp_width, (int32 *)&disp_height);
+    PVGetVideoDimensions(&mVideoDecControls, (int32 *)&disp_width, (int32 *)&disp_height);
 
     uint32_t buf_width, buf_height;
-    PVGetBufferDimensions(mDecHandle, (int32 *)&buf_width, (int32 *)&buf_height);
+    PVGetBufferDimensions(&mVideoDecControls, (int32 *)&buf_width, (int32 *)&buf_height);
 
     CHECK_LE(disp_width, buf_width);
     CHECK_LE(disp_height, buf_height);
@@ -426,13 +400,14 @@
         }
 
         if (!mIsMpeg4) {
-            PVCleanUpVideoDecoder(mDecHandle);
+            PVCleanUpVideoDecoder(&mVideoDecControls);
 
             uint8_t *vol_data[1]{};
             int32_t vol_size = 0;
 
             if (!PVInitVideoDecoder(
-                    mDecHandle, vol_data, &vol_size, 1, mIntf->getMaxWidth(), mIntf->getMaxHeight(), H263_MODE)) {
+                    &mVideoDecControls, vol_data, &vol_size, 1, mIntf->getMaxWidth(),
+                                                        mIntf->getMaxHeight(), H263_MODE)) {
                 ALOGE("Error in PVInitVideoDecoder H263_MODE while resChanged was set to true");
                 mSignalledError = true;
                 work->result = C2_CORRUPTED;
@@ -522,7 +497,7 @@
     uint32_t *start_code = (uint32_t *)bitstream;
     bool volHeader = *start_code == 0xB0010000;
     if (volHeader) {
-        PVCleanUpVideoDecoder(mDecHandle);
+        PVCleanUpVideoDecoder(&mVideoDecControls);
         mInitialized = false;
     }
 
@@ -537,7 +512,7 @@
         }
         MP4DecodingMode mode = (mIsMpeg4) ? MPEG4_MODE : H263_MODE;
         if (!PVInitVideoDecoder(
-                mDecHandle, vol_data, &vol_size, 1,
+                &mVideoDecControls, vol_data, &vol_size, 1,
                 mIntf->getMaxWidth(), mIntf->getMaxHeight(), mode)) {
             ALOGE("PVInitVideoDecoder failed. Unsupported content?");
             mSignalledError = true;
@@ -545,7 +520,7 @@
             return;
         }
         mInitialized = true;
-        MP4DecodingMode actualMode = PVGetDecBitstreamMode(mDecHandle);
+        MP4DecodingMode actualMode = PVGetDecBitstreamMode(&mVideoDecControls);
         if (mode != actualMode) {
             ALOGE("Decoded mode not same as actual mode of the decoder");
             mSignalledError = true;
@@ -553,7 +528,7 @@
             return;
         }
 
-        PVSetPostProcType(mDecHandle, 0);
+        PVSetPostProcType(&mVideoDecControls, 0);
         if (handleResChange(work)) {
             ALOGI("Setting width and height");
             C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
@@ -590,7 +565,7 @@
             return;
         }
 
-        uint32_t yFrameSize = sizeof(uint8) * mDecHandle->size;
+        uint32_t yFrameSize = sizeof(uint8) * mVideoDecControls.size;
         if (mOutputBufferSize < yFrameSize * 3 / 2){
             ALOGE("Too small output buffer: %zu bytes", mOutputBufferSize);
             mSignalledError = true;
@@ -599,7 +574,7 @@
         }
 
         if (!mFramesConfigured) {
-            PVSetReferenceYUV(mDecHandle,mOutputBuffer[1]);
+            PVSetReferenceYUV(&mVideoDecControls,mOutputBuffer[1]);
             mFramesConfigured = true;
         }
 
@@ -610,7 +585,7 @@
         uint8_t *bitstreamTmp = bitstream;
         uint32_t timestamp = workIndex;
         if (PVDecodeVopHeader(
-                    mDecHandle, &bitstreamTmp, &timestamp, &tmpInSize,
+                    &mVideoDecControls, &bitstreamTmp, &timestamp, &tmpInSize,
                     &header_info, &useExtTimestamp,
                     mOutputBuffer[mNumSamplesOutput & 1]) != PV_TRUE) {
             ALOGE("failed to decode vop header.");
@@ -642,7 +617,7 @@
             continue;
         }
 
-        if (PVDecodeVopBody(mDecHandle, &tmpInSize) != PV_TRUE) {
+        if (PVDecodeVopBody(&mVideoDecControls, &tmpInSize) != PV_TRUE) {
             ALOGE("failed to decode video frame.");
             mSignalledError = true;
             work->result = C2_CORRUPTED;
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
index 716a095..fed04c9 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
@@ -19,8 +19,8 @@
 
 #include <SimpleC2Component.h>
 
+#include <mp4dec_api.h>
 
-struct tagvideoDecControls;
 
 namespace android {
 
@@ -54,7 +54,7 @@
     bool handleResChange(const std::unique_ptr<C2Work> &work);
 
     std::shared_ptr<IntfImpl> mIntf;
-    tagvideoDecControls *mDecHandle;
+    tagvideoDecControls mVideoDecControls;
     std::shared_ptr<C2GraphicBlock> mOutBlock;
     uint8_t *mOutputBuffer[kNumOutputBuffers];
     size_t  mOutputBufferSize;
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index 3c87531..3bfec66 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -436,16 +436,18 @@
         }
 
         ++mNumInputFrames;
-        std::unique_ptr<C2StreamInitDataInfo::output> csd =
-            C2StreamInitDataInfo::output::AllocUnique(outputSize, 0u);
-        if (!csd) {
-            ALOGE("CSD allocation failed");
-            mSignalledError = true;
-            work->result = C2_NO_MEMORY;
-            return;
+        if (outputSize) {
+            std::unique_ptr<C2StreamInitDataInfo::output> csd =
+                C2StreamInitDataInfo::output::AllocUnique(outputSize, 0u);
+            if (!csd) {
+                ALOGE("CSD allocation failed");
+                mSignalledError = true;
+                work->result = C2_NO_MEMORY;
+                return;
+            }
+            memcpy(csd->m.value, outPtr, outputSize);
+            work->worklets.front()->output.configUpdate.push_back(std::move(csd));
         }
-        memcpy(csd->m.value, outPtr, outputSize);
-        work->worklets.front()->output.configUpdate.push_back(std::move(csd));
     }
 
     // handle dynamic bitrate change
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index b47275f..370d33c 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -78,6 +78,19 @@
                 .build());
 
         addParameter(
+            DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+                .withDefault(new C2StreamBitrateModeTuning::output(
+                        0u, C2Config::BITRATE_VARIABLE))
+                .withFields({
+                    C2F(mBitrateMode, value).oneOf({
+                        C2Config::BITRATE_CONST,
+                        C2Config::BITRATE_VARIABLE})
+                })
+                .withSetter(
+                    Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+                .build());
+
+        addParameter(
                 DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
                 .withDefault(new C2StreamBitrateInfo::output(0u, 128000))
                 .withFields({C2F(mBitrate, value).inRange(500, 512000)})
@@ -100,12 +113,14 @@
     uint32_t getSampleRate() const { return mSampleRate->value; }
     uint32_t getChannelCount() const { return mChannelCount->value; }
     uint32_t getBitrate() const { return mBitrate->value; }
+    uint32_t getBitrateMode() const { return mBitrateMode->value; }
     uint32_t getComplexity() const { return mComplexity->value; }
 
 private:
     std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
     std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
     std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
 };
@@ -135,6 +150,7 @@
     mSampleRate = mIntf->getSampleRate();
     mChannelCount = mIntf->getChannelCount();
     uint32_t bitrate = mIntf->getBitrate();
+    uint32_t bitrateMode = mIntf->getBitrateMode();
     int complexity = mIntf->getComplexity();
     mNumSamplesPerFrame = mSampleRate / (1000 / mFrameDurationMs);
     mNumPcmBytesPerInputFrame =
@@ -189,14 +205,24 @@
         return C2_BAD_VALUE;
     }
 
-    // Constrained VBR
-    if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR(1) != OPUS_OK)) {
-        ALOGE("failed to set vbr type");
-        return C2_BAD_VALUE;
-    }
-    if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR_CONSTRAINT(1) !=
-            OPUS_OK)) {
-        ALOGE("failed to set vbr constraint");
+    if (bitrateMode == C2Config::BITRATE_VARIABLE) {
+        // Constrained VBR
+        if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR(1)) != OPUS_OK) {
+            ALOGE("failed to set vbr type");
+            return C2_BAD_VALUE;
+        }
+        if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR_CONSTRAINT(1)) !=
+                OPUS_OK) {
+            ALOGE("failed to set vbr constraint");
+            return C2_BAD_VALUE;
+        }
+    } else if (bitrateMode == C2Config::BITRATE_CONST) {
+        if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR(0)) != OPUS_OK) {
+            ALOGE("failed to set cbr type");
+            return C2_BAD_VALUE;
+        }
+    } else {
+        ALOGE("unknown bitrate mode");
         return C2_BAD_VALUE;
     }
 
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 2953d90..45e2ca8 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -149,8 +149,16 @@
 #else
         addParameter(
                 DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
-                .withConstValue(new C2StreamProfileLevelInfo::input(0u,
-                        C2Config::PROFILE_UNUSED, C2Config::LEVEL_UNUSED))
+                .withDefault(new C2StreamProfileLevelInfo::input(0u,
+                        C2Config::PROFILE_VP8_0, C2Config::LEVEL_UNUSED))
+                .withFields({
+                    C2F(mProfileLevel, profile).equalTo(
+                        PROFILE_VP8_0
+                    ),
+                    C2F(mProfileLevel, level).equalTo(
+                        LEVEL_UNUSED),
+                })
+                .withSetter(ProfileLevelSetter, mSize)
                 .build());
 #endif
 
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index dc4c658..7486d27 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -484,14 +484,8 @@
     switch (layout.type) {
         case C2PlanarLayout::TYPE_RGB:
         case C2PlanarLayout::TYPE_RGBA: {
-            std::shared_ptr<C2StreamColorAspectsInfo::output> colorAspects;
-            {
-                IntfImpl::Lock lock = mIntf->lock();
-                colorAspects = mIntf->getCodedColorAspects_l();
-            }
             ConvertRGBToPlanarYUV(mConversionBuffer.data(), stride, vstride,
-                                  mConversionBuffer.size(), *rView.get(),
-                                  colorAspects->matrix, colorAspects->range);
+                                  mConversionBuffer.size(), *rView.get());
             vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height,
                          mStrideAlign, mConversionBuffer.data());
             break;
@@ -641,7 +635,8 @@
             }
             work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
             work->worklets.front()->output.buffers.clear();
-            std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block);
+            std::shared_ptr<C2Buffer> buffer =
+                createLinearBuffer(block, 0, encoded_packet->data.frame.sz);
             if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY) {
                 buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
                         0u /* stream id */, C2Config::SYNC_FRAME));
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index c70ffb9..926b2e9 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -324,21 +324,35 @@
                 .withConstValue(new C2StreamIntraRefreshTuning::output(
                              0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
                 .build());
-
+#ifdef VP9
         addParameter(
-        DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
-        .withDefault(new C2StreamProfileLevelInfo::output(
-                0u, PROFILE_VP9_0, LEVEL_VP9_4_1))
-        .withFields({
-            C2F(mProfileLevel, profile).equalTo(
-                PROFILE_VP9_0
-            ),
-            C2F(mProfileLevel, level).equalTo(
-                LEVEL_VP9_4_1),
-        })
-        .withSetter(ProfileLevelSetter)
-        .build());
-
+                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                .withDefault(new C2StreamProfileLevelInfo::output(
+                        0u, PROFILE_VP9_0, LEVEL_VP9_4_1))
+                .withFields({
+                    C2F(mProfileLevel, profile).equalTo(
+                        PROFILE_VP9_0
+                    ),
+                    C2F(mProfileLevel, level).equalTo(
+                        LEVEL_VP9_4_1),
+                })
+                .withSetter(ProfileLevelSetter)
+                .build());
+#else
+        addParameter(
+                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                .withDefault(new C2StreamProfileLevelInfo::output(
+                        0u, PROFILE_VP8_0, LEVEL_UNUSED))
+                .withFields({
+                    C2F(mProfileLevel, profile).equalTo(
+                        PROFILE_VP8_0
+                    ),
+                    C2F(mProfileLevel, level).equalTo(
+                        LEVEL_UNUSED),
+                })
+                .withSetter(ProfileLevelSetter)
+                .build());
+#endif
         addParameter(
                 DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
                 .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
@@ -444,9 +458,6 @@
     std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const { return mBitrateMode; }
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
     std::shared_ptr<C2StreamTemporalLayeringTuning::output> getTemporalLayers_l() const { return mLayering; }
-    std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
-        return mCodedColorAspects;
-    }
     uint32_t getSyncFramePeriod() const {
         if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
             return 0;
diff --git a/media/codec2/components/xaac/C2SoftXaacDec.cpp b/media/codec2/components/xaac/C2SoftXaacDec.cpp
index 6deafda..8bf4b72 100644
--- a/media/codec2/components/xaac/C2SoftXaacDec.cpp
+++ b/media/codec2/components/xaac/C2SoftXaacDec.cpp
@@ -361,9 +361,8 @@
     C2WriteView wView = block->map().get();
     int16_t* outBuffer = reinterpret_cast<int16_t*>(wView.data());
     memcpy(outBuffer, mOutputDrainBuffer, mOutputDrainBufferWritePos);
-    mOutputDrainBufferWritePos = 0;
 
-    auto fillWork = [buffer = createLinearBuffer(block)](
+    auto fillWork = [buffer = createLinearBuffer(block, 0, mOutputDrainBufferWritePos)](
         const std::unique_ptr<C2Work>& work) {
         uint32_t flags = 0;
         if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
@@ -376,6 +375,9 @@
         work->worklets.front()->output.ordinal = work->input.ordinal;
         work->workletsProcessed = 1u;
     };
+
+    mOutputDrainBufferWritePos = 0;
+
     if (work && work->input.ordinal.frameIndex == c2_cntr64_t(mCurFrameIndex)) {
         fillWork(work);
     } else {
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index bdf2027..7eaed08 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -262,6 +262,8 @@
     kParamIndexTunneledMode, // struct
     kParamIndexTunnelHandle, // int32[]
     kParamIndexTunnelSystemTime, // int64
+    kParamIndexTunnelHoldRender, // bool
+    kParamIndexTunnelStartRender, // bool
 
     // dmabuf allocator
     kParamIndexStoreDmaBufUsage,  // store, struct
@@ -2365,6 +2367,30 @@
         C2PortTunnelSystemTime;
 constexpr char C2_PARAMKEY_OUTPUT_RENDER_TIME[] = "output.render-time";
 
+/**
+ * Tunneled mode video peek signaling flag.
+ *
+ * When a video frame is pushed to the decoder with this parameter set to true,
+ * the decoder must decode the frame, signal partial completion, and hold on to
+ * the frame until C2StreamTunnelStartRender is set to true (which resets this
+ * flag). A flush will also result in the held frames being returned to the
+ * client (but not rendered).
+ */
+typedef C2StreamParam<C2Info, C2EasyBoolValue, kParamIndexTunnelHoldRender>
+        C2StreamTunnelHoldRender;
+constexpr char C2_PARAMKEY_TUNNEL_HOLD_RENDER[] = "output.tunnel-hold-render";
+
+/**
+ * Tunneled mode video peek signaling flag.
+ *
+ * Upon receiving this flag, the decoder shall set C2StreamTunnelHoldRender to
+ * false, which shall cause any frames held for rendering to be immediately
+ * displayed, regardless of their timestamps.
+ */
+typedef C2StreamParam<C2Info, C2EasyBoolValue, kParamIndexTunnelStartRender>
+        C2StreamTunnelStartRender;
+constexpr char C2_PARAMKEY_TUNNEL_START_RENDER[] = "output.tunnel-start-render";
+
 C2ENUM(C2PlatformConfig::encoding_quality_level_t, uint32_t,
     NONE,
     S_HANDHELD,
diff --git a/media/codec2/core/include/C2Work.h b/media/codec2/core/include/C2Work.h
index 67084cc..794402f 100644
--- a/media/codec2/core/include/C2Work.h
+++ b/media/codec2/core/include/C2Work.h
@@ -145,10 +145,35 @@
          */
         FLAG_INCOMPLETE = (1 << 3),
         /**
+         * This frame has been corrected due to a bitstream error. This is a hint, and in most cases
+         * can be ignored. This flag can be set by components on their output to signal the clients
+         * that errors may be present but the frame should be used nonetheless. It can also be set
+         * by clients to signal that the input frame has been corrected, but nonetheless should be
+         * processed.
+         */
+        FLAG_CORRECTED = (1 << 4),
+        /**
+         * This frame is corrupt due to a bitstream error. This is similar to FLAG_CORRECTED,
+         * with the exception that this is a hint that downstream components should not process this
+         * frame.
+         * <p>
+         * If set on the input by the client, the input is likely non-processable and should be
+         * handled similarly to an uncorrectable bitstream error. For components that operate
+         * on whole access units, this flag can be propagated to the output. Other components should
+         * aim to detect access unit boundaries to determine if any part of the input frame can be
+         * processed.
+         * <p>
+         * If set by the component, this signals to the client that the output is not usable,
+         * possibly including its metadata; however, the component will try to recover on
+         * successive input frames.
+         */
+        FLAG_CORRUPT = (1 << 5),
+
+        /**
          * This frame contains only codec-specific configuration data, and no actual access unit.
          *
-         * \deprecated pass codec configuration with using the \todo codec-specific configuration
-         * info together with the access unit.
+         * \deprecated pass codec configuration using the C2InitData info parameter together
+         *             with the access unit.
          */
         FLAG_CODEC_CONFIG  = (1u << 31),
     };
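
A minimal sketch of how a client might interpret the two new flags; the bit values mirror the enum entries above, while the Frame struct and describe() helper are stand-ins invented for this example:

    #include <cstdint>
    #include <cstdio>

    constexpr uint32_t FLAG_CORRECTED = 1u << 4;
    constexpr uint32_t FLAG_CORRUPT   = 1u << 5;

    struct Frame { uint32_t flags; };

    static const char* describe(const Frame& f) {
        if (f.flags & FLAG_CORRUPT) return "corrupt: hint not to process downstream";
        if (f.flags & FLAG_CORRECTED) return "corrected: errors present but usable";
        return "clean";
    }

    int main() {
        Frame f{FLAG_CORRECTED};
        std::printf("%s\n", describe(f));
        return 0;
    }
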
diff --git a/media/codec2/fuzzer/C2Fuzzer.cpp b/media/codec2/fuzzer/C2Fuzzer.cpp
index 51e1013..e469d8b 100644
--- a/media/codec2/fuzzer/C2Fuzzer.cpp
+++ b/media/codec2/fuzzer/C2Fuzzer.cpp
@@ -194,12 +194,12 @@
   }
 
   std::vector<C2Param*> configParams;
+  C2StreamPictureSizeInfo::input inputSize(0u, kWidthOfVideo, kHeightOfVideo);
+  C2StreamSampleRateInfo::output sampleRateInfo(0u, kSamplingRateOfAudio);
+  C2StreamChannelCountInfo::output channelCountInfo(0u, kChannelsOfAudio);
   if (domain.value == DOMAIN_VIDEO) {
-    C2StreamPictureSizeInfo::input inputSize(0u, kWidthOfVideo, kHeightOfVideo);
     configParams.push_back(&inputSize);
   } else if (domain.value == DOMAIN_AUDIO) {
-    C2StreamSampleRateInfo::output sampleRateInfo(0u, kSamplingRateOfAudio);
-    C2StreamChannelCountInfo::output channelCountInfo(0u, kChannelsOfAudio);
     configParams.push_back(&sampleRateInfo);
     configParams.push_back(&channelCountInfo);
   }
@@ -239,17 +239,17 @@
 }
 
 void Codec2Fuzzer::decodeFrames(const uint8_t* data, size_t size) {
-  mBufferSource = new BufferSource(data, size);
-  if (!mBufferSource) {
+  std::unique_ptr<BufferSource> bufferSource = std::make_unique<BufferSource>(data, size);
+  if (!bufferSource) {
     return;
   }
-  mBufferSource->parse();
+  bufferSource->parse();
   c2_status_t status = C2_OK;
   size_t numFrames = 0;
-  while (!mBufferSource->isEos()) {
+  while (!bufferSource->isEos()) {
     uint8_t* frame = nullptr;
     size_t frameSize = 0;
-    FrameData frameData = mBufferSource->getFrame();
+    FrameData frameData = bufferSource->getFrame();
     frame = std::get<0>(frameData);
     frameSize = std::get<1>(frameData);
 
@@ -298,7 +298,6 @@
   mConditionalVariable.wait_for(waitForDecodeComplete, kC2FuzzerTimeOut, [this] { return mEos; });
   std::list<std::unique_ptr<C2Work>> c2flushedWorks;
   mComponent->flush_sm(C2Component::FLUSH_COMPONENT, &c2flushedWorks);
-  delete mBufferSource;
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
diff --git a/media/codec2/fuzzer/C2Fuzzer.h b/media/codec2/fuzzer/C2Fuzzer.h
index d5ac81a..da76885 100644
--- a/media/codec2/fuzzer/C2Fuzzer.h
+++ b/media/codec2/fuzzer/C2Fuzzer.h
@@ -104,7 +104,6 @@
     static constexpr size_t kMarkerSuffixSize = 3;
   };
 
-  BufferSource* mBufferSource;
   bool mEos = false;
   C2BlockPool::local_id_t mBlockPoolId;
 
diff --git a/media/codec2/hidl/1.0/vts/.clang-format b/media/codec2/hidl/1.0/vts/.clang-format
deleted file mode 120000
index 136279c..0000000
--- a/media/codec2/hidl/1.0/vts/.clang-format
+++ /dev/null
@@ -1 +0,0 @@
-../../../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index abd8b2d..74b099c 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -46,8 +46,8 @@
         {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.info"},
         {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac",
          "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"},
-        {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.info"},
-        {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3",
+        {"mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.info"},
+        {"mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3",
          "bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"},
         {"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz.info"},
         {"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb",
@@ -303,7 +303,7 @@
     int streamCount = 0;
 
     for (size_t i = 0; i < gCompToFiles.size(); ++i) {
-        if (mMime.find(gCompToFiles[i].mime) != std::string::npos) {
+        if (!mMime.compare("audio/" + gCompToFiles[i].mime)) {
             if (streamCount == streamIndex) {
                 mInputFile = sResourceDir + gCompToFiles[i].inputFile;
                 mInfoFile = sResourceDir + gCompToFiles[i].infoFile;
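
The lookup above now compares against the fully qualified type built from "audio/" plus the table entry instead of doing a substring search, so only an exact match can select a row. A standalone sketch of the two behaviors, with an illustrative two-row table rather than the test's full list:

    #include <cassert>
    #include <string>
    #include <vector>

    struct CompToFile { std::string mime; std::string inputFile; };

    int main() {
        const std::vector<CompToFile> table = {
            {"mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3"},
            {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac"},
        };
        const std::string mMime = "audio/mpeg";

        // Old: substring search; even a fragment like "peg" would match.
        assert(mMime.find("peg") != std::string::npos);

        // New: exact comparison against "audio/" + <table entry>.
        assert(mMime.compare("audio/" + table[0].mime) == 0);
        assert(mMime.compare("audio/peg") != 0);    // partial hits are rejected
        return 0;
    }
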
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
index d77b943..1dc037a 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
@@ -81,6 +81,7 @@
         mEos = false;
         mCsd = false;
         mFramesReceived = 0;
+        mEncoderFrameSize = 0;
         mWorkResult = C2_OK;
         mOutputSize = 0u;
         getInputMaxBufSize();
@@ -146,6 +147,7 @@
     uint32_t mFramesReceived;
     int32_t mInputMaxBufSize;
     uint64_t mOutputSize;
+    uint32_t mEncoderFrameSize;
     std::list<uint64_t> mFlushedIndices;
 
     C2BlockPool::local_id_t mBlockPoolId;
@@ -301,11 +303,22 @@
 c2_status_t Codec2AudioEncHidlTestBase::getSamplesPerFrame(int32_t nChannels,
                                                            int32_t* samplesPerFrame) {
     std::vector<std::unique_ptr<C2Param>> queried;
-    c2_status_t c2err = mComponent->query({}, {C2StreamMaxBufferSizeInfo::input::PARAM_TYPE},
+    c2_status_t c2err = mComponent->query({}, {C2StreamAudioFrameSizeInfo::input::PARAM_TYPE},
                                           C2_DONT_BLOCK, &queried);
+    size_t offset = sizeof(C2Param);
+    if (c2err == C2_OK && queried.size()) {
+        C2Param* param = queried[0].get();
+        mEncoderFrameSize = *(uint32_t*)((uint8_t*)param + offset);
+        if (mEncoderFrameSize) {
+            *samplesPerFrame = mEncoderFrameSize;
+            return C2_OK;
+        }
+    }
+
+    c2err = mComponent->query({}, {C2StreamMaxBufferSizeInfo::input::PARAM_TYPE}, C2_DONT_BLOCK,
+                              &queried);
     if (c2err != C2_OK || queried.size() == 0) return c2err;
 
-    size_t offset = sizeof(C2Param);
     C2Param* param = queried[0].get();
     uint32_t maxInputSize = *(uint32_t*)((uint8_t*)param + offset);
     *samplesPerFrame = std::min((maxInputSize / (nChannels * 2)), kMaxSamplesPerFrame);
@@ -440,10 +453,13 @@
     ALOGV("EncodeTest");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     bool signalEOS = std::get<2>(GetParam());
-    // Ratio w.r.t to mInputMaxBufSize
-    int32_t inputMaxBufRatio = std::get<3>(GetParam());
-    mSamplesPerFrame = ((mInputMaxBufSize / inputMaxBufRatio) / (mNumChannels * 2));
-
+    // Set samples per frame based on inputMaxBufRatio if the component does not
+    // advertise a supported frame size
+    if (!mEncoderFrameSize) {
+        // Ratio w.r.t. mInputMaxBufSize
+        int32_t inputMaxBufRatio = std::get<3>(GetParam());
+        mSamplesPerFrame = ((mInputMaxBufSize / inputMaxBufRatio) / (mNumChannels * 2));
+    }
     ALOGV("signalEOS %d mInputMaxBufSize %d mSamplesPerFrame %d", signalEOS, mInputMaxBufSize,
           mSamplesPerFrame);
 
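
getSamplesPerFrame() above reads the queried value at offset sizeof(C2Param), i.e. it treats the payload as a single 32-bit field placed right after the parameter header, and it relies on the same layout for both C2StreamAudioFrameSizeInfo and C2StreamMaxBufferSizeInfo. A self-contained sketch of that offset-based read, with stand-in types rather than the real Codec2 definitions:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    // Stand-ins only: a fixed-size header followed by one 32-bit payload field.
    struct FakeParamHeader { uint32_t size; uint32_t index; };
    struct FakeFrameSizeParam { FakeParamHeader header; uint32_t value; };

    int main() {
        FakeFrameSizeParam param{{sizeof(FakeFrameSizeParam), 0x1234u}, 1024u};
        const uint8_t* raw = reinterpret_cast<const uint8_t*>(&param);

        // Skip the header and read the first payload field, mirroring
        // *(uint32_t*)((uint8_t*)param + sizeof(C2Param)) in the test.
        uint32_t value = 0;
        std::memcpy(&value, raw + sizeof(FakeParamHeader), sizeof(value));
        assert(value == 1024u);
        return 0;
    }
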
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index c331d0b..73af188 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -64,13 +64,15 @@
         {"3gpp", "bbb_h263_352x288_300kbps_12fps.h263", "bbb_h263_352x288_300kbps_12fps.info", ""},
         {"mp4v-es", "bbb_mpeg4_352x288_512kbps_30fps.m4v", "bbb_mpeg4_352x288_512kbps_30fps.info",
          ""},
-        {"vp8", "bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_176x144_240kbps_60fps.info", ""},
-        {"vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", "bbb_vp8_640x360_2mbps_30fps.info",
+        {"x-vnd.on2.vp8", "bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_176x144_240kbps_60fps.info",
+         ""},
+        {"x-vnd.on2.vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", "bbb_vp8_640x360_2mbps_30fps.info",
          "bbb_vp8_640x360_2mbps_30fps_chksm.md5"},
-        {"vp9", "bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_176x144_285kbps_60fps.info", ""},
-        {"vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.info",
-         "bbb_vp9_640x360_1600kbps_30fps_chksm.md5"},
-        {"vp9", "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9",
+        {"x-vnd.on2.vp9", "bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_176x144_285kbps_60fps.info",
+         ""},
+        {"x-vnd.on2.vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9",
+         "bbb_vp9_640x360_1600kbps_30fps.info", "bbb_vp9_640x360_1600kbps_30fps_chksm.md5"},
+        {"x-vnd.on2.vp9", "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9",
          "bbb_vp9_704x480_280kbps_24fps_altref_2.info", ""},
         {"av01", "bbb_av1_640_360.av1", "bbb_av1_640_360.info", "bbb_av1_640_360_chksum.md5"},
         {"av01", "bbb_av1_176_144.av1", "bbb_av1_176_144.info", "bbb_av1_176_144_chksm.md5"},
@@ -379,7 +381,7 @@
     int streamCount = 0;
 
     for (size_t i = 0; i < gCompToFiles.size(); ++i) {
-        if (mMime.find(gCompToFiles[i].mime) != std::string::npos) {
+        if (!mMime.compare("video/" + gCompToFiles[i].mime)) {
             if (streamCount == streamIndex) {
                 mInputFile = sResourceDir + gCompToFiles[i].inputFile;
                 mInfoFile = sResourceDir + gCompToFiles[i].infoFile;
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index 6a00edd..a6507e7 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -334,6 +334,12 @@
     int bytesCount = nWidth * nHeight * 3 >> 1;
     int32_t timestampIncr = ENCODER_TIMESTAMP_INCREMENT;
     c2_status_t err = C2_OK;
+
+    // Query component's memory usage flags
+    std::vector<std::unique_ptr<C2Param>> params;
+    C2StreamUsageTuning::input compUsage(0u, 0u);
+    component->query({&compUsage}, {}, C2_DONT_BLOCK, &params);
+
     while (1) {
         if (nFrames == 0) break;
         uint32_t flags = 0;
@@ -384,7 +390,8 @@
         }
         std::shared_ptr<C2GraphicBlock> block;
         err = graphicPool->fetchGraphicBlock(nWidth, nHeight, HAL_PIXEL_FORMAT_YV12,
-                                             {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE},
+                                             {C2MemoryUsage::CPU_READ | compUsage.value,
+                                                 C2MemoryUsage::CPU_WRITE | compUsage.value},
                                              &block);
         if (err != C2_OK) {
             fprintf(stderr, "fetchGraphicBlock failed : %d\n", err);
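
The encoder input loop above first queries the component's preferred buffer usage and then ORs it into the flags passed to fetchGraphicBlock(), so the graphic block carries whatever extra usage bits the component asked for in addition to CPU access. A small sketch of that flag combination, with illustrative constants in place of the real C2MemoryUsage values:

    #include <cassert>
    #include <cstdint>

    // Illustrative bit values only; the real constants come from C2MemoryUsage.
    constexpr uint64_t kCpuRead       = 1u << 0;
    constexpr uint64_t kCpuWrite      = 1u << 1;
    constexpr uint64_t kComponentBits = 1u << 16;   // hypothetical component-reported usage

    int main() {
        uint64_t componentUsage = kComponentBits;   // as returned by the query
        uint64_t readUsage  = kCpuRead  | componentUsage;
        uint64_t writeUsage = kCpuWrite | componentUsage;
        assert((readUsage & kCpuRead) && (readUsage & kComponentBits));
        assert((writeUsage & kCpuWrite) && (writeUsage & kComponentBits));
        return 0;
    }
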
diff --git a/media/codec2/hidl/services/Android.bp b/media/codec2/hidl/services/Android.bp
index 5a23217..4ec67b0 100644
--- a/media/codec2/hidl/services/Android.bp
+++ b/media/codec2/hidl/services/Android.bp
@@ -52,7 +52,7 @@
 
     // minijail is used to protect against unexpected system calls.
     shared_libs: [
-        "libavservices_minijail_vendor",
+        "libavservices_minijail",
         "libbinder",
     ],
     required: ["android.hardware.media.c2@1.1-default-seccomp_policy"],
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index d942606..10a6896 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -665,6 +665,10 @@
         mCodec->mCallback->onOutputBuffersChanged();
     }
 
+    void onFirstTunnelFrameReady() override {
+        mCodec->mCallback->onFirstTunnelFrameReady();
+    }
+
 private:
     CCodec *mCodec;
 };
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 5a58fd8..cdd9275 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -205,6 +205,7 @@
     int32_t flags = 0;
     int32_t tmp = 0;
     bool eos = false;
+    bool tunnelFirstFrame = false;
     if (buffer->meta()->findInt32("eos", &tmp) && tmp) {
         eos = true;
         mInputMetEos = true;
@@ -213,6 +214,9 @@
     if (buffer->meta()->findInt32("csd", &tmp) && tmp) {
         flags |= C2FrameData::FLAG_CODEC_CONFIG;
     }
+    if (buffer->meta()->findInt32("tunnel-first-frame", &tmp) && tmp) {
+        tunnelFirstFrame = true;
+    }
     ALOGV("[%s] queueInputBuffer: buffer->size() = %zu", mName, buffer->size());
     std::list<std::unique_ptr<C2Work>> items;
     std::unique_ptr<C2Work> work(new C2Work);
@@ -244,7 +248,7 @@
                 bool released = input->buffers->releaseBuffer(buffer, nullptr, true);
                 ALOGV("[%s] queueInputBuffer: buffer copied; %sreleased",
                       mName, released ? "" : "not ");
-                buffer.clear();
+                buffer = copy;
             } else {
                 ALOGW("[%s] queueInputBuffer: failed to copy a buffer; this may cause input "
                       "buffer starvation on component.", mName);
@@ -272,6 +276,12 @@
             }
         }
     } else if (eos) {
+        Mutexed<Input>::Locked input(mInput);
+        if (input->frameReassembler) {
+            usesFrameReassembler = true;
+            // drain any pending items with eos
+            input->frameReassembler.process(buffer, &items);
+        }
         flags |= C2FrameData::FLAG_END_OF_STREAM;
     }
     if (usesFrameReassembler) {
@@ -284,6 +294,13 @@
         // TODO: fill info's
 
         work->input.configUpdate = std::move(mParamsToBeSet);
+        if (tunnelFirstFrame) {
+            C2StreamTunnelHoldRender::input tunnelHoldRender{
+                0u /* stream */,
+                C2_TRUE /* value */
+            };
+            work->input.configUpdate.push_back(C2Param::Copy(tunnelHoldRender));
+        }
         work->worklets.clear();
         work->worklets.emplace_back(new C2Worklet);
 
@@ -324,10 +341,10 @@
     } else {
         Mutexed<Input>::Locked input(mInput);
         bool released = false;
-        if (buffer) {
-            released = input->buffers->releaseBuffer(buffer, nullptr, true);
-        } else if (copy) {
+        if (copy) {
             released = input->extraBuffers.releaseSlot(copy, nullptr, true);
+        } else if (buffer) {
+            released = input->buffers->releaseBuffer(buffer, nullptr, true);
         }
         ALOGV("[%s] queueInputBuffer: buffer%s %sreleased",
               mName, (buffer == nullptr) ? "(copy)" : "", released ? "" : "not ");
@@ -1706,6 +1723,15 @@
                 }
                 break;
             }
+            case C2StreamTunnelHoldRender::CORE_INDEX: {
+                C2StreamTunnelHoldRender::output firstTunnelFrameHoldRender;
+                if (!(worklet->output.flags & C2FrameData::FLAG_INCOMPLETE)) break;
+                if (!firstTunnelFrameHoldRender.updateFrom(*param)) break;
+                if (firstTunnelFrameHoldRender.value != C2_TRUE) break;
+                ALOGV("[%s] onWorkDone: first tunnel frame ready", mName);
+                mCCodecCallback->onFirstTunnelFrameReady();
+                break;
+            }
             default:
                 ALOGV("[%s] onWorkDone: unrecognized config update (%08X)",
                       mName, param->index());
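
The buffer-channel changes above pick up a "tunnel-first-frame" flag from the input buffer's metadata, attach a C2StreamTunnelHoldRender config update to that work item, and later call onFirstTunnelFrameReady() when the matching output-side update arrives on an incomplete work. A much-reduced sketch of the input half of that flow, using plain stand-in types instead of the Codec2 work structures:

    #include <cassert>
    #include <cstdint>
    #include <map>
    #include <string>
    #include <vector>

    // Stand-in for the C2StreamTunnelHoldRender config update.
    struct HoldRenderUpdate { bool hold; };

    std::vector<HoldRenderUpdate> buildConfigUpdate(const std::map<std::string, int32_t>& meta) {
        std::vector<HoldRenderUpdate> configUpdate;
        auto it = meta.find("tunnel-first-frame");
        if (it != meta.end() && it->second != 0) {
            // First frame of a tunneled session: decode it, but hold rendering
            // until explicitly triggered.
            configUpdate.push_back(HoldRenderUpdate{true});
        }
        return configUpdate;
    }

    int main() {
        std::map<std::string, int32_t> meta{{"tunnel-first-frame", 1}};
        assert(buildConfigUpdate(meta).size() == 1);
        assert(buildConfigUpdate({}).empty());
        return 0;
    }
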
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 45da003..5a2aca2 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -45,6 +45,7 @@
     virtual void onError(status_t err, enum ActionCode actionCode) = 0;
     virtual void onOutputFramesRendered(int64_t mediaTimeUs, nsecs_t renderTimeNs) = 0;
     virtual void onOutputBuffersChanged() = 0;
+    virtual void onFirstTunnelFrameReady() = 0;
 };
 
 /**
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index ba44074..cfa89cf 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -129,6 +129,7 @@
     if (!copy->copy(c2buffer)) {
         return nullptr;
     }
+    copy->meta()->extend(buffer->meta());
     return copy;
 }
 
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 27e87e6..5646095 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -938,6 +938,14 @@
             return value == 0 ? C2_FALSE : C2_TRUE;
         }));
 
+    add(ConfigMapper("android._trigger-tunnel-peek", C2_PARAMKEY_TUNNEL_START_RENDER, "value")
+       .limitTo(D::PARAM & D::VIDEO & D::DECODER)
+       .withMapper([](C2Value v) -> C2Value {
+           int32_t value = 0;
+           (void)v.get(&value);
+           return value == 0 ? C2_FALSE : C2_TRUE;
+       }));
+
     /* still to do
     constexpr char KEY_PUSH_BLANK_BUFFERS_ON_STOP[] = "push-blank-buffers-on-shutdown";
 
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index a26f89e..9b3d3fe 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -67,7 +67,8 @@
             s.compare(s.size() - suffixLen, suffixLen, suffix) == 0;
 }
 
-void addSupportedProfileLevels(
+// returns true if component advertised supported profile level(s)
+bool addSupportedProfileLevels(
         std::shared_ptr<Codec2Client::Interface> intf,
         MediaCodecInfo::CapabilitiesWriter *caps,
         const Traits& trait, const std::string &mediaType) {
@@ -87,12 +88,12 @@
     c2_status_t err = intf->querySupportedValues(profileQuery, C2_DONT_BLOCK);
     ALOGV("query supported profiles -> %s | %s", asString(err), asString(profileQuery[0].status));
     if (err != C2_OK || profileQuery[0].status != C2_OK) {
-        return;
+        return false;
     }
 
     // we only handle enumerated values
     if (profileQuery[0].values.type != C2FieldSupportedValues::VALUES) {
-        return;
+        return false;
     }
 
     // determine if codec supports HDR
@@ -125,6 +126,8 @@
     supportsHdr |= (mediaType == MIMETYPE_VIDEO_VP9);
     supportsHdr |= (mediaType == MIMETYPE_VIDEO_AV1);
 
+    bool added = false;
+
     for (C2Value::Primitive profile : profileQuery[0].values.values) {
         pl.profile = (C2Config::profile_t)profile.ref<uint32_t>();
         std::vector<std::unique_ptr<C2SettingResult>> failures;
@@ -165,6 +168,7 @@
         } else if (!mapper) {
             caps->addProfileLevel(pl.profile, pl.level);
         }
+        added = true;
 
         // for H.263 also advertise the second highest level if the
         // codec supports level 45, as level 45 only covers level 10
@@ -188,6 +192,7 @@
             }
         }
     }
+    return added;
 }
 
 void addSupportedColorFormats(
@@ -551,7 +556,15 @@
                     }
                 }
 
-                addSupportedProfileLevels(intf, caps.get(), trait, mediaType);
+                if (!addSupportedProfileLevels(intf, caps.get(), trait, mediaType)) {
+                    // TODO(b/193279646) This will get fixed in C2InterfaceHelper
+                    // Some components may not advertise supported values if they use a const
+                    // param for profile/level (they support only one profile). For now cover
+                    // only VP8 here until it is fixed.
+                    if (mediaType == MIMETYPE_VIDEO_VP8) {
+                        caps->addProfileLevel(VP8ProfileMain, VP8Level_Version0);
+                    }
+                }
                 addSupportedColorFormats(intf, caps.get(), trait, mediaType);
             }
         }
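
addSupportedProfileLevels() now reports whether it managed to enumerate anything, and the caller falls back to a hard-coded VP8 profile/level when it did not. A compact sketch of that query-then-fallback shape, with stand-in names (queryProfileLevels and the literal values are illustrative, not the real interfaces):

    #include <cassert>
    #include <string>
    #include <vector>

    struct ProfileLevel { int profile; int level; };

    // Returns true only if at least one profile/level pair could be enumerated.
    static bool queryProfileLevels(const std::string& mediaType, std::vector<ProfileLevel>* caps) {
        if (mediaType == "video/avc") {            // pretend only AVC advertises values
            caps->push_back({1, 1});
            return true;
        }
        return false;
    }

    int main() {
        std::vector<ProfileLevel> caps;
        if (!queryProfileLevels("video/x-vnd.on2.vp8", &caps)) {
            // Component exposed a const (single) profile and enumerated nothing:
            // advertise a sensible default instead of leaving the list empty.
            caps.push_back({0 /* illustrative "main" */, 0 /* illustrative "version 0" */});
        }
        assert(caps.size() == 1);
        return 0;
    }
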
diff --git a/media/codec2/sfplugin/FrameReassembler.cpp b/media/codec2/sfplugin/FrameReassembler.cpp
index cf1be17..b52506c 100644
--- a/media/codec2/sfplugin/FrameReassembler.cpp
+++ b/media/codec2/sfplugin/FrameReassembler.cpp
@@ -88,8 +88,7 @@
         const sp<MediaCodecBuffer> &buffer,
         std::list<std::unique_ptr<C2Work>> *items) {
     int64_t timeUs;
-    if (buffer->size() == 0u
-            || !buffer->meta()->findInt64("timeUs", &timeUs)) {
+    if (!buffer->meta()->findInt64("timeUs", &timeUs)) {
         return C2_BAD_VALUE;
     }
 
diff --git a/media/codec2/sfplugin/tests/FrameReassembler_test.cpp b/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
index 6738ee7..0be934a 100644
--- a/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
+++ b/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
@@ -53,7 +53,8 @@
             C2Config::pcm_encoding_t encoding,
             size_t inputFrameSizeInBytes,
             size_t count,
-            size_t expectedOutputSize) {
+            size_t expectedOutputSize,
+            bool separateEos) {
         FrameReassembler frameReassembler;
         frameReassembler.init(
                 mPool,
@@ -67,7 +68,7 @@
 
         size_t inputIndex = 0, outputIndex = 0;
         size_t expectCount = 0;
-        for (size_t i = 0; i < count; ++i) {
+        for (size_t i = 0; i < count + (separateEos ? 1 : 0); ++i) {
             sp<MediaCodecBuffer> buffer = new MediaCodecBuffer(
                     new AMessage, new ABuffer(inputFrameSizeInBytes));
             buffer->setRange(0, inputFrameSizeInBytes);
@@ -77,8 +78,12 @@
             if (i == count - 1) {
                 buffer->meta()->setInt32("eos", 1);
             }
-            for (size_t j = 0; j < inputFrameSizeInBytes; ++j, ++inputIndex) {
-                buffer->base()[j] = (inputIndex & 0xFF);
+            if (i == count && separateEos) {
+                buffer->setRange(0, 0);
+            } else {
+                for (size_t j = 0; j < inputFrameSizeInBytes; ++j, ++inputIndex) {
+                    buffer->base()[j] = (inputIndex & 0xFF);
+                }
             }
             std::list<std::unique_ptr<C2Work>> items;
             ASSERT_EQ(C2_OK, frameReassembler.process(buffer, &items));
@@ -105,7 +110,8 @@
                 ASSERT_EQ(encoderFrameSize * BytesPerSample(encoding), view.capacity());
                 for (size_t j = 0; j < view.capacity(); ++j, ++outputIndex) {
                     ASSERT_TRUE(outputIndex < inputIndex
-                             || inputIndex == inputFrameSizeInBytes * count);
+                             || inputIndex == inputFrameSizeInBytes * count)
+                        << "inputIndex = " << inputIndex << " outputIndex = " << outputIndex;
                     uint8_t expected = outputIndex < inputIndex ? (outputIndex & 0xFF) : 0;
                     if (expectCount < 10) {
                         ++expectCount;
@@ -137,204 +143,239 @@
 // Push frames with exactly the same size as the encoder requested.
 TEST_F(FrameReassemblerTest, PushExactFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            1024 /* input frame size in bytes = 1024 samples * 1 channel * 1 bytes/sample */,
-            10 /* count */,
-            10240 /* expected output size = 10 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            2048 /* input frame size in bytes = 1024 samples * 1 channel * 2 bytes/sample */,
-            10 /* count */,
-            20480 /* expected output size = 10 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            4096 /* input frame size in bytes = 1024 samples * 1 channel * 4 bytes/sample */,
-            10 /* count */,
-            40960 /* expected output size = 10 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                1024 /* input frame size in bytes = 1024 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                10240 /* expected output size = 10 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                2048 /* input frame size in bytes = 1024 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                20480 /* expected output size = 10 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                4096 /* input frame size in bytes = 1024 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                40960 /* expected output size = 10 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push frames with half the size that the encoder requested.
 TEST_F(FrameReassemblerTest, PushHalfFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            512 /* input frame size in bytes = 512 samples * 1 channel * 1 bytes per sample */,
-            10 /* count */,
-            5120 /* expected output size = 5 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            1024 /* input frame size in bytes = 512 samples * 1 channel * 2 bytes per sample */,
-            10 /* count */,
-            10240 /* expected output size = 5 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            2048 /* input frame size in bytes = 512 samples * 1 channel * 4 bytes per sample */,
-            10 /* count */,
-            20480 /* expected output size = 5 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                512 /* input frame size in bytes = 512 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                5120 /* expected output size = 5 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                1024 /* input frame size in bytes = 512 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                10240 /* expected output size = 5 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                2048 /* input frame size in bytes = 512 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                20480 /* expected output size = 5 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push frames with twice the size that the encoder requested.
 TEST_F(FrameReassemblerTest, PushDoubleFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            2048 /* input frame size in bytes = 2048 samples * 1 channel * 1 bytes per sample */,
-            10 /* count */,
-            20480 /* expected output size = 20 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            4096 /* input frame size in bytes = 2048 samples * 1 channel * 2 bytes per sample */,
-            10 /* count */,
-            40960 /* expected output size = 20 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            8192 /* input frame size in bytes = 2048 samples * 1 channel * 4 bytes per sample */,
-            10 /* count */,
-            81920 /* expected output size = 20 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                2048 /* input frame size in bytes = 2048 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                20480 /* expected output size = 20 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                4096 /* input frame size in bytes = 2048 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                40960 /* expected output size = 20 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                8192 /* input frame size in bytes = 2048 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                81920 /* expected output size = 20 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push frames with a little bit larger (+5 samples) than the requested size.
 TEST_F(FrameReassemblerTest, PushLittleLargerFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            1029 /* input frame size in bytes = 1029 samples * 1 channel * 1 bytes per sample */,
-            10 /* count */,
-            11264 /* expected output size = 11 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            2058 /* input frame size in bytes = 1029 samples * 1 channel * 2 bytes per sample */,
-            10 /* count */,
-            22528 /* expected output size = 11 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            4116 /* input frame size in bytes = 1029 samples * 1 channel * 4 bytes per sample */,
-            10 /* count */,
-            45056 /* expected output size = 11 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                1029 /* input frame size in bytes = 1029 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                11264 /* expected output size = 11 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                2058 /* input frame size in bytes = 1029 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                22528 /* expected output size = 11 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                4116 /* input frame size in bytes = 1029 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                45056 /* expected output size = 11 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push frames with a little bit smaller (-5 samples) than the requested size.
 TEST_F(FrameReassemblerTest, PushLittleSmallerFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            1019 /* input frame size in bytes = 1019 samples * 1 channel * 1 bytes per sample */,
-            10 /* count */,
-            10240 /* expected output size = 10 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            2038 /* input frame size in bytes = 1019 samples * 1 channel * 2 bytes per sample */,
-            10 /* count */,
-            20480 /* expected output size = 10 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            4076 /* input frame size in bytes = 1019 samples * 1 channel * 4 bytes per sample */,
-            10 /* count */,
-            40960 /* expected output size = 10 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                1019 /* input frame size in bytes = 1019 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                10240 /* expected output size = 10 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                2038 /* input frame size in bytes = 1019 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                20480 /* expected output size = 10 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                4076 /* input frame size in bytes = 1019 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                40960 /* expected output size = 10 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push single-byte frames
 TEST_F(FrameReassemblerTest, PushSingleByte) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            1 /* input frame size in bytes */,
-            100000 /* count */,
-            100352 /* expected output size = 98 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            1 /* input frame size in bytes */,
-            100000 /* count */,
-            100352 /* expected output size = 49 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            1 /* input frame size in bytes */,
-            100000 /* count */,
-            102400 /* expected output size = 25 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                1 /* input frame size in bytes */,
+                100000 /* count */,
+                100352 /* expected output size = 98 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                1 /* input frame size in bytes */,
+                100000 /* count */,
+                100352 /* expected output size = 49 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                1 /* input frame size in bytes */,
+                100000 /* count */,
+                102400 /* expected output size = 25 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push one big chunk.
 TEST_F(FrameReassemblerTest, PushBigChunk) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            100000 /* input frame size in bytes */,
-            1 /* count */,
-            100352 /* expected output size = 98 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            100000 /* input frame size in bytes */,
-            1 /* count */,
-            100352 /* expected output size = 49 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            100000 /* input frame size in bytes */,
-            1 /* count */,
-            102400 /* expected output size = 25 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                100000 /* input frame size in bytes */,
+                1 /* count */,
+                100352 /* expected output size = 98 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                100000 /* input frame size in bytes */,
+                1 /* count */,
+                100352 /* expected output size = 49 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                100000 /* input frame size in bytes */,
+                1 /* count */,
+                102400 /* expected output size = 25 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 } // namespace android
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 00bf84f..4d939fa 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -92,6 +92,7 @@
 
 ALookup<C2Config::bitrate_mode_t, int32_t> sBitrateModes = {
     { C2Config::BITRATE_CONST,      BITRATE_MODE_CBR },
+    { C2Config::BITRATE_CONST_SKIP_ALLOWED, BITRATE_MODE_CBR_FD },
     { C2Config::BITRATE_VARIABLE,   BITRATE_MODE_VBR },
     { C2Config::BITRATE_IGNORE,     BITRATE_MODE_CQ },
 };
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index bee6b7f..54f4cff 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -748,6 +748,16 @@
 
             // We really don't know what this is; lock the buffer and pass it through ---
             // the client may know how to interpret it.
+
+            // unlock previous allocation if it was successful
+            if (err == OK) {
+                err = GraphicBufferMapper::get().unlock(mBuffer);
+                if (err) {
+                    ALOGE("failed transaction: unlock");
+                    return C2_CORRUPTED;
+                }
+            }
+
             void *pointer = nullptr;
             err = GraphicBufferMapper::get().lock(
                     const_cast<native_handle_t *>(mBuffer), grallocUsage, rect, &pointer);
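
The gralloc change above releases a lock that already succeeded before retrying GraphicBufferMapper::lock() with pass-through usage, so the same handle is never left locked twice. A toy illustration of why the unlock matters, with a fake mapper that rejects nested locks (a simplification; the point is only that a successful lock should be released before locking again):

    #include <cassert>

    class FakeMapper {                      // stand-in, not GraphicBufferMapper
      public:
        int lock()   { return mLocked ? -1 : (mLocked = true, 0); }
        int unlock() { return mLocked ? (mLocked = false, 0) : -1; }
      private:
        bool mLocked = false;
    };

    int main() {
        FakeMapper mapper;
        int err = mapper.lock();            // first attempt succeeded
        assert(err == 0);

        if (err == 0) {                     // release it before retrying
            err = mapper.unlock();
            assert(err == 0);
        }

        err = mapper.lock();                // retry with different usage now succeeds
        assert(err == 0);
        return 0;
    }
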
diff --git a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
index 4555203..cc4f222 100644
--- a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
+++ b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
@@ -498,13 +498,16 @@
     /* check frame rate */
     for (i = 0; i < encParams->nLayers; i++)
     {
+        if (encOption->encFrameRate[i] <= 0. || encOption->encFrameRate[i] > 120)
+        {
+            goto CLEAN_UP;
+        }
         encParams->LayerFrameRate[i] = encOption->encFrameRate[i];
     }
 
     if (encParams->nLayers > 1)
     {
-        if (encOption->encFrameRate[0] == encOption->encFrameRate[1] ||
-                encOption->encFrameRate[0] == 0. || encOption->encFrameRate[1] == 0.) /* 7/31/03 */
+        if (encOption->encFrameRate[0] == encOption->encFrameRate[1])
             goto CLEAN_UP;
     }
     /* set max frame rate */
diff --git a/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp b/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
index 4338c43..c04f7f3 100644
--- a/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
+++ b/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
@@ -219,6 +219,9 @@
 ; FUNCTION CODE
 ----------------------------------------------------------------------------*/
 
+#if __has_attribute(no_sanitize)
+__attribute__((no_sanitize("integer")))
+#endif
 void pvmp3_st_intensity(int32 xr[SUBBANDS_NUMBER*FILTERBANK_BANDS],
                         int32 xl[SUBBANDS_NUMBER*FILTERBANK_BANDS],
                         int32 is_pos,
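
The attribute added above exempts pvmp3_st_intensity() from the integer sanitizer, and the __has_attribute guard keeps the declaration valid on toolchains that do not know no_sanitize. A small self-contained example of the same guard pattern, applied to a function whose unsigned wrap-around is intentional:

    #include <cassert>
    #include <cstdint>

    #if defined(__has_attribute)
    #if __has_attribute(no_sanitize)
    __attribute__((no_sanitize("integer")))
    #endif
    #endif
    static uint32_t wrappingAdd(uint32_t a, uint32_t b) {
        return a + b;    // wrap-around is intended here, not a bug to report
    }

    int main() {
        assert(wrappingAdd(0xFFFFFFFFu, 2u) == 1u);
        return 0;
    }
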
diff --git a/media/janitors/media_solutions_OWNERS b/media/janitors/media_solutions_OWNERS
new file mode 100644
index 0000000..8dc1c7b
--- /dev/null
+++ b/media/janitors/media_solutions_OWNERS
@@ -0,0 +1,10 @@
+# Bug component: 1344
+# go/android-fwk-media-solutions for info on areas of ownership.
+
+# Main owners:
+aquilescanta@google.com
+krocard@google.com
+
+# In case of emergency:
+andrewlewis@google.com #{LAST_RESORT_SUGGESTION}
+olly@google.com #{LAST_RESORT_SUGGESTION}
diff --git a/media/libaudiohal/FactoryHalHidl.cpp b/media/libaudiohal/FactoryHalHidl.cpp
index e420d07..c19d2c2 100644
--- a/media/libaudiohal/FactoryHalHidl.cpp
+++ b/media/libaudiohal/FactoryHalHidl.cpp
@@ -94,7 +94,7 @@
 }  // namespace
 
 void* createPreferredImpl(const std::string& package, const std::string& interface) {
-    for (auto version = detail::sAudioHALVersions; version != nullptr; ++version) {
+    for (auto version = detail::sAudioHALVersions; *version != nullptr; ++version) {
         void* rawInterface = nullptr;
         if (hasHalService(package, *version, interface)
                 && createHalService(*version, interface, &rawInterface)) {
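
The loop fix above is the classic null-terminated-array pitfall: sAudioHALVersions is a table of C strings ended by a null entry, so the loop must dereference the cursor to test for the terminator; testing the cursor itself never becomes false. A tiny standalone reproduction:

    #include <cassert>
    #include <cstddef>

    int main() {
        // A null-terminated table of version strings, like sAudioHALVersions.
        const char* versions[] = {"7.0", "6.0", "5.0", nullptr};

        // Buggy form (for illustration): `version != nullptr` tests the cursor,
        // which is never null, so the loop would run past the terminator.
        // for (auto version = versions; version != nullptr; ++version) { ... }

        // Fixed form: dereference to compare the current entry with the terminator.
        size_t count = 0;
        for (auto version = versions; *version != nullptr; ++version) {
            ++count;
        }
        assert(count == 3);
        return 0;
    }
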
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
index 7e5caed..1eadd27 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
@@ -313,9 +313,9 @@
      */
     pInstance->eqBiquad.resize(pParams->NBands,
                                android::audio_utils::BiquadFilter<LVM_FLOAT>(pParams->NrChannels));
-    LVEQNB_ClearFilterHistory(pInstance);
 
     if (bChange || modeChange) {
+        LVEQNB_ClearFilterHistory(pInstance);
         /*
          * If the sample rate has changed clear the history
          */
diff --git a/media/libeffects/lvm/tests/EffectBundleTest.cpp b/media/libeffects/lvm/tests/EffectBundleTest.cpp
index 881ffb1..018cb7c 100644
--- a/media/libeffects/lvm/tests/EffectBundleTest.cpp
+++ b/media/libeffects/lvm/tests/EffectBundleTest.cpp
@@ -14,29 +14,39 @@
  * limitations under the License.
  */
 
+#include <system/audio_effects/effect_bassboost.h>
+#include <system/audio_effects/effect_equalizer.h>
+#include <system/audio_effects/effect_virtualizer.h>
 #include "EffectTestHelper.h"
-using namespace android;
 
-// Update isBassBoost, if the order of effects is updated
-constexpr effect_uuid_t kEffectUuids[] = {
+using namespace android;
+typedef enum {
+    EFFECT_BASS_BOOST,
+    EFFECT_EQUALIZER,
+    EFFECT_VIRTUALIZER,
+    EFFECT_VOLUME
+} effect_type_t;
+
+const std::map<effect_type_t, effect_uuid_t> kEffectUuids = {
         // NXP SW BassBoost
-        {0x8631f300, 0x72e2, 0x11df, 0xb57e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
-        // NXP SW Virtualizer
-        {0x1d4033c0, 0x8557, 0x11df, 0x9f2d, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        {EFFECT_BASS_BOOST,
+         {0x8631f300, 0x72e2, 0x11df, 0xb57e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}},
         // NXP SW Equalizer
-        {0xce772f20, 0x847d, 0x11df, 0xbb17, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        {EFFECT_EQUALIZER,
+         {0xce772f20, 0x847d, 0x11df, 0xbb17, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}},
+        // NXP SW Virtualizer
+        {EFFECT_VIRTUALIZER,
+         {0x1d4033c0, 0x8557, 0x11df, 0x9f2d, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}},
         // NXP SW Volume
-        {0x119341a0, 0x8469, 0x11df, 0x81f9, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        {EFFECT_VOLUME, {0x119341a0, 0x8469, 0x11df, 0x81f9, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}},
 };
 
-static bool isBassBoost(const effect_uuid_t* uuid) {
-    // Update this, if the order of effects in kEffectUuids is updated
-    return uuid == &kEffectUuids[0];
-}
+const size_t kNumEffectUuids = std::size(kEffectUuids);
 
-constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
+constexpr float kMinAmplitude = -1.0f;
+constexpr float kMaxAmplitude = 1.0f;
 
-typedef std::tuple<int, int, int, int, int> SingleEffectTestParam;
+using SingleEffectTestParam = std::tuple<int, int, int, int, int>;
 class SingleEffectTest : public ::testing::TestWithParam<SingleEffectTestParam> {
   public:
     SingleEffectTest()
@@ -46,7 +56,8 @@
           mFrameCount(EffectTestHelper::kFrameCounts[std::get<2>(GetParam())]),
           mLoopCount(EffectTestHelper::kLoopCounts[std::get<3>(GetParam())]),
           mTotalFrameCount(mFrameCount * mLoopCount),
-          mUuid(&kEffectUuids[std::get<4>(GetParam())]) {}
+          mEffectType((effect_type_t)std::get<4>(GetParam())),
+          mUuid(kEffectUuids.at(mEffectType)) {}
 
     const size_t mChMask;
     const size_t mChannelCount;
@@ -54,7 +65,8 @@
     const size_t mFrameCount;
     const size_t mLoopCount;
     const size_t mTotalFrameCount;
-    const effect_uuid_t* mUuid;
+    const effect_type_t mEffectType;
+    const effect_uuid_t mUuid;
 };
 
 // Tests applying a single effect
@@ -63,7 +75,7 @@
                  << "chMask: " << mChMask << " sampleRate: " << mSampleRate
                  << " frameCount: " << mFrameCount << " loopCount: " << mLoopCount);
 
-    EffectTestHelper effect(mUuid, mChMask, mChMask, mSampleRate, mFrameCount, mLoopCount);
+    EffectTestHelper effect(&mUuid, mChMask, mChMask, mSampleRate, mFrameCount, mLoopCount);
 
     ASSERT_NO_FATAL_FAILURE(effect.createEffect());
     ASSERT_NO_FATAL_FAILURE(effect.setConfig());
@@ -72,7 +84,7 @@
     std::vector<float> input(mTotalFrameCount * mChannelCount);
     std::vector<float> output(mTotalFrameCount * mChannelCount);
     std::minstd_rand gen(mChMask);
-    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    std::uniform_real_distribution<> dis(kMinAmplitude, kMaxAmplitude);
     for (auto& in : input) {
         in = dis(gen);
     }
@@ -88,7 +100,7 @@
                            ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
                            ::testing::Range(0, (int)kNumEffectUuids)));
 
-typedef std::tuple<int, int, int, int> SingleEffectComparisonTestParam;
+using SingleEffectComparisonTestParam = std::tuple<int, int, int, int>;
 class SingleEffectComparisonTest
     : public ::testing::TestWithParam<SingleEffectComparisonTestParam> {
   public:
@@ -97,13 +109,15 @@
           mFrameCount(EffectTestHelper::kFrameCounts[std::get<1>(GetParam())]),
           mLoopCount(EffectTestHelper::kLoopCounts[std::get<2>(GetParam())]),
           mTotalFrameCount(mFrameCount * mLoopCount),
-          mUuid(&kEffectUuids[std::get<3>(GetParam())]) {}
+          mEffectType((effect_type_t)std::get<3>(GetParam())),
+          mUuid(kEffectUuids.at(mEffectType)) {}
 
     const size_t mSampleRate;
     const size_t mFrameCount;
     const size_t mLoopCount;
     const size_t mTotalFrameCount;
-    const effect_uuid_t* mUuid;
+    const effect_type_t mEffectType;
+    const effect_uuid_t mUuid;
 };
 
 // Compares first two channels in multi-channel output to stereo output when same effect is applied
@@ -115,7 +129,7 @@
     std::vector<float> monoInput(mTotalFrameCount);
 
     std::minstd_rand gen(mSampleRate);
-    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    std::uniform_real_distribution<> dis(kMinAmplitude, kMaxAmplitude);
     for (auto& in : monoInput) {
         in = dis(gen);
     }
@@ -126,7 +140,7 @@
                     mTotalFrameCount * sizeof(float) * FCC_1);
 
     // Apply effect on stereo channels
-    EffectTestHelper stereoEffect(mUuid, AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_STEREO,
+    EffectTestHelper stereoEffect(&mUuid, AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_STEREO,
                                   mSampleRate, mFrameCount, mLoopCount);
 
     ASSERT_NO_FATAL_FAILURE(stereoEffect.createEffect());
@@ -142,7 +156,7 @@
 
     for (size_t chMask : EffectTestHelper::kChMasks) {
         size_t channelCount = audio_channel_count_from_out_mask(chMask);
-        EffectTestHelper testEffect(mUuid, chMask, chMask, mSampleRate, mFrameCount, mLoopCount);
+        EffectTestHelper testEffect(&mUuid, chMask, chMask, mSampleRate, mFrameCount, mLoopCount);
 
         ASSERT_NO_FATAL_FAILURE(testEffect.createEffect());
         ASSERT_NO_FATAL_FAILURE(testEffect.setConfig());
@@ -170,7 +184,7 @@
         memcpy_to_i16_from_float(stereoTestI16.data(), stereoTestOutput.data(),
                                  mTotalFrameCount * FCC_2);
 
-        if (isBassBoost(mUuid)) {
+        if (EFFECT_BASS_BOOST == mEffectType) {
             // SNR must be above the threshold
             float snr = computeSnr<int16_t>(stereoRefI16.data(), stereoTestI16.data(),
                                             mTotalFrameCount * FCC_2);
@@ -191,6 +205,135 @@
                            ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
                            ::testing::Range(0, (int)kNumEffectUuids)));
 
+using SingleEffectDefaultSetParamTestParam = std::tuple<int, int, int>;
+class SingleEffectDefaultSetParamTest
+    : public ::testing::TestWithParam<SingleEffectDefaultSetParamTestParam> {
+  public:
+    SingleEffectDefaultSetParamTest()
+        : mChMask(EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+          mChannelCount(audio_channel_count_from_out_mask(mChMask)),
+          mSampleRate(16000),
+          mFrameCount(EffectTestHelper::kFrameCounts[std::get<1>(GetParam())]),
+          mLoopCount(1),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mEffectType((effect_type_t)std::get<2>(GetParam())),
+          mUuid(kEffectUuids.at(mEffectType)) {}
+
+    const size_t mChMask;
+    const size_t mChannelCount;
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const effect_type_t mEffectType;
+    const effect_uuid_t mUuid;
+};
+
+// Tests verifying that redundant setParam calls do not alter output
+TEST_P(SingleEffectDefaultSetParamTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message()
+                 << "chMask: " << mChMask << " sampleRate: " << mSampleRate
+                 << " frameCount: " << mFrameCount << " loopCount: " << mLoopCount);
+    // effect.process() handles mTotalFrameCount * mChannelCount samples in each call.
+    // This test calls process() twice per effect, hence total samples when allocating
+    // input and output vectors is twice the number of samples processed in one call.
+    size_t totalNumSamples = 2 * mTotalFrameCount * mChannelCount;
+    // Initialize input buffer with deterministic pseudo-random values
+    std::vector<float> input(totalNumSamples);
+    std::minstd_rand gen(mChMask);
+    std::uniform_real_distribution<> dis(kMinAmplitude, kMaxAmplitude);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+
+    uint32_t key;
+    int32_t value1, value2;
+    switch (mEffectType) {
+        case EFFECT_BASS_BOOST:
+            key = BASSBOOST_PARAM_STRENGTH;
+            value1 = 1;
+            value2 = 14;
+            break;
+        case EFFECT_VIRTUALIZER:
+            key = VIRTUALIZER_PARAM_STRENGTH;
+            value1 = 0;
+            value2 = 100;
+            break;
+        case EFFECT_EQUALIZER:
+            key = EQ_PARAM_CUR_PRESET;
+            value1 = 0;
+            value2 = 1;
+            break;
+        case EFFECT_VOLUME:
+            key = 0 /* VOLUME_PARAM_LEVEL */;
+            value1 = 0;
+            value2 = -100;
+            break;
+        default:
+            FAIL() << "Unsupported effect type : " << mEffectType;
+    }
+
+    EffectTestHelper refEffect(&mUuid, mChMask, mChMask, mSampleRate, mFrameCount, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(refEffect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(refEffect.setConfig());
+
+    if (EFFECT_BASS_BOOST == mEffectType) {
+        ASSERT_NO_FATAL_FAILURE(refEffect.setParam<int16_t>(key, value1));
+    } else {
+        ASSERT_NO_FATAL_FAILURE(refEffect.setParam<int32_t>(key, value1));
+    }
+    std::vector<float> refOutput(totalNumSamples);
+    float* pInput = input.data();
+    float* pOutput = refOutput.data();
+    ASSERT_NO_FATAL_FAILURE(refEffect.process(pInput, pOutput));
+
+    pInput += totalNumSamples / 2;
+    pOutput += totalNumSamples / 2;
+    ASSERT_NO_FATAL_FAILURE(refEffect.process(pInput, pOutput));
+    ASSERT_NO_FATAL_FAILURE(refEffect.releaseEffect());
+
+    EffectTestHelper testEffect(&mUuid, mChMask, mChMask, mSampleRate, mFrameCount, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(testEffect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(testEffect.setConfig());
+
+    if (EFFECT_BASS_BOOST == mEffectType) {
+        ASSERT_NO_FATAL_FAILURE(testEffect.setParam<int16_t>(key, value1));
+    } else {
+        ASSERT_NO_FATAL_FAILURE(testEffect.setParam<int32_t>(key, value1));
+    }
+
+    std::vector<float> testOutput(totalNumSamples);
+    pInput = input.data();
+    pOutput = testOutput.data();
+    ASSERT_NO_FATAL_FAILURE(testEffect.process(pInput, pOutput));
+
+    // Call setParam once to change the parameters, and then call setParam again
+    // to restore the parameters to the initial state, making the first setParam
+    // call redundant
+    if (EFFECT_BASS_BOOST == mEffectType) {
+        ASSERT_NO_FATAL_FAILURE(testEffect.setParam<int16_t>(key, value2));
+        ASSERT_NO_FATAL_FAILURE(testEffect.setParam<int16_t>(key, value1));
+    } else {
+        ASSERT_NO_FATAL_FAILURE(testEffect.setParam<int32_t>(key, value2));
+        ASSERT_NO_FATAL_FAILURE(testEffect.setParam<int32_t>(key, value1));
+    }
+
+    pInput += totalNumSamples / 2;
+    pOutput += totalNumSamples / 2;
+    ASSERT_NO_FATAL_FAILURE(testEffect.process(pInput, pOutput));
+    ASSERT_NO_FATAL_FAILURE(testEffect.releaseEffect());
+    ASSERT_TRUE(areNearlySame(refOutput.data(), testOutput.data(), totalNumSamples))
+            << "Outputs do not match with default setParam calls";
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        EffectBundleTestAll, SingleEffectDefaultSetParamTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumChMasks),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
+                           ::testing::Range(0, (int)kNumEffectUuids)));
+
 int main(int argc, char** argv) {
     ::testing::InitGoogleTest(&argc, argv);
     int status = RUN_ALL_TESTS();
diff --git a/media/libeffects/lvm/tests/EffectTestHelper.cpp b/media/libeffects/lvm/tests/EffectTestHelper.cpp
index 625c15a..ec727c7 100644
--- a/media/libeffects/lvm/tests/EffectTestHelper.cpp
+++ b/media/libeffects/lvm/tests/EffectTestHelper.cpp
@@ -50,23 +50,6 @@
     ASSERT_EQ(reply, 0) << "cmd_enable reply non zero " << reply;
 }
 
-void EffectTestHelper::setParam(uint32_t type, uint32_t value) {
-    int reply = 0;
-    uint32_t replySize = sizeof(reply);
-    uint32_t paramData[2] = {type, value};
-    auto effectParam = new effect_param_t[sizeof(effect_param_t) + sizeof(paramData)];
-    memcpy(&effectParam->data[0], &paramData[0], sizeof(paramData));
-    effectParam->psize = sizeof(paramData[0]);
-    effectParam->vsize = sizeof(paramData[1]);
-    int status = (*mEffectHandle)
-                         ->command(mEffectHandle, EFFECT_CMD_SET_PARAM,
-                                   sizeof(effect_param_t) + sizeof(paramData), effectParam,
-                                   &replySize, &reply);
-    delete[] effectParam;
-    ASSERT_EQ(status, 0) << "set_param returned an error " << status;
-    ASSERT_EQ(reply, 0) << "set_param reply non zero " << reply;
-}
-
 void EffectTestHelper::process(float* input, float* output) {
     audio_buffer_t inBuffer = {.frameCount = mFrameCount, .f32 = input};
     audio_buffer_t outBuffer = {.frameCount = mFrameCount, .f32 = output};
diff --git a/media/libeffects/lvm/tests/EffectTestHelper.h b/media/libeffects/lvm/tests/EffectTestHelper.h
index 3854d46..bcee84e 100644
--- a/media/libeffects/lvm/tests/EffectTestHelper.h
+++ b/media/libeffects/lvm/tests/EffectTestHelper.h
@@ -50,6 +50,23 @@
     return snr;
 }
 
+template <typename T>
+static bool areNearlySame(const T* ref, const T* tst, size_t count) {
+    T delta;
+    if constexpr (std::is_floating_point_v<T>) {
+        delta = std::numeric_limits<T>::epsilon();
+    } else {
+        delta = 1;
+    }
+    for (size_t i = 0; i < count; ++i) {
+        const double diff(tst[i] - ref[i]);
+        if (std::abs(diff) > delta) {
+            return false;
+        }
+    }
+    return true;
+}
+
 class EffectTestHelper {
   public:
     EffectTestHelper(const effect_uuid_t* uuid, size_t inChMask, size_t outChMask,
@@ -65,7 +82,25 @@
     void createEffect();
     void releaseEffect();
     void setConfig();
-    void setParam(uint32_t type, uint32_t val);
+    template <typename VALUE_DTYPE>
+    void setParam(uint32_t type, VALUE_DTYPE const value) {
+        int reply = 0;
+        uint32_t replySize = sizeof(reply);
+
+        uint8_t paramData[sizeof(effect_param_t) + sizeof(type) + sizeof(value)];
+        auto effectParam = (effect_param_t*)paramData;
+
+        memcpy(&effectParam->data[0], &type, sizeof(type));
+        memcpy(&effectParam->data[sizeof(type)], &value, sizeof(value));
+        effectParam->psize = sizeof(type);
+        effectParam->vsize = sizeof(value);
+        int status = (*mEffectHandle)
+                             ->command(mEffectHandle, EFFECT_CMD_SET_PARAM,
+                                       sizeof(effect_param_t) + sizeof(type) + sizeof(value),
+                                       effectParam, &replySize, &reply);
+        ASSERT_EQ(status, 0) << "set_param returned an error " << status;
+        ASSERT_EQ(reply, 0) << "set_param reply non zero " << reply;
+    }
     void process(float* input, float* output);
 
     // Corresponds to SNR for 1 bit difference between two int16_t signals
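The templated setParam() above packs the parameter id and the value into one effect_param_t blob, so vsize follows whatever value type the caller passes. A minimal sketch of how a test might exercise it, assuming a made-up parameter id (real tests pass the ids defined by the effect under test):

```cpp
// Sketch only: kFakeStrengthParam is a hypothetical parameter id, not one of the
// bundle effect's real parameters.
constexpr uint32_t kFakeStrengthParam = 0;

void exerciseSetParam(EffectTestHelper& effect) {
    // vsize == sizeof(int32_t) in the first call ...
    ASSERT_NO_FATAL_FAILURE(effect.setParam<int32_t>(kFakeStrengthParam, 750));
    // ... and sizeof(int16_t) in the second, with no extra overload needed.
    ASSERT_NO_FATAL_FAILURE(effect.setParam<int16_t>(kFakeStrengthParam, 250));
}
```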
diff --git a/media/libeffects/preprocessing/.clang-format b/media/libeffects/preprocessing/.clang-format
deleted file mode 120000
index f1b4f69..0000000
--- a/media/libeffects/preprocessing/.clang-format
+++ /dev/null
@@ -1 +0,0 @@
-../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 273d91c..fcac551 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -15,6 +15,7 @@
  */
 
 //#define LOG_NDEBUG 0
+#include "include/HeifDecoderAPI.h"
 #define LOG_TAG "HeifDecoderImpl"
 
 #include "HeifDecoderImpl.h"
@@ -464,7 +465,7 @@
 }
 
 bool HeifDecoderImpl::setOutputColor(HeifColorFormat heifColor) {
-    if (heifColor == mOutputColor) {
+    if (heifColor == (HeifColorFormat)mOutputColor) {
         return true;
     }
 
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 6214f5e..8115880 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -394,11 +394,11 @@
     ],
 
     static_libs: [
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
     ],
 
     export_static_lib_headers: [
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
     ],
 
     export_include_dirs: [
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 389249e..3eb2f06 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -2854,10 +2854,43 @@
 
     CHECK(msg->findInt32("payload-type", &payloadType));
 
+    int32_t rtpSeq = 0, rtpTime = 0;
+    int64_t ntpTime = 0, recvTimeUs = 0;
+
     Parcel in;
     in.writeInt32(payloadType);
 
     switch (payloadType) {
+        case ARTPSource::RTP_FIRST_PACKET:
+        {
+            CHECK(msg->findInt32("rtp-time", &rtpTime));
+            CHECK(msg->findInt32("rtp-seq-num", &rtpSeq));
+            CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+            in.writeInt32(rtpTime);
+            in.writeInt32(rtpSeq);
+            in.writeInt32(recvTimeUs >> 32);
+            in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+            break;
+        }
+        case ARTPSource::RTCP_FIRST_PACKET:
+        {
+            CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+            in.writeInt32(recvTimeUs >> 32);
+            in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+            break;
+        }
+        case ARTPSource::RTCP_SR:
+        {
+            CHECK(msg->findInt32("rtp-time", &rtpTime));
+            CHECK(msg->findInt64("ntp-time", &ntpTime));
+            CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+            in.writeInt32(rtpTime);
+            in.writeInt32(ntpTime >> 32);
+            in.writeInt32(ntpTime & 0xFFFFFFFF);
+            in.writeInt32(recvTimeUs >> 32);
+            in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+            break;
+        }
         case ARTPSource::RTCP_TSFB:   // RTCP TSFB
         case ARTPSource::RTCP_PSFB:   // RTCP PSFB
         case ARTPSource::RTP_AUTODOWN:
@@ -2880,6 +2913,8 @@
             int32_t feedbackType, bitrate;
             int32_t highestSeqNum, baseSeqNum, prevExpected;
             int32_t numBufRecv, prevNumBufRecv;
+            int32_t latestRtpTime, jbTimeMs, rtpRtcpSrTimeGapMs;
+            int64_t recvTimeUs;
             CHECK(msg->findInt32("feedback-type", &feedbackType));
             CHECK(msg->findInt32("bit-rate", &bitrate));
             CHECK(msg->findInt32("highest-seq-num", &highestSeqNum));
@@ -2887,6 +2922,10 @@
             CHECK(msg->findInt32("prev-expected", &prevExpected));
             CHECK(msg->findInt32("num-buf-recv", &numBufRecv));
             CHECK(msg->findInt32("prev-num-buf-recv", &prevNumBufRecv));
+            CHECK(msg->findInt32("latest-rtp-time", &latestRtpTime));
+            CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+            CHECK(msg->findInt32("rtp-jitter-time-ms", &jbTimeMs));
+            CHECK(msg->findInt32("rtp-rtcpsr-time-gap-ms", &rtpRtcpSrTimeGapMs));
             in.writeInt32(feedbackType);
             in.writeInt32(bitrate);
             in.writeInt32(highestSeqNum);
@@ -2894,6 +2933,11 @@
             in.writeInt32(prevExpected);
             in.writeInt32(numBufRecv);
             in.writeInt32(prevNumBufRecv);
+            in.writeInt32(latestRtpTime);
+            in.writeInt32(recvTimeUs >> 32);
+            in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+            in.writeInt32(jbTimeMs);
+            in.writeInt32(rtpRtcpSrTimeGapMs);
             break;
         }
         case ARTPSource::RTP_CVO:
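The new RTP/RTCP notification cases above serialize each 64-bit field (ntp-time, recv-time-us) into the Parcel as two consecutive 32-bit writes, high word first. A hedged sketch of the matching read on a consumer side, not part of this patch:

```cpp
// Reassemble an int64 written as two writeInt32() calls (high 32 bits first),
// mirroring how NuPlayer splits ntpTime and recvTimeUs above.
static int64_t readSplitInt64(const android::Parcel& in) {
    const uint32_t hi = static_cast<uint32_t>(in.readInt32());
    const uint32_t lo = static_cast<uint32_t>(in.readInt32());
    return (static_cast<int64_t>(hi) << 32) | lo;
}
```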
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
index d2d978a..4d6a483 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -395,23 +395,13 @@
                 CHECK(msg->findInt64("ntp-time", (int64_t *)&ntpTime));
 
                 onTimeUpdate(trackIndex, rtpTime, ntpTime);
-                break;
-            }
-
-            int32_t firstRTCP;
-            if (msg->findInt32("first-rtcp", &firstRTCP)) {
-                // There won't be an access unit here, it's just a notification
-                // that the data communication worked since we got the first
-                // rtcp packet.
-                ALOGV("first-rtcp");
-                break;
             }
 
             int32_t IMSRxNotice;
             if (msg->findInt32("rtcp-event", &IMSRxNotice)) {
-                int32_t payloadType, feedbackType;
+                int32_t payloadType = 0, feedbackType = 0;
                 CHECK(msg->findInt32("payload-type", &payloadType));
-                CHECK(msg->findInt32("feedback-type", &feedbackType));
+                msg->findInt32("feedback-type", &feedbackType);
 
                 sp<AMessage> notify = dupNotify();
                 notify->setInt32("what", kWhatIMSRxNotice);
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 9533ae5..8e05de8 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -145,15 +145,17 @@
         return;
     }
 
-    // Close socket before posting message to RTSPSource message handler.
-    if (mHandler != NULL) {
-        close(mHandler->getARTSPConnection()->getSocket());
-    }
-
     sp<AMessage> msg = new AMessage(kWhatDisconnect, this);
 
     sp<AMessage> dummy;
     msg->postAndAwaitResponse(&dummy);
+
+    // Close socket after posting message to RTSPSource message handler.
+    if (mHandler != NULL && mHandler->getARTSPConnection()->getSocket() >= 0) {
+        ALOGD("closing rtsp socket if not closed yet.");
+        close(mHandler->getARTSPConnection()->getSocket());
+    }
+
 }
 
 status_t NuPlayer::RTSPSource::feedMoreTSData() {
diff --git a/media/libmediaplayerservice/tests/Android.bp b/media/libmediaplayerservice/tests/Android.bp
index 98626fd..99202b8 100644
--- a/media/libmediaplayerservice/tests/Android.bp
+++ b/media/libmediaplayerservice/tests/Android.bp
@@ -30,7 +30,7 @@
     ],
 
     static_libs: [
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
     ],
 
     include_dirs: [
diff --git a/media/libmediatranscoding/Android.bp b/media/libmediatranscoding/Android.bp
index 9375e4e..058d0c0 100644
--- a/media/libmediatranscoding/Android.bp
+++ b/media/libmediatranscoding/Android.bp
@@ -58,7 +58,7 @@
     export_include_dirs: ["include"],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
     ],
 
     cflags: [
diff --git a/media/libmediatranscoding/tests/Android.bp b/media/libmediatranscoding/tests/Android.bp
index 17a43df..80a055e 100644
--- a/media/libmediatranscoding/tests/Android.bp
+++ b/media/libmediatranscoding/tests/Android.bp
@@ -25,7 +25,7 @@
     ],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
     ],
 
     cflags: [
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 224ec8b..4cd8996 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -3270,10 +3270,12 @@
     if (err != OK) {
         ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                 sidebandHandle, err);
-        return err;
     }
 
-    return OK;
+    native_handle_close(sidebandHandle);
+    native_handle_delete(sidebandHandle);
+
+    return err;
 }
 
 status_t ACodec::setVideoPortFormatType(
@@ -5361,21 +5363,21 @@
                             err = mOMXNode->getParameter(
                                     (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacDrcPresentation,
                                     &presentation, sizeof(presentation));
-                            if (err != OK) {
-                                return err;
+                            if (err == OK) {
+                                notify->setInt32("aac-encoded-target-level",
+                                                 presentation.nEncodedTargetLevel);
+                                notify->setInt32("aac-drc-cut-level", presentation.nDrcCut);
+                                notify->setInt32("aac-drc-boost-level", presentation.nDrcBoost);
+                                notify->setInt32("aac-drc-heavy-compression",
+                                                 presentation.nHeavyCompression);
+                                notify->setInt32("aac-target-ref-level",
+                                                 presentation.nTargetReferenceLevel);
+                                notify->setInt32("aac-drc-effect-type",
+                                                 presentation.nDrcEffectType);
+                                notify->setInt32("aac-drc-album-mode", presentation.nDrcAlbumMode);
+                                notify->setInt32("aac-drc-output-loudness",
+                                                 presentation.nDrcOutputLoudness);
                             }
-                            notify->setInt32("aac-encoded-target-level",
-                                             presentation.nEncodedTargetLevel);
-                            notify->setInt32("aac-drc-cut-level", presentation.nDrcCut);
-                            notify->setInt32("aac-drc-boost-level", presentation.nDrcBoost);
-                            notify->setInt32("aac-drc-heavy-compression",
-                                             presentation.nHeavyCompression);
-                            notify->setInt32("aac-target-ref-level",
-                                             presentation.nTargetReferenceLevel);
-                            notify->setInt32("aac-drc-effect-type", presentation.nDrcEffectType);
-                            notify->setInt32("aac-drc-album-mode", presentation.nDrcAlbumMode);
-                            notify->setInt32("aac-drc-output-loudness",
-                                             presentation.nDrcOutputLoudness);
                         }
                     }
                     break;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 6bf7612..061bb85 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -410,6 +410,7 @@
     kWhatSignaledInputEOS    = 'seos',
     kWhatOutputFramesRendered = 'outR',
     kWhatOutputBuffersChanged = 'outC',
+    kWhatFirstTunnelFrameReady = 'ftfR',
 };
 
 class BufferCallback : public CodecBase::BufferCallback {
@@ -472,6 +473,7 @@
     virtual void onSignaledInputEOS(status_t err) override;
     virtual void onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) override;
     virtual void onOutputBuffersChanged() override;
+    virtual void onFirstTunnelFrameReady() override;
 private:
     const sp<AMessage> mNotify;
 };
@@ -592,6 +594,12 @@
     notify->post();
 }
 
+void CodecCallback::onFirstTunnelFrameReady() {
+    sp<AMessage> notify(mNotify->dup());
+    notify->setInt32("what", kWhatFirstTunnelFrameReady);
+    notify->post();
+}
+
 }  // namespace
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -690,6 +698,7 @@
       mTunneledInputWidth(0),
       mTunneledInputHeight(0),
       mTunneled(false),
+      mTunnelPeekState(TunnelPeekState::kEnabledNoBuffer),
       mHaveInputSurface(false),
       mHavePendingInputBuffers(false),
       mCpuBoostRequested(false),
@@ -879,6 +888,65 @@
     }
 }
 
+constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string) {
+    switch (state) {
+        case TunnelPeekState::kEnabledNoBuffer:
+            return "EnabledNoBuffer";
+        case TunnelPeekState::kDisabledNoBuffer:
+            return "DisabledNoBuffer";
+        case TunnelPeekState::kBufferDecoded:
+            return "BufferDecoded";
+        case TunnelPeekState::kBufferRendered:
+            return "BufferRendered";
+        case TunnelPeekState::kDisabledQueued:
+            return "DisabledQueued";
+        case TunnelPeekState::kEnabledQueued:
+            return "EnabledQueued";
+        default:
+            return default_string;
+    }
+}
+
+void MediaCodec::updateTunnelPeek(const sp<AMessage> &msg) {
+    int32_t tunnelPeek = 0;
+    if (!msg->findInt32("tunnel-peek", &tunnelPeek)) {
+        return;
+    }
+
+    TunnelPeekState previousState = mTunnelPeekState;
+    if (tunnelPeek == 0) {
+        switch (mTunnelPeekState) {
+            case TunnelPeekState::kEnabledNoBuffer:
+                mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
+                break;
+            case TunnelPeekState::kEnabledQueued:
+                mTunnelPeekState = TunnelPeekState::kDisabledQueued;
+                break;
+            default:
+                ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
+                return;
+        }
+    } else {
+        switch (mTunnelPeekState) {
+            case TunnelPeekState::kDisabledNoBuffer:
+                mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+                break;
+            case TunnelPeekState::kDisabledQueued:
+                mTunnelPeekState = TunnelPeekState::kEnabledQueued;
+                break;
+            case TunnelPeekState::kBufferDecoded:
+                msg->setInt32("android._trigger-tunnel-peek", 1);
+                mTunnelPeekState = TunnelPeekState::kBufferRendered;
+                break;
+            default:
+                ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
+                return;
+        }
+    }
+
+    ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
+}
+
 bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
 {
     if (nbuckets <= 0 || width <= 0) {
@@ -1280,6 +1348,12 @@
     return msg->post();
 }
 
+status_t MediaCodec::setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify) {
+    sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
+    msg->setMessage("first-tunnel-frame-ready", notify);
+    return msg->post();
+}
+
 status_t MediaCodec::configure(
         const sp<AMessage> &format,
         const sp<Surface> &nativeWindow,
@@ -2618,9 +2692,17 @@
 
                 case kWhatOutputFramesRendered:
                 {
-                    // ignore these in all states except running, and check that we have a
-                    // notification set
-                    if (mState == STARTED && mOnFrameRenderedNotification != NULL) {
+                    // ignore these in all states except running
+                    if (mState != STARTED) {
+                        break;
+                    }
+                    TunnelPeekState previousState = mTunnelPeekState;
+                    mTunnelPeekState = TunnelPeekState::kBufferRendered;
+                    ALOGV("TunnelPeekState: %s -> %s",
+                          asString(previousState),
+                          asString(TunnelPeekState::kBufferRendered));
+                    // check that we have a notification set
+                    if (mOnFrameRenderedNotification != NULL) {
                         sp<AMessage> notify = mOnFrameRenderedNotification->dup();
                         notify->setMessage("data", msg);
                         notify->post();
@@ -2628,6 +2710,48 @@
                     break;
                 }
 
+                case kWhatFirstTunnelFrameReady:
+                {
+                    if (mState != STARTED) {
+                        break;
+                    }
+                    TunnelPeekState previousState = mTunnelPeekState;
+                    switch (mTunnelPeekState) {
+                        case TunnelPeekState::kDisabledNoBuffer:
+                        case TunnelPeekState::kDisabledQueued:
+                            mTunnelPeekState = TunnelPeekState::kBufferDecoded;
+                            ALOGV("First tunnel frame ready");
+                            ALOGV("TunnelPeekState: %s -> %s",
+                                  asString(previousState),
+                                  asString(mTunnelPeekState));
+                            break;
+                        case TunnelPeekState::kEnabledNoBuffer:
+                        case TunnelPeekState::kEnabledQueued:
+                            {
+                                sp<AMessage> parameters = new AMessage();
+                                parameters->setInt32("android._trigger-tunnel-peek", 1);
+                                mCodec->signalSetParameters(parameters);
+                            }
+                            mTunnelPeekState = TunnelPeekState::kBufferRendered;
+                            ALOGV("First tunnel frame ready");
+                            ALOGV("TunnelPeekState: %s -> %s",
+                                  asString(previousState),
+                                  asString(mTunnelPeekState));
+                            break;
+                        default:
+                            ALOGV("Ignoring first tunnel frame ready, TunnelPeekState: %s",
+                                  asString(mTunnelPeekState));
+                            break;
+                    }
+
+                    if (mOnFirstTunnelFrameReadyNotification != nullptr) {
+                        sp<AMessage> notify = mOnFirstTunnelFrameReadyNotification->dup();
+                        notify->setMessage("data", msg);
+                        notify->post();
+                    }
+                    break;
+                }
+
                 case kWhatFillThisBuffer:
                 {
                     /* size_t index = */updateBuffers(kPortIndexInput, msg);
@@ -2844,6 +2968,9 @@
             if (msg->findMessage("on-frame-rendered", &notify)) {
                 mOnFrameRenderedNotification = notify;
             }
+            if (msg->findMessage("first-tunnel-frame-ready", &notify)) {
+                mOnFirstTunnelFrameReadyNotification = notify;
+            }
             break;
         }
 
@@ -3085,6 +3212,11 @@
             }
             sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
+            TunnelPeekState previousState = mTunnelPeekState;
+            mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+            ALOGV("TunnelPeekState: %s -> %s",
+                  asString(previousState),
+                  asString(TunnelPeekState::kEnabledNoBuffer));
 
             mReplyID = replyID;
             setState(STARTING);
@@ -3517,6 +3649,11 @@
 
             mCodec->signalFlush();
             returnBuffersToCodec();
+            TunnelPeekState previousState = mTunnelPeekState;
+            mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+            ALOGV("TunnelPeekState: %s -> %s",
+                  asString(previousState),
+                  asString(TunnelPeekState::kEnabledNoBuffer));
             break;
         }
 
@@ -4058,6 +4195,28 @@
         buffer->meta()->setInt32("csd", true);
     }
 
+    if (mTunneled) {
+        TunnelPeekState previousState = mTunnelPeekState;
+        switch (mTunnelPeekState) {
+            case TunnelPeekState::kEnabledNoBuffer:
+                buffer->meta()->setInt32("tunnel-first-frame", 1);
+                mTunnelPeekState = TunnelPeekState::kEnabledQueued;
+                ALOGV("TunnelPeekState: %s -> %s",
+                      asString(previousState),
+                      asString(mTunnelPeekState));
+                break;
+            case TunnelPeekState::kDisabledNoBuffer:
+                buffer->meta()->setInt32("tunnel-first-frame", 1);
+                mTunnelPeekState = TunnelPeekState::kDisabledQueued;
+                ALOGV("TunnelPeekState: %s -> %s",
+                      asString(previousState),
+                      asString(mTunnelPeekState));
+                break;
+            default:
+                break;
+        }
+    }
+
     status_t err = OK;
     if (hasCryptoOrDescrambler() && !c2Buffer && !memory) {
         AString *errorDetailMsg;
@@ -4426,6 +4585,7 @@
 
 status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
     updateLowLatency(params);
+    updateTunnelPeek(params);
     mCodec->signalSetParameters(params);
 
     return OK;
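For context, the updateTunnelPeek() path added above is driven by a "tunnel-peek" entry in the parameters passed to setParameters(). A rough client-side sketch, assuming `codec` is an sp<MediaCodec> already configured for tunneled video playback (this only illustrates the key parsed above, not a new public API):

```cpp
// Keep the first decoded frame hidden until the app is ready to peek.
sp<AMessage> params = new AMessage;
params->setInt32("tunnel-peek", 0);   // kEnabledNoBuffer -> kDisabledNoBuffer
codec->setParameters(params);

// ...later, ask for the first decoded frame to be shown.
params = new AMessage;
params->setInt32("tunnel-peek", 1);   // may set android._trigger-tunnel-peek
codec->setParameters(params);
```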
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 0107c32..b07f8f7 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -943,10 +943,17 @@
 
             sp<MediaCodecBuffer> outbuf;
             status_t err = mEncoder->getOutputBuffer(index, &outbuf);
-            if (err != OK || outbuf == NULL || outbuf->data() == NULL
-                || outbuf->size() == 0) {
+            if (err != OK || outbuf == NULL || outbuf->data() == NULL) {
                 signalEOS();
                 break;
+            } else if (outbuf->size() == 0) {
+                // Zero length CSD buffers are not treated as an error
+                if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
+                    mEncoder->releaseOutputBuffer(index);
+                } else {
+                    signalEOS();
+                }
+                break;
             }
 
             MediaBufferBase *mbuf = new MediaBuffer(outbuf->size());
diff --git a/media/libstagefright/OWNERS b/media/libstagefright/OWNERS
index 0cc2294..e67496e 100644
--- a/media/libstagefright/OWNERS
+++ b/media/libstagefright/OWNERS
@@ -1,11 +1,9 @@
+# Bug component: 1344
 set noparent
-chz@google.com
 essick@google.com
 lajos@google.com
-marcone@google.com
 taklee@google.com
 wonsik@google.com
 
-# LON
-olly@google.com
-andrewlewis@google.com
+# go/android-fwk-media-solutions for info on areas of ownership.
+include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index c51c048..a4ec232 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -237,7 +237,7 @@
             <Limit name="sample-rate" ranges="8000,12000,16000,24000,48000" />
             <Limit name="bitrate" range="500-512000" />
             <Limit name="complexity" range="0-10"  default="5" />
-            <Feature name="bitrate-modes" value="CBR" />
+            <Feature name="bitrate-modes" value="CBR,VBR" />
         </MediaCodec>
         <MediaCodec name="c2.android.h263.encoder" type="video/3gpp">
             <Alias name="OMX.google.h263.encoder" />
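With c2.android.opus.encoder now advertising both CBR and VBR, a client can request VBR explicitly. A minimal NDK-flavored sketch, assuming the standard AMediaFormat keys (the value 1 corresponds to BITRATE_MODE_VBR in MediaCodecConstants.h):

```cpp
#include <media/NdkMediaFormat.h>

AMediaFormat* fmt = AMediaFormat_new();
AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, "audio/opus");
AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_SAMPLE_RATE, 48000);
AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_CHANNEL_COUNT, 2);
AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_BIT_RATE, 96000);
AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_BITRATE_MODE, 1 /* BITRATE_MODE_VBR */);
// fmt would then be passed to AMediaCodec_configure() with
// AMEDIACODEC_CONFIGURE_FLAG_ENCODE.
```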
diff --git a/media/libstagefright/httplive/Android.bp b/media/libstagefright/httplive/Android.bp
index 0b0acbf..7acf735 100644
--- a/media/libstagefright/httplive/Android.bp
+++ b/media/libstagefright/httplive/Android.bp
@@ -29,7 +29,6 @@
     ],
 
     include_dirs: [
-        "frameworks/av/media/libstagefright",
         "frameworks/native/include/media/openmax",
     ],
 
@@ -65,6 +64,8 @@
 
     header_libs: [
         "libbase_headers",
+        "libstagefright_headers",
+        "libstagefright_httplive_headers",
     ],
 
     static_libs: [
@@ -74,3 +75,8 @@
     ],
 
 }
+
+cc_library_headers {
+    name: "libstagefright_httplive_headers",
+    export_include_dirs: ["."],
+}
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 3bad015..0d7cadd 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -23,7 +23,7 @@
 #include "M3UParser.h"
 #include "PlaylistFetcher.h"
 
-#include "mpeg2ts/AnotherPacketSource.h"
+#include <AnotherPacketSource.h>
 
 #include <cutils/properties.h>
 #include <media/MediaHTTPService.h>
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index 7a6d487..ceea41d 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -24,7 +24,7 @@
 
 #include <utils/String8.h>
 
-#include "mpeg2ts/ATSParser.h"
+#include <ATSParser.h>
 
 namespace android {
 
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index b23aa8a..907b326 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -24,9 +24,9 @@
 #include "HTTPDownloader.h"
 #include "LiveSession.h"
 #include "M3UParser.h"
-#include "include/ID3.h"
-#include "mpeg2ts/AnotherPacketSource.h"
-#include "mpeg2ts/HlsSampleDecryptor.h"
+#include <ID3.h>
+#include <AnotherPacketSource.h>
+#include <HlsSampleDecryptor.h>
 
 #include <datasource/DataURISource.h>
 #include <media/stagefright/foundation/ABitReader.h>
diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h
index 5d3f9c1..2e28164 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.h
+++ b/media/libstagefright/httplive/PlaylistFetcher.h
@@ -21,7 +21,7 @@
 #include <media/stagefright/foundation/AHandler.h>
 #include <openssl/aes.h>
 
-#include "mpeg2ts/ATSParser.h"
+#include <ATSParser.h>
 #include "LiveSession.h"
 
 namespace android {
diff --git a/media/libstagefright/httplive/fuzzer/Android.bp b/media/libstagefright/httplive/fuzzer/Android.bp
new file mode 100644
index 0000000..14097b0
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/Android.bp
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_httplive_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_httplive_license",
+    ],
+}
+
+cc_fuzz {
+    name: "httplive_fuzzer",
+    srcs: [
+        "httplive_fuzzer.cpp",
+    ],
+    static_libs: [
+        "libstagefright_httplive",
+        "libstagefright_id3",
+        "libstagefright_metadatautils",
+        "libstagefright_mpeg2support",
+        "liblog",
+        "libcutils",
+        "libdatasource",
+        "libmedia",
+        "libstagefright",
+        "libutils",
+    ],
+    header_libs: [
+        "libbase_headers",
+        "libstagefright_foundation_headers",
+        "libstagefright_headers",
+        "libstagefright_httplive_headers",
+    ],
+    shared_libs: [
+        "libcrypto",
+        "libstagefright_foundation",
+        "libhidlbase",
+        "libhidlmemory",
+        "android.hidl.allocator@1.0",
+    ],
+    corpus: ["corpus/*"],
+    dictionary: "httplive_fuzzer.dict",
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/libstagefright/httplive/fuzzer/README.md b/media/libstagefright/httplive/fuzzer/README.md
new file mode 100644
index 0000000..3a64ea4
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/README.md
@@ -0,0 +1,56 @@
+# Fuzzer for libstagefright_httplive
+
+## Plugin Design Considerations
+The fuzzer plugin for libstagefright_httplive is designed based on an understanding of the library and aims to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. Also, several .m3u8 files are hand-crafted and added to the corpus directory to increase code coverage. This ensures more code paths are reached by the fuzzer.
+
+libstagefright_httplive supports the following parameters:
+1. Final Result (parameter name: `finalResult`)
+2. Flags (parameter name: `flags`)
+3. Time Us (parameter name: `timeUs`)
+4. Track Index (parameter name: `trackIndex`)
+5. Index (parameter name: `index`)
+6. Select (parameter name: `select`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `finalResult` | `-34` to `-1` | Value obtained from FuzzedDataProvider|
+| `flags` | `0` to `1` | Value obtained from FuzzedDataProvider|
+| `timeUs` | `0` to `10000000` | Value obtained from FuzzedDataProvider|
+| `trackIndex` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+| `index` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+| `select` | `True` or `False` | Value obtained from FuzzedDataProvider|
+
+This also ensures that the plugin is always deterministic for any given input.
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the httplive module.
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the httplive_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) httplive_fuzzer
+```
+#### Steps to run
+To run on device
+```
+  $ adb push $ANDROID_PRODUCT_OUT/data/fuzz/${TARGET_ARCH}/lib /data/fuzz/${TARGET_ARCH}/lib
+  $ adb push $ANDROID_PRODUCT_OUT/data/fuzz/${TARGET_ARCH}/httplive_fuzzer /data/fuzz/${TARGET_ARCH}/httplive_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/httplive_fuzzer/httplive_fuzzer /data/fuzz/${TARGET_ARCH}/httplive_fuzzer/corpus
+```
+
+## References
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/libstagefright/httplive/fuzzer/corpus/crypt.key b/media/libstagefright/httplive/fuzzer/corpus/crypt.key
new file mode 100644
index 0000000..f9d5d7f
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/crypt.key
@@ -0,0 +1,2 @@

+ÏŒüÐ5Љ_xïHÎ3
diff --git a/media/libstagefright/httplive/fuzzer/corpus/encrypted.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/encrypted.m3u8
new file mode 100644
index 0000000..32b0eac
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/encrypted.m3u8
@@ -0,0 +1,12 @@
+#EXTM3U
+#EXT-X-TARGETDURATION:10
+#EXT-X-ALLOW-CACHE:YES
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXT-X-VERSION:3
+#EXT-X-MEDIA-SEQUENCE:1
+#EXT-X-KEY:METHOD=AES-128,URI="../../fuzz/arm64/httplive_fuzzer/corpus/crypt.key"
+#EXTINF:10.000,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5.092,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/hls.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/hls.m3u8
new file mode 100644
index 0000000..9338e04
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/hls.m3u8
@@ -0,0 +1,8 @@
+#EXTM3U
+#EXT-X-TARGETDURATION:10
+#EXT-X-MEDIA-SEQUENCE:0
+#EXTINF:10, no desc
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:10, no desc
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index1.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index1.m3u8
new file mode 100644
index 0000000..e1eff58
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index1.m3u8
@@ -0,0 +1,14 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=NONE
+#EXT-X-DISCONTINUITY-SEQUENCE:0
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence0.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index2.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index2.m3u8
new file mode 100644
index 0000000..37a0189
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index2.m3u8
@@ -0,0 +1,6 @@
+#EXTM3U
+#EXT-X-INDEPENDENT-SEGMENTS
+#EXT-X-STREAM-INF:CLOSED-CAPTIONS=NONE,BANDWIDTH=165340,RESOLUTION=256x144,CODECS="mp4a.40.5,avc1.42c00b"
+https://non.existentsite.com/test-doesnt-dereference-these-paths/prog_index.m3u8
+#EXT-X-STREAM-INF:CLOSED-CAPTIONS=NONE,BANDWIDTH=344388,RESOLUTION=426x240,CODECS="mp4a.40.5,avc1.4d4015"
+https://non.existentsite.com/test-doesnt-dereference-these-paths/prog_index1.m3u8
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index3.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index3.m3u8
new file mode 100644
index 0000000..1b7f489
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index3.m3u8
@@ -0,0 +1,13 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=AES-128,URI="https://demo.unified-streaming.com/video/tears-of-steel/aes.key",IV=0X99b74007b6254e4bd1c6e03631cad15b
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence3.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index4.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index4.m3u8
new file mode 100644
index 0000000..89ba37c
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index4.m3u8
@@ -0,0 +1,15 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=SAMPLE-AES,URI="data:text/plain;charset=utf-8,a4cd9995a1aa91e1",IV=0X99b74007b6254e4bd1c6e03631cad15b
+#EXT-X-DISCONTINUITY-SEQUENCE:0
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence0.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index5.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index5.m3u8
new file mode 100644
index 0000000..2120de4
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index5.m3u8
@@ -0,0 +1,14 @@
+#EXTM3U
+#EXT-X-TARGETDURATION:11
+#EXT-X-KEY:METHOD=NONE
+#EXT-X-MEDIA-SEQUENCE:0
+#EXT-X-VERSION:4
+#EXTINF:10.0,
+#EXT-X-BYTERANGE:10@0
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:10.0,
+#EXT-X-BYTERANGE:20@10
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:10.0,
+#EXT-X-BYTERANGE:80
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index6.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index6.m3u8
new file mode 100644
index 0000000..588368a
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index6.m3u8
@@ -0,0 +1,12 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=AES-128,URI="data:text/plain;charset=utf-8,a4cd9995a1aa91e1",IV=0x30303030303030303030303030303030
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence3.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index7.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index7.m3u8
new file mode 100644
index 0000000..b09948e
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index7.m3u8
@@ -0,0 +1,46 @@
+#EXTM3U
+#EXT-X-VERSION:4
+## Created with Unified Streaming Platform  (version=1.11.3-24438)
+#EXT-X-SESSION-KEY:METHOD=AES-128,URI="https://demo.unified-streaming.com/video/tears-of-steel/aes.key"
+
+# AUDIO groups
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio-aacl-64",LANGUAGE="en",NAME="English",DEFAULT=YES,AUTOSELECT=YES,CHANNELS="2"
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio-aacl-128",LANGUAGE="en",NAME="English",DEFAULT=YES,AUTOSELECT=YES,CHANNELS="2"
+
+# SUBTITLES groups
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="textstream",LANGUAGE="en",NAME="English",DEFAULT=YES,AUTOSELECT=YES,URI="tears-of-steel-aes-textstream_eng=1000.m3u8"
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="textstream",LANGUAGE="ru",NAME="Russian",AUTOSELECT=YES,URI="tears-of-steel-aes-textstream_rus=1000.m3u8"
+
+# variants
+#EXT-X-STREAM-INF:BANDWIDTH=494000,CODECS="mp4a.40.2,avc1.42C00D",RESOLUTION=224x100,FRAME-RATE=24,AUDIO="audio-aacl-64",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=64008-video_eng=401000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=933000,CODECS="mp4a.40.2,avc1.42C016",RESOLUTION=448x200,FRAME-RATE=24,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=751000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1198000,CODECS="mp4a.40.2,avc1.4D401F",RESOLUTION=784x350,FRAME-RATE=24,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=1001000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1728000,CODECS="mp4a.40.2,avc1.640028",RESOLUTION=1680x750,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=1501000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=2469000,CODECS="mp4a.40.2,avc1.640028",RESOLUTION=1680x750,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=2200000.m3u8
+
+# variants
+#EXT-X-STREAM-INF:BANDWIDTH=1025000,CODECS="mp4a.40.2,hvc1.1.6.L150.90",RESOLUTION=1680x750,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-64",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=64008-video_eng_1=902000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1368000,CODECS="mp4a.40.2,hvc1.1.6.L150.90",RESOLUTION=2576x1150,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng_1=1161000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1815000,CODECS="mp4a.40.2,hvc1.1.6.L150.90",RESOLUTION=3360x1500,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng_1=1583000.m3u8
+
+# variants
+#EXT-X-STREAM-INF:BANDWIDTH=69000,CODECS="mp4a.40.2",AUDIO="audio-aacl-64",SUBTITLES="textstream"
+tears-of-steel-aes-audio_eng=64008.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=137000,CODECS="mp4a.40.2",AUDIO="audio-aacl-128",SUBTITLES="textstream"
+tears-of-steel-aes-audio_eng=128002.m3u8
+
+# keyframes
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=54000,CODECS="avc1.42C00D",RESOLUTION=224x100,URI="keyframes/tears-of-steel-aes-video_eng=401000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=100000,CODECS="avc1.42C016",RESOLUTION=448x200,URI="keyframes/tears-of-steel-aes-video_eng=751000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=133000,CODECS="avc1.4D401F",RESOLUTION=784x350,URI="keyframes/tears-of-steel-aes-video_eng=1001000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=120000,CODECS="hvc1.1.6.L150.90",RESOLUTION=1680x750,VIDEO-RANGE=SDR,URI="keyframes/tears-of-steel-aes-video_eng_1=902000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=154000,CODECS="hvc1.1.6.L150.90",RESOLUTION=2576x1150,VIDEO-RANGE=SDR,URI="keyframes/tears-of-steel-aes-video_eng_1=1161000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=210000,CODECS="hvc1.1.6.L150.90",RESOLUTION=3360x1500,VIDEO-RANGE=SDR,URI="keyframes/tears-of-steel-aes-video_eng_1=1583000.m3u8"
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index8.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index8.m3u8
new file mode 100644
index 0000000..353d589
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index8.m3u8
@@ -0,0 +1,13 @@
+#EXTM3U
+#EXT-X-VERSION:5
+
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",NAME="English stereo",LANGUAGE="en",AUTOSELECT=YES,URI="../../fuzz/arm64/httplive_fuzzer/index1.m3u8"
+
+#EXT-X-STREAM-INF:BANDWIDTH=628000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=320x180,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index1.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=928000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=480x270,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index2.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1728000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=640x360,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index3.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=2528000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=960x540,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index1.m3u8
diff --git a/media/libstagefright/httplive/fuzzer/corpus/prog_index.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/prog_index.m3u8
new file mode 100644
index 0000000..eb88422
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/prog_index.m3u8
@@ -0,0 +1,17 @@
+#EXTM3U
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",LANGUAGE="eng",NAME="English",AUTOSELECT=YES,DEFAULT=YES,URI="corpus/index1.m3u8"
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",LANGUAGE="fre",NAME="Français",AUTOSELECT=YES,DEFAULT=NO,URI="corpus/index1.m3u8"
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",LANGUAGE="sp",NAME="Espanol",AUTOSELECT=YES,DEFAULT=NO,URI="corpus/index1.m3u8"
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=NONE
+#EXT-X-DISCONTINUITY-SEQUENCE:0
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/httplive_fuzzer.cpp b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.cpp
new file mode 100644
index 0000000..aa777b3
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.cpp
@@ -0,0 +1,298 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <LiveDataSource.h>
+#include <LiveSession.h>
+#include <media/MediaHTTPConnection.h>
+#include <media/MediaHTTPService.h>
+#include <media/mediaplayer_common.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/ALooperRoster.h>
+#include <string>
+#include <utils/Log.h>
+
+using namespace std;
+using namespace android;
+
+constexpr char kFileNamePrefix[] = "/data/local/tmp/httplive-";
+constexpr char kFileNameSuffix[] = ".m3u8";
+constexpr char kFileUrlPrefix[] = "file://";
+constexpr int64_t kOffSet = 0;
+constexpr int32_t kReadyMarkMs = 5000;
+constexpr int32_t kPrepareMarkMs = 1500;
+constexpr int32_t kErrorNoMax = -1;
+constexpr int32_t kErrorNoMin = -34;
+constexpr int32_t kMaxTimeUs = 1000;
+constexpr int32_t kRandomStringLength = 64;
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+
+constexpr LiveSession::StreamType kValidStreamType[] = {
+    LiveSession::STREAMTYPE_AUDIO, LiveSession::STREAMTYPE_VIDEO,
+    LiveSession::STREAMTYPE_SUBTITLES, LiveSession::STREAMTYPE_METADATA};
+
+constexpr MediaSource::ReadOptions::SeekMode kValidSeekMode[] = {
+    MediaSource::ReadOptions::SeekMode::SEEK_PREVIOUS_SYNC,
+    MediaSource::ReadOptions::SeekMode::SEEK_NEXT_SYNC,
+    MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST_SYNC,
+    MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST,
+    MediaSource::ReadOptions::SeekMode::SEEK_FRAME_INDEX};
+
+constexpr media_track_type kValidMediaTrackType[] = {
+    MEDIA_TRACK_TYPE_UNKNOWN,  MEDIA_TRACK_TYPE_VIDEO,
+    MEDIA_TRACK_TYPE_AUDIO,    MEDIA_TRACK_TYPE_TIMEDTEXT,
+    MEDIA_TRACK_TYPE_SUBTITLE, MEDIA_TRACK_TYPE_METADATA};
+
+struct TestAHandler : public AHandler {
+public:
+  TestAHandler(std::function<void()> signalEosFunction)
+      : mSignalEosFunction(signalEosFunction) {}
+  virtual ~TestAHandler() {}
+
+protected:
+  void onMessageReceived(const sp<AMessage> &msg) override {
+    int32_t what = -1;
+    msg->findInt32("what", &what);
+    switch (what) {
+    case LiveSession::kWhatError:
+    case LiveSession::kWhatPrepared:
+    case LiveSession::kWhatPreparationFailed: {
+      mSignalEosFunction();
+      break;
+    }
+    }
+    return;
+  }
+
+private:
+  std::function<void()> mSignalEosFunction;
+};
+
+struct TestMediaHTTPConnection : public MediaHTTPConnection {
+public:
+  TestMediaHTTPConnection() {}
+  virtual ~TestMediaHTTPConnection() {}
+
+  virtual bool connect(const char * /*uri*/,
+                       const KeyedVector<String8, String8> * /*headers*/) {
+    return true;
+  }
+
+  virtual void disconnect() { return; }
+
+  virtual ssize_t readAt(off64_t /*offset*/, void * /*data*/, size_t size) {
+    return size;
+  }
+
+  virtual off64_t getSize() { return 0; }
+  virtual status_t getMIMEType(String8 * /*mimeType*/) { return NO_ERROR; }
+  virtual status_t getUri(String8 * /*uri*/) { return NO_ERROR; }
+
+private:
+  DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPConnection);
+};
+
+struct TestMediaHTTPService : public MediaHTTPService {
+public:
+  TestMediaHTTPService() {}
+  ~TestMediaHTTPService(){};
+
+  virtual sp<MediaHTTPConnection> makeHTTPConnection() {
+    mediaHTTPConnection = sp<TestMediaHTTPConnection>::make();
+    return mediaHTTPConnection;
+  }
+
+private:
+  sp<TestMediaHTTPConnection> mediaHTTPConnection = nullptr;
+  DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPService);
+};
+
+class HttpLiveFuzzer {
+public:
+  void process(const uint8_t *data, size_t size);
+  void deInitLiveSession();
+  ~HttpLiveFuzzer() { deInitLiveSession(); }
+
+private:
+  void invokeLiveDataSource();
+  void createM3U8File(const uint8_t *data, size_t size);
+  void initLiveDataSource();
+  void invokeLiveSession();
+  void initLiveSession();
+  void invokeDequeueAccessUnit();
+  void invokeConnectAsync();
+  void invokeSeekTo();
+  void invokeGetConfig();
+  void signalEos();
+  string generateFileName();
+  sp<LiveDataSource> mLiveDataSource = nullptr;
+  sp<LiveSession> mLiveSession = nullptr;
+  sp<ALooper> mLiveLooper = nullptr;
+  sp<TestMediaHTTPService> httpService = nullptr;
+  sp<TestAHandler> mHandler = nullptr;
+  FuzzedDataProvider *mFDP = nullptr;
+  bool mEosReached = false;
+  std::mutex mDownloadCompleteMutex;
+  std::condition_variable mConditionalVariable;
+};
+
+string HttpLiveFuzzer::generateFileName() {
+  return kFileNamePrefix + to_string(getpid()) + kFileNameSuffix;
+}
+
+void HttpLiveFuzzer::createM3U8File(const uint8_t *data, size_t size) {
+  ofstream m3u8File;
+  string currentFileName = generateFileName();
+  m3u8File.open(currentFileName, ios::out | ios::binary);
+  m3u8File.write((char *)data, size);
+  m3u8File.close();
+}
+
+void HttpLiveFuzzer::initLiveDataSource() {
+  mLiveDataSource = sp<LiveDataSource>::make();
+}
+
+void HttpLiveFuzzer::invokeLiveDataSource() {
+  initLiveDataSource();
+  size_t size = mFDP->ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
+  sp<ABuffer> buffer = new ABuffer(size);
+  mLiveDataSource->queueBuffer(buffer);
+  uint8_t *data = new uint8_t[size];
+  mLiveDataSource->readAtNonBlocking(kOffSet, data, size);
+  int32_t finalResult = mFDP->ConsumeIntegralInRange(kErrorNoMin, kErrorNoMax);
+  mLiveDataSource->queueEOS(finalResult);
+  mLiveDataSource->reset();
+  mLiveDataSource->countQueuedBuffers();
+  mLiveDataSource->initCheck();
+  delete[] data;
+}
+
+void HttpLiveFuzzer::initLiveSession() {
+  ALooperRoster looperRoster;
+  mHandler =
+      sp<TestAHandler>::make(std::bind(&HttpLiveFuzzer::signalEos, this));
+  mLiveLooper = sp<ALooper>::make();
+  mLiveLooper->setName("http live");
+  mLiveLooper->start();
+  sp<AMessage> notify = sp<AMessage>::make(0, mHandler);
+  httpService = new TestMediaHTTPService();
+  uint32_t flags = mFDP->ConsumeIntegral<uint32_t>();
+  mLiveSession = sp<LiveSession>::make(notify, flags, httpService);
+  mLiveLooper->registerHandler(mLiveSession);
+  looperRoster.registerHandler(mLiveLooper, mHandler);
+}
+
+void HttpLiveFuzzer::invokeDequeueAccessUnit() {
+  LiveSession::StreamType stream = mFDP->PickValueInArray(kValidStreamType);
+  sp<ABuffer> buffer;
+  mLiveSession->dequeueAccessUnit(stream, &buffer);
+}
+
+void HttpLiveFuzzer::invokeSeekTo() {
+  int64_t timeUs = mFDP->ConsumeIntegralInRange<int64_t>(0, kMaxTimeUs);
+  MediaSource::ReadOptions::SeekMode mode =
+      mFDP->PickValueInArray(kValidSeekMode);
+  mLiveSession->seekTo(timeUs, mode);
+}
+
+void HttpLiveFuzzer::invokeGetConfig() {
+  mLiveSession->getTrackCount();
+  size_t trackIndex = mFDP->ConsumeIntegral<size_t>();
+  mLiveSession->getTrackInfo(trackIndex);
+  media_track_type type = mFDP->PickValueInArray(kValidMediaTrackType);
+  mLiveSession->getSelectedTrack(type);
+  sp<MetaData> meta;
+  LiveSession::StreamType stream = mFDP->PickValueInArray(kValidStreamType);
+  mLiveSession->getStreamFormatMeta(stream, &meta);
+  mLiveSession->getKeyForStream(stream);
+  if (stream != LiveSession::STREAMTYPE_SUBTITLES) {
+    mLiveSession->getSourceTypeForStream(stream);
+  }
+}
+
+void HttpLiveFuzzer::invokeConnectAsync() {
+  string currentFileName = generateFileName();
+  string url = kFileUrlPrefix + currentFileName;
+  string str_1 = mFDP->ConsumeRandomLengthString(kRandomStringLength);
+  string str_2 = mFDP->ConsumeRandomLengthString(kRandomStringLength);
+
+  KeyedVector<String8, String8> headers;
+  headers.add(String8(str_1.c_str()), String8(str_2.c_str()));
+  mLiveSession->connectAsync(url.c_str(), &headers);
+}
+
+void HttpLiveFuzzer::invokeLiveSession() {
+  initLiveSession();
+  BufferingSettings bufferingSettings;
+  bufferingSettings.mInitialMarkMs = kPrepareMarkMs;
+  bufferingSettings.mResumePlaybackMarkMs = kReadyMarkMs;
+  mLiveSession->setBufferingSettings(bufferingSettings);
+  invokeConnectAsync();
+  std::unique_lock waitForDownloadComplete(mDownloadCompleteMutex);
+  mConditionalVariable.wait(waitForDownloadComplete,
+                            [this] { return mEosReached; });
+  if (mLiveSession->isSeekable()) {
+    invokeSeekTo();
+  }
+  invokeDequeueAccessUnit();
+  size_t index = mFDP->ConsumeIntegral<size_t>();
+  bool select = mFDP->ConsumeBool();
+  mLiveSession->selectTrack(index, select);
+  mLiveSession->hasDynamicDuration();
+  int64_t firstTimeUs =
+      mFDP->ConsumeIntegralInRange<int64_t>(kRangeMin, kRangeMax);
+  int64_t timeUs = mFDP->ConsumeIntegralInRange<int64_t>(kRangeMin, kRangeMax);
+  int32_t discontinuitySeq = mFDP->ConsumeIntegral<int32_t>();
+  mLiveSession->calculateMediaTimeUs(firstTimeUs, timeUs, discontinuitySeq);
+  invokeGetConfig();
+}
+
+void HttpLiveFuzzer::process(const uint8_t *data, size_t size) {
+  mFDP = new FuzzedDataProvider(data, size);
+  createM3U8File(data, size);
+  invokeLiveDataSource();
+  invokeLiveSession();
+  delete mFDP;
+}
+
+void HttpLiveFuzzer::deInitLiveSession() {
+  if (mLiveSession != nullptr) {
+    mLiveSession->disconnect();
+    mLiveLooper->unregisterHandler(mLiveSession->id());
+    mLiveLooper->stop();
+  }
+  mLiveSession.clear();
+  mLiveLooper.clear();
+}
+
+void HttpLiveFuzzer::signalEos() {
+  {
+    // Update the predicate under the same mutex the waiter holds, then notify.
+    std::lock_guard<std::mutex> lock(mDownloadCompleteMutex);
+    mEosReached = true;
+  }
+  mConditionalVariable.notify_one();
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  HttpLiveFuzzer httpliveFuzzer;
+  httpliveFuzzer.process(data, size);
+  return 0;
+}
diff --git a/media/libstagefright/httplive/fuzzer/httplive_fuzzer.dict b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.dict
new file mode 100644
index 0000000..703cc7e
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.dict
@@ -0,0 +1,15 @@
+#m3u8-Tags
+kw1="#EXTM3U"
+kw2="#EXT-X-VERSION:"
+kw3="#EXT-X-TARGETDURATION:"
+kw4="#EXT-X-PLAYLIST-TYPE:"
+kw5="#EXTINF:"
+kw6="#EXT-X-ENDLIST"
+kw7="#EXT-X-MEDIA-SEQUENCE:"
+kw8="#EXT-X-KEY:METHOD=NONE"
+kw9="#EXT-X-DISCONTINUITY:"
+kw10="#EXT-X-DISCONTINUITY-SEQUENCE:0"
+kw11="#EXT-X-STREAM-INF:BANDWIDTH="
+kw12="#EXT-X-STREAM-INF:CODECS="
+kw13="#EXT-X-BYTERANGE:"
+kw14="#EXT-X-MEDIA"
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index 2e98fec..efb2f86 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -178,6 +178,10 @@
          * Notify MediaCodec that output buffers are changed.
          */
         virtual void onOutputBuffersChanged() = 0;
+        /**
+         * Notify MediaCodec that the first tunnel frame is ready.
+         */
+        virtual void onFirstTunnelFrameReady() = 0;
     };
 
     /**
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 8952376..0660ccf 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -129,6 +129,8 @@
 
     status_t setOnFrameRenderedNotification(const sp<AMessage> &notify);
 
+    status_t setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify);
+
     status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);
 
     status_t setInputSurface(const sp<PersistentSurface> &surface);
@@ -367,6 +369,30 @@
         bool mOwnedByClient;
     };
 
+    // This type is used to track the tunnel mode video peek state machine:
+    //
+    // DisabledNoBuffer -> EnabledNoBuffer  when tunnel-peek = true
+    // DisabledQueued   -> EnabledQueued    when tunnel-peek = true
+    // DisabledNoBuffer -> DisabledQueued   when first frame queued
+    // EnabledNoBuffer  -> DisabledNoBuffer when tunnel-peek = false
+    // EnabledQueued    -> DisabledQueued   when tunnel-peek = false
+    // EnabledNoBuffer  -> EnabledQueued    when first frame queued
+    // DisabledNoBuffer -> BufferDecoded    when kWhatFirstTunnelFrameReady
+    // DisabledQueued   -> BufferDecoded    when kWhatFirstTunnelFrameReady
+    // EnabledNoBuffer  -> BufferDecoded    when kWhatFirstTunnelFrameReady
+    // EnabledQueued    -> BufferDecoded    when kWhatFirstTunnelFrameReady
+    // BufferDecoded    -> BufferRendered   when kWhatFrameRendered
+    // <all states>     -> EnabledNoBuffer  when flush
+    // <all states>     -> EnabledNoBuffer  when stop then configure then start
+    enum struct TunnelPeekState {
+        kDisabledNoBuffer,
+        kEnabledNoBuffer,
+        kDisabledQueued,
+        kEnabledQueued,
+        kBufferDecoded,
+        kBufferRendered,
+    };
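The transition table above is documented only as a comment; the handling itself lives in MediaCodec.cpp, which this hunk does not show. As a self-contained sketch of the documented transitions (hypothetical helper, not the actual implementation):

    // Illustrative only: mirrors the tunnel-peek transition table above.
    enum struct TunnelPeekState {
        kDisabledNoBuffer, kEnabledNoBuffer, kDisabledQueued,
        kEnabledQueued, kBufferDecoded, kBufferRendered,
    };

    // "tunnel-peek" = true/false toggles the Enabled*/Disabled* pairs;
    // kBufferDecoded and kBufferRendered are unaffected by the parameter.
    TunnelPeekState onTunnelPeekSet(TunnelPeekState s, bool enable) {
        switch (s) {
            case TunnelPeekState::kDisabledNoBuffer:
                return enable ? TunnelPeekState::kEnabledNoBuffer : s;
            case TunnelPeekState::kDisabledQueued:
                return enable ? TunnelPeekState::kEnabledQueued : s;
            case TunnelPeekState::kEnabledNoBuffer:
                return enable ? s : TunnelPeekState::kDisabledNoBuffer;
            case TunnelPeekState::kEnabledQueued:
                return enable ? s : TunnelPeekState::kDisabledQueued;
            default:
                return s;
        }
    }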
+
     struct ResourceManagerServiceProxy;
 
     State mState;
@@ -393,12 +419,15 @@
     void flushMediametrics();
     void updateEphemeralMediametrics(mediametrics_handle_t item);
     void updateLowLatency(const sp<AMessage> &msg);
+    constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
+    void updateTunnelPeek(const sp<AMessage> &msg);
 
     sp<AMessage> mOutputFormat;
     sp<AMessage> mInputFormat;
     sp<AMessage> mCallback;
     sp<AMessage> mOnFrameRenderedNotification;
     sp<AMessage> mAsyncReleaseCompleteNotification;
+    sp<AMessage> mOnFirstTunnelFrameReadyNotification;
 
     sp<ResourceManagerServiceProxy> mResourceManagerProxy;
 
@@ -434,6 +463,7 @@
     int32_t mTunneledInputWidth;
     int32_t mTunneledInputHeight;
     bool mTunneled;
+    TunnelPeekState mTunnelPeekState;
 
     sp<IDescrambler> mDescrambler;
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index bee96b1..ece643d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -557,12 +557,14 @@
 }
 
 constexpr int32_t BITRATE_MODE_CBR = 2;
+constexpr int32_t BITRATE_MODE_CBR_FD = 3;
 constexpr int32_t BITRATE_MODE_CQ = 0;
 constexpr int32_t BITRATE_MODE_VBR = 1;
 
 inline static const char *asString_BitrateMode(int32_t i, const char *def = "??") {
     switch (i) {
         case BITRATE_MODE_CBR:  return "CBR";
+        case BITRATE_MODE_CBR_FD: return "CBR_FD";
         case BITRATE_MODE_CQ:   return "CQ";
         case BITRATE_MODE_VBR:  return "VBR";
         default:                return def;
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index e1cc5ec..3f4d662 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -44,6 +44,7 @@
       mNextExpectedSeqNo(0),
       mAccessUnitDamaged(false),
       mFirstIFrameProvided(false),
+      mLastCvo(-1),
       mLastIFrameProvidedAtMs(0),
       mLastRtpTimeJitterDataUs(0),
       mWidth(0),
@@ -137,7 +138,7 @@
     }
     source->putInterArrivalJitterData(rtpTime, nowTimeUs);
 
-    const int64_t startTimeMs = source->mFirstSysTime / 1000;
+    const int64_t startTimeMs = source->mSysAnchorTime / 1000;
     const int64_t nowTimeMs = nowTimeUs / 1000;
     const int32_t staticJitterTimeMs = source->getStaticJitterTimeMs();
     const int32_t baseJitterTimeMs = source->getBaseJitterTimeMs();
@@ -195,33 +196,38 @@
 
     if (!isExpired) {
         ALOGV("buffering in jitter buffer.");
+        // set an alarm for jitter buffer time expiration.
+        // add 1ms because the jitter buffer time keeps changing.
+        int64_t expTimeUs = (RtpToMs(std::abs(diffTimeRtp), clockRate) + 1) * 1000;
+        source->setJbAlarmTime(nowTimeUs, expTimeUs);
         return NOT_ENOUGH_DATA;
     }
 
     if (isFirstLineBroken) {
-        if (isSecondLineBroken) {
-            int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
-            ALOGE("buffer too late... \t RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
+        int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
+        String8 info;
+        info.appendFormat("RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
                     "Seq# %d \t ExpSeq# %d \t"
                     "JitterMs %d + (%d + %d * %.3f)",
                     (long long)diffTimeRtp, (long long)totalDiffTimeMs,
                     buffer->int32Data(), mNextExpectedSeqNo,
                     jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+        if (isSecondLineBroken) {
+            ALOGE("%s", info.string());
             printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
             printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
 
-            mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
         }  else {
-            ALOGW("=== WARNING === buffer arrived after %d + %d = %d ms === WARNING === ",
-                    jitterTimeMs, tryJbTimeMs, jitterTimeMs + tryJbTimeMs);
+            ALOGW("%s", info.string());
         }
     }
 
     if (mNextExpectedSeqNoValid) {
-        int32_t size = queue->size();
+        mNextExpectedSeqNo = pickStartSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
         int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
 
         if (cntRemove > 0) {
+            int32_t size = queue->size();
             source->noticeAbandonBuffer(cntRemove);
             ALOGW("delete %d of %d buffers", cntRemove, size);
         }
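The alarm set a few lines above converts the remaining jitter-buffer time from RTP ticks to microseconds and pads it by 1 ms. Assuming RtpToMs(ticks, clockRate) is the usual ticks * 1000 / clockRate conversion, the arithmetic amounts to this sketch (hypothetical helper name):

    // e.g. a 4500-tick shortfall at the 90 kHz video clock is 50 ms,
    // so the alarm is posted (50 + 1) * 1000 us from now.
    int64_t jbAlarmDelayUs(int64_t diffTimeRtp, int32_t clockRate) {
        int64_t ms = std::abs(diffTimeRtp) * 1000 / clockRate;  // RtpToMs()
        return (ms + 1) * 1000;
    }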
@@ -441,7 +447,6 @@
     uint32_t rtpTimeStartAt;
     CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTimeStartAt));
     uint32_t startSeqNo = buffer->int32Data();
-    bool pFrame = nalType == 0x1;
 
     if (data[1] & 0x40) {
         // Huh? End bit also set on the first buffer.
@@ -451,8 +456,6 @@
         complete = true;
     } else {
         List<sp<ABuffer> >::iterator it = ++queue->begin();
-        int32_t connected = 1;
-        bool snapped = false;
         while (it != queue->end()) {
             ALOGV("sequence length %zu", totalCount);
 
@@ -463,33 +466,26 @@
 
             if ((uint32_t)buffer->int32Data() != expectedSeqNo) {
                 ALOGD("sequence not complete, expected seqNo %u, got %u, nalType %u",
-                     expectedSeqNo, (unsigned)buffer->int32Data(), nalType);
-                snapped = true;
-
-                if (!pFrame) {
-                    return WRONG_SEQUENCE_NUMBER;
-                }
-            }
-
-            if (!snapped) {
-                connected++;
+                     expectedSeqNo, (uint32_t)buffer->int32Data(), nalType);
             }
 
             uint32_t rtpTime;
             CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
-            if (size < 2
-                    || data[0] != indicator
+            if (size < 2) {
+                ALOGV("Ignoring malformed FU buffer.");
+                it = queue->erase(it);
+                continue;
+            }
+            if (data[0] != indicator
                     || (data[1] & 0x1f) != nalType
                     || (data[1] & 0x80)
                     || rtpTime != rtpTimeStartAt) {
-                ALOGV("Ignoring malformed FU buffer.");
-
-                // Delete the whole start of the FU.
-
-                mNextExpectedSeqNo = expectedSeqNo + 1;
-                deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
-
-                return MALFORMED_PACKET;
+                // The assembler has already allowed enough time via the jitter buffer.
+                ALOGD("Seems another frame. Incomplete frame [%d ~ %d) \t %d FUs",
+                        startSeqNo, expectedSeqNo, (int)queue->distance(queue->begin(), it));
+                expectedSeqNo = (uint32_t)buffer->int32Data();
+                complete = true;
+                break;
             }
 
             totalSize += size - 2;
@@ -498,14 +494,6 @@
             expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
 
             if (data[1] & 0x40) {
-                if (pFrame && !recycleUnit(startSeqNo, expectedSeqNo,
-                            connected, totalCount, 0.5f)) {
-                    mNextExpectedSeqNo = expectedSeqNo;
-                    deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
-
-                    return MALFORMED_PACKET;
-                }
-
                 // This is the last fragment.
                 complete = true;
                 break;
@@ -557,6 +545,9 @@
 
     if (cvo >= 0) {
         unit->meta()->setInt32("cvo", cvo);
+        mLastCvo = cvo;
+    } else if (mLastCvo >= 0) {
+        unit->meta()->setInt32("cvo", mLastCvo);
     }
     if (source != nullptr) {
         unit->meta()->setObject("source", source);
@@ -621,35 +612,32 @@
     msg->post();
 }
 
-int32_t AAVCAssembler::pickProperSeq(const Queue *queue,
+int32_t AAVCAssembler::pickStartSeq(const Queue *queue,
         uint32_t first, int64_t play, int64_t jit) {
+    // pick the first sequence number that has the start bit.
     sp<ABuffer> buffer = *(queue->begin());
-    int32_t nextSeqNo = buffer->int32Data();
+    int32_t firstSeqNo = buffer->int32Data();
 
-    Queue::const_iterator it = queue->begin();
-    while (it != queue->end()) {
-        int64_t rtpTime = findRTPTime(first, *it);
-        // if pkt in time exists, that should be the next pivot
+    // This scan only applies when the first buffer is an FU-A fragment without the start bit.
+    unsigned nalType = buffer->data()[0] & 0x1f;
+    if (nalType != 28 || buffer->data()[1] & 0x80) {
+        return firstSeqNo;
+    }
+
+    for (auto it : *queue) {
+        const uint8_t *data = it->data();
+        int64_t rtpTime = findRTPTime(first, it);
         if (rtpTime + jit >= play) {
-            nextSeqNo = (*it)->int32Data();
             break;
         }
-        it++;
+        if ((data[1] & 0x80)) {
+            const int32_t seqNo = it->int32Data();
+            ALOGE("finding [HEAD] pkt. \t Seq# (%d ~ )[%d", firstSeqNo, seqNo);
+            firstSeqNo = seqNo;
+            break;
+        }
     }
-    return nextSeqNo;
-}
-
-bool AAVCAssembler::recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
-        size_t avail, float goodRatio) {
-    float total = end - start;
-    float valid = connected;
-    float exist = avail;
-    bool isRecycle = (valid / total) >= goodRatio;
-
-    ALOGV("checking p-frame losses.. recvBufs %f valid %f diff %f recycle? %d",
-            exist, valid, total, isRecycle);
-
-    return isRecycle;
+    return firstSeqNo;
 }
 
 int32_t AAVCAssembler::deleteUnitUnderSeq(Queue *queue, uint32_t seq) {
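pickStartSeq() and the FU handling above key off the FU-A bit layout from RFC 6184: byte 0 is the FU indicator (NAL type 28 for FU-A) and byte 1 is the FU header carrying the start (S) and end (E) bits. A small reference sketch of that layout (illustrative, not part of the assembler):

    struct FuAInfo {
        bool isFuA;        // FU indicator type == 28
        bool isStart;      // S bit: first fragment of the NAL unit
        bool isEnd;        // E bit: last fragment of the NAL unit
        unsigned nalType;  // original NAL unit type carried in the FU header
    };

    FuAInfo parseFuA(const uint8_t *data) {
        return { (data[0] & 0x1fu) == 28,
                 (data[1] & 0x80) != 0,
                 (data[1] & 0x40) != 0,
                 data[1] & 0x1fu };
    }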
diff --git a/media/libstagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/AAVCAssembler.h
index 8d19773..2f8b8ba 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.h
+++ b/media/libstagefright/rtsp/AAVCAssembler.h
@@ -22,6 +22,7 @@
 
 #include <utils/List.h>
 #include <utils/RefBase.h>
+#include <utils/String8.h>
 
 namespace android {
 
@@ -47,6 +48,7 @@
     uint32_t mNextExpectedSeqNo;
     bool mAccessUnitDamaged;
     bool mFirstIFrameProvided;
+    int32_t mLastCvo;
     uint64_t mLastIFrameProvidedAtMs;
     int64_t mLastRtpTimeJitterDataUs;
     int32_t mWidth;
@@ -64,9 +66,7 @@
 
     void submitAccessUnit();
 
-    int32_t pickProperSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
-    bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
-            size_t avail, float goodRatio);
+    int32_t pickStartSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
     int32_t deleteUnitUnderSeq(Queue *q, uint32_t seq);
 
     DISALLOW_EVIL_CONSTRUCTORS(AAVCAssembler);
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index d32e85d..b240339 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -51,6 +51,7 @@
       mNextExpectedSeqNo(0),
       mAccessUnitDamaged(false),
       mFirstIFrameProvided(false),
+      mLastCvo(-1),
       mLastIFrameProvidedAtMs(0),
       mLastRtpTimeJitterDataUs(0),
       mWidth(0),
@@ -147,7 +148,7 @@
     }
     source->putInterArrivalJitterData(rtpTime, nowTimeUs);
 
-    const int64_t startTimeMs = source->mFirstSysTime / 1000;
+    const int64_t startTimeMs = source->mSysAnchorTime / 1000;
     const int64_t nowTimeMs = nowTimeUs / 1000;
     const int32_t staticJitterTimeMs = source->getStaticJitterTimeMs();
     const int32_t baseJitterTimeMs = source->getBaseJitterTimeMs();
@@ -205,33 +206,38 @@
 
     if (!isExpired) {
         ALOGV("buffering in jitter buffer.");
+        // set an alarm for jitter buffer time expiration.
+        // adding 1ms because jitter buffer time is keep changing.
+        int64_t expTimeUs = (RtpToMs(std::abs(diffTimeRtp), clockRate) + 1) * 1000;
+        source->setJbAlarmTime(nowTimeUs, expTimeUs);
         return NOT_ENOUGH_DATA;
     }
 
     if (isFirstLineBroken) {
-        if (isSecondLineBroken) {
-            int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
-            ALOGE("buffer too late... \t RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
+        int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
+        String8 info;
+        info.appendFormat("RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
                     "Seq# %d \t ExpSeq# %d \t"
                     "JitterMs %d + (%d + %d * %.3f)",
                     (long long)diffTimeRtp, (long long)totalDiffTimeMs,
                     buffer->int32Data(), mNextExpectedSeqNo,
                     jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+        if (isSecondLineBroken) {
+            ALOGE("%s", info.string());
             printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
             printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
 
-            mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
         }  else {
-            ALOGW("=== WARNING === buffer arrived after %d + %d = %d ms === WARNING === ",
-                    jitterTimeMs, tryJbTimeMs, jitterTimeMs + tryJbTimeMs);
+            ALOGW("%s", info.string());
         }
     }
 
     if (mNextExpectedSeqNoValid) {
-        int32_t size = queue->size();
+        mNextExpectedSeqNo = pickStartSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
         int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
 
         if (cntRemove > 0) {
+            int32_t size = queue->size();
             source->noticeAbandonBuffer(cntRemove);
             ALOGW("delete %d of %d buffers", cntRemove, size);
         }
@@ -466,7 +472,6 @@
     uint32_t rtpTimeStartAt;
     CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTimeStartAt));
     uint32_t startSeqNo = buffer->int32Data();
-    bool pFrame = (nalType < 0x10);
 
     if (data[2] & 0x40) {
         // Huh? End bit also set on the first buffer.
@@ -476,8 +481,6 @@
         complete = true;
     } else {
         List<sp<ABuffer> >::iterator it = ++queue->begin();
-        int32_t connected = 1;
-        bool snapped = false;
         while (it != queue->end()) {
             ALOGV("sequence length %zu", totalCount);
 
@@ -488,33 +491,26 @@
 
             if ((uint32_t)buffer->int32Data() != expectedSeqNo) {
                 ALOGV("sequence not complete, expected seqNo %u, got %u, nalType %u",
-                     expectedSeqNo, (uint32_t)buffer->int32Data(), nalType);
-                snapped = true;
-
-                if (!pFrame) {
-                    return WRONG_SEQUENCE_NUMBER;
-                }
-            }
-
-            if (!snapped) {
-                connected++;
+                     expectedSeqNo, (unsigned)buffer->int32Data(), nalType);
             }
 
             uint32_t rtpTime;
             CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
-            if (size < 3
-                    || ((data[0] >> 1) & H265_NALU_MASK) != indicator
+            if (size < 3) {
+                ALOGV("Ignoring malformed FU buffer.");
+                it = queue->erase(it);
+                continue;
+            }
+            if (((data[0] >> 1) & H265_NALU_MASK) != indicator
                     || (data[2] & H265_NALU_MASK) != nalType
                     || (data[2] & 0x80)
                     || rtpTime != rtpTimeStartAt) {
-                ALOGV("Ignoring malformed FU buffer.");
-
-                // Delete the whole start of the FU.
-
-                mNextExpectedSeqNo = expectedSeqNo + 1;
-                deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
-
-                return MALFORMED_PACKET;
+                // The assembler has already allowed enough time via the jitter buffer.
+                ALOGD("Seems another frame. Incomplete frame [%d ~ %d) \t %d FUs",
+                        startSeqNo, expectedSeqNo, (int)queue->distance(queue->begin(), it));
+                expectedSeqNo = (uint32_t)buffer->int32Data();
+                complete = true;
+                break;
             }
 
             totalSize += size - 3;
@@ -523,13 +519,6 @@
             expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
 
             if (data[2] & 0x40) {
-                if (pFrame && !recycleUnit(startSeqNo, expectedSeqNo,
-                        connected, totalCount, 0.5f)) {
-                    mNextExpectedSeqNo = expectedSeqNo;
-                    deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
-
-                    return MALFORMED_PACKET;
-                }
                 // This is the last fragment.
                 complete = true;
                 break;
@@ -579,6 +568,9 @@
 
     if (cvo >= 0) {
         unit->meta()->setInt32("cvo", cvo);
+        mLastCvo = cvo;
+    } else if (mLastCvo >= 0) {
+        unit->meta()->setInt32("cvo", mLastCvo);
     }
 
     addSingleNALUnit(unit);
@@ -635,35 +627,32 @@
     msg->post();
 }
 
-int32_t AHEVCAssembler::pickProperSeq(const Queue *queue,
+int32_t AHEVCAssembler::pickStartSeq(const Queue *queue,
         uint32_t first, int64_t play, int64_t jit) {
+    // pick the first sequence number that has the start bit.
     sp<ABuffer> buffer = *(queue->begin());
-    int32_t nextSeqNo = buffer->int32Data();
+    int32_t firstSeqNo = buffer->int32Data();
 
-    Queue::const_iterator it = queue->begin();
-    while (it != queue->end()) {
-        int64_t rtpTime = findRTPTime(first, *it);
-        // if pkt in time exists, that should be the next pivot
+    // This scan only applies when the first buffer is an FU fragment without the start bit.
+    unsigned nalType = buffer->data()[0] & 0x1f;
+    if (nalType != 28 || buffer->data()[2] & 0x80) {
+        return firstSeqNo;
+    }
+
+    for (auto it : *queue) {
+        const uint8_t *data = it->data();
+        int64_t rtpTime = findRTPTime(first, it);
         if (rtpTime + jit >= play) {
-            nextSeqNo = (*it)->int32Data();
             break;
         }
-        it++;
+        if ((data[2] & 0x80)) {
+            const int32_t seqNo = it->int32Data();
+            ALOGE("finding [HEAD] pkt. \t Seq# (%d ~ )[%d", firstSeqNo, seqNo);
+            firstSeqNo = seqNo;
+            break;
+        }
     }
-    return nextSeqNo;
-}
-
-bool AHEVCAssembler::recycleUnit(uint32_t start, uint32_t end,  uint32_t connected,
-         size_t avail, float goodRatio) {
-    float total = end - start;
-    float valid = connected;
-    float exist = avail;
-    bool isRecycle = (valid / total) >= goodRatio;
-
-    ALOGV("checking p-frame losses.. recvBufs %f valid %f diff %f recycle? %d",
-            exist, valid, total, isRecycle);
-
-    return isRecycle;
+    return firstSeqNo;
 }
 
 int32_t AHEVCAssembler::deleteUnitUnderSeq(Queue *queue, uint32_t seq) {
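The HEVC path mirrors the AVC one, but per RFC 7798 the payload header is two bytes and the FU header sits in byte 2, which is why the checks above test data[2] for the start (0x80) and end (0x40) bits. A reference sketch of that layout (illustrative only, not part of the assembler):

    struct HevcFuInfo {
        bool isFu;         // payload header type ((data[0] >> 1) & 0x3f) == 49
        bool isStart;      // S bit in the FU header
        bool isEnd;        // E bit in the FU header
        unsigned nalType;  // original NAL unit type from the FuType field
    };

    HevcFuInfo parseHevcFu(const uint8_t *data) {
        return { ((data[0] >> 1) & 0x3fu) == 49,
                 (data[2] & 0x80) != 0,
                 (data[2] & 0x40) != 0,
                 data[2] & 0x3fu };
    }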
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/AHEVCAssembler.h
index 68777a7..9575d8c 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.h
+++ b/media/libstagefright/rtsp/AHEVCAssembler.h
@@ -22,6 +22,7 @@
 
 #include <utils/List.h>
 #include <utils/RefBase.h>
+#include <utils/String8.h>
 
 namespace android {
 
@@ -48,6 +49,7 @@
     uint32_t mNextExpectedSeqNo;
     bool mAccessUnitDamaged;
     bool mFirstIFrameProvided;
+    int32_t mLastCvo;
     uint64_t mLastIFrameProvidedAtMs;
     int64_t mLastRtpTimeJitterDataUs;
     int32_t mWidth;
@@ -65,9 +67,7 @@
 
     void submitAccessUnit();
 
-    int32_t pickProperSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
-    bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
-             size_t avail, float goodRatio);
+    int32_t pickStartSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
     int32_t deleteUnitUnderSeq(Queue *queue, uint32_t seq);
 
     DISALLOW_EVIL_CONSTRUCTORS(AHEVCAssembler);
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index 8f4df8e..169df46 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -594,4 +594,15 @@
     return mFormat;
 }
 
+bool APacketSource::isVideo() {
+    bool isVideo = false;
+
+    const char *mime;
+    if (mFormat->findCString(kKeyMIMEType, &mime)) {
+        isVideo = !strncasecmp(mime, "video/", 6);
+    }
+
+    return isVideo;
+}
+
 }  // namespace android
diff --git a/media/libstagefright/rtsp/APacketSource.h b/media/libstagefright/rtsp/APacketSource.h
index 530e537..2b9b5ba 100644
--- a/media/libstagefright/rtsp/APacketSource.h
+++ b/media/libstagefright/rtsp/APacketSource.h
@@ -33,6 +33,8 @@
 
     virtual sp<MetaData> getFormat();
 
+    bool isVideo();
+
 protected:
     virtual ~APacketSource();
 
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 33c85a7..0bd342a 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -18,9 +18,7 @@
 #define LOG_TAG "ARTPConnection"
 #include <utils/Log.h>
 
-#include "ARTPAssembler.h"
 #include "ARTPConnection.h"
-
 #include "ARTPSource.h"
 #include "ASessionDescription.h"
 
@@ -104,6 +102,11 @@
     msg->post();
 }
 
+void ARTPConnection::seekStream() {
+    sp<AMessage> msg = new AMessage(kWhatSeekStream, this);
+    msg->post();
+}
+
 void ARTPConnection::removeStream(int rtpSocket, int rtcpSocket) {
     sp<AMessage> msg = new AMessage(kWhatRemoveStream, this);
     msg->setInt32("rtp-socket", rtpSocket);
@@ -283,6 +286,12 @@
             break;
         }
 
+        case kWhatSeekStream:
+        {
+            onSeekStream(msg);
+            break;
+        }
+
         case kWhatRemoveStream:
         {
             onRemoveStream(msg);
@@ -295,6 +304,12 @@
             break;
         }
 
+        case kWhatAlarmStream:
+        {
+            onAlarmStream(msg);
+            break;
+        }
+
         case kWhatInjectPacket:
         {
             onInjectPacket(msg);
@@ -353,6 +368,18 @@
     }
 }
 
+void ARTPConnection::onSeekStream(const sp<AMessage> &msg) {
+    (void)msg; // unused param as of now.
+    List<StreamInfo>::iterator it = mStreams.begin();
+    while (it != mStreams.end()) {
+        for (size_t i = 0; i < it->mSources.size(); ++i) {
+            sp<ARTPSource> source = it->mSources.valueAt(i);
+            source->timeReset();
+        }
+        ++it;
+    }
+}
+
 void ARTPConnection::onRemoveStream(const sp<AMessage> &msg) {
     int32_t rtpSocket, rtcpSocket;
     CHECK(msg->findInt32("rtp-socket", &rtpSocket));
@@ -440,14 +467,16 @@
 
             if (err == -ECONNRESET) {
                 // socket failure, this stream is dead, Jim.
-                sp<AMessage> notify = it->mNotifyMsg->dup();
-                notify->setInt32("rtcp-event", 1);
-                notify->setInt32("payload-type", 400);
-                notify->setInt32("feedback-type", 1);
-                notify->setInt32("sender", it->mSources.valueAt(0)->getSelfID());
-                notify->post();
+                for (size_t i = 0; i < it->mSources.size(); ++i) {
+                    sp<AMessage> notify = it->mNotifyMsg->dup();
+                    notify->setInt32("rtcp-event", 1);
+                    notify->setInt32("payload-type", 400);
+                    notify->setInt32("feedback-type", 1);
+                    notify->setInt32("sender", it->mSources.valueAt(i)->getSelfID());
+                    notify->post();
 
-                ALOGW("failed to receive RTP/RTCP datagram.");
+                    ALOGW("failed to receive RTP/RTCP datagram.");
+                }
                 it = mStreams.erase(it);
                 continue;
             }
@@ -479,8 +508,6 @@
                     if (n != (ssize_t)buffer->size()) {
                         ALOGW("failed to send RTCP TMMBR (%s).",
                                 n >= 0 ? "connection gone" : strerror(errno));
-
-                        it = mStreams.erase(it);
                         continue;
                     }
                 }
@@ -531,8 +558,6 @@
                 if (n != (ssize_t)buffer->size()) {
                     ALOGW("failed to send RTCP receiver report (%s).",
                             n >= 0 ? "connection gone" : strerror(errno));
-
-                    it = mStreams.erase(it);
                     continue;
                 }
 
@@ -548,6 +573,13 @@
     }
 }
 
+void ARTPConnection::onAlarmStream(const sp<AMessage> msg) {
+    sp<ARTPSource> source = nullptr;
+    if (msg->findObject("source", (sp<android::RefBase>*)&source)) {
+        source->processRTPPacket();
+    }
+}
+
 status_t ARTPConnection::receive(StreamInfo *s, bool receiveRTP) {
     ALOGV("receiving %s", receiveRTP ? "RTP" : "RTCP");
 
@@ -585,7 +617,14 @@
     } while (nbytes < 0 && errno == EINTR);
 
     if (nbytes <= 0) {
-        return -ECONNRESET;
+        ALOGW("failed to recv rtp packet. cause=%s", strerror(errno));
+        // ECONNREFUSED may be reported by a subsequent recvfrom() call if one of
+        // our outgoing packets could not be delivered to the remote peer via sendto().
+        if (errno == ECONNREFUSED) {
+            return -ECONNREFUSED;
+        } else {
+            return -ECONNRESET;
+        }
     }
 
     buffer->setRange(0, nbytes);
@@ -629,16 +668,14 @@
                     pRemoteRTCPAddr, sizeSockSt);
         } while (n < 0 && errno == EINTR);
 
+        if (n < 0) {
+            ALOGW("failed to send rtcp packet. cause=%s", strerror(errno));
+        }
+
         return n;
 }
 
 status_t ARTPConnection::parseRTP(StreamInfo *s, const sp<ABuffer> &buffer) {
-    if (s->mNumRTPPacketsReceived++ == 0) {
-        sp<AMessage> notify = s->mNotifyMsg->dup();
-        notify->setInt32("first-rtp", true);
-        notify->post();
-    }
-
     size_t size = buffer->size();
 
     if (size < 12) {
@@ -720,9 +757,23 @@
         meta->setInt32("cvo", cvoDegrees);
     }
 
-    buffer->setInt32Data(u16at(&data[2]));
+    int32_t seq = u16at(&data[2]);
+    buffer->setInt32Data(seq);
     buffer->setRange(payloadOffset, size - payloadOffset);
 
+    if (s->mNumRTPPacketsReceived++ == 0) {
+        sp<AMessage> notify = s->mNotifyMsg->dup();
+        notify->setInt32("first-rtp", true);
+        notify->setInt32("rtcp-event", 1);
+        notify->setInt32("payload-type", ARTPSource::RTP_FIRST_PACKET);
+        notify->setInt32("rtp-time", (int32_t)rtpTime);
+        notify->setInt32("rtp-seq-num", seq);
+        notify->setInt64("recv-time-us", ALooper::GetNowUs());
+        notify->post();
+
+        ALOGD("send first-rtp event to upper layer");
+    }
+
     source->processRTPPacket(buffer);
 
     return OK;
@@ -779,14 +830,12 @@
     if (s->mNumRTCPPacketsReceived++ == 0) {
         sp<AMessage> notify = s->mNotifyMsg->dup();
         notify->setInt32("first-rtcp", true);
+        notify->setInt32("rtcp-event", 1);
+        notify->setInt32("payload-type", ARTPSource::RTCP_FIRST_PACKET);
+        notify->setInt64("recv-time-us", ALooper::GetNowUs());
         notify->post();
 
-        ALOGI("send first-rtcp event to upper layer as ImsRxNotice");
-        sp<AMessage> imsNotify = s->mNotifyMsg->dup();
-        imsNotify->setInt32("rtcp-event", 1);
-        imsNotify->setInt32("payload-type", 101);
-        imsNotify->setInt32("feedback-type", 0);
-        imsNotify->post();
+        ALOGD("send first-rtcp event to upper layer");
     }
 
     const uint8_t *data = buffer->data();
@@ -883,7 +932,7 @@
     int64_t nowUs = ALooper::GetNowUs();
     int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
     int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
-    source->notifyPktInfo(bitrate, true /* isRegular */);
+    source->notifyPktInfo(bitrate, nowUs, true /* isRegular */);
 
     source->byeReceived();
 
@@ -1065,11 +1114,14 @@
                 srcId, info->mSessionDesc, info->mIndex, info->mNotifyMsg);
 
         if (mFlags & kViLTEConnection) {
+            setStaticJitterTimeMs(50);
             source->setPeriodicFIR(false);
         }
 
         source->setSelfID(mSelfID);
         source->setStaticJitterTimeMs(mStaticJitterTimeMs);
+        sp<AMessage> timer = new AMessage(kWhatAlarmStream, this);
+        source->setJbTimer(timer);
         info->mSources.add(srcId, source);
     } else {
         source = info->mSources.valueAt(index);
@@ -1117,7 +1169,7 @@
             for (size_t i = 0; i < s->mSources.size(); ++i) {
                 sp<ARTPSource> source = s->mSources.valueAt(i);
                 if (source->isNeedToEarlyNotify()) {
-                    source->notifyPktInfo(bitrate, false /* isRegular */);
+                    source->notifyPktInfo(bitrate, nowUs, false /* isRegular */);
                     mLastEarlyNotifyTimeUs = nowUs + (1000000ll * 3600 * 24); // after 1 day
                 }
             }
@@ -1148,7 +1200,7 @@
             buffer->setRange(0, 0);
             for (size_t i = 0; i < s->mSources.size(); ++i) {
                 sp<ARTPSource> source = s->mSources.valueAt(i);
-                source->notifyPktInfo(bitrate, true /* isRegular */);
+                source->notifyPktInfo(bitrate, nowUs, true /* isRegular */);
             }
             ++it;
         }
diff --git a/media/libstagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/ARTPConnection.h
index ea0a374..36cca31 100644
--- a/media/libstagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/ARTPConnection.h
@@ -40,7 +40,7 @@
             const sp<ASessionDescription> &sessionDesc, size_t index,
             const sp<AMessage> &notify,
             bool injected);
-
+    void seekStream();
     void removeStream(int rtpSocket, int rtcpSocket);
 
     void injectPacket(int index, const sp<ABuffer> &buffer);
@@ -69,9 +69,11 @@
 private:
     enum {
         kWhatAddStream,
+        kWhatSeekStream,
         kWhatRemoveStream,
         kWhatPollStreams,
         kWhatInjectPacket,
+        kWhatAlarmStream,
     };
 
     static const int64_t kSelectTimeoutUs;
@@ -94,8 +96,10 @@
     int32_t mCumulativeBytes;
 
     void onAddStream(const sp<AMessage> &msg);
+    void onSeekStream(const sp<AMessage> &msg);
     void onRemoveStream(const sp<AMessage> &msg);
     void onPollStreams();
+    void onAlarmStream(const sp<AMessage> msg);
     void onInjectPacket(const sp<AMessage> &msg);
     void onSendReceiverReports();
     void checkRxBitrate(int64_t nowUs);
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 8787d65..38a370b 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -44,10 +44,11 @@
         uint32_t id,
         const sp<ASessionDescription> &sessionDesc, size_t index,
         const sp<AMessage> &notify)
-    : mFirstSeqNumber(0),
-      mFirstRtpTime(0),
+    : mFirstRtpTime(0),
       mFirstSysTime(0),
       mClockRate(0),
+      mSysAnchorTime(0),
+      mLastSysAnchorTimeUpdatedUs(0),
       mFirstSsrc(0),
       mHighestNackNumber(0),
       mID(id),
@@ -58,9 +59,14 @@
       mPrevNumBuffersReceived(0),
       mPrevExpectedForRR(0),
       mPrevNumBuffersReceivedForRR(0),
+      mLatestRtpTime(0),
       mStaticJbTimeMs(kStaticJitterTimeMs),
-      mLastNTPTime(0),
-      mLastNTPTimeUpdateUs(0),
+      mLastSrRtpTime(0),
+      mLastSrNtpTime(0),
+      mLastSrUpdateTimeUs(0),
+      mIsFirstRtpRtcpGap(true),
+      mAvgRtpRtcpGapMs(0),
+      mAvgUnderlineDelayMs(0),
       mIssueFIRRequests(false),
       mIssueFIRByAssembler(false),
       mLastFIRRequestUs(-1),
@@ -106,6 +112,7 @@
     int32_t clockRate, numChannels;
     ASessionDescription::ParseFormatDesc(desc.c_str(), &clockRate, &numChannels);
     mClockRate = clockRate;
+    mLastJbAlarmTimeUs = 0;
     mJitterCalc = new JitterCalc(mClockRate);
 }
 
@@ -119,34 +126,144 @@
     }
 }
 
+void ARTPSource::processRTPPacket() {
+    if (mAssembler != NULL && !mQueue.empty()) {
+        mAssembler->onPacketReceived(this);
+    }
+}
+
 void ARTPSource::timeUpdate(uint32_t rtpTime, uint64_t ntpTime) {
-    mLastNTPTime = ntpTime;
-    mLastNTPTimeUpdateUs = ALooper::GetNowUs();
+    mLastSrRtpTime = rtpTime;
+    mLastSrNtpTime = ntpTime;
+    mLastSrUpdateTimeUs = ALooper::GetNowUs();
 
     sp<AMessage> notify = mNotify->dup();
     notify->setInt32("time-update", true);
     notify->setInt32("rtp-time", rtpTime);
     notify->setInt64("ntp-time", ntpTime);
+    notify->setInt32("rtcp-event", 1);
+    notify->setInt32("payload-type", RTCP_SR);
+    notify->setInt64("recv-time-us", mLastSrUpdateTimeUs);
     notify->post();
 }
 
-bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
-    uint32_t seqNum = (uint32_t)buffer->int32Data();
+void ARTPSource::timeReset() {
+    mFirstRtpTime = 0;
+    mFirstSysTime = 0;
+    mSysAnchorTime = 0;
+    mLastSysAnchorTimeUpdatedUs = 0;
+    mFirstSsrc = 0;
+    mHighestNackNumber = 0;
+    mHighestSeqNumber = 0;
+    mPrevExpected = 0;
+    mBaseSeqNumber = 0;
+    mNumBuffersReceived = 0;
+    mPrevNumBuffersReceived = 0;
+    mPrevExpectedForRR = 0;
+    mPrevNumBuffersReceivedForRR = 0;
+    mLatestRtpTime = 0;
+    mLastSrRtpTime = 0;
+    mLastSrNtpTime = 0;
+    mLastSrUpdateTimeUs = 0;
+    mIsFirstRtpRtcpGap = true;
+    mAvgRtpRtcpGapMs = 0;
+    mAvgUnderlineDelayMs = 0;
+    mIssueFIRByAssembler = false;
+    mLastFIRRequestUs = -1;
+}
 
-    int32_t ssrc = 0;
+void ARTPSource::calcTimeGapRtpRtcp(const sp<ABuffer> &buffer, int64_t nowUs) {
+    if (mLastSrUpdateTimeUs == 0) {
+        return;
+    }
+
+    int64_t elapsedMs = (nowUs - mLastSrUpdateTimeUs) / 1000;
+    int64_t elapsedRtpTime = (elapsedMs * (mClockRate / 1000));
+    uint32_t rtpTime;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+    int64_t anchorRtpTime = mLastSrRtpTime + elapsedRtpTime;
+    int64_t rtpTimeGap = anchorRtpTime - rtpTime;
+    // rtpTime cannot be ahead of its anchor time, because the rtpTime of an
+    // RTP packet represents the frame capture time, while the anchorRtpTime
+    // derived from the RTCP SR packet represents the RTP packetization time.
+    if (rtpTimeGap < 0 || rtpTimeGap > (mClockRate * 60)) {
+        // ignore invalid gaps such as a negative delay or one of more than a minute.
+        return;
+    }
+
+    int64_t rtpTimeGapMs = (rtpTimeGap * 1000 / mClockRate);
+    if (mIsFirstRtpRtcpGap) {
+        mIsFirstRtpRtcpGap = false;
+        mAvgRtpRtcpGapMs = rtpTimeGapMs;
+    } else {
+        // This measures the average RTP timestamp distance between RTP and RTCP SR packets.
+        // The RTP timestamp of an RTP packet represents the raw frame capture time,
+        // while the RTP timestamp of an RTCP SR packet represents its packetization time.
+        // The value therefore shows how long it takes a raw frame to become an RTP packet,
+        // and can be used to reason about a/v sync and the sender's own delay
+        // on this media stream.
+        mAvgRtpRtcpGapMs = ((mAvgRtpRtcpGapMs * 15) + rtpTimeGapMs) / 16;
+    }
+}
+
+void ARTPSource::calcUnderlineDelay(const sp<ABuffer> &buffer, int64_t nowUs) {
+    int64_t elapsedMs = (nowUs - mSysAnchorTime) / 1000;
+    int64_t elapsedRtpTime = (elapsedMs * (mClockRate / 1000));
+    int64_t expectedRtpTime = mFirstRtpTime + elapsedRtpTime;
+
+    int32_t rtpTime;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+    int32_t delayMs = (expectedRtpTime - rtpTime) / (mClockRate / 1000);
+
+    mAvgUnderlineDelayMs = ((mAvgUnderlineDelayMs * 15) + delayMs) / 16;
+}
+
+void ARTPSource::adjustAnchorTimeIfRequired(int64_t nowUs) {
+    if (nowUs - mLastSysAnchorTimeUpdatedUs < 1000000L) {
+        return;
+    }
+
+    if (mAvgUnderlineDelayMs < -30) {
+        // step the anchor forward by a quarter of the measured underlying delay.
+        mSysAnchorTime += (int64_t)(mAvgUnderlineDelayMs * 1000 / 4);
+        ALOGD("anchor time updated: original(%lld), anchor(%lld), diffMs(%lld)",
+                (long long)mFirstSysTime, (long long)mSysAnchorTime,
+                (long long)(mFirstSysTime - mSysAnchorTime) / 1000);
+
+        mAvgUnderlineDelayMs = 0;
+        mLastSysAnchorTimeUpdatedUs = nowUs;
+
+        // reset the jitter statistics since the anchor time was adjusted.
+        mJitterCalc->init(mFirstRtpTime, mSysAnchorTime, 0, mStaticJbTimeMs * 1000);
+    }
+}
+
+bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
+    int64_t nowUs = ALooper::GetNowUs();
+    uint32_t seqNum = (uint32_t)buffer->int32Data();
+    int32_t ssrc = 0, rtpTime = 0;
+
     buffer->meta()->findInt32("ssrc", &ssrc);
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+    mLatestRtpTime = rtpTime;
 
     if (mNumBuffersReceived++ == 0 && mFirstSysTime == 0) {
-        uint32_t firstRtpTime;
-        CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&firstRtpTime));
-        mFirstSysTime = ALooper::GetNowUs();
+        mFirstSysTime = nowUs;
+        mSysAnchorTime = nowUs;
+        mLastSysAnchorTimeUpdatedUs = nowUs;
         mHighestSeqNumber = seqNum;
         mBaseSeqNumber = seqNum;
-        mFirstRtpTime = firstRtpTime;
+        mFirstRtpTime = rtpTime;
         mFirstSsrc = ssrc;
         ALOGD("first-rtp arrived: first-rtp-time=%u, sys-time=%lld, seq-num=%u, ssrc=%d",
                 mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber, mFirstSsrc);
         mJitterCalc->init(mFirstRtpTime, mFirstSysTime, 0, mStaticJbTimeMs * 1000);
+        if (mQueue.size() > 0) {
+            ALOGD("clearing buffers which belonged to previous timeline"
+                    " since a base timeline has been changed.");
+            mQueue.clear();
+        }
         mQueue.push_back(buffer);
         return true;
     }
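calcTimeGapRtpRtcp() and calcUnderlineDelay() above both smooth their measurements with the same (avg * 15 + sample) / 16 update, i.e. an exponential moving average that weights the newest sample by 1/16 so single outliers barely move the estimate. A minimal sketch of that update (hypothetical helper name):

    // One EWMA step with alpha = 1/16; after roughly 16 samples the average
    // mostly reflects recent history.
    double ewmaStep(double avg, double sample) {
        return (avg * 15 + sample) / 16;
    }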
@@ -156,6 +273,10 @@
         return false;
     }
 
+    calcTimeGapRtpRtcp(buffer, nowUs);
+    calcUnderlineDelay(buffer, nowUs);
+    adjustAnchorTimeIfRequired(nowUs);
+
     // Only the lower 16-bit of the sequence numbers are transmitted,
     // derive the high-order bits by choosing the candidate closest
     // to the highest sequence number (extended to 32 bits) received so far.
@@ -340,11 +461,11 @@
 
     uint32_t LSR = 0;
     uint32_t DLSR = 0;
-    if (mLastNTPTime != 0) {
-        LSR = (mLastNTPTime >> 16) & 0xffffffff;
+    if (mLastSrNtpTime != 0) {
+        LSR = (mLastSrNtpTime >> 16) & 0xffffffff;
 
         DLSR = (uint32_t)
-            ((ALooper::GetNowUs() - mLastNTPTimeUpdateUs) * 65536.0 / 1E6);
+            ((ALooper::GetNowUs() - mLastSrUpdateTimeUs) * 65536.0 / 1E6);
     }
 
     data[24] = LSR >> 24;
@@ -543,6 +664,35 @@
     mJitterCalc->putInterArrivalData(timeStamp, arrivalTime);
 }
 
+void ARTPSource::setJbTimer(const sp<AMessage> timer) {
+    mJbTimer = timer;
+}
+
+void ARTPSource::setJbAlarmTime(int64_t nowTimeUs, int64_t alarmAfterUs) {
+    if (mJbTimer == NULL) {
+        return;
+    }
+    int64_t alarmTimeUs = nowTimeUs + alarmAfterUs;
+    bool alarm = false;
+    if (mLastJbAlarmTimeUs <= nowTimeUs) {
+        // no alarm is currently pending.
+        mLastJbAlarmTimeUs = nowTimeUs + alarmAfterUs;
+        alarm = true;
+    } else if (mLastJbAlarmTimeUs > alarmTimeUs + 5000L) {
+        // bring the alarm forward when it moves up by more than 5ms.
+        mLastJbAlarmTimeUs = alarmTimeUs;
+        alarm = true;
+    } else {
+        // do not set a new alarm if it is close to the previous one.
+    }
+
+    if (alarm) {
+        sp<AMessage> notify = mJbTimer->dup();
+        notify->setObject("source", this);
+        notify->post(alarmAfterUs);
+    }
+}
+
 bool ARTPSource::isNeedToEarlyNotify() {
     uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
     int32_t intervalExpectedInNow = expected - mPrevExpected;
@@ -553,7 +703,7 @@
     return false;
 }
 
-void ARTPSource::notifyPktInfo(int32_t bitrate, bool isRegular) {
+void ARTPSource::notifyPktInfo(int32_t bitrate, int64_t nowUs, bool isRegular) {
     int32_t payloadType = isRegular ? RTP_QUALITY : RTP_QUALITY_EMC;
 
     sp<AMessage> notify = mNotify->dup();
@@ -567,6 +717,11 @@
     notify->setInt32("prev-expected", mPrevExpected);
     notify->setInt32("num-buf-recv", mNumBuffersReceived);
     notify->setInt32("prev-num-buf-recv", mPrevNumBuffersReceived);
+    notify->setInt32("latest-rtp-time", mLatestRtpTime);
+    notify->setInt64("recv-time-us", nowUs);
+    notify->setInt32("rtp-jitter-time-ms",
+            std::max(getBaseJitterTimeMs(), getStaticJitterTimeMs()));
+    notify->setInt32("rtp-rtcpsr-time-gap-ms", (int32_t)mAvgRtpRtcpGapMs);
     notify->post();
 
     if (isRegular) {
diff --git a/media/libstagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/ARTPSource.h
index 0edff23..4984e91 100644
--- a/media/libstagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/ARTPSource.h
@@ -31,7 +31,7 @@
 
 namespace android {
 
-const uint32_t kStaticJitterTimeMs = 50;   // 50ms
+const uint32_t kStaticJitterTimeMs = 100;   // 100ms
 
 struct ABuffer;
 struct AMessage;
@@ -49,6 +49,8 @@
         RTCP_FIRST_PACKET = 101,
         RTP_QUALITY = 102,
         RTP_QUALITY_EMC = 103,
+        RTCP_SR = 200,
+        RTCP_RR = 201,
         RTCP_TSFB = 205,
         RTCP_PSFB = 206,
         RTP_CVO = 300,
@@ -56,6 +58,8 @@
     };
 
     void processRTPPacket(const sp<ABuffer> &buffer);
+    void processRTPPacket();
+    void timeReset();
     void timeUpdate(uint32_t rtpTime, uint64_t ntpTime);
     void byeReceived();
 
@@ -76,19 +80,23 @@
     void setStaticJitterTimeMs(const uint32_t jbTimeMs);
     void putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime);
     void putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime);
+    void setJbTimer(const sp<AMessage> timer);
+    void setJbAlarmTime(int64_t nowTimeUs, int64_t alarmAfterUs);
 
     bool isNeedToEarlyNotify();
-    void notifyPktInfo(int32_t bitrate, bool isRegular);
+    void notifyPktInfo(int32_t bitrate, int64_t nowUs, bool isRegular);
     // FIR needs to be sent by missing packet or broken video image.
     void onIssueFIRByAssembler();
 
     void noticeAbandonBuffer(int cnt=1);
 
-    int32_t mFirstSeqNumber;
     uint32_t mFirstRtpTime;
     int64_t mFirstSysTime;
     int32_t mClockRate;
 
+    int64_t mSysAnchorTime;
+    int64_t mLastSysAnchorTimeUpdatedUs;
+
     int32_t mFirstSsrc;
     int32_t mHighestNackNumber;
 
@@ -103,11 +111,14 @@
     uint32_t mPrevExpectedForRR;
     int32_t mPrevNumBuffersReceivedForRR;
 
+    uint32_t mLatestRtpTime;
+
     List<sp<ABuffer> > mQueue;
     sp<ARTPAssembler> mAssembler;
 
     int32_t mStaticJbTimeMs;
     sp<JitterCalc> mJitterCalc;
+    sp<AMessage> mJbTimer;
 
     typedef struct infoNACK {
         uint16_t seqNum;
@@ -120,8 +131,14 @@
     std::map<uint16_t, infoNACK> mNACKMap;
     int getSeqNumToNACK(List<int>& list, int size);
 
-    uint64_t mLastNTPTime;
-    int64_t mLastNTPTimeUpdateUs;
+    uint32_t mLastSrRtpTime;
+    uint64_t mLastSrNtpTime;
+    int64_t mLastSrUpdateTimeUs;
+
+    bool mIsFirstRtpRtcpGap;
+    double mAvgRtpRtcpGapMs;
+    double mAvgUnderlineDelayMs;
+    int64_t mLastJbAlarmTimeUs;
 
     bool mIssueFIRRequests;
     bool mIssueFIRByAssembler;
@@ -130,6 +147,10 @@
 
     sp<AMessage> mNotify;
 
+    void calcTimeGapRtpRtcp(const sp<ABuffer> &buffer, int64_t nowUs);
+    void calcUnderlineDelay(const sp<ABuffer> &buffer, int64_t nowUs);
+    void adjustAnchorTimeIfRequired(int64_t nowUs);
+
     bool queuePacket(const sp<ABuffer> &buffer);
 
     DISALLOW_EVIL_CONSTRUCTORS(ARTPSource);
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index ec70952..11c7aeb 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -204,8 +204,6 @@
     mRTPTimeBase = 0;
     mNumRTPSent = 0;
     mNumRTPOctetsSent = 0;
-    mLastRTPTime = 0;
-    mLastNTPTime = 0;
 
     mOpponentID = 0;
     mBitrate = 192000;
@@ -216,6 +214,7 @@
     mRTPSockNetwork = 0;
 
     mMode = INVALID;
+    mClockRate = 16000;
 }
 
 status_t ARTPWriter::addSource(const sp<MediaSource> &source) {
@@ -265,15 +264,28 @@
         updateSocketNetwork(sockNetwork);
 
     if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
+        // rfc6184: RTP Payload Format for H.264 Video
+        // The clock rate in the "a=rtpmap" line MUST be 90000.
         mMode = H264;
+        mClockRate = 90000;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+        // rfc7798: RTP Payload Format for High Efficiency Video Coding (HEVC)
+        // The clock rate in the "a=rtpmap" line MUST be 90000.
         mMode = H265;
+        mClockRate = 90000;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
         mMode = H263;
+        // rfc4629: RTP Payload Format for ITU-T Rec. H.263 Video
+        // The clock rate in the "a=rtpmap" line MUST be 90000.
+        mClockRate = 90000;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
         mMode = AMR_NB;
+        // rfc4867: RTP Payload Format ... (AMR) and (AMR-WB)
+        // The RTP clock rate in "a=rtpmap" MUST be 8000 for AMR and 16000 for AMR-WB
+        mClockRate = 8000;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
         mMode = AMR_WB;
+        mClockRate = 16000;
     } else {
         TRESPASS();
     }
@@ -327,16 +339,17 @@
 
     while (buffer->range_length() > 0) {
         const uint8_t *NALPtr = (const uint8_t *)buffer->data() + buffer->range_offset();
+        uint8_t nalType = (*NALPtr) & H264_NALU_MASK;
 
         MediaBufferBase **targetPtr = NULL;
-        if ((*NALPtr & H264_NALU_MASK) == H264_NALU_SPS) {
+        if (nalType == H264_NALU_SPS) {
             targetPtr = spsBuffer;
-        } else if ((*NALPtr & H264_NALU_MASK) == H264_NALU_PPS) {
+        } else if (nalType == H264_NALU_PPS) {
             targetPtr = ppsBuffer;
         } else {
             return;
         }
-        ALOGV("SPS(7) or PPS(8) found. Type %d", *NALPtr & H264_NALU_MASK);
+        ALOGV("SPS(7) or PPS(8) found. Type %d", nalType);
 
         uint32_t bufferSize = buffer->range_length();
         MediaBufferBase *&target = *targetPtr;
@@ -417,18 +430,18 @@
             }
         }
 
+        uint32_t targetSize;
         if (target != NULL) {
             target->release();
         }
-        uint32_t targetSize;
         // note that targetSize is never 0 as the first byte is never part
         // of a start prefix
         if (isBoundFound) {
             targetSize = i - SPCSize + 1;
-            target = MediaBufferBase::Create(j);
+            target = MediaBufferBase::Create(targetSize);
             memcpy(target->data(),
                    (const uint8_t *)buffer->data() + buffer->range_offset(),
-                   j);
+                   targetSize);
             buffer->set_range(buffer->range_offset() + targetSize + SPCSize,
                               buffer->range_length() - targetSize - SPCSize);
         } else {
@@ -645,19 +658,27 @@
     data[6] = (mSourceID >> 8) & 0xff;
     data[7] = mSourceID & 0xff;
 
-    data[8] = mLastNTPTime >> (64 - 8);
-    data[9] = (mLastNTPTime >> (64 - 16)) & 0xff;
-    data[10] = (mLastNTPTime >> (64 - 24)) & 0xff;
-    data[11] = (mLastNTPTime >> 32) & 0xff;
-    data[12] = (mLastNTPTime >> 24) & 0xff;
-    data[13] = (mLastNTPTime >> 16) & 0xff;
-    data[14] = (mLastNTPTime >> 8) & 0xff;
-    data[15] = mLastNTPTime & 0xff;
+    uint64_t ntpTime = GetNowNTP();
+    data[8] = ntpTime >> (64 - 8);
+    data[9] = (ntpTime >> (64 - 16)) & 0xff;
+    data[10] = (ntpTime >> (64 - 24)) & 0xff;
+    data[11] = (ntpTime >> 32) & 0xff;
+    data[12] = (ntpTime >> 24) & 0xff;
+    data[13] = (ntpTime >> 16) & 0xff;
+    data[14] = (ntpTime >> 8) & 0xff;
+    data[15] = ntpTime & 0xff;
 
-    data[16] = (mLastRTPTime >> 24) & 0xff;
-    data[17] = (mLastRTPTime >> 16) & 0xff;
-    data[18] = (mLastRTPTime >> 8) & 0xff;
-    data[19] = mLastRTPTime & 0xff;
+    // The current rtpTime can be calculated from ALooper::GetNowUs().
+    // This assumes that the raw-frame timestamps coming from a media source
+    // are on the same time base used across the Android media framework
+    // components, i.e. the one queried via ALooper::GetNowUs().
+    // In other words, ALooper::GetNowUs() is on the same timeline as the
+    // kKeyTime value in a MediaBufferBase.
+    uint32_t rtpTime = getRtpTime(ALooper::GetNowUs());
+    data[16] = (rtpTime >> 24) & 0xff;
+    data[17] = (rtpTime >> 16) & 0xff;
+    data[18] = (rtpTime >> 8) & 0xff;
+    data[19] = rtpTime & 0xff;
 
     data[20] = mNumRTPSent >> 24;
     data[21] = (mNumRTPSent >> 16) & 0xff;
@@ -779,6 +800,13 @@
     return (hi << 32) | lo;
 }
 
+uint32_t ARTPWriter::getRtpTime(int64_t timeUs) {
+    int32_t clockPerMs = mClockRate / 1000;
+    int64_t rtpTime = mRTPTimeBase + (timeUs * clockPerMs / 1000LL);
+
+    return (uint32_t)rtpTime;
+}
+
 void ARTPWriter::dumpSessionDesc() {
     AString sdp;
     sdp = "v=0\r\n";
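getRtpTime() above replaces the hard-coded timeUs * 9 / 100 (which assumed a 90 kHz clock) with a conversion driven by mClockRate. A quick check of the math with a hypothetical helper:

    // Same conversion as getRtpTime(), minus the mRTPTimeBase offset.
    uint32_t rtpTicks(int64_t timeUs, int32_t clockRate) {
        return (uint32_t)(timeUs * (clockRate / 1000) / 1000);
    }
    // rtpTicks(1000000, 90000) == 90000  (one second at the 90 kHz video clock)
    // rtpTicks(20000, 8000)    == 160    (one 20 ms AMR-NB frame at 8 kHz)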
@@ -980,7 +1008,7 @@
 
     sendVPSSPSPPSIfIFrame(mediaBuf, timeUs);
 
-    uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100ll);
+    uint32_t rtpTime = getRtpTime(timeUs);
 
     CHECK(mediaBuf->range_length() > 0);
     const uint8_t *mediaData =
@@ -994,12 +1022,14 @@
     }
 
     sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
-
     if (mediaBuf->range_length() + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE
             + RTP_PAYLOAD_ROOM_SIZE <= buffer->capacity()) {
         // The data fits into a single packet
         uint8_t *data = buffer->data();
         data[0] = 0x80;
+        if (mRTPCVOExtMap > 0) {
+            data[0] |= 0x10;
+        }
         if (isNonVCL) {
             data[1] = mPayloadType;  // Marker bit should not be set in case of Non-VCL
         } else {
@@ -1016,144 +1046,6 @@
         data[10] = (mSourceID >> 8) & 0xff;
         data[11] = mSourceID & 0xff;
 
-        memcpy(&data[12],
-               mediaData, mediaBuf->range_length());
-
-        buffer->setRange(0, mediaBuf->range_length() + 12);
-
-        send(buffer, false /* isRTCP */);
-
-        ++mSeqNo;
-        ++mNumRTPSent;
-        mNumRTPOctetsSent += buffer->size() - 12;
-    } else {
-        // FU-A
-
-        unsigned nalType = (mediaData[0] >> 1) & H265_NALU_MASK;
-        ALOGV("H265 nalType 0x%x, data[0]=0x%x", nalType, mediaData[0]);
-        size_t offset = 2; //H265 payload header is 16 bit.
-
-        bool firstPacket = true;
-        while (offset < mediaBuf->range_length()) {
-            size_t size = mediaBuf->range_length() - offset;
-            bool lastPacket = true;
-            if (size + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE +
-                    RTP_FU_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
-                lastPacket = false;
-                size = buffer->capacity() - TCPIP_HEADER_SIZE - RTP_HEADER_SIZE -
-                    RTP_HEADER_EXT_SIZE - RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
-            }
-
-            uint8_t *data = buffer->data();
-            data[0] = 0x80;
-            data[1] = (lastPacket ? (1 << 7) : 0x00) | mPayloadType;  // M-bit
-            data[2] = (mSeqNo >> 8) & 0xff;
-            data[3] = mSeqNo & 0xff;
-            data[4] = rtpTime >> 24;
-            data[5] = (rtpTime >> 16) & 0xff;
-            data[6] = (rtpTime >> 8) & 0xff;
-            data[7] = rtpTime & 0xff;
-            data[8] = mSourceID >> 24;
-            data[9] = (mSourceID >> 16) & 0xff;
-            data[10] = (mSourceID >> 8) & 0xff;
-            data[11] = mSourceID & 0xff;
-
-            /*  H265 payload header is 16 bit
-                 0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7
-                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-                |F|     Type  |  Layer ID | TID |
-                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-            */
-            ALOGV("H265 payload header 0x%x %x", mediaData[0], mediaData[1]);
-            // excludes Type from 1st byte of H265 payload header.
-            data[12] = mediaData[0] & 0x81;
-            // fills Type as FU (49 == 0x31)
-            data[12] = data[12] | (0x31 << 1);
-            data[13] = mediaData[1];
-
-            ALOGV("H265 FU header 0x%x %x", data[12], data[13]);
-
-            CHECK(!firstPacket || !lastPacket);
-            /*
-                FU INDICATOR HDR
-                0 1 2 3 4 5 6 7
-                +-+-+-+-+-+-+-+
-                |S|E|   Type  |
-                +-+-+-+-+-+-+-+
-            */
-
-            data[14] =
-                (firstPacket ? 0x80 : 0x00)
-                | (lastPacket ? 0x40 : 0x00)
-                | (nalType & H265_NALU_MASK);
-            ALOGV("H265 FU indicator 0x%x", data[14]);
-
-            memcpy(&data[15], &mediaData[offset], size);
-
-            buffer->setRange(0, 15 + size);
-
-            send(buffer, false /* isRTCP */);
-
-            ++mSeqNo;
-            ++mNumRTPSent;
-            mNumRTPOctetsSent += buffer->size() - 12;
-
-            firstPacket = false;
-            offset += size;
-        }
-    }
-
-    mLastRTPTime = rtpTime;
-    mLastNTPTime = GetNowNTP();
-
-}
-
-void ARTPWriter::sendAVCData(MediaBufferBase *mediaBuf) {
-    // 12 bytes RTP header + 2 bytes for the FU-indicator and FU-header.
-    CHECK_GE(kMaxPacketSize, 12u + 2u);
-
-    int64_t timeUs;
-    CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
-
-    sendSPSPPSIfIFrame(mediaBuf, timeUs);
-
-    uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100LL);
-
-    CHECK(mediaBuf->range_length() > 0);
-    const uint8_t *mediaData =
-        (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
-
-    int32_t sps, pps;
-    bool isSpsPps = false;
-    if (mediaBuf->meta_data().findInt32(kKeySps, &sps) ||
-            mediaBuf->meta_data().findInt32(kKeyPps, &pps)) {
-        isSpsPps = true;
-    }
-
-    mTrafficRec->updateClock(ALooper::GetNowUs() / 1000);
-    sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
-    if (mediaBuf->range_length() + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE
-            + RTP_PAYLOAD_ROOM_SIZE <= buffer->capacity()) {
-        // The data fits into a single packet
-        uint8_t *data = buffer->data();
-        data[0] = 0x80;
-        if (mRTPCVOExtMap > 0)
-            data[0] |= 0x10;
-        if (isSpsPps)
-            data[1] = mPayloadType;  // Marker bit should not be set in case of sps/pps
-        else
-            data[1] = (1 << 7) | mPayloadType;
-        data[2] = (mSeqNo >> 8) & 0xff;
-        data[3] = mSeqNo & 0xff;
-        data[4] = rtpTime >> 24;
-        data[5] = (rtpTime >> 16) & 0xff;
-        data[6] = (rtpTime >> 8) & 0xff;
-        data[7] = rtpTime & 0xff;
-        data[8] = mSourceID >> 24;
-        data[9] = (mSourceID >> 16) & 0xff;
-        data[10] = (mSourceID >> 8) & 0xff;
-        data[11] = mSourceID & 0xff;
-
         int rtpExtIndex = 0;
         if (mRTPCVOExtMap > 0) {
             /*
@@ -1202,8 +1094,9 @@
     } else {
         // FU-A
 
-        unsigned nalType = mediaData[0];
-        size_t offset = 1;
+        unsigned nalType = (mediaData[0] >> 1) & H265_NALU_MASK;
+        ALOGV("H265 nalType 0x%x, data[0]=0x%x", nalType, mediaData[0]);
+        size_t offset = 2; //H265 payload header is 16 bit.
 
         bool firstPacket = true;
         while (offset < mediaBuf->range_length()) {
@@ -1218,8 +1111,9 @@
 
             uint8_t *data = buffer->data();
             data[0] = 0x80;
-            if (lastPacket && mRTPCVOExtMap > 0)
+            if (lastPacket && mRTPCVOExtMap > 0) {
                 data[0] |= 0x10;
+            }
             data[1] = (lastPacket ? (1 << 7) : 0x00) | mPayloadType;  // M-bit
             data[2] = (mSeqNo >> 8) & 0xff;
             data[3] = mSeqNo & 0xff;
@@ -1245,14 +1139,219 @@
                 rtpExtIndex = 8;
             }
 
-            data[12 + rtpExtIndex] = 28 | (nalType & 0xe0);
+            /*  H265 payload header is 16 bits
+                 0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7
+                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+                |F|    Type   |  Layer ID | TID |
+                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+            */
+            ALOGV("H265 payload header 0x%x %x", mediaData[0], mediaData[1]);
+            // excludes Type from 1st byte of H265 payload header.
+            data[12 + rtpExtIndex] = mediaData[0] & 0x81;
+            // fills Type as FU (49 == 0x31)
+            data[12 + rtpExtIndex] = data[12 + rtpExtIndex] | (0x31 << 1);
+            data[13 + rtpExtIndex] = mediaData[1];
+
+            ALOGV("H265 FU header 0x%x %x", data[12 + rtpExtIndex], data[13 + rtpExtIndex]);
 
             CHECK(!firstPacket || !lastPacket);
+            /*
+                H265 FU HEADER
+                 0 1 2 3 4 5 6 7
+                +-+-+-+-+-+-+-+-+
+                |S|E|   Type    |
+                +-+-+-+-+-+-+-+-+
+            */
+
+            data[14 + rtpExtIndex] =
+                (firstPacket ? 0x80 : 0x00)
+                | (lastPacket ? 0x40 : 0x00)
+                | (nalType & H265_NALU_MASK);
+            ALOGV("H265 FU indicator 0x%x", data[14]);
+
+            memcpy(&data[15 + rtpExtIndex], &mediaData[offset], size);
+
+            buffer->setRange(0, 15 + rtpExtIndex + size);
+
+            send(buffer, false /* isRTCP */);
+
+            ++mSeqNo;
+            ++mNumRTPSent;
+            mNumRTPOctetsSent += buffer->size() - (12 + rtpExtIndex);
+
+            firstPacket = false;
+            offset += size;
+        }
+    }
+}
+
+void ARTPWriter::sendAVCData(MediaBufferBase *mediaBuf) {
+    // 12 bytes RTP header + 2 bytes for the FU-indicator and FU-header.
+    CHECK_GE(kMaxPacketSize, 12u + 2u);
+
+    int64_t timeUs;
+    CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
+
+    sendSPSPPSIfIFrame(mediaBuf, timeUs);
+
+    uint32_t rtpTime = getRtpTime(timeUs);
+
+    CHECK(mediaBuf->range_length() > 0);
+    const uint8_t *mediaData =
+        (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
+
+    int32_t sps, pps;
+    bool isSpsPps = false;
+    if (mediaBuf->meta_data().findInt32(kKeySps, &sps) ||
+            mediaBuf->meta_data().findInt32(kKeyPps, &pps)) {
+        isSpsPps = true;
+    }
+
+    mTrafficRec->updateClock(ALooper::GetNowUs() / 1000);
+    sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
+    if (mediaBuf->range_length() + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE
+            + RTP_PAYLOAD_ROOM_SIZE <= buffer->capacity()) {
+        // The data fits into a single packet
+        uint8_t *data = buffer->data();
+        data[0] = 0x80;
+        if (mRTPCVOExtMap > 0) {
+            data[0] |= 0x10;
+        }
+        if (isSpsPps) {
+            data[1] = mPayloadType;  // Marker bit should not be set in case of sps/pps
+        } else {
+            data[1] = (1 << 7) | mPayloadType;
+        }
+        data[2] = (mSeqNo >> 8) & 0xff;
+        data[3] = mSeqNo & 0xff;
+        data[4] = rtpTime >> 24;
+        data[5] = (rtpTime >> 16) & 0xff;
+        data[6] = (rtpTime >> 8) & 0xff;
+        data[7] = rtpTime & 0xff;
+        data[8] = mSourceID >> 24;
+        data[9] = (mSourceID >> 16) & 0xff;
+        data[10] = (mSourceID >> 8) & 0xff;
+        data[11] = mSourceID & 0xff;
+
+        int rtpExtIndex = 0;
+        if (mRTPCVOExtMap > 0) {
+            /*
+                0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |       0xBE    |    0xDE       |           length=3            |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |  ID   | L=0   |     data      |  ID   |  L=1  |   data...
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+                     ...data   |    0 (pad)    |    0 (pad)    |  ID   | L=3   |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |                          data                                 |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+
+              In the one-byte header form of extensions, the 16-bit value required
+              by the RTP specification for a header extension, labeled in the RTP
+              specification as "defined by profile", takes the fixed bit pattern
+              0xBEDE (the first version of this specification was written on the
+              feast day of the Venerable Bede).
+            */
+            data[12] = 0xBE;
+            data[13] = 0xDE;
+            // put a length of RTP Extension.
+            data[14] = 0x00;
+            data[15] = 0x01;
+            // put extmap of RTP assigned for CVO.
+            data[16] = (mRTPCVOExtMap << 4) | 0x0;
+            // put image degrees as per CVO specification.
+            data[17] = mRTPCVODegrees;
+            data[18] = 0x0;
+            data[19] = 0x0;
+            rtpExtIndex = 8;
+        }
+
+        memcpy(&data[12 + rtpExtIndex],
+               mediaData, mediaBuf->range_length());
+
+        buffer->setRange(0, mediaBuf->range_length() + (12 + rtpExtIndex));
+
+        send(buffer, false /* isRTCP */);
+
+        ++mSeqNo;
+        ++mNumRTPSent;
+        mNumRTPOctetsSent += buffer->size() - (12 + rtpExtIndex);
+    } else {
+        // FU-A
+
+        unsigned nalType = mediaData[0] & H264_NALU_MASK;
+        ALOGV("H264 nalType 0x%x, data[0]=0x%x", nalType, mediaData[0]);
+        size_t offset = 1;
+
+        bool firstPacket = true;
+        while (offset < mediaBuf->range_length()) {
+            size_t size = mediaBuf->range_length() - offset;
+            bool lastPacket = true;
+            if (size + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE +
+                    RTP_FU_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
+                lastPacket = false;
+                size = buffer->capacity() - TCPIP_HEADER_SIZE - RTP_HEADER_SIZE -
+                    RTP_HEADER_EXT_SIZE - RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
+            }
+
+            uint8_t *data = buffer->data();
+            data[0] = 0x80;
+            if (lastPacket && mRTPCVOExtMap > 0) {
+                data[0] |= 0x10;
+            }
+            data[1] = (lastPacket ? (1 << 7) : 0x00) | mPayloadType;  // M-bit
+            data[2] = (mSeqNo >> 8) & 0xff;
+            data[3] = mSeqNo & 0xff;
+            data[4] = rtpTime >> 24;
+            data[5] = (rtpTime >> 16) & 0xff;
+            data[6] = (rtpTime >> 8) & 0xff;
+            data[7] = rtpTime & 0xff;
+            data[8] = mSourceID >> 24;
+            data[9] = (mSourceID >> 16) & 0xff;
+            data[10] = (mSourceID >> 8) & 0xff;
+            data[11] = mSourceID & 0xff;
+
+            int rtpExtIndex = 0;
+            if (lastPacket && mRTPCVOExtMap > 0) {
+                data[12] = 0xBE;
+                data[13] = 0xDE;
+                data[14] = 0x00;
+                data[15] = 0x01;
+                data[16] = (mRTPCVOExtMap << 4) | 0x0;
+                data[17] = mRTPCVODegrees;
+                data[18] = 0x0;
+                data[19] = 0x0;
+                rtpExtIndex = 8;
+            }
+
+            /*  H264 payload header is 8 bits
+                 0 1 2 3 4 5 6 7
+                +-+-+-+-+-+-+-+-+
+                |F|NRI|  Type   |
+                +-+-+-+-+-+-+-+-+
+            */
+            ALOGV("H264 payload header 0x%x", mediaData[0]);
+            // excludes Type from 1st byte of H264 payload header.
+            data[12 + rtpExtIndex] = mediaData[0] & 0xe0;
+            // fills Type as FU (28 == 0x1C)
+            data[12 + rtpExtIndex] = data[12 + rtpExtIndex] | 0x1C;
+
+            CHECK(!firstPacket || !lastPacket);
+            /*
+                FU header
+                 0 1 2 3 4 5 6 7
+                +-+-+-+-+-+-+-+-+
+                |S|E|R|  Type   |
+                +-+-+-+-+-+-+-+-+
+            */
 
             data[13 + rtpExtIndex] =
                 (firstPacket ? 0x80 : 0x00)
                 | (lastPacket ? 0x40 : 0x00)
-                | (nalType & 0x1f);
+                | (nalType & H264_NALU_MASK);
+            ALOGV("H264 FU header 0x%x", data[13]);
 
             memcpy(&data[14 + rtpExtIndex], &mediaData[offset], size);
 
@@ -1268,9 +1367,6 @@
             offset += size;
         }
     }
-
-    mLastRTPTime = rtpTime;
-    mLastNTPTime = GetNowNTP();
 }
 
 void ARTPWriter::sendH263Data(MediaBufferBase *mediaBuf) {
@@ -1279,7 +1375,7 @@
     int64_t timeUs;
     CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
 
-    uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100LL);
+    uint32_t rtpTime = getRtpTime(timeUs);
 
     const uint8_t *mediaData =
         (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
@@ -1330,9 +1426,6 @@
         ++mNumRTPSent;
         mNumRTPOctetsSent += buffer->size() - 12;
     }
-
-    mLastRTPTime = rtpTime;
-    mLastNTPTime = GetNowNTP();
 }
 
 void ARTPWriter::updateCVODegrees(int32_t cvoDegrees) {
@@ -1415,7 +1508,7 @@
 
     int64_t timeUs;
     CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
-    uint32_t rtpTime = mRTPTimeBase + (timeUs / (isWide ? 250 : 125));
+    uint32_t rtpTime = getRtpTime(timeUs);
 
     // hexdump(mediaData, mediaLength);
 
@@ -1489,9 +1582,6 @@
     ++mSeqNo;
     ++mNumRTPSent;
     mNumRTPOctetsSent += buffer->size() - 12;
-
-    mLastRTPTime = rtpTime;
-    mLastNTPTime = GetNowNTP();
 }
 
 void ARTPWriter::makeSocketPairAndBind(String8& localIp, int localPort,
diff --git a/media/libstagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/ARTPWriter.h
index 28d6ec5..2982cf6 100644
--- a/media/libstagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/ARTPWriter.h
@@ -108,14 +108,13 @@
     MediaBufferBase *mSPSBuf;
     MediaBufferBase *mPPSBuf;
 
+    uint32_t mClockRate;
     uint32_t mSourceID;
     uint32_t mPayloadType;
     uint32_t mSeqNo;
     uint32_t mRTPTimeBase;
     uint32_t mNumRTPSent;
     uint32_t mNumRTPOctetsSent;
-    uint32_t mLastRTPTime;
-    uint64_t mLastNTPTime;
 
     uint32_t mOpponentID;
     uint32_t mBitrate;
@@ -136,6 +135,7 @@
     } mMode;
 
     static uint64_t GetNowNTP();
+    uint32_t getRtpTime(int64_t timeUs);
 
     void initState();
     void onRead(const sp<AMessage> &msg);
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 0fdf431..988cec7 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -74,7 +74,8 @@
 
 // The allowed maximum number of stale access units at the beginning of
 // a new sequence.
-static int32_t kMaxAllowedStaleAccessUnits = 20;
+static int32_t kMaxAllowedStaleAudioAccessUnits = 20;
+static int32_t kMaxAllowedStaleVideoAccessUnits = 400;
 
 static int64_t kTearDownTimeoutUs = 3000000ll;
 
@@ -108,6 +109,10 @@
     }
 }
 
+static int32_t GetMaxAllowedStaleCount(bool isVideo) {
+    return isVideo ? kMaxAllowedStaleVideoAccessUnits : kMaxAllowedStaleAudioAccessUnits;
+}
+
 struct MyHandler : public AHandler {
     enum {
         kWhatConnected                  = 'conn',
@@ -1330,6 +1335,8 @@
 
                         ALOGV("rtp-info: %s", response->mHeaders.valueAt(i).c_str());
 
+                        mRTPConn->seekStream();
+
                         ALOGI("seek completed.");
                     }
                 }
@@ -1514,7 +1521,7 @@
             TrackInfo *info = &mTracks.editItemAt(trackIndex);
             info->mFirstSeqNumInSegment = seq;
             info->mNewSegment = true;
-            info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
+            info->mAllowedStaleAccessUnits = GetMaxAllowedStaleCount(info->mIsVideo);
 
             CHECK(GetAttribute((*it).c_str(), "rtptime", &val));
 
@@ -1556,6 +1563,7 @@
         int mRTPSocket;
         int mRTCPSocket;
         bool mUsingInterleavedTCP;
+        bool mIsVideo;
         uint32_t mFirstSeqNumInSegment;
         bool mNewSegment;
         int32_t mAllowedStaleAccessUnits;
@@ -1640,9 +1648,10 @@
         info->mURL = trackURL;
         info->mPacketSource = source;
         info->mUsingInterleavedTCP = false;
+        info->mIsVideo = source->isVideo();
         info->mFirstSeqNumInSegment = 0;
         info->mNewSegment = true;
-        info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
+        info->mAllowedStaleAccessUnits = GetMaxAllowedStaleCount(info->mIsVideo);
         info->mRTPSocket = -1;
         info->mRTCPSocket = -1;
         info->mRTPAnchor = 0;
@@ -1838,11 +1847,12 @@
                 // by ARTPSource. Only the low 16 bits of seq in RTP-Info of reply of
                 // RTSP "PLAY" command should be used to detect the first RTP packet
                 // after seeking.
+                int32_t maxAllowedStaleAccessUnits = GetMaxAllowedStaleCount(track->mIsVideo);
                 if (mSeekable) {
                     if (track->mAllowedStaleAccessUnits > 0) {
                         uint32_t seqNum16 = seqNum & 0xffff;
                         uint32_t firstSeqNumInSegment16 = track->mFirstSeqNumInSegment & 0xffff;
-                        if (seqNum16 > firstSeqNumInSegment16 + kMaxAllowedStaleAccessUnits
+                        if (seqNum16 > firstSeqNumInSegment16 + maxAllowedStaleAccessUnits
                                 || seqNum16 < firstSeqNumInSegment16) {
                             // Not the first rtp packet of the stream after seeking, discarding.
                             track->mAllowedStaleAccessUnits--;
@@ -1857,7 +1867,7 @@
                         mNumAccessUnitsReceived = 0;
                         ALOGW_IF(track->mAllowedStaleAccessUnits == 0,
                              "Still no first rtp packet after %d stale ones",
-                             kMaxAllowedStaleAccessUnits);
+                             maxAllowedStaleAccessUnits);
                         track->mAllowedStaleAccessUnits = -1;
                         return UNKNOWN_ERROR;
                     }
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index af21a99..b1a7f43 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -412,17 +412,19 @@
         uint32_t flags) {
     sp<AMessage> nativeFormat;
     AMediaFormat_getFormat(format, &nativeFormat);
-    ALOGV("configure with format: %s", nativeFormat->debugString(0).c_str());
+    // create our shallow copy, so we aren't victim to any later changes.
+    sp<AMessage> dupNativeFormat = nativeFormat->dup();
+    ALOGV("configure with format: %s", dupNativeFormat->debugString(0).c_str());
     sp<Surface> surface = NULL;
     if (window != NULL) {
         surface = (Surface*) window;
     }
 
-    status_t err = mData->mCodec->configure(nativeFormat, surface,
+    status_t err = mData->mCodec->configure(dupNativeFormat, surface,
             crypto ? crypto->mCrypto : NULL, flags);
     if (err != OK) {
         ALOGE("configure: err(%d), failed with format: %s",
-              err, nativeFormat->debugString(0).c_str());
+              err, dupNativeFormat->debugString(0).c_str());
     }
     return translate_error(err);
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
index 2e06da5..4b44dcf 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
@@ -69,7 +69,6 @@
 java_defaults {
     name: "MediaBenchmark-defaults",
 
-    sdk_version: "system_current",
     min_sdk_version: "28",
-    target_sdk_version: "29",
+    target_sdk_version: "30",
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/build.gradle b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
index b2aee1a..b222d47 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/build.gradle
+++ b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
@@ -17,21 +17,21 @@
 buildscript {
     repositories {
         google()
-        jcenter()
+        mavenCentral()
     }
     dependencies {
-        classpath 'com.android.tools.build:gradle:3.5.0'
+        classpath 'com.android.tools.build:gradle:4.2.1'
     }
 }
 
 apply plugin: 'com.android.application'
 
 android {
-    compileSdkVersion 29
+    compileSdkVersion 30
     defaultConfig {
         applicationId "com.android.media.benchmark"
         minSdkVersion 28
-        targetSdkVersion 29
+        targetSdkVersion 30
         versionCode 1
         versionName "1.0"
         testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
@@ -57,20 +57,20 @@
     externalNativeBuild {
         cmake {
             path "src/main/cpp/CMakeLists.txt"
-            version "3.10.2"
+            version "3.18.1"
         }
     }
 }
 
 repositories {
     google()
-    jcenter()
+    mavenCentral()
 }
 
 dependencies {
     implementation fileTree(dir: 'libs', include: ['*.jar'])
-    implementation 'androidx.appcompat:appcompat:1.1.0'
-    testImplementation 'junit:junit:4.12'
-    androidTestImplementation 'androidx.test:runner:1.2.0'
-    androidTestImplementation 'androidx.test.ext:junit:1.1.1'
+    implementation 'androidx.appcompat:appcompat:1.3.0'
+    testImplementation 'junit:junit:4.13.2'
+    androidTestImplementation 'androidx.test:runner:1.3.0'
+    androidTestImplementation 'androidx.test.ext:junit:1.1.2'
 }
\ No newline at end of file
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
index af92424..0192d68 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
@@ -9,7 +9,6 @@
 
 cc_test_library {
     name: "libmediabenchmark_jni",
-    sdk_version: "current",
 
     defaults: [
         "libmediabenchmark_common-defaults",
diff --git a/media/tests/benchmark/src/native/common/Android.bp b/media/tests/benchmark/src/native/common/Android.bp
index 6b54c6a..718d217 100644
--- a/media/tests/benchmark/src/native/common/Android.bp
+++ b/media/tests/benchmark/src/native/common/Android.bp
@@ -55,7 +55,6 @@
 
 cc_defaults {
     name: "libmediabenchmark-defaults",
-    sdk_version: "current",
     stl: "c++_shared",
 
     shared_libs: [
diff --git a/media/tests/benchmark/src/native/extractor/Extractor.cpp b/media/tests/benchmark/src/native/extractor/Extractor.cpp
index f0bb3b9..3bdfbad 100644
--- a/media/tests/benchmark/src/native/extractor/Extractor.cpp
+++ b/media/tests/benchmark/src/native/extractor/Extractor.cpp
@@ -124,9 +124,7 @@
 
     int64_t sTime = mStats->getCurTime();
     if (mExtractor) {
-        // TODO: (b/140128505) Multiple calls result in DoS.
-        // Uncomment call to AMediaExtractor_delete() once this is resolved
-        // AMediaExtractor_delete(mExtractor);
+        AMediaExtractor_delete(mExtractor);
         mExtractor = nullptr;
     }
     int64_t eTime = mStats->getCurTime();
diff --git a/media/tests/benchmark/tests/Android.bp b/media/tests/benchmark/tests/Android.bp
index 0fbd20d..9a8caa3 100644
--- a/media/tests/benchmark/tests/Android.bp
+++ b/media/tests/benchmark/tests/Android.bp
@@ -33,7 +33,12 @@
 
     srcs: ["ExtractorTest.cpp"],
 
-    static_libs: ["libmediabenchmark_extractor"]
+    static_libs: ["libmediabenchmark_extractor"],
+
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+    ],
 }
 
 cc_test {
@@ -50,6 +55,11 @@
         "libmediabenchmark_extractor",
         "libmediabenchmark_decoder",
     ],
+
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+    ],
 }
 
 cc_test {
diff --git a/media/tests/benchmark/tests/DecoderTest.cpp b/media/tests/benchmark/tests/DecoderTest.cpp
index 81ef02a..3666724 100644
--- a/media/tests/benchmark/tests/DecoderTest.cpp
+++ b/media/tests/benchmark/tests/DecoderTest.cpp
@@ -21,6 +21,8 @@
 #include <iostream>
 #include <limits>
 
+#include <android/binder_process.h>
+
 #include "BenchmarkTestEnvironment.h"
 #include "Decoder.h"
 
@@ -175,6 +177,7 @@
                                             "c2.android.hevc.decoder", true)));
 
 int main(int argc, char **argv) {
+    ABinderProcess_startThreadPool();
     gEnv = new BenchmarkTestEnvironment();
     ::testing::AddGlobalTestEnvironment(gEnv);
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/tests/benchmark/tests/ExtractorTest.cpp b/media/tests/benchmark/tests/ExtractorTest.cpp
index d14d15b..27ee9ba 100644
--- a/media/tests/benchmark/tests/ExtractorTest.cpp
+++ b/media/tests/benchmark/tests/ExtractorTest.cpp
@@ -19,6 +19,8 @@
 
 #include <gtest/gtest.h>
 
+#include <android/binder_process.h>
+
 #include "BenchmarkTestEnvironment.h"
 #include "Extractor.h"
 
@@ -73,6 +75,7 @@
                                                      0)));
 
 int main(int argc, char **argv) {
+    ABinderProcess_startThreadPool();
     gEnv = new BenchmarkTestEnvironment();
     ::testing::AddGlobalTestEnvironment(gEnv);
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 4ac46b7..b8782d7 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -976,8 +976,9 @@
                 }
             }
         }
-
-        setAudioHwSyncForSession_l(thread, sessionId);
+        if ((output.flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) == AUDIO_OUTPUT_FLAG_HW_AV_SYNC) {
+            setAudioHwSyncForSession_l(thread, sessionId);
+        }
     }
 
     if (lStatus != NO_ERROR) {
diff --git a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
index 8c61b90..5986069 100644
--- a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
@@ -30,9 +30,9 @@
 // --- PolicyAudioPort class implementation
 void PolicyAudioPort::attach(const sp<HwModule>& module)
 {
+    mModule = module;
     ALOGV("%s: attaching module %s to port %s",
             __FUNCTION__, getModuleName(), asAudioPort()->getName().c_str());
-    mModule = module;
 }
 
 void PolicyAudioPort::detach()
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
index 98415b7..22ff954 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
@@ -22,6 +22,17 @@
                      samplingRates="8000,16000,24000,32000,44100,48000"
                      channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
         </mixPort>
+        <mixPort name="le audio input" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
+        </mixPort>
     </mixPorts>
     <devicePorts>
         <!-- A2DP Audio Ports -->
@@ -49,6 +60,7 @@
         -->
         <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
         <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
+        <devicePort tagName="BLE Headset In" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source"/>
     </devicePorts>
     <routes>
         <route type="mix" sink="BT A2DP Out"
@@ -61,6 +73,8 @@
                sources="hearing aid output"/>
         <route type="mix" sink="BLE Headset Out"
                sources="le audio output"/>
+        <route type="mix" sink="le audio input"
+               sources="BLE Headset In"/>
         <route type="mix" sink="BLE Speaker Out"
                sources="le audio output"/>
     </routes>
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
index fbe7571..aad00d6 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
@@ -22,6 +22,17 @@
                      samplingRates="8000 16000 24000 32000 44100 48000"
                      channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
         </mixPort>
+        <mixPort name="le audio input" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="8000 16000 24000 32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
+                     samplingRates="8000 16000 24000 32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
+                     samplingRates="8000 16000 24000 32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+        </mixPort>
     </mixPorts>
     <devicePorts>
         <!-- A2DP Audio Ports -->
@@ -45,6 +56,7 @@
         <!-- BLE Audio Ports -->
         <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
         <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
+        <devicePort tagName="BLE Headset In" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source"/>
     </devicePorts>
     <routes>
         <route type="mix" sink="BT A2DP Out"
@@ -57,6 +69,8 @@
                sources="hearing aid output"/>
         <route type="mix" sink="BLE Headset Out"
                sources="le audio output"/>
+        <route type="mix" sink="le audio input"
+               sources="BLE Headset In"/>
         <route type="mix" sink="BLE Speaker Out"
                sources="le audio output"/>
     </routes>
diff --git a/services/audiopolicy/config/le_audio_policy_configuration.xml b/services/audiopolicy/config/le_audio_policy_configuration.xml
index a3dc72b..dcdd805 100644
--- a/services/audiopolicy/config/le_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/le_audio_policy_configuration.xml
@@ -7,13 +7,20 @@
                      samplingRates="8000,16000,24000,32000,44100,48000"
                      channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
         </mixPort>
+        <mixPort name="le audio input" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT,AUDIO_FORMAT_PCM_24_BIT,AUDIO_FORMAT_PCM_32_BIT"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
+        </mixPort>
     </mixPorts>
     <devicePorts>
         <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
         <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
+        <devicePort tagName="BLE Headset In" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source"/>
     </devicePorts>
     <routes>
         <route type="mix" sink="BLE Headset Out" sources="le audio output"/>
         <route type="mix" sink="BLE Speaker Out" sources="le audio output"/>
+        <route type="mix" sink="le audio input" sources="BLE Headset In"/>
     </routes>
 </module>
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index e0a0bbb..1fda6f1 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -479,7 +479,7 @@
         switch (commDeviceType) {
         case AUDIO_DEVICE_OUT_BLE_HEADSET:
             device = availableDevices.getDevice(
-                    AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
+                    AUDIO_DEVICE_IN_BLE_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
             break;
         case AUDIO_DEVICE_OUT_SPEAKER:
             device = availableDevices.getFirstExistingDevice({
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 91dda92..ae7e05c 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -3758,25 +3758,13 @@
         ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.string());
         return;
     }
-    bool supportsHAL3 = false;
-    // supportsCameraApi also holds mInterfaceMutex, we can't call it in the
-    // HIDL onStatusChanged wrapper call (we'll hold mStatusListenerLock and
-    // mInterfaceMutex together, which can lead to deadlocks)
-    binder::Status sRet =
-            supportsCameraApi(String16(cameraId), hardware::ICameraService::API_VERSION_2,
-                    &supportsHAL3);
-    if (!sRet.isOk()) {
-        ALOGW("%s: Failed to determine if device supports HAL3 %s, supportsCameraApi call failed",
-                __FUNCTION__, cameraId.string());
-        return;
-    }
 
     // Collect the logical cameras without holding mStatusLock in updateStatus
     // as that can lead to a deadlock(b/162192331).
     auto logicalCameraIds = getLogicalCameras(cameraId);
     // Update the status for this camera state, then send the onStatusChangedCallbacks to each
     // of the listeners with both the mStatusLock and mStatusListenerLock held
-    state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind, &supportsHAL3,
+    state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind,
                         &logicalCameraIds]
             (const String8& cameraId, StatusInternal status) {
 
@@ -3804,8 +3792,8 @@
                 bool isVendorListener = listener->isVendorListener();
                 if (shouldSkipStatusUpdates(deviceKind, isVendorListener,
                         listener->getListenerPid(), listener->getListenerUid()) ||
-                        (isVendorListener && !supportsHAL3)) {
-                    ALOGV("Skipping discovery callback for system-only camera/HAL1 device %s",
+                        isVendorListener) {
+                    ALOGV("Skipping discovery callback for system-only camera device %s",
                             cameraId.c_str());
                     continue;
                 }
diff --git a/services/mediacodec/registrant/Android.bp b/services/mediacodec/registrant/Android.bp
index 696b967..d10e339 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/services/mediacodec/registrant/Android.bp
@@ -7,7 +7,7 @@
     default_applicable_licenses: ["frameworks_av_services_mediacodec_license"],
 }
 
-cc_library_shared {
+cc_library {
     name: "libmedia_codecserviceregistrant",
     vendor_available: true,
     srcs: [
diff --git a/services/mediacodec/registrant/fuzzer/Android.bp b/services/mediacodec/registrant/fuzzer/Android.bp
new file mode 100644
index 0000000..43afbf1
--- /dev/null
+++ b/services/mediacodec/registrant/fuzzer/Android.bp
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_services_mediacodec_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_mediacodec_license"],
+}
+
+cc_fuzz {
+    name: "codecServiceRegistrant_fuzzer",
+    srcs: [
+        "codecServiceRegistrant_fuzzer.cpp",
+    ],
+    static_libs: [
+        "libmedia_codecserviceregistrant",
+    ],
+    header_libs: [
+        "libmedia_headers",
+    ],
+    defaults: [
+        "libcodec2-hidl-defaults",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/services/mediacodec/registrant/fuzzer/README.md b/services/mediacodec/registrant/fuzzer/README.md
new file mode 100644
index 0000000..0ffa063
--- /dev/null
+++ b/services/mediacodec/registrant/fuzzer/README.md
@@ -0,0 +1,56 @@
+# Fuzzer for libmedia_codecserviceregistrant
+
+## Plugin Design Considerations
+The fuzzer plugin for libmedia_codecserviceregistrant is designed based on an understanding of the library and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+libmedia_codecserviceregistrant supports the following parameters:
+1. C2String (parameter name: `c2String`)
+2. Width (parameter name: `width`)
+3. Height (parameter name: `height`)
+4. SamplingRate (parameter name: `samplingRate`)
+5. Channels (parameter name: `channels`)
+6. Stream (parameter name: `stream`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `c2String` |`String` | Value obtained from FuzzedDataProvider|
+| `width` |`UINT32_MIN` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+| `height` |`UINT32_MIN` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+| `samplingRate` |`UINT32_MIN` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+| `channels` |`UINT32_MIN` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+| `stream` |`UINT32_MIN` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+
+This also ensures that the plugin is always deterministic for any given input.
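+
+A minimal sketch of this approach is shown below (illustrative only, not part of the plugin sources; `fdp` and the locals are placeholder names, while `LLVMFuzzerTestOneInput` is the standard libFuzzer entry point):
+```
+  #include "fuzzer/FuzzedDataProvider.h"
+
+  extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+    // Parameter values come from the incoming data instead of being hardcoded.
+    uint32_t width = fdp.ConsumeIntegral<uint32_t>();
+    uint32_t height = fdp.ConsumeIntegral<uint32_t>();
+    std::string c2String = fdp.ConsumeRandomLengthString();
+    (void)width; (void)height; (void)c2String;
+    return 0;
+  }
+```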
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the libmedia_codecserviceregistrant module.
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the codecServiceRegistrant_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) codecServiceRegistrant_fuzzer
+```
+#### Steps to run
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/${TARGET_ARCH}/codecServiceRegistrant_fuzzer/codecServiceRegistrant_fuzzer
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp b/services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
new file mode 100644
index 0000000..e5983e4
--- /dev/null
+++ b/services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "../CodecServiceRegistrant.cpp"
+#include "fuzzer/FuzzedDataProvider.h"
+#include <C2Config.h>
+#include <C2Param.h>
+
+using namespace std;
+
+constexpr char kServiceName[] = "software";
+
+class CodecServiceRegistrantFuzzer {
+public:
+  void process(const uint8_t *data, size_t size);
+  ~CodecServiceRegistrantFuzzer() {
+    delete mH2C2;
+    if (mInputSize) {
+      delete mInputSize;
+    }
+    if (mSampleRateInfo) {
+      delete mSampleRateInfo;
+    }
+    if (mChannelCountInfo) {
+      delete mChannelCountInfo;
+    }
+  }
+
+private:
+  void initH2C2ComponentStore();
+  void invokeH2C2ComponentStore();
+  void invokeConfigSM();
+  void invokeQuerySM();
+  H2C2ComponentStore *mH2C2 = nullptr;
+  C2StreamPictureSizeInfo::input *mInputSize = nullptr;
+  C2StreamSampleRateInfo::output *mSampleRateInfo = nullptr;
+  C2StreamChannelCountInfo::output *mChannelCountInfo = nullptr;
+  C2Param::Index mIndex = C2StreamProfileLevelInfo::output::PARAM_TYPE;
+  C2StreamFrameRateInfo::output mFrameRate;
+  FuzzedDataProvider *mFDP = nullptr;
+};
+
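+// Registers the software Codec2 component store as a service and wraps the
+// retrieved service in an H2C2ComponentStore (mH2C2) for fuzzing.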
+void CodecServiceRegistrantFuzzer::initH2C2ComponentStore() {
+  using namespace ::android::hardware::media::c2;
+  shared_ptr<C2ComponentStore> store =
+      android::GetCodec2PlatformComponentStore();
+  if (!store) {
+    return;
+  }
+  android::sp<V1_1::IComponentStore> storeV1_1 =
+      new V1_1::utils::ComponentStore(store);
+  if (storeV1_1->registerAsService(string(kServiceName)) != android::OK) {
+    return;
+  }
+  string const preferredStoreName = string(kServiceName);
+  sp<IComponentStore> preferredStore =
+      IComponentStore::getService(preferredStoreName.c_str());
+  mH2C2 = new H2C2ComponentStore(preferredStore);
+}
+
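+// Builds a fuzzed parameter set (picture size, or sample rate / channel count)
+// and applies it through config_sm().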
+void CodecServiceRegistrantFuzzer::invokeConfigSM() {
+  vector<C2Param *> configParams;
+  uint32_t width = mFDP->ConsumeIntegral<uint32_t>();
+  uint32_t height = mFDP->ConsumeIntegral<uint32_t>();
+  uint32_t samplingRate = mFDP->ConsumeIntegral<uint32_t>();
+  uint32_t channels = mFDP->ConsumeIntegral<uint32_t>();
+  if (mFDP->ConsumeBool()) {
+    mInputSize = new C2StreamPictureSizeInfo::input(0u, width, height);
+    configParams.push_back(mInputSize);
+  } else {
+    if (mFDP->ConsumeBool()) {
+      mSampleRateInfo = new C2StreamSampleRateInfo::output(0u, samplingRate);
+      configParams.push_back(mSampleRateInfo);
+    }
+    if (mFDP->ConsumeBool()) {
+      mChannelCountInfo = new C2StreamChannelCountInfo::output(0u, channels);
+      configParams.push_back(mChannelCountInfo);
+    }
+  }
+  vector<unique_ptr<C2SettingResult>> failures;
+  mH2C2->config_sm(configParams, &failures);
+}
+
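+// Queries either an empty parameter set or a fuzzed frame-rate stream plus a
+// profile/level index through query_sm().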
+void CodecServiceRegistrantFuzzer::invokeQuerySM() {
+  vector<C2Param *> stackParams;
+  vector<C2Param::Index> heapParamIndices;
+  if (mFDP->ConsumeBool()) {
+    stackParams = {};
+    heapParamIndices = {};
+  } else {
+    uint32_t stream = mFDP->ConsumeIntegral<uint32_t>();
+    mFrameRate.setStream(stream);
+    stackParams.push_back(&mFrameRate);
+    heapParamIndices.push_back(mIndex);
+  }
+  vector<unique_ptr<C2Param>> heapParams;
+  mH2C2->query_sm(stackParams, heapParamIndices, &heapParams);
+}
+
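+// Exercises the main H2C2ComponentStore entry points: component/interface
+// creation, config, query, supported values, listing, and buffer copy.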
+void CodecServiceRegistrantFuzzer::invokeH2C2ComponentStore() {
+  initH2C2ComponentStore();
+  shared_ptr<C2Component> component;
+  shared_ptr<C2ComponentInterface> interface;
+  string c2String = mFDP->ConsumeRandomLengthString();
+  mH2C2->createComponent(c2String, &component);
+  mH2C2->createInterface(c2String, &interface);
+  invokeConfigSM();
+  invokeQuerySM();
+
+  vector<shared_ptr<C2ParamDescriptor>> params;
+  mH2C2->querySupportedParams_nb(&params);
+
+  C2StoreIonUsageInfo usageInfo;
+  std::vector<C2FieldSupportedValuesQuery> query = {
+      C2FieldSupportedValuesQuery::Possible(
+          C2ParamField::Make(usageInfo, usageInfo.usage)),
+      C2FieldSupportedValuesQuery::Possible(
+          C2ParamField::Make(usageInfo, usageInfo.capacity)),
+  };
+  mH2C2->querySupportedValues_sm(query);
+
+  mH2C2->getName();
+  shared_ptr<C2ParamReflector> paramReflector = mH2C2->getParamReflector();
+  if (paramReflector) {
+    paramReflector->describe(C2ComponentDomainSetting::CORE_INDEX);
+  }
+  mH2C2->listComponents();
+  shared_ptr<C2GraphicBuffer> src;
+  shared_ptr<C2GraphicBuffer> dst;
+  mH2C2->copyBuffer(src, dst);
+}
+
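+// Runs one fuzz iteration: wraps the input in a FuzzedDataProvider and drives
+// the component store APIs and codec service registration.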
+void CodecServiceRegistrantFuzzer::process(const uint8_t *data, size_t size) {
+  mFDP = new FuzzedDataProvider(data, size);
+  invokeH2C2ComponentStore();
+  // RegisterCodecServices is called here to improve code coverage,
+  // as it is currently not called by codecServiceRegistrant.
+  RegisterCodecServices();
+  delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  CodecServiceRegistrantFuzzer codecServiceRegistrantFuzzer;
+  codecServiceRegistrantFuzzer.process(data, size);
+  return 0;
+}
diff --git a/services/mediametrics/TransactionLog.h b/services/mediametrics/TransactionLog.h
index 0ca4639..fd42518 100644
--- a/services/mediametrics/TransactionLog.h
+++ b/services/mediametrics/TransactionLog.h
@@ -158,7 +158,7 @@
                 ++it) {
             if (ll <= 0) break;
             if (prefix != nullptr && !startsWith(it->first, prefix)) break;
-            auto [s, l] = dumpMapTimeItem(it->second, ll - 1, sinceNs, prefix);
+            std::tie(s, l) = dumpMapTimeItem(it->second, ll - 1, sinceNs, prefix);
             if (l == 0) continue; // don't show empty groups (due to sinceNs).
             ss << " " << it->first << "\n" << s;
             ll -= l + 1;
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
index 0cb2594..6604511 100644
--- a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -48,6 +48,7 @@
     void invokeAudioAnalytics(const uint8_t *data, size_t size);
     void invokeTimedAction(const uint8_t *data, size_t size);
     void process(const uint8_t *data, size_t size);
+    std::atomic_int mValue = 0;
 };
 
 void MediaMetricsServiceFuzzer::invokeStartsWith(const uint8_t *data, size_t size) {
@@ -340,11 +341,10 @@
 void MediaMetricsServiceFuzzer::invokeTimedAction(const uint8_t *data, size_t size) {
     FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
     android::mediametrics::TimedAction timedAction;
-    std::atomic_int value = 0;
 
     while (fdp.remaining_bytes()) {
         timedAction.postIn(std::chrono::seconds(fdp.ConsumeIntegral<int32_t>()),
-                           [&value] { ++value; });
+                           [this] { ++mValue; });
         timedAction.size();
     }
 }
diff --git a/services/mediatranscoding/Android.bp b/services/mediatranscoding/Android.bp
index a895071..6c390c8 100644
--- a/services/mediatranscoding/Android.bp
+++ b/services/mediatranscoding/Android.bp
@@ -32,7 +32,7 @@
     ],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
     ],
 
     cflags: [
@@ -60,7 +60,7 @@
     ],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
     ],
 
     init_rc: ["mediatranscoding.rc"],
diff --git a/services/mediatranscoding/tests/Android.bp b/services/mediatranscoding/tests/Android.bp
index a856c05..9b18d43 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/services/mediatranscoding/tests/Android.bp
@@ -34,7 +34,7 @@
     ],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
     ],
 }
 
diff --git a/services/minijail/Android.bp b/services/minijail/Android.bp
index 3a89e12..038197f 100644
--- a/services/minijail/Android.bp
+++ b/services/minijail/Android.bp
@@ -31,17 +31,6 @@
     export_include_dirs: ["."],
 }
 
-// By adding "vendor_available: true" to "libavservices_minijail", we don't
-// need to have "libavservices_minijail_vendor" any longer.
-// "libavservices_minijail_vendor" will be removed, once we replace it with
-// "libavservices_minijail" in all vendor modules. (b/146313710)
-cc_library_shared {
-    name: "libavservices_minijail_vendor",
-    vendor: true,
-    defaults: ["libavservices_minijail_defaults"],
-    export_include_dirs: ["."],
-}
-
 // Unit tests.
 cc_test {
     name: "libavservices_minijail_unittest",