Merge "libdatasource: fix null dereference"
diff --git a/apex/Android.bp b/apex/Android.bp
index 4d194bd..dabf4c2 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -64,6 +64,9 @@
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
     min_sdk_version: "29",
+    // Indicates that the pre-installed version of this APEX can be compressed.
+    // Whether it actually will be compressed is controlled on a per-device basis.
+    compressible: true,
 }
 
 apex {
@@ -120,6 +123,9 @@
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
     min_sdk_version: "29",
+    // Indicates that the pre-installed version of this APEX can be compressed.
+    // Whether it actually will be compressed is controlled on a per-device basis.
+    compressible: true,
 }
 
 prebuilt_etc {
diff --git a/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h b/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h
index a537e63..7c6d86c 100644
--- a/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h
+++ b/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h
@@ -22,7 +22,6 @@
 #include <openssl/aes.h>
 #include <utils/KeyedVector.h>
 #include <utils/Mutex.h>
-#include <utils/RefBase.h>
 
 namespace android {
 struct ABuffer;
@@ -30,7 +29,7 @@
 namespace clearkeycas {
 class KeyFetcher;
 
-class ClearKeyCasSession : public RefBase {
+class ClearKeyCasSession {
 public:
     explicit ClearKeyCasSession(CasPlugin *plugin);
 
diff --git a/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
index 1b8b8c1..6ac3510 100644
--- a/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
@@ -109,6 +109,7 @@
 }
 
 void DrmPlugin::setPlayPolicy() {
+    android::Mutex::Autolock lock(mPlayPolicyLock);
     mPlayPolicy.clear();
     mPlayPolicy.add(kQueryKeyLicenseType, kStreaming);
     mPlayPolicy.add(kQueryKeyPlayAllowed, kTrue);
diff --git a/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
index 4fa42e5..aa9b59d 100644
--- a/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
@@ -262,6 +262,7 @@
     void initProperties();
     void setPlayPolicy();
 
+    android::Mutex mPlayPolicyLock;
     android::KeyedVector<String8, String8> mPlayPolicy;
     android::KeyedVector<String8, String8> mStringProperties;
     android::KeyedVector<String8, Vector<uint8_t>> mByteArrayProperties;
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
index 1495703..d278633 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
@@ -119,7 +119,11 @@
         return Void();
     }
 
-    if (source.offset + offset + source.size > sourceBase->getSize()) {
+    size_t totalSize = 0;
+    if (__builtin_add_overflow(source.offset, offset, &totalSize) ||
+        __builtin_add_overflow(totalSize, source.size, &totalSize) ||
+        totalSize > sourceBase->getSize()) {
+        android_errorWriteLog(0x534e4554, "176496160");
         _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0, "invalid buffer size");
         return Void();
     }
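
The new check above refuses the operation when source.offset + offset + source.size overflows before it is compared against sourceBase->getSize(); a plain a + b + c > limit test can wrap around and pass. A self-contained sketch of the same idiom, with hypothetical names rather than the plugin's own:

    #include <cstddef>
    #include <cstdint>

    // Returns true only when [base + extra, base + extra + size) fits inside a
    // buffer of totalBytes, rejecting any sum that wraps around size_t.
    static bool rangeFitsInBuffer(size_t base, size_t extra, size_t size, size_t totalBytes) {
        size_t end = 0;
        if (__builtin_add_overflow(base, extra, &end) ||
            __builtin_add_overflow(end, size, &end)) {
            return false;  // the sum wrapped, so the range cannot be valid
        }
        return end <= totalBytes;
    }

    int main() {
        // A request whose end wraps past SIZE_MAX is rejected even though the
        // wrapped value would compare as "small" in a naive check.
        return rangeFitsInBuffer(SIZE_MAX - 8, 4, 16, 4096) ? 1 : 0;
    }

__builtin_add_overflow is the GCC/Clang builtin used in the patch; it reports the carry instead of silently wrapping.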
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
index f87f830..a77759e 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
@@ -304,6 +304,7 @@
 }
 
 void DrmPlugin::setPlayPolicy() {
+    android::Mutex::Autolock lock(mPlayPolicyLock);
     mPlayPolicy.clear();
 
     KeyValue policy;
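
Both the default and HIDL ClearKey plugins now hold mPlayPolicyLock for the whole of setPlayPolicy(), so the clear()-then-add() sequence cannot interleave with another thread touching mPlayPolicy under the same lock. A minimal sketch of the same scoped-locking shape using the standard library (std::mutex standing in for android::Mutex; the container and keys are illustrative):

    #include <map>
    #include <mutex>
    #include <string>

    class PolicyHolder {
    public:
        void setPlayPolicy() {
            std::lock_guard<std::mutex> lock(mLock);  // released automatically at scope exit
            mPolicy.clear();                          // rebuild the policy while the lock is held
            mPolicy["LicenseType"] = "Streaming";
            mPolicy["PlayAllowed"] = "True";
        }

    private:
        std::mutex mLock;                             // counterpart of mPlayPolicyLock
        std::map<std::string, std::string> mPolicy;   // counterpart of mPlayPolicy
    };

android::Mutex::Autolock plays the same role as std::lock_guard here: the mutex is released on every path out of the function, including early returns.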
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
index 3de7589..076beb8 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
@@ -406,6 +406,7 @@
     int64_t mCloseSessionOkCount;
     int64_t mCloseSessionNotOpenedCount;
     uint32_t mNextSecureStopId;
+    android::Mutex mPlayPolicyLock;
 
     // set by property to mock error scenarios
     Status_V1_2 mMockError;
diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h
index 9cabd8b..200e92d 100644
--- a/include/private/media/AudioTrackShared.h
+++ b/include/private/media/AudioTrackShared.h
@@ -182,6 +182,7 @@
                 // This is set by AudioTrack.setBufferSizeInFrames().
                 // A write will not fill the buffer above this limit.
     volatile    uint32_t   mBufferSizeInFrames;  // effective size of the buffer
+    volatile    uint32_t   mStartThresholdInFrames; // min frames in buffer to start streaming
 
 public:
 
@@ -216,6 +217,8 @@
     };
 
     size_t frameCount() const { return mFrameCount; }
+    uint32_t getStartThresholdInFrames() const;
+    uint32_t setStartThresholdInFrames(uint32_t startThresholdInFrames);
 
 protected:
     // These refer to shared memory, and are virtual addresses with respect to the current process.
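
The new mStartThresholdInFrames field and its accessors expose how many frames must be queued before playback starts. Since the field lives in shared memory and the buffer size can change, a setter of this kind usually has to clamp the request and report back what was actually stored; the sketch below shows that shape under those assumptions (it is not the AudioTrackShared implementation):

    #include <atomic>
    #include <cstdint>

    struct StartThreshold {
        std::atomic<uint32_t> bufferSizeInFrames{4096};
        std::atomic<uint32_t> startThresholdInFrames{4096};

        // Clamp the requested threshold into [1, bufferSizeInFrames] and return
        // the value actually applied so the caller learns the effective setting.
        uint32_t set(uint32_t requested) {
            uint32_t buffer = bufferSizeInFrames.load();
            uint32_t clamped = requested;
            if (clamped == 0) clamped = 1;           // a zero threshold would never start
            if (clamped > buffer) clamped = buffer;  // cannot require more than the buffer holds
            startThresholdInFrames.store(clamped);
            return clamped;
        }
    };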
diff --git a/media/bufferpool/2.0/BufferPoolClient.cpp b/media/bufferpool/2.0/BufferPoolClient.cpp
index 9308b81..cda23ff 100644
--- a/media/bufferpool/2.0/BufferPoolClient.cpp
+++ b/media/bufferpool/2.0/BufferPoolClient.cpp
@@ -29,7 +29,7 @@
 namespace V2_0 {
 namespace implementation {
 
-static constexpr int64_t kReceiveTimeoutUs = 1000000; // 100ms
+static constexpr int64_t kReceiveTimeoutUs = 2000000; // 2s
 static constexpr int kPostMaxRetry = 3;
 static constexpr int kCacheTtlUs = 1000000; // TODO: tune
 static constexpr size_t kMaxCachedBufferCount = 64;
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 3e6b0ff..332696d 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -55,6 +55,8 @@
 namespace android {
 
 constexpr char COMPONENT_NAME[] = "c2.android.aac.decoder";
+constexpr size_t kDefaultOutputPortDelay = 2;
+constexpr size_t kMaxOutputPortDelay = 16;
 
 class C2SoftAacDec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
@@ -73,7 +75,9 @@
 
         addParameter(
                 DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
-                .withConstValue(new C2PortActualDelayTuning::output(2u))
+                .withDefault(new C2PortActualDelayTuning::output(kDefaultOutputPortDelay))
+                .withFields({C2F(mActualOutputDelay, value).inRange(0, kMaxOutputPortDelay)})
+                .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
                 .build());
 
         addParameter(
@@ -263,6 +267,7 @@
       mAACDecoder(nullptr),
       mStreamInfo(nullptr),
       mSignalledError(false),
+      mOutputPortDelay(kDefaultOutputPortDelay),
       mOutputDelayRingBuffer(nullptr) {
 }
 
@@ -915,6 +920,29 @@
 
     int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels;
 
+    size_t numSamplesInOutput = mStreamInfo->frameSize * mStreamInfo->numChannels;
+    if (numSamplesInOutput > 0) {
+        size_t actualOutputPortDelay = (outputDelay + numSamplesInOutput - 1) / numSamplesInOutput;
+        if (actualOutputPortDelay > mOutputPortDelay) {
+            mOutputPortDelay = actualOutputPortDelay;
+            ALOGV("New Output port delay %zu ", mOutputPortDelay);
+
+            C2PortActualDelayTuning::output outputPortDelay(mOutputPortDelay);
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            c2_status_t err =
+                mIntf->config({&outputPortDelay}, C2_MAY_BLOCK, &failures);
+            if (err == OK) {
+                work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(outputPortDelay));
+            } else {
+                ALOGE("Cannot set output delay");
+                mSignalledError = true;
+                work->workletsProcessed = 1u;
+                work->result = C2_CORRUPTED;
+                return;
+            }
+        }
+    }
     mBuffersInfo.push_back(std::move(inInfo));
     work->workletsProcessed = 0u;
     if (!eos && mOutputDelayCompensated < outputDelay) {
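
The block added above converts the decoder delay, reported in PCM samples, into an output port delay in buffers by rounding up: with frameSize * numChannels samples per output buffer, ceil(outputDelay / samplesPerBuffer) buffers have to be withheld before the first output appears. A small worked sketch of that ceiling division (the numbers are illustrative, not taken from the codec):

    #include <cstddef>
    #include <cstdio>

    int main() {
        size_t outputDelaySamples = 1600;          // decoder delay in samples (illustrative)
        size_t samplesPerBuffer = 1024 * 2;        // frameSize * numChannels (illustrative)
        size_t portDelay =
            (outputDelaySamples + samplesPerBuffer - 1) / samplesPerBuffer;  // ceiling division
        std::printf("output port delay = %zu buffer(s)\n", portDelay);       // prints 1
        return 0;
    }

The component only pushes a C2PortActualDelayTuning update when this value grows beyond the delay it last advertised; a shrinking delay is left in place.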
diff --git a/media/codec2/components/aac/C2SoftAacDec.h b/media/codec2/components/aac/C2SoftAacDec.h
index 965c29e..986187c 100644
--- a/media/codec2/components/aac/C2SoftAacDec.h
+++ b/media/codec2/components/aac/C2SoftAacDec.h
@@ -57,6 +57,7 @@
     size_t mInputBufferCount;
     size_t mOutputBufferCount;
     bool mSignalledError;
+    size_t mOutputPortDelay;
     struct Info {
         uint64_t frameIndex;
         size_t bufferSize;
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index ea76cbb..d865ab2 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -272,8 +272,9 @@
         return UNKNOWN_ERROR;
     }
 
-    if (sbrMode != -1 && aacProfile == C2Config::PROFILE_AAC_ELD) {
-        if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, sbrMode)) {
+    if (sbrMode != C2Config::AAC_SBR_AUTO && aacProfile == C2Config::PROFILE_AAC_ELD) {
+        int aacSbrMode = sbrMode != C2Config::AAC_SBR_OFF;
+        if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, aacSbrMode)) {
             ALOGE("Failed to set AAC encoder parameters");
             return UNKNOWN_ERROR;
         }
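
The old code compared the C2Config SBR setting against a raw -1 and then passed the C2 enum value straight to AACENC_SBR_MODE, which does not share the same encoding; the rewrite skips the call entirely for AAC_SBR_AUTO and collapses the remaining values to 0 (off) or 1 (on). A minimal sketch of that mapping (the enum values below are stand-ins, not the real C2Config constants):

    // Hypothetical mirror of the C2 SBR request values.
    enum class SbrRequest { kAuto = -1, kOff = 0, kSingleRate = 1, kDualRate = 2 };

    // Returns -1 when the encoder should keep its automatic default, otherwise
    // the 0/1 value expected by an on/off encoder parameter.
    static int toEncoderSbrMode(SbrRequest request) {
        if (request == SbrRequest::kAuto) return -1;   // leave the encoder setting untouched
        return request == SbrRequest::kOff ? 0 : 1;    // any explicit non-off value enables SBR
    }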
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 0207311..e8287f9 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -26,7 +26,6 @@
 #include <SimpleC2Interface.h>
 
 #include "C2SoftAvcDec.h"
-#include "ih264d.h"
 
 namespace android {
 
@@ -391,12 +390,14 @@
     }
 
     while (true) {
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
+        ih264d_video_decode_ip_t s_h264d_decode_ip = {};
+        ih264d_video_decode_op_t s_h264d_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_h264d_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_h264d_decode_op.s_ivd_video_decode_op_t;
 
-        setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0);
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        if (0 == s_decode_op.u4_output_present) {
+        setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, nullptr, 0, 0, 0);
+        (void) ivdec_api_function(mDecHandle, &s_h264d_decode_ip, &s_h264d_decode_op);
+        if (0 == ps_decode_op->u4_output_present) {
             resetPlugin();
             break;
         }
@@ -411,8 +412,8 @@
 }
 
 status_t C2SoftAvcDec::createDecoder() {
-    ivdext_create_ip_t s_create_ip;
-    ivdext_create_op_t s_create_op;
+    ivdext_create_ip_t s_create_ip = {};
+    ivdext_create_op_t s_create_op = {};
 
     s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
     s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
@@ -438,8 +439,8 @@
 }
 
 status_t C2SoftAvcDec::setNumCores() {
-    ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip;
-    ivdext_ctl_set_num_cores_op_t s_set_num_cores_op;
+    ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip = {};
+    ivdext_ctl_set_num_cores_op_t s_set_num_cores_op = {};
 
     s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
     s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -458,22 +459,26 @@
 }
 
 status_t C2SoftAvcDec::setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode) {
-    ivd_ctl_set_config_ip_t s_set_dyn_params_ip;
-    ivd_ctl_set_config_op_t s_set_dyn_params_op;
+    ih264d_ctl_set_config_ip_t s_h264d_set_dyn_params_ip = {};
+    ih264d_ctl_set_config_op_t s_h264d_set_dyn_params_op = {};
+    ivd_ctl_set_config_ip_t *ps_set_dyn_params_ip =
+        &s_h264d_set_dyn_params_ip.s_ivd_ctl_set_config_ip_t;
+    ivd_ctl_set_config_op_t *ps_set_dyn_params_op =
+        &s_h264d_set_dyn_params_op.s_ivd_ctl_set_config_op_t;
 
-    s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
-    s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
-    s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride;
-    s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE;
-    s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
-    s_set_dyn_params_ip.e_vid_dec_mode = dec_mode;
-    s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+    ps_set_dyn_params_ip->u4_size = sizeof(ih264d_ctl_set_config_ip_t);
+    ps_set_dyn_params_ip->e_cmd = IVD_CMD_VIDEO_CTL;
+    ps_set_dyn_params_ip->e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+    ps_set_dyn_params_ip->u4_disp_wd = (UWORD32) stride;
+    ps_set_dyn_params_ip->e_frm_skip_mode = IVD_SKIP_NONE;
+    ps_set_dyn_params_ip->e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+    ps_set_dyn_params_ip->e_vid_dec_mode = dec_mode;
+    ps_set_dyn_params_op->u4_size = sizeof(ih264d_ctl_set_config_op_t);
     IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
-                                                     &s_set_dyn_params_ip,
-                                                     &s_set_dyn_params_op);
+                                                     &s_h264d_set_dyn_params_ip,
+                                                     &s_h264d_set_dyn_params_op);
     if (status != IV_SUCCESS) {
-        ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code);
+        ALOGE("error in %s: 0x%x", __func__, ps_set_dyn_params_op->u4_error_code);
         return UNKNOWN_ERROR;
     }
 
@@ -481,8 +486,8 @@
 }
 
 void C2SoftAvcDec::getVersion() {
-    ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip;
-    ivd_ctl_getversioninfo_op_t s_get_versioninfo_op;
+    ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip = {};
+    ivd_ctl_getversioninfo_op_t s_get_versioninfo_op = {};
     UWORD8 au1_buf[512];
 
     s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
@@ -538,7 +543,7 @@
         if (OK != setParams(mStride, IVD_DECODE_FRAME)) return false;
     }
 
-    ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+    ps_decode_ip->u4_size = sizeof(ih264d_video_decode_ip_t);
     ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
     if (inBuffer) {
         ps_decode_ip->u4_ts = tsMarker;
@@ -567,14 +572,14 @@
         ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
     }
     ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
-    ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t);
+    ps_decode_op->u4_size = sizeof(ih264d_video_decode_op_t);
 
     return true;
 }
 
 bool C2SoftAvcDec::getVuiParams() {
-    ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip;
-    ivdext_ctl_get_vui_params_op_t s_get_vui_params_op;
+    ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip = {};
+    ivdext_ctl_get_vui_params_op_t s_get_vui_params_op = {};
 
     s_get_vui_params_ip.u4_size = sizeof(ivdext_ctl_get_vui_params_ip_t);
     s_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -622,8 +627,8 @@
 }
 
 status_t C2SoftAvcDec::setFlushMode() {
-    ivd_ctl_flush_ip_t s_set_flush_ip;
-    ivd_ctl_flush_op_t s_set_flush_op;
+    ivd_ctl_flush_ip_t s_set_flush_ip = {};
+    ivd_ctl_flush_op_t s_set_flush_op = {};
 
     s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
     s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -641,8 +646,8 @@
 }
 
 status_t C2SoftAvcDec::resetDecoder() {
-    ivd_ctl_reset_ip_t s_reset_ip;
-    ivd_ctl_reset_op_t s_reset_op;
+    ivd_ctl_reset_ip_t s_reset_ip = {};
+    ivd_ctl_reset_op_t s_reset_op = {};
 
     s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
     s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -671,8 +676,8 @@
 
 status_t C2SoftAvcDec::deleteDecoder() {
     if (mDecHandle) {
-        ivdext_delete_ip_t s_delete_ip;
-        ivdext_delete_op_t s_delete_op;
+        ivdext_delete_ip_t s_delete_ip = {};
+        ivdext_delete_op_t s_delete_op = {};
 
         s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
         s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
@@ -837,8 +842,10 @@
             return;
         }
 
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
+        ih264d_video_decode_ip_t s_h264d_decode_ip = {};
+        ih264d_video_decode_op_t s_h264d_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_h264d_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_h264d_decode_op.s_ivd_video_decode_op_t;
         {
             C2GraphicView wView = mOutBlock->map().get();
             if (wView.error()) {
@@ -846,7 +853,7 @@
                 work->result = wView.error();
                 return;
             }
-            if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView,
+            if (!setDecodeArgs(ps_decode_ip, ps_decode_op, &rView, &wView,
                                inOffset + inPos, inSize - inPos, workIndex)) {
                 mSignalledError = true;
                 work->workletsProcessed = 1u;
@@ -862,26 +869,27 @@
             WORD32 delay;
             GETTIME(&mTimeStart, nullptr);
             TIME_DIFF(mTimeEnd, mTimeStart, delay);
-            (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+            (void) ivdec_api_function(mDecHandle, &s_h264d_decode_ip, &s_h264d_decode_op);
             WORD32 decodeTime;
             GETTIME(&mTimeEnd, nullptr);
             TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
             ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
-                  s_decode_op.u4_num_bytes_consumed);
+                  ps_decode_op->u4_num_bytes_consumed);
         }
-        if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+        if (IVD_MEM_ALLOC_FAILED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("allocation failure in decoder");
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
-        } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+        } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED ==
+                (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("unsupported resolution : %dx%d", mWidth, mHeight);
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
-        } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+        } else if (IVD_RES_CHANGED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGV("resolution changed");
             drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
             resetDecoder();
@@ -890,16 +898,16 @@
 
             /* Decode header and get new dimensions */
             setParams(mStride, IVD_DECODE_HEADER);
-            (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        } else if (IS_IVD_FATAL_ERROR(s_decode_op.u4_error_code)) {
-            ALOGE("Fatal error in decoder 0x%x", s_decode_op.u4_error_code);
+            (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+        } else if (IS_IVD_FATAL_ERROR(ps_decode_op->u4_error_code)) {
+            ALOGE("Fatal error in decoder 0x%x", ps_decode_op->u4_error_code);
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
         }
-        if (s_decode_op.i4_reorder_depth >= 0 && mOutputDelay != s_decode_op.i4_reorder_depth) {
-            mOutputDelay = s_decode_op.i4_reorder_depth;
+        if (ps_decode_op->i4_reorder_depth >= 0 && mOutputDelay != ps_decode_op->i4_reorder_depth) {
+            mOutputDelay = ps_decode_op->i4_reorder_depth;
             ALOGV("New Output delay %d ", mOutputDelay);
 
             C2PortActualDelayTuning::output outputDelay(mOutputDelay);
@@ -917,16 +925,16 @@
                 return;
             }
         }
-        if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
+        if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                mStride = ALIGN32(s_decode_op.u4_pic_wd);
+                mStride = ALIGN32(ps_decode_op->u4_pic_wd);
                 setParams(mStride, IVD_DECODE_FRAME);
             }
-            if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) {
-                mWidth = s_decode_op.u4_pic_wd;
-                mHeight = s_decode_op.u4_pic_ht;
-                CHECK_EQ(0u, s_decode_op.u4_output_present);
+            if (ps_decode_op->u4_pic_wd != mWidth || ps_decode_op->u4_pic_ht != mHeight) {
+                mWidth = ps_decode_op->u4_pic_wd;
+                mHeight = ps_decode_op->u4_pic_ht;
+                CHECK_EQ(0u, ps_decode_op->u4_output_present);
 
                 C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
                 std::vector<std::unique_ptr<C2SettingResult>> failures;
@@ -945,11 +953,11 @@
             }
         }
         (void)getVuiParams();
-        hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag);
-        if (s_decode_op.u4_output_present) {
-            finishWork(s_decode_op.u4_ts, work);
+        hasPicture |= (1 == ps_decode_op->u4_frame_decoded_flag);
+        if (ps_decode_op->u4_output_present) {
+            finishWork(ps_decode_op->u4_ts, work);
         }
-        inPos += s_decode_op.u4_num_bytes_consumed;
+        inPos += ps_decode_op->u4_num_bytes_consumed;
     }
     if (eos) {
         drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
@@ -987,16 +995,18 @@
             ALOGE("graphic view map failed %d", wView.error());
             return C2_CORRUPTED;
         }
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
-        if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) {
+        ih264d_video_decode_ip_t s_h264d_decode_ip = {};
+        ih264d_video_decode_op_t s_h264d_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_h264d_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_h264d_decode_op.s_ivd_video_decode_op_t;
+        if (!setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, &wView, 0, 0, 0)) {
             mSignalledError = true;
             work->workletsProcessed = 1u;
             return C2_CORRUPTED;
         }
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        if (s_decode_op.u4_output_present) {
-            finishWork(s_decode_op.u4_ts, work);
+        (void) ivdec_api_function(mDecHandle, &s_h264d_decode_ip, &s_h264d_decode_op);
+        if (ps_decode_op->u4_output_present) {
+            finishWork(ps_decode_op->u4_ts, work);
         } else {
             fillEmptyWork(work);
             break;
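
Across this file the ivd_* decode structs are replaced by the codec-specific ih264d_* wrappers, value-initialized with = {} so no field carries stack garbage, and u4_size now names the wrapper type so the library knows the extended members are present. A generic sketch of that wrapper-plus-base-pointer pattern (WrapperIp and BaseIp are placeholders, not libavc types):

    #include <cstdint>

    struct BaseIp    { uint32_t u4_size; uint32_t e_cmd; };
    struct WrapperIp { BaseIp s_base; uint32_t u4_extended_option; };

    int main() {
        WrapperIp ip = {};                      // zero-initialize every member, old and new
        BaseIp *base = &ip.s_base;              // callers keep working with the base view
        base->u4_size = sizeof(WrapperIp);      // advertise the full wrapper size
        base->e_cmd = 1;                        // some command id (illustrative)
        ip.u4_extended_option = 0;              // extended field is reachable via the wrapper
        return base->u4_size == sizeof(WrapperIp) ? 0 : 1;
    }

The same transformation is applied to the HEVC decoder (ihevcd_cxa_*) and the AVC encoder (ih264e_*) later in this change.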
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index bd84de0..5c07d29 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -25,8 +25,7 @@
 #include <SimpleC2Component.h>
 
 #include "ih264_typedefs.h"
-#include "iv.h"
-#include "ivd.h"
+#include "ih264d.h"
 
 namespace android {
 
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index cfaeb66..bab651f 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -19,6 +19,8 @@
 #include <log/log.h>
 #include <utils/misc.h>
 
+#include <algorithm>
+
 #include <media/hardware/VideoAPI.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
@@ -121,6 +123,19 @@
                 .build());
 
         addParameter(
+                DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+                .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+                        0 /* flexCount */, 0u /* stream */))
+                .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+                                {C2Config::picture_type_t(I_FRAME),
+                                  C2Config::picture_type_t(P_FRAME),
+                                  C2Config::picture_type_t(B_FRAME)}),
+                             C2F(mPictureQuantization, m.values[0].min).any(),
+                             C2F(mPictureQuantization, m.values[0].max).any()})
+                .withSetter(PictureQuantizationSetter)
+                .build());
+
+        addParameter(
                 DefineParam(mActualInputDelay, C2_PARAMKEY_INPUT_DELAY)
                 .withDefault(new C2PortActualDelayTuning::input(DEFAULT_B_FRAMES))
                 .withFields({C2F(mActualInputDelay, value).inRange(0, MAX_B_FRAMES)})
@@ -220,6 +235,7 @@
         return res;
     }
 
+
     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
                           C2P<C2StreamPictureSizeInfo::input> &me) {
         (void)mayBlock;
@@ -336,6 +352,13 @@
         return C2R::Ok();
     }
 
+    static C2R PictureQuantizationSetter(bool mayBlock,
+                                         C2P<C2StreamPictureQuantizationTuning::output> &me) {
+        (void)mayBlock;
+        (void)me;
+        return C2R::Ok();
+    }
+
     IV_PROFILE_T getProfile_l() const {
         switch (mProfileLevel->profile) {
         case PROFILE_AVC_CONSTRAINED_BASELINE:  [[fallthrough]];
@@ -393,6 +416,8 @@
     std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
     std::shared_ptr<C2StreamGopTuning::output> getGop_l() const { return mGop; }
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const
+    { return mPictureQuantization; }
 
 private:
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -404,6 +429,7 @@
     std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
     std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
     std::shared_ptr<C2StreamGopTuning::output> mGop;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
 };
 
 #define ive_api_function  ih264e_api_function
@@ -664,20 +690,67 @@
     ive_ctl_set_qp_op_t s_qp_op;
     IV_STATUS_T status;
 
+    ALOGV("in setQp()");
+
+    // set the defaults
     s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL;
     s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
 
-    s_qp_ip.u4_i_qp = DEFAULT_I_QP;
-    s_qp_ip.u4_i_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_i_qp_min = DEFAULT_QP_MIN;
+    // these are the ones we're going to set, so we want them to default
+    // to the codec's DEFAULT values instead of the CODEC_ bounds
+    int32_t iMin = INT32_MIN, pMin = INT32_MIN, bMin = INT32_MIN;
+    int32_t iMax = INT32_MAX, pMax = INT32_MAX, bMax = INT32_MAX;
 
-    s_qp_ip.u4_p_qp = DEFAULT_P_QP;
-    s_qp_ip.u4_p_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_p_qp_min = DEFAULT_QP_MIN;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> qp =
+                    mIntf->getPictureQuantization_l();
+    for (size_t i = 0; i < qp->flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = qp->m.values[i];
 
-    s_qp_ip.u4_b_qp = DEFAULT_P_QP;
-    s_qp_ip.u4_b_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_b_qp_min = DEFAULT_QP_MIN;
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+            iMax = layer.max;
+            iMin = layer.min;
+            ALOGV("iMin %d iMax %d", iMin, iMax);
+        } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+            pMax = layer.max;
+            pMin = layer.min;
+            ALOGV("pMin %d pMax %d", pMin, pMax);
+        } else if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+            bMax = layer.max;
+            bMin = layer.min;
+            ALOGV("bMin %d bMax %d", bMin, bMax);
+        }
+    }
+
+    // INT32_{MIN,MAX} means unspecified, so use the codec's default
+    if (iMax == INT32_MAX) iMax = DEFAULT_I_QP_MAX;
+    if (iMin == INT32_MIN) iMin = DEFAULT_I_QP_MIN;
+    if (pMax == INT32_MAX) pMax = DEFAULT_P_QP_MAX;
+    if (pMin == INT32_MIN) pMin = DEFAULT_P_QP_MIN;
+    if (bMax == INT32_MAX) bMax = DEFAULT_B_QP_MAX;
+    if (bMin == INT32_MIN) bMin = DEFAULT_B_QP_MIN;
+
+    // ensure we have legal values
+    iMax = std::clamp(iMax, CODEC_QP_MIN, CODEC_QP_MAX);
+    iMin = std::clamp(iMin, CODEC_QP_MIN, CODEC_QP_MAX);
+    pMax = std::clamp(pMax, CODEC_QP_MIN, CODEC_QP_MAX);
+    pMin = std::clamp(pMin, CODEC_QP_MIN, CODEC_QP_MAX);
+    bMax = std::clamp(bMax, CODEC_QP_MIN, CODEC_QP_MAX);
+    bMin = std::clamp(bMin, CODEC_QP_MIN, CODEC_QP_MAX);
+
+    s_qp_ip.u4_i_qp_max = iMax;
+    s_qp_ip.u4_i_qp_min = iMin;
+    s_qp_ip.u4_p_qp_max = pMax;
+    s_qp_ip.u4_p_qp_min = pMin;
+    s_qp_ip.u4_b_qp_max = bMax;
+    s_qp_ip.u4_b_qp_min = bMin;
+
+    // ensure initial qp values are within our newly configured bounds...
+    s_qp_ip.u4_i_qp = std::clamp(DEFAULT_I_QP, iMin, iMax);
+    s_qp_ip.u4_p_qp = std::clamp(DEFAULT_P_QP, pMin, pMax);
+    s_qp_ip.u4_b_qp = std::clamp(DEFAULT_B_QP, bMin, bMax);
+
+    ALOGV("setting QP: i %d-%d p %d-%d b %d-%d", iMin, iMax, pMin, pMax, bMin, bMax);
+
 
     s_qp_ip.u4_timestamp_high = -1;
     s_qp_ip.u4_timestamp_low = -1;
@@ -1009,29 +1082,31 @@
 
     /* Getting MemRecords Attributes */
     {
-        iv_fill_mem_rec_ip_t s_fill_mem_rec_ip;
-        iv_fill_mem_rec_op_t s_fill_mem_rec_op;
+        ih264e_fill_mem_rec_ip_t s_ih264e_mem_rec_ip = {};
+        ih264e_fill_mem_rec_op_t s_ih264e_mem_rec_op = {};
+        iv_fill_mem_rec_ip_t *ps_fill_mem_rec_ip = &s_ih264e_mem_rec_ip.s_ive_ip;
+        iv_fill_mem_rec_op_t *ps_fill_mem_rec_op = &s_ih264e_mem_rec_op.s_ive_op;
 
-        s_fill_mem_rec_ip.u4_size = sizeof(iv_fill_mem_rec_ip_t);
-        s_fill_mem_rec_op.u4_size = sizeof(iv_fill_mem_rec_op_t);
+        ps_fill_mem_rec_ip->u4_size = sizeof(ih264e_fill_mem_rec_ip_t);
+        ps_fill_mem_rec_op->u4_size = sizeof(ih264e_fill_mem_rec_op_t);
 
-        s_fill_mem_rec_ip.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
-        s_fill_mem_rec_ip.ps_mem_rec = mMemRecords;
-        s_fill_mem_rec_ip.u4_num_mem_rec = mNumMemRecords;
-        s_fill_mem_rec_ip.u4_max_wd = width;
-        s_fill_mem_rec_ip.u4_max_ht = height;
-        s_fill_mem_rec_ip.u4_max_level = mAVCEncLevel;
-        s_fill_mem_rec_ip.e_color_format = DEFAULT_INP_COLOR_FORMAT;
-        s_fill_mem_rec_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
-        s_fill_mem_rec_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
-        s_fill_mem_rec_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
-        s_fill_mem_rec_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+        ps_fill_mem_rec_ip->e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+        ps_fill_mem_rec_ip->ps_mem_rec = mMemRecords;
+        ps_fill_mem_rec_ip->u4_num_mem_rec = mNumMemRecords;
+        ps_fill_mem_rec_ip->u4_max_wd = width;
+        ps_fill_mem_rec_ip->u4_max_ht = height;
+        ps_fill_mem_rec_ip->u4_max_level = mAVCEncLevel;
+        ps_fill_mem_rec_ip->e_color_format = DEFAULT_INP_COLOR_FORMAT;
+        ps_fill_mem_rec_ip->u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+        ps_fill_mem_rec_ip->u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+        ps_fill_mem_rec_ip->u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+        ps_fill_mem_rec_ip->u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
 
-        status = ive_api_function(nullptr, &s_fill_mem_rec_ip, &s_fill_mem_rec_op);
+        status = ive_api_function(nullptr, &s_ih264e_mem_rec_ip, &s_ih264e_mem_rec_op);
 
         if (status != IV_SUCCESS) {
             ALOGE("Fill memory records failed = 0x%x\n",
-                    s_fill_mem_rec_op.u4_error_code);
+                    ps_fill_mem_rec_op->u4_error_code);
             return C2_CORRUPTED;
         }
     }
@@ -1060,48 +1135,51 @@
 
     /* Codec Instance Creation */
     {
-        ive_init_ip_t s_init_ip;
-        ive_init_op_t s_init_op;
+        ih264e_init_ip_t s_enc_ip = {};
+        ih264e_init_op_t s_enc_op = {};
+
+        ive_init_ip_t *ps_init_ip = &s_enc_ip.s_ive_ip;
+        ive_init_op_t *ps_init_op = &s_enc_op.s_ive_op;
 
         mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
         mCodecCtx->u4_size = sizeof(iv_obj_t);
         mCodecCtx->pv_fxns = (void *)ive_api_function;
 
-        s_init_ip.u4_size = sizeof(ive_init_ip_t);
-        s_init_op.u4_size = sizeof(ive_init_op_t);
+        ps_init_ip->u4_size = sizeof(ih264e_init_ip_t);
+        ps_init_op->u4_size = sizeof(ih264e_init_op_t);
 
-        s_init_ip.e_cmd = IV_CMD_INIT;
-        s_init_ip.u4_num_mem_rec = mNumMemRecords;
-        s_init_ip.ps_mem_rec = mMemRecords;
-        s_init_ip.u4_max_wd = width;
-        s_init_ip.u4_max_ht = height;
-        s_init_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
-        s_init_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
-        s_init_ip.u4_max_level = mAVCEncLevel;
-        s_init_ip.e_inp_color_fmt = mIvVideoColorFormat;
+        ps_init_ip->e_cmd = IV_CMD_INIT;
+        ps_init_ip->u4_num_mem_rec = mNumMemRecords;
+        ps_init_ip->ps_mem_rec = mMemRecords;
+        ps_init_ip->u4_max_wd = width;
+        ps_init_ip->u4_max_ht = height;
+        ps_init_ip->u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+        ps_init_ip->u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+        ps_init_ip->u4_max_level = mAVCEncLevel;
+        ps_init_ip->e_inp_color_fmt = mIvVideoColorFormat;
 
         if (mReconEnable || mPSNREnable) {
-            s_init_ip.u4_enable_recon = 1;
+            ps_init_ip->u4_enable_recon = 1;
         } else {
-            s_init_ip.u4_enable_recon = 0;
+            ps_init_ip->u4_enable_recon = 0;
         }
-        s_init_ip.e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
-        s_init_ip.e_rc_mode = DEFAULT_RC_MODE;
-        s_init_ip.u4_max_framerate = DEFAULT_MAX_FRAMERATE;
-        s_init_ip.u4_max_bitrate = DEFAULT_MAX_BITRATE;
-        s_init_ip.u4_num_bframes = mBframes;
-        s_init_ip.e_content_type = IV_PROGRESSIVE;
-        s_init_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
-        s_init_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
-        s_init_ip.e_slice_mode = mSliceMode;
-        s_init_ip.u4_slice_param = mSliceParam;
-        s_init_ip.e_arch = mArch;
-        s_init_ip.e_soc = DEFAULT_SOC;
+        ps_init_ip->e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
+        ps_init_ip->e_rc_mode = DEFAULT_RC_MODE;
+        ps_init_ip->u4_max_framerate = DEFAULT_MAX_FRAMERATE;
+        ps_init_ip->u4_max_bitrate = DEFAULT_MAX_BITRATE;
+        ps_init_ip->u4_num_bframes = mBframes;
+        ps_init_ip->e_content_type = IV_PROGRESSIVE;
+        ps_init_ip->u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+        ps_init_ip->u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+        ps_init_ip->e_slice_mode = mSliceMode;
+        ps_init_ip->u4_slice_param = mSliceParam;
+        ps_init_ip->e_arch = mArch;
+        ps_init_ip->e_soc = DEFAULT_SOC;
 
-        status = ive_api_function(mCodecCtx, &s_init_ip, &s_init_op);
+        status = ive_api_function(mCodecCtx, &s_enc_ip, &s_enc_op);
 
         if (status != IV_SUCCESS) {
-            ALOGE("Init encoder failed = 0x%x\n", s_init_op.u4_error_code);
+            ALOGE("Init encoder failed = 0x%x\n", ps_init_op->u4_error_code);
             return C2_CORRUPTED;
         }
     }
@@ -1328,13 +1406,13 @@
             ps_inp_raw_buf->apv_bufs[1] = uPlane;
             ps_inp_raw_buf->apv_bufs[2] = vPlane;
 
-            ps_inp_raw_buf->au4_wd[0] = input->width();
-            ps_inp_raw_buf->au4_wd[1] = input->width() / 2;
-            ps_inp_raw_buf->au4_wd[2] = input->width() / 2;
+            ps_inp_raw_buf->au4_wd[0] = mSize->width;
+            ps_inp_raw_buf->au4_wd[1] = mSize->width / 2;
+            ps_inp_raw_buf->au4_wd[2] = mSize->width / 2;
 
-            ps_inp_raw_buf->au4_ht[0] = input->height();
-            ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
-            ps_inp_raw_buf->au4_ht[2] = input->height() / 2;
+            ps_inp_raw_buf->au4_ht[0] = mSize->height;
+            ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
+            ps_inp_raw_buf->au4_ht[2] = mSize->height / 2;
 
             ps_inp_raw_buf->au4_strd[0] = yStride;
             ps_inp_raw_buf->au4_strd[1] = uStride;
@@ -1359,11 +1437,11 @@
             ps_inp_raw_buf->apv_bufs[0] = yPlane;
             ps_inp_raw_buf->apv_bufs[1] = uPlane;
 
-            ps_inp_raw_buf->au4_wd[0] = input->width();
-            ps_inp_raw_buf->au4_wd[1] = input->width();
+            ps_inp_raw_buf->au4_wd[0] = mSize->width;
+            ps_inp_raw_buf->au4_wd[1] = mSize->width;
 
-            ps_inp_raw_buf->au4_ht[0] = input->height();
-            ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
+            ps_inp_raw_buf->au4_ht[0] = mSize->height;
+            ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
 
             ps_inp_raw_buf->au4_strd[0] = yStride;
             ps_inp_raw_buf->au4_strd[1] = uStride;
@@ -1429,15 +1507,17 @@
     }
     // while (!mSawOutputEOS && !outQueue.empty()) {
     c2_status_t error;
-    ive_video_encode_ip_t s_encode_ip;
-    ive_video_encode_op_t s_encode_op;
-    memset(&s_encode_op, 0, sizeof(s_encode_op));
+    ih264e_video_encode_ip_t s_video_encode_ip = {};
+    ih264e_video_encode_op_t s_video_encode_op = {};
+    ive_video_encode_ip_t *ps_encode_ip = &s_video_encode_ip.s_ive_ip;
+    ive_video_encode_op_t *ps_encode_op = &s_video_encode_op.s_ive_op;
+    memset(ps_encode_op, 0, sizeof(*ps_encode_op));
 
     if (!mSpsPpsHeaderReceived) {
         constexpr uint32_t kHeaderLength = MIN_STREAM_SIZE;
         uint8_t header[kHeaderLength];
         error = setEncodeArgs(
-                &s_encode_ip, &s_encode_op, nullptr, header, kHeaderLength, workIndex);
+                ps_encode_ip, ps_encode_op, nullptr, header, kHeaderLength, workIndex);
         if (error != C2_OK) {
             ALOGE("setEncodeArgs failed: %d", error);
             mSignalledError = true;
@@ -1445,22 +1525,22 @@
             work->workletsProcessed = 1u;
             return;
         }
-        status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+        status = ive_api_function(mCodecCtx, ps_encode_ip, ps_encode_op);
 
         if (IV_SUCCESS != status) {
             ALOGE("Encode header failed = 0x%x\n",
-                    s_encode_op.u4_error_code);
+                    ps_encode_op->u4_error_code);
             work->workletsProcessed = 1u;
             return;
         } else {
             ALOGV("Bytes Generated in header %d\n",
-                    s_encode_op.s_out_buf.u4_bytes);
+                    ps_encode_op->s_out_buf.u4_bytes);
         }
 
         mSpsPpsHeaderReceived = true;
 
         std::unique_ptr<C2StreamInitDataInfo::output> csd =
-            C2StreamInitDataInfo::output::AllocUnique(s_encode_op.s_out_buf.u4_bytes, 0u);
+            C2StreamInitDataInfo::output::AllocUnique(ps_encode_op->s_out_buf.u4_bytes, 0u);
         if (!csd) {
             ALOGE("CSD allocation failed");
             mSignalledError = true;
@@ -1468,7 +1548,7 @@
             work->workletsProcessed = 1u;
             return;
         }
-        memcpy(csd->m.value, header, s_encode_op.s_out_buf.u4_bytes);
+        memcpy(csd->m.value, header, ps_encode_op->s_out_buf.u4_bytes);
         work->worklets.front()->output.configUpdate.push_back(std::move(csd));
 
         DUMP_TO_FILE(
@@ -1562,7 +1642,7 @@
         }
 
         error = setEncodeArgs(
-                &s_encode_ip, &s_encode_op, view.get(), wView.base(), wView.capacity(), workIndex);
+                ps_encode_ip, ps_encode_op, view.get(), wView.base(), wView.capacity(), workIndex);
         if (error != C2_OK) {
             ALOGE("setEncodeArgs failed : %d", error);
             mSignalledError = true;
@@ -1579,17 +1659,17 @@
         /* Compute time elapsed between end of previous decode()
          * to start of current decode() */
         TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
-        status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+        status = ive_api_function(mCodecCtx, &s_video_encode_ip, &s_video_encode_op);
 
         if (IV_SUCCESS != status) {
-            if ((s_encode_op.u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
+            if ((ps_encode_op->u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
                 // TODO: use IVE_CMD_CTL_GETBUFINFO for proper max input size?
                 mOutBufferSize *= 2;
                 mOutBlock.reset();
                 continue;
             }
             ALOGE("Encode Frame failed = 0x%x\n",
-                    s_encode_op.u4_error_code);
+                    ps_encode_op->u4_error_code);
             mSignalledError = true;
             work->result = C2_CORRUPTED;
             work->workletsProcessed = 1u;
@@ -1599,7 +1679,7 @@
 
     // Hold input buffer reference
     if (inputBuffer) {
-        mBuffers[s_encode_ip.s_inp_buf.apv_bufs[0]] = inputBuffer;
+        mBuffers[ps_encode_ip->s_inp_buf.apv_bufs[0]] = inputBuffer;
     }
 
     GETTIME(&mTimeEnd, nullptr);
@@ -1607,9 +1687,9 @@
     TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
 
     ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
-            s_encode_op.s_out_buf.u4_bytes);
+            ps_encode_op->s_out_buf.u4_bytes);
 
-    void *freed = s_encode_op.s_inp_buf.apv_bufs[0];
+    void *freed = ps_encode_op->s_inp_buf.apv_bufs[0];
     /* If encoder frees up an input buffer, mark it as free */
     if (freed != nullptr) {
         if (mBuffers.count(freed) == 0u) {
@@ -1621,17 +1701,17 @@
         }
     }
 
-    if (s_encode_op.output_present) {
-        if (!s_encode_op.s_out_buf.u4_bytes) {
+    if (ps_encode_op->output_present) {
+        if (!ps_encode_op->s_out_buf.u4_bytes) {
             ALOGE("Error: Output present but bytes generated is zero");
             mSignalledError = true;
             work->result = C2_CORRUPTED;
             work->workletsProcessed = 1u;
             return;
         }
-        uint64_t workId = ((uint64_t)s_encode_op.u4_timestamp_high << 32) |
-                      s_encode_op.u4_timestamp_low;
-        finishWork(workId, work, &s_encode_op);
+        uint64_t workId = ((uint64_t)ps_encode_op->u4_timestamp_high << 32) |
+                      ps_encode_op->u4_timestamp_low;
+        finishWork(workId, work, ps_encode_op);
     }
     if (mSawInputEOS) {
         drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
@@ -1671,9 +1751,11 @@
             ALOGE("graphic view map failed %d", wView.error());
             return C2_CORRUPTED;
         }
-        ive_video_encode_ip_t s_encode_ip;
-        ive_video_encode_op_t s_encode_op;
-        if (C2_OK != setEncodeArgs(&s_encode_ip, &s_encode_op, nullptr,
+        ih264e_video_encode_ip_t s_video_encode_ip = {};
+        ih264e_video_encode_op_t s_video_encode_op = {};
+        ive_video_encode_ip_t *ps_encode_ip = &s_video_encode_ip.s_ive_ip;
+        ive_video_encode_op_t *ps_encode_op = &s_video_encode_op.s_ive_op;
+        if (C2_OK != setEncodeArgs(ps_encode_ip, ps_encode_op, nullptr,
                                    wView.base(), wView.capacity(), 0)) {
             ALOGE("setEncodeArgs failed for drainInternal");
             mSignalledError = true;
@@ -1681,9 +1763,9 @@
             work->workletsProcessed = 1u;
             return C2_CORRUPTED;
         }
-        (void)ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+        (void)ive_api_function(mCodecCtx, &s_video_encode_ip, &s_video_encode_op);
 
-        void *freed = s_encode_op.s_inp_buf.apv_bufs[0];
+        void *freed = ps_encode_op->s_inp_buf.apv_bufs[0];
         /* If encoder frees up an input buffer, mark it as free */
         if (freed != nullptr) {
             if (mBuffers.count(freed) == 0u) {
@@ -1695,10 +1777,10 @@
             }
         }
 
-        if (s_encode_op.output_present) {
-            uint64_t workId = ((uint64_t)s_encode_op.u4_timestamp_high << 32) |
-                          s_encode_op.u4_timestamp_low;
-            finishWork(workId, work, &s_encode_op);
+        if (ps_encode_op->output_present) {
+            uint64_t workId = ((uint64_t)ps_encode_op->u4_timestamp_high << 32) |
+                          ps_encode_op->u4_timestamp_low;
+            finishWork(workId, work, ps_encode_op);
         } else {
             if (work->workletsProcessed != 1u) {
                 work->worklets.front()->output.flags = work->input.flags;
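
setQp() now resolves each per-picture-type QP range by starting from INT32_MIN/INT32_MAX "unspecified" sentinels, overwriting them with whatever C2_PARAMKEY_PICTURE_QUANTIZATION supplies, substituting the codec defaults for anything still unset, and finally clamping everything into the H.264 legal range CODEC_QP_MIN..CODEC_QP_MAX. A compact sketch of that resolve-then-clamp order for a single picture type (the default values below are made up for illustration):

    #include <algorithm>
    #include <climits>
    #include <cstdio>

    int main() {
        const int kCodecQpMin = 0, kCodecQpMax = 51;         // H.264 QP limits
        const int kDefaultIQpMin = 10, kDefaultIQpMax = 40;  // illustrative defaults only

        int iMin = INT_MIN, iMax = INT_MAX;   // "unspecified" sentinels
        iMax = 60;                            // suppose the app only set an out-of-range max

        if (iMin == INT_MIN) iMin = kDefaultIQpMin;  // fall back to the default
        if (iMax == INT_MAX) iMax = kDefaultIQpMax;  // not taken: a value was supplied

        iMin = std::clamp(iMin, kCodecQpMin, kCodecQpMax);
        iMax = std::clamp(iMax, kCodecQpMin, kCodecQpMax);
        std::printf("I-frame QP range: %d-%d\n", iMin, iMax);  // prints 10-51
        return 0;
    }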
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index 555055b..673a282 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -24,8 +24,7 @@
 #include <SimpleC2Component.h>
 
 #include "ih264_typedefs.h"
-#include "iv2.h"
-#include "ive2.h"
+#include "ih264e.h"
 
 namespace android {
 
@@ -100,6 +99,11 @@
 #define STRLENGTH                   500
 #define DEFAULT_CONSTRAINED_INTRA   0
 
+/** limits as specified by H.264 */
+#define CODEC_QP_MIN                0
+#define CODEC_QP_MAX                51
+
+
 #define MIN(a, b) ((a) < (b))? (a) : (b)
 #define MAX(a, b) ((a) > (b))? (a) : (b)
 #define ALIGN16(x) ((((x) + 15) >> 4) << 4)
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index fb3fbd0..dfad226 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -110,17 +110,20 @@
         }
         case kWhatStop: {
             int32_t err = thiz->onStop();
+            thiz->mOutputBlockPool.reset();
             Reply(msg, &err);
             break;
         }
         case kWhatReset: {
             thiz->onReset();
+            thiz->mOutputBlockPool.reset();
             mRunning = false;
             Reply(msg);
             break;
         }
         case kWhatRelease: {
             thiz->onRelease();
+            thiz->mOutputBlockPool.reset();
             mRunning = false;
             Reply(msg);
             break;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index a374dfa..6bcf3a2 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -26,7 +26,6 @@
 #include <SimpleC2Interface.h>
 
 #include "C2SoftHevcDec.h"
-#include "ihevcd_cxa.h"
 
 namespace android {
 
@@ -380,12 +379,14 @@
     }
 
     while (true) {
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
+        ihevcd_cxa_video_decode_ip_t s_hevcd_decode_ip = {};
+        ihevcd_cxa_video_decode_op_t s_hevcd_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_hevcd_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_hevcd_decode_op.s_ivd_video_decode_op_t;
 
-        setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0);
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        if (0 == s_decode_op.u4_output_present) {
+        setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, nullptr, 0, 0, 0);
+        (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+        if (0 == ps_decode_op->u4_output_present) {
             resetPlugin();
             break;
         }
@@ -400,8 +401,8 @@
 }
 
 status_t C2SoftHevcDec::createDecoder() {
-    ivdext_create_ip_t s_create_ip;
-    ivdext_create_op_t s_create_op;
+    ivdext_create_ip_t s_create_ip = {};
+    ivdext_create_op_t s_create_op = {};
 
     s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
     s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
@@ -427,8 +428,8 @@
 }
 
 status_t C2SoftHevcDec::setNumCores() {
-    ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip;
-    ivdext_ctl_set_num_cores_op_t s_set_num_cores_op;
+    ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip = {};
+    ivdext_ctl_set_num_cores_op_t s_set_num_cores_op = {};
 
     s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
     s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -447,22 +448,26 @@
 }
 
 status_t C2SoftHevcDec::setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode) {
-    ivd_ctl_set_config_ip_t s_set_dyn_params_ip;
-    ivd_ctl_set_config_op_t s_set_dyn_params_op;
+    ihevcd_cxa_ctl_set_config_ip_t s_hevcd_set_dyn_params_ip = {};
+    ihevcd_cxa_ctl_set_config_op_t s_hevcd_set_dyn_params_op = {};
+    ivd_ctl_set_config_ip_t *ps_set_dyn_params_ip =
+        &s_hevcd_set_dyn_params_ip.s_ivd_ctl_set_config_ip_t;
+    ivd_ctl_set_config_op_t *ps_set_dyn_params_op =
+        &s_hevcd_set_dyn_params_op.s_ivd_ctl_set_config_op_t;
 
-    s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
-    s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
-    s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride;
-    s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE;
-    s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
-    s_set_dyn_params_ip.e_vid_dec_mode = dec_mode;
-    s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+    ps_set_dyn_params_ip->u4_size = sizeof(ihevcd_cxa_ctl_set_config_ip_t);
+    ps_set_dyn_params_ip->e_cmd = IVD_CMD_VIDEO_CTL;
+    ps_set_dyn_params_ip->e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+    ps_set_dyn_params_ip->u4_disp_wd = (UWORD32) stride;
+    ps_set_dyn_params_ip->e_frm_skip_mode = IVD_SKIP_NONE;
+    ps_set_dyn_params_ip->e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+    ps_set_dyn_params_ip->e_vid_dec_mode = dec_mode;
+    ps_set_dyn_params_op->u4_size = sizeof(ihevcd_cxa_ctl_set_config_op_t);
     IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
-                                                     &s_set_dyn_params_ip,
-                                                     &s_set_dyn_params_op);
+                                                     ps_set_dyn_params_ip,
+                                                     ps_set_dyn_params_op);
     if (status != IV_SUCCESS) {
-        ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code);
+        ALOGE("error in %s: 0x%x", __func__, ps_set_dyn_params_op->u4_error_code);
         return UNKNOWN_ERROR;
     }
 
@@ -470,8 +475,8 @@
 }
 
 status_t C2SoftHevcDec::getVersion() {
-    ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip;
-    ivd_ctl_getversioninfo_op_t s_get_versioninfo_op;
+    ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip = {};
+    ivd_ctl_getversioninfo_op_t s_get_versioninfo_op = {};
     UWORD8 au1_buf[512];
 
     s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
@@ -529,7 +534,7 @@
         if (OK != setParams(mStride, IVD_DECODE_FRAME)) return false;
     }
 
-    ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+    ps_decode_ip->u4_size = sizeof(ihevcd_cxa_video_decode_ip_t);
     ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
     if (inBuffer) {
         ps_decode_ip->u4_ts = tsMarker;
@@ -558,15 +563,15 @@
         ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
     }
     ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
-    ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t);
+    ps_decode_op->u4_size = sizeof(ihevcd_cxa_video_decode_op_t);
     ps_decode_op->u4_output_present = 0;
 
     return true;
 }
 
 bool C2SoftHevcDec::getVuiParams() {
-    ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip;
-    ivdext_ctl_get_vui_params_op_t s_get_vui_params_op;
+    ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip = {};
+    ivdext_ctl_get_vui_params_op_t s_get_vui_params_op = {};
 
     s_get_vui_params_ip.u4_size = sizeof(ivdext_ctl_get_vui_params_ip_t);
     s_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -614,8 +619,8 @@
 }
 
 status_t C2SoftHevcDec::setFlushMode() {
-    ivd_ctl_flush_ip_t s_set_flush_ip;
-    ivd_ctl_flush_op_t s_set_flush_op;
+    ivd_ctl_flush_ip_t s_set_flush_ip = {};
+    ivd_ctl_flush_op_t s_set_flush_op = {};
 
     s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
     s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -633,8 +638,8 @@
 }
 
 status_t C2SoftHevcDec::resetDecoder() {
-    ivd_ctl_reset_ip_t s_reset_ip;
-    ivd_ctl_reset_op_t s_reset_op;
+    ivd_ctl_reset_ip_t s_reset_ip = {};
+    ivd_ctl_reset_op_t s_reset_op = {};
 
     s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
     s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -662,8 +667,8 @@
 
 status_t C2SoftHevcDec::deleteDecoder() {
     if (mDecHandle) {
-        ivdext_delete_ip_t s_delete_ip;
-        ivdext_delete_op_t s_delete_op;
+        ivdext_delete_ip_t s_delete_ip = {};
+        ivdext_delete_op_t s_delete_op = {};
 
         s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
         s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
@@ -835,9 +840,11 @@
             work->result = wView.error();
             return;
         }
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
-        if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView,
+        ihevcd_cxa_video_decode_ip_t s_hevcd_decode_ip = {};
+        ihevcd_cxa_video_decode_op_t s_hevcd_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_hevcd_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_hevcd_decode_op.s_ivd_video_decode_op_t;
+        if (!setDecodeArgs(ps_decode_ip, ps_decode_op, &rView, &wView,
                            inOffset + inPos, inSize - inPos, workIndex)) {
             mSignalledError = true;
             work->workletsProcessed = 1u;
@@ -852,26 +859,26 @@
         WORD32 delay;
         GETTIME(&mTimeStart, nullptr);
         TIME_DIFF(mTimeEnd, mTimeStart, delay);
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+        (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
         WORD32 decodeTime;
         GETTIME(&mTimeEnd, nullptr);
         TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
         ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
-              s_decode_op.u4_num_bytes_consumed);
-        if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+              ps_decode_op->u4_num_bytes_consumed);
+        if (IVD_MEM_ALLOC_FAILED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("allocation failure in decoder");
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
         } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED ==
-                   (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+                   (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("unsupported resolution : %dx%d", mWidth, mHeight);
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
-        } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+        } else if (IVD_RES_CHANGED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGV("resolution changed");
             drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
             resetDecoder();
@@ -880,16 +887,16 @@
 
             /* Decode header and get new dimensions */
             setParams(mStride, IVD_DECODE_HEADER);
-            (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        } else if (IS_IVD_FATAL_ERROR(s_decode_op.u4_error_code)) {
-            ALOGE("Fatal error in decoder 0x%x", s_decode_op.u4_error_code);
+            (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+        } else if (IS_IVD_FATAL_ERROR(ps_decode_op->u4_error_code)) {
+            ALOGE("Fatal error in decoder 0x%x", ps_decode_op->u4_error_code);
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
         }
-        if (s_decode_op.i4_reorder_depth >= 0 && mOutputDelay != s_decode_op.i4_reorder_depth) {
-            mOutputDelay = s_decode_op.i4_reorder_depth;
+        if (ps_decode_op->i4_reorder_depth >= 0 && mOutputDelay != ps_decode_op->i4_reorder_depth) {
+            mOutputDelay = ps_decode_op->i4_reorder_depth;
             ALOGV("New Output delay %d ", mOutputDelay);
 
             C2PortActualDelayTuning::output outputDelay(mOutputDelay);
@@ -907,15 +914,15 @@
                 return;
             }
         }
-        if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
+        if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                setParams(ALIGN32(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+                setParams(ALIGN32(ps_decode_op->u4_pic_wd), IVD_DECODE_FRAME);
             }
-            if (s_decode_op.u4_pic_wd != mWidth ||  s_decode_op.u4_pic_ht != mHeight) {
-                mWidth = s_decode_op.u4_pic_wd;
-                mHeight = s_decode_op.u4_pic_ht;
-                CHECK_EQ(0u, s_decode_op.u4_output_present);
+            if (ps_decode_op->u4_pic_wd != mWidth ||  ps_decode_op->u4_pic_ht != mHeight) {
+                mWidth = ps_decode_op->u4_pic_wd;
+                mHeight = ps_decode_op->u4_pic_ht;
+                CHECK_EQ(0u, ps_decode_op->u4_output_present);
 
                 C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
                 std::vector<std::unique_ptr<C2SettingResult>> failures;
@@ -935,15 +942,15 @@
             }
         }
         (void) getVuiParams();
-        hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag);
-        if (s_decode_op.u4_output_present) {
-            finishWork(s_decode_op.u4_ts, work);
+        hasPicture |= (1 == ps_decode_op->u4_frame_decoded_flag);
+        if (ps_decode_op->u4_output_present) {
+            finishWork(ps_decode_op->u4_ts, work);
         }
-        if (0 == s_decode_op.u4_num_bytes_consumed) {
+        if (0 == ps_decode_op->u4_num_bytes_consumed) {
             ALOGD("Bytes consumed is zero. Ignoring remaining bytes");
             break;
         }
-        inPos += s_decode_op.u4_num_bytes_consumed;
+        inPos += ps_decode_op->u4_num_bytes_consumed;
         if (hasPicture && (inSize - inPos)) {
             ALOGD("decoded frame in current access nal, ignoring further trailing bytes %d",
                   (int)inSize - (int)inPos);
@@ -985,16 +992,18 @@
             ALOGE("graphic view map failed %d", wView.error());
             return C2_CORRUPTED;
         }
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
-        if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) {
+        ihevcd_cxa_video_decode_ip_t s_hevcd_decode_ip = {};
+        ihevcd_cxa_video_decode_op_t s_hevcd_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_hevcd_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_hevcd_decode_op.s_ivd_video_decode_op_t;
+        if (!setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, &wView, 0, 0, 0)) {
             mSignalledError = true;
             work->workletsProcessed = 1u;
             return C2_CORRUPTED;
         }
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        if (s_decode_op.u4_output_present) {
-            finishWork(s_decode_op.u4_ts, work);
+        (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+        if (ps_decode_op->u4_output_present) {
+            finishWork(ps_decode_op->u4_ts, work);
         } else {
             fillEmptyWork(work);
             break;
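
The drain and process changes above follow a single pattern: declare the codec-specific ihevcd_cxa extension structs, value-initialize them with = {}, and hand pointers to the embedded ivd base structs to the existing helpers, so no stack garbage reaches the decoder. A minimal, self-contained C++ sketch of that pattern follows; the struct layouts are simplified, hypothetical stand-ins for the real ihevcd_cxa definitions, which live in the decoder headers and carry many more fields.

    #include <cstdint>

    // Hypothetical, simplified stand-ins for the decoder input structures
    // (assumption: the real extension type embeds the base struct first).
    struct ivd_video_decode_ip_t { uint32_t u4_size; uint32_t e_cmd; };
    struct ihevcd_cxa_video_decode_ip_t {
        ivd_video_decode_ip_t s_ivd_video_decode_ip_t;  // base struct
        uint32_t u4_extension_field;                    // illustrative extra field
    };

    int main() {
        // Value-initialization zeroes every member, including extension fields
        // that base-struct-only code paths never touch.
        ihevcd_cxa_video_decode_ip_t s_hevcd_decode_ip = {};

        // Existing helpers keep operating on the embedded base struct; the size
        // field advertises the full extension struct to the library.
        ivd_video_decode_ip_t *ps_decode_ip = &s_hevcd_decode_ip.s_ivd_video_decode_ip_t;
        ps_decode_ip->u4_size = sizeof(ihevcd_cxa_video_decode_ip_t);
        return 0;
    }
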
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h
index 600d7c1..b9b0a48 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.h
+++ b/media/codec2/components/hevc/C2SoftHevcDec.h
@@ -23,8 +23,7 @@
 #include <SimpleC2Component.h>
 
 #include "ihevc_typedefs.h"
-#include "iv.h"
-#include "ivd.h"
+#include "ihevcd_cxa.h"
 
 namespace android {
 
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index e1cc6b3..3c87531 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -448,6 +448,20 @@
         work->worklets.front()->output.configUpdate.push_back(std::move(csd));
     }
 
+    // handle dynamic bitrate change
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
+        lock.unlock();
+
+        if (bitrate != mBitrate) {
+            mBitrate = bitrate;
+            int layerBitrate[2] = {static_cast<int>(mBitrate->value), 0};
+            ALOGV("Calling PVUpdateBitRate %d", layerBitrate[0]);
+            PVUpdateBitRate(mHandle, layerBitrate);
+        }
+    }
+
     std::shared_ptr<const C2GraphicView> rView;
     std::shared_ptr<C2Buffer> inputBuffer;
     bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
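
The block added above polls the interface's bitrate tuning on every process() call and forwards changes to PVUpdateBitRate(), so a mid-stream bitrate change only requires the client to reconfigure the standard bitrate parameter. A hedged client-side sketch (not part of this patch) follows; comp is an assumed std::shared_ptr<Codec2Client::Component>, and 512000 bps is an arbitrary target.

    // Ask a running encoder instance to switch to a new target bitrate.
    C2StreamBitrateInfo::output bitrate(0u /* stream */, 512000 /* bps, arbitrary */);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = comp->config({&bitrate}, C2_MAY_BLOCK, &failures);
    // On the next frame, process() sees the new value via getBitrate_l() and
    // calls PVUpdateBitRate() with it.
    (void)err;  // check against C2_OK in real code
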
diff --git a/media/codec2/components/raw/C2SoftRawDec.cpp b/media/codec2/components/raw/C2SoftRawDec.cpp
index 31ca705..a03d4e2 100644
--- a/media/codec2/components/raw/C2SoftRawDec.cpp
+++ b/media/codec2/components/raw/C2SoftRawDec.cpp
@@ -87,7 +87,9 @@
                 .withFields({C2F(mPcmEncodingInfo, value).oneOf({
                      C2Config::PCM_16,
                      C2Config::PCM_8,
-                     C2Config::PCM_FLOAT})
+                     C2Config::PCM_FLOAT,
+                     C2Config::PCM_24,
+                     C2Config::PCM_32})
                 })
                 .withSetter((Setter<decltype(*mPcmEncodingInfo)>::StrictValueWithNoDeps))
                 .build());
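
With PCM_24 and PCM_32 accepted by the setter, clients can now route high-bit-depth PCM through this component. A hedged sketch (not from this patch), assuming the raw decoder exposes C2StreamPcmEncodingInfo as an output-stream parameter and comp is an assumed component handle:

    // Declare 24-bit PCM on the raw decoder.
    C2StreamPcmEncodingInfo::output pcmEncoding(0u /* stream */, C2Config::PCM_24);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    comp->config({&pcmEncoding}, C2_MAY_BLOCK, &failures);
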
diff --git a/media/codec2/core/include/C2Buffer.h b/media/codec2/core/include/C2Buffer.h
index fe37b05..a5d6fbf 100644
--- a/media/codec2/core/include/C2Buffer.h
+++ b/media/codec2/core/include/C2Buffer.h
@@ -642,7 +642,8 @@
      * \retval C2_REFUSED   no permission to complete the allocation
      * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this allocator does not support 1D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+     *                      (unexpected)
      */
     virtual c2_status_t newLinearAllocation(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -666,7 +667,8 @@
      * \retval C2_REFUSED   no permission to recreate the allocation
      * \retval C2_BAD_VALUE invalid handle (caller error)
      * \retval C2_OMITTED   this allocator does not support 1D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+     *                      (unexpected)
      */
     virtual c2_status_t priorLinearAllocation(
             const C2Handle *handle __unused,
@@ -699,7 +701,8 @@
      * \retval C2_REFUSED   no permission to complete the allocation
      * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this allocator does not support 2D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+     *                      (unexpected)
      */
     virtual c2_status_t newGraphicAllocation(
             uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
@@ -724,7 +727,8 @@
      * \retval C2_REFUSED   no permission to recreate the allocation
      * \retval C2_BAD_VALUE invalid handle (caller error)
      * \retval C2_OMITTED   this allocator does not support 2D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during recreation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during recreation
+     *                      (unexpected)
      */
     virtual c2_status_t priorGraphicAllocation(
             const C2Handle *handle __unused,
@@ -908,7 +912,8 @@
      * \retval C2_REFUSED   no permission to complete any required allocation
      * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this pool does not support linear blocks
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
      */
     virtual c2_status_t fetchLinearBlock(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -937,7 +942,8 @@
      * \retval C2_REFUSED   no permission to complete any required allocation
      * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this pool does not support circular blocks
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
      */
     virtual c2_status_t fetchCircularBlock(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -969,7 +975,8 @@
      * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller
      *                      error)
      * \retval C2_OMITTED   this pool does not support 2D blocks
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
      */
     virtual c2_status_t fetchGraphicBlock(
             uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
@@ -980,6 +987,90 @@
     }
 
     virtual ~C2BlockPool() = default;
+
+    /**
+     * Blocking fetch for linear block. Obtains a linear writable block of given |capacity|
+     * and |usage|. If a block can be successfully obtained, the block is stored in |block|,
+     * |fence| is set to a null-fence and C2_OK is returned.
+     *
+     * If a block cannot be obtained immediately, |block| is set to nullptr, a waitable fence
+     * is stored into |fence| and C2_BLOCKING is returned.
+     *
+     * Otherwise, |block| is set to nullptr and |fence| is set to a null-fence. The waitable
+     * fence returned in the C2_BLOCKING case is signalled when the temporary restriction on
+     * fetch is lifted, e.g. when more memory is available to fetch because some memory or
+     * prior blocks were released.
+     *
+     * \param capacity the size of requested block.
+     * \param usage    the memory usage info for the requested block. Returned blocks will be
+     *                 optimized for this usage, but may be used with any usage. One exception:
+     *                 protected blocks/buffers can only be used in a protected scenario.
+     * \param block    pointer to where the obtained block shall be stored on success. nullptr will
+     *                 be stored here on failure
+     * \param fence    pointer to where the fence shall be stored on C2_BLOCKING error.
+     *
+     * \retval C2_OK        the operation was successful
+     * \retval C2_NO_MEMORY not enough memory to complete any required allocation
+     * \retval C2_TIMED_OUT the operation timed out
+     * \retval C2_BLOCKING  the operation is blocked
+     * \retval C2_REFUSED   no permission to complete any required allocation
+     * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
+     * \retval C2_OMITTED   this pool does not support linear blocks or fences.
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
+     */
+    virtual c2_status_t fetchLinearBlock(
+            uint32_t capacity __unused, C2MemoryUsage usage __unused,
+            std::shared_ptr<C2LinearBlock> *block /* nonnull */,
+            C2Fence *fence /* nonnull */) {
+        *block = nullptr;
+        (void) fence;
+        return C2_OMITTED;
+    }
+
+    /**
+     * Blocking fetch for 2D graphic block. Obtains a 2D graphic writable block of given |width|,
+     * |height|, |format| and |usage|. If a block can be successfully obtained, the block is
+     * stored in |block|, |fence| is set to a null-fence and C2_OK is returned.
+     *
+     * If a block cannot be obtained immediately, |block| is set to nullptr, a waitable fence
+     * is stored into |fence| and C2_BLOCKING is returned.
+     *
+     * Otherwise, |block| is set to nullptr and |fence| is set to a null-fence. The waitable
+     * fence returned in the C2_BLOCKING case is signalled when the temporary restriction on
+     * fetch is lifted, e.g. when more memory is available to fetch because some memory or
+     * prior blocks were released.
+     *
+     * \param width  the width of requested block (the obtained block could be slightly larger, e.g.
+     *               to accommodate any system-required alignment)
+     * \param height the height of requested block (the obtained block could be slightly larger,
+     *               e.g. to accommodate any system-required alignment)
+     * \param format the pixel format of requested block. This could be a vendor specific format.
+     * \param usage  the memory usage info for the requested block. Returned blocks will be
+     *               optimized for this usage, but may be used with any usage. One exception:
+     *               protected blocks/buffers can only be used in a protected scenario.
+     * \param block  pointer to where the obtained block shall be stored on success. nullptr
+     *               will be stored here on failure
+     * \param fence  pointer to where the fence shall be stored on C2_BLOCKING error.
+     *
+     * \retval C2_OK        the operation was successful
+     * \retval C2_NO_MEMORY not enough memory to complete any required allocation
+     * \retval C2_TIMED_OUT the operation timed out
+     * \retval C2_BLOCKING  the operation is blocked
+     * \retval C2_REFUSED   no permission to complete any required allocation
+     * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller
+     *                      error)
+     * \retval C2_OMITTED   this pool does not support 2D blocks or fences
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
+     */
+    virtual c2_status_t fetchGraphicBlock(
+            uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
+            C2MemoryUsage usage __unused,
+            std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+            C2Fence *fence /* nonnull */) {
+        *block = nullptr;
+        (void) fence;
+        return C2_OMITTED;
+    }
 protected:
     C2BlockPool() = default;
 };
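
The fence-returning overloads above let a caller block on allocator back-pressure instead of polling: on C2_BLOCKING, the returned C2Fence is signalled once fetching may succeed again. A hedged caller sketch (not part of this header) follows; pool, capacity and usage are assumed to be in scope, and the one-second timeout is illustrative.

    std::shared_ptr<C2LinearBlock> block;
    C2Fence fence;
    c2_status_t err = pool->fetchLinearBlock(capacity, usage, &block, &fence);
    while (err == C2_BLOCKING) {
        // Wait for the pool to lift the temporary restriction, then retry.
        if (fence.wait(1000000000 /* 1s in ns */) != C2_OK) {
            break;  // surface the timeout/error instead of spinning
        }
        err = pool->fetchLinearBlock(capacity, usage, &block, &fence);
    }
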
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 752140a..6176646 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -75,6 +75,10 @@
     enum tiling_mode_t : uint32_t;          ///< tiling modes
 };
 
+struct C2PlatformConfig {
+    enum encoding_quality_level_t : uint32_t; ///< encoding quality level
+};
+
 namespace {
 
 enum C2ParamIndexKind : C2Param::type_index_t {
@@ -187,6 +191,8 @@
     kParamIndexPictureType,
     kParamIndexHdr10PlusMetadata,
 
+    kParamIndexPictureQuantization,
+
     /* ------------------------------------ video components ------------------------------------ */
 
     kParamIndexFrameRate = C2_PARAM_INDEX_VIDEO_PARAM_START,
@@ -257,7 +263,11 @@
     kParamIndexTunnelHandle, // int32[]
     kParamIndexTunnelSystemTime, // int64
 
+    // dmabuf allocator
     kParamIndexStoreDmaBufUsage,  // store, struct
+
+    // encoding quality requirements
+    kParamIndexEncodingQualityLevel, // encoders, enum
 };
 
 }
@@ -392,6 +402,7 @@
     _C2_PL_VP9_BASE  = 0x7000,
     _C2_PL_DV_BASE   = 0x8000,
     _C2_PL_AV1_BASE  = 0x9000,
+    _C2_PL_VP8_BASE  = 0xA000,
 
     C2_PROFILE_LEVEL_VENDOR_START = 0x70000000,
 };
@@ -545,6 +556,12 @@
     PROFILE_AV1_0 = _C2_PL_AV1_BASE,            ///< AV1 Profile 0 (4:2:0, 8 to 10 bit)
     PROFILE_AV1_1,                              ///< AV1 Profile 1 (8 to 10 bit)
     PROFILE_AV1_2,                              ///< AV1 Profile 2 (8 to 12 bit)
+
+    // VP8 profiles
+    PROFILE_VP8_0 = _C2_PL_VP8_BASE,            ///< VP8 Profile 0
+    PROFILE_VP8_1,                              ///< VP8 Profile 1
+    PROFILE_VP8_2,                              ///< VP8 Profile 2
+    PROFILE_VP8_3,                              ///< VP8 Profile 3
 };
 
 enum C2Config::level_t : uint32_t {
@@ -1699,6 +1716,31 @@
 constexpr char C2_PARAMKEY_GOP[] = "coding.gop";
 
 /**
+ * Picture quantization
+ *
+ * Bounds (minimum/maximum QP) for each picture type.
+ */
+struct C2PictureQuantizationStruct {
+    C2PictureQuantizationStruct() : type_((C2Config::picture_type_t)0),
+                                    min(INT32_MIN), max(INT32_MAX) {}
+    C2PictureQuantizationStruct(C2Config::picture_type_t type, int32_t min_, int32_t max_)
+        : type_(type), min(min_), max(max_) { }
+
+    C2Config::picture_type_t type_;
+    int32_t min;      // INT32_MIN == 'no lower bound specified'
+    int32_t max;      // INT32_MAX == 'no upper bound specified'
+
+    DEFINE_AND_DESCRIBE_C2STRUCT(PictureQuantization)
+    C2FIELD(type_, "type")
+    C2FIELD(min, "min")
+    C2FIELD(max, "max")
+};
+
+typedef C2StreamParam<C2Tuning, C2SimpleArrayStruct<C2PictureQuantizationStruct>,
+        kParamIndexPictureQuantization> C2StreamPictureQuantizationTuning;
+constexpr char C2_PARAMKEY_PICTURE_QUANTIZATION[] = "coding.qp";
+
+/**
  * Sync frame can be requested on demand by the client.
  *
  * If true, the next I frame shall be encoded as a sync frame. This config can be passed
@@ -1874,7 +1916,9 @@
 C2ENUM(C2Config::pcm_encoding_t, uint32_t,
     PCM_16,
     PCM_8,
-    PCM_FLOAT
+    PCM_FLOAT,
+    PCM_24,
+    PCM_32
 )
 
 typedef C2StreamParam<C2Info, C2SimpleValueStruct<C2Config::pcm_encoding_t>, kParamIndexPcmEncoding>
@@ -2304,6 +2348,23 @@
         C2PortTunnelSystemTime;
 constexpr char C2_PARAMKEY_OUTPUT_RENDER_TIME[] = "output.render-time";
 
+C2ENUM(C2PlatformConfig::encoding_quality_level_t, uint32_t,
+    NONE,
+    S_HANDHELD,
+    S_HANDHELD_PC
+);
+
+namespace android {
+
+/**
+ * Encoding quality level signaling.
+ */
+typedef C2GlobalParam<C2Setting,
+        C2SimpleValueStruct<C2EasyEnum<C2PlatformConfig::encoding_quality_level_t>>,
+        kParamIndexEncodingQualityLevel> C2EncodingQualityLevel;
+
+}
+
 /// @}
 
 #endif  // C2CONFIG_H_
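
C2StreamPictureQuantizationTuning introduced above is a flexible-array tuning: each entry bounds the QP used for one picture type. A hedged sketch of building one follows; allocation uses the usual AllocUnique pattern for flexible C2 params, and the [1, 51] range is illustrative.

    std::vector<C2PictureQuantizationStruct> qpBounds = {
        {C2Config::I_FRAME, 1, 51},  // clamp I-frame QP to an AVC/HEVC-style range
        {C2Config::P_FRAME, 1, 51},
        {C2Config::B_FRAME, 1, 51},
    };
    std::unique_ptr<C2StreamPictureQuantizationTuning::output> qp =
            C2StreamPictureQuantizationTuning::output::AllocUnique(qpBounds.size(), 0u /* stream */);
    for (size_t i = 0; i < qpBounds.size(); ++i) {
        qp->m.values[i] = qpBounds[i];
    }
    // The filled tuning can then be passed to an encoder via config().
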
diff --git a/media/codec2/hidl/1.0/vts/.clang-format b/media/codec2/hidl/1.0/vts/.clang-format
new file mode 120000
index 0000000..136279c
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/.clang-format
@@ -0,0 +1 @@
+../../../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 3a47ae9..efc5813 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -33,14 +33,40 @@
 using android::C2AllocatorIon;
 
 #include "media_c2_hidl_test_common.h"
+using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<DecodeTestParameters> kDecodeTestParameters;
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kDecodeTestParameters;
+using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
+static std::vector<CsdFlushTestParameters> kCsdFlushTestParameters;
 
-static std::vector<std::tuple<std::string, std::string, std::string>> kCsdFlushTestParameters;
+struct CompToURL {
+    std::string mime;
+    std::string mURL;
+    std::string info;
+};
 
-// Resource directory
-static std::string sResourceDir = "";
+std::vector<CompToURL> kCompToURL = {
+        {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.info"},
+        {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac",
+         "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"},
+        {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.info"},
+        {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3",
+         "bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"},
+        {"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz.info"},
+        {"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb",
+         "sine_amrnb_1ch_12kbps_8000hz_multi_frame.info"},
+        {"amr-wb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb", "bbb_amrwb_1ch_14kbps_16000hz.info"},
+        {"amr-wb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
+         "bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info"},
+        {"vorbis", "bbb_vorbis_stereo_128kbps_48000hz.vorbis",
+         "bbb_vorbis_stereo_128kbps_48000hz.info"},
+        {"opus", "bbb_opus_stereo_128kbps_48000hz.opus", "bbb_opus_stereo_128kbps_48000hz.info"},
+        {"g711-alaw", "bbb_g711alaw_1ch_8khz.raw", "bbb_g711alaw_1ch_8khz.info"},
+        {"g711-mlaw", "bbb_g711mulaw_1ch_8khz.raw", "bbb_g711mulaw_1ch_8khz.info"},
+        {"gsm", "bbb_gsm_1ch_8khz_13kbps.raw", "bbb_gsm_1ch_8khz_13kbps.info"},
+        {"raw", "bbb_raw_1ch_8khz_s32le.raw", "bbb_raw_1ch_8khz_s32le.info"},
+        {"flac", "bbb_flac_stereo_680kbps_48000hz.flac", "bbb_flac_stereo_680kbps_48000hz.info"},
+};
 
 class LinearBuffer : public C2Buffer {
   public:
@@ -76,33 +102,17 @@
         mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
         ASSERT_NE(mLinearPool, nullptr);
 
-        mCompName = unknown_comp;
-        struct StringToName {
-            const char* Name;
-            standardComp CompName;
-        };
-        const StringToName kStringToName[] = {
-                {"xaac", xaac},          {"mp3", mp3}, {"amrnb", amrnb},
-                {"amrwb", amrwb},        {"aac", aac}, {"vorbis", vorbis},
-                {"opus", opus},          {"pcm", pcm}, {"g711.alaw", g711alaw},
-                {"g711.mlaw", g711mlaw}, {"gsm", gsm}, {"raw", raw},
-                {"flac", flac},
-        };
-        const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
+        std::vector<std::unique_ptr<C2Param>> queried;
+        mComponent->query({}, {C2PortMediaTypeSetting::input::PARAM_TYPE}, C2_DONT_BLOCK, &queried);
+        ASSERT_GT(queried.size(), 0);
 
-        // Find the component type
-        for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
-                mCompName = kStringToName[i].CompName;
-                break;
-            }
-        }
+        mMime = ((C2PortMediaTypeSetting::input*)queried[0].get())->m.value;
+
         mEos = false;
         mFramesReceived = 0;
         mTimestampUs = 0u;
         mWorkResult = C2_OK;
         mTimestampDevTest = false;
-        if (mCompName == unknown_comp) mDisableTest = true;
         if (mDisableTest) std::cout << "[   WARN   ] Test Disabled \n";
     }
 
@@ -119,6 +129,8 @@
 
     virtual void validateTimestampList(int32_t* bitStreamInfo);
 
+    void GetURLForComponent(char* mURL, char* info, size_t streamIndex = 0);
+
     struct outputMetaData {
         uint64_t timestampUs;
         uint32_t rangeLength;
@@ -158,29 +170,12 @@
         }
     }
 
-    enum standardComp {
-        xaac,
-        mp3,
-        amrnb,
-        amrwb,
-        aac,
-        vorbis,
-        opus,
-        pcm,
-        g711alaw,
-        g711mlaw,
-        gsm,
-        raw,
-        flac,
-        unknown_comp,
-    };
-
+    std::string mMime;
     std::string mInstanceName;
     std::string mComponentName;
     bool mEos;
     bool mDisableTest;
     bool mTimestampDevTest;
-    standardComp mCompName;
 
     int32_t mWorkResult;
     uint64_t mTimestampUs;
@@ -207,9 +202,8 @@
     }
 };
 
-class Codec2AudioDecHidlTest
-    : public Codec2AudioDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2AudioDecHidlTest : public Codec2AudioDecHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -217,7 +211,7 @@
 };
 
 void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
-                       Codec2AudioDecHidlTest::standardComp compName, bool& disableTest) {
+                       bool& disableTest) {
     // Validate its a C2 Component
     if (component->getName().find("c2") == std::string::npos) {
         ALOGE("Not a c2 component");
@@ -244,13 +238,6 @@
             return;
         }
     }
-
-    // Validates component name
-    if (compName == Codec2AudioDecHidlTest::unknown_comp) {
-        ALOGE("Component InValid");
-        disableTest = true;
-        return;
-    }
     ALOGV("Component Valid");
 }
 
@@ -271,7 +258,7 @@
 // parsing the header of elementary stream. Client needs to collect this
 // information and reconfigure
 void getInputChannelInfo(const std::shared_ptr<android::Codec2Client::Component>& component,
-                         Codec2AudioDecHidlTest::standardComp compName, int32_t* bitStreamInfo) {
+                         std::string mime, int32_t* bitStreamInfo) {
     // query nSampleRate and nChannels
     std::initializer_list<C2Param::Index> indices{
             C2StreamSampleRateInfo::output::PARAM_TYPE,
@@ -288,89 +275,29 @@
             C2Param* param = inParams[i].get();
             bitStreamInfo[i] = *(int32_t*)((uint8_t*)param + offset);
         }
-        switch (compName) {
-            case Codec2AudioDecHidlTest::amrnb: {
-                ASSERT_EQ(bitStreamInfo[0], 8000);
-                ASSERT_EQ(bitStreamInfo[1], 1);
-                break;
-            }
-            case Codec2AudioDecHidlTest::amrwb: {
-                ASSERT_EQ(bitStreamInfo[0], 16000);
-                ASSERT_EQ(bitStreamInfo[1], 1);
-                break;
-            }
-            case Codec2AudioDecHidlTest::gsm: {
-                ASSERT_EQ(bitStreamInfo[0], 8000);
-                break;
-            }
-            default:
-                break;
+        if (mime.find("3gpp") != std::string::npos) {
+            ASSERT_EQ(bitStreamInfo[0], 8000);
+            ASSERT_EQ(bitStreamInfo[1], 1);
+        } else if (mime.find("amr-wb") != std::string::npos) {
+            ASSERT_EQ(bitStreamInfo[0], 16000);
+            ASSERT_EQ(bitStreamInfo[1], 1);
+        } else if (mime.find("gsm") != std::string::npos) {
+            ASSERT_EQ(bitStreamInfo[0], 8000);
         }
     }
 }
 
-// number of elementary streams per component
-#define STREAM_COUNT 2
-
 // LookUpTable of clips and metadata for component testing
-void GetURLForComponent(Codec2AudioDecHidlTest::standardComp comp, char* mURL, char* info,
-                        size_t streamIndex = 0) {
-    struct CompToURL {
-        Codec2AudioDecHidlTest::standardComp comp;
-        const char mURL[STREAM_COUNT][512];
-        const char info[STREAM_COUNT][512];
-    };
-    ASSERT_TRUE(streamIndex < STREAM_COUNT);
-
-    static const CompToURL kCompToURL[] = {
-            {Codec2AudioDecHidlTest::standardComp::xaac,
-             {"bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.aac"},
-             {"bbb_aac_stereo_128kbps_48000hz.info",
-              "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::mp3,
-             {"bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.mp3"},
-             {"bbb_mp3_stereo_192kbps_48000hz.info",
-              "bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::aac,
-             {"bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.aac"},
-             {"bbb_aac_stereo_128kbps_48000hz.info",
-              "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::amrnb,
-             {"sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz.amrnb"},
-             {"sine_amrnb_1ch_12kbps_8000hz.info",
-              "sine_amrnb_1ch_12kbps_8000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::amrwb,
-             {"bbb_amrwb_1ch_14kbps_16000hz.amrwb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb"},
-             {"bbb_amrwb_1ch_14kbps_16000hz.info",
-              "bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::vorbis,
-             {"bbb_vorbis_stereo_128kbps_48000hz.vorbis", ""},
-             {"bbb_vorbis_stereo_128kbps_48000hz.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::opus,
-             {"bbb_opus_stereo_128kbps_48000hz.opus", ""},
-             {"bbb_opus_stereo_128kbps_48000hz.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::g711alaw,
-             {"bbb_g711alaw_1ch_8khz.raw", ""},
-             {"bbb_g711alaw_1ch_8khz.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::g711mlaw,
-             {"bbb_g711mulaw_1ch_8khz.raw", ""},
-             {"bbb_g711mulaw_1ch_8khz.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::gsm,
-             {"bbb_gsm_1ch_8khz_13kbps.raw", ""},
-             {"bbb_gsm_1ch_8khz_13kbps.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::raw,
-             {"bbb_raw_1ch_8khz_s32le.raw", ""},
-             {"bbb_raw_1ch_8khz_s32le.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::flac,
-             {"bbb_flac_stereo_680kbps_48000hz.flac", ""},
-             {"bbb_flac_stereo_680kbps_48000hz.info", ""}},
-    };
-
-    for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
-        if (kCompToURL[i].comp == comp) {
-            strcat(mURL, kCompToURL[i].mURL[streamIndex]);
-            strcat(info, kCompToURL[i].info[streamIndex]);
-            return;
+void Codec2AudioDecHidlTestBase::GetURLForComponent(char* mURL, char* info, size_t streamIndex) {
+    int streamCount = 0;
+    for (size_t i = 0; i < kCompToURL.size(); ++i) {
+        if (mMime.find(kCompToURL[i].mime) != std::string::npos) {
+            if (streamCount == streamIndex) {
+                strcat(mURL, kCompToURL[i].mURL.c_str());
+                strcat(info, kCompToURL[i].info.c_str());
+                return;
+            }
+            streamCount++;
         }
     }
 }
@@ -461,7 +388,7 @@
 void Codec2AudioDecHidlTestBase::validateTimestampList(int32_t* bitStreamInfo) {
     uint32_t samplesReceived = 0;
     // Update SampleRate and ChannelCount
-    ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+    ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     int32_t nSampleRate = bitStreamInfo[0];
     int32_t nChannels = bitStreamInfo[1];
     std::list<uint64_t>::iterator itIn = mTimestampUslist.begin();
@@ -486,7 +413,7 @@
 TEST_P(Codec2AudioDecHidlTest, validateCompName) {
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ALOGV("Checks if the given component is a valid audio component");
-    validateComponent(mComponent, mCompName, mDisableTest);
+    validateComponent(mComponent, mDisableTest);
     ASSERT_EQ(mDisableTest, false);
 }
 
@@ -495,15 +422,13 @@
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ASSERT_EQ(mComponent->start(), C2_OK);
     int32_t bitStreamInfo[2] = {0};
-    ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+    ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     setupConfigParam(mComponent, bitStreamInfo);
     ASSERT_EQ(mComponent->stop(), C2_OK);
 }
 
-class Codec2AudioDecDecodeTest
-    : public Codec2AudioDecHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2AudioDecDecodeTest : public Codec2AudioDecHidlTestBase,
+                                 public ::testing::WithParamInterface<DecodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -514,16 +439,15 @@
     description("Decodes input file");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    uint32_t streamIndex = std::stoi(std::get<2>(GetParam()));
-    ;
-    bool signalEOS = !std::get<3>(GetParam()).compare("true");
+    uint32_t streamIndex = std::get<2>(GetParam());
+    bool signalEOS = std::get<3>(GetParam());
     mTimestampDevTest = true;
     char mURL[512], info[512];
     android::Vector<FrameInfo> Info;
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info, streamIndex);
+    GetURLForComponent(mURL, info, streamIndex);
     if (!strcmp(mURL, sResourceDir.c_str())) {
         ALOGV("EMPTY INPUT sResourceDir.c_str() %s mURL  %s ", sResourceDir.c_str(), mURL);
         return;
@@ -536,11 +460,11 @@
     mFramesReceived = 0;
     mTimestampUs = 0;
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
@@ -591,17 +515,17 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
 
     int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
     ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
 
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
@@ -683,17 +607,17 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
 
     int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
     ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
 
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
@@ -768,7 +692,7 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
 
     eleInfo.open(info);
     ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
@@ -798,11 +722,11 @@
     }
     eleInfo.close();
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
@@ -833,9 +757,8 @@
     ASSERT_EQ(mComponent->stop(), C2_OK);
 }
 
-class Codec2AudioDecCsdInputTests
-    : public Codec2AudioDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string, std::string>> {
+class Codec2AudioDecCsdInputTests : public Codec2AudioDecHidlTestBase,
+                                    public ::testing::WithParamInterface<CsdFlushTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -853,7 +776,7 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
     if (!strcmp(mURL, sResourceDir.c_str())) {
         ALOGV("EMPTY INPUT sResourceDir.c_str() %s mURL  %s ", sResourceDir.c_str(), mURL);
         return;
@@ -864,11 +787,11 @@
     ASSERT_GE(numCsds, 0) << "Error in parsing input info file";
 
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
@@ -881,7 +804,7 @@
     ASSERT_EQ(eleStream.is_open(), true);
 
     bool signalEOS = false;
-    bool flushCsd = !std::get<2>(GetParam()).compare("true");
+    bool flushCsd = std::get<2>(GetParam());
     ALOGV("sending %d csd data ", numCsds);
     int framesToDecode = numCsds;
     ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
@@ -937,44 +860,36 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioDecHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 // DecodeTest with StreamIndex and EOS / No EOS
 INSTANTIATE_TEST_SUITE_P(StreamIndexAndEOS, Codec2AudioDecDecodeTest,
                          testing::ValuesIn(kDecodeTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_SUITE_P(CsdInputs, Codec2AudioDecCsdInputTests,
                          testing::ValuesIn(kCsdFlushTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 }  // anonymous namespace
 
 int main(int argc, char** argv) {
+    parseArgs(argc, argv);
     kTestParameters = getTestParameters(C2Component::DOMAIN_AUDIO, C2Component::KIND_DECODER);
     for (auto params : kTestParameters) {
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true));
 
         kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true));
         kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false"));
-    }
-
-    // Set the resource directory based on command line args.
-    // Test will fail to set up if the argument is not set.
-    for (int i = 1; i < argc; i++) {
-        if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
-            sResourceDir = argv[i + 1];
-            break;
-        }
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
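
The refactor above derives a component's identity from its advertised media type instead of pattern-matching its name. A hedged standalone sketch of that query, mirroring the SetUp() change with an added status/size check (comp is an assumed Codec2Client::Component handle):

    std::vector<std::unique_ptr<C2Param>> queried;
    c2_status_t st = comp->query({}, {C2PortMediaTypeSetting::input::PARAM_TYPE},
                                 C2_DONT_BLOCK, &queried);
    std::string mime;
    if (st == C2_OK && !queried.empty() && queried[0]) {
        mime = ((C2PortMediaTypeSetting::input*)queried[0].get())->m.value;
    }
    // e.g. "audio/mp4a-latm" for the AAC decoder; the tests then branch on
    // substrings of this string instead of a hand-maintained enum.
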
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
index e3a4f68..562c77f 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
@@ -35,11 +35,9 @@
 
 #include "media_c2_hidl_test_common.h"
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kEncodeTestParameters;
+using EncodeTestParameters = std::tuple<std::string, std::string, bool, int32_t>;
 
-// Resource directory
-static std::string sResourceDir = "";
+static std::vector<EncodeTestParameters> kEncodeTestParameters;
 
 class LinearBuffer : public C2Buffer {
   public:
@@ -75,30 +73,17 @@
         mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
         ASSERT_NE(mLinearPool, nullptr);
 
-        mCompName = unknown_comp;
-        struct StringToName {
-            const char* Name;
-            standardComp CompName;
-        };
-        const StringToName kStringToName[] = {
-                {"aac", aac}, {"flac", flac}, {"opus", opus}, {"amrnb", amrnb}, {"amrwb", amrwb},
-        };
-        const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
+        std::vector<std::unique_ptr<C2Param>> queried;
+        mComponent->query({}, {C2PortMediaTypeSetting::output::PARAM_TYPE}, C2_DONT_BLOCK,
+                          &queried);
+        ASSERT_GT(queried.size(), 0);
 
-        // Find the component type
-        for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
-                mCompName = kStringToName[i].CompName;
-                break;
-            }
-        }
+        mMime = ((C2PortMediaTypeSetting::output*)queried[0].get())->m.value;
         mEos = false;
         mCsd = false;
         mFramesReceived = 0;
         mWorkResult = C2_OK;
         mOutputSize = 0u;
-        if (mCompName == unknown_comp) mDisableTest = true;
-        if (mDisableTest) std::cout << "[   WARN   ] Test Disabled \n";
         getInputMaxBufSize();
     }
 
@@ -113,6 +98,8 @@
     // Get the test parameters from GetParam call.
     virtual void getParams() {}
 
+    void GetURLForComponent(char* mURL);
+
     // callback function to process onWorkDone received by Listener
     void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
         for (std::unique_ptr<C2Work>& work : workItems) {
@@ -133,21 +120,13 @@
             }
         }
     }
-    enum standardComp {
-        aac,
-        flac,
-        opus,
-        amrnb,
-        amrwb,
-        unknown_comp,
-    };
 
+    std::string mMime;
     std::string mInstanceName;
     std::string mComponentName;
     bool mEos;
     bool mCsd;
     bool mDisableTest;
-    standardComp mCompName;
 
     int32_t mWorkResult;
     uint32_t mFramesReceived;
@@ -192,9 +171,8 @@
     }
 };
 
-class Codec2AudioEncHidlTest
-    : public Codec2AudioEncHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2AudioEncHidlTest : public Codec2AudioEncHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -202,7 +180,7 @@
 };
 
 void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
-                       Codec2AudioEncHidlTest::standardComp compName, bool& disableTest) {
+                       bool& disableTest) {
     // Validate its a C2 Component
     if (component->getName().find("c2") == std::string::npos) {
         ALOGE("Not a c2 component");
@@ -229,13 +207,6 @@
             return;
         }
     }
-
-    // Validates component name
-    if (compName == Codec2AudioEncHidlTest::unknown_comp) {
-        ALOGE("Component InValid");
-        disableTest = true;
-        return;
-    }
     ALOGV("Component Valid");
 }
 
@@ -253,56 +224,48 @@
 }
 
 // Get config params for a component
-bool getConfigParams(Codec2AudioEncHidlTest::standardComp compName, int32_t* nChannels,
-                     int32_t* nSampleRate, int32_t* samplesPerFrame) {
-    switch (compName) {
-        case Codec2AudioEncHidlTest::aac:
-            *nChannels = 2;
-            *nSampleRate = 48000;
-            *samplesPerFrame = 1024;
-            break;
-        case Codec2AudioEncHidlTest::flac:
-            *nChannels = 2;
-            *nSampleRate = 48000;
-            *samplesPerFrame = 1152;
-            break;
-        case Codec2AudioEncHidlTest::opus:
-            *nChannels = 2;
-            *nSampleRate = 48000;
-            *samplesPerFrame = 960;
-            break;
-        case Codec2AudioEncHidlTest::amrnb:
-            *nChannels = 1;
-            *nSampleRate = 8000;
-            *samplesPerFrame = 160;
-            break;
-        case Codec2AudioEncHidlTest::amrwb:
-            *nChannels = 1;
-            *nSampleRate = 16000;
-            *samplesPerFrame = 160;
-            break;
-        default:
-            return false;
-    }
+bool getConfigParams(std::string mime, int32_t* nChannels, int32_t* nSampleRate,
+                     int32_t* samplesPerFrame) {
+    if (mime.find("mp4a-latm") != std::string::npos) {
+        *nChannels = 2;
+        *nSampleRate = 48000;
+        *samplesPerFrame = 1024;
+    } else if (mime.find("flac") != std::string::npos) {
+        *nChannels = 2;
+        *nSampleRate = 48000;
+        *samplesPerFrame = 1152;
+    } else if (mime.find("opus") != std::string::npos) {
+        *nChannels = 2;
+        *nSampleRate = 48000;
+        *samplesPerFrame = 960;
+    } else if (mime.find("3gpp") != std::string::npos) {
+        *nChannels = 1;
+        *nSampleRate = 8000;
+        *samplesPerFrame = 160;
+    } else if (mime.find("amr-wb") != std::string::npos) {
+        *nChannels = 1;
+        *nSampleRate = 16000;
+        *samplesPerFrame = 160;
+    } else
+        return false;
+
     return true;
 }
 
 // LookUpTable of clips and metadata for component testing
-void GetURLForComponent(Codec2AudioEncHidlTest::standardComp comp, char* mURL) {
+void Codec2AudioEncHidlTestBase::GetURLForComponent(char* mURL) {
     struct CompToURL {
-        Codec2AudioEncHidlTest::standardComp comp;
+        std::string mime;
         const char* mURL;
     };
     static const CompToURL kCompToURL[] = {
-            {Codec2AudioEncHidlTest::standardComp::aac, "bbb_raw_2ch_48khz_s16le.raw"},
-            {Codec2AudioEncHidlTest::standardComp::amrnb, "bbb_raw_1ch_8khz_s16le.raw"},
-            {Codec2AudioEncHidlTest::standardComp::amrwb, "bbb_raw_1ch_16khz_s16le.raw"},
-            {Codec2AudioEncHidlTest::standardComp::flac, "bbb_raw_2ch_48khz_s16le.raw"},
-            {Codec2AudioEncHidlTest::standardComp::opus, "bbb_raw_2ch_48khz_s16le.raw"},
+            {"mp4a-latm", "bbb_raw_2ch_48khz_s16le.raw"}, {"3gpp", "bbb_raw_1ch_8khz_s16le.raw"},
+            {"amr-wb", "bbb_raw_1ch_16khz_s16le.raw"},    {"flac", "bbb_raw_2ch_48khz_s16le.raw"},
+            {"opus", "bbb_raw_2ch_48khz_s16le.raw"},
     };
 
     for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
-        if (kCompToURL[i].comp == comp) {
+        if (mMime.find(kCompToURL[i].mime) != std::string::npos) {
             strcat(mURL, kCompToURL[i].mURL);
             return;
         }
@@ -395,14 +358,12 @@
 TEST_P(Codec2AudioEncHidlTest, validateCompName) {
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ALOGV("Checks if the given component is a valid audio component");
-    validateComponent(mComponent, mCompName, mDisableTest);
+    validateComponent(mComponent, mDisableTest);
     ASSERT_EQ(mDisableTest, false);
 }
 
-class Codec2AudioEncEncodeTest
-    : public Codec2AudioEncHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2AudioEncEncodeTest : public Codec2AudioEncHidlTestBase,
+                                 public ::testing::WithParamInterface<EncodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -414,17 +375,17 @@
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     char mURL[512];
     strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL);
-    bool signalEOS = !std::get<2>(GetParam()).compare("true");
+    GetURLForComponent(mURL);
+    bool signalEOS = std::get<2>(GetParam());
     // Ratio w.r.t to mInputMaxBufSize
-    int32_t inputMaxBufRatio = std::stoi(std::get<3>(GetParam()));
+    int32_t inputMaxBufRatio = std::get<3>(GetParam());
 
     int32_t nChannels;
     int32_t nSampleRate;
     int32_t samplesPerFrame;
 
-    if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
-        std::cout << "Failed to get the config params for " << mCompName << " component\n";
+    if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+        std::cout << "Failed to get the config params for " << mComponentName << "\n";
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
@@ -464,11 +425,9 @@
         ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
         ASSERT_TRUE(false);
     }
-    if ((mCompName == flac || mCompName == opus || mCompName == aac)) {
-        if (!mCsd) {
-            ALOGE("CSD buffer missing");
-            ASSERT_TRUE(false);
-        }
+    if ((mMime.find("flac") != std::string::npos) || (mMime.find("opus") != std::string::npos) ||
+        (mMime.find("mp4a-latm") != std::string::npos)) {
+        ASSERT_TRUE(mCsd) << "CSD buffer missing";
     }
     ASSERT_EQ(mEos, true);
     ASSERT_EQ(mComponent->stop(), C2_OK);
@@ -522,15 +481,15 @@
 
     char mURL[512];
     strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL);
+    GetURLForComponent(mURL);
 
     mFlushedIndices.clear();
     int32_t nChannels;
     int32_t nSampleRate;
     int32_t samplesPerFrame;
 
-    if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
-        std::cout << "Failed to get the config params for " << mCompName << " component\n";
+    if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+        std::cout << "Failed to get the config params for " << mComponentName << "\n";
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
@@ -587,7 +546,7 @@
 
     char mURL[512];
     strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL);
+    GetURLForComponent(mURL);
 
     std::ifstream eleStream;
     eleStream.open(mURL, std::ifstream::binary);
@@ -600,8 +559,8 @@
     int32_t numFrames = 16;
     int32_t maxChannelCount = 8;
 
-    if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
-        std::cout << "Failed to get the config params for " << mCompName << " component\n";
+    if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+        std::cout << "Failed to get the config params for " << mComponentName << "\n";
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
@@ -611,7 +570,7 @@
 
     // Looping through the maximum number of channel count supported by encoder
     for (nChannels = 1; nChannels < maxChannelCount; nChannels++) {
-        ALOGV("Configuring %u encoder for channel count = %d", mCompName, nChannels);
+        ALOGV("Configuring encoder %s  for channel count = %d", mComponentName.c_str(), nChannels);
         if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
             std::cout << "[   WARN   ] Test Skipped \n";
             return;
@@ -668,7 +627,9 @@
             ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
             ASSERT_TRUE(false);
         }
-        if ((mCompName == flac || mCompName == opus || mCompName == aac)) {
+        if ((mMime.find("flac") != std::string::npos) ||
+            (mMime.find("opus") != std::string::npos) ||
+            (mMime.find("mp4a-latm") != std::string::npos)) {
             ASSERT_TRUE(mCsd) << "CSD buffer missing";
         }
         ASSERT_TRUE(mEos);
@@ -687,7 +648,7 @@
 
     char mURL[512];
     strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL);
+    GetURLForComponent(mURL);
 
     std::ifstream eleStream;
     eleStream.open(mURL, std::ifstream::binary);
@@ -699,8 +660,8 @@
     int32_t nChannels;
     int32_t numFrames = 16;
 
-    if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
-        std::cout << "Failed to get the config params for " << mCompName << " component\n";
+    if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+        std::cout << "Failed to get the config params for " << mComponentName << "\n";
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
@@ -711,7 +672,7 @@
     uint32_t prevSampleRate = 0u;
 
     for (int32_t nSampleRate : sampleRateValues) {
-        ALOGV("Configuring %u encoder for SampleRate = %d", mCompName, nSampleRate);
+        ALOGV("Configuring encoder %s  for SampleRate = %d", mComponentName.c_str(), nSampleRate);
         if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
             std::cout << "[   WARN   ] Test Skipped \n";
             return;
@@ -772,7 +733,9 @@
             ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
             ASSERT_TRUE(false);
         }
-        if ((mCompName == flac || mCompName == opus || mCompName == aac)) {
+        if ((mMime.find("flac") != std::string::npos) ||
+            (mMime.find("opus") != std::string::npos) ||
+            (mMime.find("mp4a-latm") != std::string::npos)) {
             ASSERT_TRUE(mCsd) << "CSD buffer missing";
         }
         ASSERT_TRUE(mEos);
@@ -786,36 +749,28 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioEncHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 // EncodeTest with EOS / No EOS and inputMaxBufRatio
 // inputMaxBufRatio is ratio w.r.t. to mInputMaxBufSize
 INSTANTIATE_TEST_SUITE_P(EncodeTest, Codec2AudioEncEncodeTest,
                          testing::ValuesIn(kEncodeTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 }  // anonymous namespace
 
 int main(int argc, char** argv) {
+    parseArgs(argc, argv);
     kTestParameters = getTestParameters(C2Component::DOMAIN_AUDIO, C2Component::KIND_ENCODER);
     for (auto params : kTestParameters) {
         kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false", "1"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false, 1));
         kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false", "2"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false, 2));
         kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true", "1"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true, 1));
         kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true", "2"));
-    }
-
-    // Set the resource directory based on command line args.
-    // Test will fail to set up if the argument is not set.
-    for (int i = 1; i < argc; i++) {
-        if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
-            sResourceDir = argv[i + 1];
-            break;
-        }
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true, 2));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
index 0251ec2..1f1681d 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
@@ -22,6 +22,48 @@
 
 #include <android/hardware/media/c2/1.0/IComponentStore.h>
 
+std::string sResourceDir = "";
+
+std::string sComponentNamePrefix = "";
+
+static constexpr struct option kArgOptions[] = {
+        {"res", required_argument, 0, 'P'},
+        {"prefix", required_argument, 0, 'p'},
+        {"help", required_argument, 0, 'h'},
+        {nullptr, 0, nullptr, 0},
+};
+
+void printUsage(char* me) {
+    std::cerr << "VTS tests to test codec2 components \n";
+    std::cerr << "Usage: " << me << " [options] \n";
+    std::cerr << "\t -P,  --res:    Mandatory path to a folder that contains test resources \n";
+    std::cerr << "\t -p,  --prefix: Optional prefix to select component/s to be tested \n";
+    std::cerr << "\t                    All codecs are tested by default \n";
+    std::cerr << "\t                    Eg: c2.android - test codecs starting with c2.android \n";
+    std::cerr << "\t                    Eg: c2.android.aac.decoder - test a specific codec \n";
+    std::cerr << "\t -h,  --help:   Print usage \n";
+}
+
+void parseArgs(int argc, char** argv) {
+    int arg;
+    int option_index;
+    while ((arg = getopt_long(argc, argv, ":P:p:h", kArgOptions, &option_index)) != -1) {
+        switch (arg) {
+            case 'P':
+                sResourceDir = optarg;
+                break;
+            case 'p':
+                sComponentNamePrefix = optarg;
+                break;
+            case 'h':
+                printUsage(argv[0]);
+                break;
+            default:
+                break;
+        }
+    }
+}
+
 // Test the codecs for NullBuffer, Empty Input Buffer with(out) flags set
 void testInputBuffer(const std::shared_ptr<android::Codec2Client::Component>& component,
                      std::mutex& queueLock, std::list<std::unique_ptr<C2Work>>& workQueue,
@@ -92,8 +134,7 @@
         for (size_t i = 0; i < updates.size(); ++i) {
             C2Param* param = updates[i].get();
             if (param->index() == C2StreamInitDataInfo::output::PARAM_TYPE) {
-                C2StreamInitDataInfo::output* csdBuffer =
-                        (C2StreamInitDataInfo::output*)(param);
+                C2StreamInitDataInfo::output* csdBuffer = (C2StreamInitDataInfo::output*)(param);
                 size_t csdSize = csdBuffer->flexCount();
                 if (csdSize > 0) csd = true;
             } else if ((param->index() == C2StreamSampleRateInfo::output::PARAM_TYPE) ||
@@ -118,8 +159,7 @@
             typedef std::unique_lock<std::mutex> ULock;
             ULock l(queueLock);
             workQueue.push_back(std::move(work));
-            if (!flushedIndices.empty() &&
-                (frameIndexIt != flushedIndices.end())) {
+            if (!flushedIndices.empty() && (frameIndexIt != flushedIndices.end())) {
                 flushedIndices.erase(frameIndexIt);
             }
             queueCondition.notify_all();
@@ -136,15 +176,15 @@
 }
 
 // Return all test parameters, a list of tuple of <instance, component>
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters() {
+const std::vector<TestParameters>& getTestParameters() {
     return getTestParameters(C2Component::DOMAIN_OTHER, C2Component::KIND_OTHER);
 }
 
 // Return all test parameters, a list of tuple of <instance, component> with matching domain and
 // kind.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters(
-        C2Component::domain_t domain, C2Component::kind_t kind) {
-    static std::vector<std::tuple<std::string, std::string>> parameters;
+const std::vector<TestParameters>& getTestParameters(C2Component::domain_t domain,
+                                                     C2Component::kind_t kind) {
+    static std::vector<TestParameters> parameters;
 
     auto instances = android::Codec2Client::GetServiceNames();
     for (std::string instance : instances) {
@@ -157,11 +197,18 @@
                 (traits.domain != domain || traits.kind != kind)) {
                 continue;
             }
-
+            if (traits.name.rfind(sComponentNamePrefix, 0) != 0) {
+                ALOGD("Skipping tests for %s. Prefix specified is %s", traits.name.c_str(),
+                      sComponentNamePrefix.c_str());
+                continue;
+            }
             parameters.push_back(std::make_tuple(instance, traits.name));
         }
     }
 
+    if (parameters.empty()) {
+        ALOGE("No test parameters added. Verify component prefix passed to the test");
+    }
     return parameters;
 }
 
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
index 50e3ac5..e74f247 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
@@ -40,7 +40,14 @@
 
 using namespace ::std::chrono;
 
-static std::vector<std::tuple<std::string, std::string>> kTestParameters;
+using TestParameters = std::tuple<std::string, std::string>;
+static std::vector<TestParameters> kTestParameters;
+
+// Resource directory
+extern std::string sResourceDir;
+
+// Component name prefix
+extern std::string sComponentNamePrefix;
 
 struct FrameInfo {
     int bytesCount;
@@ -48,6 +55,18 @@
     int64_t timestamp;
 };
 
+template <typename... T>
+static inline std::string PrintInstanceTupleNameToString(
+        const testing::TestParamInfo<std::tuple<T...>>& info) {
+    std::stringstream ss;
+    std::apply([&ss](auto&&... elems) { ((ss << elems << '_'), ...); }, info.param);
+    ss << info.index;
+    std::string param_string = ss.str();
+    auto isNotAlphaNum = [](char c) { return !std::isalnum(c); };
+    std::replace_if(param_string.begin(), param_string.end(), isNotAlphaNum, '_');
+    return param_string;
+}
+
 /*
  * Handle Callback functions onWorkDone(), onTripped(),
  * onError(), onDeath(), onFramesRendered()
@@ -105,13 +124,15 @@
     std::function<void(std::list<std::unique_ptr<C2Work>>& workItems)> callBack;
 };
 
+void parseArgs(int argc, char** argv);
+
 // Return all test parameters, a list of tuple of <instance, component>.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters();
+const std::vector<TestParameters>& getTestParameters();
 
 // Return all test parameters, a list of tuple of <instance, component> with matching domain and
 // kind.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters(
-        C2Component::domain_t domain, C2Component::kind_t kind);
+const std::vector<TestParameters>& getTestParameters(C2Component::domain_t domain,
+                                                     C2Component::kind_t kind);
 
 /*
  * common functions declarations
diff --git a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index 6122225..29acd33 100644
--- a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -53,9 +53,8 @@
     }
 
 namespace {
-
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kInputTestParameters;
+using InputTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<InputTestParameters> kInputTestParameters;
 
 // google.codec2 Component test setup
 class Codec2ComponentHidlTestBase : public ::testing::Test {
@@ -120,9 +119,8 @@
     }
 };
 
-class Codec2ComponentHidlTest
-    : public Codec2ComponentHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2ComponentHidlTest : public Codec2ComponentHidlTestBase,
+                                public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -317,10 +315,8 @@
     ASSERT_EQ(err, C2_OK);
 }
 
-class Codec2ComponentInputTests
-    : public Codec2ComponentHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2ComponentInputTests : public Codec2ComponentHidlTestBase,
+                                  public ::testing::WithParamInterface<InputTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -330,8 +326,8 @@
 TEST_P(Codec2ComponentInputTests, InputBufferTest) {
     description("Tests for different inputs");
 
-    uint32_t flags = std::stoul(std::get<2>(GetParam()));
-    bool isNullBuffer = !std::get<3>(GetParam()).compare("true");
+    uint32_t flags = std::get<2>(GetParam());
+    bool isNullBuffer = std::get<3>(GetParam());
     if (isNullBuffer)
         ALOGD("Testing for null input buffer with flag : %u", flags);
     else
@@ -350,31 +346,28 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2ComponentHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_CASE_P(NonStdInputs, Codec2ComponentInputTests,
-                        testing::ValuesIn(kInputTestParameters),
-                        android::hardware::PrintInstanceTupleNameToString<>);
+                        testing::ValuesIn(kInputTestParameters), PrintInstanceTupleNameToString<>);
 }  // anonymous namespace
 
 // TODO: Add test for Invalid work,
 // TODO: Add test for Invalid states
 int main(int argc, char** argv) {
+    parseArgs(argc, argv);
     kTestParameters = getTestParameters();
     for (auto params : kTestParameters) {
         kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
+        kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+                                                       C2FrameData::FLAG_END_OF_STREAM, true));
         kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params),
-                                std::to_string(C2FrameData::FLAG_END_OF_STREAM), "true"));
-        kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
-        kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params),
-                                std::to_string(C2FrameData::FLAG_CODEC_CONFIG), "false"));
-        kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params),
-                                std::to_string(C2FrameData::FLAG_END_OF_STREAM), "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
+        kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+                                                       C2FrameData::FLAG_CODEC_CONFIG, false));
+        kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+                                                       C2FrameData::FLAG_END_OF_STREAM, false));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index b520c17..d0a1c31 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -40,13 +40,44 @@
 #include "media_c2_hidl_test_common.h"
 #include "media_c2_video_hidl_test_common.h"
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kDecodeTestParameters;
+using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<DecodeTestParameters> kDecodeTestParameters;
 
-static std::vector<std::tuple<std::string, std::string, std::string>> kCsdFlushTestParameters;
+using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
+static std::vector<CsdFlushTestParameters> kCsdFlushTestParameters;
 
-// Resource directory
-static std::string sResourceDir = "";
+struct CompToURL {
+    std::string mime;
+    std::string mURL;
+    std::string info;
+    std::string chksum;
+};
+std::vector<CompToURL> kCompToURL = {
+        {"avc", "bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_176x144_300kbps_60fps.info",
+         "bbb_avc_176x144_300kbps_60fps_chksum.md5"},
+        {"avc", "bbb_avc_640x360_768kbps_30fps.h264", "bbb_avc_640x360_768kbps_30fps.info",
+         "bbb_avc_640x360_768kbps_30fps_chksum.md5"},
+        {"hevc", "bbb_hevc_176x144_176kbps_60fps.hevc", "bbb_hevc_176x144_176kbps_60fps.info",
+         "bbb_hevc_176x144_176kbps_60fps_chksum.md5"},
+        {"hevc", "bbb_hevc_640x360_1600kbps_30fps.hevc", "bbb_hevc_640x360_1600kbps_30fps.info",
+         "bbb_hevc_640x360_1600kbps_30fps_chksum.md5"},
+        {"mpeg2", "bbb_mpeg2_176x144_105kbps_25fps.m2v", "bbb_mpeg2_176x144_105kbps_25fps.info",
+         ""},
+        {"mpeg2", "bbb_mpeg2_352x288_1mbps_60fps.m2v", "bbb_mpeg2_352x288_1mbps_60fps.info", ""},
+        {"3gpp", "bbb_h263_352x288_300kbps_12fps.h263", "bbb_h263_352x288_300kbps_12fps.info", ""},
+        {"mp4v-es", "bbb_mpeg4_352x288_512kbps_30fps.m4v", "bbb_mpeg4_352x288_512kbps_30fps.info",
+         ""},
+        {"vp8", "bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_176x144_240kbps_60fps.info", ""},
+        {"vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", "bbb_vp8_640x360_2mbps_30fps.info",
+         "bbb_vp8_640x360_2mbps_30fps_chksm.md5"},
+        {"vp9", "bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_176x144_285kbps_60fps.info", ""},
+        {"vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.info",
+         "bbb_vp9_640x360_1600kbps_30fps_chksm.md5"},
+        {"vp9", "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9",
+         "bbb_vp9_704x480_280kbps_24fps_altref_2.info", ""},
+        {"av01", "bbb_av1_640_360.av1", "bbb_av1_640_360.info", "bbb_av1_640_360_chksum.md5"},
+        {"av01", "bbb_av1_176_144.av1", "bbb_av1_176_144.info", "bbb_av1_176_144_chksm.md5"},
+};
 
 class LinearBuffer : public C2Buffer {
   public:
@@ -85,26 +116,11 @@
         mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
         ASSERT_NE(mLinearPool, nullptr);
 
-        mCompName = unknown_comp;
-        struct StringToName {
-            const char* Name;
-            standardComp CompName;
-        };
+        std::vector<std::unique_ptr<C2Param>> queried;
+        mComponent->query({}, {C2PortMediaTypeSetting::input::PARAM_TYPE}, C2_DONT_BLOCK, &queried);
+        ASSERT_GT(queried.size(), 0);
 
-        const StringToName kStringToName[] = {
-                {"h263", h263}, {"avc", avc}, {"mpeg2", mpeg2}, {"mpeg4", mpeg4},
-                {"hevc", hevc}, {"vp8", vp8}, {"vp9", vp9},     {"av1", av1},
-        };
-
-        const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
-
-        // Find the component type
-        for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
-                mCompName = kStringToName[i].CompName;
-                break;
-            }
-        }
+        mMime = ((C2PortMediaTypeSetting::input*)queried[0].get())->m.value;
         mEos = false;
         mFramesReceived = 0;
         mTimestampUs = 0u;
@@ -114,11 +130,11 @@
         mMd5Offset = 0;
         mMd5Enable = false;
         mRefMd5 = nullptr;
-        if (mCompName == unknown_comp) mDisableTest = true;
 
         C2SecureModeTuning secureModeTuning{};
         mComponent->query({&secureModeTuning}, {}, C2_MAY_BLOCK, nullptr);
-        if (secureModeTuning.value == C2Config::SM_READ_PROTECTED) {
+        if (secureModeTuning.value == C2Config::SM_READ_PROTECTED ||
+            secureModeTuning.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED) {
             mDisableTest = true;
         }
 
@@ -136,6 +152,9 @@
     // Get the test parameters from GetParam call.
     virtual void getParams() {}
 
+    void GetURLChksmForComponent(char* mURL, char* info, char* chksum, size_t streamIndex);
+    void GetURLForComponent(char* mURL, char* info, size_t streamIndex = 0);
+
     /* Calculate the CKSUM for the data in inbuf */
     void calc_md5_cksum(uint8_t* pu1_inbuf, uint32_t u4_stride, uint32_t u4_width,
                         uint32_t u4_height, uint8_t* pu1_cksum_p) {
@@ -220,8 +239,7 @@
                 if (!codecConfig && !work->worklets.front()->output.buffers.empty()) {
                     if (mReorderDepth < 0) {
                         C2PortReorderBufferDepthTuning::output reorderBufferDepth;
-                        mComponent->query({&reorderBufferDepth}, {}, C2_MAY_BLOCK,
-                                          nullptr);
+                        mComponent->query({&reorderBufferDepth}, {}, C2_MAY_BLOCK, nullptr);
                         mReorderDepth = reorderBufferDepth.value;
                         if (mReorderDepth > 0) {
                             // TODO: Add validation for reordered output
@@ -267,18 +285,7 @@
         }
     }
 
-    enum standardComp {
-        h263,
-        avc,
-        mpeg2,
-        mpeg4,
-        hevc,
-        vp8,
-        vp9,
-        av1,
-        unknown_comp,
-    };
-
+    std::string mMime;
     std::string mInstanceName;
     std::string mComponentName;
 
@@ -291,7 +298,6 @@
     char* mRefMd5;
     std::list<uint64_t> mTimestampUslist;
     std::list<uint64_t> mFlushedIndices;
-    standardComp mCompName;
 
     int32_t mWorkResult;
     int32_t mReorderDepth;
@@ -314,9 +320,8 @@
     }
 };
 
-class Codec2VideoDecHidlTest
-    : public Codec2VideoDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2VideoDecHidlTest : public Codec2VideoDecHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -324,7 +329,7 @@
 };
 
 void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
-                       Codec2VideoDecHidlTest::standardComp compName, bool& disableTest) {
+                       bool& disableTest) {
     // Validate its a C2 Component
     if (component->getName().find("c2") == std::string::npos) {
         ALOGE("Not a c2 component");
@@ -351,83 +356,32 @@
             return;
         }
     }
-
-    // Validates component name
-    if (compName == Codec2VideoDecHidlTest::unknown_comp) {
-        ALOGE("Component InValid");
-        disableTest = true;
-        return;
-    }
     ALOGV("Component Valid");
 }
 
 // number of elementary streams per component
 #define STREAM_COUNT 3
 // LookUpTable of clips, metadata and chksum for component testing
-void GetURLChksmForComponent(Codec2VideoDecHidlTest::standardComp comp, char* mURL, char* info,
-                             char* chksum, size_t streamIndex = 1) {
-    struct CompToURL {
-        Codec2VideoDecHidlTest::standardComp comp;
-        const char mURL[STREAM_COUNT][512];
-        const char info[STREAM_COUNT][512];
-        const char chksum[STREAM_COUNT][512];
-    };
-    ASSERT_TRUE(streamIndex < STREAM_COUNT);
-
-    static const CompToURL kCompToURL[] = {
-            {Codec2VideoDecHidlTest::standardComp::avc,
-             {"bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_640x360_768kbps_30fps.h264", ""},
-             {"bbb_avc_176x144_300kbps_60fps.info", "bbb_avc_640x360_768kbps_30fps.info", ""},
-             {"bbb_avc_176x144_300kbps_60fps_chksum.md5",
-              "bbb_avc_640x360_768kbps_30fps_chksum.md5", ""}},
-            {Codec2VideoDecHidlTest::standardComp::hevc,
-             {"bbb_hevc_176x144_176kbps_60fps.hevc", "bbb_hevc_640x360_1600kbps_30fps.hevc", ""},
-             {"bbb_hevc_176x144_176kbps_60fps.info", "bbb_hevc_640x360_1600kbps_30fps.info", ""},
-             {"bbb_hevc_176x144_176kbps_60fps_chksum.md5",
-              "bbb_hevc_640x360_1600kbps_30fps_chksum.md5", ""}},
-            {Codec2VideoDecHidlTest::standardComp::mpeg2,
-             {"bbb_mpeg2_176x144_105kbps_25fps.m2v", "bbb_mpeg2_352x288_1mbps_60fps.m2v", ""},
-             {"bbb_mpeg2_176x144_105kbps_25fps.info", "bbb_mpeg2_352x288_1mbps_60fps.info", ""},
-             {"", "", ""}},
-            {Codec2VideoDecHidlTest::standardComp::h263,
-             {"", "bbb_h263_352x288_300kbps_12fps.h263", ""},
-             {"", "bbb_h263_352x288_300kbps_12fps.info", ""},
-             {"", "", ""}},
-            {Codec2VideoDecHidlTest::standardComp::mpeg4,
-             {"", "bbb_mpeg4_352x288_512kbps_30fps.m4v", ""},
-             {"", "bbb_mpeg4_352x288_512kbps_30fps.info", ""},
-             {"", "", ""}},
-            {Codec2VideoDecHidlTest::standardComp::vp8,
-             {"bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", ""},
-             {"bbb_vp8_176x144_240kbps_60fps.info", "bbb_vp8_640x360_2mbps_30fps.info", ""},
-             {"", "bbb_vp8_640x360_2mbps_30fps_chksm.md5", ""}},
-            {Codec2VideoDecHidlTest::standardComp::vp9,
-             {"bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9",
-              "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9"},
-             {"bbb_vp9_176x144_285kbps_60fps.info", "bbb_vp9_640x360_1600kbps_30fps.info",
-              "bbb_vp9_704x480_280kbps_24fps_altref_2.info"},
-             {"", "bbb_vp9_640x360_1600kbps_30fps_chksm.md5", ""}},
-            {Codec2VideoDecHidlTest::standardComp::av1,
-             {"bbb_av1_640_360.av1", "bbb_av1_176_144.av1", ""},
-             {"bbb_av1_640_360.info", "bbb_av1_176_144.info", ""},
-             {"bbb_av1_640_360_chksum.md5", "bbb_av1_176_144_chksm.md5", ""}},
-    };
-
-    for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
-        if (kCompToURL[i].comp == comp) {
-            strcat(mURL, kCompToURL[i].mURL[streamIndex]);
-            strcat(info, kCompToURL[i].info[streamIndex]);
-            strcat(chksum, kCompToURL[i].chksum[streamIndex]);
-            return;
+void Codec2VideoDecHidlTestBase::GetURLChksmForComponent(char* mURL, char* info, char* chksum,
+                                                         size_t streamIndex) {
+    size_t streamCount = 0;
+    for (size_t i = 0; i < kCompToURL.size(); ++i) {
+        if (mMime.find(kCompToURL[i].mime) != std::string::npos) {
+            if (streamCount == streamIndex) {
+                strcat(mURL, kCompToURL[i].mURL.c_str());
+                strcat(info, kCompToURL[i].info.c_str());
+                strcat(chksum, kCompToURL[i].chksum.c_str());
+                return;
+            }
+            streamCount++;
         }
     }
 }
 
-void GetURLForComponent(Codec2VideoDecHidlTest::standardComp comp, char* mURL, char* info,
-                        size_t streamIndex = 1) {
+void Codec2VideoDecHidlTestBase::GetURLForComponent(char* mURL, char* info, size_t streamIndex) {
     char chksum[512];
     strcpy(chksum, sResourceDir.c_str());
-    GetURLChksmForComponent(comp, mURL, info, chksum, streamIndex);
+    GetURLChksmForComponent(mURL, info, chksum, streamIndex);
 }
 
 void decodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -517,7 +471,7 @@
 TEST_P(Codec2VideoDecHidlTest, validateCompName) {
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ALOGV("Checks if the given component is a valid video component");
-    validateComponent(mComponent, mCompName, mDisableTest);
+    validateComponent(mComponent, mDisableTest);
     ASSERT_EQ(mDisableTest, false);
 }
 
@@ -573,10 +527,8 @@
     return false;
 }
 
-class Codec2VideoDecDecodeTest
-    : public Codec2VideoDecHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2VideoDecDecodeTest : public Codec2VideoDecHidlTestBase,
+                                 public ::testing::WithParamInterface<DecodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -588,8 +540,8 @@
     description("Decodes input file");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    uint32_t streamIndex = std::stoi(std::get<2>(GetParam()));
-    bool signalEOS = !std::get<2>(GetParam()).compare("true");
+    uint32_t streamIndex = std::get<2>(GetParam());
+    bool signalEOS = std::get<3>(GetParam());
     mTimestampDevTest = true;
 
     char mURL[512], info[512], chksum[512];
@@ -599,7 +551,7 @@
     strcpy(info, sResourceDir.c_str());
     strcpy(chksum, sResourceDir.c_str());
 
-    GetURLChksmForComponent(mCompName, mURL, info, chksum, streamIndex);
+    GetURLChksmForComponent(mURL, info, chksum, streamIndex);
     if (!(strcmp(mURL, sResourceDir.c_str())) || !(strcmp(info, sResourceDir.c_str()))) {
         ALOGV("Skipping Test, Stream not available");
         return;
@@ -688,9 +640,11 @@
 TEST_P(Codec2VideoDecHidlTest, AdaptiveDecodeTest) {
     description("Adaptive Decode Test");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
-    if (!(mCompName == avc || mCompName == hevc || mCompName == vp8 || mCompName == vp9 ||
-          mCompName == mpeg2))
+    if (!(strcasestr(mMime.c_str(), "avc") || strcasestr(mMime.c_str(), "hevc") ||
+          strcasestr(mMime.c_str(), "vp8") || strcasestr(mMime.c_str(), "vp9") ||
+          strcasestr(mMime.c_str(), "mpeg2"))) {
         return;
+    }
 
     typedef std::unique_lock<std::mutex> ULock;
     ASSERT_EQ(mComponent->start(), C2_OK);
@@ -705,7 +659,7 @@
 
         strcpy(mURL, sResourceDir.c_str());
         strcpy(info, sResourceDir.c_str());
-        GetURLForComponent(mCompName, mURL, info, i % STREAM_COUNT);
+        GetURLForComponent(mURL, info, i % STREAM_COUNT);
         if (!(strcmp(mURL, sResourceDir.c_str())) || !(strcmp(info, sResourceDir.c_str()))) {
             ALOGV("Stream not available, skipping this index");
             continue;
@@ -801,7 +755,7 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
 
     int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
     ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
@@ -888,7 +842,7 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
 
     mFlushedIndices.clear();
 
@@ -964,7 +918,7 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
 
     eleInfo.open(info);
     ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
@@ -1017,9 +971,8 @@
     }
 }
 
-class Codec2VideoDecCsdInputTests
-    : public Codec2VideoDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string, std::string>> {
+class Codec2VideoDecCsdInputTests : public Codec2VideoDecHidlTestBase,
+                                    public ::testing::WithParamInterface<CsdFlushTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -1038,7 +991,7 @@
 
     strcpy(mURL, sResourceDir.c_str());
     strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
+    GetURLForComponent(mURL, info);
 
     int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
     ASSERT_GE(numCsds, 0) << "Error in parsing input info file";
@@ -1052,7 +1005,7 @@
     bool flushedDecoder = false;
     bool signalEOS = false;
     bool keyFrame = false;
-    bool flushCsd = !std::get<2>(GetParam()).compare("true");
+    bool flushCsd = std::get<2>(GetParam());
 
     ALOGV("sending %d csd data ", numCsds);
     int framesToDecode = numCsds;
@@ -1122,49 +1075,41 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoDecHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 // DecodeTest with StreamIndex and EOS / No EOS
 INSTANTIATE_TEST_SUITE_P(StreamIndexAndEOS, Codec2VideoDecDecodeTest,
                          testing::ValuesIn(kDecodeTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_SUITE_P(CsdInputs, Codec2VideoDecCsdInputTests,
                          testing::ValuesIn(kCsdFlushTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 }  // anonymous namespace
 
 // TODO : Video specific configuration Test
 int main(int argc, char** argv) {
+    parseArgs(argc, argv);
     kTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_DECODER);
     for (auto params : kTestParameters) {
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "2", "false"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 2, false));
         kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "2", "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 2, true));
 
         kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true));
         kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false"));
-    }
-
-    // Set the resource directory based on command line args.
-    // Test will fail to set up if the argument is not set.
-    for (int i = 1; i < argc; i++) {
-        if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
-            sResourceDir = argv[i + 1];
-            break;
-        }
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index 5bcea5b..23ceff4 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -41,13 +41,11 @@
         : C2Buffer({block->share(C2Rect(block->width(), block->height()), ::C2Fence())}) {}
 };
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string, std::string>>
-        kEncodeTestParameters;
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kEncodeResolutionTestParameters;
+using EncodeTestParameters = std::tuple<std::string, std::string, bool, bool, bool>;
+static std::vector<EncodeTestParameters> kEncodeTestParameters;
 
-// Resource directory
-static std::string sResourceDir = "";
+using EncodeResolutionTestParameters = std::tuple<std::string, std::string, int32_t, int32_t>;
+static std::vector<EncodeResolutionTestParameters> kEncodeResolutionTestParameters;
 
 namespace {
 
@@ -78,26 +76,13 @@
         mGraphicPool = std::make_shared<C2PooledBlockPool>(mGraphicAllocator, mBlockPoolId++);
         ASSERT_NE(mGraphicPool, nullptr);
 
-        mCompName = unknown_comp;
-        struct StringToName {
-            const char* Name;
-            standardComp CompName;
-        };
+        std::vector<std::unique_ptr<C2Param>> queried;
+        mComponent->query({}, {C2PortMediaTypeSetting::output::PARAM_TYPE}, C2_DONT_BLOCK,
+                          &queried);
+        ASSERT_GT(queried.size(), 0);
 
-        const StringToName kStringToName[] = {
-                {"h263", h263}, {"avc", avc}, {"mpeg4", mpeg4},
-                {"hevc", hevc}, {"vp8", vp8}, {"vp9", vp9},
-        };
-
-        const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
-
-        // Find the component type
-        for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
-                mCompName = kStringToName[i].CompName;
-                break;
-            }
-        }
+        mMime = ((C2PortMediaTypeSetting::output*)queried[0].get())->m.value;
+        std::cout << "mime : " << mMime << "\n";
         mEos = false;
         mCsd = false;
         mConfigBPictures = false;
@@ -106,11 +91,11 @@
         mTimestampUs = 0u;
         mOutputSize = 0u;
         mTimestampDevTest = false;
-        if (mCompName == unknown_comp) mDisableTest = true;
 
         C2SecureModeTuning secureModeTuning{};
         mComponent->query({&secureModeTuning}, {}, C2_MAY_BLOCK, nullptr);
-        if (secureModeTuning.value == C2Config::SM_READ_PROTECTED) {
+        if (secureModeTuning.value == C2Config::SM_READ_PROTECTED ||
+            secureModeTuning.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED) {
             mDisableTest = true;
         }
 
@@ -187,16 +172,7 @@
         }
     }
 
-    enum standardComp {
-        h263,
-        avc,
-        mpeg4,
-        hevc,
-        vp8,
-        vp9,
-        unknown_comp,
-    };
-
+    std::string mMime;
     std::string mInstanceName;
     std::string mComponentName;
     bool mEos;
@@ -204,7 +180,6 @@
     bool mDisableTest;
     bool mConfigBPictures;
     bool mTimestampDevTest;
-    standardComp mCompName;
     uint32_t mFramesReceived;
     uint32_t mFailedWorkReceived;
     uint64_t mTimestampUs;
@@ -231,9 +206,8 @@
     }
 };
 
-class Codec2VideoEncHidlTest
-    : public Codec2VideoEncHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2VideoEncHidlTest : public Codec2VideoEncHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -241,7 +215,7 @@
 };
 
 void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
-                       Codec2VideoEncHidlTest::standardComp compName, bool& disableTest) {
+                       bool& disableTest) {
     // Validate its a C2 Component
     if (component->getName().find("c2") == std::string::npos) {
         ALOGE("Not a c2 component");
@@ -268,13 +242,6 @@
             return;
         }
     }
-
-    // Validates component name
-    if (compName == Codec2VideoEncHidlTest::unknown_comp) {
-        ALOGE("Component InValid");
-        disableTest = true;
-        return;
-    }
     ALOGV("Component Valid");
 }
 
@@ -405,14 +372,12 @@
 TEST_P(Codec2VideoEncHidlTest, validateCompName) {
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ALOGV("Checks if the given component is a valid video component");
-    validateComponent(mComponent, mCompName, mDisableTest);
+    validateComponent(mComponent, mDisableTest);
     ASSERT_EQ(mDisableTest, false);
 }
 
-class Codec2VideoEncEncodeTest
-    : public Codec2VideoEncHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string, std::string>> {
+class Codec2VideoEncEncodeTest : public Codec2VideoEncHidlTestBase,
+                                 public ::testing::WithParamInterface<EncodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -426,10 +391,10 @@
     char mURL[512];
     int32_t nWidth = ENC_DEFAULT_FRAME_WIDTH;
     int32_t nHeight = ENC_DEFAULT_FRAME_HEIGHT;
-    bool signalEOS = !std::get<2>(GetParam()).compare("true");
+    bool signalEOS = std::get<3>(GetParam());
     // Send an empty frame to receive CSD data from encoder.
-    bool sendEmptyFirstFrame = !std::get<3>(GetParam()).compare("true");
-    mConfigBPictures = !std::get<4>(GetParam()).compare("true");
+    bool sendEmptyFirstFrame = std::get<3>(GetParam());
+    mConfigBPictures = std::get<4>(GetParam());
 
     strcpy(mURL, sResourceDir.c_str());
     GetURLForComponent(mURL);
@@ -517,9 +482,9 @@
         ASSERT_TRUE(false);
     }
 
-    if (mCompName == vp8 || mCompName == h263) {
+    if ((mMime.find("vp8") != std::string::npos) || (mMime.find("3gpp") != std::string::npos)) {
         ASSERT_FALSE(mCsd) << "CSD Buffer not expected";
-    } else if (mCompName != vp9) {
+    } else if (mMime.find("vp9") == std::string::npos) {
         ASSERT_TRUE(mCsd) << "CSD Buffer not received";
     }
 
@@ -697,8 +662,7 @@
 
 class Codec2VideoEncResolutionTest
     : public Codec2VideoEncHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+      public ::testing::WithParamInterface<EncodeResolutionTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -710,8 +674,8 @@
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
     std::ifstream eleStream;
-    int32_t nWidth = std::stoi(std::get<2>(GetParam()));
-    int32_t nHeight = std::stoi(std::get<3>(GetParam()));
+    int32_t nWidth = std::get<2>(GetParam());
+    int32_t nHeight = std::get<3>(GetParam());
     ALOGD("Trying encode for width %d height %d", nWidth, nHeight);
     mEos = false;
 
@@ -743,14 +707,16 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoEncHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_SUITE_P(NonStdSizes, Codec2VideoEncResolutionTest,
-                         ::testing::ValuesIn(kEncodeResolutionTestParameters));
+                         ::testing::ValuesIn(kEncodeResolutionTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 // EncodeTest with EOS / No EOS
 INSTANTIATE_TEST_SUITE_P(EncodeTestwithEOS, Codec2VideoEncEncodeTest,
-                         ::testing::ValuesIn(kEncodeTestParameters));
+                         ::testing::ValuesIn(kEncodeTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 TEST_P(Codec2VideoEncHidlTest, AdaptiveBitrateTest) {
     description("Encodes input file for different bitrates");
@@ -841,38 +807,26 @@
 }  // anonymous namespace
 
 int main(int argc, char** argv) {
+    parseArgs(argc, argv);
     kTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER);
     for (auto params : kTestParameters) {
-        constexpr char const* kBoolString[] = { "false", "true" };
         for (size_t i = 0; i < 1 << 3; ++i) {
             kEncodeTestParameters.push_back(std::make_tuple(
-                    std::get<0>(params), std::get<1>(params),
-                    kBoolString[i & 1],
-                    kBoolString[(i >> 1) & 1],
-                    kBoolString[(i >> 2) & 1]));
+                    std::get<0>(params), std::get<1>(params), i & 1, (i >> 1) & 1, (i >> 2) & 1));
         }
 
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "52", "18"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 52, 18));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "365", "365"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 365, 365));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "484", "362"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 484, 362));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "244", "488"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 244, 488));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "852", "608"));
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 852, 608));
         kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1400", "442"));
-    }
-
-    // Set the resource directory based on command line args.
-    // Test will fail to set up if the argument is not set.
-    for (int i = 1; i < argc; i++) {
-        if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
-            sResourceDir = argv[i + 1];
-            break;
-        }
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1400, 442));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/plugin/FilterWrapper.cpp b/media/codec2/hidl/plugin/FilterWrapper.cpp
index 0b38bc1..bed8aeb 100644
--- a/media/codec2/hidl/plugin/FilterWrapper.cpp
+++ b/media/codec2/hidl/plugin/FilterWrapper.cpp
@@ -19,7 +19,6 @@
 #include <android-base/logging.h>
 
 #include <set>
-#include <sstream>
 
 #include <dlfcn.h>
 
@@ -383,6 +382,9 @@
         // Configure the next interface with the params.
         std::vector<C2Param *> configParams;
         for (size_t i = 0; i < heapParams.size(); ++i) {
+            if (!heapParams[i]) {
+                continue;
+            }
             if (heapParams[i]->forStream()) {
                 heapParams[i] = C2Param::CopyAsStream(
                         *heapParams[i], false /* output */, heapParams[i]->stream());
@@ -782,10 +784,7 @@
         if (C2_OK != mStore->createComponent(filter.traits.name, &comp)) {
             return {};
         }
-        if (C2_OK != mStore->createInterface(filter.traits.name, &intf)) {
-            return {};
-        }
-        filters.push_back({comp, intf, filter.traits, filter.desc});
+        filters.push_back({comp, comp->intf(), filter.traits, filter.desc});
     }
     return filters;
 }
@@ -869,7 +868,7 @@
     }
     std::vector<Component> filters = createFilters();
     std::shared_ptr wrapped = std::make_shared<WrappedDecoder>(
-            comp, std::move(filters), weak_from_this());
+            comp, std::vector(filters), weak_from_this());
     {
         std::unique_lock lock(mWrappedComponentsMutex);
         std::vector<std::weak_ptr<const C2Component>> &components =
diff --git a/media/codec2/hidl/plugin/FilterWrapperStub.cpp b/media/codec2/hidl/plugin/FilterWrapperStub.cpp
index 1b94a1a..01ca596 100644
--- a/media/codec2/hidl/plugin/FilterWrapperStub.cpp
+++ b/media/codec2/hidl/plugin/FilterWrapperStub.cpp
@@ -42,10 +42,10 @@
 }
 
 c2_status_t FilterWrapper::createBlockPool(
-        C2PlatformAllocatorStore::id_t,
-        std::shared_ptr<const C2Component>,
-        std::shared_ptr<C2BlockPool> *) {
-    return C2_OMITTED;
+        C2PlatformAllocatorStore::id_t allocatorId,
+        std::shared_ptr<const C2Component> component,
+        std::shared_ptr<C2BlockPool> *pool) {
+    return CreateCodec2BlockPool(allocatorId, component, pool);
 }
 
 }  // namespace android
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy
index 4faf8b2..85fd28d 100644
--- a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy
+++ b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy
@@ -35,7 +35,7 @@
 # on ARM is statically loaded at 0xffff 0000. See
 # http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.ddi0211h/Babfeega.html
 # for more details.
-mremap: arg3 == 3
+mremap: arg3 == 3 || arg3 == MREMAP_MAYMOVE
 munmap: 1
 prctl: 1
 writev: 1
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index ab73245..2460490 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -33,12 +33,14 @@
 #include <OMX_IndexExt.h>
 
 #include <android/fdsan.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/omx/OMXUtils.h>
 #include <media/stagefright/MediaErrors.h>
 #include <ui/Fence.h>
 #include <ui/GraphicBuffer.h>
 #include <utils/Thread.h>
 
+#include "utils/Codec2Mapper.h"
 #include "C2OMXNode.h"
 
 namespace android {
@@ -71,6 +73,23 @@
         jobs->cond.broadcast();
     }
 
+    void setDataspace(android_dataspace dataspace) {
+        Mutexed<Jobs>::Locked jobs(mJobs);
+        ColorUtils::convertDataSpaceToV0(dataspace);
+        jobs->configUpdate.emplace_back(new C2StreamDataSpaceInfo::input(0u, dataspace));
+        int32_t standard;
+        int32_t transfer;
+        int32_t range;
+        ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
+        std::unique_ptr<C2StreamColorAspectsInfo::input> colorAspects =
+            std::make_unique<C2StreamColorAspectsInfo::input>(0u);
+        if (C2Mapper::map(standard, &colorAspects->primaries, &colorAspects->matrix)
+                && C2Mapper::map(transfer, &colorAspects->transfer)
+                && C2Mapper::map(range, &colorAspects->range)) {
+            jobs->configUpdate.push_back(std::move(colorAspects));
+        }
+    }
+
 protected:
     bool threadLoop() override {
         constexpr nsecs_t kIntervalNs = nsecs_t(10) * 1000 * 1000;  // 10ms
@@ -102,6 +121,9 @@
                     uniqueFds.push_back(std::move(queue.workList.front().fd1));
                     queue.workList.pop_front();
                 }
+                for (const std::unique_ptr<C2Param> &param : jobs->configUpdate) {
+                    items.front()->input.configUpdate.emplace_back(C2Param::Copy(*param));
+                }
 
                 jobs.unlock();
                 for (int fenceFd : fenceFds) {
@@ -119,6 +141,7 @@
                 queued = true;
             }
             if (queued) {
+                jobs->configUpdate.clear();
                 return true;
             }
             if (i == 0) {
@@ -161,6 +184,7 @@
         std::map<std::weak_ptr<Codec2Client::Component>,
                  Queue,
                  std::owner_less<std::weak_ptr<Codec2Client::Component>>> queues;
+        std::vector<std::unique_ptr<C2Param>> configUpdate;
         Condition cond;
     };
     Mutexed<Jobs> mJobs;
@@ -172,6 +196,9 @@
       mQueueThread(new QueueThread) {
     android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS);
     mQueueThread->run("C2OMXNode", PRIORITY_AUDIO);
+
+    Mutexed<android_dataspace>::Locked ds(mDataspace);
+    *ds = HAL_DATASPACE_UNKNOWN;
 }
 
 status_t C2OMXNode::freeNode() {
@@ -459,8 +486,11 @@
     android_dataspace dataSpace = (android_dataspace)msg.u.event_data.data1;
     uint32_t pixelFormat = msg.u.event_data.data3;
 
-    // TODO: set dataspace on component to see if it impacts color aspects
     ALOGD("dataspace changed to %#x pixel format: %#x", dataSpace, pixelFormat);
+    mQueueThread->setDataspace(dataSpace);
+
+    Mutexed<android_dataspace>::Locked ds(mDataspace);
+    *ds = dataSpace;
     return OK;
 }
 
@@ -493,4 +523,8 @@
     (void)mBufferSource->onInputBufferEmptied(bufferId, -1);
 }
 
+android_dataspace C2OMXNode::getDataspace() {
+    return *mDataspace.lock();
+}
+
 }  // namespace android
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index 1717c96..9c04969 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -93,6 +93,11 @@
      */
     void onInputBufferDone(c2_cntr64_t index);
 
+    /**
+     * Returns dataspace information from GraphicBufferSource.
+     */
+    android_dataspace getDataspace();
+
 private:
     std::weak_ptr<Codec2Client::Component> mComp;
     sp<IOMXBufferSource> mBufferSource;
@@ -101,6 +106,7 @@
     uint32_t mWidth;
     uint32_t mHeight;
     uint64_t mUsage;
+    Mutexed<android_dataspace> mDataspace;
 
     // WORKAROUND: timestamp adjustment
 
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index a23b9bd..8def53d 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -44,6 +44,7 @@
 #include <media/stagefright/BufferProducerWrapper.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <utils/NativeHandle.h>
 
 #include "C2OMXNode.h"
 #include "CCodecBufferChannel.h"
@@ -210,8 +211,6 @@
                 (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                 &usage, sizeof(usage));
 
-        // NOTE: we do not use/pass through color aspects from GraphicBufferSource as we
-        // communicate that directly to the component.
         mSource->configure(
                 mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace));
         return OK;
@@ -410,6 +409,10 @@
         mNode->onInputBufferDone(index);
     }
 
+    android_dataspace getDataspace() override {
+        return mNode->getDataspace();
+    }
+
 private:
     sp<HGraphicBufferSource> mSource;
     sp<C2OMXNode> mNode;
@@ -795,10 +798,30 @@
             mChannel->setMetaMode(CCodecBufferChannel::MODE_ANW);
         }
 
+        status_t err = OK;
         sp<RefBase> obj;
         sp<Surface> surface;
         if (msg->findObject("native-window", &obj)) {
             surface = static_cast<Surface *>(obj.get());
+            // setup tunneled playback
+            if (surface != nullptr) {
+                Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+                const std::unique_ptr<Config> &config = *configLocked;
+                if ((config->mDomain & Config::IS_DECODER)
+                        && (config->mDomain & Config::IS_VIDEO)) {
+                    int32_t tunneled;
+                    if (msg->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
+                        ALOGI("Configuring TUNNELED video playback.");
+
+                        err = configureTunneledVideoPlayback(comp, &config->mSidebandHandle, msg);
+                        if (err != OK) {
+                            ALOGE("configureTunneledVideoPlayback failed!");
+                            return err;
+                        }
+                        config->mTunneled = true;
+                    }
+                }
+            }
             setSurface(surface);
         }
 
@@ -973,7 +996,15 @@
                 // needed for decoders.
                 if (!(config->mDomain & Config::IS_ENCODER)) {
                     if (surface == nullptr) {
-                        format = flexPixelFormat.value_or(COLOR_FormatYUV420Flexible);
+                        const char *prefix = "";
+                        if (flexSemiPlanarPixelFormat) {
+                            format = COLOR_FormatYUV420SemiPlanar;
+                            prefix = "semi-";
+                        } else {
+                            format = COLOR_FormatYUV420Planar;
+                        }
+                        ALOGD("Client requested ByteBuffer mode decoder w/o color format set: "
+                                "using default %splanar color format", prefix);
                     } else {
                         format = COLOR_FormatSurface;
                     }
@@ -1007,6 +1038,26 @@
             }
         }
 
+        /*
+         * Handle dataspace
+         */
+        int32_t usingRecorder;
+        if (msg->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
+            android_dataspace dataSpace = HAL_DATASPACE_BT709;
+            int32_t width, height;
+            if (msg->findInt32("width", &width)
+                    && msg->findInt32("height", &height)) {
+                ColorAspects aspects;
+                getColorAspectsFromFormat(msg, aspects);
+                setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
+                // TODO: read dataspace / color aspect from the component
+                setColorAspectsIntoFormat(aspects, const_cast<sp<AMessage> &>(msg));
+                dataSpace = getDataSpaceForColorAspects(aspects, true /* mayexpand */);
+            }
+            msg->setInt32("android._dataspace", (int32_t)dataSpace);
+            ALOGD("setting dataspace to %x", dataSpace);
+        }
+
         int32_t subscribeToAllVendorParams;
         if (msg->findInt32("x-*", &subscribeToAllVendorParams) && subscribeToAllVendorParams) {
             if (config->subscribeToAllVendorParams(comp, C2_MAY_BLOCK) != OK) {
@@ -1023,7 +1074,7 @@
             sdkParams = msg->dup();
             sdkParams->removeEntryAt(sdkParams->findEntryByName(PARAMETER_KEY_VIDEO_BITRATE));
         }
-        status_t err = config->getConfigUpdateFromSdkParams(
+        err = config->getConfigUpdateFromSdkParams(
                 comp, sdkParams, Config::IS_CONFIG, C2_DONT_BLOCK, &configUpdate);
         if (err != OK) {
             ALOGW("failed to convert configuration to c2 params");
@@ -1044,6 +1095,45 @@
             configUpdate.push_back(std::move(gop));
         }
 
+        if ((config->mDomain & Config::IS_ENCODER)
+                && (config->mDomain & Config::IS_VIDEO)) {
+            // we may not use all 3 of these entries
+            std::unique_ptr<C2StreamPictureQuantizationTuning::output> qp =
+                C2StreamPictureQuantizationTuning::output::AllocUnique(3 /* flexCount */,
+                                                                       0u /* stream */);
+
+            int ix = 0;
+
+            int32_t iMax = INT32_MAX;
+            int32_t iMin = INT32_MIN;
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_I_MAX, &iMax);
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_I_MIN, &iMin);
+            if (iMax != INT32_MAX || iMin != INT32_MIN) {
+                qp->m.values[ix++] = {I_FRAME, iMin, iMax};
+            }
+
+            int32_t pMax = INT32_MAX;
+            int32_t pMin = INT32_MIN;
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_P_MAX, &pMax);
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_P_MIN, &pMin);
+            if (pMax != INT32_MAX || pMin != INT32_MIN) {
+                qp->m.values[ix++] = {P_FRAME, pMin, pMax};
+            }
+
+            int32_t bMax = INT32_MAX;
+            int32_t bMin = INT32_MIN;
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_B_MAX, &bMax);
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_B_MIN, &bMin);
+            if (bMax != INT32_MAX || bMin != INT32_MIN) {
+                qp->m.values[ix++] = {B_FRAME, bMin, bMax};
+            }
+
+            // adjust to reflect actual use.
+            qp->setFlexCount(ix);
+
+            configUpdate.push_back(std::move(qp));
+        }
+
         err = config->setParameters(comp, configUpdate, C2_DONT_BLOCK);
         if (err != OK) {
             ALOGW("failed to configure c2 params");
@@ -1133,10 +1223,10 @@
         int32_t clientPrepend;
         if ((config->mDomain & Config::IS_VIDEO)
                 && (config->mDomain & Config::IS_ENCODER)
-                && msg->findInt32(KEY_PREPEND_HEADERS_TO_SYNC_FRAMES, &clientPrepend)
+                && msg->findInt32(KEY_PREPEND_HEADER_TO_SYNC_FRAMES, &clientPrepend)
                 && clientPrepend
                 && (!prepend || prepend.value != PREPEND_HEADER_TO_ALL_SYNC)) {
-            ALOGE("Failed to set KEY_PREPEND_HEADERS_TO_SYNC_FRAMES");
+            ALOGE("Failed to set KEY_PREPEND_HEADER_TO_SYNC_FRAMES");
             return BAD_VALUE;
         }
 
@@ -1530,6 +1620,7 @@
         outputFormat = config->mOutputFormat = config->mOutputFormat->dup();
         if (config->mInputSurface) {
             err2 = config->mInputSurface->start();
+            config->mInputSurfaceDataspace = config->mInputSurface->getDataspace();
         }
         buffersBoundToCodec = config->mBuffersBoundToCodec;
     }
@@ -1617,6 +1708,7 @@
         if (config->mInputSurface) {
             config->mInputSurface->disconnect();
             config->mInputSurface = nullptr;
+            config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
         }
     }
     {
@@ -1666,6 +1758,7 @@
         if (config->mInputSurface) {
             config->mInputSurface->disconnect();
             config->mInputSurface = nullptr;
+            config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
         }
     }
 
@@ -1703,6 +1796,21 @@
 }
 
 status_t CCodec::setSurface(const sp<Surface> &surface) {
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        if (config->mTunneled && config->mSidebandHandle != nullptr) {
+            sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
+            status_t err = native_window_set_sideband_stream(
+                    nativeWindow.get(),
+                    const_cast<native_handle_t *>(config->mSidebandHandle->handle()));
+            if (err != OK) {
+                ALOGE("NativeWindow(%p) native_window_set_sideband_stream(%p) failed! (err %d).",
+                        nativeWindow.get(), config->mSidebandHandle->handle(), err);
+                return err;
+            }
+        }
+    }
     return mChannel->setSurface(surface);
 }
 
@@ -1835,6 +1943,12 @@
         params->removeEntryAt(params->findEntryByName(KEY_BIT_RATE));
     }
 
+    int32_t syncId = 0;
+    if (params->findInt32("audio-hw-sync", &syncId)
+            || params->findInt32("hw-av-sync-id", &syncId)) {
+        configureTunneledVideoPlayback(comp, nullptr, params);
+    }
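+    // With a null sideband-handle pointer this only pushes the updated sync id to
+    // the component; the sideband handle itself is only queried during configure().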
+
     Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
     const std::unique_ptr<Config> &config = *configLocked;
 
@@ -1906,6 +2020,39 @@
     config->setParameters(comp, params, C2_MAY_BLOCK);
 }
 
+status_t CCodec::querySupportedParameters(std::vector<std::string> *names) {
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->querySupportedParameters(names);
+}
+
+status_t CCodec::describeParameter(
+        const std::string &name, CodecParameterDescriptor *desc) {
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->describe(name, desc);
+}
+
+status_t CCodec::subscribeToParameters(const std::vector<std::string> &names) {
+    std::shared_ptr<Codec2Client::Component> comp = mState.lock()->comp;
+    if (!comp) {
+        return INVALID_OPERATION;
+    }
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->subscribeToVendorConfigUpdate(comp, names);
+}
+
+status_t CCodec::unsubscribeFromParameters(const std::vector<std::string> &names) {
+    std::shared_ptr<Codec2Client::Component> comp = mState.lock()->comp;
+    if (!comp) {
+        return INVALID_OPERATION;
+    }
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->unsubscribeFromVendorConfigUpdate(comp, names);
+}
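+// The four methods above back the vendor-parameter discovery/subscription path
+// exposed through MediaCodec (exact SDK entry points assumed here, e.g.
+// MediaCodec#subscribeToVendorParameters); they simply forward to CCodecConfig.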
+
 void CCodec::onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems) {
     if (!workItems.empty()) {
         Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
@@ -2002,80 +2149,92 @@
             }
 
             // handle configuration changes in work done
-            Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-            const std::unique_ptr<Config> &config = *configLocked;
-            Config::Watcher<C2StreamInitDataInfo::output> initData =
-                config->watch<C2StreamInitDataInfo::output>();
-            if (!work->worklets.empty()
-                    && (work->worklets.front()->output.flags
-                            & C2FrameData::FLAG_DISCARD_FRAME) == 0) {
+            std::unique_ptr<C2Param> initData;
+            sp<AMessage> outputFormat = nullptr;
+            {
+                Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+                const std::unique_ptr<Config> &config = *configLocked;
+                Config::Watcher<C2StreamInitDataInfo::output> initDataWatcher =
+                    config->watch<C2StreamInitDataInfo::output>();
+                if (!work->worklets.empty()
+                        && (work->worklets.front()->output.flags
+                                & C2FrameData::FLAG_DISCARD_FRAME) == 0) {
 
-                // copy buffer info to config
-                std::vector<std::unique_ptr<C2Param>> updates;
-                for (const std::unique_ptr<C2Param> &param
-                        : work->worklets.front()->output.configUpdate) {
-                    updates.push_back(C2Param::Copy(*param));
-                }
-                unsigned stream = 0;
-                for (const std::shared_ptr<C2Buffer> &buf : work->worklets.front()->output.buffers) {
-                    for (const std::shared_ptr<const C2Info> &info : buf->info()) {
-                        // move all info into output-stream #0 domain
-                        updates.emplace_back(C2Param::CopyAsStream(*info, true /* output */, stream));
+                    // copy buffer info to config
+                    std::vector<std::unique_ptr<C2Param>> updates;
+                    for (const std::unique_ptr<C2Param> &param
+                            : work->worklets.front()->output.configUpdate) {
+                        updates.push_back(C2Param::Copy(*param));
+                    }
+                    unsigned stream = 0;
+                    std::vector<std::shared_ptr<C2Buffer>> &outputBuffers =
+                        work->worklets.front()->output.buffers;
+                    for (const std::shared_ptr<C2Buffer> &buf : outputBuffers) {
+                        for (const std::shared_ptr<const C2Info> &info : buf->info()) {
+                            // move all info into output-stream #0 domain
+                            updates.emplace_back(
+                                    C2Param::CopyAsStream(*info, true /* output */, stream));
+                        }
+
+                        const std::vector<C2ConstGraphicBlock> blocks = buf->data().graphicBlocks();
+                        // for now only do the first block
+                        if (!blocks.empty()) {
+                            // ALOGV("got output buffer with crop %u,%u+%u,%u and size %u,%u",
+                            //      block.crop().left, block.crop().top,
+                            //      block.crop().width, block.crop().height,
+                            //      block.width(), block.height());
+                            const C2ConstGraphicBlock &block = blocks[0];
+                            updates.emplace_back(new C2StreamCropRectInfo::output(
+                                    stream, block.crop()));
+                            updates.emplace_back(new C2StreamPictureSizeInfo::output(
+                                    stream, block.crop().width, block.crop().height));
+                        }
+                        ++stream;
                     }
 
-                    const std::vector<C2ConstGraphicBlock> blocks = buf->data().graphicBlocks();
-                    // for now only do the first block
-                    if (!blocks.empty()) {
-                        // ALOGV("got output buffer with crop %u,%u+%u,%u and size %u,%u",
-                        //      block.crop().left, block.crop().top,
-                        //      block.crop().width, block.crop().height,
-                        //      block.width(), block.height());
-                        const C2ConstGraphicBlock &block = blocks[0];
-                        updates.emplace_back(new C2StreamCropRectInfo::output(stream, block.crop()));
-                        updates.emplace_back(new C2StreamPictureSizeInfo::output(
-                                stream, block.crop().width, block.crop().height));
-                    }
-                    ++stream;
-                }
+                    sp<AMessage> oldFormat = config->mOutputFormat;
+                    config->updateConfiguration(updates, config->mOutputDomain);
+                    RevertOutputFormatIfNeeded(oldFormat, config->mOutputFormat);
 
-                sp<AMessage> outputFormat = config->mOutputFormat;
-                config->updateConfiguration(updates, config->mOutputDomain);
-                RevertOutputFormatIfNeeded(outputFormat, config->mOutputFormat);
-
-                // copy standard infos to graphic buffers if not already present (otherwise, we
-                // may overwrite the actual intermediate value with a final value)
-                stream = 0;
-                const static C2Param::Index stdGfxInfos[] = {
-                    C2StreamRotationInfo::output::PARAM_TYPE,
-                    C2StreamColorAspectsInfo::output::PARAM_TYPE,
-                    C2StreamDataSpaceInfo::output::PARAM_TYPE,
-                    C2StreamHdrStaticInfo::output::PARAM_TYPE,
-                    C2StreamHdr10PlusInfo::output::PARAM_TYPE,
-                    C2StreamPixelAspectRatioInfo::output::PARAM_TYPE,
-                    C2StreamSurfaceScalingInfo::output::PARAM_TYPE
-                };
-                for (const std::shared_ptr<C2Buffer> &buf : work->worklets.front()->output.buffers) {
-                    if (buf->data().graphicBlocks().size()) {
-                        for (C2Param::Index ix : stdGfxInfos) {
-                            if (!buf->hasInfo(ix)) {
-                                const C2Param *param =
-                                    config->getConfigParameterValue(ix.withStream(stream));
-                                if (param) {
-                                    std::shared_ptr<C2Param> info(C2Param::Copy(*param));
-                                    buf->setInfo(std::static_pointer_cast<C2Info>(info));
+                    // copy standard infos to graphic buffers if not already present (otherwise, we
+                    // may overwrite the actual intermediate value with a final value)
+                    stream = 0;
+                    const static C2Param::Index stdGfxInfos[] = {
+                        C2StreamRotationInfo::output::PARAM_TYPE,
+                        C2StreamColorAspectsInfo::output::PARAM_TYPE,
+                        C2StreamDataSpaceInfo::output::PARAM_TYPE,
+                        C2StreamHdrStaticInfo::output::PARAM_TYPE,
+                        C2StreamHdr10PlusInfo::output::PARAM_TYPE,
+                        C2StreamPixelAspectRatioInfo::output::PARAM_TYPE,
+                        C2StreamSurfaceScalingInfo::output::PARAM_TYPE
+                    };
+                    for (const std::shared_ptr<C2Buffer> &buf : outputBuffers) {
+                        if (buf->data().graphicBlocks().size()) {
+                            for (C2Param::Index ix : stdGfxInfos) {
+                                if (!buf->hasInfo(ix)) {
+                                    const C2Param *param =
+                                        config->getConfigParameterValue(ix.withStream(stream));
+                                    if (param) {
+                                        std::shared_ptr<C2Param> info(C2Param::Copy(*param));
+                                        buf->setInfo(std::static_pointer_cast<C2Info>(info));
+                                    }
                                 }
                             }
                         }
+                        ++stream;
                     }
-                    ++stream;
                 }
-            }
-            if (config->mInputSurface) {
-                config->mInputSurface->onInputBufferDone(work->input.ordinal.frameIndex);
+                if (config->mInputSurface) {
+                    config->mInputSurface->onInputBufferDone(work->input.ordinal.frameIndex);
+                }
+                if (initDataWatcher.hasChanged()) {
+                    initData = C2Param::Copy(*initDataWatcher.update().get());
+                }
+                outputFormat = config->mOutputFormat;
             }
             mChannel->onWorkDone(
-                    std::move(work), config->mOutputFormat,
-                    initData.hasChanged() ? initData.update().get() : nullptr);
+                    std::move(work), outputFormat,
+                    initData ? (C2StreamInitDataInfo::output *)initData.get() : nullptr);
             break;
         }
         case kWhatWatch: {
@@ -2099,6 +2258,55 @@
     deadline->set(now + (timeout * mult), name);
 }
 
+status_t CCodec::configureTunneledVideoPlayback(
+        std::shared_ptr<Codec2Client::Component> comp,
+        sp<NativeHandle> *sidebandHandle,
+        const sp<AMessage> &msg) {
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+
+    std::unique_ptr<C2PortTunneledModeTuning::output> tunneledPlayback =
+        C2PortTunneledModeTuning::output::AllocUnique(
+            1,
+            C2PortTunneledModeTuning::Struct::SIDEBAND,
+            C2PortTunneledModeTuning::Struct::REALTIME,
+            0);
+    // TODO: use KEY_AUDIO_HW_SYNC, KEY_HARDWARE_AV_SYNC_ID when they are in MediaCodecConstants.h
+    if (msg->findInt32("audio-hw-sync", &tunneledPlayback->m.syncId[0])) {
+        tunneledPlayback->m.syncType = C2PortTunneledModeTuning::Struct::sync_type_t::AUDIO_HW_SYNC;
+    } else if (msg->findInt32("hw-av-sync-id", &tunneledPlayback->m.syncId[0])) {
+        tunneledPlayback->m.syncType = C2PortTunneledModeTuning::Struct::sync_type_t::HW_AV_SYNC;
+    } else {
+        tunneledPlayback->m.syncType = C2PortTunneledModeTuning::Struct::sync_type_t::REALTIME;
+        tunneledPlayback->setFlexCount(0);
+    }
+    c2_status_t c2err = comp->config({ tunneledPlayback.get() }, C2_MAY_BLOCK, &failures);
+    if (c2err != C2_OK) {
+        return UNKNOWN_ERROR;
+    }
+
+    if (sidebandHandle == nullptr) {
+        return OK;
+    }
+
+    std::vector<std::unique_ptr<C2Param>> params;
+    c2err = comp->query({}, {C2PortTunnelHandleTuning::output::PARAM_TYPE}, C2_DONT_BLOCK, &params);
+    if (c2err == C2_OK && params.size() == 1u) {
+        C2PortTunnelHandleTuning::output *videoTunnelSideband =
+            C2PortTunnelHandleTuning::output::From(params[0].get());
+        // Currently, Codec2 only supports the non-fd case for the sideband native_handle.
+        native_handle_t *handle = native_handle_create(0, videoTunnelSideband->flexCount());
+        *sidebandHandle = NativeHandle::create(handle, true /* ownsHandle */);
+        if (handle != nullptr && videoTunnelSideband->flexCount()) {
+            memcpy(handle->data, videoTunnelSideband->m.values,
+                    sizeof(int32_t) * videoTunnelSideband->flexCount());
+            return OK;
+        } else {
+            return NO_MEMORY;
+        }
+    }
+    return UNKNOWN_ERROR;
+}
+
 void CCodec::initiateReleaseIfStuck() {
     std::string name;
     bool pendingDeadline = false;
@@ -2111,7 +2319,13 @@
             pendingDeadline = true;
         }
     }
-    if (name.empty()) {
+    bool tunneled = false;
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        tunneled = config->mTunneled;
+    }
+    if (!tunneled && name.empty()) {
         constexpr std::chrono::steady_clock::duration kWorkDurationThreshold = 3s;
         std::chrono::steady_clock::duration elapsed = mChannel->elapsed();
         if (elapsed >= kWorkDurationThreshold) {
@@ -2500,4 +2714,3 @@
 }
 
 }  // namespace android
-
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 4d2700a..0008172 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1348,7 +1348,7 @@
     // about buffers from the previous generation do not interfere with the
     // newly initialized pipeline capacity.
 
-    {
+    if (inputFormat || outputFormat) {
         Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
         watcher->inputDelay(inputDelayValue)
                 .pipelineDelay(pipelineDelayValue)
@@ -1448,14 +1448,14 @@
 void CCodecBufferChannel::stop() {
     mSync.stop();
     mFirstValidFrameIndex = mFrameIndex.load(std::memory_order_relaxed);
-    if (mInputSurface != nullptr) {
-        mInputSurface.reset();
-    }
-    mPipelineWatcher.lock()->flush();
 }
 
 void CCodecBufferChannel::reset() {
     stop();
+    if (mInputSurface != nullptr) {
+        mInputSurface.reset();
+    }
+    mPipelineWatcher.lock()->flush();
     {
         Mutexed<Input>::Locked input(mInput);
         input->buffers.reset(new DummyInputBuffers(""));
@@ -1483,8 +1483,10 @@
 
 void CCodecBufferChannel::flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) {
     ALOGV("[%s] flush", mName);
+    std::vector<uint64_t> indices;
     std::list<std::unique_ptr<C2Work>> configs;
     for (const std::unique_ptr<C2Work> &work : flushedWork) {
+        indices.push_back(work->input.ordinal.frameIndex.peeku());
         if (!(work->input.flags & C2FrameData::FLAG_CODEC_CONFIG)) {
             continue;
         }
@@ -1497,6 +1499,7 @@
         std::unique_ptr<C2Work> copy(new C2Work);
         copy->input.flags = C2FrameData::flags_t(work->input.flags | C2FrameData::FLAG_DROP_FRAME);
         copy->input.ordinal = work->input.ordinal;
+        copy->input.ordinal.frameIndex = mFrameIndex++;
         copy->input.buffers.insert(
                 copy->input.buffers.begin(),
                 work->input.buffers.begin(),
@@ -1525,7 +1528,12 @@
             output->buffers->flushStash();
         }
     }
-    mPipelineWatcher.lock()->flush();
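+    // Instead of clearing the whole watcher, only the flushed frames are marked
+    // done so that any work queued after the flush stays tracked.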
+    {
+        Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
+        for (uint64_t index : indices) {
+            watcher->onWorkDone(index);
+        }
+    }
 }
 
 void CCodecBufferChannel::onWorkDone(
@@ -1712,6 +1720,17 @@
                 }
                 break;
             }
+            case C2PortTunnelSystemTime::CORE_INDEX: {
+                C2PortTunnelSystemTime::output frameRenderTime;
+                if (frameRenderTime.updateFrom(*param)) {
+                    ALOGV("[%s] onWorkDone: frame rendered (sys:%lld ns, media:%lld us)",
+                          mName, (long long)frameRenderTime.value,
+                          (long long)worklet->output.ordinal.timestamp.peekll());
+                    mCCodecCallback->onOutputFramesRendered(
+                            worklet->output.ordinal.timestamp.peek(), frameRenderTime.value);
+                }
+                break;
+            }
             default:
                 ALOGV("[%s] onWorkDone: unrecognized config update (%08X)",
                       mName, param->index());
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 6825dc2..ba44074 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -77,34 +77,39 @@
 void CCodecBuffers::handleImageData(const sp<Codec2Buffer> &buffer) {
     sp<ABuffer> imageDataCandidate = buffer->getImageData();
     if (imageDataCandidate == nullptr) {
+        if (mFormatWithImageData) {
+            // We previously sent the format with image data, so use the same format.
+            buffer->setFormat(mFormatWithImageData);
+        }
         return;
     }
-    sp<ABuffer> imageData;
-    if (!mFormat->findBuffer("image-data", &imageData)
-            || imageDataCandidate->size() != imageData->size()
-            || memcmp(imageDataCandidate->data(), imageData->data(), imageData->size()) != 0) {
+    if (!mLastImageData
+            || imageDataCandidate->size() != mLastImageData->size()
+            || memcmp(imageDataCandidate->data(),
+                      mLastImageData->data(),
+                      mLastImageData->size()) != 0) {
         ALOGD("[%s] updating image-data", mName);
-        sp<AMessage> newFormat = dupFormat();
-        newFormat->setBuffer("image-data", imageDataCandidate);
+        mFormatWithImageData = dupFormat();
+        mLastImageData = imageDataCandidate;
+        mFormatWithImageData->setBuffer("image-data", imageDataCandidate);
         MediaImage2 *img = (MediaImage2*)imageDataCandidate->data();
         if (img->mNumPlanes > 0 && img->mType != img->MEDIA_IMAGE_TYPE_UNKNOWN) {
             int32_t stride = img->mPlane[0].mRowInc;
-            newFormat->setInt32(KEY_STRIDE, stride);
+            mFormatWithImageData->setInt32(KEY_STRIDE, stride);
             ALOGD("[%s] updating stride = %d", mName, stride);
             if (img->mNumPlanes > 1 && stride > 0) {
                 int64_t offsetDelta =
                     (int64_t)img->mPlane[1].mOffset - (int64_t)img->mPlane[0].mOffset;
                 int32_t vstride = int32_t(offsetDelta / stride);
-                newFormat->setInt32(KEY_SLICE_HEIGHT, vstride);
+                mFormatWithImageData->setInt32(KEY_SLICE_HEIGHT, vstride);
                 ALOGD("[%s] updating vstride = %d", mName, vstride);
                 buffer->setRange(
                         img->mPlane[0].mOffset,
                         buffer->size() - img->mPlane[0].mOffset);
             }
         }
-        setFormat(newFormat);
-        buffer->setFormat(newFormat);
     }
+    buffer->setFormat(mFormatWithImageData);
 }
 
 // InputBuffers
@@ -273,22 +278,12 @@
 
     if (entry.notify && mFormat != outputFormat) {
         updateSkipCutBuffer(outputFormat);
-        sp<ABuffer> imageData;
-        if (mFormat->findBuffer("image-data", &imageData)) {
-            outputFormat->setBuffer("image-data", imageData);
-        }
-        int32_t stride;
-        if (mFormat->findInt32(KEY_STRIDE, &stride)) {
-            outputFormat->setInt32(KEY_STRIDE, stride);
-        }
-        int32_t sliceHeight;
-        if (mFormat->findInt32(KEY_SLICE_HEIGHT, &sliceHeight)) {
-            outputFormat->setInt32(KEY_SLICE_HEIGHT, sliceHeight);
-        }
+        // Clear cached image data so handleImageData() re-derives it for the new format
+        mLastImageData.clear();
         ALOGV("[%s] popFromStashAndRegister: output format reference changed: %p -> %p",
                 mName, mFormat.get(), outputFormat.get());
-        ALOGD("[%s] popFromStashAndRegister: output format changed to %s",
-                mName, outputFormat->debugString().c_str());
+        ALOGD("[%s] popFromStashAndRegister: at %lldus, output format changed to %s",
+                mName, (long long)entry.timestamp, outputFormat->debugString().c_str());
         setFormat(outputFormat);
     }
 
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index 7c4e7b1..995d3a4 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -86,6 +86,9 @@
     // Format to be used for creating MediaCodec-facing buffers.
     sp<AMessage> mFormat;
 
+    sp<ABuffer> mLastImageData;
+    sp<AMessage> mFormatWithImageData;
+
 private:
     DISALLOW_EVIL_CONSTRUCTORS(CCodecBuffers);
 };
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index d3814fb..727b1ff 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -18,11 +18,13 @@
 #define LOG_TAG "CCodecConfig"
 #include <cutils/properties.h>
 #include <log/log.h>
+#include <utils/NativeHandle.h>
 
 #include <C2Component.h>
 #include <C2Param.h>
 #include <util/C2InterfaceHelper.h>
 
+#include <media/stagefright/CodecBase.h>
 #include <media/stagefright/MediaCodecConstants.h>
 
 #include "CCodecConfig.h"
@@ -289,8 +291,8 @@
     std::vector<std::string> getPathsForDomain(
             Domain any, Domain all = Domain::ALL) const {
         std::vector<std::string> res;
-        for (const std::pair<std::string, std::vector<ConfigMapper>> &el : mConfigMappers) {
-            for (const ConfigMapper &cm : el.second) {
+        for (const auto &[key, mappers] : mConfigMappers) {
+            for (const ConfigMapper &cm : mappers) {
                 ALOGV("filtering %s %x %x %x %x", cm.path().c_str(), cm.domain(), any,
                         (cm.domain() & any), (cm.domain() & any & all));
                 if ((cm.domain() & any) && ((cm.domain() & any & all) == (any & all))) {
@@ -321,7 +323,8 @@
 CCodecConfig::CCodecConfig()
     : mInputFormat(new AMessage),
       mOutputFormat(new AMessage),
-      mUsingSurface(false) { }
+      mUsingSurface(false),
+      mTunneled(false) { }
 
 void CCodecConfig::initializeStandardParams() {
     typedef Domain D;
@@ -359,7 +362,10 @@
         .limitTo(D::OUTPUT & D::READ));
 
     add(ConfigMapper(KEY_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
-        .limitTo(D::ENCODER & D::OUTPUT));
+        .limitTo(D::ENCODER & D::CODED));
+    // Some audio decoders require bitrate information to be set
+    add(ConfigMapper(KEY_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
+        .limitTo(D::AUDIO & D::DECODER & D::CODED));
     // we also need to put the bitrate in the max bitrate field
     add(ConfigMapper(KEY_MAX_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
         .limitTo(D::ENCODER & D::READ & D::OUTPUT));
@@ -507,7 +513,7 @@
     add(ConfigMapper(std::string(KEY_FEATURE_) + FEATURE_SecurePlayback,
                      C2_PARAMKEY_SECURE_MODE, "value"));
 
-    add(ConfigMapper(KEY_PREPEND_HEADERS_TO_SYNC_FRAMES,
+    add(ConfigMapper(KEY_PREPEND_HEADER_TO_SYNC_FRAMES,
                      C2_PARAMKEY_PREPEND_HEADER_MODE, "value")
         .limitTo(D::ENCODER & D::VIDEO)
         .withMappers([](C2Value v) -> C2Value {
@@ -531,7 +537,7 @@
             return C2Value();
         }));
     // remove when codecs switch to PARAMKEY
-    deprecated(ConfigMapper(KEY_PREPEND_HEADERS_TO_SYNC_FRAMES,
+    deprecated(ConfigMapper(KEY_PREPEND_HEADER_TO_SYNC_FRAMES,
                             "coding.add-csd-to-sync-frames", "value")
                .limitTo(D::ENCODER & D::VIDEO));
     // convert to timestamp base
@@ -727,6 +733,17 @@
             return C2Value();
         }));
 
+    add(ConfigMapper(KEY_AAC_PROFILE, C2_PARAMKEY_PROFILE_LEVEL, "profile")
+        .limitTo(D::AUDIO & D::ENCODER & (D::CONFIG | D::PARAM))
+        .withMapper([mapper](C2Value v) -> C2Value {
+            C2Config::profile_t c2 = PROFILE_UNUSED;
+            int32_t sdk;
+            if (mapper && v.get(&sdk) && mapper->mapProfile(sdk, &c2)) {
+                return c2;
+            }
+            return PROFILE_UNUSED;
+        }));
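+    // For instance, an SDK KEY_AAC_PROFILE of AACObjectLC is expected to map to the
+    // corresponding C2 AAC-LC profile via the profile mapper (the exact enum mapping
+    // is the mapper's responsibility, not spelled out here).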
+
     // convert to dBFS and add default
     add(ConfigMapper(KEY_AAC_DRC_TARGET_REFERENCE_LEVEL, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL, "value")
         .limitTo(D::AUDIO & D::DECODER & (D::CONFIG | D::PARAM | D::READ))
@@ -1059,7 +1076,7 @@
             std::vector<std::string> keys;
             mParamUpdater->getKeysForParamIndex(desc->index(), &keys);
             for (const std::string &key : keys) {
-                mVendorParamIndices.insert_or_assign(key, desc->index());
+                mVendorParams.insert_or_assign(key, desc);
             }
         }
     }
@@ -1126,6 +1143,12 @@
             insertion.first->second = std::move(p);
         }
     }
+    if (mInputSurface
+            && (domain & mOutputDomain)
+            && mInputSurfaceDataspace != mInputSurface->getDataspace()) {
+        changed = true;
+        mInputSurfaceDataspace = mInputSurface->getDataspace();
+    }
 
     ALOGV("updated configuration has %zu params (%s)", mCurrentConfig.size(),
             changed ? "CHANGED" : "no change");
@@ -1191,8 +1214,8 @@
         const ReflectedParamUpdater::Dict &reflected,
         Domain portDomain) const {
     sp<AMessage> msg = new AMessage;
-    for (const std::pair<std::string, std::vector<ConfigMapper>> &el : mStandardParams->getKeys()) {
-        for (const ConfigMapper &cm : el.second) {
+    for (const auto &[key, mappers] : mStandardParams->getKeys()) {
+        for (const ConfigMapper &cm : mappers) {
             if ((cm.domain() & portDomain) == 0 // input-output-coded-raw
                 || (cm.domain() & mDomain) != mDomain // component domain + kind (these must match)
                 || (cm.domain() & IS_READ) == 0) {
@@ -1216,26 +1239,26 @@
                 ALOGD("unexpected untyped query value for key: %s", cm.path().c_str());
                 continue;
             }
-            msg->setItem(el.first.c_str(), item);
+            msg->setItem(key.c_str(), item);
         }
     }
 
     bool input = (portDomain & Domain::IS_INPUT);
     std::vector<std::string> vendorKeys;
-    for (const std::pair<std::string, ReflectedParamUpdater::Value> &entry : reflected) {
-        auto it = mVendorParamIndices.find(entry.first);
-        if (it == mVendorParamIndices.end()) {
+    for (const auto &[key, value] : reflected) {
+        auto it = mVendorParams.find(key);
+        if (it == mVendorParams.end()) {
             continue;
         }
-        if (mSubscribedIndices.count(it->second) == 0) {
+        C2Param::Index index = it->second->index();
+        if (mSubscribedIndices.count(index) == 0) {
             continue;
         }
         // For vendor parameters, we only care about direction
-        if ((input && !it->second.forInput())
-                || (!input && !it->second.forOutput())) {
+        if ((input && !index.forInput())
+                || (!input && !index.forOutput())) {
             continue;
         }
-        const ReflectedParamUpdater::Value &value = entry.second;
         C2Value c2Value;
         sp<ABuffer> bufValue;
         AString strValue;
@@ -1247,10 +1270,10 @@
         } else if (value.find(&strValue)) {
             item.set(strValue);
         } else {
-            ALOGD("unexpected untyped query value for key: %s", entry.first.c_str());
+            ALOGD("unexpected untyped query value for key: %s", key.c_str());
             continue;
         }
-        msg->setItem(entry.first.c_str(), item);
+        msg->setItem(key.c_str(), item);
     }
 
     { // convert from Codec 2.0 rect to MediaFormat rect and add crop rect if not present
@@ -1310,6 +1333,14 @@
         }
     }
 
+    // Remove KEY_AAC_SBR_MODE from the SDK message if it is outside the supported
+    // range: the SDK has no way to signal "use the default SBR mode for the profile"
+    // other than omitting the key from the format.
+    int sbrMode;
+    if (msg->findInt32(KEY_AAC_SBR_MODE, &sbrMode) && (sbrMode < 0 || sbrMode > 2)) {
+        msg->removeEntryAt(msg->findEntryByName(KEY_AAC_SBR_MODE));
+    }
+
     { // convert color info
         // move default color to color aspect if not read from the component
         int32_t tmp;
@@ -1354,7 +1385,6 @@
             msg->removeEntryAt(msg->findEntryByName("color-matrix"));
         }
 
-
         // calculate dataspace for raw graphic buffers if not specified by component, or if
         // using surface with unspecified aspects (as those must be defaulted which may change
         // the dataspace)
@@ -1392,6 +1422,23 @@
             }
         }
 
+        if (mInputSurface) {
+            android_dataspace dataspace = mInputSurface->getDataspace();
+            ColorUtils::convertDataSpaceToV0(dataspace);
+            int32_t standard;
+            ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
+            if (range != 0) {
+                msg->setInt32(KEY_COLOR_RANGE, range);
+            }
+            if (standard != 0) {
+                msg->setInt32(KEY_COLOR_STANDARD, standard);
+            }
+            if (transfer != 0) {
+                msg->setInt32(KEY_COLOR_TRANSFER, transfer);
+            }
+            msg->setInt32("android._dataspace", dataspace);
+        }
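+        // Example: an input surface reporting HAL_DATASPACE_BT2020_PQ surfaces as
+        // COLOR_RANGE_FULL / COLOR_STANDARD_BT2020 / COLOR_TRANSFER_ST2084 here
+        // (exercised by the DataspaceUpdate test).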
+
         // HDR static info
 
         C2HdrStaticMetadataStruct hdr;
@@ -1809,8 +1856,81 @@
 status_t CCodecConfig::subscribeToAllVendorParams(
         const std::shared_ptr<Codec2Client::Configurable> &configurable,
         c2_blocking_t blocking) {
-    for (const std::pair<std::string, C2Param::Index> &entry : mVendorParamIndices) {
-        mSubscribedIndices.insert(entry.second);
+    for (const auto &[path, desc] : mVendorParams) {
+        mSubscribedIndices.insert(desc->index());
+    }
+    return subscribeToConfigUpdate(configurable, {}, blocking);
+}
+
+status_t CCodecConfig::querySupportedParameters(std::vector<std::string> *names) {
+    if (!names) {
+        return BAD_VALUE;
+    }
+    names->clear();
+    // TODO: expand to standard params
+    for (const auto &[key, desc] : mVendorParams) {
+        names->push_back(key);
+    }
+    return OK;
+}
+
+status_t CCodecConfig::describe(const std::string &name, CodecParameterDescriptor *desc) {
+    if (!desc) {
+        return BAD_VALUE;
+    }
+    // TODO: expand to standard params
+    desc->name = name;
+    switch (mParamUpdater->getTypeForKey(name)) {
+        case C2FieldDescriptor::INT32:
+        case C2FieldDescriptor::UINT32:
+        case C2FieldDescriptor::CNTR32:
+            desc->type = AMessage::kTypeInt32;
+            return OK;
+        case C2FieldDescriptor::INT64:
+        case C2FieldDescriptor::UINT64:
+        case C2FieldDescriptor::CNTR64:
+            desc->type = AMessage::kTypeInt64;
+            return OK;
+        case C2FieldDescriptor::FLOAT:
+            desc->type = AMessage::kTypeFloat;
+            return OK;
+        case C2FieldDescriptor::STRING:
+            desc->type = AMessage::kTypeString;
+            return OK;
+        case C2FieldDescriptor::BLOB:
+            desc->type = AMessage::kTypeBuffer;
+            return OK;
+        default:
+            return NAME_NOT_FOUND;
+    }
+}
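+// e.g. describe() reports a vendor key backed by a 32-bit C2 field as
+// AMessage::kTypeInt32; unknown keys (type_t(~0) above) yield NAME_NOT_FOUND.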
+
+status_t CCodecConfig::subscribeToVendorConfigUpdate(
+        const std::shared_ptr<Codec2Client::Configurable> &configurable,
+        const std::vector<std::string> &names,
+        c2_blocking_t blocking) {
+    for (const std::string &name : names) {
+        auto it = mVendorParams.find(name);
+        if (it == mVendorParams.end()) {
+            ALOGD("%s is not a recognized vendor parameter; ignored.", name.c_str());
+            continue;
+        }
+        mSubscribedIndices.insert(it->second->index());
+    }
+    return subscribeToConfigUpdate(configurable, {}, blocking);
+}
+
+status_t CCodecConfig::unsubscribeFromVendorConfigUpdate(
+        const std::shared_ptr<Codec2Client::Configurable> &configurable,
+        const std::vector<std::string> &names,
+        c2_blocking_t blocking) {
+    for (const std::string &name : names) {
+        auto it = mVendorParams.find(name);
+        if (it == mVendorParams.end()) {
+            ALOGD("%s is not a recognized vendor parameter; ignored.", name.c_str());
+            continue;
+        }
+        mSubscribedIndices.erase(it->second->index());
     }
     return subscribeToConfigUpdate(configurable, {}, blocking);
 }
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index 2895746..417b773 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -35,6 +35,8 @@
 namespace android {
 
 struct AMessage;
+struct CodecParameterDescriptor;
+class NativeHandle;
 struct StandardParams;
 
 /**
@@ -123,6 +125,7 @@
 
     std::shared_ptr<InputSurfaceWrapper> mInputSurface;
     std::unique_ptr<InputSurfaceWrapper::Config> mISConfig;
+    android_dataspace mInputSurfaceDataspace;
 
     /// the current configuration. Updated after configure() and based on configUpdate in
     /// onWorkDone
@@ -136,11 +139,15 @@
     /// For now support a validation function.
     std::map<C2Param::Index, LocalParamValidator> mLocalParams;
 
-    /// Vendor field name -> index map.
-    std::map<std::string, C2Param::Index> mVendorParamIndices;
+    /// Vendor field name -> desc map.
+    std::map<std::string, std::shared_ptr<C2ParamDescriptor>> mVendorParams;
 
     std::set<std::string> mLastConfig;
 
+    /// Tunneled codecs
+    bool mTunneled;
+    sp<NativeHandle> mSidebandHandle;
+
     CCodecConfig();
 
     /// initializes the members required to manage the format: descriptors, reflector,
@@ -321,6 +328,41 @@
         return Watcher<T>(index, this);
     }
 
+    /**
+     * Queries supported parameters and puts their keys into |names|.
+     * TODO: currently this method queries vendor parameter keys only.
+     *
+     * \return OK if successful.
+     *         BAD_VALUE if |names| is nullptr.
+     */
+    status_t querySupportedParameters(std::vector<std::string> *names);
+
+    /**
+     * Describes the parameter |name|, filling the information into |desc|.
+     * TODO: currently this method works only for vendor parameters.
+     *
+     * \return OK if successful.
+     *         BAD_VALUE if |desc| is nullptr.
+     *         NAME_NOT_FOUND if |name| is not a recognized parameter name.
+     */
+    status_t describe(const std::string &name, CodecParameterDescriptor *desc);
+
+    /**
+     * Find corresponding indices for |names| and subscribe to them.
+     */
+    status_t subscribeToVendorConfigUpdate(
+            const std::shared_ptr<Codec2Client::Configurable> &configurable,
+            const std::vector<std::string> &names,
+            c2_blocking_t blocking = C2_DONT_BLOCK);
+
+    /**
+     * Find corresponding indices for |names| and unsubscribe from them.
+     */
+    status_t unsubscribeFromVendorConfigUpdate(
+            const std::shared_ptr<Codec2Client::Configurable> &configurable,
+            const std::vector<std::string> &names,
+            c2_blocking_t blocking = C2_DONT_BLOCK);
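+    // Typical flow (sketch): querySupportedParameters() to list vendor keys,
+    // describe() for each key's SDK type, then subscribeToVendorConfigUpdate() so
+    // subsequent config updates for those keys are reflected back into the formats.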
+
 private:
 
     /// initializes the standard MediaCodec to Codec 2.0 params mapping
diff --git a/media/codec2/sfplugin/FrameReassembler.cpp b/media/codec2/sfplugin/FrameReassembler.cpp
index f8e6937..cf1be17 100644
--- a/media/codec2/sfplugin/FrameReassembler.cpp
+++ b/media/codec2/sfplugin/FrameReassembler.cpp
@@ -143,6 +143,7 @@
 
     if (buffer->size() > 0) {
         mCurrentOrdinal.timestamp = timeUs;
+        mCurrentOrdinal.customOrdinal = timeUs;
     }
 
     size_t frameSizeBytes = mFrameSize.value() * mChannelCount * bytesPerSample();
@@ -219,6 +220,7 @@
 
     ++mCurrentOrdinal.frameIndex;
     mCurrentOrdinal.timestamp += mFrameSize.value() * 1000000 / mSampleRate;
+    mCurrentOrdinal.customOrdinal = mCurrentOrdinal.timestamp;
     mCurrentBlock.reset();
     mWriteView.reset();
 }
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index bb35763..3ddae01 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -106,6 +106,11 @@
      */
     virtual void onInputBufferDone(c2_cntr64_t /* index */) {}
 
+    /**
+     * Returns dataspace information from GraphicBufferSource.
+     */
+    virtual android_dataspace getDataspace() { return mDataSpace; }
+
 protected:
     android_dataspace mDataSpace;
 };
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
index 0ee9056..bc9197c 100644
--- a/media/codec2/sfplugin/PipelineWatcher.cpp
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -95,6 +95,7 @@
 }
 
 void PipelineWatcher::flush() {
+    ALOGV("flush");
     mFramesInPipeline.clear();
 }
 
diff --git a/media/codec2/sfplugin/ReflectedParamUpdater.cpp b/media/codec2/sfplugin/ReflectedParamUpdater.cpp
index f39051b..d14b9b0 100644
--- a/media/codec2/sfplugin/ReflectedParamUpdater.cpp
+++ b/media/codec2/sfplugin/ReflectedParamUpdater.cpp
@@ -288,6 +288,20 @@
     }
 }
 
+C2FieldDescriptor::type_t ReflectedParamUpdater::getTypeForKey(
+        const std::string &key) const {
+    auto it = mMap.find(key);
+    if (it == mMap.end()) {
+        return C2FieldDescriptor::type_t(~0);
+    }
+
+    if (it->second.fieldDesc) {
+        return it->second.fieldDesc->type();
+    }
+    // whole param is exposed as a blob
+    return C2FieldDescriptor::BLOB;
+}
+
 void ReflectedParamUpdater::updateParamsFromMessage(
         const Dict &params,
         std::vector<std::unique_ptr<C2Param>> *vec /* nonnull */) const {
diff --git a/media/codec2/sfplugin/ReflectedParamUpdater.h b/media/codec2/sfplugin/ReflectedParamUpdater.h
index 752c7e4..6dcf2a3 100644
--- a/media/codec2/sfplugin/ReflectedParamUpdater.h
+++ b/media/codec2/sfplugin/ReflectedParamUpdater.h
@@ -176,6 +176,14 @@
             std::vector<std::string> *keys /* nonnull */) const;
 
     /**
+     * Get the field type for the given field name.
+     *
+     * \param name[in]  field name
+     * \return type of the field, or type_t(~0) if not found.
+     */
+    C2FieldDescriptor::type_t getTypeForKey(const std::string &name) const;
+
+    /**
      * Update C2Param objects from field name and value in AMessage object.
      *
      * \param params[in]    Dict object with field name to value pairs.
diff --git a/media/codec2/sfplugin/include/media/stagefright/CCodec.h b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
index dbbb5d5..ec18128 100644
--- a/media/codec2/sfplugin/include/media/stagefright/CCodec.h
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
@@ -65,6 +65,12 @@
     virtual void signalEndOfInputStream() override;
     virtual void signalRequestIDRFrame() override;
 
+    virtual status_t querySupportedParameters(std::vector<std::string> *names) override;
+    virtual status_t describeParameter(
+            const std::string &name, CodecParameterDescriptor *desc) override;
+    virtual status_t subscribeToParameters(const std::vector<std::string> &names) override;
+    virtual status_t unsubscribeFromParameters(const std::vector<std::string> &names) override;
+
     void initiateReleaseIfStuck();
     void onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems);
     void onInputBufferDone(uint64_t frameIndex, size_t arrayIndex);
@@ -126,6 +132,11 @@
             const std::chrono::milliseconds &timeout,
             const char *name);
 
+    status_t configureTunneledVideoPlayback(
+            const std::shared_ptr<Codec2Client::Component> comp,
+            sp<NativeHandle> *sidebandHandle,
+            const sp<AMessage> &msg);
+
     enum {
         kWhatAllocate,
         kWhatConfigure,
diff --git a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
index c9caa01..7c660dc 100644
--- a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
@@ -208,6 +208,24 @@
                         .withSetter(Setter<C2StreamPixelAspectRatioInfo::output>)
                         .build());
 
+                if (isEncoder) {
+                    addParameter(
+                            DefineParam(mInputBitrate, C2_PARAMKEY_BITRATE)
+                            .withDefault(new C2StreamBitrateInfo::input(0u))
+                            .withFields({C2F(mInputBitrate, value).any()})
+                            .withSetter(Setter<C2StreamBitrateInfo::input>)
+                            .build());
+
+                    addParameter(
+                            DefineParam(mOutputBitrate, C2_PARAMKEY_BITRATE)
+                            .withDefault(new C2StreamBitrateInfo::output(0u))
+                            .withFields({C2F(mOutputBitrate, value).any()})
+                            .calculatedAs(
+                                Copy<C2StreamBitrateInfo::output, C2StreamBitrateInfo::input>,
+                                mInputBitrate)
+                            .build());
+                }
+
                 // TODO: more SDK params
             }
         private:
@@ -221,11 +239,19 @@
             std::shared_ptr<C2StreamVendorInt64Info::output> mInt64Output;
             std::shared_ptr<C2PortVendorStringInfo::input> mStringInput;
             std::shared_ptr<C2StreamPixelAspectRatioInfo::output> mPixelAspectRatio;
+            std::shared_ptr<C2StreamBitrateInfo::input> mInputBitrate;
+            std::shared_ptr<C2StreamBitrateInfo::output> mOutputBitrate;
 
             template<typename T>
             static C2R Setter(bool, C2P<T> &) {
                 return C2R::Ok();
             }
+
+            template<typename ME, typename DEP>
+            static C2R Copy(bool, C2P<ME> &me, const C2P<DEP> &dep) {
+                me.set().value = dep.v.value;
+                return C2R::Ok();
+            }
         };
 
         Impl mImpl;
@@ -457,4 +483,97 @@
             << "mInputFormat = " << mConfig.mInputFormat->debugString().c_str();
 }
 
+TEST_F(CCodecConfigTest, DataspaceUpdate) {
+    init(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER, MIMETYPE_VIDEO_AVC);
+
+    ASSERT_EQ(OK, mConfig.initialize(mReflector, mConfigurable));
+    class InputSurfaceStub : public InputSurfaceWrapper {
+    public:
+        ~InputSurfaceStub() override = default;
+        status_t connect(const std::shared_ptr<Codec2Client::Component> &) override {
+            return OK;
+        }
+        void disconnect() override {}
+        status_t start() override { return OK; }
+        status_t signalEndOfInputStream() override { return OK; }
+        status_t configure(Config &) override { return OK; }
+    };
+    mConfig.mInputSurface = std::make_shared<InputSurfaceStub>();
+
+    sp<AMessage> format{new AMessage};
+    format->setInt32(KEY_COLOR_RANGE, COLOR_RANGE_LIMITED);
+    format->setInt32(KEY_COLOR_STANDARD, COLOR_STANDARD_BT709);
+    format->setInt32(KEY_COLOR_TRANSFER, COLOR_TRANSFER_SDR_VIDEO);
+    format->setInt32(KEY_BIT_RATE, 100);
+
+    std::vector<std::unique_ptr<C2Param>> configUpdate;
+    ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(
+            mConfigurable, format, D::ALL, C2_MAY_BLOCK, &configUpdate));
+    ASSERT_TRUE(mConfig.updateConfiguration(configUpdate, D::ALL));
+
+    int32_t range{0};
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_RANGE_LIMITED, range)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    int32_t standard{0};
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_STANDARD_BT709, standard)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    int32_t transfer{0};
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_TRANSFER_SDR_VIDEO, transfer)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    mConfig.mInputSurface->setDataSpace(HAL_DATASPACE_BT2020_PQ);
+
+    // Dataspace from input surface should override the configured setting
+    mConfig.updateFormats(D::ALL);
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_RANGE_FULL, range)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_STANDARD_BT2020, standard)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_TRANSFER_ST2084, transfer)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    // Simulate bitrate update
+    format = new AMessage;
+    format->setInt32(KEY_BIT_RATE, 200);
+    configUpdate.clear();
+    ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(
+            mConfigurable, format, D::ALL, C2_MAY_BLOCK, &configUpdate));
+    ASSERT_EQ(OK, mConfig.setParameters(mConfigurable, configUpdate, C2_MAY_BLOCK));
+
+    // Color information should remain the same
+    mConfig.updateFormats(D::ALL);
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_RANGE_FULL, range)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_STANDARD_BT2020, standard)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_TRANSFER_ST2084, transfer)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+}
+
 } // namespace android
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 74e7ef1..2f4d6b1 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -33,11 +33,13 @@
         "libcodec2_vndk",
         "libcutils",
         "liblog",
+        "libnativewindow",
         "libstagefright_foundation",
         "libutils",
     ],
 
     static_libs: [
+        "libarect",
         "libyuv_static",
     ],
 
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index bf2a07e..0966988 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -23,6 +23,7 @@
 #include <list>
 #include <mutex>
 
+#include <android/hardware_buffer.h>
 #include <media/hardware/HardwareAPI.h>
 #include <media/stagefright/foundation/AUtils.h>
 
@@ -121,32 +122,69 @@
     if (view.crop().width != img->mWidth || view.crop().height != img->mHeight) {
         return BAD_VALUE;
     }
-    if ((IsNV12(view) && IsI420(img)) || (IsI420(view) && IsNV12(img))) {
-        // Take shortcuts to use libyuv functions between NV12 and I420 conversion.
-        const uint8_t* src_y = view.data()[0];
-        const uint8_t* src_u = view.data()[1];
-        const uint8_t* src_v = view.data()[2];
-        int32_t src_stride_y = view.layout().planes[0].rowInc;
-        int32_t src_stride_u = view.layout().planes[1].rowInc;
-        int32_t src_stride_v = view.layout().planes[2].rowInc;
-        uint8_t* dst_y = imgBase + img->mPlane[0].mOffset;
-        uint8_t* dst_u = imgBase + img->mPlane[1].mOffset;
-        uint8_t* dst_v = imgBase + img->mPlane[2].mOffset;
-        int32_t dst_stride_y = img->mPlane[0].mRowInc;
-        int32_t dst_stride_u = img->mPlane[1].mRowInc;
-        int32_t dst_stride_v = img->mPlane[2].mRowInc;
-        if (IsNV12(view) && IsI420(img)) {
+    const uint8_t* src_y = view.data()[0];
+    const uint8_t* src_u = view.data()[1];
+    const uint8_t* src_v = view.data()[2];
+    int32_t src_stride_y = view.layout().planes[0].rowInc;
+    int32_t src_stride_u = view.layout().planes[1].rowInc;
+    int32_t src_stride_v = view.layout().planes[2].rowInc;
+    uint8_t* dst_y = imgBase + img->mPlane[0].mOffset;
+    uint8_t* dst_u = imgBase + img->mPlane[1].mOffset;
+    uint8_t* dst_v = imgBase + img->mPlane[2].mOffset;
+    int32_t dst_stride_y = img->mPlane[0].mRowInc;
+    int32_t dst_stride_u = img->mPlane[1].mRowInc;
+    int32_t dst_stride_v = img->mPlane[2].mRowInc;
+    int width = view.crop().width;
+    int height = view.crop().height;
+
+    if (IsNV12(view)) {
+        if (IsNV12(img)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
+            return OK;
+        } else if (IsNV21(img)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(img)) {
             if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
-                                    dst_u, dst_stride_u, dst_v, dst_stride_v, view.crop().width,
-                                    view.crop().height)) {
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
-        } else {
+        }
+    } else if (IsNV21(view)) {
+        if (IsNV12(img)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
+                return OK;
+            }
+        } else if (IsNV21(img)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
+            return OK;
+        } else if (IsI420(img)) {
+            if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        }
+    } else if (IsI420(view)) {
+        if (IsNV12(img)) {
             if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
-                                    dst_y, dst_stride_y, dst_u, dst_stride_u, view.crop().width,
-                                    view.crop().height)) {
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
+        } else if (IsNV21(img)) {
+            if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(img)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
+            return OK;
         }
     }
     return _ImageCopy<true>(view, img, imgBase);
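[annotation] The rewritten fast path above dispatches on the (source, destination) layout pair: same-format pairs are copied plane by plane with libyuv::CopyPlane, NV12/NV21 pairs swap the interleaved chroma pointers, and mixed planar/semiplanar pairs go through the matching libyuv converter, falling back to the generic _ImageCopy only when no converter succeeds. As a minimal illustration (not part of the patch), this is what one of those conversions looks like in isolation, assuming the caller already holds plane pointers and strides for an even-sized frame:

    #include <libyuv.h>
    #include <cstdint>

    // Illustrative sketch: convert one NV12 frame to I420 with libyuv.
    // Returns true on success; libyuv converters return 0 on success,
    // which is why the patch tests `!libyuv::NV12ToI420(...)`.
    bool Nv12FrameToI420(const uint8_t* src_y, int src_stride_y,
                         const uint8_t* src_uv, int src_stride_uv,
                         uint8_t* dst_y, int dst_stride_y,
                         uint8_t* dst_u, int dst_stride_u,
                         uint8_t* dst_v, int dst_stride_v,
                         int width, int height) {
        return libyuv::NV12ToI420(src_y, src_stride_y, src_uv, src_stride_uv,
                                  dst_y, dst_stride_y, dst_u, dst_stride_u,
                                  dst_v, dst_stride_v, width, height) == 0;
    }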
@@ -156,32 +194,68 @@
     if (view.crop().width != img->mWidth || view.crop().height != img->mHeight) {
         return BAD_VALUE;
     }
-    if ((IsNV12(img) && IsI420(view)) || (IsI420(img) && IsNV12(view))) {
-        // Take shortcuts to use libyuv functions between NV12 and I420 conversion.
-        const uint8_t* src_y = imgBase + img->mPlane[0].mOffset;
-        const uint8_t* src_u = imgBase + img->mPlane[1].mOffset;
-        const uint8_t* src_v = imgBase + img->mPlane[2].mOffset;
-        int32_t src_stride_y = img->mPlane[0].mRowInc;
-        int32_t src_stride_u = img->mPlane[1].mRowInc;
-        int32_t src_stride_v = img->mPlane[2].mRowInc;
-        uint8_t* dst_y = view.data()[0];
-        uint8_t* dst_u = view.data()[1];
-        uint8_t* dst_v = view.data()[2];
-        int32_t dst_stride_y = view.layout().planes[0].rowInc;
-        int32_t dst_stride_u = view.layout().planes[1].rowInc;
-        int32_t dst_stride_v = view.layout().planes[2].rowInc;
-        if (IsNV12(img) && IsI420(view)) {
+    const uint8_t* src_y = imgBase + img->mPlane[0].mOffset;
+    const uint8_t* src_u = imgBase + img->mPlane[1].mOffset;
+    const uint8_t* src_v = imgBase + img->mPlane[2].mOffset;
+    int32_t src_stride_y = img->mPlane[0].mRowInc;
+    int32_t src_stride_u = img->mPlane[1].mRowInc;
+    int32_t src_stride_v = img->mPlane[2].mRowInc;
+    uint8_t* dst_y = view.data()[0];
+    uint8_t* dst_u = view.data()[1];
+    uint8_t* dst_v = view.data()[2];
+    int32_t dst_stride_y = view.layout().planes[0].rowInc;
+    int32_t dst_stride_u = view.layout().planes[1].rowInc;
+    int32_t dst_stride_v = view.layout().planes[2].rowInc;
+    int width = view.crop().width;
+    int height = view.crop().height;
+    if (IsNV12(img)) {
+        if (IsNV12(view)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
+            return OK;
+        } else if (IsNV21(view)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(view)) {
             if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
-                                    dst_u, dst_stride_u, dst_v, dst_stride_v, view.width(),
-                                    view.height())) {
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
-        } else {
+        }
+    } else if (IsNV21(img)) {
+        if (IsNV12(view)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
+                return OK;
+            }
+        } else if (IsNV21(view)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
+            return OK;
+        } else if (IsI420(view)) {
+            if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        }
+    } else if (IsI420(img)) {
+        if (IsNV12(view)) {
             if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
-                                    dst_y, dst_stride_y, dst_u, dst_stride_u, view.width(),
-                                    view.height())) {
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
+        } else if (IsNV21(view)) {
+            if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(view)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
+            return OK;
         }
     }
     return _ImageCopy<false>(view, img, imgBase);
@@ -225,6 +299,20 @@
             && layout.planes[layout.PLANE_V].offset == 1);
 }
 
+bool IsNV21(const C2GraphicView &view) {
+    if (!IsYUV420(view)) {
+        return false;
+    }
+    const C2PlanarLayout &layout = view.layout();
+    return (layout.rootPlanes == 2
+            && layout.planes[layout.PLANE_U].colInc == 2
+            && layout.planes[layout.PLANE_U].rootIx == layout.PLANE_V
+            && layout.planes[layout.PLANE_U].offset == 1
+            && layout.planes[layout.PLANE_V].colInc == 2
+            && layout.planes[layout.PLANE_V].rootIx == layout.PLANE_V
+            && layout.planes[layout.PLANE_V].offset == 0);
+}
+
 bool IsI420(const C2GraphicView &view) {
     if (!IsYUV420(view)) {
         return false;
@@ -258,7 +346,16 @@
     }
     return (img->mPlane[1].mColInc == 2
             && img->mPlane[2].mColInc == 2
-            && (img->mPlane[2].mOffset - img->mPlane[1].mOffset == 1));
+            && (img->mPlane[2].mOffset == img->mPlane[1].mOffset + 1));
+}
+
+bool IsNV21(const MediaImage2 *img) {
+    if (!IsYUV420(img)) {
+        return false;
+    }
+    return (img->mPlane[1].mColInc == 2
+            && img->mPlane[2].mColInc == 2
+            && (img->mPlane[1].mOffset == img->mPlane[2].mOffset + 1));
 }
 
 bool IsI420(const MediaImage2 *img) {
@@ -270,6 +367,76 @@
             && img->mPlane[2].mOffset > img->mPlane[1].mOffset);
 }
 
+FlexLayout GetYuv420FlexibleLayout() {
+    static FlexLayout sLayout = []{
+        AHardwareBuffer_Desc desc = {
+            16,  // width
+            16,  // height
+            1,   // layers
+            AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
+            AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+            0,   // stride
+            0,   // rfu0
+            0,   // rfu1
+        };
+        AHardwareBuffer *buffer = nullptr;
+        int ret = AHardwareBuffer_allocate(&desc, &buffer);
+        if (ret != 0) {
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        class AutoCloser {
+        public:
+            AutoCloser(AHardwareBuffer *buffer) : mBuffer(buffer), mLocked(false) {}
+            ~AutoCloser() {
+                if (mLocked) {
+                    AHardwareBuffer_unlock(mBuffer, nullptr);
+                }
+                AHardwareBuffer_release(mBuffer);
+            }
+
+            void setLocked() { mLocked = true; }
+
+        private:
+            AHardwareBuffer *mBuffer;
+            bool mLocked;
+        } autoCloser(buffer);
+        AHardwareBuffer_Planes planes;
+        ret = AHardwareBuffer_lockPlanes(
+                buffer,
+                AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+                -1,       // fence
+                nullptr,  // rect
+                &planes);
+        if (ret != 0) {
+            AHardwareBuffer_release(buffer);
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        autoCloser.setLocked();
+        if (planes.planeCount != 3) {
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        if (planes.planes[0].pixelStride != 1) {
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        if (planes.planes[1].pixelStride == 1 && planes.planes[2].pixelStride == 1) {
+            return FLEX_LAYOUT_PLANAR;
+        }
+        if (planes.planes[1].pixelStride == 2 && planes.planes[2].pixelStride == 2) {
+            ssize_t uvDist =
+                static_cast<uint8_t *>(planes.planes[2].data) -
+                static_cast<uint8_t *>(planes.planes[1].data);
+            if (uvDist == 1) {
+                return FLEX_LAYOUT_SEMIPLANAR_UV;
+            } else if (uvDist == -1) {
+                return FLEX_LAYOUT_SEMIPLANAR_VU;
+            }
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        return FLEX_LAYOUT_UNKNOWN;
+    }();
+    return sLayout;
+}
+
 MediaImage2 CreateYUV420PlanarMediaImage2(
         uint32_t width, uint32_t height, uint32_t stride, uint32_t vstride) {
     return MediaImage2 {
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index afadf00..af29e81 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -96,6 +96,11 @@
 bool IsNV12(const C2GraphicView &view);
 
 /**
+ * Returns true iff a view has a NV21 layout.
+ */
+bool IsNV21(const C2GraphicView &view);
+
+/**
  * Returns true iff a view has a I420 layout.
  */
 bool IsI420(const C2GraphicView &view);
@@ -111,10 +116,26 @@
 bool IsNV12(const MediaImage2 *img);
 
 /**
+ * Returns true iff a MediaImage2 has a NV21 layout.
+ */
+bool IsNV21(const MediaImage2 *img);
+
+/**
  * Returns true iff a MediaImage2 has a I420 layout.
  */
 bool IsI420(const MediaImage2 *img);
 
+enum FlexLayout {
+    FLEX_LAYOUT_UNKNOWN,
+    FLEX_LAYOUT_PLANAR,
+    FLEX_LAYOUT_SEMIPLANAR_UV,
+    FLEX_LAYOUT_SEMIPLANAR_VU,
+};
+/**
+ * Returns the layout of the YCBCR_420_888 pixel format.
+ */
+FlexLayout GetYuv420FlexibleLayout();
+
 /**
  * A raw memory block to use for internal buffers.
  *
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 1390642..00bf84f 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -311,6 +311,8 @@
     { C2Config::PCM_8, kAudioEncodingPcm8bit },
     { C2Config::PCM_16, kAudioEncodingPcm16bit },
     { C2Config::PCM_FLOAT, kAudioEncodingPcmFloat },
+    { C2Config::PCM_24, kAudioEncodingPcm24bitPacked },
+    { C2Config::PCM_32, kAudioEncodingPcm32bit },
 };
 
 ALookup<C2Config::level_t, int32_t> sVp9Levels = {
diff --git a/media/codec2/tests/vndk/C2BufferTest.cpp b/media/codec2/tests/vndk/C2BufferTest.cpp
index a9f8e17..0cfb465 100644
--- a/media/codec2/tests/vndk/C2BufferTest.cpp
+++ b/media/codec2/tests/vndk/C2BufferTest.cpp
@@ -16,11 +16,12 @@
 
 #include <gtest/gtest.h>
 
-#include <C2AllocatorIon.h>
 #include <C2AllocatorGralloc.h>
 #include <C2Buffer.h>
 #include <C2BufferPriv.h>
+#include <C2Config.h>
 #include <C2ParamDef.h>
+#include <C2PlatformSupport.h>
 
 #include <system/graphics.h>
 
@@ -233,10 +234,10 @@
 public:
     C2BufferTest()
         : mBlockPoolId(C2BlockPool::PLATFORM_START),
-          mLinearAllocator(std::make_shared<C2AllocatorIon>('i')),
           mSize(0u),
           mAddr(nullptr),
           mGraphicAllocator(std::make_shared<C2AllocatorGralloc>('g')) {
+        getLinearAllocator(&mLinearAllocator);
     }
 
     ~C2BufferTest() = default;
@@ -329,6 +330,11 @@
     }
 
 private:
+    void getLinearAllocator(std::shared_ptr<C2Allocator>* allocator) {
+        std::shared_ptr<C2AllocatorStore> store = android::GetCodec2PlatformAllocatorStore();
+        ASSERT_EQ(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, allocator), C2_OK);
+    }
+
     C2BlockPool::local_id_t mBlockPoolId;
     std::shared_ptr<C2Allocator> mLinearAllocator;
     std::shared_ptr<C2LinearAllocation> mLinearAllocation;
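[annotation] The test fixture now obtains its linear allocator from the platform allocator store instead of constructing C2AllocatorIon directly, so the test no longer assumes ion is present. A sketch of the same lookup outside gtest, with a return-value check in place of ASSERT_EQ (the helper name is illustrative):

    #include <memory>
    #include <C2Buffer.h>
    #include <C2PlatformSupport.h>

    // Sketch: fetch the default linear allocator from the Codec2 platform store.
    std::shared_ptr<C2Allocator> fetchDefaultLinearAllocator() {
        std::shared_ptr<C2AllocatorStore> store = android::GetCodec2PlatformAllocatorStore();
        std::shared_ptr<C2Allocator> allocator;
        if (store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) != C2_OK) {
            return nullptr;  // the store could not provide a linear allocator
        }
        return allocator;
    }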
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index 85623b8..a8528df 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -30,10 +30,15 @@
 #include <C2ErrnoUtils.h>
 #include <C2HandleIonInternal.h>
 
+#include <android-base/properties.h>
+
 namespace android {
 
 namespace {
     constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+
+    // max padding after ion/dmabuf allocations in bytes
+    constexpr uint32_t MAX_PADDING = 0x8000; // 32KB
 }
 
 /* size_t <=> int(lo), int(hi) conversions */
@@ -376,14 +381,34 @@
         unsigned heapMask, unsigned flags, C2Allocator::id_t id) {
     int bufferFd = -1;
     ion_user_handle_t buffer = -1;
-    size_t alignedSize = align == 0 ? size : (size + align - 1) & ~(align - 1);
+    // NOTE: read this value directly from the system property, as this code has to run on
+    // Android Q but the sysprop was only introduced in Android S.
+    static size_t sPadding =
+        base::GetUintProperty("media.c2.dmabuf.padding", (uint32_t)0, MAX_PADDING);
+    if (sPadding > SIZE_MAX - size) {
+        ALOGD("ion_alloc: size %#zx cannot accommodate padding %#zx", size, sPadding);
+        // use ImplV2 as there is no allocation anyways
+        return new ImplV2(ionFd, size, -1, id, -ENOMEM);
+    }
+
+    size_t allocSize = size + sPadding;
+    if (align) {
+        if (align - 1 > SIZE_MAX - allocSize) {
+            ALOGD("ion_alloc: size %#zx cannot accommodate padding %#zx and alignment %#zx",
+                  size, sPadding, align);
+            // use ImplV2 as there is no allocation anyways
+            return new ImplV2(ionFd, size, -1, id, -ENOMEM);
+        }
+        allocSize += align - 1;
+        allocSize &= ~(align - 1);
+    }
     int ret;
 
     if (ion_is_legacy(ionFd)) {
-        ret = ion_alloc(ionFd, alignedSize, align, heapMask, flags, &buffer);
+        ret = ion_alloc(ionFd, allocSize, align, heapMask, flags, &buffer);
         ALOGV("ion_alloc(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
               "returned (%d) ; buffer = %d",
-              ionFd, alignedSize, align, heapMask, flags, ret, buffer);
+              ionFd, allocSize, align, heapMask, flags, ret, buffer);
         if (ret == 0) {
             // get buffer fd for native handle constructor
             ret = ion_share(ionFd, buffer, &bufferFd);
@@ -392,15 +417,15 @@
                 buffer = -1;
             }
         }
-        return new Impl(ionFd, alignedSize, bufferFd, buffer, id, ret);
+        return new Impl(ionFd, allocSize, bufferFd, buffer, id, ret);
 
     } else {
-        ret = ion_alloc_fd(ionFd, alignedSize, align, heapMask, flags, &bufferFd);
+        ret = ion_alloc_fd(ionFd, allocSize, align, heapMask, flags, &bufferFd);
         ALOGV("ion_alloc_fd(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
               "returned (%d) ; bufferFd = %d",
-              ionFd, alignedSize, align, heapMask, flags, ret, bufferFd);
+              ionFd, allocSize, align, heapMask, flags, ret, bufferFd);
 
-        return new ImplV2(ionFd, alignedSize, bufferFd, id, ret);
+        return new ImplV2(ionFd, allocSize, bufferFd, id, ret);
     }
 }
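[annotation] The ion path now pads every allocation by the configured amount and guards both the padding add and the power-of-two align-up against size_t overflow, returning an allocation object carrying -ENOMEM instead of wrapping around. A standalone sketch of the same computation, assuming `align` is zero or a power of two as in the original mask-based rounding:

    #include <cstddef>
    #include <cstdint>
    #include <optional>

    // Sketch: size + padding, rounded up to a power-of-two alignment,
    // with explicit overflow checks instead of silent wraparound.
    std::optional<size_t> paddedAlignedSize(size_t size, size_t padding, size_t align) {
        if (padding > SIZE_MAX - size) {
            return std::nullopt;              // size + padding would overflow
        }
        size_t allocSize = size + padding;
        if (align) {
            if (align - 1 > SIZE_MAX - allocSize) {
                return std::nullopt;          // rounding up would overflow
            }
            allocSize = (allocSize + align - 1) & ~(align - 1);
        }
        return allocSize;
    }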
 
diff --git a/media/codec2/vndk/C2DmaBufAllocator.cpp b/media/codec2/vndk/C2DmaBufAllocator.cpp
index 750aa31..6d8552a 100644
--- a/media/codec2/vndk/C2DmaBufAllocator.cpp
+++ b/media/codec2/vndk/C2DmaBufAllocator.cpp
@@ -16,11 +16,13 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2DmaBufAllocator"
+
 #include <BufferAllocator/BufferAllocator.h>
 #include <C2Buffer.h>
 #include <C2Debug.h>
 #include <C2DmaBufAllocator.h>
 #include <C2ErrnoUtils.h>
+
 #include <linux/ion.h>
 #include <sys/mman.h>
 #include <unistd.h>  // getpagesize, size_t, close, dup
@@ -28,14 +30,15 @@
 
 #include <list>
 
-#ifdef __ANDROID_APEX__
 #include <android-base/properties.h>
-#endif
 
 namespace android {
 
 namespace {
-constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+    constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+
+    // max padding after ion/dmabuf allocations in bytes
+    constexpr uint32_t MAX_PADDING = 0x8000; // 32KB
 }
 
 /* =========================== BUFFER HANDLE =========================== */
@@ -250,8 +253,11 @@
     int ret = 0;
 
     bufferFd = alloc.Alloc(heap_name, size, flags);
-    if (bufferFd < 0) ret = bufferFd;
+    if (bufferFd < 0) {
+        ret = bufferFd;
+    }
 
+    // this may be a non-working handle if bufferFd is negative
     mHandle = C2HandleBuf(bufferFd, size);
     mId = id;
     mInit = c2_status_t(c2_map_errno<ENOMEM, EACCES, EINVAL>(ret));
@@ -360,8 +366,22 @@
         return ret;
     }
 
+    // TODO: should we pad before mapping usage?
+
+    // NOTE: read this value directly from the system property, as this code has to run on
+    // Android Q but the sysprop was only introduced in Android S.
+    static size_t sPadding =
+        base::GetUintProperty("media.c2.dmabuf.padding", (uint32_t)0, MAX_PADDING);
+    if (sPadding > SIZE_MAX - capacity) {
+        // size would overflow
+        ALOGD("dmabuf_alloc: size %#x cannot accommodate padding %#zx", capacity, sPadding);
+        return C2_NO_MEMORY;
+    }
+
+    size_t allocSize = (size_t)capacity + sPadding;
+    // TODO: should we align allocation size to mBlockSize to reflect the true allocation size?
     std::shared_ptr<C2DmaBufAllocation> alloc = std::make_shared<C2DmaBufAllocation>(
-            mBufferAllocator, capacity, heap_name, flags, getId());
+            mBufferAllocator, allocSize, heap_name, flags, getId());
     ret = alloc->status();
     if (ret == C2_OK) {
         *allocation = alloc;
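[annotation] The dmabuf path applies the same padding, read once from the `media.c2.dmabuf.padding` system property and clamped to MAX_PADDING so a misconfigured value cannot inflate every allocation. A minimal sketch of that bounded read (the helper name is illustrative; the property key and the 32 KB cap come from the patch):

    #include <android-base/properties.h>
    #include <cstddef>
    #include <cstdint>

    // Sketch: read the per-allocation padding once, clamped to 32 KB.
    size_t getDmabufPadding() {
        constexpr uint32_t kMaxPadding = 0x8000;  // 32KB, mirrors MAX_PADDING
        static const size_t sPadding = android::base::GetUintProperty(
                "media.c2.dmabuf.padding", uint32_t{0}, kMaxPadding);
        return sPadding;
    }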
diff --git a/media/codecs/amrnb/dec/test/Android.bp b/media/codecs/amrnb/dec/test/Android.bp
index b882481..74258e0 100644
--- a/media/codecs/amrnb/dec/test/Android.bp
+++ b/media/codecs/amrnb/dec/test/Android.bp
@@ -29,6 +29,7 @@
 cc_test {
     name: "AmrnbDecoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "AmrnbDecoderTest.cpp",
diff --git a/media/codecs/amrnb/enc/test/Android.bp b/media/codecs/amrnb/enc/test/Android.bp
index a94ffd4..7e393e3 100644
--- a/media/codecs/amrnb/enc/test/Android.bp
+++ b/media/codecs/amrnb/enc/test/Android.bp
@@ -29,6 +29,7 @@
 cc_test {
     name: "AmrnbEncoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "AmrnbEncoderTest.cpp",
diff --git a/media/codecs/m4v_h263/dec/test/Android.bp b/media/codecs/m4v_h263/dec/test/Android.bp
index 4ae5e73..6eed66f 100644
--- a/media/codecs/m4v_h263/dec/test/Android.bp
+++ b/media/codecs/m4v_h263/dec/test/Android.bp
@@ -29,6 +29,24 @@
     name: "Mpeg4H263DecoderTest",
     gtest: true,
 
+    test_suites: [
+        "device-tests",
+        "mts",
+    ],
+
+    // Support multilib variants (using different suffix per sub-architecture), which is needed on
+    // build targets with secondary architectures, as the MTS test suite packaging logic flattens
+    // all test artifacts into a single `testcases` directory.
+    compile_multilib: "both",
+    multilib: {
+        lib32: {
+            suffix: "32",
+        },
+        lib64: {
+            suffix: "64",
+        },
+    },
+
     srcs: [
         "Mpeg4H263DecoderTest.cpp",
     ],
diff --git a/media/codecs/m4v_h263/dec/test/AndroidTest.xml b/media/codecs/m4v_h263/dec/test/AndroidTest.xml
index f572b0c..8bb4d1c 100755
--- a/media/codecs/m4v_h263/dec/test/AndroidTest.xml
+++ b/media/codecs/m4v_h263/dec/test/AndroidTest.xml
@@ -15,9 +15,10 @@
 -->
 <configuration description="Test module config for Mpeg4H263 Decoder unit tests">
     <option name="test-suite-tag" value="Mpeg4H263DecoderTest" />
-    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.FilePusher">
         <option name="cleanup" value="true" />
         <option name="push" value="Mpeg4H263DecoderTest->/data/local/tmp/Mpeg4H263DecoderTest" />
+        <option name="append-bitness" value="true" />
         <option name="push-file"
             key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263Decoder-1.1.zip?unzip=true"
             value="/data/local/tmp/Mpeg4H263DecoderTestRes/" />
diff --git a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
index d43156c..4555203 100644
--- a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
+++ b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
@@ -524,11 +524,9 @@
     }
 
     /* check bit rate */
-    /* set max bit rate */
     for (i = 0; i < encParams->nLayers; i++)
     {
         encParams->LayerBitRate[i] = encOption->bitRate[i];
-        encParams->LayerMaxBitRate[i] = encOption->bitRate[i];
     }
     if (encParams->nLayers > 1)
     {
@@ -3302,6 +3300,3 @@
 }
 
 #endif /* #ifndef ORIGINAL_VERSION */
-
-
-
diff --git a/media/codecs/m4v_h263/enc/test/Android.bp b/media/codecs/m4v_h263/enc/test/Android.bp
index e1ce4aa..2b5e49c 100644
--- a/media/codecs/m4v_h263/enc/test/Android.bp
+++ b/media/codecs/m4v_h263/enc/test/Android.bp
@@ -29,6 +29,7 @@
 cc_test {
     name: "Mpeg4H263EncoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs : [ "Mpeg4H263EncoderTest.cpp" ],
 
diff --git a/media/codecs/mp3dec/test/Android.bp b/media/codecs/mp3dec/test/Android.bp
index 8003068..f10b6ae 100644
--- a/media/codecs/mp3dec/test/Android.bp
+++ b/media/codecs/mp3dec/test/Android.bp
@@ -27,6 +27,7 @@
 cc_test {
     name: "Mp3DecoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "mp3reader.cpp",
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 409fca1..f725a8c 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -62,6 +62,16 @@
 
 #define ALAC_SPECIFIC_INFO_SIZE (36)
 
+// TODO : Remove the defines once mainline media is built against NDK >= 31.
+// The mp4 extractor is part of mainline and builds against NDK 29 as of
+// writing. These keys are available only from NDK 31:
+#define AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION \
+  "mpegh-profile-level-indication"
+#define AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT \
+  "mpegh-reference-channel-layout"
+#define AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS \
+  "mpegh-compatible-sets"
+
 namespace android {
 
 enum {
@@ -139,6 +149,7 @@
     bool mIsHEVC;
     bool mIsDolbyVision;
     bool mIsAC4;
+    bool mIsMpegH = false;
     bool mIsPcm;
     size_t mNALLengthSize;
 
@@ -377,6 +388,10 @@
         case FOURCC(".mp3"):
         case 0x6D730055: // "ms U" mp3 audio
             return MEDIA_MIMETYPE_AUDIO_MPEG;
+        case FOURCC("mha1"):
+            return MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1;
+        case FOURCC("mhm1"):
+            return MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1;
         default:
             ALOGW("Unknown fourcc: %c%c%c%c",
                    (fourcc >> 24) & 0xff,
@@ -1760,6 +1775,8 @@
         case FOURCC("fLaC"):
         case FOURCC(".mp3"):
         case 0x6D730055: // "ms U" mp3 audio
+        case FOURCC("mha1"):
+        case FOURCC("mhm1"):
         {
             if (mIsQT && depth >= 1 && mPath[depth - 1] == FOURCC("wave")) {
 
@@ -1959,7 +1976,94 @@
             }
             break;
         }
+        case FOURCC("mhaC"):
+        {
+            // See ISO_IEC_23008-3:2019 MHADecoderConfigurationRecord
+            constexpr uint32_t mhac_header_size = 4 /* size */ + 4 /* boxtype 'mhaC' */
+                    + 1 /* configurationVersion */ + 1 /* mpegh3daProfileLevelIndication */
+                    + 1 /* referenceChannelLayout */ + 2 /* mpegh3daConfigLength */;
+            uint8_t mhac_header[mhac_header_size];
+            off64_t data_offset = *offset;
 
+            if (chunk_size < sizeof(mhac_header)) {
+                return ERROR_MALFORMED;
+            }
+
+            if (mDataSource->readAt(data_offset, mhac_header, sizeof(mhac_header))
+                    < (ssize_t)sizeof(mhac_header)) {
+                return ERROR_IO;
+            }
+
+            // get mpegh3daProfileLevelIndication
+            const uint32_t mpegh3daProfileLevelIndication = mhac_header[9];
+            AMediaFormat_setInt32(mLastTrack->meta,
+                    AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION,
+                    mpegh3daProfileLevelIndication);
+
+            // get referenceChannelLayout
+            const uint32_t referenceChannelLayout = mhac_header[10];
+            AMediaFormat_setInt32(mLastTrack->meta,
+                    AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT,
+                    referenceChannelLayout);
+
+            // get mpegh3daConfigLength
+            const uint32_t mhac_config_size = U16_AT(&mhac_header[11]);
+            if (chunk_size != sizeof(mhac_header) + mhac_config_size) {
+                return ERROR_MALFORMED;
+            }
+
+            data_offset += sizeof(mhac_header);
+            uint8_t mhac_config[mhac_config_size];
+            if (mDataSource->readAt(data_offset, mhac_config, sizeof(mhac_config))
+                    < (ssize_t)sizeof(mhac_config)) {
+                return ERROR_IO;
+            }
+
+            AMediaFormat_setBuffer(mLastTrack->meta,
+                    AMEDIAFORMAT_KEY_CSD_0, mhac_config, sizeof(mhac_config));
+            data_offset += sizeof(mhac_config);
+            *offset = data_offset;
+            break;
+        }
+        case FOURCC("mhaP"):
+        {
+            // FDAmd_2 of ISO_IEC_23008-3:2019 MHAProfileAndLevelCompatibilitySetBox
+            constexpr uint32_t mhap_header_size = 4 /* size */ + 4 /* boxtype 'mhaP' */
+                    + 1 /* numCompatibleSets */;
+
+            uint8_t mhap_header[mhap_header_size];
+            off64_t data_offset = *offset;
+
+            if (chunk_size < (ssize_t)mhap_header_size) {
+                return ERROR_MALFORMED;
+            }
+
+            if (mDataSource->readAt(data_offset, mhap_header, sizeof(mhap_header))
+                    < (ssize_t)sizeof(mhap_header)) {
+                return ERROR_IO;
+            }
+
+            // mhap_compatible_sets_size = numCompatibleSets * sizeof(uint8_t)
+            const uint32_t mhap_compatible_sets_size = mhap_header[8];
+            if (chunk_size != sizeof(mhap_header) + mhap_compatible_sets_size) {
+                return ERROR_MALFORMED;
+            }
+
+            data_offset += sizeof(mhap_header);
+            uint8_t mhap_compatible_sets[mhap_compatible_sets_size];
+            if (mDataSource->readAt(
+                    data_offset, mhap_compatible_sets, sizeof(mhap_compatible_sets))
+                            < (ssize_t)sizeof(mhap_compatible_sets)) {
+                return ERROR_IO;
+            }
+
+            AMediaFormat_setBuffer(mLastTrack->meta,
+                    AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS,
+                    mhap_compatible_sets, sizeof(mhap_compatible_sets));
+            data_offset += sizeof(mhap_compatible_sets);
+            *offset = data_offset;
+            break;
+        }
         case FOURCC("mp4v"):
         case FOURCC("encv"):
         case FOURCC("s263"):
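[annotation] The new `mhaC` handler reads the fixed 13-byte prefix of the MHADecoderConfigurationRecord, pulls mpegh3daProfileLevelIndication (byte 9), referenceChannelLayout (byte 10) and the big-endian mpegh3daConfigLength (bytes 11 and 12), then stores the variable-length config as CSD-0; `mhaP` is handled the same way for the compatible-sets list. A sketch of the fixed-prefix parsing on a raw buffer (the struct and function names here are illustrative, not part of the extractor):

    #include <cstddef>
    #include <cstdint>

    struct MhacHeaderFields {              // illustrative holder, not part of the patch
        uint8_t  profileLevelIndication;
        uint8_t  referenceChannelLayout;
        uint16_t configLength;
    };

    // Sketch: parse the fixed-size prefix of an 'mhaC' box.
    // `box` points at the box start: 4-byte size, 'mhaC', version, profile,
    // reference layout, then a big-endian 16-bit config length.
    bool parseMhacHeader(const uint8_t* box, size_t boxSize, MhacHeaderFields* out) {
        constexpr size_t kHeaderSize = 4 + 4 + 1 + 1 + 1 + 2;  // 13 bytes
        if (boxSize < kHeaderSize) {
            return false;                                      // malformed: too small
        }
        out->profileLevelIndication = box[9];
        out->referenceChannelLayout = box[10];
        out->configLength = static_cast<uint16_t>((box[11] << 8) | box[12]);
        // The caller must still verify boxSize == kHeaderSize + out->configLength
        // before reading the mpegh3daConfig payload that follows.
        return true;
    }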
@@ -4505,6 +4609,9 @@
 
     if (objectTypeIndication == 0x6B || objectTypeIndication == 0x69) {
         // mp3 audio
+        if (mLastTrack == NULL)
+            return ERROR_MALFORMED;
+
         AMediaFormat_setString(mLastTrack->meta,AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_AUDIO_MPEG);
         return OK;
     }
@@ -4595,6 +4702,10 @@
         if (offset >= csd_size || csd[offset] != 0x01) {
             return ERROR_MALFORMED;
         }
+
+        if (mLastTrack == NULL) {
+            return ERROR_MALFORMED;
+        }
         // formerly kKeyVorbisInfo
         AMediaFormat_setBuffer(mLastTrack->meta,
                 AMEDIAFORMAT_KEY_CSD_0, &csd[offset], len1);
@@ -4932,6 +5043,8 @@
     bool success = AMediaFormat_getString(mFormat, AMEDIAFORMAT_KEY_MIME, &mime);
     CHECK(success);
 
+    mIsMpegH = !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1) ||
+               !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1);
     mIsAVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
     mIsHEVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC) ||
               !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
@@ -6001,10 +6114,11 @@
             }
 
             uint32_t syncSampleIndex = sampleIndex;
-            // assume every non-USAC audio sample is a sync sample. This works around
+            // assume every non-USAC/non-MPEGH audio sample is a sync sample.
+            // This works around
             // seek issues with files that were incorrectly written with an
             // empty or single-sample stss block for the audio track
-            if (err == OK && (!mIsAudio || mIsUsac)) {
+            if (err == OK && (!mIsAudio || mIsUsac || mIsMpegH)) {
                 err = mSampleTable->findSyncSampleNear(
                         sampleIndex, &syncSampleIndex, findFlags);
             }
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 19d1d1a..e37cc12 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1249,6 +1249,46 @@
     return finalBufferSize;
 }
 
+ssize_t AudioTrack::getStartThresholdInFrames() const
+{
+    AutoMutex lock(mLock);
+    if (mOutput == AUDIO_IO_HANDLE_NONE || mProxy.get() == 0) {
+        return NO_INIT;
+    }
+    return (ssize_t) mProxy->getStartThresholdInFrames();
+}
+
+ssize_t AudioTrack::setStartThresholdInFrames(size_t startThresholdInFrames)
+{
+    if (startThresholdInFrames > INT32_MAX || startThresholdInFrames == 0) {
+        // contractually we could simply return the current threshold in frames
+        // to indicate the request was ignored, but we return an error here.
+        return BAD_VALUE;
+    }
+    AutoMutex lock(mLock);
+    // We do not permit calling setStartThresholdInFrames() between the AudioTrack
+    // default ctor AudioTrack() and set(...) but rather fail such an attempt.
+    // (To do so would require a cached mOrigStartThresholdInFrames and we may
+    // not have proper validation for the actual set value).
+    if (mOutput == AUDIO_IO_HANDLE_NONE || mProxy.get() == 0) {
+        return NO_INIT;
+    }
+    const uint32_t original = mProxy->getStartThresholdInFrames();
+    const uint32_t final = mProxy->setStartThresholdInFrames(startThresholdInFrames);
+    if (original != final) {
+        android::mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETSTARTTHRESHOLD)
+                .set(AMEDIAMETRICS_PROP_STARTTHRESHOLDFRAMES, (int32_t)final)
+                .record();
+        if (original > final) {
+            // restart track if it was disabled by audioflinger due to previous underrun
+            // and we reduced the number of frames for the threshold.
+            restartIfDisabled();
+        }
+    }
+    return final;
+}
+
 status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount)
 {
     if (mSharedBuffer == 0 || isOffloadedOrDirect()) {
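[annotation] getStartThresholdInFrames()/setStartThresholdInFrames() let a client query or adjust the number of buffered frames required before playback actually starts; the proxy clamps the value to [1, frameCount], and lowering it below the previous value restarts a track that audioflinger had disabled after an underrun. A hypothetical caller (the helper name is illustrative):

    #include <media/AudioTrack.h>

    // Sketch: query and then adjust the start threshold on an initialized AudioTrack.
    // Returns the threshold actually applied, or a negative status on failure.
    ssize_t adjustStartThreshold(const android::sp<android::AudioTrack>& track,
                                 size_t requestedFrames) {
        const ssize_t current = track->getStartThresholdInFrames();
        if (current < 0) {
            return current;                 // NO_INIT: track not set up yet
        }
        // The proxy clamps to [1, frameCount]; use the returned value, not the request.
        return track->setStartThresholdInFrames(requestedFrames);
    }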
@@ -2562,6 +2602,10 @@
         staticPosition = mStaticProxy->getPosition().unsignedValue();
     }
 
+    // save the old startThreshold and framecount
+    const uint32_t originalStartThresholdInFrames = mProxy->getStartThresholdInFrames();
+    const uint32_t originalFrameCount = mProxy->frameCount();
+
     // See b/74409267. Connecting to a BT A2DP device supporting multiple codecs
     // causes a lot of churn on the service side, and it can reject starting
     // playback of a previously created track. May also apply to other cases.
@@ -2616,6 +2660,18 @@
             return mAudioTrack->applyVolumeShaper(shaper.mConfiguration, operationToEnd);
         });
 
+        // restore the original start threshold if different than frameCount.
+        if (originalStartThresholdInFrames != originalFrameCount) {
+            // Note: mProxy->setStartThresholdInFrames() call is in the Proxy
+            // and does not trigger a restart.
+            // (Also CBLK_DISABLED is not set, buffers are empty after track recreation).
+            // Any start would be triggered on the mState == ACTIVE check below.
+            const uint32_t currentThreshold =
+                    mProxy->setStartThresholdInFrames(originalStartThresholdInFrames);
+            ALOGD_IF(originalStartThresholdInFrames != currentThreshold,
+                    "%s(%d) startThresholdInFrames changing from %u to %u",
+                    __func__, mPortId, originalStartThresholdInFrames, currentThreshold);
+        }
         if (mState == STATE_ACTIVE) {
             result = mAudioTrack->start();
         }
diff --git a/media/libaudioclient/AudioTrackShared.cpp b/media/libaudioclient/AudioTrackShared.cpp
index f1f8f9c..35719be 100644
--- a/media/libaudioclient/AudioTrackShared.cpp
+++ b/media/libaudioclient/AudioTrackShared.cpp
@@ -17,6 +17,7 @@
 #define LOG_TAG "AudioTrackShared"
 //#define LOG_NDEBUG 0
 
+#include <atomic>
 #include <android-base/macros.h>
 #include <private/media/AudioTrackShared.h>
 #include <utils/Log.h>
@@ -33,6 +34,21 @@
     return sizeof(T) > sizeof(size_t) && x > (T) SIZE_MAX ? SIZE_MAX : x < 0 ? 0 : (size_t) x;
 }
 
+// compile-time safe atomics. TODO: update all methods to use it
+template <typename T>
+T android_atomic_load(const volatile T* addr) {
+    static_assert(sizeof(T) == sizeof(std::atomic<T>)); // no extra sync data required.
+    static_assert(std::atomic<T>::is_always_lock_free); // no hash-based lock under the hood.
+    return atomic_load((std::atomic<T>*)addr);          // memory_order_seq_cst
+}
+
+template <typename T>
+void android_atomic_store(const volatile T* addr, T value) {
+    static_assert(sizeof(T) == sizeof(std::atomic<T>)); // no extra sync data required.
+    static_assert(std::atomic<T>::is_always_lock_free); // no hash-based lock under the hood.
+    atomic_store((std::atomic<T>*)addr, value);         // memory_order_seq_cst
+}
+
 // incrementSequence is used to determine the next sequence value
 // for the loop and position sequence counters.  It should return
 // a value between "other" + 1 and "other" + INT32_MAX, the choice of
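[annotation] The two helper templates above wrap raw fields of the shared control block in sequentially consistent atomic accesses, and the static_asserts guarantee at compile time that doing so neither changes the field's size nor falls back to a lock. A small usage sketch on a plain shared counter, assuming the helpers are visible in the same translation unit (the variable is illustrative, not a real cblk field):

    // Sketch: access a raw shared uint32_t through the checked atomic helpers.
    volatile uint32_t gSharedThreshold = 0;   // stands in for a shared-memory field

    void publishThreshold(uint32_t frames) {
        android_atomic_store(&gSharedThreshold, frames);  // seq_cst store
    }

    uint32_t readThreshold() {
        return android_atomic_load(&gSharedThreshold);    // seq_cst load
    }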
@@ -51,6 +67,7 @@
     : mServer(0), mFutex(0), mMinimum(0)
     , mVolumeLR(GAIN_MINIFLOAT_PACKED_UNITY), mSampleRate(0), mSendLevel(0)
     , mBufferSizeInFrames(0)
+    , mStartThresholdInFrames(0) // filled in by the server.
     , mFlags(0)
 {
     memset(&u, 0, sizeof(u));
@@ -66,6 +83,26 @@
 {
 }
 
+uint32_t Proxy::getStartThresholdInFrames() const
+{
+    const uint32_t startThresholdInFrames =
+           android_atomic_load(&mCblk->mStartThresholdInFrames);
+    if (startThresholdInFrames == 0 || startThresholdInFrames > mFrameCount) {
+        ALOGD("%s: startThresholdInFrames %u not between 1 and frameCount %zu, "
+                "setting to frameCount",
+                __func__, startThresholdInFrames, mFrameCount);
+        return mFrameCount;
+    }
+    return startThresholdInFrames;
+}
+
+uint32_t Proxy::setStartThresholdInFrames(uint32_t startThresholdInFrames)
+{
+    const uint32_t actual = std::min((size_t)startThresholdInFrames, frameCount());
+    android_atomic_store(&mCblk->mStartThresholdInFrames, actual);
+    return actual;
+}
+
 // ---------------------------------------------------------------------------
 
 ClientProxy::ClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount,
@@ -663,6 +700,7 @@
     , mTimestampMutator(&cblk->mExtendedTimestampQueue)
 {
     cblk->mBufferSizeInFrames = frameCount;
+    cblk->mStartThresholdInFrames = frameCount;
 }
 
 __attribute__((no_sanitize("integer")))
@@ -900,11 +938,8 @@
     }
     audio_track_cblk_t* cblk = mCblk;
 
-    int32_t flush = cblk->u.mStreaming.mFlush;
-    if (flush != mFlush) {
-        // FIXME should return an accurate value, but over-estimate is better than under-estimate
-        return mFrameCount;
-    }
+    flushBufferIfNeeded();
+
     const int32_t rear = getRear();
     ssize_t filled = audio_utils::safe_sub_overflow(rear, cblk->u.mStreaming.mFront);
     // pipe should not already be overfull
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index fac4c83..4afa9c9 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -429,6 +429,19 @@
      */
             ssize_t     setBufferSizeInFrames(size_t size);
 
+    /* Returns the start threshold on the buffer for audio streaming
+     * or a negative value if the AudioTrack is not initialized.
+     */
+            ssize_t     getStartThresholdInFrames() const;
+
+    /* Sets the start threshold in frames on the buffer for audio streaming.
+     *
+     * May be clamped internally. Returns the actual value set, or a negative
+     * value if the AudioTrack is not initialized or if the input
+     * is zero or greater than INT_MAX.
+     */
+            ssize_t     setStartThresholdInFrames(size_t startThresholdInFrames);
+
     /* Return the static buffer specified in constructor or set(), or 0 for streaming mode */
             sp<IMemory> sharedBuffer() const { return mSharedBuffer; }
 
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index da16477..ca4f663 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -246,6 +246,10 @@
         return status;
     }
     CoreUtils::AudioInputFlags hidlFlags;
+#if MAJOR_VERSION <= 5
+    // Some flags were specific to framework and must not leak to the HAL.
+    flags = static_cast<audio_input_flags_t>(flags & ~AUDIO_INPUT_FLAG_DIRECT);
+#endif
     if (status_t status = CoreUtils::audioInputFlagsFromHal(flags, &hidlFlags); status != OK) {
         return status;
     }
@@ -278,10 +282,6 @@
         sinkMetadata.tracks[0].destination.device(std::move(hidlOutputDevice));
     }
 #endif
-#if MAJOR_VERSION <= 5
-    // Some flags were specific to framework and must not leak to the HAL.
-    flags = static_cast<audio_input_flags_t>(flags & ~AUDIO_INPUT_FLAG_DIRECT);
-#endif
     Return<void> ret = mDevice->openInputStream(
             handle, hidlDevice, hidlConfig, hidlFlags, sinkMetadata,
             [&](Result r, const sp<IStreamIn>& result, const AudioConfig& suggestedConfig) {
@@ -354,7 +354,8 @@
     return processReturn("releaseAudioPatch", mDevice->releaseAudioPatch(patch));
 }
 
-status_t DeviceHalHidl::getAudioPort(struct audio_port *port) {
+template <typename HalPort>
+status_t DeviceHalHidl::getAudioPortImpl(HalPort *port) {
     if (mDevice == 0) return NO_INIT;
     AudioPort hidlPort;
     HidlUtils::audioPortFromHal(*port, &hidlPort);
@@ -370,6 +371,30 @@
     return processReturn("getAudioPort", ret, retval);
 }
 
+status_t DeviceHalHidl::getAudioPort(struct audio_port *port) {
+    return getAudioPortImpl(port);
+}
+
+status_t DeviceHalHidl::getAudioPort(struct audio_port_v7 *port) {
+#if MAJOR_VERSION >= 7
+    return getAudioPortImpl(port);
+#else
+    struct audio_port audioPort = {};
+    status_t result = NO_ERROR;
+    if (!audio_populate_audio_port(port, &audioPort)) {
+        ALOGE("Failed to populate legacy audio port from audio_port_v7");
+        result = BAD_VALUE;
+    }
+    status_t status = getAudioPort(&audioPort);
+    if (status == NO_ERROR) {
+        audio_populate_audio_port_v7(&audioPort, port);
+    } else {
+        result = status;
+    }
+    return result;
+#endif
+}
+
 status_t DeviceHalHidl::setAudioPortConfig(const struct audio_port_config *config) {
     if (mDevice == 0) return NO_INIT;
     AudioPortConfig hidlConfig;
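[annotation] On pre-V7 HALs the new overload converts the caller's audio_port_v7 to a legacy audio_port, queries the HAL, and converts the result back, so callers can use the v7 struct everywhere. The populate helpers are the same ones the patch calls; a sketch of the round trip, where `queryLegacyPort` merely stands in for the existing audio_port path and is not a real API:

    #include <functional>
    #include <system/audio.h>
    #include <utils/Errors.h>

    // Sketch: serve an audio_port_v7 query through a legacy audio_port path.
    android::status_t getPortViaLegacy(
            struct audio_port_v7* port,
            const std::function<android::status_t(struct audio_port*)>& queryLegacyPort) {
        struct audio_port legacyPort = {};
        if (!audio_populate_audio_port(port, &legacyPort)) {
            return android::BAD_VALUE;        // v7 port cannot be represented as legacy
        }
        const android::status_t status = queryLegacyPort(&legacyPort);
        if (status == android::NO_ERROR) {
            audio_populate_audio_port_v7(&legacyPort, port);  // copy results back into v7
        }
        return status;
    }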
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index d342d4a..2c847cf 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -107,6 +107,9 @@
     // Fills the list of supported attributes for a given audio port.
     virtual status_t getAudioPort(struct audio_port *port);
 
+    // Fills the list of supported attributes for a given audio port.
+    virtual status_t getAudioPort(struct audio_port_v7 *port);
+
     // Set audio port configuration.
     virtual status_t setAudioPortConfig(const struct audio_port_config *config);
 
@@ -128,6 +131,8 @@
 
     // The destructor automatically closes the device.
     virtual ~DeviceHalHidl();
+
+    template <typename HalPort> status_t getAudioPortImpl(HalPort *port);
 };
 
 } // namespace CPP_VERSION
diff --git a/media/libaudiohal/impl/DeviceHalLocal.cpp b/media/libaudiohal/impl/DeviceHalLocal.cpp
index 8021d92..af7dc1a 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.cpp
+++ b/media/libaudiohal/impl/DeviceHalLocal.cpp
@@ -180,6 +180,22 @@
     return mDev->get_audio_port(mDev, port);
 }
 
+status_t DeviceHalLocal::getAudioPort(struct audio_port_v7 *port) {
+#if MAJOR_VERSION >= 7
+    if (version() >= AUDIO_DEVICE_API_VERSION_3_2) {
+        // get_audio_port_v7 is mandatory if legacy HAL support this API version.
+        return mDev->get_audio_port_v7(mDev, port);
+    }
+#endif
+    struct audio_port audioPort = {};
+    audio_populate_audio_port(port, &audioPort);
+    status_t status = getAudioPort(&audioPort);
+    if (status == NO_ERROR) {
+        audio_populate_audio_port_v7(&audioPort, port);
+    }
+    return status;
+}
+
 status_t DeviceHalLocal::setAudioPortConfig(const struct audio_port_config *config) {
     if (version() >= AUDIO_DEVICE_API_VERSION_3_0)
         return mDev->set_audio_port_config(mDev, config);
diff --git a/media/libaudiohal/impl/DeviceHalLocal.h b/media/libaudiohal/impl/DeviceHalLocal.h
index b4eeba5..46b510b 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.h
+++ b/media/libaudiohal/impl/DeviceHalLocal.h
@@ -100,6 +100,9 @@
     // Fills the list of supported attributes for a given audio port.
     virtual status_t getAudioPort(struct audio_port *port);
 
+    // Fills the list of supported attributes for a given audio port.
+    virtual status_t getAudioPort(struct audio_port_v7 *port);
+
     // Set audio port configuration.
     virtual status_t setAudioPortConfig(const struct audio_port_config *config);
 
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 6da8bbd..f4a4fe1 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -57,8 +57,7 @@
     // Note: This assumes channel mask, format, and sample rate do not change after creation.
     audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
     if (/* mStreamPowerLog.isUserDebugOrEngBuild() && */
-        StreamHalHidl::getAudioProperties(
-                &config.sample_rate, &config.channel_mask, &config.format) == NO_ERROR) {
+        StreamHalHidl::getAudioProperties(&config) == NO_ERROR) {
         mStreamPowerLog.init(config.sample_rate, config.channel_mask, config.format);
     }
 }
@@ -67,14 +66,6 @@
     mStream = nullptr;
 }
 
-// Note: this method will be removed
-status_t StreamHalHidl::getSampleRate(uint32_t *rate) {
-    audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
-    status_t status = getAudioProperties(&config.sample_rate, &config.channel_mask, &config.format);
-    *rate = config.sample_rate;
-    return status;
-}
-
 status_t StreamHalHidl::getBufferSize(size_t *size) {
     if (!mStream) return NO_INIT;
     status_t status = processReturn("getBufferSize", mStream->getBufferSize(), size);
@@ -84,48 +75,28 @@
     return status;
 }
 
-// Note: this method will be removed
-status_t StreamHalHidl::getChannelMask(audio_channel_mask_t *mask) {
-    audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
-    status_t status = getAudioProperties(&config.sample_rate, &config.channel_mask, &config.format);
-    *mask = config.channel_mask;
-    return status;
-}
-
-// Note: this method will be removed
-status_t StreamHalHidl::getFormat(audio_format_t *format) {
-    audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
-    status_t status = getAudioProperties(&config.sample_rate, &config.channel_mask, &config.format);
-    *format = config.format;
-    return status;
-}
-
-status_t StreamHalHidl::getAudioProperties(
-        uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) {
+status_t StreamHalHidl::getAudioProperties(audio_config_base_t *configBase) {
+    *configBase = AUDIO_CONFIG_BASE_INITIALIZER;
     if (!mStream) return NO_INIT;
 #if MAJOR_VERSION <= 6
     Return<void> ret = mStream->getAudioProperties(
             [&](uint32_t sr, auto m, auto f) {
-                *sampleRate = sr;
-                *mask = static_cast<audio_channel_mask_t>(m);
-                *format = static_cast<audio_format_t>(f);
+                configBase->sample_rate = sr;
+                configBase->channel_mask = static_cast<audio_channel_mask_t>(m);
+                configBase->format = static_cast<audio_format_t>(f);
             });
     return processReturn("getAudioProperties", ret);
 #else
     Result retval;
     status_t conversionStatus = BAD_VALUE;
-    audio_config_base_t halConfig = AUDIO_CONFIG_BASE_INITIALIZER;
     Return<void> ret = mStream->getAudioProperties(
             [&](Result r, const AudioConfigBase& config) {
                 retval = r;
                 if (retval == Result::OK) {
-                    conversionStatus = HidlUtils::audioConfigBaseToHal(config, &halConfig);
+                    conversionStatus = HidlUtils::audioConfigBaseToHal(config, configBase);
                 }
             });
     if (status_t status = processReturn("getAudioProperties", ret, retval); status == NO_ERROR) {
-        *sampleRate = halConfig.sample_rate;
-        *mask = halConfig.channel_mask;
-        *format = halConfig.format;
         return conversionStatus;
     } else {
         return status;
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 72ce60b..d40fa7c 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -48,21 +48,14 @@
 class StreamHalHidl : public virtual StreamHalInterface, public ConversionHelperHidl
 {
   public:
-    // Return the sampling rate in Hz - eg. 44100.
-    virtual status_t getSampleRate(uint32_t *rate);
-
     // Return size of input/output buffer in bytes for this stream - eg. 4800.
     virtual status_t getBufferSize(size_t *size);
 
-    // Return the channel mask.
-    virtual status_t getChannelMask(audio_channel_mask_t *mask);
-
-    // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
-    virtual status_t getFormat(audio_format_t *format);
-
-    // Convenience method.
-    virtual status_t getAudioProperties(
-            uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format);
+    // Return the base configuration of the stream:
+    //   - channel mask;
+    //   - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
+    //   - sampling rate in Hz - eg. 44100.
+    virtual status_t getAudioProperties(audio_config_base_t *configBase);
 
     // Set audio stream parameters.
     virtual status_t setParameters(const String8& kvPairs);
diff --git a/media/libaudiohal/impl/StreamHalLocal.cpp b/media/libaudiohal/impl/StreamHalLocal.cpp
index e89b288..d0c375e 100644
--- a/media/libaudiohal/impl/StreamHalLocal.cpp
+++ b/media/libaudiohal/impl/StreamHalLocal.cpp
@@ -45,31 +45,15 @@
     mDevice.clear();
 }
 
-status_t StreamHalLocal::getSampleRate(uint32_t *rate) {
-    *rate = mStream->get_sample_rate(mStream);
-    return OK;
-}
-
 status_t StreamHalLocal::getBufferSize(size_t *size) {
     *size = mStream->get_buffer_size(mStream);
     return OK;
 }
 
-status_t StreamHalLocal::getChannelMask(audio_channel_mask_t *mask) {
-    *mask = mStream->get_channels(mStream);
-    return OK;
-}
-
-status_t StreamHalLocal::getFormat(audio_format_t *format) {
-    *format = mStream->get_format(mStream);
-    return OK;
-}
-
-status_t StreamHalLocal::getAudioProperties(
-        uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) {
-    *sampleRate = mStream->get_sample_rate(mStream);
-    *mask = mStream->get_channels(mStream);
-    *format = mStream->get_format(mStream);
+status_t StreamHalLocal::getAudioProperties(audio_config_base_t *configBase) {
+    configBase->sample_rate = mStream->get_sample_rate(mStream);
+    configBase->channel_mask = mStream->get_channels(mStream);
+    configBase->format = mStream->get_format(mStream);
     return OK;
 }
 
diff --git a/media/libaudiohal/impl/StreamHalLocal.h b/media/libaudiohal/impl/StreamHalLocal.h
index e228104..b260495 100644
--- a/media/libaudiohal/impl/StreamHalLocal.h
+++ b/media/libaudiohal/impl/StreamHalLocal.h
@@ -28,21 +28,14 @@
 class StreamHalLocal : public virtual StreamHalInterface
 {
   public:
-    // Return the sampling rate in Hz - eg. 44100.
-    virtual status_t getSampleRate(uint32_t *rate);
-
     // Return size of input/output buffer in bytes for this stream - eg. 4800.
     virtual status_t getBufferSize(size_t *size);
 
-    // Return the channel mask.
-    virtual status_t getChannelMask(audio_channel_mask_t *mask);
-
-    // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
-    virtual status_t getFormat(audio_format_t *format);
-
-    // Convenience method.
-    virtual status_t getAudioProperties(
-            uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format);
+    // Return the base configuration of the stream:
+    //   - channel mask;
+    //   - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
+    //   - sampling rate in Hz - eg. 44100.
+    virtual status_t getAudioProperties(audio_config_base_t *configBase);
 
     // Set audio stream parameters.
     virtual status_t setParameters(const String8& kvPairs);
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 1e04b21..29ef011 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -106,6 +106,9 @@
     // Fills the list of supported attributes for a given audio port.
     virtual status_t getAudioPort(struct audio_port *port) = 0;
 
+    // Fills the list of supported attributes for a given audio port.
+    virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
+
     // Set audio port configuration.
     virtual status_t setAudioPortConfig(const struct audio_port_config *config) = 0;
 
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index b47f536..2be12fb 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -31,25 +31,27 @@
 class StreamHalInterface : public virtual RefBase
 {
   public:
-    // TODO(mnaganov): Remove
-    // Return the sampling rate in Hz - eg. 44100.
-    virtual status_t getSampleRate(uint32_t *rate) = 0;
-
     // Return size of input/output buffer in bytes for this stream - eg. 4800.
     virtual status_t getBufferSize(size_t *size) = 0;
 
-    // TODO(mnaganov): Remove
-    // Return the channel mask.
-    virtual status_t getChannelMask(audio_channel_mask_t *mask) = 0;
+    // Return the base configuration of the stream:
+    //   - channel mask;
+    //   - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
+    //   - sampling rate in Hz - eg. 44100.
+    virtual status_t getAudioProperties(audio_config_base_t *configBase) = 0;
 
-    // TODO(mnaganov): Remove
-    // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
-    virtual status_t getFormat(audio_format_t *format) = 0;
-
-    // TODO(mnaganov): Change to use audio_config_base_t
     // Convenience method.
-    virtual status_t getAudioProperties(
-            uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) = 0;
+    inline status_t getAudioProperties(
+            uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) {
+        audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+        const status_t result = getAudioProperties(&config);
+        if (result == NO_ERROR) {
+            if (sampleRate != nullptr) *sampleRate = config.sample_rate;
+            if (mask != nullptr) *mask = config.channel_mask;
+            if (format != nullptr) *format = config.format;
+        }
+        return result;
+    }
 
     // Set audio stream parameters.
     virtual status_t setParameters(const String8& kvPairs) = 0;
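[annotation] With this change, stream HAL implementations only have to fill an audio_config_base_t; the old three-pointer getAudioProperties() survives as the inline wrapper above, so existing callers keep compiling. A hypothetical caller of the new single-struct form (the function name is illustrative):

    #include <media/audiohal/StreamHalInterface.h>
    #include <system/audio.h>

    // Sketch: read a stream's base configuration through the new interface.
    android::status_t readStreamConfig(const android::sp<android::StreamHalInterface>& stream,
                                       audio_config_base_t* config) {
        *config = AUDIO_CONFIG_BASE_INITIALIZER;
        const android::status_t status = stream->getAudioProperties(config);
        // On success, config->sample_rate, config->channel_mask and config->format are valid.
        return status;
    }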
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index a044295..7998879 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -59,12 +59,10 @@
         "Eq/src/LVEQNB_Init.cpp",
         "Eq/src/LVEQNB_Process.cpp",
         "Eq/src/LVEQNB_Tables.cpp",
-        "Common/src/InstAlloc.cpp",
         "Common/src/DC_2I_D16_TRC_WRA_01.cpp",
         "Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp",
         "Common/src/Copy_16.cpp",
         "Common/src/MonoTo2I_32.cpp",
-        "Common/src/LoadConst_32.cpp",
         "Common/src/dB_to_Lin32.cpp",
         "Common/src/Shift_Sat_v16xv16.cpp",
         "Common/src/Shift_Sat_v32xv32.cpp",
@@ -146,12 +144,9 @@
         "Reverb/src/LVREV_ClearAudioBuffers.cpp",
         "Reverb/src/LVREV_GetControlParameters.cpp",
         "Reverb/src/LVREV_GetInstanceHandle.cpp",
-        "Reverb/src/LVREV_GetMemoryTable.cpp",
         "Reverb/src/LVREV_Process.cpp",
         "Reverb/src/LVREV_SetControlParameters.cpp",
         "Reverb/src/LVREV_Tables.cpp",
-        "Common/src/InstAlloc.cpp",
-        "Common/src/LoadConst_32.cpp",
         "Common/src/From2iToMono_32.cpp",
         "Common/src/Mult3s_32x16.cpp",
         "Common/src/Copy_16.cpp",
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
index 3fc9e95..9fe8116 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
@@ -277,13 +277,15 @@
     /*
      * Create biquad instance
      */
-    pInstance->pHPFBiquad.reset(
-            new android::audio_utils::BiquadFilter<LVM_FLOAT>(pParams->NrChannels));
-
+    if (pInstance->Params.NrChannels != pParams->NrChannels) {
+        pInstance->pHPFBiquad.reset(
+                new android::audio_utils::BiquadFilter<LVM_FLOAT>(pParams->NrChannels));
+    }
     /*
      * Update the filters
      */
     if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
+        (pInstance->Params.NrChannels != pParams->NrChannels) ||
         (pInstance->Params.CentreFrequency != pParams->CentreFrequency)) {
         LVDBE_SetFilters(pInstance, /* Instance pointer */
                          pParams);  /* New parameters */
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
index bbe7de0..b113f48 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
@@ -79,6 +79,7 @@
     pInstance->Params.SampleRate = LVDBE_FS_8000;
     pInstance->Params.VolumeControl = LVDBE_VOLUME_OFF;
     pInstance->Params.VolumedB = 0;
+    pInstance->Params.NrChannels = FCC_2;
 
     /*
      * Create pointer to data and coef memory
@@ -91,7 +92,7 @@
      * Create biquad instance
      */
     pInstance->pHPFBiquad.reset(
-            new android::audio_utils::BiquadFilter<LVM_FLOAT>(LVM_MAX_CHANNELS));
+            new android::audio_utils::BiquadFilter<LVM_FLOAT>(pInstance->Params.NrChannels));
     pInstance->pBPFBiquad.reset(new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_1));
 
     /*
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
index 6ea08bc..12b86f3 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
@@ -25,7 +25,6 @@
 #include "LVM_Private.h"
 #include "LVM_Tables.h"
 #include "VectorArithmetic.h"
-#include "InstAlloc.h"
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -138,9 +137,9 @@
 
         pInstance->pBufferManagement->pScratch = (LVM_FLOAT*)pInstance->pScratch;
 
-        LoadConst_Float(0, /* Clear the input delay buffer */
-                        (LVM_FLOAT*)&pInstance->pBufferManagement->InDelayBuffer,
-                        (LVM_INT16)(LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE));
+        memset(pInstance->pBufferManagement->InDelayBuffer, 0,
+                LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE *
+                sizeof(pInstance->pBufferManagement->InDelayBuffer[0]));
         pInstance->pBufferManagement->InDelaySamples =
                 MIN_INTERNAL_BLOCKSIZE;                    /* Set the number of delay samples */
         pInstance->pBufferManagement->OutDelaySamples = 0; /* No samples in the output buffer */
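
Replacing LoadConst_Float(0, ...) with memset is sound only for a zero fill: in IEEE-754, an all-zero byte pattern is exactly +0.0f, so zeroing the bytes of a float buffer gives the same result, and sizing the call with sizeof(buffer[0]) keeps the byte count correct if the sample type ever changes. A non-zero constant would still need a per-element fill. A minimal sketch of both cases:

    #include <algorithm>
    #include <cstring>

    // Zero fill: byte-wise zeroing equals +0.0f per element (IEEE-754).
    static void zeroFloats(LVM_FLOAT* dst, size_t n) {
        memset(dst, 0, n * sizeof(dst[0]));
    }

    // Non-zero fill: must be done per element, e.g. with std::fill.
    static void fillFloats(LVM_FLOAT* dst, size_t n, LVM_FLOAT val) {
        std::fill(dst, dst + n, val);
    }
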
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
index 20058a1..4eea04f 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
@@ -23,6 +23,7 @@
 #include <system/audio.h>
 
 #include "LVM_Private.h"
+#include "ScalarArithmetic.h"
 #include "VectorArithmetic.h"
 #include "LVM_Coeffs.h"
 
@@ -178,6 +179,9 @@
                  * Apply the filter
                  */
                 pInstance->pTEBiquad->process(pProcessed, pProcessed, NrFrames);
+                for (auto i = 0; i < NrChannels * NrFrames; i++) {
+                    pProcessed[i] = LVM_Clamp(pProcessed[i]);
+                }
             }
             /*
              * Volume balance
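
The added loop saturates the treble-boosted samples before the volume/balance stage. Assuming LVM_Clamp (from the newly included ScalarArithmetic.h) limits a sample to the nominal full-scale float range, an equivalent standalone sketch is:

    #include <algorithm>
    #include <cstddef>

    // Illustrative only: clamp each interleaved sample to [-1.0, 1.0] so the
    // downstream volume/balance stage never sees an out-of-range value.
    static void saturateBlock(float* pProcessed, size_t NrChannels, size_t NrFrames) {
        for (size_t i = 0; i < NrChannels * NrFrames; i++) {
            pProcessed[i] = std::clamp(pProcessed[i], -1.0f, 1.0f);
        }
    }
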
diff --git a/media/libeffects/lvm/lib/Common/lib/InstAlloc.h b/media/libeffects/lvm/lib/Common/lib/InstAlloc.h
deleted file mode 100644
index 17699ef..0000000
--- a/media/libeffects/lvm/lib/Common/lib/InstAlloc.h
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __INSTALLOC_H__
-#define __INSTALLOC_H__
-
-#include "LVM_Types.h"
-/*######################################################################################*/
-/*  Type declarations                                                                   */
-/*######################################################################################*/
-typedef struct {
-    LVM_UINT32 TotalSize;  /*  Accumulative total memory size                      */
-    uintptr_t pNextMember; /*  Pointer to the next instance member to be allocated */
-} INST_ALLOC;
-
-/*######################################################################################*/
-/*  Function prototypes                                                          */
-/*######################################################################################*/
-
-/****************************************************************************************
- *  Name        : InstAlloc_Init()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
-                  StartAddr - Base address of the instance memory
- *  Returns     : Error code
- *  Description : Initializes the instance distribution and memory size calculation function
- *  Remarks     :
- ****************************************************************************************/
-
-void InstAlloc_Init(INST_ALLOC* pms, void* StartAddr);
-
-/****************************************************************************************
- *  Name        : InstAlloc_AddMember()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
-                  Size - The size in bytes of the new added member
- *  Returns     : A pointer to the new added member
- *  Description : Allocates space for a new member in the instance memory and returns
-                  a pointer to this new member.  The start address of all members will
-                  be 32 bit alligned.
- *  Remarks     :
- ****************************************************************************************/
-
-void* InstAlloc_AddMember(INST_ALLOC* pms, LVM_UINT32 Size);
-
-/****************************************************************************************
- *  Name        : InstAlloc_GetTotal()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
- *  Returns     : The instance memory size
- *  Description : This functions returns the calculated instance memory size
- *  Remarks     :
- ****************************************************************************************/
-
-LVM_UINT32 InstAlloc_GetTotal(INST_ALLOC* pms);
-
-void* InstAlloc_AddMemberAllRet(INST_ALLOC* pms, LVM_UINT32 Size[], void** ptr);
-
-void* InstAlloc_AddMemberAll(INST_ALLOC* pms, LVM_UINT32 Size[], LVM_MemoryTable_st* pMemoryTable);
-
-void InstAlloc_InitAll(INST_ALLOC* pms, LVM_MemoryTable_st* pMemoryTable);
-
-void InstAlloc_InitAll_NULL(INST_ALLOC* pms);
-
-#endif /* __JBS_INSTALLOC_H__ */
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
index 5cdcf35..7cfaf27 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
@@ -134,27 +134,6 @@
     return LVM_FS_INVALID;
 }
 
-/* Memory Types */
-typedef enum {
-    LVM_PERSISTENT_SLOW_DATA = LVM_MEMREGION_PERSISTENT_SLOW_DATA,
-    LVM_PERSISTENT_FAST_DATA = LVM_MEMREGION_PERSISTENT_FAST_DATA,
-    LVM_PERSISTENT_FAST_COEF = LVM_MEMREGION_PERSISTENT_FAST_COEF,
-    LVM_TEMPORARY_FAST = LVM_MEMREGION_TEMPORARY_FAST,
-    LVM_MEMORYTYPE_DUMMY = LVM_MAXENUM
-} LVM_MemoryTypes_en;
-
-/* Memory region definition */
-typedef struct {
-    LVM_UINT32 Size;         /* Region size in bytes */
-    LVM_MemoryTypes_en Type; /* Region type */
-    void* pBaseAddress;      /* Pointer to the region base address */
-} LVM_MemoryRegion_st;
-
-/* Memory table containing the region definitions */
-typedef struct {
-    LVM_MemoryRegion_st Region[LVM_NR_MEMORY_REGIONS]; /* One definition for each region */
-} LVM_MemoryTable_st;
-
 /****************************************************************************************/
 /*                                                                                      */
 /*  Standard Function Prototypes                                                        */
diff --git a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
index 18de85b..10f351e 100644
--- a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
+++ b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
@@ -24,8 +24,6 @@
     VARIOUS FUNCTIONS
 ***********************************************************************************/
 
-void LoadConst_Float(const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n);
-
 void Copy_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
 void Copy_Float_Mc_Stereo(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 NrFrames,
                           LVM_INT32 NrChannels);
diff --git a/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp b/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp
deleted file mode 100644
index 2cfe056..0000000
--- a/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "InstAlloc.h"
-
-/****************************************************************************************
- *  Name        : InstAlloc_Init()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
-                  StartAddr - Base address of the instance memory
- *  Returns     : Error code
- *  Description : Initializes the instance distribution and memory size calculation function
- *  Remarks     :
- ****************************************************************************************/
-
-void InstAlloc_Init(INST_ALLOC* pms, void* StartAddr) {
-    pms->TotalSize = 3;
-    pms->pNextMember = (((uintptr_t)StartAddr + 3) & (uintptr_t)~3);
-}
-
-/****************************************************************************************
- *  Name        : InstAlloc_AddMember()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
-                  Size - The size in bytes of the new added member
- *  Returns     : A pointer to the new added member
- *  Description : Allocates space for a new member in the instance memory and returns
-                  a pointer to this new member.  The start address of all members will
-                  be 32 bit alligned.
- *  Remarks     :
- ****************************************************************************************/
-
-void* InstAlloc_AddMember(INST_ALLOC* pms, LVM_UINT32 Size) {
-    void* NewMemberAddress; /* Variable to temporarily store the return value */
-    NewMemberAddress = (void*)pms->pNextMember;
-
-    Size = ((Size + 3) & (LVM_UINT32)~3); /* Ceil the size to a multiple of four */
-
-    pms->TotalSize += Size;
-    pms->pNextMember += Size;
-
-    return (NewMemberAddress);
-}
-
-/****************************************************************************************
- *  Name        : InstAlloc_GetTotal()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
- *  Returns     : The instance memory size
- *  Description : This functions returns the calculated instance memory size
- *  Remarks     :
- ****************************************************************************************/
-
-LVM_UINT32 InstAlloc_GetTotal(INST_ALLOC* pms) {
-    if (pms->TotalSize > 3) {
-        return (pms->TotalSize);
-    } else {
-        return 0; /* No memory added */
-    }
-}
-
-void InstAlloc_InitAll(INST_ALLOC* pms, LVM_MemoryTable_st* pMemoryTable) {
-    uintptr_t StartAddr;
-
-    StartAddr = (uintptr_t)pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress;
-
-    pms[0].TotalSize = 3;
-    pms[0].pNextMember = ((StartAddr + 3) & (uintptr_t)~3);
-
-    StartAddr = (uintptr_t)pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress;
-
-    pms[1].TotalSize = 3;
-    pms[1].pNextMember = ((StartAddr + 3) & (uintptr_t)~3);
-
-    StartAddr = (uintptr_t)pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress;
-
-    pms[2].TotalSize = 3;
-    pms[2].pNextMember = ((StartAddr + 3) & (uintptr_t)~3);
-
-    StartAddr = (uintptr_t)pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress;
-
-    pms[3].TotalSize = 3;
-    pms[3].pNextMember = ((StartAddr + 3) & (uintptr_t)~3);
-}
-
-/****************************************************************************************
- *  Name        : InstAlloc_InitAll_NULL()
- *  Input       : pms  - Pointer to array of four INST_ALLOC instances
- *  Returns     : Nothing
- *  Description : This function reserves Size of 3 bytes for all memory regions and
- *                intializes pNextMember for all regions to 0
- *  Remarks     :
- ****************************************************************************************/
-
-void InstAlloc_InitAll_NULL(INST_ALLOC* pms) {
-    pms[0].TotalSize = 3;
-    pms[0].pNextMember = 0;
-
-    pms[1].TotalSize = 3;
-    pms[1].pNextMember = 0;
-
-    pms[2].TotalSize = 3;
-    pms[2].pNextMember = 0;
-
-    pms[3].TotalSize = 3;
-    pms[3].pNextMember = 0;
-}
-
-void* InstAlloc_AddMemberAll(INST_ALLOC* pms, LVM_UINT32 Size[], LVM_MemoryTable_st* pMemoryTable) {
-    void* NewMemberAddress; /* Variable to temporarily store the return value */
-
-    /* coverity[returned_pointer] Ignore coverity warning that ptr is not used */
-    NewMemberAddress =
-            InstAlloc_AddMember(&pms[LVM_PERSISTENT_SLOW_DATA], Size[LVM_PERSISTENT_SLOW_DATA]);
-
-    pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size =
-            InstAlloc_GetTotal(&pms[LVM_PERSISTENT_SLOW_DATA]);
-    pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Type = LVM_PERSISTENT_SLOW_DATA;
-    pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
-
-    NewMemberAddress =
-            InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_DATA], Size[LVM_PERSISTENT_FAST_DATA]);
-
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size =
-            InstAlloc_GetTotal(&pms[LVM_PERSISTENT_FAST_DATA]);
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Type = LVM_PERSISTENT_FAST_DATA;
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
-
-    NewMemberAddress =
-            InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_COEF], Size[LVM_PERSISTENT_FAST_COEF]);
-
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size =
-            InstAlloc_GetTotal(&pms[LVM_PERSISTENT_FAST_COEF]);
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Type = LVM_PERSISTENT_FAST_COEF;
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
-
-    NewMemberAddress = InstAlloc_AddMember(&pms[LVM_TEMPORARY_FAST], Size[LVM_TEMPORARY_FAST]);
-
-    pMemoryTable->Region[LVM_TEMPORARY_FAST].Size = InstAlloc_GetTotal(&pms[LVM_TEMPORARY_FAST]);
-    pMemoryTable->Region[LVM_TEMPORARY_FAST].Type = LVM_TEMPORARY_FAST;
-    pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress = LVM_NULL;
-
-    return (NewMemberAddress);
-}
-
-void* InstAlloc_AddMemberAllRet(INST_ALLOC* pms, LVM_UINT32 Size[], void** ptr) {
-    ptr[0] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_SLOW_DATA], Size[LVM_PERSISTENT_SLOW_DATA]);
-    ptr[1] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_DATA], Size[LVM_PERSISTENT_FAST_DATA]);
-    ptr[2] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_COEF], Size[LVM_PERSISTENT_FAST_COEF]);
-    ptr[3] = InstAlloc_AddMember(&pms[LVM_TEMPORARY_FAST], Size[LVM_TEMPORARY_FAST]);
-
-    return (ptr[0]);
-}
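
InstAlloc was a pointer-bump allocator: a sizing pass accumulated member sizes (rounded up to 4-byte alignment) into a memory table, the caller allocated each region, and a second pass handed out slices of those regions. The rest of this change retires that two-pass scheme in favour of allocating each buffer directly where it is needed. A minimal before/after illustration of the ownership model (the buffer name and kDelaySamples size are hypothetical):

    // Before (removed): a slice of a caller-owned region, sized in a prior pass.
    //   pDelay = (LVM_FLOAT*)InstAlloc_AddMember(&FastData, kDelaySamples * sizeof(LVM_FLOAT));

    // After: the module allocates and owns the buffer directly.
    LVM_FLOAT* pDelay = (LVM_FLOAT*)calloc(kDelaySamples, sizeof(LVM_FLOAT));  // zero-initialised
    // ... use pDelay during processing ...
    free(pDelay);  // released in the matching free/destroy path
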
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
index be19fa0..5a67bda 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
@@ -19,6 +19,7 @@
    INCLUDE FILES
 ***********************************************************************************/
 
+#include <string.h>
 #include "LVC_Mixer_Private.h"
 #include "VectorArithmetic.h"
 #include "ScalarArithmetic.h"
@@ -68,7 +69,7 @@
 
     if (HardMixing) {
         if (pInstance->Target == 0)
-            LoadConst_Float(0.0, dst, n);
+            memset(dst, 0, n * sizeof(*dst));
         else {
             if ((pInstance->Target) != 1.0f)
                 Mult3s_Float(src, (pInstance->Target), dst, n);
@@ -150,7 +151,7 @@
 
     if (HardMixing) {
         if (pInstance->Target == 0)
-            LoadConst_Float(0.0, dst, NrFrames * NrChannels);
+            memset(dst, 0, NrFrames * NrChannels * sizeof(*dst));
         else {
             if ((pInstance->Target) != 1.0f)
                 Mult3s_Float(src, (pInstance->Target), dst, NrFrames * NrChannels);
diff --git a/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp b/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
deleted file mode 100644
index df7a558..0000000
--- a/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "VectorArithmetic.h"
-
-/**********************************************************************************
-   FUNCTION LoadConst_32
-***********************************************************************************/
-void LoadConst_Float(const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n) {
-    LVM_INT16 ii;
-
-    for (ii = n; ii != 0; ii--) {
-        *dst = val;
-        dst++;
-    }
-
-    return;
-}
-
-/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
index 8408962..58a9102 100644
--- a/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
@@ -19,6 +19,7 @@
    INCLUDE FILES
 ***********************************************************************************/
 
+#include <string.h>
 #include "Mixer_private.h"
 #include "VectorArithmetic.h"
 
@@ -61,7 +62,7 @@
 
     if (HardMixing) {
         if (pInstance->Target == 0)
-            LoadConst_Float(0, dst, n);
+            memset(dst, 0, n * sizeof(*dst));
         else if ((pInstance->Target) == 1.0f) {
             if (src != dst) Copy_Float((LVM_FLOAT*)src, (LVM_FLOAT*)dst, (LVM_INT16)(n));
         } else
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
index 8cb7013..3473262 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
@@ -24,7 +24,6 @@
 #include <stdlib.h>
 #include "LVEQNB.h"
 #include "LVEQNB_Private.h"
-#include "InstAlloc.h"
 #include <string.h> /* For memset */
 
 /****************************************************************************************/
diff --git a/media/libeffects/lvm/lib/Reverb/lib/LVREV.h b/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
index 489bc6f..82e94da 100644
--- a/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
+++ b/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
@@ -78,11 +78,6 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory table containing the region definitions */
-typedef struct {
-    LVM_MemoryRegion_st Region[LVREV_NR_MEMORY_REGIONS]; /* One definition for each region */
-} LVREV_MemoryTable_st;
-
 /* Control Parameter structure */
 typedef struct {
     /* General parameters */
@@ -121,46 +116,6 @@
 
 /****************************************************************************************/
 /*                                                                                      */
-/* FUNCTION:                LVREV_GetMemoryTable                                        */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used to obtain the LVREV module memory requirements to support     */
-/*  memory allocation. It can also be used to return the memory base address provided   */
-/*  during memory allocation to support freeing of memory when the LVREV module is no   */
-/*  longer required. It is called in two ways:                                          */
-/*                                                                                      */
-/*  hInstance = NULL                Returns the memory requirements                     */
-/*  hInstance = Instance handle     Returns the memory requirements and allocated       */
-/*                                  base addresses.                                     */
-/*                                                                                      */
-/*  When this function is called with hInstance = NULL the memory base address pointers */
-/*  will be NULL on return.                                                             */
-/*                                                                                      */
-/*  When the function is called for freeing memory, hInstance = Instance Handle the     */
-/*  memory table returns the allocated memory and base addresses used during            */
-/*  initialisation.                                                                     */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory table                            */
-/*  pInstanceParams         Pointer to the instance parameters                          */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVREV_SUCCESS           Succeeded                                                   */
-/*  LVREV_NULLADDRESS       When pMemoryTable is NULL                                   */
-/*  LVREV_NULLADDRESS       When requesting memory requirements and pInstanceParams     */
-/*                          is NULL                                                     */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVREV_Process function                  */
-/*                                                                                      */
-/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t hInstance,
-                                           LVREV_MemoryTable_st* pMemoryTable,
-                                           LVREV_InstanceParams_st* pInstanceParams);
-
-/****************************************************************************************/
-/*                                                                                      */
 /* FUNCTION:                LVREV_GetInstanceHandle                                     */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
@@ -174,7 +129,6 @@
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
 /*  phInstance              Pointer to the instance handle                              */
-/*  pMemoryTable            Pointer to the memory definition table                      */
 /*  pInstanceParams         Pointer to the instance parameters                          */
 /*                                                                                      */
 /* RETURNS:                                                                             */
@@ -186,7 +140,6 @@
 /*                                                                                      */
 /****************************************************************************************/
 LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t* phInstance,
-                                              LVREV_MemoryTable_st* pMemoryTable,
                                               LVREV_InstanceParams_st* pInstanceParams);
 
 /****************************************************************************************/
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
index d4b321f..be3505f 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
@@ -60,7 +60,8 @@
     pLVREV_Private->pRevLPFBiquad->clear();
     for (size_t i = 0; i < pLVREV_Private->InstanceParams.NumDelays; i++) {
         pLVREV_Private->revLPFBiquad[i]->clear();
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[i], LVREV_MAX_T_DELAY[i]);
+        memset(pLVREV_Private->pDelay_T[i], 0, LVREV_MAX_T_DELAY[i] *
+                sizeof(pLVREV_Private->pDelay_T[i][0]));
     }
     return LVREV_SUCCESS;
 }
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
index 298655b..3a63698 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
@@ -21,7 +21,6 @@
 /*                                                                                      */
 /****************************************************************************************/
 #include "LVREV_Private.h"
-#include "InstAlloc.h"
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -34,7 +33,6 @@
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
 /*  phInstance              pointer to the instance handle                              */
-/*  pMemoryTable            Pointer to the memory definition table                      */
 /*  pInstanceParams         Pointer to the instance parameters                          */
 /*                                                                                      */
 /* RETURNS:                                                                             */
@@ -46,12 +44,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t* phInstance,
-                                              LVREV_MemoryTable_st* pMemoryTable,
                                               LVREV_InstanceParams_st* pInstanceParams) {
-    INST_ALLOC SlowData;
-    INST_ALLOC FastData;
-    INST_ALLOC FastCoef;
-    INST_ALLOC Temporary;
     LVREV_Instance_st* pLVREV_Private;
     LVM_INT16 i;
     LVM_UINT16 MaxBlockSize;
@@ -60,18 +53,9 @@
      * Check for error conditions
      */
     /* Check for NULL pointers */
-    if ((phInstance == LVM_NULL) || (pMemoryTable == LVM_NULL) || (pInstanceParams == LVM_NULL)) {
+    if ((phInstance == LVM_NULL) || (pInstanceParams == LVM_NULL)) {
         return LVREV_NULLADDRESS;
     }
-    /* Check the memory table for NULL pointers */
-    for (i = 0; i < LVREV_NR_MEMORY_REGIONS; i++) {
-        if (pMemoryTable->Region[i].Size != 0) {
-            if (pMemoryTable->Region[i].pBaseAddress == LVM_NULL) {
-                return (LVREV_NULLADDRESS);
-            }
-        }
-    }
-
     /*
      * Check all instance parameters are in range
      */
@@ -88,36 +72,12 @@
     }
 
     /*
-     * Initialise the InstAlloc instances
-     */
-    InstAlloc_Init(&SlowData, pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress);
-    InstAlloc_Init(&FastData, pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress);
-    InstAlloc_Init(&FastCoef, pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress);
-    InstAlloc_Init(&Temporary, pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress);
-
-    /*
-     * Zero all memory regions
-     */
-    LoadConst_Float(
-            0, (LVM_FLOAT*)pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress,
-            (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size) / sizeof(LVM_FLOAT)));
-    LoadConst_Float(
-            0, (LVM_FLOAT*)pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress,
-            (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size) / sizeof(LVM_FLOAT)));
-    LoadConst_Float(
-            0, (LVM_FLOAT*)pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress,
-            (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size) / sizeof(LVM_FLOAT)));
-    LoadConst_Float(
-            0, (LVM_FLOAT*)pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress,
-            (LVM_INT16)((pMemoryTable->Region[LVM_TEMPORARY_FAST].Size) / sizeof(LVM_FLOAT)));
-    /*
      * Set the instance handle if not already initialised
      */
     if (*phInstance == LVM_NULL) {
         *phInstance = new LVREV_Instance_st{};
     }
     pLVREV_Private = (LVREV_Instance_st*)*phInstance;
-    pLVREV_Private->MemoryTable = *pMemoryTable;
 
     if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
         MaxBlockSize = LVREV_MAX_AP_DELAY[3];
@@ -135,12 +95,9 @@
      * Set the data, coefficient and temporary memory pointers
      */
     for (size_t i = 0; i < pInstanceParams->NumDelays; i++) {
-        pLVREV_Private->pDelay_T[i] = (LVM_FLOAT*)InstAlloc_AddMember(
-                &FastData, LVREV_MAX_T_DELAY[i] * sizeof(LVM_FLOAT));
+        pLVREV_Private->pDelay_T[i] = (LVM_FLOAT*)calloc(LVREV_MAX_T_DELAY[i], sizeof(LVM_FLOAT));
         /* Scratch for each delay line output */
-        pLVREV_Private->pScratchDelayLine[i] =
-                (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[i], LVREV_MAX_T_DELAY[i]);
+        pLVREV_Private->pScratchDelayLine[i] = (LVM_FLOAT*)calloc(MaxBlockSize, sizeof(LVM_FLOAT));
     }
     /* All-pass delay buffer addresses and sizes */
     for (size_t i = 0; i < LVREV_DELAYLINES_4; i++) {
@@ -149,12 +106,9 @@
     pLVREV_Private->AB_Selection = 1; /* Select smoothing A to B */
 
     /* General purpose scratch */
-    pLVREV_Private->pScratch =
-            (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
+    pLVREV_Private->pScratch = (LVM_FLOAT*)calloc(MaxBlockSize, sizeof(LVM_FLOAT));
     /* Mono->stereo input save for end mix */
-    pLVREV_Private->pInputSave =
-            (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, 2 * sizeof(LVM_FLOAT) * MaxBlockSize);
-    LoadConst_Float(0, pLVREV_Private->pInputSave, (LVM_INT16)(MaxBlockSize * 2));
+    pLVREV_Private->pInputSave = (LVM_FLOAT*)calloc(FCC_2 * MaxBlockSize, sizeof(LVM_FLOAT));
 
     /*
      * Save the instance parameters in the instance structure
@@ -289,7 +243,28 @@
         return LVREV_NULLADDRESS;
     }
 
-    delete (LVREV_Instance_st*)hInstance;
+    LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
+
+    for (size_t i = 0; i < pLVREV_Private->InstanceParams.NumDelays; i++) {
+        if (pLVREV_Private->pDelay_T[i]) {
+            free(pLVREV_Private->pDelay_T[i]);
+            pLVREV_Private->pDelay_T[i] = LVM_NULL;
+        }
+        if (pLVREV_Private->pScratchDelayLine[i]) {
+            free(pLVREV_Private->pScratchDelayLine[i]);
+            pLVREV_Private->pScratchDelayLine[i] = LVM_NULL;
+        }
+    }
+    if (pLVREV_Private->pScratch) {
+        free(pLVREV_Private->pScratch);
+        pLVREV_Private->pScratch = LVM_NULL;
+    }
+    if (pLVREV_Private->pInputSave) {
+        free(pLVREV_Private->pInputSave);
+        pLVREV_Private->pInputSave = LVM_NULL;
+    }
+
+    delete pLVREV_Private;
     return LVREV_SUCCESS;
 }
 /* End of file */
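
With the memory table gone, LVREV_GetInstanceHandle calloc's its own delay lines and scratch buffers and LVREV_FreeInstance releases them; since free(NULL) is a no-op, the null checks before each free are defensive rather than required. A hypothetical alternative that keeps the calloc allocation but makes the release automatic would be an owning pointer with a free() deleter:

    #include <cstdlib>
    #include <memory>

    struct FreeDeleter {
        void operator()(void* p) const { free(p); }
    };
    // Owning buffer type: calloc-backed, released with free() on destruction.
    using FloatBuffer = std::unique_ptr<LVM_FLOAT[], FreeDeleter>;

    static FloatBuffer makeFloatBuffer(size_t n) {
        return FloatBuffer(static_cast<LVM_FLOAT*>(calloc(n, sizeof(LVM_FLOAT))));
    }
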
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp
deleted file mode 100644
index 02ceb16..0000000
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/****************************************************************************************/
-/*                                                                                      */
-/*  Includes                                                                            */
-/*                                                                                      */
-/****************************************************************************************/
-#include "LVREV_Private.h"
-#include "InstAlloc.h"
-
-/****************************************************************************************/
-/*                                                                                      */
-/* FUNCTION:                LVREV_GetMemoryTable                                        */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*  hInstance = NULL                Returns the memory requirements                     */
-/*  hInstance = Instance handle     Returns the memory requirements and allocated       */
-/*                                  base addresses.                                     */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory      */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the memory       */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory table                            */
-/*  pInstanceParams         Pointer to the instance parameters                          */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVREV_Success           Succeeded                                                   */
-/*  LVREV_NULLADDRESS       When pMemoryTable is NULL                                   */
-/*  LVREV_NULLADDRESS       When requesting memory requirements and pInstanceParams     */
-/*                          is NULL                                                     */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVREV_Process function                  */
-/*                                                                                      */
-/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t hInstance,
-                                           LVREV_MemoryTable_st* pMemoryTable,
-                                           LVREV_InstanceParams_st* pInstanceParams) {
-    INST_ALLOC SlowData;
-    INST_ALLOC FastData;
-    INST_ALLOC FastCoef;
-    INST_ALLOC Temporary;
-    LVM_UINT16 MaxBlockSize;
-
-    /*
-     * Check for error conditions
-     */
-    /* Check for NULL pointer */
-    if (pMemoryTable == LVM_NULL) {
-        return (LVREV_NULLADDRESS);
-    }
-
-    /*
-     * Check all instance parameters are in range
-     */
-    if (pInstanceParams != LVM_NULL) {
-        /*
-         * Call for memory allocation, so check the parameters
-         */
-        /* Check for a non-zero block size */
-        if (pInstanceParams->MaxBlockSize == 0) {
-            return LVREV_OUTOFRANGE;
-        }
-
-        /* Check for a valid number of delay lines */
-        if ((pInstanceParams->NumDelays != LVREV_DELAYLINES_1) &&
-            (pInstanceParams->NumDelays != LVREV_DELAYLINES_2) &&
-            (pInstanceParams->NumDelays != LVREV_DELAYLINES_4)) {
-            return LVREV_OUTOFRANGE;
-        }
-    }
-
-    /*
-     * Initialise the InstAlloc instances
-     */
-    InstAlloc_Init(&SlowData, (void*)LVM_NULL);
-    InstAlloc_Init(&FastData, (void*)LVM_NULL);
-    InstAlloc_Init(&FastCoef, (void*)LVM_NULL);
-    InstAlloc_Init(&Temporary, (void*)LVM_NULL);
-
-    /*
-     * Fill in the memory table
-     */
-    if (hInstance == LVM_NULL) {
-        /*
-         * Check for null pointers
-         */
-        if (pInstanceParams == LVM_NULL) {
-            return (LVREV_NULLADDRESS);
-        }
-
-        /*
-         * Select the maximum internal block size
-         */
-        if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
-            MaxBlockSize = LVREV_MAX_AP_DELAY[3];
-        } else if (pInstanceParams->NumDelays == LVREV_DELAYLINES_2) {
-            MaxBlockSize = LVREV_MAX_AP_DELAY[1];
-        } else {
-            MaxBlockSize = LVREV_MAX_AP_DELAY[0];
-        }
-
-        if (MaxBlockSize > pInstanceParams->MaxBlockSize) {
-            MaxBlockSize = pInstanceParams->MaxBlockSize;
-        }
-
-        /*
-         * Slow data memory
-         */
-        InstAlloc_AddMember(&SlowData, sizeof(LVREV_Instance_st));
-        pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size = InstAlloc_GetTotal(&SlowData);
-        pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Type = LVM_PERSISTENT_SLOW_DATA;
-        pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
-
-        /*
-         * Persistent fast data memory
-         */
-        for (size_t i = 0; i < pInstanceParams->NumDelays; i++) {
-            InstAlloc_AddMember(&FastData, LVREV_MAX_T_DELAY[i] * sizeof(LVM_FLOAT));
-        }
-
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size = InstAlloc_GetTotal(&FastData);
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Type = LVM_PERSISTENT_FAST_DATA;
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
-
-        /*
-         * Persistent fast coefficient memory
-         */
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size = InstAlloc_GetTotal(&FastCoef);
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Type = LVM_PERSISTENT_FAST_COEF;
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
-
-        /*
-         * Temporary fast memory
-         */
-        /* General purpose scratch memory */
-        InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
-        /* Mono->stereo input saved for end mix */
-        InstAlloc_AddMember(&Temporary, 2 * sizeof(LVM_FLOAT) * MaxBlockSize);
-        for (size_t i = 0; i < pInstanceParams->NumDelays; i++) {
-            /* A Scratch buffer for each delay line */
-            InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
-        }
-
-        pMemoryTable->Region[LVM_TEMPORARY_FAST].Size = InstAlloc_GetTotal(&Temporary);
-        pMemoryTable->Region[LVM_TEMPORARY_FAST].Type = LVM_TEMPORARY_FAST;
-        pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress = LVM_NULL;
-
-    } else {
-        LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
-
-        /*
-         * Read back memory allocation table
-         */
-        *pMemoryTable = pLVREV_Private->MemoryTable;
-    }
-
-    return (LVREV_SUCCESS);
-}
-
-/* End of file */
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h b/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
index 33f8165..9a2f9ca 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
@@ -103,11 +103,9 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-
 typedef struct {
     /* General */
     LVREV_InstanceParams_st InstanceParams; /* Initialisation time instance parameters */
-    LVREV_MemoryTable_st MemoryTable;       /* Memory table */
     LVREV_ControlParams_st CurrentParams;   /* Parameters being used */
     LVREV_ControlParams_st NewParams;       /* New parameters from the \
                                                calling application */
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
index 5550b9c..a0f28b1 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
@@ -18,7 +18,6 @@
 #include <stdlib.h>
 #include "LVPSA.h"
 #include "LVPSA_Private.h"
-#include "InstAlloc.h"
 
 /************************************************************************************/
 /*                                                                                  */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
index 1746786..2b628f1 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
@@ -72,8 +72,8 @@
 
         std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
                 pEqualiserCoefTable[Offset].A0, pEqualiserCoefTable[Offset].A1,
-                pEqualiserCoefTable[Offset].A2, -(pEqualiserCoefTable[Offset].B1),
-                -(pEqualiserCoefTable[Offset].B2)};
+                pEqualiserCoefTable[Offset].A2, pEqualiserCoefTable[Offset].B1,
+                pEqualiserCoefTable[Offset].B2};
         pInstance->pEqBiquad.reset(new android::audio_utils::BiquadFilter<LVM_FLOAT>(
                 (pParams->NrChannels == FCC_1) ? FCC_1 : FCC_2, coefs));
     }
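
The negation on the B1/B2 terms is dropped here (and in the ReverbGenerator and StereoEnhancer hunks that follow), so the feedback coefficients are now passed through unchanged; whether the minus sign lives in the coefficient table or at the call site is a convention that has to agree with the shared BiquadFilter implementation. Both forms describe the same direct-form difference equation, sketched below with explicit feedback signs and the {A0, A1, A2, B1, B2} ordering used in the diff:

    // y[n] = A0*x[n] + A1*x[n-1] + A2*x[n-2] - B1*y[n-1] - B2*y[n-2]
    // z[] holds {x[n-1], x[n-2], y[n-1], y[n-2]}.
    static float biquadStep(const float c[5], float x, float z[4]) {
        const float y = c[0] * x + c[1] * z[0] + c[2] * z[1] - c[3] * z[2] - c[4] * z[3];
        z[1] = z[0]; z[0] = x;  // shift input history
        z[3] = z[2]; z[2] = y;  // shift output history
        return y;
    }
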
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
index 12b1dc3..de23d07 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
@@ -81,10 +81,7 @@
         pConfig->DelaySize =
                 (pParams->NrChannels == FCC_1) ? (LVM_INT16)Delay : (LVM_INT16)(FCC_2 * Delay);
         pConfig->DelayOffset = 0;
-        LoadConst_Float(0,                                      /* Value */
-                        (LVM_FLOAT*)&pConfig->StereoSamples[0], /* Destination */
-                        /* Number of words */
-                        (LVM_UINT16)(sizeof(pConfig->StereoSamples) / sizeof(LVM_FLOAT)));
+        memset(pConfig->StereoSamples, 0, sizeof(pConfig->StereoSamples));
         /*
          * Setup the filters
          */
@@ -93,8 +90,8 @@
 
         std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
                 pReverbCoefTable[Offset].A0, pReverbCoefTable[Offset].A1,
-                pReverbCoefTable[Offset].A2, -(pReverbCoefTable[Offset].B1),
-                -(pReverbCoefTable[Offset].B2)};
+                pReverbCoefTable[Offset].A2, pReverbCoefTable[Offset].B1,
+                pReverbCoefTable[Offset].B2};
         pInstance->pRevBiquad.reset(new android::audio_utils::BiquadFilter<LVM_FLOAT>(
                 (pParams->NrChannels == FCC_1) ? FCC_1 : FCC_2, coefs));
 
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
index e3ff604..3ca25f9 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
@@ -68,7 +68,7 @@
 
         std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
                 LVCS_SEMidCoefTable[Offset].A0, LVCS_SEMidCoefTable[Offset].A1, 0.0,
-                -(LVCS_SEMidCoefTable[Offset].B1), 0.0};
+                LVCS_SEMidCoefTable[Offset].B1, 0.0};
         pInstance->pSEMidBiquad.reset(
                 new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_1, coefs));
 
@@ -77,7 +77,7 @@
 
         /* Side filter */
         coefs = {pSESideCoefs[Offset].A0, pSESideCoefs[Offset].A1, pSESideCoefs[Offset].A2,
-                 -(pSESideCoefs[Offset].B1), -(pSESideCoefs[Offset].B2)};
+                 pSESideCoefs[Offset].B1, pSESideCoefs[Offset].B2};
         pInstance->pSESideBiquad.reset(
                 new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_1, coefs));
     }
diff --git a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
index df7ca5a..7571a24 100755
--- a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
+++ b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
@@ -53,16 +53,16 @@
 flags_arr=(
     "-csE"
     "-eqE"
-    "-tE"
-    "-csE -tE -eqE"
+    "-tE -trebleLvl:15"
+    "-csE -tE -trebleLvl:15 -eqE"
     "-bE -M"
-    "-csE -tE"
-    "-csE -eqE" "-tE -eqE"
-    "-csE -tE -bE -M -eqE"
-    "-tE -eqE -vcBal:96 -M"
-    "-tE -eqE -vcBal:-96 -M"
-    "-tE -eqE -vcBal:0 -M"
-    "-tE -eqE -bE -vcBal:30 -M"
+    "-csE -tE -trebleLvl:15"
+    "-csE -eqE" "-tE -trebleLvl:15 -eqE"
+    "-csE -tE -trebleLvl:15 -bE -M -eqE"
+    "-tE -trebleLvl:15 -eqE -vcBal:96 -M"
+    "-tE -trebleLvl:15 -eqE -vcBal:-96 -M"
+    "-tE -trebleLvl:15 -eqE -vcBal:0 -M"
+    "-tE -trebleLvl:15 -eqE -bE -vcBal:30 -M"
 )
 
 fs_arr=(
diff --git a/media/libeffects/lvm/tests/lvmtest.cpp b/media/libeffects/lvm/tests/lvmtest.cpp
index e484a1a..e65228c 100644
--- a/media/libeffects/lvm/tests/lvmtest.cpp
+++ b/media/libeffects/lvm/tests/lvmtest.cpp
@@ -79,6 +79,7 @@
     int bassEffectLevel = 0;
     int eqPresetLevel = 0;
     int frameLength = 256;
+    int trebleEffectLevel = 0;
     LVM_BE_Mode_en bassEnable = LVM_BE_OFF;
     LVM_TE_Mode_en trebleEnable = LVM_TE_OFF;
     LVM_EQNB_Mode_en eqEnable = LVM_EQNB_OFF;
@@ -303,10 +304,6 @@
     params->PSA_Enable = LVM_PSA_OFF;
     params->PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
 
-    /* TE Control parameters */
-    params->TE_OperatingMode = LVM_TE_OFF;
-    params->TE_EffectLevel = 0;
-
     /* Activate the initial settings */
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
 
@@ -445,6 +442,7 @@
 
     /* Treble Enhancement parameters */
     params->TE_OperatingMode = plvmConfigParams->trebleEnable;
+    params->TE_EffectLevel = plvmConfigParams->trebleEffectLevel;
 
     /* PSA Control parameters */
     params->PSA_Enable = LVM_PSA_ON;
@@ -604,6 +602,15 @@
                 return -1;
             }
             lvmConfigParams.eqPresetLevel = eqPresetLevel;
+        } else if (!strncmp(argv[i], "-trebleLvl:", 11)) {
+            const int trebleEffectLevel = atoi(argv[i] + 11);
+            if (trebleEffectLevel > LVM_TE_MAX_EFFECTLEVEL ||
+                trebleEffectLevel < LVM_TE_MIN_EFFECTLEVEL) {
+                printf("Error: Unsupported Treble Effect Level : %d\n", trebleEffectLevel);
+                printUsage();
+                return -1;
+            }
+            lvmConfigParams.trebleEffectLevel = trebleEffectLevel;
         } else if (!strcmp(argv[i], "-bE")) {
             lvmConfigParams.bassEnable = LVM_BE_ON;
         } else if (!strcmp(argv[i], "-eqE")) {
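
The new -trebleLvl flag is parsed with atoi and range-checked against LVM_TE_MIN_EFFECTLEVEL/LVM_TE_MAX_EFFECTLEVEL. One caveat with atoi is that malformed input such as "-trebleLvl:abc" silently becomes 0 and is accepted whenever 0 is a valid level; a stricter, purely illustrative variant using strtol would be:

    #include <cstdlib>

    // Returns true only if arg is a well-formed integer within [minVal, maxVal].
    static bool parseLevel(const char* arg, long minVal, long maxVal, int* out) {
        char* end = nullptr;
        const long v = strtol(arg, &end, 10);
        if (end == arg || *end != '\0' || v < minVal || v > maxVal) return false;
        *out = (int)v;
        return true;
    }

    // e.g. parseLevel(argv[i] + 11, LVM_TE_MIN_EFFECTLEVEL, LVM_TE_MAX_EFFECTLEVEL,
    //                 &trebleEffectLevel)
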
diff --git a/media/libeffects/lvm/tests/reverb_test.cpp b/media/libeffects/lvm/tests/reverb_test.cpp
index 0ea401c..dfb6970 100644
--- a/media/libeffects/lvm/tests/reverb_test.cpp
+++ b/media/libeffects/lvm/tests/reverb_test.cpp
@@ -212,7 +212,9 @@
         printUsage();
         return EXIT_FAILURE;
     }
-
+    for (int i = 1; i < argc; i++) {
+        printf("%s ", argv[i]);
+    }
     reverbConfigParams_t revConfigParams{};  // default initialize
     const char* inputFile = nullptr;
     const char* outputFile = nullptr;
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index 3738d62..290a7b1 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -516,25 +516,6 @@
 
 void Reverb_free(ReverbContext* pContext) {
     LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
-    LVREV_MemoryTable_st MemTab;
-
-    /* Free the algorithm memory */
-    LvmStatus = LVREV_GetMemoryTable(pContext->hInstance, &MemTab, LVM_NULL);
-
-    LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "Reverb_free")
-
-    for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
-        if (MemTab.Region[i].Size != 0) {
-            if (MemTab.Region[i].pBaseAddress != NULL) {
-                free(MemTab.Region[i].pBaseAddress);
-            } else {
-                ALOGV("\tLVM_ERROR : free() - trying to free with NULL pointer %" PRIu32
-                      " bytes "
-                      "for region %u at %p ERROR\n",
-                      MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-            }
-        }
-    }
 
     LvmStatus = LVREV_FreeInstance(pContext->hInstance);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_FreeInstance", "Reverb_free")
@@ -677,65 +658,17 @@
     LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
     LVREV_ControlParams_st params;                   /* Control Parameters */
     LVREV_InstanceParams_st InstParams;              /* Instance parameters */
-    LVREV_MemoryTable_st MemTab;                     /* Memory allocation table */
-    bool bMallocFailure = LVM_FALSE;
 
     /* Set the capabilities */
     InstParams.MaxBlockSize = MAX_CALL_SIZE;
     InstParams.SourceFormat = LVM_STEREO;  // Max format, could be mono during process
     InstParams.NumDelays = LVREV_DELAYLINES_4;
 
-    /* Allocate memory, forcing alignment */
-    LvmStatus = LVREV_GetMemoryTable(LVM_NULL, &MemTab, &InstParams);
-
-    LVM_ERROR_CHECK(LvmStatus, "LVREV_GetMemoryTable", "Reverb_init")
-    if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
-
-    ALOGV("\tCreateInstance Successfully called LVM_GetMemoryTable\n");
-
-    /* Allocate memory */
-    for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
-        if (MemTab.Region[i].Size != 0) {
-            MemTab.Region[i].pBaseAddress = calloc(1, MemTab.Region[i].Size);
-
-            if (MemTab.Region[i].pBaseAddress == LVM_NULL) {
-                ALOGV("\tLVREV_ERROR :Reverb_init CreateInstance Failed to allocate %" PRIu32
-                      " bytes for region %u\n",
-                      MemTab.Region[i].Size, i);
-                bMallocFailure = LVM_TRUE;
-            } else {
-                ALOGV("\tReverb_init CreateInstance allocate %" PRIu32
-                      " bytes for region %u at %p\n",
-                      MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-            }
-        }
-    }
-
-    /* If one or more of the memory regions failed to allocate, free the regions that were
-     * succesfully allocated and return with an error
-     */
-    if (bMallocFailure == LVM_TRUE) {
-        for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
-            if (MemTab.Region[i].pBaseAddress == LVM_NULL) {
-                ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed to allocate %" PRIu32
-                      " bytes for region %u - Not freeing\n",
-                      MemTab.Region[i].Size, i);
-            } else {
-                ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed: but allocated %" PRIu32
-                      " bytes for region %u at %p- free\n",
-                      MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-                free(MemTab.Region[i].pBaseAddress);
-            }
-        }
-        return -EINVAL;
-    }
-    ALOGV("\tReverb_init CreateInstance Successfully malloc'd memory\n");
-
     /* Initialise */
     pContext->hInstance = LVM_NULL;
 
     /* Init sets the instance handle */
-    LvmStatus = LVREV_GetInstanceHandle(&pContext->hInstance, &MemTab, &InstParams);
+    LvmStatus = LVREV_GetInstanceHandle(&pContext->hInstance, &InstParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "Reverb_init")
     if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
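
With the two-pass memory-table allocation removed, the wrapper's reverb lifecycle reduces to creating the instance and freeing it; all internal buffers are owned by the LVREV library. A sketch of the resulting call sequence (error handling and control-parameter setup trimmed):

    LVREV_Handle_t hInstance = LVM_NULL;
    LVREV_InstanceParams_st instParams;
    instParams.MaxBlockSize = MAX_CALL_SIZE;
    instParams.SourceFormat = LVM_STEREO;
    instParams.NumDelays = LVREV_DELAYLINES_4;

    // Init sets the instance handle and allocates all internal buffers.
    if (LVREV_GetInstanceHandle(&hInstance, &instParams) != LVREV_SUCCESS) return -EINVAL;

    // ... LVREV_SetControlParameters / LVREV_Process calls ...

    LVREV_FreeInstance(hInstance);  // releases everything GetInstanceHandle allocated
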
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index eb3ce34..c6e036a 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -18,14 +18,10 @@
     ],
 }
 
-cc_library {
-    name: "libaudiopreprocessing",
+cc_defaults {
+    name: "libaudiopreprocessing-defaults",
     vendor: true,
-    relative_install_path: "soundfx",
-    srcs: ["PreProcessing.cpp"],
-    local_include_dirs: [
-        ".",
-    ],
+    host_supported: true,
     cflags: [
         "-Wall",
         "-Werror",
@@ -45,6 +41,20 @@
     header_libs: [
         "libaudioeffects",
         "libhardware_headers",
+    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+cc_library {
+    name: "libaudiopreprocessing",
+    defaults: ["libaudiopreprocessing-defaults"],
+    relative_install_path: "soundfx",
+    srcs: ["PreProcessing.cpp"],
+    header_libs: [
         "libwebrtc_absl_headers",
     ],
 }
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index 03ccc34..19a8b2f 100644
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -105,9 +105,8 @@
     webrtc::AudioProcessing* apm;  // handle on webRTC audio processing module (APM)
     // Audio Processing module builder
     webrtc::AudioProcessingBuilder ap_builder;
-    size_t apmFrameCount;      // buffer size for webRTC process (10 ms)
-    uint32_t apmSamplingRate;  // webRTC APM sampling rate (8/16 or 32 kHz)
-    size_t frameCount;         // buffer size before input resampler ( <=> apmFrameCount)
+    // frameCount is the size of the processing buffers and must correspond to 10 ms of audio.
+    size_t frameCount;
     uint32_t samplingRate;     // sampling rate at effect process interface
     uint32_t inChannelCount;   // input channel count
     uint32_t outChannelCount;  // output channel count
@@ -119,21 +118,12 @@
     webrtc::AudioProcessing::Config config;
     webrtc::StreamConfig inputConfig;   // input stream configuration
     webrtc::StreamConfig outputConfig;  // output stream configuration
-    int16_t* inBuf;    // input buffer used when resampling
-    size_t inBufSize;  // input buffer size in frames
-    size_t framesIn;   // number of frames in input buffer
-    int16_t* outBuf;    // output buffer used when resampling
-    size_t outBufSize;  // output buffer size in frames
-    size_t framesOut;   // number of frames in output buffer
     uint32_t revChannelCount;  // number of channels on reverse stream
     uint32_t revEnabledMsk;    // bit field containing IDs of enabled pre processors
                                // with reverse channel
     uint32_t revProcessedMsk;  // bit field containing IDs of pre processors with reverse
                                // channel already processed in current round
     webrtc::StreamConfig revConfig;     // reverse stream configuration.
-    int16_t* revBuf;    // reverse channel input buffer
-    size_t revBufSize;  // reverse channel input buffer size
-    size_t framesRev;   // number of frames in reverse channel input buffer
 };
 
 #ifdef DUAL_MIC_TEST
@@ -670,8 +660,8 @@
     return 0;
 }
 
-int NsGetParameter(preproc_effect_t* effect __unused, void* pParam __unused,
-                   uint32_t* pValueSize __unused, void* pValue __unused) {
+int NsGetParameter(preproc_effect_t* /*effect __unused*/, void* /*pParam __unused*/,
+                   uint32_t* /*pValueSize __unused*/, void* /*pValue __unused*/) {
     int status = 0;
     return status;
 }
@@ -862,9 +852,7 @@
             ALOGW("Session_CreateEffect could not get apm engine");
             goto error;
         }
-        session->apmSamplingRate = kPreprocDefaultSr;
-        session->apmFrameCount = (kPreprocDefaultSr) / 100;
-        session->frameCount = session->apmFrameCount;
+        session->frameCount = kPreprocDefaultSr / 100;
         session->samplingRate = kPreprocDefaultSr;
         session->inChannelCount = kPreProcDefaultCnl;
         session->outChannelCount = kPreProcDefaultCnl;
@@ -879,12 +867,6 @@
         session->processedMsk = 0;
         session->revEnabledMsk = 0;
         session->revProcessedMsk = 0;
-        session->inBuf = NULL;
-        session->inBufSize = 0;
-        session->outBuf = NULL;
-        session->outBufSize = 0;
-        session->revBuf = NULL;
-        session->revBufSize = 0;
     }
     status = Effect_Create(&session->effects[procId], session, interface);
     if (status < 0) {
@@ -908,13 +890,6 @@
     if (session->createdMsk == 0) {
         delete session->apm;
         session->apm = NULL;
-        delete session->inBuf;
-        session->inBuf = NULL;
-        delete session->outBuf;
-        session->outBuf = NULL;
-        delete session->revBuf;
-        session->revBuf = NULL;
-
         session->id = 0;
     }
 
@@ -934,24 +909,8 @@
     ALOGV("Session_SetConfig sr %d cnl %08x", config->inputCfg.samplingRate,
           config->inputCfg.channels);
 
-    // AEC implementation is limited to 16kHz
-    if (config->inputCfg.samplingRate >= 32000 && !(session->createdMsk & (1 << PREPROC_AEC))) {
-        session->apmSamplingRate = 32000;
-    } else if (config->inputCfg.samplingRate >= 16000) {
-        session->apmSamplingRate = 16000;
-    } else if (config->inputCfg.samplingRate >= 8000) {
-        session->apmSamplingRate = 8000;
-    }
-
-
     session->samplingRate = config->inputCfg.samplingRate;
-    session->apmFrameCount = session->apmSamplingRate / 100;
-    if (session->samplingRate == session->apmSamplingRate) {
-        session->frameCount = session->apmFrameCount;
-    } else {
-        session->frameCount =
-                (session->apmFrameCount * session->samplingRate) / session->apmSamplingRate;
-    }
+    session->frameCount = session->samplingRate / 100;
     session->inChannelCount = inCnl;
     session->outChannelCount = outCnl;
     session->inputConfig.set_sample_rate_hz(session->samplingRate);
@@ -963,13 +922,6 @@
     session->revConfig.set_sample_rate_hz(session->samplingRate);
     session->revConfig.set_num_channels(inCnl);
 
-    // force process buffer reallocation
-    session->inBufSize = 0;
-    session->outBufSize = 0;
-    session->framesIn = 0;
-    session->framesOut = 0;
-
-
     session->state = PREPROC_SESSION_STATE_CONFIG;
     return 0;
 }
@@ -1004,9 +956,6 @@
     }
     uint32_t inCnl = audio_channel_count_from_out_mask(config->inputCfg.channels);
     session->revChannelCount = inCnl;
-    // force process buffer reallocation
-    session->revBufSize = 0;
-    session->framesRev = 0;
 
     return 0;
 }
@@ -1023,12 +972,8 @@
 
 void Session_SetProcEnabled(preproc_session_t* session, uint32_t procId, bool enabled) {
     if (enabled) {
-        if (session->enabledMsk == 0) {
-            session->framesIn = 0;
-        }
         session->enabledMsk |= (1 << procId);
         if (HasReverseStream(procId)) {
-            session->framesRev = 0;
             session->revEnabledMsk |= (1 << procId);
         }
     } else {
@@ -1117,43 +1062,24 @@
         return -EINVAL;
     }
 
+    if (inBuffer->frameCount != outBuffer->frameCount) {
+        ALOGW("inBuffer->frameCount %zu is not equal to outBuffer->frameCount %zu",
+              inBuffer->frameCount, outBuffer->frameCount);
+        return -EINVAL;
+    }
+
+    if (inBuffer->frameCount != session->frameCount) {
+        ALOGW("inBuffer->frameCount %zu != %zu representing 10ms at sampling rate %d",
+              inBuffer->frameCount, session->frameCount, session->samplingRate);
+        return -EINVAL;
+    }
+
     session->processedMsk |= (1 << effect->procId);
 
     //    ALOGV("PreProcessingFx_Process In %d frames enabledMsk %08x processedMsk %08x",
     //         inBuffer->frameCount, session->enabledMsk, session->processedMsk);
-
     if ((session->processedMsk & session->enabledMsk) == session->enabledMsk) {
         effect->session->processedMsk = 0;
-        size_t framesRq = outBuffer->frameCount;
-        size_t framesWr = 0;
-        if (session->framesOut) {
-            size_t fr = session->framesOut;
-            if (outBuffer->frameCount < fr) {
-                fr = outBuffer->frameCount;
-            }
-            memcpy(outBuffer->s16, session->outBuf,
-                   fr * session->outChannelCount * sizeof(int16_t));
-            memmove(session->outBuf, session->outBuf + fr * session->outChannelCount,
-                    (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
-            session->framesOut -= fr;
-            framesWr += fr;
-        }
-        outBuffer->frameCount = framesWr;
-        if (framesWr == framesRq) {
-            inBuffer->frameCount = 0;
-            return 0;
-        }
-
-        size_t fr = session->frameCount - session->framesIn;
-        if (inBuffer->frameCount < fr) {
-            fr = inBuffer->frameCount;
-        }
-        session->framesIn += fr;
-        inBuffer->frameCount = fr;
-        if (session->framesIn < session->frameCount) {
-            return 0;
-        }
-        session->framesIn = 0;
         if (int status = effect->session->apm->ProcessStream(
                     (const int16_t* const)inBuffer->s16,
                     (const webrtc::StreamConfig)effect->session->inputConfig,
@@ -1163,34 +1089,6 @@
             ALOGE("Process Stream failed with error %d\n", status);
             return status;
         }
-        outBuffer->frameCount = inBuffer->frameCount;
-
-        if (session->outBufSize < session->framesOut + session->frameCount) {
-            int16_t* buf;
-            session->outBufSize = session->framesOut + session->frameCount;
-            buf = (int16_t*)realloc(
-                    session->outBuf,
-                    session->outBufSize * session->outChannelCount * sizeof(int16_t));
-            if (buf == NULL) {
-                session->framesOut = 0;
-                free(session->outBuf);
-                session->outBuf = NULL;
-                return -ENOMEM;
-            }
-            session->outBuf = buf;
-        }
-
-        fr = session->framesOut;
-        if (framesRq - framesWr < fr) {
-            fr = framesRq - framesWr;
-        }
-        memcpy(outBuffer->s16 + framesWr * session->outChannelCount, session->outBuf,
-               fr * session->outChannelCount * sizeof(int16_t));
-        memmove(session->outBuf, session->outBuf + fr * session->outChannelCount,
-                (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
-        session->framesOut -= fr;
-        outBuffer->frameCount += fr;
-
         return 0;
     } else {
         return -ENODATA;
@@ -1551,7 +1449,7 @@
 }
 
 int PreProcessingFx_ProcessReverse(effect_handle_t self, audio_buffer_t* inBuffer,
-                                   audio_buffer_t* outBuffer __unused) {
+                                   audio_buffer_t* outBuffer) {
     preproc_effect_t* effect = (preproc_effect_t*)self;
 
     if (effect == NULL) {
@@ -1565,6 +1463,18 @@
         return -EINVAL;
     }
 
+    if (inBuffer->frameCount != outBuffer->frameCount) {
+        ALOGW("inBuffer->frameCount %zu is not equal to outBuffer->frameCount %zu",
+              inBuffer->frameCount, outBuffer->frameCount);
+        return -EINVAL;
+    }
+
+    if (inBuffer->frameCount != session->frameCount) {
+        ALOGW("inBuffer->frameCount %zu != %zu representing 10ms at sampling rate %d",
+              inBuffer->frameCount, session->frameCount, session->samplingRate);
+        return -EINVAL;
+    }
+
     session->revProcessedMsk |= (1 << effect->procId);
 
     //    ALOGV("PreProcessingFx_ProcessReverse In %d frames revEnabledMsk %08x revProcessedMsk
@@ -1573,16 +1483,6 @@
 
     if ((session->revProcessedMsk & session->revEnabledMsk) == session->revEnabledMsk) {
         effect->session->revProcessedMsk = 0;
-        size_t fr = session->frameCount - session->framesRev;
-        if (inBuffer->frameCount < fr) {
-            fr = inBuffer->frameCount;
-        }
-        session->framesRev += fr;
-        inBuffer->frameCount = fr;
-        if (session->framesRev < session->frameCount) {
-            return 0;
-        }
-        session->framesRev = 0;
         if (int status = effect->session->apm->ProcessReverseStream(
                     (const int16_t* const)inBuffer->s16,
                     (const webrtc::StreamConfig)effect->session->revConfig,
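The guards added above make both PreProcessingFx_Process() and PreProcessingFx_ProcessReverse() reject anything other than matching, exactly 10 ms buffers now that the internal accumulation buffers are gone. A compile-on-its-own sketch of that contract, using simplified stand-ins for audio_buffer_t and the session state (the struct names below are illustrative, not from the patch):

```cpp
#include <cstddef>
#include <cstdio>

// Simplified stand-ins for audio_buffer_t and preproc_session_t (sketch only).
struct Buffer  { std::size_t frameCount; };
struct Session { std::size_t frameCount; unsigned samplingRate; };  // frameCount == samplingRate / 100

// Mirrors the new checks: input and output frame counts must match and must equal
// the session's 10 ms frame count; otherwise the effect returns -EINVAL.
bool framesValid(const Buffer& in, const Buffer& out, const Session& s) {
    return in.frameCount == out.frameCount && in.frameCount == s.frameCount;
}

int main() {
    const Session s{480, 48000};    // 10 ms at 48 kHz
    const Buffer in{480}, out{480};
    std::printf("valid: %d\n", framesValid(in, out, s) ? 1 : 0);  // prints "valid: 1"
    return 0;
}
```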
diff --git a/media/libeffects/preprocessing/README.md b/media/libeffects/preprocessing/README.md
new file mode 100644
index 0000000..af46376
--- /dev/null
+++ b/media/libeffects/preprocessing/README.md
@@ -0,0 +1,7 @@
+# Preprocessing effects
+
+## Limitations
+- Preprocessing effects currently work on 10ms worth of data and do not support
+  arbitrary frame counts. This limitation comes from the underlying effects in
+  the webrtc modules.
+- There is currently no API to communicate this requirement.
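A caller-side sizing sketch for the constraint described in the README above, using plain arithmetic only (no effect APIs): every process call has to carry sampleRate / 100 frames.

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

// One processing call must carry exactly 10 ms of interleaved 16-bit audio,
// e.g. 16 kHz mono -> 160 frames (160 samples), 48 kHz stereo -> 480 frames (960 samples).
std::vector<std::int16_t> makeTenMsBuffer(std::uint32_t sampleRateHz, std::size_t channelCount) {
    const std::size_t frameCount = sampleRateHz / 100;            // frames in 10 ms
    return std::vector<std::int16_t>(frameCount * channelCount);  // zero-initialized samples
}
```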
diff --git a/media/libeffects/preprocessing/benchmarks/Android.bp b/media/libeffects/preprocessing/benchmarks/Android.bp
index c1b2295..fbbcab4 100644
--- a/media/libeffects/preprocessing/benchmarks/Android.bp
+++ b/media/libeffects/preprocessing/benchmarks/Android.bp
@@ -11,27 +11,10 @@
 
 cc_benchmark {
     name: "preprocessing_benchmark",
-    vendor: true,
+    defaults: ["libaudiopreprocessing-defaults"],
     srcs: ["preprocessing_benchmark.cpp"],
-    shared_libs: [
-        "libaudioutils",
-        "liblog",
-        "libutils",
-    ],
     static_libs: [
         "libaudiopreprocessing",
-        "webrtc_audio_processing",
-    ],
-    cflags: [
-        "-DWEBRTC_POSIX",
-        "-fvisibility=default",
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-    header_libs: [
-        "libaudioeffects",
-        "libhardware_headers",
-        "libwebrtc_absl_headers",
+        "libaudioutils",
     ],
 }
diff --git a/media/libeffects/preprocessing/tests/Android.bp b/media/libeffects/preprocessing/tests/Android.bp
index 8848e79..d80b135 100644
--- a/media/libeffects/preprocessing/tests/Android.bp
+++ b/media/libeffects/preprocessing/tests/Android.bp
@@ -11,23 +11,29 @@
 }
 
 cc_test {
-    name: "AudioPreProcessingTest",
-    vendor: true,
-    srcs: ["PreProcessingTest.cpp"],
-    shared_libs: [
-        "libaudioutils",
-        "liblog",
-        "libutils",
+    name: "EffectPreprocessingTest",
+    defaults: ["libaudiopreprocessing-defaults"],
+    gtest: true,
+    test_suites: ["device-tests"],
+    srcs: [
+        "EffectPreprocessingTest.cpp",
+        "EffectTestHelper.cpp",
     ],
     static_libs: [
         "libaudiopreprocessing",
-        "webrtc_audio_processing",
+        "libaudioutils",
     ],
-    header_libs: [
-        "libaudioeffects",
-        "libhardware_headers",
-    ],
+}
+
+cc_test {
+    name: "AudioPreProcessingTest",
+    defaults: ["libaudiopreprocessing-defaults"],
     gtest: false,
+    srcs: ["PreProcessingTest.cpp"],
+    static_libs: [
+        "libaudiopreprocessing",
+        "libaudioutils",
+    ],
 }
 
 cc_test {
diff --git a/media/libeffects/preprocessing/tests/EffectPreprocessingTest.cpp b/media/libeffects/preprocessing/tests/EffectPreprocessingTest.cpp
new file mode 100644
index 0000000..07006a1
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/EffectPreprocessingTest.cpp
@@ -0,0 +1,332 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "EffectTestHelper.h"
+
+#include <getopt.h>
+#include <stddef.h>
+#include <stdint.h>
+#include <tuple>
+#include <vector>
+
+#include <audio_effects/effect_aec.h>
+#include <audio_effects/effect_agc.h>
+#include <audio_effects/effect_agc2.h>
+#include <audio_effects/effect_ns.h>
+#include <log/log.h>
+
+constexpr effect_uuid_t kAGCUuid = {
+        0xaa8130e0, 0x66fc, 0x11e0, 0xbad0, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+constexpr effect_uuid_t kAGC2Uuid = {
+        0x89f38e65, 0xd4d2, 0x4d64, 0xad0e, {0x2b, 0x3e, 0x79, 0x9e, 0xa8, 0x86}};
+constexpr effect_uuid_t kAECUuid = {
+        0xbb392ec0, 0x8d4d, 0x11e0, 0xa896, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+constexpr effect_uuid_t kNSUuid = {
+        0xc06c8400, 0x8e06, 0x11e0, 0x9cb6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+
+static bool isAGCEffect(const effect_uuid_t* uuid) {
+    return uuid == &kAGCUuid;
+}
+static bool isAGC2Effect(const effect_uuid_t* uuid) {
+    return uuid == &kAGC2Uuid;
+}
+static bool isAECEffect(const effect_uuid_t* uuid) {
+    return uuid == &kAECUuid;
+}
+static bool isNSEffect(const effect_uuid_t* uuid) {
+    return uuid == &kNSUuid;
+}
+
+constexpr int kAGCTargetLevels[] = {0, -300, -500, -1000, -3100};
+
+constexpr int kAGCCompLevels[] = {0, -300, -500, -1000, -9000};
+
+constexpr size_t kAGC2FixedDigitalGains[] = {0, 3, 10, 20, 49};
+
+constexpr size_t kAGC2AdaptDigitalLevelEstimators[] = {0, 1};
+
+constexpr size_t kAGC2ExtraSaturationMargins[] = {0, 3, 10, 20, 100};
+
+constexpr size_t kAECEchoDelays[] = {0, 250, 500};
+
+constexpr size_t kNSLevels[] = {0, 1, 2, 3};
+
+struct AGCParams {
+    int targetLevel;
+    int compLevel;
+};
+
+struct AGC2Params {
+    size_t fixedDigitalGain;
+    size_t adaptDigiLevelEstimator;
+    size_t extraSaturationMargin;
+};
+
+struct AECParams {
+    size_t echoDelay;
+};
+
+struct NSParams {
+    size_t level;
+};
+
+struct PreProcParams {
+    const effect_uuid_t* uuid;
+    union {
+        AGCParams agcParams;
+        AGC2Params agc2Params;
+        AECParams aecParams;
+        NSParams nsParams;
+    };
+};
+
+// Create a list of pre-processing parameters to be used for testing
+static const std::vector<PreProcParams> kPreProcParams = [] {
+    std::vector<PreProcParams> result;
+
+    for (const auto targetLevel : kAGCTargetLevels) {
+        for (const auto compLevel : kAGCCompLevels) {
+            AGCParams agcParams = {.targetLevel = targetLevel, .compLevel = compLevel};
+            PreProcParams params = {.uuid = &kAGCUuid, .agcParams = agcParams};
+            result.push_back(params);
+        }
+    }
+
+    for (const auto fixedDigitalGain : kAGC2FixedDigitalGains) {
+        for (const auto adaptDigiLevelEstimator : kAGC2AdaptDigitalLevelEstimators) {
+            for (const auto extraSaturationMargin : kAGC2ExtraSaturationMargins) {
+                AGC2Params agc2Params = {.fixedDigitalGain = fixedDigitalGain,
+                                         .adaptDigiLevelEstimator = adaptDigiLevelEstimator,
+                                         .extraSaturationMargin = extraSaturationMargin};
+                PreProcParams params = {.uuid = &kAGC2Uuid, .agc2Params = agc2Params};
+                result.push_back(params);
+            }
+        }
+    }
+
+    for (const auto echoDelay : kAECEchoDelays) {
+        AECParams aecParams = {.echoDelay = echoDelay};
+        PreProcParams params = {.uuid = &kAECUuid, .aecParams = aecParams};
+        result.push_back(params);
+    }
+
+    for (const auto level : kNSLevels) {
+        NSParams nsParams = {.level = level};
+        PreProcParams params = {.uuid = &kNSUuid, .nsParams = nsParams};
+        result.push_back(params);
+    }
+    return result;
+}();
+
+static const size_t kNumPreProcParams = std::size(kPreProcParams);
+
+void setPreProcParams(const effect_uuid_t* uuid, EffectTestHelper& effect, size_t paramIdx) {
+    const PreProcParams* params = &kPreProcParams[paramIdx];
+    if (isAGCEffect(uuid)) {
+        const AGCParams* agcParams = &params->agcParams;
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AGC_PARAM_TARGET_LEVEL, agcParams->targetLevel));
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AGC_PARAM_COMP_GAIN, agcParams->compLevel));
+    } else if (isAGC2Effect(uuid)) {
+        const AGC2Params* agc2Params = &params->agc2Params;
+        ASSERT_NO_FATAL_FAILURE(
+                effect.setParam(AGC2_PARAM_FIXED_DIGITAL_GAIN, agc2Params->fixedDigitalGain));
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR,
+                                                agc2Params->adaptDigiLevelEstimator));
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN,
+                                                agc2Params->extraSaturationMargin));
+    } else if (isAECEffect(uuid)) {
+        const AECParams* aecParams = &params->aecParams;
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AEC_PARAM_ECHO_DELAY, aecParams->echoDelay));
+    } else if (isNSEffect(uuid)) {
+        const NSParams* nsParams = &params->nsParams;
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(NS_PARAM_LEVEL, nsParams->level));
+    }
+}
+
+typedef std::tuple<int, int, int, int> SingleEffectTestParam;
+class SingleEffectTest : public ::testing::TestWithParam<SingleEffectTestParam> {
+  public:
+    SingleEffectTest()
+        : mSampleRate(EffectTestHelper::kSampleRates[std::get<1>(GetParam())]),
+          mFrameCount(mSampleRate * EffectTestHelper::kTenMilliSecVal),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<2>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mChMask(EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+          mChannelCount(audio_channel_count_from_in_mask(mChMask)),
+          mParamIdx(std::get<3>(GetParam())),
+          mUuid(kPreProcParams[mParamIdx].uuid){};
+
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const size_t mChMask;
+    const size_t mChannelCount;
+    const size_t mParamIdx;
+    const effect_uuid_t* mUuid;
+};
+
+// Tests applying a single effect
+TEST_P(SingleEffectTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message() << " chMask: " << mChMask << " sampleRate: " << mSampleRate
+                                    << " loopCount: " << mLoopCount << " paramIdx " << mParamIdx);
+
+    EffectTestHelper effect(mUuid, mChMask, mSampleRate, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(effect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(effect.setConfig(isAECEffect(mUuid)));
+    ASSERT_NO_FATAL_FAILURE(setPreProcParams(mUuid, effect, mParamIdx));
+
+    // Initialize input buffer with deterministic pseudo-random values
+    std::vector<int16_t> input(mTotalFrameCount * mChannelCount);
+    std::vector<int16_t> output(mTotalFrameCount * mChannelCount);
+    std::vector<int16_t> farInput(mTotalFrameCount * mChannelCount);
+    std::minstd_rand gen(mChMask);
+    std::uniform_int_distribution<int16_t> dis(INT16_MIN, INT16_MAX);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+    if (isAECEffect(mUuid)) {
+        for (auto& farIn : farInput) {
+            farIn = dis(gen);
+        }
+    }
+    ASSERT_NO_FATAL_FAILURE(effect.process(input.data(), output.data(), isAECEffect(mUuid)));
+    if (isAECEffect(mUuid)) {
+        ASSERT_NO_FATAL_FAILURE(effect.process_reverse(farInput.data(), output.data()));
+    }
+    ASSERT_NO_FATAL_FAILURE(effect.releaseEffect());
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        PreProcTestAll, SingleEffectTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumChMasks),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+                           ::testing::Range(0, (int)kNumPreProcParams)));
+
+typedef std::tuple<int, int, int> SingleEffectComparisonTestParam;
+class SingleEffectComparisonTest
+    : public ::testing::TestWithParam<SingleEffectComparisonTestParam> {
+  public:
+    SingleEffectComparisonTest()
+        : mSampleRate(EffectTestHelper::kSampleRates[std::get<0>(GetParam())]),
+          mFrameCount(mSampleRate * EffectTestHelper::kTenMilliSecVal),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<1>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mParamIdx(std::get<2>(GetParam())),
+          mUuid(kPreProcParams[mParamIdx].uuid){};
+
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const size_t mParamIdx;
+    const effect_uuid_t* mUuid;
+};
+
+// Compares first channel in multi-channel output to mono output when same effect is applied
+TEST_P(SingleEffectComparisonTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message() << " sampleRate: " << mSampleRate
+                                    << " loopCount: " << mLoopCount << " paramIdx " << mParamIdx);
+
+    // Initialize mono input buffer with deterministic pseudo-random values
+    std::vector<int16_t> monoInput(mTotalFrameCount);
+    std::vector<int16_t> monoFarInput(mTotalFrameCount);
+
+    std::minstd_rand gen(mSampleRate);
+    std::uniform_int_distribution<int16_t> dis(INT16_MIN, INT16_MAX);
+    for (auto& in : monoInput) {
+        in = dis(gen);
+    }
+    if (isAECEffect(mUuid)) {
+        for (auto& farIn : monoFarInput) {
+            farIn = dis(gen);
+        }
+    }
+
+    // Apply effect on mono channel
+    EffectTestHelper monoEffect(mUuid, AUDIO_CHANNEL_INDEX_MASK_1, mSampleRate, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(monoEffect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(monoEffect.setConfig(isAECEffect(mUuid)));
+    ASSERT_NO_FATAL_FAILURE(setPreProcParams(mUuid, monoEffect, mParamIdx));
+
+    std::vector<int16_t> monoOutput(mTotalFrameCount);
+    ASSERT_NO_FATAL_FAILURE(
+            monoEffect.process(monoInput.data(), monoOutput.data(), isAECEffect(mUuid)));
+    if (isAECEffect(mUuid)) {
+        ASSERT_NO_FATAL_FAILURE(monoEffect.process_reverse(monoFarInput.data(), monoOutput.data()));
+    }
+    ASSERT_NO_FATAL_FAILURE(monoEffect.releaseEffect());
+
+    for (size_t chMask : EffectTestHelper::kChMasks) {
+        size_t channelCount = audio_channel_count_from_in_mask(chMask);
+
+        EffectTestHelper testEffect(mUuid, chMask, mSampleRate, mLoopCount);
+
+        ASSERT_NO_FATAL_FAILURE(testEffect.createEffect());
+        ASSERT_NO_FATAL_FAILURE(testEffect.setConfig(isAECEffect(mUuid)));
+        ASSERT_NO_FATAL_FAILURE(setPreProcParams(mUuid, testEffect, mParamIdx));
+
+        std::vector<int16_t> testInput(mTotalFrameCount * channelCount);
+        std::vector<int16_t> testFarInput(mTotalFrameCount * channelCount);
+
+        // Repeat mono channel data to all the channels
+        // adjust_channels() zero fills channels > 2, hence can't be used here
+        for (size_t i = 0; i < mTotalFrameCount; ++i) {
+            auto* fpInput = &testInput[i * channelCount];
+            std::fill(fpInput, fpInput + channelCount, monoInput[i]);
+        }
+        if (isAECEffect(mUuid)) {
+            for (size_t i = 0; i < mTotalFrameCount; ++i) {
+                auto* fpFarInput = &testFarInput[i * channelCount];
+                std::fill(fpFarInput, fpFarInput + channelCount, monoFarInput[i]);
+            }
+        }
+
+        std::vector<int16_t> testOutput(mTotalFrameCount * channelCount);
+        ASSERT_NO_FATAL_FAILURE(
+                testEffect.process(testInput.data(), testOutput.data(), isAECEffect(mUuid)));
+        if (isAECEffect(mUuid)) {
+            ASSERT_NO_FATAL_FAILURE(
+                    testEffect.process_reverse(testFarInput.data(), testOutput.data()));
+        }
+        ASSERT_NO_FATAL_FAILURE(testEffect.releaseEffect());
+
+        // Adjust the test output to mono channel
+        std::vector<int16_t> monoTestOutput(mTotalFrameCount);
+        adjust_channels(testOutput.data(), channelCount, monoTestOutput.data(), FCC_1,
+                        sizeof(int16_t), mTotalFrameCount * sizeof(int16_t) * channelCount);
+
+        ASSERT_EQ(0, memcmp(monoOutput.data(), monoTestOutput.data(),
+                            mTotalFrameCount * sizeof(int16_t)))
+                << "Multi-channel output (channel 0) does not match the mono reference output\n";
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        PreProcTestAll, SingleEffectComparisonTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+                           ::testing::Range(0, (int)kNumPreProcParams)));
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = RUN_ALL_TESTS();
+    ALOGV("Test result = %d", status);
+    return status;
+}
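For scale, the parameter grid built in this file works out to 82 entries: 5 x 5 AGC target/compression pairs, 5 x 2 x 5 AGC2 gain/estimator/margin triples, 3 AEC delays and 4 NS levels (25 + 50 + 3 + 4 = 82). With the index ranges passed to INSTANTIATE_TEST_SUITE_P, SingleEffectTest therefore expands to 12 channel masks x 9 sample rates x 2 loop counts x 82 parameter sets = 17,712 cases, and SingleEffectComparisonTest to 9 x 2 x 82 = 1,476.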
diff --git a/media/libeffects/preprocessing/tests/EffectTestHelper.cpp b/media/libeffects/preprocessing/tests/EffectTestHelper.cpp
new file mode 100644
index 0000000..79200b6
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/EffectTestHelper.cpp
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "EffectTestHelper.h"
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+void EffectTestHelper::createEffect() {
+    int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(mUuid, 1, 1, &mEffectHandle);
+    ASSERT_EQ(status, 0) << "create_effect returned an error " << status;
+}
+
+void EffectTestHelper::releaseEffect() {
+    int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(mEffectHandle);
+    ASSERT_EQ(status, 0) << "release_effect returned an error " << status;
+}
+
+void EffectTestHelper::setConfig(bool configReverse) {
+    effect_config_t config{};
+    config.inputCfg.samplingRate = config.outputCfg.samplingRate = mSampleRate;
+    config.inputCfg.channels = config.outputCfg.channels = mChMask;
+    config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+
+    int status = (*mEffectHandle)
+                         ->command(mEffectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
+                                   &config, &replySize, &reply);
+    ASSERT_EQ(status, 0) << "set_config returned an error " << status;
+    ASSERT_EQ(reply, 0) << "set_config reply non zero " << reply;
+
+    if (configReverse) {
+        int status = (*mEffectHandle)
+                             ->command(mEffectHandle, EFFECT_CMD_SET_CONFIG_REVERSE,
+                                       sizeof(effect_config_t), &config, &replySize, &reply);
+        ASSERT_EQ(status, 0) << "set_config_reverse returned an error " << status;
+        ASSERT_EQ(reply, 0) << "set_config_reverse reply non zero " << reply;
+    }
+
+    status = (*mEffectHandle)
+                     ->command(mEffectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+    ASSERT_EQ(status, 0) << "cmd_enable returned an error " << status;
+    ASSERT_EQ(reply, 0) << "cmd_enable reply non zero " << reply;
+}
+
+void EffectTestHelper::setParam(uint32_t type, uint32_t value) {
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    uint32_t paramData[2] = {type, value};
+    auto effectParam = (effect_param_t*)malloc(sizeof(effect_param_t) + sizeof(paramData));
+    memcpy(&effectParam->data[0], &paramData[0], sizeof(paramData));
+    effectParam->psize = sizeof(paramData[0]);
+    effectParam->vsize = sizeof(paramData[1]);
+    int status = (*mEffectHandle)
+                         ->command(mEffectHandle, EFFECT_CMD_SET_PARAM,
+                                   sizeof(effect_param_t) + sizeof(paramData), effectParam,
+                                   &replySize, &reply);
+    free(effectParam);
+    ASSERT_EQ(status, 0) << "set_param returned an error " << status;
+    ASSERT_EQ(reply, 0) << "set_param reply non zero " << reply;
+}
+
+void EffectTestHelper::process(int16_t* input, int16_t* output, bool setAecEchoDelay) {
+    audio_buffer_t inBuffer = {.frameCount = mFrameCount, .s16 = input};
+    audio_buffer_t outBuffer = {.frameCount = mFrameCount, .s16 = output};
+    for (size_t i = 0; i < mLoopCount; i++) {
+        if (setAecEchoDelay) ASSERT_NO_FATAL_FAILURE(setParam(AEC_PARAM_ECHO_DELAY, kAECDelay));
+        int status = (*mEffectHandle)->process(mEffectHandle, &inBuffer, &outBuffer);
+        ASSERT_EQ(status, 0) << "process returned an error " << status;
+
+        inBuffer.s16 += mFrameCount * mChannelCount;
+        outBuffer.s16 += mFrameCount * mChannelCount;
+    }
+}
+
+void EffectTestHelper::process_reverse(int16_t* farInput, int16_t* output) {
+    audio_buffer_t farInBuffer = {.frameCount = mFrameCount, .s16 = farInput};
+    audio_buffer_t outBuffer = {.frameCount = mFrameCount, .s16 = output};
+    for (size_t i = 0; i < mLoopCount; i++) {
+        int status = (*mEffectHandle)->process_reverse(mEffectHandle, &farInBuffer, &outBuffer);
+        ASSERT_EQ(status, 0) << "process_reverse returned an error " << status;
+
+        farInBuffer.s16 += mFrameCount * mChannelCount;
+        outBuffer.s16 += mFrameCount * mChannelCount;
+    }
+}
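setParam() in this helper packs the {type, value} pair into the blob layout expected by EFFECT_CMD_SET_PARAM: an effect_param_t header carrying psize/vsize followed by the parameter bytes and then the value bytes (with the parameter padded to a 32-bit boundary in the general case). A self-contained sketch of that packing, with an illustrative stand-in for the real header type from hardware/audio_effect.h:

```cpp
#include <cstdint>
#include <cstring>
#include <vector>

// Illustrative stand-in for the fixed part of effect_param_t (sketch only).
struct ParamHeader {
    std::int32_t  status;  // filled in by the effect on return
    std::uint32_t psize;   // size of the parameter field in data[]
    std::uint32_t vsize;   // size of the value field in data[]
};

// Header + parameter + value. With a 4-byte parameter no padding is required,
// which is why setParam() can copy the two uint32_t words back to back.
std::vector<std::uint8_t> packParam(std::uint32_t type, std::uint32_t value) {
    std::vector<std::uint8_t> blob(sizeof(ParamHeader) + sizeof(type) + sizeof(value));
    const ParamHeader hdr{0, static_cast<std::uint32_t>(sizeof(type)),
                          static_cast<std::uint32_t>(sizeof(value))};
    std::memcpy(blob.data(), &hdr, sizeof(hdr));
    std::memcpy(blob.data() + sizeof(hdr), &type, sizeof(type));
    std::memcpy(blob.data() + sizeof(hdr) + sizeof(type), &value, sizeof(value));
    return blob;
}
```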
diff --git a/media/libeffects/preprocessing/tests/EffectTestHelper.h b/media/libeffects/preprocessing/tests/EffectTestHelper.h
new file mode 100644
index 0000000..117cf7b
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/EffectTestHelper.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <array>
+#include <audio_effects/effect_aec.h>
+#include <audio_utils/channels.h>
+#include <audio_utils/primitives.h>
+#include <climits>
+#include <cstdlib>
+#include <gtest/gtest.h>
+#include <hardware/audio_effect.h>
+#include <log/log.h>
+#include <random>
+#include <stdint.h>
+#include <system/audio.h>
+#include <vector>
+
+template <typename T>
+static float computeSnr(const T* ref, const T* tst, size_t count) {
+    double signal{};
+    double noise{};
+
+    for (size_t i = 0; i < count; ++i) {
+        const double value(ref[i]);
+        const double diff(tst[i] - value);
+        signal += value * value;
+        noise += diff * diff;
+    }
+    // Initialized to large value to handle
+    // cases where ref and tst match exactly
+    float snr = FLT_MAX;
+    if (signal > 0.0f && noise > 0.0f) {
+        snr = 10.f * log10(signal / noise);
+    }
+    return snr;
+}
+
+class EffectTestHelper {
+  public:
+    EffectTestHelper(const effect_uuid_t* uuid, size_t chMask, size_t sampleRate, size_t loopCount)
+        : mUuid(uuid),
+          mChMask(chMask),
+          mChannelCount(audio_channel_count_from_in_mask(mChMask)),
+          mSampleRate(sampleRate),
+          mFrameCount(mSampleRate * kTenMilliSecVal),
+          mLoopCount(loopCount) {}
+    void createEffect();
+    void releaseEffect();
+    void setConfig(bool configReverse);
+    void setParam(uint32_t type, uint32_t val);
+    void process(int16_t* input, int16_t* output, bool setAecEchoDelay);
+    void process_reverse(int16_t* farInput, int16_t* output);
+
+    // Corresponds to SNR for 1 bit difference between two int16_t signals
+    static constexpr float kSNRThreshold = 90.308998;
+
+    static constexpr audio_channel_mask_t kChMasks[] = {
+            AUDIO_CHANNEL_IN_MONO,
+            AUDIO_CHANNEL_IN_STEREO,
+            AUDIO_CHANNEL_IN_FRONT_BACK,
+            AUDIO_CHANNEL_IN_6,
+            AUDIO_CHANNEL_IN_2POINT0POINT2,
+            AUDIO_CHANNEL_IN_2POINT1POINT2,
+            AUDIO_CHANNEL_IN_3POINT0POINT2,
+            AUDIO_CHANNEL_IN_3POINT1POINT2,
+            AUDIO_CHANNEL_IN_5POINT1,
+            AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO,
+            AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO,
+            AUDIO_CHANNEL_IN_VOICE_CALL_MONO,
+    };
+
+    static constexpr float kTenMilliSecVal = 0.01;
+
+    static constexpr size_t kNumChMasks = std::size(kChMasks);
+
+    static constexpr size_t kSampleRates[] = {8000,  11025, 12000, 16000, 22050,
+                                              24000, 32000, 44100, 48000};
+
+    static constexpr size_t kNumSampleRates = std::size(kSampleRates);
+
+    static constexpr size_t kLoopCounts[] = {1, 4};
+
+    static constexpr size_t kNumLoopCounts = std::size(kLoopCounts);
+
+    static constexpr size_t kAECDelay = 0;
+
+  private:
+    const effect_uuid_t* mUuid;
+    const size_t mChMask;
+    const size_t mChannelCount;
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    effect_handle_t mEffectHandle{};
+};
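The kSNRThreshold above is the signal-to-noise ratio of a full-scale int16_t signal against a one-LSB error: 20 * log10(32767 / 1) ≈ 90.309 dB, equivalently 10 * log10(32767^2); computeSnr() is accordingly meant to report ten times the base-10 logarithm of the power ratio.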
diff --git a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
index 5f223c9..3bd93f8 100644
--- a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
+++ b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
@@ -24,6 +24,7 @@
 #include <audio_effects/effect_agc.h>
 #include <audio_effects/effect_agc2.h>
 #include <audio_effects/effect_ns.h>
+#include <audio_utils/channels.h>
 #include <log/log.h>
 
 // This is the only symbol that needs to be imported
@@ -55,7 +56,9 @@
     ARG_NS_LVL,
     ARG_AGC2_GAIN,
     ARG_AGC2_LVL,
-    ARG_AGC2_SAT_MGN
+    ARG_AGC2_SAT_MGN,
+    ARG_FILE_CHANNELS,
+    ARG_MONO_MODE
 };
 
 struct preProcConfigParams_t {
@@ -68,6 +71,8 @@
     float agc2SaturationMargin = 2.f;  // in dB
     int agc2Level = 0;                 // either kRms(0) or kPeak(1)
     int aecDelay = 0;  // in ms
+    int fileChannels = 1;
+    int monoMode = 0;
 };
 
 const effect_uuid_t kPreProcUuids[PREPROC_NUM_EFFECTS] = {
@@ -106,7 +111,7 @@
     printf("\n           Prints this usage information");
     printf("\n     --fs <sampling_freq>");
     printf("\n           Sampling frequency in Hz, default 16000.");
-    printf("\n     -ch_mask <channel_mask>\n");
+    printf("\n     --ch_mask <channel_mask>\n");
     printf("\n         0  - AUDIO_CHANNEL_IN_MONO");
     printf("\n         1  - AUDIO_CHANNEL_IN_STEREO");
     printf("\n         2  - AUDIO_CHANNEL_IN_FRONT_BACK");
@@ -144,6 +149,10 @@
     printf("\n           AGC Adaptive Digital Saturation Margin in dB, default value 2dB");
     printf("\n     --aec_delay <delay>");
     printf("\n           AEC delay value in ms, default value 0ms");
+    printf("\n     --fch <fileChannels>");
+    printf("\n           Number of channels in the input file, default 1");
+    printf("\n     --mono");
+    printf("\n           Replicate channel 0 to all other channels (mono mode)");
     printf("\n");
 }
 
@@ -189,10 +198,17 @@
         printUsage();
         return EXIT_FAILURE;
     }
+
+    // Print the arguments passed
+    for (int i = 1; i < argc; i++) {
+        printf("%s ", argv[i]);
+    }
+
     const char* inputFile = nullptr;
     const char* outputFile = nullptr;
     const char* farFile = nullptr;
     int effectEn[PREPROC_NUM_EFFECTS] = {0};
+    struct preProcConfigParams_t preProcCfgParams {};
 
     const option long_opts[] = {
             {"help", no_argument, nullptr, ARG_HELP},
@@ -212,9 +228,10 @@
             {"agc", no_argument, &effectEn[PREPROC_AGC], 1},
             {"agc2", no_argument, &effectEn[PREPROC_AGC2], 1},
             {"ns", no_argument, &effectEn[PREPROC_NS], 1},
+            {"fch", required_argument, nullptr, ARG_FILE_CHANNELS},
+            {"mono", no_argument, &preProcCfgParams.monoMode, 1},
             {nullptr, 0, nullptr, 0},
     };
-    struct preProcConfigParams_t preProcCfgParams {};
 
     while (true) {
         const int opt = getopt_long(argc, (char* const*)argv, "i:o:", long_opts, nullptr);
@@ -279,6 +296,14 @@
                 preProcCfgParams.nsLevel = atoi(optarg);
                 break;
             }
+            case ARG_FILE_CHANNELS: {
+                preProcCfgParams.fileChannels = atoi(optarg);
+                break;
+            }
+            case ARG_MONO_MODE: {
+                preProcCfgParams.monoMode = 1;
+                break;
+            }
             default:
                 break;
         }
@@ -402,29 +427,52 @@
     // Process Call
     const int frameLength = (int)(preProcCfgParams.samplingFreq * kTenMilliSecVal);
     const int ioChannelCount = audio_channel_count_from_in_mask(preProcCfgParams.chMask);
+    const int fileChannelCount = preProcCfgParams.fileChannels;
     const int ioFrameSize = ioChannelCount * sizeof(short);
+    const int inFrameSize = fileChannelCount * sizeof(short);
     int frameCounter = 0;
     while (true) {
         std::vector<short> in(frameLength * ioChannelCount);
         std::vector<short> out(frameLength * ioChannelCount);
         std::vector<short> farIn(frameLength * ioChannelCount);
-        size_t samplesRead = fread(in.data(), ioFrameSize, frameLength, inputFp.get());
+        size_t samplesRead = fread(in.data(), inFrameSize, frameLength, inputFp.get());
         if (samplesRead == 0) {
             break;
         }
+        if (fileChannelCount != ioChannelCount) {
+            adjust_channels(in.data(), fileChannelCount, in.data(), ioChannelCount, sizeof(short),
+                            frameLength * inFrameSize);
+            if (preProcCfgParams.monoMode == 1) {
+                for (int i = 0; i < frameLength; ++i) {
+                    auto* fp = &in[i * ioChannelCount];
+                    std::fill(fp + 1, fp + ioChannelCount, *fp);  // replicate ch 0
+                }
+            }
+        }
         audio_buffer_t inputBuffer, outputBuffer;
         audio_buffer_t farInBuffer{};
-        inputBuffer.frameCount = samplesRead;
-        outputBuffer.frameCount = samplesRead;
+        inputBuffer.frameCount = frameLength;
+        outputBuffer.frameCount = frameLength;
         inputBuffer.s16 = in.data();
         outputBuffer.s16 = out.data();
 
         if (farFp != nullptr) {
-            samplesRead = fread(farIn.data(), ioFrameSize, frameLength, farFp.get());
+            samplesRead = fread(farIn.data(), inFrameSize, frameLength, farFp.get());
             if (samplesRead == 0) {
                 break;
             }
-            farInBuffer.frameCount = samplesRead;
+            if (fileChannelCount != ioChannelCount) {
+                adjust_channels(farIn.data(), fileChannelCount, farIn.data(), ioChannelCount,
+                                sizeof(short), frameLength * inFrameSize);
+                if (preProcCfgParams.monoMode == 1) {
+                    for (int i = 0; i < frameLength; ++i) {
+                        auto* fp = &farIn[i * ioChannelCount];
+                        std::fill(fp + 1, fp + ioChannelCount, *fp);  // replicate ch 0
+                    }
+                }
+            }
+
+            farInBuffer.frameCount = frameLength;
             farInBuffer.s16 = farIn.data();
         }
 
@@ -458,8 +506,12 @@
             }
         }
         if (outputFp != nullptr) {
+            if (fileChannelCount != ioChannelCount) {
+                adjust_channels(out.data(), ioChannelCount, out.data(), fileChannelCount,
+                                sizeof(short), frameLength * ioFrameSize);
+            }
             size_t samplesWritten =
-                    fwrite(out.data(), ioFrameSize, outputBuffer.frameCount, outputFp.get());
+                    fwrite(out.data(), inFrameSize, outputBuffer.frameCount, outputFp.get());
             if (samplesWritten != outputBuffer.frameCount) {
                 ALOGE("\nError: Output file writing failed");
                 break;
@@ -467,6 +519,7 @@
         }
         frameCounter += frameLength;
     }
+    printf("frameCounter: [%d]\n", frameCounter);
     // Release all the effect handles created
     for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
         if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle[i]);
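The new --fch/--mono handling widens the file data to the I/O channel count with adjust_channels() and, in mono mode, copies channel 0 into every other channel of each frame. A compile-on-its-own sketch of the replication step (adjust_channels() itself is used exactly as shown in the patch and is not re-implemented here):

```cpp
#include <algorithm>
#include <cstddef>
#include <vector>

// After a buffer has been widened to channelCount channels per frame,
// overwrite channels 1..N-1 of every frame with channel 0 ("mono mode").
void replicateChannelZero(std::vector<short>& buf, std::size_t frameCount,
                          std::size_t channelCount) {
    for (std::size_t i = 0; i < frameCount; ++i) {
        short* frame = &buf[i * channelCount];
        std::fill(frame + 1, frame + channelCount, frame[0]);
    }
}
```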
diff --git a/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh b/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..35da13e
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh
@@ -0,0 +1,119 @@
+#!/bin/bash
+#
+# Run tests in this directory.
+#
+
+if [ -z "$ANDROID_BUILD_TOP" ]; then
+    echo "Android build environment not set"
+    exit -1
+fi
+
+# ensure we have mm
+. $ANDROID_BUILD_TOP/build/envsetup.sh
+
+mm -j
+
+echo "waiting for device"
+
+adb root && adb wait-for-device remount
+
+# location of test files
+testdir="/data/local/tmp/AudioPreProcessingTest"
+
+echo "========================================"
+echo "testing PreProcessing modules"
+adb shell mkdir -p $testdir
+adb push $ANDROID_BUILD_TOP/frameworks/av/media/libeffects/res/raw/sinesweepraw.raw $testdir
+adb push $OUT/testcases/snr/arm64/snr $testdir
+
+E_VAL=1
+if [ -z "$1" ]
+then
+    cmds=("adb push $OUT/testcases/AudioPreProcessingTest/arm64/AudioPreProcessingTest $testdir"
+          "adb push $OUT/testcases/AudioPreProcessingTest/arm/AudioPreProcessingTest $testdir"
+)
+elif [ "$1" == "32" ]
+then
+    cmds="adb push $OUT/testcases/AudioPreProcessingTest/arm/AudioPreProcessingTest $testdir"
+elif [ "$1" == "64" ]
+then
+    cmds="adb push $OUT/testcases/AudioPreProcessingTest/arm64/AudioPreProcessingTest $testdir"
+else
+    echo ""
+    echo "Invalid \"val\""
+    echo "Usage:"
+    echo "      "$0" [val]"
+    echo "      where, val can be either 32 or 64."
+    echo ""
+    echo "      If val is not specified then both 32 bit and 64 bit binaries"
+    echo "      are tested."
+    exit $E_VAL
+fi
+
+flags_arr=(
+    "--agc --mono"
+    "--ns --mono"
+    "--agc2 --mono"
+    "--aec --mono"
+)
+
+fs_arr=(
+    8000
+    11025
+    12000
+    16000
+    22050
+    24000
+    32000
+    44100
+    48000
+)
+
+# run multichannel effects at different configs, saving only the mono channel
+error_count=0
+test_count=0
+for cmd in "${cmds[@]}"
+do
+    $cmd
+    for flags in "${flags_arr[@]}"
+    do
+        for fs in ${fs_arr[*]}
+        do
+            for chMask in {0..7}
+            do
+                adb shell $testdir/AudioPreProcessingTest $flags \
+                    --i $testdir/sinesweepraw.raw --far $testdir/sinesweepraw.raw \
+                    --output $testdir/sinesweep_$((chMask))_$((fs)).raw --ch_mask $chMask \
+                    --fs $fs --fch 1
+
+                shell_ret=$?
+                if [ $shell_ret -ne 0 ]; then
+                    echo "error: AudioPreProcessingTest failed with status $shell_ret"
+                    ((++error_count))
+                fi
+
+
+                # single channel files should be identical to higher channel
+                # computation (first channel).
+                if  [[ "$chMask" -gt 1 ]]
+                then
+                    adb shell cmp $testdir/sinesweep_1_$((fs)).raw \
+                        $testdir/sinesweep_$((chMask))_$((fs)).raw
+                fi
+
+                # cmp return EXIT_FAILURE on mismatch.
+                shell_ret=$?
+                if [ $shell_ret -ne 0 ]; then
+                    echo "error: $shell_ret"
+                    ((++error_count))
+                fi
+                ((++test_count))
+            done
+        done
+    done
+done
+
+adb shell rm -r $testdir
+echo "$test_count tests performed"
+echo "$error_count errors"
+exit $error_count
diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp
index c010d68..be9f8c0 100644
--- a/media/libeffects/proxy/EffectProxy.cpp
+++ b/media/libeffects/proxy/EffectProxy.cpp
@@ -116,6 +116,16 @@
         pContext->sube[SUB_FX_OFFLOAD] = sube[1];
         pContext->desc[SUB_FX_OFFLOAD] = desc[1];
         pContext->aeli[SUB_FX_OFFLOAD] = aeli[1];
+    } else {
+        ALOGE("Both effects have (or don't have) EFFECT_FLAG_HW_ACC_TUNNEL flag");
+        delete[] sube;
+        delete[] desc;
+        delete[] aeli;
+        delete[] pContext->sube;
+        delete[] pContext->desc;
+        delete[] pContext->aeli;
+        delete pContext;
+        return -EINVAL;
     }
     delete[] desc;
     delete[] aeli;
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 48b5391..6214f5e 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -43,6 +43,12 @@
             enabled: false,
         },
     },
+    header_libs: [
+        "libbinder_headers",
+    ],
+    export_header_lib_headers: [
+        "libbinder_headers",
+    ],
     apex_available: [
         "//apex_available:platform",
         "com.android.media",
diff --git a/media/libmedia/tests/fuzzer/Android.bp b/media/libmedia/tests/fuzzer/Android.bp
new file mode 100644
index 0000000..c03b5b1
--- /dev/null
+++ b/media/libmedia/tests/fuzzer/Android.bp
@@ -0,0 +1,19 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libmedia_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libmedia_license"],
+}
+
+cc_fuzz {
+  name: "libmedia_metadata_fuzzer",
+  srcs: [
+    "libmedia_metadata_fuzzer.cpp",
+  ],
+  shared_libs: [
+    "libmedia",
+    "libbinder",
+  ],
+}
diff --git a/media/libmedia/tests/fuzzer/libmedia_metadata_fuzzer.cpp b/media/libmedia/tests/fuzzer/libmedia_metadata_fuzzer.cpp
new file mode 100644
index 0000000..058e4e5
--- /dev/null
+++ b/media/libmedia/tests/fuzzer/libmedia_metadata_fuzzer.cpp
@@ -0,0 +1,52 @@
+// This program fuzzes Metadata.cpp
+
+#include <stddef.h>
+#include <stdint.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/Metadata.h>
+#include <binder/Parcel.h>
+
+using namespace android;
+using namespace media;
+
+static const float want_prob = 0.5;
+
+bool bytesRemain(FuzzedDataProvider *fdp);
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+    Parcel p;
+    Metadata md = Metadata(&p);
+
+    md.appendHeader();
+    while (bytesRemain(&fdp)) {
+
+        float got_prob = fdp.ConsumeProbability<float>();
+        if (!bytesRemain(&fdp)) {
+            break;
+        }
+
+        if (got_prob < want_prob) {
+            int32_t key_bool = fdp.ConsumeIntegral<int32_t>();
+            if (!bytesRemain(&fdp)) {
+                break;
+            }
+            bool val_bool = fdp.ConsumeBool();
+            md.appendBool(key_bool, val_bool);
+        } else {
+            int32_t key_int32 = fdp.ConsumeIntegral<int32_t>();
+            if (!bytesRemain(&fdp)) {
+                break;
+            }
+            int32_t val_int32 = fdp.ConsumeIntegral<int32_t>();
+            md.appendInt32(key_int32, val_int32);
+        }
+        md.updateLength();
+    }
+    md.resetParcel();
+    return 0;
+}
+
+bool bytesRemain(FuzzedDataProvider *fdp){
+    return fdp->remaining_bytes() > 0;
+}
\ No newline at end of file
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 84388c9..674df17 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -139,6 +139,7 @@
 #define AMEDIAMETRICS_PROP_SESSIONID      "sessionId"      // int32
 #define AMEDIAMETRICS_PROP_SHARINGMODE    "sharingMode"    // string value, "exclusive", shared"
 #define AMEDIAMETRICS_PROP_SOURCE         "source"         // string (AudioAttributes)
+#define AMEDIAMETRICS_PROP_STARTTHRESHOLDFRAMES "startThresholdFrames" // int32 (AudioTrack)
 #define AMEDIAMETRICS_PROP_STARTUPMS      "startupMs"      // double value
 // State is "ACTIVE" or "STOPPED" for AudioRecord
 #define AMEDIAMETRICS_PROP_STATE          "state"          // string
@@ -181,6 +182,7 @@
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETMODE    "setMode" // AudioFlinger
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETBUFFERSIZE    "setBufferSize" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYBACKPARAM "setPlaybackParam" // AudioTrack
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETSTARTTHRESHOLD "setStartThreshold" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOICEVOLUME   "setVoiceVolume" // AudioFlinger
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOLUME  "setVolume"  // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_START      "start"  // AudioTrack, AudioRecord
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 9ab117a..c5d3c1d 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -35,6 +35,7 @@
         "android.hardware.media.c2@1.0",
         "android.hardware.media.omx@1.0",
         "libbase",
+        "libactivitymanager_aidl",
         "libandroid_net",
         "libaudioclient",
         "libbinder",
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 4d90d98..7cda2fb 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1822,8 +1822,7 @@
 //static
 void MediaPlayerService::AudioOutput::setMinBufferCount()
 {
-    char value[PROPERTY_VALUE_MAX];
-    if (property_get("ro.kernel.qemu", value, 0)) {
+    if (property_get_bool("ro.boot.qemu", false)) {
         mIsOnEmulator = true;
         mMinBufferCount = 12;  // to prevent systematic buffer underrun for emulator
     }
diff --git a/media/libnbaio/AudioStreamInSource.cpp b/media/libnbaio/AudioStreamInSource.cpp
index 1054b68..ca98b28 100644
--- a/media/libnbaio/AudioStreamInSource.cpp
+++ b/media/libnbaio/AudioStreamInSource.cpp
@@ -46,13 +46,11 @@
         status_t result;
         result = mStream->getBufferSize(&mStreamBufferSizeBytes);
         if (result != OK) return result;
-        audio_format_t streamFormat;
-        uint32_t sampleRate;
-        audio_channel_mask_t channelMask;
-        result = mStream->getAudioProperties(&sampleRate, &channelMask, &streamFormat);
+        audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+        result = mStream->getAudioProperties(&config);
         if (result != OK) return result;
-        mFormat = Format_from_SR_C(sampleRate,
-                audio_channel_count_from_in_mask(channelMask), streamFormat);
+        mFormat = Format_from_SR_C(config.sample_rate,
+                audio_channel_count_from_in_mask(config.channel_mask), config.format);
         mFrameSize = Format_frameSize(mFormat);
     }
     return NBAIO_Source::negotiate(offers, numOffers, counterOffers, numCounterOffers);
diff --git a/media/libnbaio/AudioStreamOutSink.cpp b/media/libnbaio/AudioStreamOutSink.cpp
index 8564899..581867f 100644
--- a/media/libnbaio/AudioStreamOutSink.cpp
+++ b/media/libnbaio/AudioStreamOutSink.cpp
@@ -44,13 +44,11 @@
         status_t result;
         result = mStream->getBufferSize(&mStreamBufferSizeBytes);
         if (result != OK) return result;
-        audio_format_t streamFormat;
-        uint32_t sampleRate;
-        audio_channel_mask_t channelMask;
-        result = mStream->getAudioProperties(&sampleRate, &channelMask, &streamFormat);
+        audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+        result = mStream->getAudioProperties(&config);
         if (result != OK) return result;
-        mFormat = Format_from_SR_C(sampleRate,
-                audio_channel_count_from_out_mask(channelMask), streamFormat);
+        mFormat = Format_from_SR_C(config.sample_rate,
+                audio_channel_count_from_out_mask(config.channel_mask), config.format);
         mFrameSize = Format_frameSize(mFormat);
     }
     return NBAIO_Sink::negotiate(offers, numOffers, counterOffers, numCounterOffers);
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 71a4ad8..224ec8b 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -5683,15 +5683,18 @@
     int32_t range, standard, transfer;
     convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer);
 
+    int32_t dsRange, dsStandard, dsTransfer;
+    getColorConfigFromDataSpace(dataSpace, &dsRange, &dsStandard, &dsTransfer);
+
     // if some aspects are unspecified, use dataspace fields
     if (range == 0) {
-        range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
+        range = dsRange;
     }
     if (standard == 0) {
-        standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
+        standard = dsStandard;
     }
     if (transfer == 0) {
-        transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
+        transfer = dsTransfer;
     }
 
     mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 9b3f420..4e34a26 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -797,7 +797,7 @@
     mStartTimeUs = 0;
     mNumInputBuffers = 0;
     mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-    mEncoderDataSpace = HAL_DATASPACE_V0_BT709;
+    mEncoderDataSpace = mBufferDataSpace = HAL_DATASPACE_V0_BT709;
 
     if (meta) {
         int64_t startTimeUs;
@@ -817,6 +817,7 @@
         }
         if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
             ALOGI("Using encoder data space: %#x", mEncoderDataSpace);
+            mBufferDataSpace = mEncoderDataSpace;
         }
     }
 
@@ -1114,6 +1115,11 @@
         (*buffer)->setObserver(this);
         (*buffer)->add_ref();
         (*buffer)->meta_data().setInt64(kKeyTime, frameTime);
+        if (mBufferDataSpace != mEncoderDataSpace) {
+            ALOGD("Data space updated to %x", mBufferDataSpace);
+            (*buffer)->meta_data().setInt32(kKeyColorSpace, mBufferDataSpace);
+            mEncoderDataSpace = mBufferDataSpace;
+        }
     }
     return OK;
 }
@@ -1391,6 +1397,7 @@
     // Find a available memory slot to store the buffer as VideoNativeMetadata.
     sp<IMemory> data = *mMemoryBases.begin();
     mMemoryBases.erase(mMemoryBases.begin());
+    mBufferDataSpace = buffer.mDataSpace;
 
     ssize_t offset;
     size_t size;
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index d99596e..b2fae96 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -4217,13 +4217,20 @@
 void MPEG4Writer::Track::writeColrBox() {
     ColorAspects aspects;
     memset(&aspects, 0, sizeof(aspects));
+    // Color metadata may have changed.
+    sp<MetaData> meta = mSource->getFormat();
     // TRICKY: using | instead of || because we want to execute all findInt32-s
-    if (mMeta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries)
-            | mMeta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer)
-            | mMeta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs)
-            | mMeta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange)) {
+    if (meta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries)
+            | meta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer)
+            | meta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs)
+            | meta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange)) {
         int32_t primaries, transfer, coeffs;
         bool fullRange;
+        ALOGV("primaries=%s transfer=%s matrix=%s range=%s",
+                asString(aspects.mPrimaries),
+                asString(aspects.mTransfer),
+                asString(aspects.mMatrixCoeffs),
+                asString(aspects.mRange));
         ColorUtils::convertCodecColorAspectsToIsoAspects(
                 aspects, &primaries, &transfer, &coeffs, &fullRange);
         mOwner->beginBox("colr");
@@ -4233,6 +4240,8 @@
         mOwner->writeInt16(coeffs);
         mOwner->writeInt8(int8_t(fullRange ? 0x80 : 0x0));
         mOwner->endBox(); // colr
+    } else {
+        ALOGV("no color information");
     }
 }
 
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index b0b7978..8e721d4 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -1981,6 +1981,22 @@
     return OK;
 }
 
+status_t MediaCodec::querySupportedVendorParameters(std::vector<std::string> *names) {
+    return mCodec->querySupportedParameters(names);
+}
+
+status_t MediaCodec::describeParameter(const std::string &name, CodecParameterDescriptor *desc) {
+    return mCodec->describeParameter(name, desc);
+}
+
+status_t MediaCodec::subscribeToVendorParameters(const std::vector<std::string> &names) {
+    return mCodec->subscribeToParameters(names);
+}
+
+status_t MediaCodec::unsubscribeFromVendorParameters(const std::vector<std::string> &names) {
+    return mCodec->unsubscribeFromParameters(names);
+}
+
 void MediaCodec::requestActivityNotification(const sp<AMessage> &notify) {
     sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this);
     msg->setMessage("notify", notify);
@@ -2244,6 +2260,8 @@
                                 }
                                 postPendingRepliesAndDeferredMessages(origin + ":dead");
                                 sendErrorResponse = false;
+                            } else if (!mReplyID) {
+                                sendErrorResponse = false;
                             }
                             break;
                         }
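
For reference, a minimal, hypothetical usage sketch of the new vendor-parameter entry points added above (illustrative only, not part of this patch); |codec| is assumed to be a valid, configured MediaCodec, and the parameter names returned are vendor-defined:
```
// Illustrative sketch only; not part of the change above.
#define LOG_TAG "VendorParamSketch"
#include <string>
#include <vector>
#include <utils/Log.h>
#include <media/stagefright/CodecBase.h>   // CodecParameterDescriptor
#include <media/stagefright/MediaCodec.h>

using namespace android;

static void dumpAndSubscribeVendorParameters(const sp<MediaCodec> &codec) {
    std::vector<std::string> names;
    if (codec->querySupportedVendorParameters(&names) != OK) {
        return;  // component does not expose vendor parameters
    }
    for (const std::string &name : names) {
        CodecParameterDescriptor desc;
        if (codec->describeParameter(name, &desc) == OK) {
            ALOGD("vendor parameter '%s' has type %d", desc.name.c_str(), (int)desc.type);
        }
    }
    // After subscribing, changes to these parameters surface as output format changes.
    codec->subscribeToVendorParameters(names);
}
```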
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index bc656a2..6c4addf 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -30,6 +30,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecConstants.h>
@@ -714,6 +715,9 @@
 status_t MediaCodecSource::feedEncoderInputBuffers() {
     MediaBufferBase* mbuf = NULL;
     while (!mAvailEncoderInputIndices.empty() && mPuller->readBuffer(&mbuf)) {
+        if (!mEncoder) {
+            return BAD_VALUE;
+        }
         size_t bufferIndex = *mAvailEncoderInputIndices.begin();
         mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());
 
@@ -768,6 +772,23 @@
             memcpy(inbuf->data(), mbuf->data(), size);
 
             if (mIsVideo) {
+                int32_t ds = 0;
+                if (mbuf->meta_data().findInt32(kKeyColorSpace, &ds)
+                        && ds != HAL_DATASPACE_UNKNOWN) {
+                    android_dataspace dataspace = static_cast<android_dataspace>(ds);
+                    ColorUtils::convertDataSpaceToV0(dataspace);
+                    ALOGD("Updating dataspace to %x", dataspace);
+                    int32_t standard, transfer, range;
+                    ColorUtils::getColorConfigFromDataSpace(
+                            dataspace, &range, &standard, &transfer);
+                    sp<AMessage> msg = new AMessage;
+                    msg->setInt32(KEY_COLOR_STANDARD, standard);
+                    msg->setInt32(KEY_COLOR_TRANSFER, transfer);
+                    msg->setInt32(KEY_COLOR_RANGE, range);
+                    msg->setInt32("android._dataspace", dataspace);
+                    mEncoder->setParameters(msg);
+                }
+
                 // video encoder will release MediaBuffer when done
                 // with underlying data.
                 inbuf->meta()->setObject("mediaBufferHolder", new MediaBufferHolder(mbuf));
@@ -1130,7 +1151,7 @@
         if (mFlags & FLAG_USE_SURFACE_INPUT) {
             sp<AMessage> params = new AMessage;
             params->setInt64(PARAMETER_KEY_OFFSET_TIME, mInputBufferTimeOffsetUs);
-            err = mEncoder->setParameters(params);
+            err = mEncoder ? mEncoder->setParameters(params) : BAD_VALUE;
         }
 
         sp<AMessage> response = new AMessage;
@@ -1150,7 +1171,7 @@
         if (mFlags & FLAG_USE_SURFACE_INPUT) {
             sp<AMessage> params = new AMessage;
             params->setInt64("stop-time-us", stopTimeUs);
-            err = mEncoder->setParameters(params);
+            err = mEncoder ? mEncoder->setParameters(params) : BAD_VALUE;
         } else {
             err = mPuller->setStopTimeUs(stopTimeUs);
         }
diff --git a/media/libstagefright/OWNERS b/media/libstagefright/OWNERS
index 819389d..0cc2294 100644
--- a/media/libstagefright/OWNERS
+++ b/media/libstagefright/OWNERS
@@ -4,4 +4,8 @@
 lajos@google.com
 marcone@google.com
 taklee@google.com
-wonsik@google.com
\ No newline at end of file
+wonsik@google.com
+
+# LON
+olly@google.com
+andrewlewis@google.com
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 1c4f5ac..bd9a694 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -47,6 +47,16 @@
 #include <media/AudioParameter.h>
 #include <system/audio.h>
 
+// TODO : Remove the defines once mainline media is built against NDK >= 31.
+// The mp4 extractor is part of mainline and builds against NDK 29 as of
+// writing. These keys are available only from NDK 31:
+#define AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION \
+  "mpegh-profile-level-indication"
+#define AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT \
+  "mpegh-reference-channel-layout"
+#define AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS \
+  "mpegh-compatible-sets"
+
 namespace android {
 
 static status_t copyNALUToABuffer(sp<ABuffer> *buffer, const uint8_t *ptr, size_t length) {
@@ -1078,6 +1088,25 @@
             msg->setInt32("is-adts", isADTS);
         }
 
+        int32_t mpeghProfileLevelIndication;
+        if (meta->findInt32(kKeyMpeghProfileLevelIndication, &mpeghProfileLevelIndication)) {
+            msg->setInt32(AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION,
+                    mpeghProfileLevelIndication);
+        }
+        int32_t mpeghReferenceChannelLayout;
+        if (meta->findInt32(kKeyMpeghReferenceChannelLayout, &mpeghReferenceChannelLayout)) {
+            msg->setInt32(AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT,
+                    mpeghReferenceChannelLayout);
+        }
+        if (meta->findData(kKeyMpeghCompatibleSets, &type, &data, &size)) {
+            sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
+            if (buffer.get() == NULL || buffer->base() == NULL) {
+                return NO_MEMORY;
+            }
+            msg->setBuffer(AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS, buffer);
+            memcpy(buffer->data(), data, size);
+        }
+
         int32_t aacProfile = -1;
         if (meta->findInt32(kKeyAACAOT, &aacProfile)) {
             msg->setInt32("aac-profile", aacProfile);
@@ -1837,6 +1866,23 @@
             meta->setInt32(kKeyIsADTS, isADTS);
         }
 
+        int32_t mpeghProfileLevelIndication = -1;
+        if (msg->findInt32(AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION,
+                &mpeghProfileLevelIndication)) {
+            meta->setInt32(kKeyMpeghProfileLevelIndication, mpeghProfileLevelIndication);
+        }
+        int32_t mpeghReferenceChannelLayout = -1;
+        if (msg->findInt32(AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT,
+                &mpeghReferenceChannelLayout)) {
+            meta->setInt32(kKeyMpeghReferenceChannelLayout, mpeghReferenceChannelLayout);
+        }
+        sp<ABuffer> mpeghCompatibleSets;
+        if (msg->findBuffer(AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS,
+                &mpeghCompatibleSets)) {
+            meta->setData(kKeyMpeghCompatibleSets, kTypeHCOS,
+                    mpeghCompatibleSets->data(), mpeghCompatibleSets->size());
+        }
+
         int32_t aacProfile = -1;
         if (msg->findInt32("aac-profile", &aacProfile)) {
             meta->setInt32(kKeyAACAOT, aacProfile);
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index 407f609..db87a33 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -45,6 +45,9 @@
     ],
 
     header_libs: [
+        // This is only needed for the vendor variant that removes libbinder, but the vendor
+        // target below does not allow adding header_libs.
+        "libbinder_headers",
         "libstagefright_foundation_headers",
         "media_ndk_headers",
         "media_plugin_headers",
@@ -98,6 +101,7 @@
 
     target: {
         vendor: {
+            // TODO: add libbinder_headers here instead of above when it becomes supported
             exclude_shared_libs: [
                 "libbinder",
             ],
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index 070e325..3812afe 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -613,6 +613,35 @@
 }
 
 // static
+void ColorUtils::getColorConfigFromDataSpace(
+        const android_dataspace &dataspace, int32_t *range, int32_t *standard, int32_t *transfer) {
+    uint32_t gfxRange =
+        (dataspace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
+    uint32_t gfxStandard =
+        (dataspace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
+    uint32_t gfxTransfer =
+        (dataspace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
+
+    // assume 1-to-1 mapping to HAL values (to deal with potential vendor extensions)
+    CU::ColorRange    cuRange    = CU::kColorRangeUnspecified;
+    CU::ColorStandard cuStandard = CU::kColorStandardUnspecified;
+    CU::ColorTransfer cuTransfer = CU::kColorTransferUnspecified;
+    // TRICKY: use & to ensure all three mappings are completed
+    if (!(sGfxRanges.map(gfxRange, &cuRange) & sGfxStandards.map(gfxStandard, &cuStandard)
+            & sGfxTransfers.map(gfxTransfer, &cuTransfer))) {
+        ALOGW("could not safely map graphics dataspace (R:%u S:%u T:%u) to "
+              "platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s))",
+              gfxRange, gfxStandard, gfxTransfer,
+              cuRange,    asString(cuRange),
+              cuStandard, asString(cuStandard),
+              cuTransfer, asString(cuTransfer));
+    }
+    *range    = cuRange;
+    *standard = cuStandard;
+    *transfer = cuTransfer;
+}
+
+// static
 void ColorUtils::getColorConfigFromFormat(
         const sp<AMessage> &format, int32_t *range, int32_t *standard, int32_t *transfer) {
     if (!format->findInt32("color-range", range)) {
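
As an aside, a minimal usage sketch of the new helper (illustrative only, not part of this patch); HAL_DATASPACE_V0_BT709 is just an example value, as used elsewhere in this change:
```
#include <media/stagefright/foundation/ColorUtils.h>

// Illustrative only: map a dataspace to platform color-aspect constants.
void exampleDataSpaceMapping() {
    int32_t range, standard, transfer;
    android::ColorUtils::getColorConfigFromDataSpace(
            HAL_DATASPACE_V0_BT709, &range, &standard, &transfer);
    // On success the outputs hold ColorUtils constants (e.g. kColorStandardBT709);
    // fields that cannot be mapped come back as the corresponding *Unspecified values.
}
```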
diff --git a/media/libstagefright/foundation/MediaDefs.cpp b/media/libstagefright/foundation/MediaDefs.cpp
index a08fed1..1493406 100644
--- a/media/libstagefright/foundation/MediaDefs.cpp
+++ b/media/libstagefright/foundation/MediaDefs.cpp
@@ -57,6 +57,8 @@
 const char *MEDIA_MIMETYPE_AUDIO_EAC3 = "audio/eac3";
 const char *MEDIA_MIMETYPE_AUDIO_EAC3_JOC = "audio/eac3-joc";
 const char *MEDIA_MIMETYPE_AUDIO_AC4 = "audio/ac4";
+const char *MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1 = "audio/mha1";
+const char *MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1 = "audio/mhm1";
 const char *MEDIA_MIMETYPE_AUDIO_SCRAMBLED = "audio/scrambled";
 const char *MEDIA_MIMETYPE_AUDIO_ALAC = "audio/alac";
 const char *MEDIA_MIMETYPE_AUDIO_WMA = "audio/x-ms-wma";
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
index cd0af2b..9e3f718 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -156,6 +156,10 @@
     // suited to blending. This requires implicit color space conversion on part of the device.
     static android_dataspace getDataSpaceForColorAspects(ColorAspects &aspects, bool mayExpand);
 
+    // Returns the platform color configs (range, standard, transfer) for the given |dataspace|.
+    static void getColorConfigFromDataSpace(
+            const android_dataspace &dataspace, int *range, int *standard, int *transfer);
+
     // converts |dataSpace| to a V0 enum, and returns true if dataSpace is an aspect-only value
     static bool convertDataSpaceToV0(android_dataspace &dataSpace);
 
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
index 1f9e636..97458c3 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -59,6 +59,8 @@
 extern const char *MEDIA_MIMETYPE_AUDIO_EAC3;
 extern const char *MEDIA_MIMETYPE_AUDIO_EAC3_JOC;
 extern const char *MEDIA_MIMETYPE_AUDIO_AC4;
+extern const char *MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1;
+extern const char *MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1;
 extern const char *MEDIA_MIMETYPE_AUDIO_SCRAMBLED;
 extern const char *MEDIA_MIMETYPE_AUDIO_ALAC;
 extern const char *MEDIA_MIMETYPE_AUDIO_WMA;
@@ -89,6 +91,8 @@
     kAudioEncodingPcm16bit = 2,
     kAudioEncodingPcm8bit = 3,
     kAudioEncodingPcmFloat = 4,
+    kAudioEncodingPcm24bitPacked = 21,
+    kAudioEncodingPcm32bit = 22,
 };
 
 }  // namespace android
diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp
index e97f6eb..b7c9062 100644
--- a/media/libstagefright/id3/ID3.cpp
+++ b/media/libstagefright/id3/ID3.cpp
@@ -234,9 +234,11 @@
     }
 
     // first handle global unsynchronization
+    bool hasGlobalUnsync = false;
     if (header.flags & 0x80) {
         ALOGV("removing unsynchronization");
 
+        hasGlobalUnsync = true;
         removeUnsynchronization();
     }
 
@@ -341,12 +343,12 @@
 
         memcpy(copy, mData, size);
 
-        bool success = removeUnsynchronizationV2_4(false /* iTunesHack */);
+        bool success = removeUnsynchronizationV2_4(false /* iTunesHack */, hasGlobalUnsync);
         if (!success) {
             memcpy(mData, copy, size);
             mSize = size;
 
-            success = removeUnsynchronizationV2_4(true /* iTunesHack */);
+            success = removeUnsynchronizationV2_4(true /* iTunesHack */, hasGlobalUnsync);
 
             if (success) {
                 ALOGV("Had to apply the iTunes hack to parse this ID3 tag");
@@ -407,7 +409,7 @@
     }
 }
 
-bool ID3::removeUnsynchronizationV2_4(bool iTunesHack) {
+bool ID3::removeUnsynchronizationV2_4(bool iTunesHack, bool hasGlobalUnsync) {
     size_t oldSize = mSize;
 
     size_t offset = mFirstFrameOffset;
@@ -443,7 +445,7 @@
             flags &= ~1;
         }
 
-        if ((flags & 2) && (dataSize >= 2)) {
+        if (!hasGlobalUnsync && (flags & 2) && (dataSize >= 2)) {
             // This frame has "unsynchronization", so we have to replace occurrences
             // of 0xff 0x00 with just 0xff in order to get the real data.
 
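
As a side note, the "unsynchronization" removal referred to in the comment above amounts to collapsing stuffed 0xff 0x00 byte pairs back to a single 0xff. A standalone illustrative sketch (not the ID3 implementation):
```
#include <cstdint>
#include <vector>

// Illustrative only: undo ID3 unsynchronization by dropping the 0x00 byte
// that the tagger inserted after every 0xff.
static std::vector<uint8_t> removeUnsynchronization(const std::vector<uint8_t> &in) {
    std::vector<uint8_t> out;
    out.reserve(in.size());
    for (size_t i = 0; i < in.size(); ++i) {
        out.push_back(in[i]);
        if (in[i] == 0xff && i + 1 < in.size() && in[i + 1] == 0x00) {
            ++i;  // skip the stuffed 0x00
        }
    }
    return out;
}
```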
diff --git a/media/libstagefright/id3/test/AndroidTest.xml b/media/libstagefright/id3/test/AndroidTest.xml
index d6ea470..50f9253 100644
--- a/media/libstagefright/id3/test/AndroidTest.xml
+++ b/media/libstagefright/id3/test/AndroidTest.xml
@@ -19,7 +19,7 @@
         <option name="cleanup" value="true" />
         <option name="push" value="ID3Test->/data/local/tmp/ID3Test" />
         <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/id3/test/ID3Test-1.1.zip?unzip=true"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/id3/test/ID3Test-1.2.zip?unzip=true"
             value="/data/local/tmp/ID3TestRes/" />
     </target_preparer>
 
diff --git a/media/libstagefright/id3/test/ID3Test.cpp b/media/libstagefright/id3/test/ID3Test.cpp
index 8db83cb..1ceeb6a 100644
--- a/media/libstagefright/id3/test/ID3Test.cpp
+++ b/media/libstagefright/id3/test/ID3Test.cpp
@@ -135,6 +135,7 @@
     } else {
         ASSERT_EQ(data, nullptr) << "Found album art when expected none!";
     }
+
 #if (LOG_NDEBUG == 0)
     hexdump(data, dataSize > 128 ? 128 : dataSize);
 #endif
@@ -186,7 +187,8 @@
                                            "bbb_1sec_v23_3tags.mp3",
                                            "bbb_1sec_v1_5tags.mp3",
                                            "bbb_2sec_v24_unsynchronizedOneFrame.mp3",
-                                           "bbb_2sec_v24_unsynchronizedAllFrames.mp3"));
+                                           "bbb_2sec_v24_unsynchronizedAllFrames.mp3",
+                                           "idv24_unsynchronized.mp3"));
 
 INSTANTIATE_TEST_SUITE_P(
         id3TestAll, ID3versionTest,
@@ -201,7 +203,8 @@
                           make_pair("bbb_1sec_v1_5tags.mp3", ID3::ID3_V1_1),
                           make_pair("bbb_1sec_v1_3tags.mp3", ID3::ID3_V1_1),
                           make_pair("bbb_2sec_v24_unsynchronizedOneFrame.mp3", ID3::ID3_V2_4),
-                          make_pair("bbb_2sec_v24_unsynchronizedAllFrames.mp3", ID3::ID3_V2_4)));
+                          make_pair("bbb_2sec_v24_unsynchronizedAllFrames.mp3", ID3::ID3_V2_4),
+                          make_pair("idv24_unsynchronized.mp3", ID3::ID3_V2_4)));
 
 INSTANTIATE_TEST_SUITE_P(
         id3TestAll, ID3textTagTest,
@@ -227,7 +230,9 @@
                                            make_pair("bbb_2sec_1_image.mp3", true),
                                            make_pair("bbb_2sec_2_image.mp3", true),
                                            make_pair("bbb_2sec_largeSize.mp3", true),
-                                           make_pair("bbb_1sec_v1_5tags.mp3", false)));
+                                           make_pair("bbb_1sec_v1_5tags.mp3", false),
+                                           make_pair("idv24_unsynchronized.mp3", true)
+                                           ));
 
 INSTANTIATE_TEST_SUITE_P(id3TestAll, ID3multiAlbumArtTest,
                          ::testing::Values(make_pair("bbb_1sec_v23.mp3", 0),
diff --git a/media/libstagefright/include/ID3.h b/media/libstagefright/include/ID3.h
index 0be5896..bd0d27c 100644
--- a/media/libstagefright/include/ID3.h
+++ b/media/libstagefright/include/ID3.h
@@ -91,7 +91,7 @@
     bool parseV1(DataSourceBase *source);
     bool parseV2(DataSourceBase *source, off64_t offset);
     void removeUnsynchronization();
-    bool removeUnsynchronizationV2_4(bool iTunesHack);
+    bool removeUnsynchronizationV2_4(bool iTunesHack, bool hasGlobalUnsync);
 
     static bool ParseSyncsafeInteger(const uint8_t encoded[4], size_t *x);
 
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index 6f0d3b5..efdfa02 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -192,6 +192,7 @@
     int32_t  mColorFormat;
     int32_t  mEncoderFormat;
     int32_t  mEncoderDataSpace;
+    int32_t  mBufferDataSpace;
     status_t mInitCheck;
 
     sp<Camera>   mCamera;
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index dd6df90..2e98fec 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -61,6 +61,11 @@
 
 using hardware::cas::native::V1_0::IDescrambler;
 
+struct CodecParameterDescriptor {
+    std::string name;
+    AMessage::Type type;
+};
+
 struct CodecBase : public AHandler, /* static */ ColorUtils {
     /**
      * This interface defines events firing from CodecBase back to MediaCodec.
@@ -233,6 +238,64 @@
     virtual void signalSetParameters(const sp<AMessage> &msg) = 0;
     virtual void signalEndOfInputStream() = 0;
 
+    /**
+     * Query supported parameters from this instance, and fill |names| with the
+     * names of the parameters.
+     *
+     * \param names string vector to fill with supported parameters.
+     * \return OK if successful;
+     *         BAD_VALUE if |names| is null;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t querySupportedParameters([[maybe_unused]] std::vector<std::string> *names) {
+        return ERROR_UNSUPPORTED;
+    }
+    /**
+     * Fill |desc| with description of the parameter with |name|.
+     *
+     * \param name name of the parameter to describe
+     * \param desc pointer to CodecParameterDescriptor to be filled
+     * \return OK if successful;
+     *         BAD_VALUE if |desc| is null;
+     *         NAME_NOT_FOUND if |name| is not recognized by the component;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t describeParameter(
+            [[maybe_unused]] const std::string &name,
+            [[maybe_unused]] CodecParameterDescriptor *desc) {
+        return ERROR_UNSUPPORTED;
+    }
+    /**
+     * Subscribe to parameters in |names| and get output format change event
+     * when they change.
+     * Unrecognized / already subscribed parameters are ignored.
+     *
+     * \param names names of parameters to subscribe
+     * \return OK if successful;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t subscribeToParameters(
+            [[maybe_unused]] const std::vector<std::string> &names) {
+        return ERROR_UNSUPPORTED;
+    }
+    /**
+     * Unsubscribe from parameters in |names| and no longer get
+     * output format change event when they change.
+     * Unrecognized / already unsubscribed parameters are ignored.
+     *
+     * \param names names of parameters to unsubscribe
+     * \return OK if successful;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t unsubscribeFromParameters(
+            [[maybe_unused]] const std::vector<std::string> &names) {
+        return ERROR_UNSUPPORTED;
+    }
+
     typedef CodecBase *(*CreateCodecFunc)(void);
     typedef PersistentSurface *(*CreateInputSurfaceFunc)(void);
 
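To illustrate the contract documented above, a hypothetical component override might look like the following sketch (not part of this patch; "vendor.example.foo" is a made-up parameter name and MyCodec is not a real class in this tree):
```
#include <string>
#include <vector>
#include <media/stagefright/CodecBase.h>

namespace android {

// Sketch of a CodecBase subclass advertising one vendor parameter.
// Other pure-virtual overrides are omitted, so this class stays abstract.
struct MyCodec : public CodecBase {
    status_t querySupportedParameters(std::vector<std::string> *names) override {
        if (names == nullptr) {
            return BAD_VALUE;
        }
        names->clear();
        names->push_back("vendor.example.foo");
        return OK;
    }

    status_t describeParameter(
            const std::string &name, CodecParameterDescriptor *desc) override {
        if (desc == nullptr) {
            return BAD_VALUE;
        }
        if (name != "vendor.example.foo") {
            return NAME_NOT_FOUND;
        }
        desc->name = name;
        desc->type = AMessage::kTypeInt32;
        return OK;
    }
};

}  // namespace android
```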
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index a28d479..8952376 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -50,6 +50,7 @@
 struct BatteryChecker;
 class BufferChannelBase;
 struct CodecBase;
+struct CodecParameterDescriptor;
 class IBatteryStats;
 struct ICrypto;
 class MediaCodecBuffer;
@@ -246,6 +247,11 @@
 
     status_t setParameters(const sp<AMessage> &params);
 
+    status_t querySupportedVendorParameters(std::vector<std::string> *names);
+    status_t describeParameter(const std::string &name, CodecParameterDescriptor *desc);
+    status_t subscribeToVendorParameters(const std::vector<std::string> &names);
+    status_t unsubscribeFromVendorParameters(const std::vector<std::string> &names);
+
     // Create a MediaCodec notification message from a list of rendered or dropped render infos
     // by adding rendered frame information to a base notification message. Returns the number
     // of frames that were rendered.
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 6cf1ba0..bee96b1 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -752,7 +752,7 @@
 constexpr char KEY_CA_SYSTEM_ID[] = "ca-system-id";
 constexpr char KEY_CA_PRIVATE_DATA[] = "ca-private-data";
 constexpr char KEY_CAPTURE_RATE[] = "capture-rate";
-constexpr char KEY_CHANNEL_COUNT[] = "channel-count";
+constexpr char KEY_CHANNEL_COUNT[] = "channel-count";   // value N, eq to range 1..N
 constexpr char KEY_CHANNEL_MASK[] = "channel-mask";
 constexpr char KEY_COLOR_FORMAT[] = "color-format";
 constexpr char KEY_COLOR_RANGE[] = "color-range";
@@ -793,7 +793,7 @@
 constexpr char KEY_PCM_ENCODING[] = "pcm-encoding";
 constexpr char KEY_PIXEL_ASPECT_RATIO_HEIGHT[] = "sar-height";
 constexpr char KEY_PIXEL_ASPECT_RATIO_WIDTH[] = "sar-width";
-constexpr char KEY_PREPEND_HEADERS_TO_SYNC_FRAMES[] = "prepend-sps-pps-to-idr-frames";
+constexpr char KEY_PREPEND_HEADER_TO_SYNC_FRAMES[] = "prepend-sps-pps-to-idr-frames";
 constexpr char KEY_PRIORITY[] = "priority";
 constexpr char KEY_PROFILE[] = "profile";
 constexpr char KEY_PUSH_BLANK_BUFFERS_ON_STOP[] = "push-blank-buffers-on-shutdown";
@@ -807,6 +807,14 @@
 constexpr char KEY_TILE_HEIGHT[] = "tile-height";
 constexpr char KEY_TILE_WIDTH[] = "tile-width";
 constexpr char KEY_TRACK_ID[] = "track-id";
+constexpr char KEY_VIDEO_QP_B_MAX[] = "video-qp-b-max";
+constexpr char KEY_VIDEO_QP_B_MIN[] = "video-qp-b-min";
+constexpr char KEY_VIDEO_QP_I_MAX[] = "video-qp-i-max";
+constexpr char KEY_VIDEO_QP_I_MIN[] = "video-qp-i-min";
+constexpr char KEY_VIDEO_QP_MAX[] = "video-qp-max";
+constexpr char KEY_VIDEO_QP_MIN[] = "video-qp-min";
+constexpr char KEY_VIDEO_QP_P_MAX[] = "video-qp-p-max";
+constexpr char KEY_VIDEO_QP_P_MIN[] = "video-qp-p-min";
 constexpr char KEY_WIDTH[] = "width";
 
 // from MediaCodec.java
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 6f21a80..aae5ef9 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -152,6 +152,10 @@
     kKeyIsADTS            = 'adts',  // bool (int32_t)
     kKeyAACAOT            = 'aaot',  // int32_t
 
+    kKeyMpeghProfileLevelIndication = 'hpli', // int32_t
+    kKeyMpeghReferenceChannelLayout = 'hrcl', // int32_t
+    kKeyMpeghCompatibleSets         = 'hcos', // raw data
+
     // If a MediaBuffer's data represents (at least partially) encrypted
     // data, the following fields aid in decryption.
     // The data can be thought of as pairs of plain and encrypted data
@@ -265,6 +269,7 @@
     kTypeAV1C        = 'av1c',
     kTypeDVCC        = 'dvcc',
     kTypeD263        = 'd263',
+    kTypeHCOS        = 'hcos',
 };
 
 enum {
diff --git a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
index 6facbd8..06e36ad 100644
--- a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
+++ b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
@@ -285,10 +285,11 @@
     // 1) Client thread calls stop(); MediaCodec looper thread calls
     //    initiateShutdown(); shutdown is being handled at the component thread.
     // 2) Error occurred, but the shutdown operation is still being done.
-    // 3) MediaCodec looper thread handles the error.
-    // 4) Client releases the codec upon the error; previous shutdown is still
+    // 3) Another error occurred during the shutdown operation.
+    // 4) MediaCodec looper thread handles the error.
+    // 5) Client releases the codec upon the error; previous shutdown is still
     //    going on.
-    // 5) Component thread completes shutdown and posts onStopCompleted();
+    // 6) Component thread completes shutdown and posts onStopCompleted();
     //    Shutdown from release also completes.
 
     static const AString kCodecName{"test.codec"};
@@ -317,6 +318,9 @@
                 });
             ON_CALL(*mockCodec, initiateShutdown(true))
                 .WillByDefault([mockCodec](bool) {
+                    // 2)
+                    mockCodec->callback()->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+                    // 3)
                     mockCodec->callback()->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
                 });
             ON_CALL(*mockCodec, initiateShutdown(false))
@@ -339,6 +343,8 @@
     codec->start();
     // stop() will fail because of the error
     EXPECT_NE(OK, codec->stop());
+    // sleep here so that the looper thread can handle all the errors.
+    std::this_thread::sleep_for(std::chrono::milliseconds(100));
     // upon receiving the error, client tries to release the codec.
     codec->release();
     looper->stop();
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index e9ea386..05f6efa 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -85,7 +85,9 @@
 
 cc_library_shared {
     name: "libmediandk",
-    llndk_stubs: "libmediandk.llndk",
+    llndk: {
+        symbol_file: "libmediandk.map.txt",
+    },
 
     srcs: [
         "NdkJavaVMHelper.cpp",
@@ -165,14 +167,6 @@
     },
 }
 
-llndk_library {
-    name: "libmediandk.llndk",
-    symbol_file: "libmediandk.map.txt",
-    export_include_dirs: [
-        "include",
-    ],
-}
-
 cc_library {
     name: "libmediandk_utils",
 
diff --git a/media/tests/SampleVideoEncoder/README.md b/media/tests/SampleVideoEncoder/README.md
new file mode 100644
index 0000000..2e275c5
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/README.md
@@ -0,0 +1,56 @@
+# B-Frames Encoding App
+
+This is a sample Android application for encoding AVC/HEVC streams with B-frames enabled. It uses the MediaRecorder APIs to record B-frame-enabled video from camera2 input and the MediaCodec APIs to encode a reference test vector using an input surface.
+
+This page describes how to get started with the Encoder App and how to run the tests for it.
+
+
+# Getting Started
+
+This app can be built with either the Gradle build system or the Soong build system.
+
+To build this project with Gradle, use the "gradlew build" command or use "Import Project" in Android Studio.
+
+To build the app with the Soong build system, run the following command:
+```
+mmm frameworks/av/media/tests/SampleVideoEncoder/
+```
+
+The apk is generated at the following location:
+```
+out/target/product/sargo/testcases/SampleVideoEncoder/arm64/SampleVideoEncoder.apk
+```
+
+Command to install the apk:
+```
+adb install SampleVideoEncoder.apk
+```
+
+Command to launch the app:
+```
+adb shell am start -n "com.android.media.samplevideoencoder/com.android.media.samplevideoencoder.MainActivity"
+```
+
+After installing the app, a TextureView showing the camera preview is displayed on one third of the screen. The app provides checkboxes to select AVC or HEVC and a hardware or software encoder, an option to record via either the MediaRecorder APIs or the MediaCodec APIs, and a 'Start' button to start/stop recording.
+
+# Running Tests
+
+The app also contains a test that exercises the MediaCodec APIs for encoding AVC/HEVC streams with B-frames enabled. Running it does not require the application UI.
+
+## Running the tests using atest
+Note that the atest command will install the SampleVideoEncoder app on the device.
+
+Command to run the tests:
+```
+atest SampleVideoEncoder
+```
+
+# Output
+
+The muxed output video is saved in the app's data directory at:
+```
+/storage/emulated/0/Android/data/com.android.media.samplevideoencoder/files/
+```
+
+The total numbers of I-frames, P-frames, and B-frames encoded using the MediaCodec APIs are displayed on the screen.
+The results of the tests can be obtained from logcat.
diff --git a/media/tests/SampleVideoEncoder/app/Android.bp b/media/tests/SampleVideoEncoder/app/Android.bp
new file mode 100644
index 0000000..58b219b
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/Android.bp
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+android_test {
+    name: "SampleVideoEncoder",
+
+    manifest: "src/main/AndroidManifest.xml",
+
+    srcs: ["src/**/*.java"],
+
+    sdk_version: "current",
+    min_sdk_version: "24", // N
+
+    resource_dirs: [
+        "src/main/res",
+    ],
+
+    static_libs: [
+        "androidx.annotation_annotation",
+        "androidx.appcompat_appcompat",
+        "androidx-constraintlayout_constraintlayout",
+        "junit",
+        "androidx.test.core",
+        "androidx.test.runner",
+        "hamcrest-library",
+    ],
+
+    javacflags: [
+        "-Xlint:deprecation",
+        "-Xlint:unchecked",
+    ],
+}
diff --git a/media/tests/SampleVideoEncoder/app/AndroidTest.xml b/media/tests/SampleVideoEncoder/app/AndroidTest.xml
new file mode 100644
index 0000000..91f4304
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/AndroidTest.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Runs SampleVideoEncoder Tests">
+    <target_preparer class="com.android.tradefed.targetprep.TestAppInstallSetup">
+        <option name="cleanup-apks" value="false" />
+        <option name="test-file-name" value="SampleVideoEncoder.apk" />
+    </target_preparer>
+
+    <option name="test-tag" value="SampleVideoEncoder" />
+    <test class="com.android.tradefed.testtype.AndroidJUnitTest" >
+        <option name="package" value="com.android.media.samplevideoencoder" />
+        <option name="runner" value="androidx.test.runner.AndroidJUnitRunner" />
+        <option name="hidden-api-checks" value="false"/>
+    </test>
+</configuration>
diff --git a/media/tests/SampleVideoEncoder/app/build.gradle b/media/tests/SampleVideoEncoder/app/build.gradle
new file mode 100644
index 0000000..cc54981
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/build.gradle
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+apply plugin: 'com.android.application'
+
+android {
+    compileSdkVersion 30
+    buildToolsVersion "30.0.2"
+
+    defaultConfig {
+        applicationId "com.android.media.samplevideoencoder"
+        minSdkVersion 24
+        targetSdkVersion 30
+        versionCode 1
+        versionName "1.0"
+        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+    }
+
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
+        }
+    }
+}
+
+dependencies {
+    implementation fileTree(dir: "libs", include: ["*.jar"])
+    implementation 'androidx.appcompat:appcompat:1.2.0'
+    implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
+    testImplementation 'junit:junit:4.13.1'
+    androidTestImplementation 'androidx.test:runner:1.3.0'
+    androidTestImplementation 'androidx.test.ext:junit:1.1.2'
+    androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
+}
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/androidTest/java/com/android/media/samplevideoencoder/tests/SampleVideoEncoderTest.java b/media/tests/SampleVideoEncoder/app/src/androidTest/java/com/android/media/samplevideoencoder/tests/SampleVideoEncoderTest.java
new file mode 100644
index 0000000..1ef332e
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/androidTest/java/com/android/media/samplevideoencoder/tests/SampleVideoEncoderTest.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.samplevideoencoder.tests;
+
+import androidx.test.platform.app.InstrumentationRegistry;
+
+import android.content.Context;
+import android.media.MediaFormat;
+import android.util.Log;
+
+import com.android.media.samplevideoencoder.MediaCodecSurfaceEncoder;
+import com.android.media.samplevideoencoder.R;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertThat;
+
+@RunWith(Parameterized.class)
+public class SampleVideoEncoderTest {
+    private static final String TAG = SampleVideoEncoderTest.class.getSimpleName();
+    private final Context mContext;
+    private int mMaxBFrames;
+    private int mInputResId;
+    private String mMime;
+    private boolean mIsSoftwareEncoder;
+
+    @Parameterized.Parameters
+    public static Collection<Object[]> inputFiles() {
+        return Arrays.asList(new Object[][]{
+                // Parameters: MimeType, isSoftwareEncoder, maxBFrames
+                {MediaFormat.MIMETYPE_VIDEO_AVC, false, 1},
+                {MediaFormat.MIMETYPE_VIDEO_AVC, true, 1},
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, false, 1},
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, true, 1}});
+    }
+
+    public SampleVideoEncoderTest(String mimeType, boolean isSoftwareEncoder, int maxBFrames) {
+        this.mContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
+        this.mInputResId = R.raw.crowd_1920x1080_25fps_4000kbps_h265;
+        this.mMime = mimeType;
+        this.mIsSoftwareEncoder = isSoftwareEncoder;
+        this.mMaxBFrames = maxBFrames;
+    }
+
+    private String getOutputPath() {
+        File dir = mContext.getExternalFilesDir(null);
+        if (dir == null) {
+            Log.e(TAG, "Cannot get external directory path to save output video");
+            return null;
+        }
+        String videoPath = dir.getAbsolutePath() + "/Video-" + System.currentTimeMillis() + ".mp4";
+        Log.i(TAG, "Output video is saved at: " + videoPath);
+        return videoPath;
+    }
+
+    @Test
+    public void testMediaSurfaceEncoder() throws IOException, InterruptedException {
+        String outputFilePath = getOutputPath();
+        MediaCodecSurfaceEncoder surfaceEncoder =
+                new MediaCodecSurfaceEncoder(mContext, mInputResId, mMime, mIsSoftwareEncoder,
+                        outputFilePath, mMaxBFrames);
+        int encodingStatus = surfaceEncoder.startEncodingSurface();
+        assertThat(encodingStatus, is(equalTo(0)));
+        int[] frameNumArray = surfaceEncoder.getFrameTypes();
+        Log.i(TAG, "Results: I-Frames: " + frameNumArray[0] + "; P-Frames: " + frameNumArray[1] +
+                "\n " + "; B-Frames:" + frameNumArray[2]);
+        assertNotEquals("Encoder mime: " + mMime + " isSoftware: " + mIsSoftwareEncoder +
+                " failed to generate B Frames", frameNumArray[2], 0);
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml b/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..b17541d
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ -->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.android.media.samplevideoencoder">
+
+    <uses-permission android:name="android.permission.CAMERA"/>
+    <uses-permission android:name="android.permission.RECORD_AUDIO"/>
+
+    <application
+        android:configChanges="orientation"
+        android:screenOrientation="portrait"
+        android:allowBackup="true"
+        android:icon="@mipmap/ic_launcher"
+        android:label="@string/app_name"
+        android:roundIcon="@mipmap/ic_launcher_round"
+        android:supportsRtl="true"
+        android:theme="@style/AppTheme">
+        <activity android:name="com.android.media.samplevideoencoder.MainActivity">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+
+    <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
+        android:targetPackage="com.android.media.samplevideoencoder"
+        android:label="SampleVideoEncoder Test"/>
+
+</manifest>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/AutoFitTextureView.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/AutoFitTextureView.java
new file mode 100644
index 0000000..a3ea4c7
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/AutoFitTextureView.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.samplevideoencoder;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.view.TextureView;
+
+public class AutoFitTextureView extends TextureView {
+
+    public AutoFitTextureView(Context context) {
+        this(context, null);
+    }
+
+    public AutoFitTextureView(Context context, AttributeSet attrs) {
+        this(context, attrs, 0);
+    }
+
+    public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
+        super(context, attrs, defStyle);
+    }
+
+    public void setAspectRatio(int width, int height) {
+        if (width < 0 || height < 0) {
+            throw new IllegalArgumentException("Size cannot be negative.");
+        }
+        requestLayout();
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java
new file mode 100644
index 0000000..a7a353c
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java
@@ -0,0 +1,671 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.samplevideoencoder;
+
+import androidx.appcompat.app.AppCompatActivity;
+import androidx.core.app.ActivityCompat;
+
+import android.Manifest;
+import android.app.Activity;
+
+import android.content.Context;
+import android.content.pm.PackageManager;
+
+import android.graphics.Matrix;
+import android.graphics.RectF;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.graphics.SurfaceTexture;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.media.MediaRecorder;
+
+import android.os.AsyncTask;
+import android.os.Build;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.view.Surface;
+import android.view.View;
+import android.view.TextureView;
+import android.widget.Button;
+import android.widget.CheckBox;
+
+import java.io.File;
+import java.io.IOException;
+
+import android.util.Log;
+import android.util.Size;
+import android.widget.RadioGroup;
+import android.widget.TextView;
+import android.widget.Toast;
+
+import java.lang.ref.WeakReference;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
+
+import static java.lang.Boolean.FALSE;
+import static java.lang.Boolean.TRUE;
+
+public class MainActivity extends AppCompatActivity
+        implements View.OnClickListener, ActivityCompat.OnRequestPermissionsResultCallback {
+
+    private static final String TAG = "SampleVideoEncoder";
+    private static final String[] RECORD_PERMISSIONS =
+            {Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO};
+    private static final int REQUEST_RECORD_PERMISSIONS = 1;
+    private final Semaphore mCameraOpenCloseLock = new Semaphore(1);
+    private static final int VIDEO_BITRATE = 8000000 /* 8 Mbps */;
+    private static final int VIDEO_FRAMERATE = 30;
+
+    /**
+     * The constant values assigned to frame types here are internal to this app.
+     * They do not correspond to the actual values defined in the AVC/HEVC specifications.
+     */
+    public static final int FRAME_TYPE_I = 0;
+    public static final int FRAME_TYPE_P = 1;
+    public static final int FRAME_TYPE_B = 2;
+
+    private String mMime = MediaFormat.MIMETYPE_VIDEO_AVC;
+    private String mOutputVideoPath = null;
+
+    private final boolean mIsFrontCamera = true;
+    private boolean mIsCodecSoftware = false;
+    private boolean mIsMediaRecorder = true;
+    private boolean mIsRecording;
+
+    private AutoFitTextureView mTextureView;
+    private TextView mTextView;
+    private CameraDevice mCameraDevice;
+    private CameraCaptureSession mPreviewSession;
+    private CaptureRequest.Builder mPreviewBuilder;
+    private MediaRecorder mMediaRecorder;
+    private Size mVideoSize;
+    private Size mPreviewSize;
+
+    private Handler mBackgroundHandler;
+    private HandlerThread mBackgroundThread;
+
+    private Button mStartButton;
+
+    private int[] mFrameTypeOccurrences;
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.activity_main);
+
+        final RadioGroup radioGroup_mime = findViewById(R.id.radio_group_mime);
+        radioGroup_mime.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() {
+            @Override
+            public void onCheckedChanged(RadioGroup group, int checkedId) {
+                if (checkedId == R.id.avc) {
+                    mMime = MediaFormat.MIMETYPE_VIDEO_AVC;
+                } else {
+                    mMime = MediaFormat.MIMETYPE_VIDEO_HEVC;
+                }
+            }
+        });
+
+        final RadioGroup radioGroup_codec = findViewById(R.id.radio_group_codec);
+        radioGroup_codec.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() {
+            @Override
+            public void onCheckedChanged(RadioGroup group, int checkedId) {
+                mIsCodecSoftware = checkedId == R.id.sw;
+            }
+        });
+
+        final CheckBox checkBox_mr = findViewById(R.id.checkBox_media_recorder);
+        final CheckBox checkBox_mc = findViewById(R.id.checkBox_media_codec);
+        mTextureView = findViewById(R.id.texture);
+        mTextView = findViewById(R.id.textViewResults);
+
+        checkBox_mr.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                boolean checked = ((CheckBox) v).isChecked();
+                if (checked) {
+                    checkBox_mc.setChecked(false);
+                    mIsMediaRecorder = TRUE;
+                    for (int i = 0; i < radioGroup_codec.getChildCount(); i++) {
+                        radioGroup_codec.getChildAt(i).setEnabled(false);
+                    }
+                }
+            }
+        });
+        checkBox_mc.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                boolean checked = ((CheckBox) v).isChecked();
+                if (checked) {
+                    checkBox_mr.setChecked(false);
+                    mIsMediaRecorder = FALSE;
+                    for (int i = 0; i < radioGroup_codec.getChildCount(); i++) {
+                        radioGroup_codec.getChildAt(i).setEnabled(true);
+                    }
+                }
+            }
+        });
+        mStartButton = findViewById(R.id.start_button);
+        mStartButton.setOnClickListener(this);
+    }
+
+    @Override
+    public void onClick(View v) {
+        if (v.getId() == R.id.start_button) {
+            mTextView.setText(null);
+            if (mIsMediaRecorder) {
+                if (mIsRecording) {
+                    stopRecordingVideo();
+                } else {
+                    mStartButton.setEnabled(false);
+                    startRecordingVideo();
+                }
+            } else {
+                mStartButton.setEnabled(false);
+                mOutputVideoPath = getVideoPath(MainActivity.this);
+                MediaCodecSurfaceAsync codecAsyncTask = new MediaCodecSurfaceAsync(this);
+                codecAsyncTask.execute(
+                        "Encoding reference test vector with MediaCodec APIs using surface");
+            }
+        }
+    }
+
+    private static class MediaCodecSurfaceAsync extends AsyncTask<String, String, Integer> {
+
+        private final WeakReference<MainActivity> activityReference;
+
+        MediaCodecSurfaceAsync(MainActivity context) {
+            activityReference = new WeakReference<>(context);
+        }
+
+        @Override
+        protected Integer doInBackground(String... strings) {
+            MainActivity mainActivity = activityReference.get();
+            int resId = R.raw.crowd_1920x1080_25fps_4000kbps_h265;
+            int encodingStatus = 1;
+            MediaCodecSurfaceEncoder codecSurfaceEncoder =
+                    new MediaCodecSurfaceEncoder(mainActivity.getApplicationContext(), resId,
+                            mainActivity.mMime, mainActivity.mIsCodecSoftware,
+                            mainActivity.mOutputVideoPath);
+            try {
+                encodingStatus = codecSurfaceEncoder.startEncodingSurface();
+                mainActivity.mFrameTypeOccurrences = codecSurfaceEncoder.getFrameTypes();
+            } catch (IOException | InterruptedException e) {
+                e.printStackTrace();
+            }
+            return encodingStatus;
+        }
+
+        @Override
+        protected void onPostExecute(Integer encodingStatus) {
+            MainActivity mainActivity = activityReference.get();
+            if (mainActivity == null || mainActivity.isFinishing()) {
+                return;
+            }
+            mainActivity.mStartButton.setEnabled(true);
+            if (encodingStatus == 0) {
+                Toast.makeText(mainActivity.getApplicationContext(), "Encoding Completed",
+                        Toast.LENGTH_SHORT).show();
+                mainActivity.mTextView.append("\n Encoded stream contains: ");
+                mainActivity.mTextView.append("\n Number of I-Frames: " +
+                        mainActivity.mFrameTypeOccurrences[FRAME_TYPE_I]);
+                mainActivity.mTextView.append("\n Number of P-Frames: " +
+                        mainActivity.mFrameTypeOccurrences[FRAME_TYPE_P]);
+                mainActivity.mTextView.append("\n Number of B-Frames: " +
+                        mainActivity.mFrameTypeOccurrences[FRAME_TYPE_B]);
+            } else {
+                Toast.makeText(mainActivity.getApplicationContext(),
+                        "Error occurred while encoding", Toast.LENGTH_SHORT).show();
+            }
+            mainActivity.mOutputVideoPath = null;
+            super.onPostExecute(encodingStatus);
+        }
+    }
+
+    private final TextureView.SurfaceTextureListener mSurfaceTextureListener =
+            new TextureView.SurfaceTextureListener() {
+
+                @Override
+                public void onSurfaceTextureAvailable(SurfaceTexture surface, int width,
+                                                      int height) {
+                    openCamera(width, height);
+                }
+
+                @Override
+                public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width,
+                                                        int height) {
+                    configureTransform(width, height);
+                    Log.v(TAG, "Keeping camera preview size fixed");
+                }
+
+                @Override
+                public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
+                    return true;
+                }
+
+                @Override
+                public void onSurfaceTextureUpdated(SurfaceTexture surface) {
+                }
+            };
+
+    private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
+
+        @Override
+        public void onOpened(CameraDevice cameraDevice) {
+            mCameraDevice = cameraDevice;
+            startPreview();
+            mCameraOpenCloseLock.release();
+        }
+
+        @Override
+        public void onDisconnected(CameraDevice cameraDevice) {
+            mCameraOpenCloseLock.release();
+            cameraDevice.close();
+            mCameraDevice = null;
+        }
+
+        @Override
+        public void onError(CameraDevice cameraDevice, int error) {
+            mCameraOpenCloseLock.release();
+            cameraDevice.close();
+            mCameraDevice = null;
+            Activity activity = MainActivity.this;
+            activity.finish();
+        }
+    };
+
+    private boolean shouldShowRequestPermissionRationale(String[] recordPermissions) {
+        for (String permission : recordPermissions) {
+            if (ActivityCompat.shouldShowRequestPermissionRationale(this, permission)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    private void requestRecordPermissions() {
+        if (!shouldShowRequestPermissionRationale(RECORD_PERMISSIONS)) {
+            ActivityCompat.requestPermissions(this, RECORD_PERMISSIONS, REQUEST_RECORD_PERMISSIONS);
+        }
+    }
+
+    @Override
+    public void onRequestPermissionsResult(int requestCode, String[] permissions,
+                                           int[] grantResults) {
+        if (requestCode == REQUEST_RECORD_PERMISSIONS) {
+            if (grantResults.length == RECORD_PERMISSIONS.length) {
+                for (int result : grantResults) {
+                    if (result != PackageManager.PERMISSION_GRANTED) {
+                        Log.e(TAG, "Permission is not granted");
+                        break;
+                    }
+                }
+            }
+        } else {
+            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
+        }
+    }
+
+    @SuppressWarnings("MissingPermission")
+    private void openCamera(int width, int height) {
+        if (!hasPermissionGranted(RECORD_PERMISSIONS)) {
+            Log.e(TAG, "Camera does not have permission to record video");
+            requestRecordPermissions();
+            return;
+        }
+        final Activity activity = MainActivity.this;
+        if (activity == null || activity.isFinishing()) {
+            Log.e(TAG, "Activity not found");
+            return;
+        }
+        CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
+        try {
+            Log.v(TAG, "Acquire Camera");
+            if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
+                throw new RuntimeException("Timed out waiting to lock camera opening");
+            }
+            Log.d(TAG, "Camera Acquired");
+
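+            // Index 0 is assumed to be a back camera and index 1 a front camera; a device with
+            // a single camera only exposes index 0.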
+            String cameraId = manager.getCameraIdList()[0];
+            if (mIsFrontCamera) {
+                cameraId = manager.getCameraIdList()[1];
+            }
+
+            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
+            StreamConfigurationMap map =
+                    characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+            mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
+            mPreviewSize =
+                    chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height,
+                            mVideoSize);
+            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
+            configureTransform(width, height);
+            mMediaRecorder = new MediaRecorder();
+            manager.openCamera(cameraId, mStateCallback, null);
+        } catch (InterruptedException | CameraAccessException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void closeCamera() {
+        try {
+            mCameraOpenCloseLock.acquire();
+            closePreviewSession();
+            if (null != mCameraDevice) {
+                mCameraDevice.close();
+                mCameraDevice = null;
+            }
+            if (null != mMediaRecorder) {
+                mMediaRecorder.release();
+                mMediaRecorder = null;
+            }
+        } catch (InterruptedException e) {
+            throw new RuntimeException("Interrupted while trying to lock camera closing.");
+        } finally {
+            mCameraOpenCloseLock.release();
+        }
+    }
+
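+    // Picks the first 16:9 size that is at most 1920 pixels wide, falling back to the last
+    // reported size.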
+    private static Size chooseVideoSize(Size[] choices) {
+        for (Size size : choices) {
+            if (size.getWidth() == size.getHeight() * 16 / 9 && size.getWidth() <= 1920) {
+                return size;
+            }
+        }
+        Log.e(TAG, "Couldn't find any suitable video size");
+        return choices[choices.length - 1];
+    }
+
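+    // Chooses the smallest size that covers the view dimensions and matches the aspect ratio
+    // of the chosen video size.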
+    private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
+        List<Size> bigEnough = new ArrayList<>();
+        int w = aspectRatio.getWidth();
+        int h = aspectRatio.getHeight();
+        for (Size option : choices) {
+            if (option.getHeight() == option.getWidth() * h / w && option.getWidth() >= width &&
+                    option.getHeight() >= height) {
+                bigEnough.add(option);
+            }
+        }
+
+        // Pick the smallest of those, assuming we found any
+        if (bigEnough.size() > 0) {
+            return Collections.min(bigEnough, new CompareSizesByArea());
+        } else {
+            Log.e(TAG, "Couldn't find any suitable preview size");
+            return choices[0];
+        }
+    }
+
+    private boolean hasPermissionGranted(String[] recordPermissions) {
+        for (String permission : recordPermissions) {
+            if (ActivityCompat.checkSelfPermission(MainActivity.this, permission) !=
+                    PackageManager.PERMISSION_GRANTED) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    @Override
+    public void onResume() {
+        super.onResume();
+        startBackgroundThread();
+        if (mTextureView.isAvailable()) {
+            openCamera(mTextureView.getWidth(), mTextureView.getHeight());
+        } else {
+            mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
+        }
+    }
+
+    @Override
+    public void onPause() {
+        closeCamera();
+        stopBackgroundThread();
+        super.onPause();
+    }
+
+    private void startBackgroundThread() {
+        mBackgroundThread = new HandlerThread("CameraBackground");
+        mBackgroundThread.start();
+        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
+    }
+
+    private void stopBackgroundThread() {
+        mBackgroundThread.quitSafely();
+        try {
+            mBackgroundThread.join();
+            mBackgroundThread = null;
+            mBackgroundHandler = null;
+        } catch (InterruptedException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void startRecordingVideo() {
+        if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
+            Toast.makeText(MainActivity.this, "Cannot start recording.", Toast.LENGTH_SHORT).show();
+            Log.e(TAG, "Cannot start recording.");
+            return;
+        }
+        try {
+            closePreviewSession();
+            setUpMediaRecorder();
+            SurfaceTexture texture = mTextureView.getSurfaceTexture();
+            assert texture != null;
+            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+            List<Surface> surfaces = new ArrayList<>();
+
+            // Set up Surface for the camera preview
+            Surface previewSurface = new Surface(texture);
+            surfaces.add(previewSurface);
+            mPreviewBuilder.addTarget(previewSurface);
+
+            // Set up Surface for the MediaRecorder
+            Surface recorderSurface = mMediaRecorder.getSurface();
+            surfaces.add(recorderSurface);
+            mPreviewBuilder.addTarget(recorderSurface);
+
+            // Start a capture session
+            mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
+
+                @Override
+                public void onConfigured(CameraCaptureSession session) {
+                    mPreviewSession = session;
+                    updatePreview();
+                    MainActivity.this.runOnUiThread(new Runnable() {
+                        @Override
+                        public void run() {
+                            mIsRecording = true;
+                            mMediaRecorder.start();
+                            mStartButton.setText(R.string.stop);
+                            mStartButton.setEnabled(true);
+                        }
+                    });
+                }
+
+                @Override
+                public void onConfigureFailed(CameraCaptureSession session) {
+                    Log.e(TAG, "Failed to configure. Cannot start Recording");
+                }
+            }, mBackgroundHandler);
+        } catch (CameraAccessException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void setUpMediaRecorder() {
+        final Activity activity = MainActivity.this;
+        if (activity == null) {
+            Toast.makeText(MainActivity.this, "Error occurred while setting up the MediaRecorder",
+                    Toast.LENGTH_SHORT).show();
+            Log.e(TAG, "Error occurred while setting up the MediaRecorder");
+            return;
+        }
+        try {
+            mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
+            mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
+            mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
+        } catch (IllegalStateException e) {
+            e.printStackTrace();
+        }
+        if (mOutputVideoPath == null) {
+            mOutputVideoPath = getVideoPath(MainActivity.this);
+        }
+        mMediaRecorder.setOutputFile(mOutputVideoPath);
+        mMediaRecorder.setVideoEncodingBitRate(VIDEO_BITRATE);
+        mMediaRecorder.setVideoFrameRate(VIDEO_FRAMERATE);
+        mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
+        mMediaRecorder.setOrientationHint(270);
+        if (mMime.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
+            mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.HEVC);
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+                mMediaRecorder.setVideoEncodingProfileLevel(
+                        MediaCodecInfo.CodecProfileLevel.HEVCProfileMain,
+                        MediaCodecInfo.CodecProfileLevel.HEVCMainTierLevel4);
+            }
+        } else {
+            mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+                mMediaRecorder.setVideoEncodingProfileLevel(
+                        MediaCodecInfo.CodecProfileLevel.AVCProfileMain,
+                        MediaCodecInfo.CodecProfileLevel.AVCLevel4);
+            }
+        }
+        mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
+        try {
+            mMediaRecorder.prepare();
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private String getVideoPath(Activity activity) {
+        File dir = activity.getApplicationContext().getExternalFilesDir(null);
+        if (dir == null) {
+            Log.e(TAG, "Cannot get external directory path to save output video");
+            return null;
+        }
+        String videoPath = dir.getAbsolutePath() + "/Video-" + System.currentTimeMillis() + ".mp4";
+        Log.d(TAG, "Output video is saved at: " + videoPath);
+        return videoPath;
+    }
+
+    private void closePreviewSession() {
+        if (mPreviewSession != null) {
+            mPreviewSession.close();
+            mPreviewSession = null;
+        }
+    }
+
+    private void stopRecordingVideo() {
+        mIsRecording = false;
+        mStartButton.setText(R.string.start);
+        mMediaRecorder.stop();
+        mMediaRecorder.reset();
+        Toast.makeText(MainActivity.this, "Recording Finished", Toast.LENGTH_SHORT).show();
+        mOutputVideoPath = null;
+        startPreview();
+    }
+
+    private void startPreview() {
+        if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
+            return;
+        }
+        try {
+            closePreviewSession();
+            SurfaceTexture texture = mTextureView.getSurfaceTexture();
+            assert texture != null;
+            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+
+            Surface previewSurface = new Surface(texture);
+            mPreviewBuilder.addTarget(previewSurface);
+
+            mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
+                    new CameraCaptureSession.StateCallback() {
+
+                        @Override
+                        public void onConfigured(CameraCaptureSession session) {
+                            mPreviewSession = session;
+                            updatePreview();
+                        }
+
+                        @Override
+                        public void onConfigureFailed(CameraCaptureSession session) {
+                            Toast.makeText(MainActivity.this,
+                                    "Configure Failed; Cannot start preview",
+                                    Toast.LENGTH_SHORT).show();
+                            Log.e(TAG, "Configure failed; Cannot start preview");
+                        }
+                    }, mBackgroundHandler);
+        } catch (CameraAccessException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void updatePreview() {
+        if (mCameraDevice == null) {
+            Toast.makeText(MainActivity.this, "Camera not found; Cannot update preview",
+                    Toast.LENGTH_SHORT).show();
+            Log.e(TAG, "Camera not found; Cannot update preview");
+            return;
+        }
+        try {
+            mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
+            mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
+        } catch (CameraAccessException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void configureTransform(int viewWidth, int viewHeight) {
+        Activity activity = MainActivity.this;
+        if (null == mTextureView || null == mPreviewSize || null == activity) {
+            return;
+        }
+        Matrix matrix = new Matrix();
+        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
+        RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
+        float centerX = viewRect.centerX();
+        float centerY = viewRect.centerY();
+        bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
+        matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
+        float scale = Math.max((float) viewHeight / mPreviewSize.getHeight(),
+                (float) viewWidth / mPreviewSize.getWidth());
+        matrix.postScale(scale, scale, centerX, centerY);
+        mTextureView.setTransform(matrix);
+    }
+
+    static class CompareSizesByArea implements Comparator<Size> {
+        @Override
+        public int compare(Size lhs, Size rhs) {
+            return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
+                    (long) rhs.getWidth() * rhs.getHeight());
+        }
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecBase.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecBase.java
new file mode 100644
index 0000000..88ce73b
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecBase.java
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.samplevideoencoder;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.util.Log;
+import android.util.Pair;
+
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+class CodecAsyncHandler extends MediaCodec.Callback {
+    private static final String TAG = CodecAsyncHandler.class.getSimpleName();
+    private final Lock mLock = new ReentrantLock();
+    private final Condition mCondition = mLock.newCondition();
+    private final LinkedList<Pair<Integer, MediaCodec.BufferInfo>> mCbInputQueue;
+    private final LinkedList<Pair<Integer, MediaCodec.BufferInfo>> mCbOutputQueue;
+    private volatile boolean mSignalledError;
+
+    CodecAsyncHandler() {
+        mCbInputQueue = new LinkedList<>();
+        mCbOutputQueue = new LinkedList<>();
+        mSignalledError = false;
+    }
+
+    void clearQueues() {
+        mLock.lock();
+        mCbInputQueue.clear();
+        mCbOutputQueue.clear();
+        mLock.unlock();
+    }
+
+    void resetContext() {
+        clearQueues();
+        mSignalledError = false;
+    }
+
+    @Override
+    public void onInputBufferAvailable(MediaCodec codec, int bufferIndex) {
+        mLock.lock();
+        mCbInputQueue.add(new Pair<>(bufferIndex, (MediaCodec.BufferInfo) null));
+        mCondition.signalAll();
+        mLock.unlock();
+    }
+
+    @Override
+    public void onOutputBufferAvailable(MediaCodec codec, int bufferIndex,
+                                        MediaCodec.BufferInfo info) {
+        mLock.lock();
+        mCbOutputQueue.add(new Pair<>(bufferIndex, info));
+        mCondition.signalAll();
+        mLock.unlock();
+    }
+
+    @Override
+    public void onError(MediaCodec codec, MediaCodec.CodecException e) {
+        mLock.lock();
+        mSignalledError = true;
+        mCondition.signalAll();
+        mLock.unlock();
+        Log.e(TAG, "Received media codec error : " + e.getMessage());
+    }
+
+    @Override
+    public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
+        Log.i(TAG, "Output format changed: " + format.toString());
+    }
+
+    void setCallBack(MediaCodec codec, boolean isCodecInAsyncMode) {
+        if (isCodecInAsyncMode) {
+            codec.setCallback(this);
+        }
+    }
+
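+    // Blocks until an output buffer callback is available (or an error is signalled).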
+    Pair<Integer, MediaCodec.BufferInfo> getOutput() throws InterruptedException {
+        Pair<Integer, MediaCodec.BufferInfo> element = null;
+        mLock.lock();
+        while (!mSignalledError) {
+            if (mCbOutputQueue.isEmpty()) {
+                mCondition.await();
+            } else {
+                element = mCbOutputQueue.remove(0);
+                break;
+            }
+        }
+        mLock.unlock();
+        return element;
+    }
+
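+    // Blocks until an input or output buffer callback is available, preferring output buffers.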
+    Pair<Integer, MediaCodec.BufferInfo> getWork() throws InterruptedException {
+        Pair<Integer, MediaCodec.BufferInfo> element = null;
+        mLock.lock();
+        while (!mSignalledError) {
+            if (mCbInputQueue.isEmpty() && mCbOutputQueue.isEmpty()) {
+                mCondition.await();
+            } else {
+                if (!mCbOutputQueue.isEmpty()) {
+                    element = mCbOutputQueue.remove(0);
+                    break;
+                }
+                if (!mCbInputQueue.isEmpty()) {
+                    element = mCbInputQueue.remove(0);
+                    break;
+                }
+            }
+        }
+        mLock.unlock();
+        return element;
+    }
+
+    boolean hasSeenError() {
+        return mSignalledError;
+    }
+}
+
+public abstract class MediaCodecBase {
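+    // Returns the names of encoders/decoders for the given mime type that support all of the
+    // requested formats and features, filtered to software or hardware implementations.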
+    static ArrayList<String> selectCodecs(String mime, ArrayList<MediaFormat> formats,
+                                          String[] features, boolean isEncoder,
+                                          boolean isSoftware) {
+
+        MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
+        MediaCodecInfo[] codecInfos = codecList.getCodecInfos();
+        ArrayList<String> listOfCodecs = new ArrayList<>();
+        for (MediaCodecInfo codecInfo : codecInfos) {
+            if (isEncoder) {
+                if (!codecInfo.isEncoder()) continue;
+            } else {
+                if (codecInfo.isEncoder()) continue;
+            }
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q && codecInfo.isAlias()) continue;
+            String[] types = codecInfo.getSupportedTypes();
+            for (String type : types) {
+                if (type.equalsIgnoreCase(mime)) {
+                    boolean isOk = true;
+                    MediaCodecInfo.CodecCapabilities codecCapabilities =
+                            codecInfo.getCapabilitiesForType(type);
+                    if (formats != null) {
+                        for (MediaFormat format : formats) {
+                            if (!codecCapabilities.isFormatSupported(format)) {
+                                isOk = false;
+                                break;
+                            }
+                        }
+                    }
+                    if (features != null) {
+                        for (String feature : features) {
+                            if (!codecCapabilities.isFeatureSupported(feature)) {
+                                isOk = false;
+                                break;
+                            }
+                        }
+                    }
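+                    // Heuristic used by this sample: codec names containing "software",
+                    // "android" or "google" are treated as software implementations.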
+                    boolean isSoftwareCodec = codecInfo.getName().contains("software") ||
+                            codecInfo.getName().contains("android") ||
+                            codecInfo.getName().contains("google");
+                    if (isOk && isSoftware == isSoftwareCodec) {
+                        listOfCodecs.add(codecInfo.getName());
+                    }
+                }
+            }
+        }
+        return listOfCodecs;
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java
new file mode 100644
index 0000000..011c38c
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java
@@ -0,0 +1,385 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.samplevideoencoder;
+
+import android.content.Context;
+import android.content.res.AssetFileDescriptor;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.os.Build;
+import android.util.Log;
+import android.util.Pair;
+import android.view.Surface;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_B;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_I;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_P;
+
+public class MediaCodecSurfaceEncoder {
+    private static final String TAG = MediaCodecSurfaceEncoder.class.getSimpleName();
+    private static final boolean DEBUG = false;
+    private static final int VIDEO_BITRATE = 8000000  /*8 Mbps*/;
+    private static final int VIDEO_FRAMERATE = 30;
+    private final Context mActivityContext;
+    private final int mResID;
+    private final int mMaxBFrames;
+    private final String mMime;
+    private final String mOutputPath;
+    private int mTrackID = -1;
+    private int mFrameNum = 0;
+    private int[] mFrameTypeOccurrences = {0, 0, 0};
+
+    private Surface mSurface;
+    private MediaExtractor mExtractor;
+    private MediaCodec mDecoder;
+    private MediaCodec mEncoder;
+    private MediaMuxer mMuxer;
+
+    private final boolean mIsCodecSoftware;
+    private boolean mSawDecInputEOS;
+    private boolean mSawDecOutputEOS;
+    private boolean mSawEncOutputEOS;
+    private int mDecOutputCount;
+    private int mEncOutputCount;
+
+    private final CodecAsyncHandler mAsyncHandleEncoder = new CodecAsyncHandler();
+    private final CodecAsyncHandler mAsyncHandleDecoder = new CodecAsyncHandler();
+
+    public MediaCodecSurfaceEncoder(Context context, int resId, String mime, boolean isSoftware,
+                                    String outputPath, int maxBFrames) {
+        mActivityContext = context;
+        mResID = resId;
+        mMime = mime;
+        mIsCodecSoftware = isSoftware;
+        mOutputPath = outputPath;
+        mMaxBFrames = maxBFrames;
+    }
+
+    public MediaCodecSurfaceEncoder(Context context, int resId, String mime, boolean isSoftware,
+                                    String outputPath) {
+        // If maxBFrames is not passed, MediaFormat.KEY_MAX_B_FRAMES defaults to 1.
+        this(context, resId, mime, isSoftware, outputPath, 1);
+    }
+
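+    // Decodes the raw input clip and re-encodes it through the encoder's input surface,
+    // muxing the result to mOutputPath. Returns 0 on success and -1 on set-up failure.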
+    public int startEncodingSurface() throws IOException, InterruptedException {
+        MediaFormat decoderFormat = setUpSource();
+        if (decoderFormat == null) {
+            return -1;
+        }
+
+        String decoderMime = decoderFormat.getString(MediaFormat.KEY_MIME);
+        ArrayList<String> listOfDecoders =
+                MediaCodecBase.selectCodecs(decoderMime, null, null, false, mIsCodecSoftware);
+        if (listOfDecoders.isEmpty()) {
+            Log.e(TAG, "No suitable decoder found for mime: " + decoderMime);
+            return -1;
+        }
+        mDecoder = MediaCodec.createByCodecName(listOfDecoders.get(0));
+
+        MediaFormat encoderFormat = setUpEncoderFormat(decoderFormat);
+        ArrayList<String> listOfEncoders =
+                MediaCodecBase.selectCodecs(mMime, null, null, true, mIsCodecSoftware);
+        if (listOfEncoders.isEmpty()) {
+            Log.e(TAG, "No suitable encoder found for mime: " + mMime);
+            return -1;
+        }
+
+        boolean muxOutput = true;
+        for (String encoder : listOfEncoders) {
+            mEncoder = MediaCodec.createByCodecName(encoder);
+            mExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+            if (muxOutput) {
+                int muxerFormat = MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4;
+                mMuxer = new MediaMuxer(mOutputPath, muxerFormat);
+            }
+            configureCodec(decoderFormat, encoderFormat);
+            mEncoder.start();
+            mDecoder.start();
+            doWork(Integer.MAX_VALUE);
+            queueEOS();
+            waitForAllEncoderOutputs();
+            if (muxOutput) {
+                if (mTrackID != -1) {
+                    mMuxer.stop();
+                    mTrackID = -1;
+                }
+                if (mMuxer != null) {
+                    mMuxer.release();
+                    mMuxer = null;
+                }
+            }
+            mDecoder.reset();
+            mEncoder.reset();
+            mSurface.release();
+            mSurface = null;
+            Log.i(TAG, "Number of I-frames = " + mFrameTypeOccurrences[FRAME_TYPE_I]);
+            Log.i(TAG, "Number of P-frames = " + mFrameTypeOccurrences[FRAME_TYPE_P]);
+            Log.i(TAG, "Number of B-frames = " + mFrameTypeOccurrences[FRAME_TYPE_B]);
+        }
+        mEncoder.release();
+        mDecoder.release();
+        mExtractor.release();
+        return 0;
+    }
+
+    private MediaFormat setUpSource() throws IOException {
+        mExtractor = new MediaExtractor();
+        AssetFileDescriptor fd = mActivityContext.getResources().openRawResourceFd(mResID);
+        mExtractor.setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getLength());
+        for (int trackID = 0; trackID < mExtractor.getTrackCount(); trackID++) {
+            MediaFormat format = mExtractor.getTrackFormat(trackID);
+            String mime = format.getString(MediaFormat.KEY_MIME);
+            if (mime.startsWith("video/")) {
+                mExtractor.selectTrack(trackID);
+                format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
+                        MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
+                return format;
+            }
+        }
+        mExtractor.release();
+        return null;
+    }
+
+    private MediaFormat setUpEncoderFormat(MediaFormat decoderFormat) {
+        MediaFormat encoderFormat = new MediaFormat();
+        encoderFormat.setString(MediaFormat.KEY_MIME, mMime);
+        encoderFormat
+                .setInteger(MediaFormat.KEY_WIDTH, decoderFormat.getInteger(MediaFormat.KEY_WIDTH));
+        encoderFormat.setInteger(MediaFormat.KEY_HEIGHT,
+                decoderFormat.getInteger(MediaFormat.KEY_HEIGHT));
+        encoderFormat.setInteger(MediaFormat.KEY_FRAME_RATE, VIDEO_FRAMERATE);
+        encoderFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_BITRATE);
+        encoderFormat.setFloat(MediaFormat.KEY_I_FRAME_INTERVAL, 1.0f);
+        encoderFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
+                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+        if (mMime.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
+            encoderFormat.setInteger(MediaFormat.KEY_PROFILE,
+                    MediaCodecInfo.CodecProfileLevel.HEVCProfileMain);
+            encoderFormat.setInteger(MediaFormat.KEY_LEVEL,
+                    MediaCodecInfo.CodecProfileLevel.HEVCMainTierLevel4);
+        } else {
+            encoderFormat.setInteger(MediaFormat.KEY_PROFILE,
+                    MediaCodecInfo.CodecProfileLevel.AVCProfileMain);
+            encoderFormat
+                    .setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel4);
+        }
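+        // KEY_MAX_B_FRAMES requires Android 10 (Q); on O through P the sample requests a
+        // one-frame latency instead as a rough substitute for limiting reordering depth.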
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+            encoderFormat.setInteger(MediaFormat.KEY_MAX_B_FRAMES, mMaxBFrames);
+        } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+            encoderFormat.setInteger(MediaFormat.KEY_LATENCY, 1);
+        }
+        return encoderFormat;
+    }
+
+    private void resetContext() {
+        mAsyncHandleDecoder.resetContext();
+        mAsyncHandleEncoder.resetContext();
+        mSawDecInputEOS = false;
+        mSawDecOutputEOS = false;
+        mSawEncOutputEOS = false;
+        mDecOutputCount = 0;
+        mEncOutputCount = 0;
+        mFrameNum = 0;
+        Arrays.fill(mFrameTypeOccurrences, 0);
+    }
+
+    private void configureCodec(MediaFormat decFormat, MediaFormat encFormat) {
+        resetContext();
+        mAsyncHandleEncoder.setCallBack(mEncoder, true);
+        mEncoder.configure(encFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+        mSurface = mEncoder.createInputSurface();
+        if (!mSurface.isValid()) {
+            Log.e(TAG, "Surface is not valid");
+            return;
+        }
+        mAsyncHandleDecoder.setCallBack(mDecoder, true);
+        mDecoder.configure(decFormat, mSurface, null, 0);
+        Log.d(TAG, "Codec configured");
+        if (DEBUG) {
+            Log.d(TAG, "Encoder Output format: " + mEncoder.getOutputFormat());
+        }
+    }
+
+    private void dequeueDecoderOutput(int bufferIndex, MediaCodec.BufferInfo info) {
+        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+            mSawDecOutputEOS = true;
+        }
+        if (DEBUG) {
+            Log.d(TAG,
+                    "output: id: " + bufferIndex + " flags: " + info.flags + " size: " + info.size +
+                            " timestamp: " + info.presentationTimeUs);
+        }
+        if (info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
+            mDecOutputCount++;
+        }
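+        // Releasing with render == true pushes the decoded frame onto the encoder's input
+        // surface (the decoder was configured with mSurface).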
+        mDecoder.releaseOutputBuffer(bufferIndex, mSurface != null);
+    }
+
+    private void enqueueDecoderInput(int bufferIndex) {
+        ByteBuffer inputBuffer = mDecoder.getInputBuffer(bufferIndex);
+        int size = mExtractor.readSampleData(inputBuffer, 0);
+        if (size < 0) {
+            enqueueDecoderEOS(bufferIndex);
+        } else {
+            long pts = mExtractor.getSampleTime();
+            int extractorFlags = mExtractor.getSampleFlags();
+            int codecFlags = 0;
+            if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
+                codecFlags |= MediaCodec.BUFFER_FLAG_KEY_FRAME;
+            }
+            if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_PARTIAL_FRAME) != 0) {
+                codecFlags |= MediaCodec.BUFFER_FLAG_PARTIAL_FRAME;
+            }
+            if (!mExtractor.advance()) {
+                codecFlags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
+                mSawDecInputEOS = true;
+            }
+            if (DEBUG) {
+                Log.d(TAG, "input: id: " + bufferIndex + " size: " + size + " pts: " + pts +
+                        " flags: " + codecFlags);
+            }
+            mDecoder.queueInputBuffer(bufferIndex, 0, size, pts, codecFlags);
+        }
+    }
+
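+    // Services decoder callbacks: output buffers are rendered to the encoder surface, input
+    // buffers are filled from the extractor, and encoder output is drained whenever the
+    // decoder runs more than mMaxBFrames frames ahead of the encoder.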
+    private void doWork(int frameLimit) throws InterruptedException {
+        int frameCount = 0;
+        while (!hasSeenError() && !mSawDecInputEOS && frameCount < frameLimit) {
+            Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleDecoder.getWork();
+            if (element != null) {
+                int bufferID = element.first;
+                MediaCodec.BufferInfo info = element.second;
+                if (info != null) {
+                    // <id, info> corresponds to output callback.
+                    dequeueDecoderOutput(bufferID, info);
+                } else {
+                    // <id, null> corresponds to input callback.
+                    enqueueDecoderInput(bufferID);
+                    frameCount++;
+                }
+            }
+            // check decoder EOS
+            if (mSawDecOutputEOS) mEncoder.signalEndOfInputStream();
+            // encoder output
+            if (mDecOutputCount - mEncOutputCount > mMaxBFrames) {
+                tryEncoderOutput();
+            }
+        }
+    }
+
+    private void queueEOS() throws InterruptedException {
+        while (!mAsyncHandleDecoder.hasSeenError() && !mSawDecInputEOS) {
+            Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleDecoder.getWork();
+            if (element != null) {
+                int bufferID = element.first;
+                MediaCodec.BufferInfo info = element.second;
+                if (info != null) {
+                    dequeueDecoderOutput(bufferID, info);
+                } else {
+                    enqueueDecoderEOS(element.first);
+                }
+            }
+        }
+
+        while (!hasSeenError() && !mSawDecOutputEOS) {
+            Pair<Integer, MediaCodec.BufferInfo> decOp = mAsyncHandleDecoder.getOutput();
+            if (decOp != null) dequeueDecoderOutput(decOp.first, decOp.second);
+            if (mSawDecOutputEOS) mEncoder.signalEndOfInputStream();
+            if (mDecOutputCount - mEncOutputCount > mMaxBFrames) {
+                tryEncoderOutput();
+            }
+        }
+    }
+
+    private void tryEncoderOutput() throws InterruptedException {
+        if (!hasSeenError() && !mSawEncOutputEOS) {
+            Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleEncoder.getOutput();
+            if (element != null) {
+                dequeueEncoderOutput(element.first, element.second);
+            }
+        }
+    }
+
+    private void waitForAllEncoderOutputs() throws InterruptedException {
+        while (!hasSeenError() && !mSawEncOutputEOS) {
+            tryEncoderOutput();
+        }
+    }
+
+    private void enqueueDecoderEOS(int bufferIndex) {
+        if (!mSawDecInputEOS) {
+            mDecoder.queueInputBuffer(bufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+            mSawDecInputEOS = true;
+            Log.d(TAG, "Queued End of Stream");
+        }
+    }
+
+    private void dequeueEncoderOutput(int bufferIndex, MediaCodec.BufferInfo info) {
+        if (DEBUG) {
+            Log.d(TAG, "encoder output: id: " + bufferIndex + " flags: " + info.flags + " size: " +
+                    info.size + " timestamp: " + info.presentationTimeUs);
+        }
+        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+            mSawEncOutputEOS = true;
+        }
+        if (info.size > 0) {
+            ByteBuffer buf = mEncoder.getOutputBuffer(bufferIndex);
+            // Parse the buffer to get the frame type
+            if (DEBUG) Log.d(TAG, "[ Frame : " + (mFrameNum++) + " ]");
+            int frameTypeResult = -1;
+            if (mMime.equals(MediaFormat.MIMETYPE_VIDEO_AVC)) {
+                frameTypeResult = NalUnitUtil.getStandardizedFrameTypesFromAVC(buf);
+            } else if (mMime.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
+                frameTypeResult = NalUnitUtil.getStandardizedFrameTypesFromHEVC(buf);
+            } else {
+                Log.e(TAG, "Mime type " + mMime + " is not supported.");
+                return;
+            }
+            if (frameTypeResult != -1) {
+                mFrameTypeOccurrences[frameTypeResult]++;
+            }
+
+            if (mMuxer != null) {
+                if (mTrackID == -1) {
+                    mTrackID = mMuxer.addTrack(mEncoder.getOutputFormat());
+                    mMuxer.start();
+                }
+                mMuxer.writeSampleData(mTrackID, buf, info);
+            }
+            if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
+                mEncOutputCount++;
+            }
+        }
+        mEncoder.releaseOutputBuffer(bufferIndex, false);
+    }
+
+    private boolean hasSeenError() {
+        return mAsyncHandleDecoder.hasSeenError() || mAsyncHandleEncoder.hasSeenError();
+    }
+
+    public int[] getFrameTypes() {
+        return mFrameTypeOccurrences;
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/NalUnitUtil.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/NalUnitUtil.java
new file mode 100644
index 0000000..efff4fd
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/NalUnitUtil.java
@@ -0,0 +1,168 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.samplevideoencoder;
+
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_B;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_I;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_P;
+
+public class NalUnitUtil {
+    private static final String TAG = NalUnitUtil.class.getSimpleName();
+    private static final boolean DEBUG = false;
+
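+    // Returns the length of the NAL start code (3 or 4 bytes) beginning at pos, or 0 if no
+    // start code is found there.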
+    public static int findNalUnit(byte[] dataArray, int pos, int limit) {
+        int startOffset = 0;
+        if (limit - pos < 4) {
+            return startOffset;
+        }
+        if (dataArray[pos] == 0 && dataArray[pos + 1] == 0 && dataArray[pos + 2] == 1) {
+            startOffset = 3;
+        } else {
+            if (dataArray[pos] == 0 && dataArray[pos + 1] == 0 && dataArray[pos + 2] == 0 &&
+                    dataArray[pos + 3] == 1) {
+                startOffset = 4;
+            }
+        }
+        return startOffset;
+    }
+
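+    // nal_unit_type is the low 5 bits of the first byte after the start code (H.264).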
+    private static int getAVCNalUnitType(byte[] dataArray, int nalUnitOffset) {
+        return dataArray[nalUnitOffset] & 0x1F;
+    }
+
+    private static int parseAVCNALUnitData(byte[] dataArray, int offset, int limit) {
+        ParsableBitArray bitArray = new ParsableBitArray(dataArray);
+        bitArray.reset(dataArray, offset, limit);
+
+        bitArray.skipBit(); // forbidden_zero_bit
+        bitArray.readBits(2); // nal_ref_idc
+        bitArray.skipBits(5); // nal_unit_type
+
+        bitArray.readUEV(); // first_mb_in_slice
+        if (!bitArray.canReadUEV()) {
+            return -1;
+        }
+        int sliceType = bitArray.readUEV();
+        if (DEBUG) Log.d(TAG, "slice_type = " + sliceType);
+        if (sliceType == 0) {
+            return FRAME_TYPE_P;
+        } else if (sliceType == 1) {
+            return FRAME_TYPE_B;
+        } else if (sliceType == 2) {
+            return FRAME_TYPE_I;
+        } else {
+            return -1;
+        }
+    }
+
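+    // nal_unit_type is the 6 bits following the forbidden_zero_bit in the first header byte
+    // (H.265).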
+    private static int getHEVCNalUnitType(byte[] dataArray, int nalUnitOffset) {
+        return (dataArray[nalUnitOffset] & 0x7E) >> 1;
+    }
+
+    private static int parseHEVCNALUnitData(byte[] dataArray, int offset, int limit,
+                                            int nalUnitType) {
+        // nal_unit_type values from H.265/HEVC Table 7-1.
+        final int BLA_W_LP = 16;
+        final int RSV_IRAP_VCL23 = 23;
+
+        ParsableBitArray bitArray = new ParsableBitArray(dataArray);
+        bitArray.reset(dataArray, offset, limit);
+
+        bitArray.skipBit(); // forbidden zero bit
+        bitArray.readBits(6); // nal_unit_type
+        bitArray.readBits(6); // nuh_layer_id
+        bitArray.readBits(3); // nuh_temporal_id_plus1
+
+        // Parsing slice_segment_header values from H.265/HEVC Table 7.3.6.1
+        boolean first_slice_segment = bitArray.readBit(); // first_slice_segment_in_pic_flag
+        if (!first_slice_segment) return -1;
+        if (nalUnitType >= BLA_W_LP && nalUnitType <= RSV_IRAP_VCL23) {
+            bitArray.readBit();  // no_output_of_prior_pics_flag
+        }
+        bitArray.readUEV(); // slice_pic_parameter_set_id
+        // Assume the num_extra_slice_header_bits element of the PPS is 0.
+        int sliceType = bitArray.readUEV();
+        if (DEBUG) Log.d(TAG, "slice_type = " + sliceType);
+        if (sliceType == 0) {
+            return FRAME_TYPE_B;
+        } else if (sliceType == 1) {
+            return FRAME_TYPE_P;
+        } else if (sliceType == 2) {
+            return FRAME_TYPE_I;
+        } else {
+            return -1;
+        }
+    }
+
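+    // Scans the encoded buffer for the first slice NAL unit and returns its frame type
+    // (FRAME_TYPE_I/P/B), or -1 if no slice header could be parsed.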
+    public static int getStandardizedFrameTypesFromAVC(ByteBuffer buf) {
+        int limit = buf.limit();
+        byte[] dataArray = new byte[buf.remaining()];
+        buf.get(dataArray);
+        int frameType = -1;
+        for (int pos = 0; pos + 3 < limit; ) {
+            int startOffset = NalUnitUtil.findNalUnit(dataArray, pos, limit);
+            if (startOffset != 0) {
+                int nalUnitType = getAVCNalUnitType(dataArray, (pos + startOffset));
+                if (DEBUG) {
+                    Log.d(TAG, "NalUnitOffset = " + (pos + startOffset));
+                    Log.d(TAG, "NalUnitType = " + nalUnitType);
+                }
+                // SLICE_NAL = 1; IDR_SLICE_NAL = 5
+                if (nalUnitType == 1 || nalUnitType == 5) {
+                    frameType = parseAVCNALUnitData(dataArray, (pos + startOffset),
+                            (limit - pos - startOffset));
+                    break;
+                }
+                pos += 3;
+            } else {
+                pos++;
+            }
+        }
+        return frameType;
+    }
+
+    public static int getStandardizedFrameTypesFromHEVC(ByteBuffer buf) {
+        int limit = buf.limit();
+        byte[] dataArray = new byte[buf.remaining()];
+        buf.get(dataArray);
+        int frameType = -1;
+        for (int pos = 0; pos + 3 < limit; ) {
+            int startOffset = NalUnitUtil.findNalUnit(dataArray, pos, limit);
+            if (startOffset != 0) {
+                int nalUnitType = NalUnitUtil.getHEVCNalUnitType(dataArray, (pos + startOffset));
+                if (DEBUG) {
+                    Log.d(TAG, "NalUnitOffset = " + (pos + startOffset));
+                    Log.d(TAG, "NalUnitType = " + nalUnitType);
+                }
+                // Parse NAL units that contain slice headers (nal_unit_type 0 to 21)
+                if (nalUnitType >= 0 && nalUnitType <= 21) {
+                    frameType = parseHEVCNALUnitData(dataArray, (pos + startOffset),
+                            (limit - pos - startOffset), nalUnitType);
+                    break;
+                }
+                pos += 3;
+            } else {
+                pos++;
+            }
+        }
+        return frameType;
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/ParsableBitArray.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/ParsableBitArray.java
new file mode 100644
index 0000000..e4bfaa3
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/ParsableBitArray.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.samplevideoencoder;
+
+public class ParsableBitArray {
+    public byte[] data;
+    private int byteOffset;
+    private int bitOffset;
+    private int byteLimit;
+
+    public ParsableBitArray(byte[] dataArray) {
+        this(dataArray, dataArray.length);
+    }
+
+    public ParsableBitArray(byte[] dataArray, int limit) {
+        this.data = dataArray;
+        byteLimit = limit;
+    }
+
+    public void reset(byte[] data, int offset, int limit) {
+        this.data = data;
+        byteOffset = offset;
+        bitOffset = 0;
+        byteLimit = limit;
+    }
+
+    public void skipBit() {
+        if (++bitOffset == 8) {
+            bitOffset = 0;
+            byteOffset++;
+        }
+    }
+
+    public void skipBits(int numBits) {
+        int numBytes = numBits / 8;
+        byteOffset += numBytes;
+        bitOffset += numBits - (numBytes * 8);
+        if (bitOffset > 7) {
+            byteOffset++;
+            bitOffset -= 8;
+        }
+    }
+
+    public boolean readBit() {
+        boolean returnValue = (data[byteOffset] & (0x80 >> bitOffset)) != 0;
+        skipBit();
+        return returnValue;
+    }
+
+    public int readBits(int numBits) {
+        if (numBits == 0) {
+            return 0;
+        }
+        int returnValue = 0;
+        bitOffset += numBits;
+        while (bitOffset > 8) {
+            bitOffset -= 8;
+            returnValue |= (data[byteOffset++] & 0xFF) << bitOffset;
+        }
+        returnValue |= (data[byteOffset] & 0xFF) >> (8 - bitOffset);
+        returnValue &= 0xFFFFFFFF >>> (32 - numBits);
+        if (bitOffset == 8) {
+            bitOffset = 0;
+            byteOffset++;
+        }
+        return returnValue;
+    }
+
+    public boolean canReadUEV() {
+        int initialByteOffset = byteOffset;
+        int initialBitOffset = bitOffset;
+        int leadingZeros = 0;
+        while (byteOffset < byteLimit && !readBit()) {
+            leadingZeros++;
+        }
+        boolean hitLimit = byteOffset == byteLimit;
+        byteOffset = initialByteOffset;
+        bitOffset = initialBitOffset;
+        return !hitLimit && canReadBits(leadingZeros * 2 + 1);
+    }
+
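+    // Reads an unsigned Exp-Golomb (ue(v)) value: count leading zero bits, then read that many
+    // bits as the suffix.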
+    public int readUEV() {
+        int leadingZeros = 0;
+        while (!readBit()) {
+            leadingZeros++;
+        }
+        return (1 << leadingZeros) - 1 + (leadingZeros > 0 ? readBits(leadingZeros) : 0);
+    }
+
+    public boolean canReadBits(int numBits) {
+        int oldByteOffset = byteOffset;
+        int numBytes = numBits / 8;
+        int newByteOffset = byteOffset + numBytes;
+        int newBitOffset = bitOffset + numBits - (numBytes * 8);
+        if (newBitOffset > 7) {
+            newByteOffset++;
+            newBitOffset -= 8;
+        }
+        for (int i = oldByteOffset + 1; i <= newByteOffset && newByteOffset < byteLimit; i++) {
+            if (shouldSkipByte(i)) {
+                // Skip the byte and check three bytes ahead.
+                newByteOffset++;
+                i += 2;
+            }
+        }
+        return newByteOffset < byteLimit || (newByteOffset == byteLimit && newBitOffset == 0);
+    }
+
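+    // A 0x03 byte preceded by two 0x00 bytes is an emulation prevention byte and carries no
+    // payload bits.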
+    private boolean shouldSkipByte(int offset) {
+        return (2 <= offset && offset < byteLimit && data[offset] == (byte) 0x03 &&
+                data[offset - 2] == (byte) 0x00 && data[offset - 1] == (byte) 0x00);
+    }
+
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/drawable-v24/ic_launcher_foreground.xml b/media/tests/SampleVideoEncoder/app/src/main/res/drawable-v24/ic_launcher_foreground.xml
new file mode 100644
index 0000000..2b068d1
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/drawable-v24/ic_launcher_foreground.xml
@@ -0,0 +1,30 @@
+<vector xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:aapt="http://schemas.android.com/aapt"
+    android:width="108dp"
+    android:height="108dp"
+    android:viewportWidth="108"
+    android:viewportHeight="108">
+    <path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
+        <aapt:attr name="android:fillColor">
+            <gradient
+                android:endX="85.84757"
+                android:endY="92.4963"
+                android:startX="42.9492"
+                android:startY="49.59793"
+                android:type="linear">
+                <item
+                    android:color="#44000000"
+                    android:offset="0.0" />
+                <item
+                    android:color="#00000000"
+                    android:offset="1.0" />
+            </gradient>
+        </aapt:attr>
+    </path>
+    <path
+        android:fillColor="#FFFFFF"
+        android:fillType="nonZero"
+        android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
+        android:strokeWidth="1"
+        android:strokeColor="#00000000" />
+</vector>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/drawable/ic_launcher_background.xml b/media/tests/SampleVideoEncoder/app/src/main/res/drawable/ic_launcher_background.xml
new file mode 100644
index 0000000..07d5da9
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/drawable/ic_launcher_background.xml
@@ -0,0 +1,170 @@
+<?xml version="1.0" encoding="utf-8"?>
+<vector xmlns:android="http://schemas.android.com/apk/res/android"
+    android:width="108dp"
+    android:height="108dp"
+    android:viewportWidth="108"
+    android:viewportHeight="108">
+    <path
+        android:fillColor="#3DDC84"
+        android:pathData="M0,0h108v108h-108z" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M9,0L9,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,0L19,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M29,0L29,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M39,0L39,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M49,0L49,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M59,0L59,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M69,0L69,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M79,0L79,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M89,0L89,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M99,0L99,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,9L108,9"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,19L108,19"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,29L108,29"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,39L108,39"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,49L108,49"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,59L108,59"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,69L108,69"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,79L108,79"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,89L108,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,99L108,99"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,29L89,29"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,39L89,39"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,49L89,49"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,59L89,59"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,69L89,69"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,79L89,79"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M29,19L29,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M39,19L39,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M49,19L49,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M59,19L59,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M69,19L69,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M79,19L79,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+</vector>
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml b/media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml
new file mode 100644
index 0000000..017012d
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml
@@ -0,0 +1,138 @@
+<?xml version="1.0" encoding="utf-8"?>
+<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    xmlns:tools="http://schemas.android.com/tools"
+    android:id="@+id/container"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:layout_gravity="center"
+    tools:context="com.android.media.samplevideoencoder.MainActivity">
+
+    <com.android.media.samplevideoencoder.AutoFitTextureView
+        android:id="@+id/texture"
+        android:layout_width="wrap_content"
+        android:layout_height="300dp"
+        android:layout_alignParentStart="true"
+        android:layout_alignParentTop="true"
+        android:layout_marginBottom="16dp"
+        android:gravity="center"
+        app:layout_constraintBottom_toTopOf="@+id/checkBox_media_recorder"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toTopOf="parent" />
+
+    <CheckBox
+        android:id="@+id/checkBox_media_recorder"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginStart="90dp"
+        android:layout_marginTop="10dp"
+        android:fontFamily="sans-serif-medium"
+        android:text="@string/media_recorder"
+        android:textAppearance="@style/TextAppearance.AppCompat.Large"
+        android:textStyle="normal"
+        android:checked="true"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toBottomOf="@+id/texture"/>
+
+    <CheckBox
+        android:id="@+id/checkBox_media_codec"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginStart="90dp"
+        android:fontFamily="sans-serif-medium"
+        android:text="@string/media_codec"
+        android:textAppearance="@style/TextAppearance.AppCompat.Large"
+        android:textStyle="normal"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toBottomOf="@+id/checkBox_media_recorder" />
+
+    <RadioGroup
+        android:id="@+id/radio_group_mime"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginStart="40dp"
+        android:layout_marginTop="10dp"
+        android:orientation="vertical"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintBottom_toTopOf="@+id/frameLayout2"
+        app:layout_constraintTop_toBottomOf="@+id/checkBox_media_codec">
+
+        <RadioButton
+            android:id="@+id/avc"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_weight="1"
+            android:checked="true"
+            android:text="@string/avc" />
+
+        <RadioButton
+            android:id="@+id/hevc"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="@string/hevc" />
+    </RadioGroup>
+
+    <RadioGroup
+        android:id="@+id/radio_group_codec"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginTop="10dp"
+        android:layout_marginEnd="40dp"
+        android:orientation="vertical"
+        app:layout_constraintEnd_toEndOf="parent"
+        app:layout_constraintTop_toBottomOf="@+id/checkBox_media_codec">
+
+        <RadioButton
+            android:id="@+id/hw"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_weight="1"
+            android:checked="true"
+            android:enabled="false"
+            android:text="@string/hardware" />
+
+        <RadioButton
+            android:id="@+id/sw"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_weight="1"
+            android:enabled="false"
+            android:text="@string/software" />
+    </RadioGroup>
+
+    <FrameLayout
+        android:id="@+id/frameLayout2"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:layout_below="@id/radio_group_mime"
+        android:layout_alignParentStart="true"
+        android:layout_alignParentBottom="true"
+        android:layout_marginTop="10dp"
+        android:background="@color/colorPrimary"
+        app:layout_constraintTop_toBottomOf="@+id/radio_group_mime"
+        tools:layout_editor_absoluteX="80dp">
+
+        <Button
+            android:id="@+id/start_button"
+            android:layout_width="108dp"
+            android:layout_height="wrap_content"
+            android:layout_gravity="center"
+            android:gravity="center"
+            android:text="@string/start_button"
+            tools:layout_editor_absoluteX="155dp"
+            tools:layout_editor_absoluteY="455dp" />
+
+    </FrameLayout>
+
+    <TextView
+        android:id="@+id/textViewResults"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginTop="10dp"
+        android:fontFamily="sans-serif-medium"
+        android:textSize="18sp"
+        android:textStyle="normal"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toBottomOf="@+id/frameLayout2" />
+
+</androidx.constraintlayout.widget.ConstraintLayout>
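Note: this layout exposes the sample app's encoder choices (MediaRecorder vs. MediaCodec, AVC vs. HEVC, H/W vs. S/W) to MainActivity through the view ids above. A hypothetical sketch (method name and locals assumed, not taken from this patch) of how an activity could read that selection before starting an encode:

    // Hypothetical sketch: reads the encoder selection from activity_main.xml.
    // Only the R.id.* names come from the layout; everything else is illustrative.
    private void readSelection() {
        CheckBox recorderBox = findViewById(R.id.checkBox_media_recorder); // android.widget.CheckBox
        RadioGroup mimeGroup = findViewById(R.id.radio_group_mime);        // android.widget.RadioGroup
        boolean useMediaRecorder = recorderBox.isChecked();
        String mimeType = (mimeGroup.getCheckedRadioButtonId() == R.id.hevc)
                ? MediaFormat.MIMETYPE_VIDEO_HEVC  // android.media.MediaFormat
                : MediaFormat.MIMETYPE_VIDEO_AVC;
        // useMediaRecorder and mimeType would then configure the chosen encode path.
    }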
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
new file mode 100644
index 0000000..eca70cf
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
+    <background android:drawable="@drawable/ic_launcher_background" />
+    <foreground android:drawable="@drawable/ic_launcher_foreground" />
+</adaptive-icon>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
new file mode 100644
index 0000000..eca70cf
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
+    <background android:drawable="@drawable/ic_launcher_background" />
+    <foreground android:drawable="@drawable/ic_launcher_foreground" />
+</adaptive-icon>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/raw/crowd_1920x1080_25fps_4000kbps_h265.mp4 b/media/tests/SampleVideoEncoder/app/src/main/res/raw/crowd_1920x1080_25fps_4000kbps_h265.mp4
new file mode 100644
index 0000000..6204008
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/raw/crowd_1920x1080_25fps_4000kbps_h265.mp4
Binary files differ
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/values/colors.xml b/media/tests/SampleVideoEncoder/app/src/main/res/values/colors.xml
new file mode 100644
index 0000000..4faecfa
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/values/colors.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <color name="colorPrimary">#6200EE</color>
+    <color name="colorPrimaryDark">#3700B3</color>
+    <color name="colorAccent">#03DAC5</color>
+</resources>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/values/strings.xml b/media/tests/SampleVideoEncoder/app/src/main/res/values/strings.xml
new file mode 100644
index 0000000..f825a7f
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/values/strings.xml
@@ -0,0 +1,13 @@
+<resources>
+    <string name="app_name">SampleVideoEncoder</string>
+    <string name="media_recorder">MediaRecorder</string>
+    <string name="media_codec">MediaCodec</string>
+    <string name="start_button">Start</string>
+    <string name="stop">Stop</string>
+    <string name="start">Start</string>
+    <string name="avc">AVC</string>
+    <string name="hevc">HEVC</string>
+    <string name="hardware">H/W</string>
+    <string name="software">S/W</string>
+
+</resources>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/values/styles.xml b/media/tests/SampleVideoEncoder/app/src/main/res/values/styles.xml
new file mode 100644
index 0000000..fac9291
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/values/styles.xml
@@ -0,0 +1,10 @@
+<resources>
+    <!-- Base application theme. -->
+    <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
+        <!-- Customize your theme here. -->
+        <item name="colorPrimary">@color/colorPrimary</item>
+        <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
+        <item name="colorAccent">@color/colorAccent</item>
+    </style>
+
+</resources>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/build.gradle b/media/tests/SampleVideoEncoder/build.gradle
new file mode 100644
index 0000000..4ca0c7e
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/build.gradle
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+buildscript {
+    repositories {
+        google()
+        jcenter()
+    }
+    dependencies {
+        classpath 'com.android.tools.build:gradle:4.1.1'
+    }
+}
+
+allprojects {
+    repositories {
+        google()
+        jcenter()
+    }
+}
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/gradle.properties b/media/tests/SampleVideoEncoder/gradle.properties
new file mode 100644
index 0000000..5ae443b
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradle.properties
@@ -0,0 +1,4 @@
+# Project-wide Gradle settings.
+org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
+android.useAndroidX=true
+android.enableJetifier=true
diff --git a/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.jar b/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000..f6b961f
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.properties b/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..a9a12eb
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Wed Dec 16 10:06:45 IST 2020
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip
diff --git a/media/tests/SampleVideoEncoder/gradlew b/media/tests/SampleVideoEncoder/gradlew
new file mode 100644
index 0000000..cccdd3d
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradlew
@@ -0,0 +1,172 @@
+#!/usr/bin/env sh
+
+##############################################################################
+##
+##  Gradle start up script for UN*X
+##
+##############################################################################
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+        PRG="$link"
+    else
+        PRG=`dirname "$PRG"`"/$link"
+    fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn () {
+    echo "$*"
+}
+
+die () {
+    echo
+    echo "$*"
+    echo
+    exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+  CYGWIN* )
+    cygwin=true
+    ;;
+  Darwin* )
+    darwin=true
+    ;;
+  MINGW* )
+    msys=true
+    ;;
+  NONSTOP* )
+    nonstop=true
+    ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+        JAVACMD="$JAVA_HOME/bin/java"
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD="java"
+    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+    MAX_FD_LIMIT=`ulimit -H -n`
+    if [ $? -eq 0 ] ; then
+        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+            MAX_FD="$MAX_FD_LIMIT"
+        fi
+        ulimit -n $MAX_FD
+        if [ $? -ne 0 ] ; then
+            warn "Could not set maximum file descriptor limit: $MAX_FD"
+        fi
+    else
+        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+    fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+    JAVACMD=`cygpath --unix "$JAVACMD"`
+
+    # We build the pattern for arguments to be converted via cygpath
+    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+    SEP=""
+    for dir in $ROOTDIRSRAW ; do
+        ROOTDIRS="$ROOTDIRS$SEP$dir"
+        SEP="|"
+    done
+    OURCYGPATTERN="(^($ROOTDIRS))"
+    # Add a user-defined pattern to the cygpath arguments
+    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+    fi
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    i=0
+    for arg in "$@" ; do
+        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option
+
+        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
+            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+        else
+            eval `echo args$i`="\"$arg\""
+        fi
+        i=$((i+1))
+    done
+    case $i in
+        (0) set -- ;;
+        (1) set -- "$args0" ;;
+        (2) set -- "$args0" "$args1" ;;
+        (3) set -- "$args0" "$args1" "$args2" ;;
+        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+    esac
+fi
+
+# Escape application args
+save () {
+    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+    echo " "
+}
+APP_ARGS=$(save "$@")
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
+if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
+  cd "$(dirname "$0")"
+fi
+
+exec "$JAVACMD" "$@"
diff --git a/media/tests/SampleVideoEncoder/gradlew.bat b/media/tests/SampleVideoEncoder/gradlew.bat
new file mode 100644
index 0000000..f955316
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradlew.bat
@@ -0,0 +1,84 @@
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem  Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS=
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windows variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if  not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/media/tests/SampleVideoEncoder/settings.gradle b/media/tests/SampleVideoEncoder/settings.gradle
new file mode 100644
index 0000000..4d3c3a5
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/settings.gradle
@@ -0,0 +1,2 @@
+include ':app'
+rootProject.name = "SampleVideoEncoder"
\ No newline at end of file
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 748afeb..59c2e65 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -37,17 +37,23 @@
     ],
     static_libs: [
         "libc_malloc_debug_backtrace",
+        "libbatterystats_aidl",
+        "libprocessinfoservice_aidl",
     ],
     shared_libs: [
         "libaudioutils", // for clock.h
         "libbinder",
         "libcutils",
         "liblog",
+        "libpermission",
         "libutils",
         "libhidlbase",
         "android.hardware.graphics.bufferqueue@1.0",
         "android.hidl.token@1.0-utils",
     ],
+    export_static_lib_headers: [
+        "libbatterystats_aidl",
+    ],
 
     logtags: ["EventLogTags.logtags"],
 
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index 113e4a7..e9c9f8d 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -21,9 +21,9 @@
 #include <media/stagefright/ProcessInfo.h>
 
 #include <binder/IPCThreadState.h>
-#include <binder/IProcessInfoService.h>
 #include <binder/IServiceManager.h>
 #include <private/android_filesystem_config.h>
+#include <processinfo/IProcessInfoService.h>
 
 namespace android {
 
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
index 80882b2..5c03926 100644
--- a/media/utils/fuzzers/Android.bp
+++ b/media/utils/fuzzers/Android.bp
@@ -10,6 +10,7 @@
 cc_defaults {
     name: "libmediautils_fuzzer_defaults",
     shared_libs: [
+        "libbatterystats_aidl",
         "libbinder",
         "libcutils",
         "liblog",
diff --git a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
index 4521853..130feee 100644
--- a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
+++ b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 #define LOG_TAG "BatteryNotifierFuzzer"
-#include <binder/IBatteryStats.h>
+#include <batterystats/IBatteryStats.h>
 #include <binder/IServiceManager.h>
 #include <utils/String16.h>
 #include <android/log.h>
diff --git a/media/utils/include/mediautils/BatteryNotifier.h b/media/utils/include/mediautils/BatteryNotifier.h
index a4e42ad..3812d7a 100644
--- a/media/utils/include/mediautils/BatteryNotifier.h
+++ b/media/utils/include/mediautils/BatteryNotifier.h
@@ -17,7 +17,7 @@
 #ifndef MEDIA_BATTERY_NOTIFIER_H
 #define MEDIA_BATTERY_NOTIFIER_H
 
-#include <binder/IBatteryStats.h>
+#include <batterystats/IBatteryStats.h>
 #include <utils/Singleton.h>
 #include <utils/String8.h>
 
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 3c9897d..ff3bfd2 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -69,6 +69,7 @@
         "libmediautils",
         "libnbaio",
         "libnblog",
+        "libpermission",
         "libpowermanager",
         "libmediautils",
         "libmemunreachable",
@@ -86,6 +87,10 @@
         "libmedia_headers",
     ],
 
+    export_shared_lib_headers: [
+        "libpermission",
+    ],
+
     cflags: [
         "-DSTATE_QUEUE_INSTANTIATIONS=\"StateQueueInstantiations.cpp\"",
         "-fvisibility=hidden",
diff --git a/services/audioflinger/AudioStreamOut.cpp b/services/audioflinger/AudioStreamOut.cpp
index 7e06096..d8565bd 100644
--- a/services/audioflinger/AudioStreamOut.cpp
+++ b/services/audioflinger/AudioStreamOut.cpp
@@ -173,22 +173,15 @@
     return status;
 }
 
-audio_format_t AudioStreamOut::getFormat() const
+audio_config_base_t AudioStreamOut::getAudioProperties() const
 {
-    audio_format_t result;
-    return stream->getFormat(&result) == OK ? result : AUDIO_FORMAT_INVALID;
-}
-
-uint32_t AudioStreamOut::getSampleRate() const
-{
-    uint32_t result;
-    return stream->getSampleRate(&result) == OK ? result : 0;
-}
-
-audio_channel_mask_t AudioStreamOut::getChannelMask() const
-{
-    audio_channel_mask_t result;
-    return stream->getChannelMask(&result) == OK ? result : AUDIO_CHANNEL_INVALID;
+    audio_config_base_t result = AUDIO_CONFIG_BASE_INITIALIZER;
+    if (stream->getAudioProperties(&result) != OK) {
+        result.sample_rate = 0;
+        result.channel_mask = AUDIO_CHANNEL_INVALID;
+        result.format = AUDIO_FORMAT_INVALID;
+    }
+    return result;
 }
 
 int AudioStreamOut::flush()
diff --git a/services/audioflinger/AudioStreamOut.h b/services/audioflinger/AudioStreamOut.h
index 16fbcf2..565f43a 100644
--- a/services/audioflinger/AudioStreamOut.h
+++ b/services/audioflinger/AudioStreamOut.h
@@ -81,22 +81,14 @@
     virtual size_t getFrameSize() const { return mHalFrameSize; }
 
     /**
-     * @return format from the perspective of the application and the AudioFlinger.
+     * @return the audio stream configuration (channel mask, format, sample rate):
+     *   - channel mask from the perspective of the application and the AudioFlinger;
+     *     the HAL is in stereo mode when playing multi-channel compressed audio over HDMI;
+     *   - format from the perspective of the application and the AudioFlinger;
+     *   - sample rate from the perspective of the application and the AudioFlinger;
+     *     the HAL may be running at a higher sample rate if, for example, playing wrapped EAC3.
      */
-    virtual audio_format_t getFormat() const;
-
-    /**
-     * The HAL may be running at a higher sample rate if, for example, playing wrapped EAC3.
-     * @return sample rate from the perspective of the application and the AudioFlinger.
-     */
-    virtual uint32_t getSampleRate() const;
-
-    /**
-     * The HAL is in stereo mode when playing multi-channel compressed audio over HDMI.
-     * @return channel mask from the perspective of the application and the AudioFlinger.
-     */
-    virtual audio_channel_mask_t getChannelMask() const;
-
+    virtual audio_config_base_t getAudioProperties() const;
 
     virtual status_t flush();
     virtual status_t standby();
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index cd3c743..13e2ced 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -353,7 +353,8 @@
 #endif
         //ALOGD("Eric FastMixer::onWork() mIsWarm");
     } else {
-        dumpState->mTimestampVerifier.discontinuity();
+        dumpState->mTimestampVerifier.discontinuity(
+            dumpState->mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
         // See comment in if block.
 #ifdef FASTMIXER_LOG_HIST_TS
         LOG_AUDIO_STATE();
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 7804822..9770054 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -217,6 +217,10 @@
     void flushAck();
     bool isResumePending();
     void resumeAck();
+    // For direct or offloaded tracks, ensure that the pause state is acknowledged
+    // by the playback thread even in the case of an immediate flush.
+    bool isPausePending() const { return mPauseHwPending; }
+    void pauseAck();
     void updateTrackFrameInfo(int64_t trackFramesReleased, int64_t sinkFramesWritten,
             uint32_t halSampleRate, const ExtendedTimestamp &timeStamp);
 
@@ -284,8 +288,6 @@
     };
     sp<AudioVibrationController> mAudioVibrationController;
     sp<os::ExternalVibration>    mExternalVibration;
-    /** How many frames should be in the buffer before the track is considered ready */
-    const size_t        mFrameCountToBeReady;
 
     audio_dual_mono_mode_t mDualMonoMode = AUDIO_DUAL_MONO_MODE_OFF;
     float               mAudioDescriptionMixLevel = -std::numeric_limits<float>::infinity();
@@ -314,6 +316,7 @@
     sp<AudioTrackServerProxy>  mAudioTrackServerProxy;
     bool                mResumeToStopping; // track was paused in stopping state.
     bool                mFlushHwPending; // track requests for thread flush
+    bool                mPauseHwPending = false; // direct/offload track request for thread pause
     audio_output_flags_t mFlags;
     // If the last track change was notified to the client with readAndClearHasChanged
     std::atomic_flag     mChangeNotified = ATOMIC_FLAG_INIT;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 927d87e..deb13af 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -622,7 +622,7 @@
     mIoJitterMs.reset();
     mLatencyMs.reset();
     mProcessTimeMs.reset();
-    mTimestampVerifier.discontinuity();
+    mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
 
     sp<ConfigEvent> configEvent = (ConfigEvent *)new IoConfigEvent(event, pid, portId);
     sendConfigEvent_l(configEvent);
@@ -2719,7 +2719,7 @@
         // the timestamp frame position to reset to 0 for direct and offload threads.
         // (Out of sequence requests are ignored, since the discontinuity would be handled
         // elsewhere, e.g. in flush).
-        mTimestampVerifier.discontinuity();
+        mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
         mDrainSequence &= ~1;
         mWaitWorkCV.signal();
     }
@@ -2728,8 +2728,9 @@
 void AudioFlinger::PlaybackThread::readOutputParameters_l()
 {
     // unfortunately we have no way of recovering from errors here, hence the LOG_ALWAYS_FATAL
-    mSampleRate = mOutput->getSampleRate();
-    mChannelMask = mOutput->getChannelMask();
+    const audio_config_base_t audioConfig = mOutput->getAudioProperties();
+    mSampleRate = audioConfig.sample_rate;
+    mChannelMask = audioConfig.channel_mask;
     if (!audio_is_output_channel(mChannelMask)) {
         LOG_ALWAYS_FATAL("HAL channel mask %#x not valid for output", mChannelMask);
     }
@@ -2742,11 +2743,11 @@
     mBalance.setChannelMask(mChannelMask);
 
     // Get actual HAL format.
-    status_t result = mOutput->stream->getFormat(&mHALFormat);
+    status_t result = mOutput->stream->getAudioProperties(nullptr, nullptr, &mHALFormat);
     LOG_ALWAYS_FATAL_IF(result != OK, "Error when retrieving output stream format: %d", result);
     // Get format from the shim, which will be different than the HAL format
     // if playing compressed audio over HDMI passthrough.
-    mFormat = mOutput->getFormat();
+    mFormat = audioConfig.format;
     if (!audio_is_valid_format(mFormat)) {
         LOG_ALWAYS_FATAL("HAL format %#x not valid for output", mFormat);
     }
@@ -3408,7 +3409,6 @@
 
     mStandbyTimeNs = systemTime();
     int64_t lastLoopCountWritten = -2; // never matches "previous" loop, when loopCount = 0.
-    int64_t lastFramesWritten = -1;    // track changes in timestamp server frames written
 
     // MIXER
     nsecs_t lastWarning = 0;
@@ -3444,14 +3444,6 @@
 
     checkSilentMode_l();
 
-    // DIRECT and OFFLOAD threads should reset frame count to zero on stop/flush
-    // TODO: add confirmation checks:
-    // 1) DIRECT threads and linear PCM format really resets to 0?
-    // 2) Is frame count really valid if not linear pcm?
-    // 3) Are all 64 bits of position returned, not just lowest 32 bits?
-    if (mType == OFFLOAD || mType == DIRECT) {
-        mTimestampVerifier.setDiscontinuityMode(mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
-    }
     audio_patch_handle_t lastDownstreamPatchHandle = AUDIO_PATCH_HANDLE_NONE;
 
     // loopCount is used for statistics and diagnostics.
@@ -3523,135 +3515,8 @@
                 logString = NULL;
             }
 
-            // Collect timestamp statistics for the Playback Thread types that support it.
-            if (mType == MIXER
-                    || mType == DUPLICATING
-                    || mType == DIRECT
-                    || mType == OFFLOAD) { // no indentation
-            // Gather the framesReleased counters for all active tracks,
-            // and associate with the sink frames written out.  We need
-            // this to convert the sink timestamp to the track timestamp.
-            bool kernelLocationUpdate = false;
-            ExtendedTimestamp timestamp; // use private copy to fetch
-            if (mStandby) {
-                mTimestampVerifier.discontinuity();
-            } else if (threadloop_getHalTimestamp_l(&timestamp) == OK) {
-                mTimestampVerifier.add(timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL],
-                        timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
-                        mSampleRate);
+            collectTimestamps_l();
 
-                if (isTimestampCorrectionEnabled()) {
-                    ALOGVV("TS_BEFORE: %d %lld %lld", id(),
-                            (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
-                            (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
-                    auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
-                    timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                            = correctedTimestamp.mFrames;
-                    timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]
-                            = correctedTimestamp.mTimeNs;
-                    ALOGVV("TS_AFTER: %d %lld %lld", id(),
-                            (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
-                            (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
-
-                    // Note: Downstream latency only added if timestamp correction enabled.
-                    if (mDownstreamLatencyStatMs.getN() > 0) { // we have latency info.
-                        const int64_t newPosition =
-                                timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                                - int64_t(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
-                        // prevent retrograde
-                        timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] = max(
-                                newPosition,
-                                (mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                                        - mSuspendedFrames));
-                    }
-                }
-
-                // We always fetch the timestamp here because often the downstream
-                // sink will block while writing.
-
-                // We keep track of the last valid kernel position in case we are in underrun
-                // and the normal mixer period is the same as the fast mixer period, or there
-                // is some error from the HAL.
-                if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
-                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
-                            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
-                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
-                            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
-
-                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
-                            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER];
-                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
-                            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER];
-                }
-
-                if (timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
-                    kernelLocationUpdate = true;
-                } else {
-                    ALOGVV("getTimestamp error - no valid kernel position");
-                }
-
-                // copy over kernel info
-                mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] =
-                        timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                        + mSuspendedFrames; // add frames discarded when suspended
-                mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] =
-                        timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
-            } else {
-                mTimestampVerifier.error();
-            }
-
-            // mFramesWritten for non-offloaded tracks are contiguous
-            // even after standby() is called. This is useful for the track frame
-            // to sink frame mapping.
-            bool serverLocationUpdate = false;
-            if (mFramesWritten != lastFramesWritten) {
-                serverLocationUpdate = true;
-                lastFramesWritten = mFramesWritten;
-            }
-            // Only update timestamps if there is a meaningful change.
-            // Either the kernel timestamp must be valid or we have written something.
-            if (kernelLocationUpdate || serverLocationUpdate) {
-                if (serverLocationUpdate) {
-                    // use the time before we called the HAL write - it is a bit more accurate
-                    // to when the server last read data than the current time here.
-                    //
-                    // If we haven't written anything, mLastIoBeginNs will be -1
-                    // and we use systemTime().
-                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] = mFramesWritten;
-                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = mLastIoBeginNs == -1
-                            ? systemTime() : mLastIoBeginNs;
-                }
-
-                for (const sp<Track> &t : mActiveTracks) {
-                    if (!t->isFastTrack()) {
-                        t->updateTrackFrameInfo(
-                                t->mAudioTrackServerProxy->framesReleased(),
-                                mFramesWritten,
-                                mSampleRate,
-                                mTimestamp);
-                    }
-                }
-            }
-
-            if (audio_has_proportional_frames(mFormat)) {
-                const double latencyMs = mTimestamp.getOutputServerLatencyMs(mSampleRate);
-                if (latencyMs != 0.) { // note 0. means timestamp is empty.
-                    mLatencyMs.add(latencyMs);
-                }
-            }
-
-            } // if (mType ... ) { // no indentation
-#if 0
-            // logFormat example
-            if (z % 100 == 0) {
-                timespec ts;
-                clock_gettime(CLOCK_MONOTONIC, &ts);
-                LOGT("This is an integer %d, this is a float %f, this is my "
-                    "pid %p %% %s %t", 42, 3.14, "and this is a timestamp", ts);
-                LOGT("A deceptive null-terminated string %\0");
-            }
-            ++z;
-#endif
             saveOutputTracks();
             if (mSignalPending) {
                 // A signal was raised while we were unlocked
@@ -4091,6 +3956,148 @@
     return false;
 }
 
+void AudioFlinger::PlaybackThread::collectTimestamps_l()
+{
+    // Collect timestamp statistics for the Playback Thread types that support it.
+    if (mType != MIXER
+            && mType != DUPLICATING
+            && mType != DIRECT
+            && mType != OFFLOAD) {
+        return;
+    }
+    if (mStandby) {
+        mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
+        return;
+    } else if (mHwPaused) {
+        mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
+        return;
+    }
+
+    // Gather the framesReleased counters for all active tracks,
+    // and associate with the sink frames written out.  We need
+    // this to convert the sink timestamp to the track timestamp.
+    bool kernelLocationUpdate = false;
+    ExtendedTimestamp timestamp; // use private copy to fetch
+
+    // Always query the HAL timestamp and update the timestamp verifier. In standby or pause,
+    // the HAL may still be draining a small amount of buffered data for fade out.
+    if (threadloop_getHalTimestamp_l(&timestamp) == OK) {
+        mTimestampVerifier.add(timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL],
+                timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+                mSampleRate);
+
+        if (isTimestampCorrectionEnabled()) {
+            ALOGVV("TS_BEFORE: %d %lld %lld", id(),
+                    (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+                    (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
+            auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
+            timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                    = correctedTimestamp.mFrames;
+            timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]
+                    = correctedTimestamp.mTimeNs;
+            ALOGVV("TS_AFTER: %d %lld %lld", id(),
+                    (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+                    (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
+
+            // Note: Downstream latency only added if timestamp correction enabled.
+            if (mDownstreamLatencyStatMs.getN() > 0) { // we have latency info.
+                const int64_t newPosition =
+                        timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                        - int64_t(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
+                // prevent retrograde
+                timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] = max(
+                        newPosition,
+                        (mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                                - mSuspendedFrames));
+            }
+        }
+
+        // We always fetch the timestamp here because often the downstream
+        // sink will block while writing.
+
+        // We keep track of the last valid kernel position in case we are in underrun
+        // and the normal mixer period is the same as the fast mixer period, or there
+        // is some error from the HAL.
+        if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
+            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
+                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
+                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+
+            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
+                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER];
+            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
+                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER];
+        }
+
+        if (timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
+            kernelLocationUpdate = true;
+        } else {
+            ALOGVV("getTimestamp error - no valid kernel position");
+        }
+
+        // copy over kernel info
+        mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] =
+                timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                + mSuspendedFrames; // add frames discarded when suspended
+        mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] =
+                timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+    } else {
+        mTimestampVerifier.error();
+    }
+
+    // mFramesWritten for non-offloaded tracks are contiguous
+    // even after standby() is called. This is useful for the track frame
+    // to sink frame mapping.
+    bool serverLocationUpdate = false;
+    if (mFramesWritten != mLastFramesWritten) {
+        serverLocationUpdate = true;
+        mLastFramesWritten = mFramesWritten;
+    }
+    // Only update timestamps if there is a meaningful change.
+    // Either the kernel timestamp must be valid or we have written something.
+    if (kernelLocationUpdate || serverLocationUpdate) {
+        if (serverLocationUpdate) {
+            // use the time before we called the HAL write - it is a bit more accurate
+            // to when the server last read data than the current time here.
+            //
+            // If we haven't written anything, mLastIoBeginNs will be -1
+            // and we use systemTime().
+            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] = mFramesWritten;
+            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = mLastIoBeginNs == -1
+                    ? systemTime() : mLastIoBeginNs;
+        }
+
+        for (const sp<Track> &t : mActiveTracks) {
+            if (!t->isFastTrack()) {
+                t->updateTrackFrameInfo(
+                        t->mAudioTrackServerProxy->framesReleased(),
+                        mFramesWritten,
+                        mSampleRate,
+                        mTimestamp);
+            }
+        }
+    }
+
+    if (audio_has_proportional_frames(mFormat)) {
+        const double latencyMs = mTimestamp.getOutputServerLatencyMs(mSampleRate);
+        if (latencyMs != 0.) { // note 0. means timestamp is empty.
+            mLatencyMs.add(latencyMs);
+        }
+    }
+#if 0
+    // logFormat example
+    if (z % 100 == 0) {
+        timespec ts;
+        clock_gettime(CLOCK_MONOTONIC, &ts);
+        LOGT("This is an integer %d, this is a float %f, this is my "
+            "pid %p %% %s %t", 42, 3.14, "and this is a timestamp", ts);
+        LOGT("A deceptive null-terminated string %\0");
+    }
+    ++z;
+#endif
+}
+
 // removeTracks_l() must be called with ThreadBase::mLock held
 void AudioFlinger::PlaybackThread::removeTracks_l(const Vector< sp<Track> >& tracksToRemove)
 {
@@ -4137,20 +4144,15 @@
         return status;
     }
     if ((mType == OFFLOAD || mType == DIRECT) && mOutput != NULL) {
-        uint64_t position64;
-        if (mOutput->getPresentationPosition(&position64, &timestamp.mTime) == OK) {
-            timestamp.mPosition = (uint32_t)position64;
-            if (mDownstreamLatencyStatMs.getN() > 0) {
-                const uint32_t positionOffset =
-                    (uint32_t)(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
-                if (positionOffset > timestamp.mPosition) {
-                    timestamp.mPosition = 0;
-                } else {
-                    timestamp.mPosition -= positionOffset;
-                }
-            }
-            return NO_ERROR;
+        collectTimestamps_l();
+        if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] <= 0) {
+            return INVALID_OPERATION;
         }
+        timestamp.mPosition = mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+        const int64_t timeNs = mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+        timestamp.mTime.tv_sec = timeNs / NANOS_PER_SECOND;
+        timestamp.mTime.tv_nsec = timeNs - (timestamp.mTime.tv_sec * NANOS_PER_SECOND);
+        return NO_ERROR;
     }
     return INVALID_OPERATION;
 }
@@ -5551,8 +5553,6 @@
                                                        status_t& status)
 {
     bool reconfig = false;
-    bool a2dpDeviceChanged = false;
-
     status = NO_ERROR;
 
     AutoPark<FastMixer> park(mFastMixer);
@@ -5624,7 +5624,7 @@
         }
     }
 
-    return reconfig || a2dpDeviceChanged;
+    return reconfig;
 }
 
 
@@ -5825,8 +5825,15 @@
         sp<Track> l = mActiveTracks.getLatest();
         bool last = l.get() == track;
 
-        if (track->isPausing()) {
-            track->setPaused();
+        if (track->isPausePending()) {
+            track->pauseAck();
+            // The track might already have been flushed or stopped.
+            // Other operations, such as a pending flush, may occur on the next prepare.
+            if (track->isPausing()) {
+                track->setPaused();
+            }
+            // Always perform the pause, as an immediate flush changes the state
+            // so that isPausing() no longer holds.
             if (mHwSupportsPause && last && !mHwPaused) {
                 doHwPause = true;
                 mHwPaused = true;
@@ -6085,8 +6092,6 @@
                                                               status_t& status)
 {
     bool reconfig = false;
-    bool a2dpDeviceChanged = false;
-
     status = NO_ERROR;
 
     AudioParameter param = AudioParameter(keyValuePair);
@@ -6121,7 +6126,7 @@
         }
     }
 
-    return reconfig || a2dpDeviceChanged;
+    return reconfig;
 }
 
 uint32_t AudioFlinger::DirectOutputThread::activeSleepTimeUs() const
@@ -6178,7 +6183,7 @@
     mOutput->flush();
     mHwPaused = false;
     mFlushPending = false;
-    mTimestampVerifier.discontinuity(); // DIRECT and OFFLOADED flush resets frame count.
+    mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
     mTimestamp.clear();
 }
 
@@ -6370,8 +6375,15 @@
             continue;
         }
 
-        if (track->isPausing()) {
-            track->setPaused();
+        if (track->isPausePending()) {
+            track->pauseAck();
+            // The track might already have been flushed or stopped.
+            // Other operations, such as a pending flush, may occur on the next prepare.
+            if (track->isPausing()) {
+                track->setPaused();
+            }
+            // Always perform the pause if last, as an immediate flush changes the state
+            // so that isPausing() no longer holds.
             if (last) {
                 if (mHwSupportsPause && !mHwPaused) {
                     doHwPause = true;
@@ -6514,13 +6526,14 @@
                     track->presentationComplete(framesWritten, audioHALFrames);
                     track->reset();
                     tracksToRemove->add(track);
-                    // DIRECT and OFFLOADED stop resets frame counts.
+                    // OFFLOADED stop resets frame counts.
                     if (!mUseAsyncWrite) {
                         // If we don't get explicit drain notification we must
                         // register discontinuity regardless of whether this is
                         // the previous (!last) or the upcoming (last) track
                         // to avoid skipping the discontinuity.
-                        mTimestampVerifier.discontinuity();
+                        mTimestampVerifier.discontinuity(
+                                mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
                     }
                 }
             } else {
@@ -7378,7 +7391,9 @@
         if (mPipeSource.get() == nullptr /* don't obtain for FastCapture, could block */) {
             int64_t position, time;
             if (mStandby) {
-                mTimestampVerifier.discontinuity();
+                mTimestampVerifier.discontinuity(audio_is_linear_pcm(mFormat) ?
+                    mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS :
+                    mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
             } else if (mSource->getCapturePosition(&position, &time) == NO_ERROR
                     && time > mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]) {
 
@@ -8370,13 +8385,11 @@
         }
         if (reconfig) {
             if (status == BAD_VALUE) {
-                uint32_t sRate;
-                audio_channel_mask_t channelMask;
-                audio_format_t format;
-                if (mInput->stream->getAudioProperties(&sRate, &channelMask, &format) == OK &&
-                        audio_is_linear_pcm(format) && audio_is_linear_pcm(reqFormat) &&
-                        sRate <= (AUDIO_RESAMPLER_DOWN_RATIO_MAX * samplingRate) &&
-                        audio_channel_count_from_in_mask(channelMask) <= FCC_8) {
+                audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+                if (mInput->stream->getAudioProperties(&config) == OK &&
+                        audio_is_linear_pcm(config.format) && audio_is_linear_pcm(reqFormat) &&
+                        config.sample_rate <= (AUDIO_RESAMPLER_DOWN_RATIO_MAX * samplingRate) &&
+                        audio_channel_count_from_in_mask(config.channel_mask) <= FCC_8) {
                     status = NO_ERROR;
                 }
             }
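
This hunk folds the three separate getAudioProperties() out-parameters into a single audio_config_base_t. The standalone sketch below models the validation that follows; ConfigBase, kMaxDownRatio and kMaxChannels are illustrative stand-ins for the HAL struct, AUDIO_RESAMPLER_DOWN_RATIO_MAX and FCC_8.

    // Standalone sketch (not the HAL API): models the reconfiguration check
    // performed after switching to a single config struct.
    #include <cstdint>
    #include <cstdio>

    struct ConfigBase {
        uint32_t sampleRate;
        uint32_t channelCount;
        bool     linearPcm;
    };

    constexpr uint32_t kMaxDownRatio = 6;  // stand-in for AUDIO_RESAMPLER_DOWN_RATIO_MAX
    constexpr uint32_t kMaxChannels  = 8;  // stand-in for FCC_8

    // Accept the requested rate only if both formats are linear PCM, the HAL
    // rate can be downsampled to the requested rate, and the channel count is
    // within the limit -- the same shape as the reconfig check above.
    bool canReconfigure(const ConfigBase& halConfig, uint32_t requestedRate,
                        bool requestedLinearPcm) {
        return halConfig.linearPcm && requestedLinearPcm &&
               halConfig.sampleRate <= kMaxDownRatio * requestedRate &&
               halConfig.channelCount <= kMaxChannels;
    }

    int main() {
        ConfigBase hal{96000, 2, true};
        printf("48k ok: %d\n", canReconfigure(hal, 48000, true));  // 1: 2x downsample
        printf("8k  ok: %d\n", canReconfigure(hal, 8000, true));   // 0: ratio too large
        return 0;
    }
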
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 709a3cc..9f65562 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -590,6 +590,11 @@
                 ExtendedTimestamp       mTimestamp;
                 TimestampVerifier< // For timestamp statistics.
                         int64_t /* frame count */, int64_t /* time ns */> mTimestampVerifier;
+                // DIRECT and OFFLOAD threads should reset frame count to zero on stop/flush
+                // TODO: add confirmation checks:
+                // 1) DIRECT threads and linear PCM format really resets to 0?
+                // 2) Is frame count really valid if not linear pcm?
+                // 3) Are all 64 bits of position returned, not just lowest 32 bits?
                 // Timestamp corrected device should be a single device.
                 audio_devices_t         mTimestampCorrectedDevice = AUDIO_DEVICE_NONE;
 
@@ -1023,6 +1028,8 @@
 
     int64_t                         mBytesWritten;
     int64_t                         mFramesWritten; // not reset on standby
+    int64_t                         mLastFramesWritten = -1; // track changes in timestamp
+                                                             // server frames written.
     int64_t                         mSuspendedFrames; // not reset on standby
 
     // mHapticChannelMask and mHapticChannelCount will only be valid when the thread support
@@ -1035,6 +1042,14 @@
     // copy rather than the one in AudioFlinger.  This optimization saves a lock.
     bool                            mMasterMute;
                 void        setMasterMute_l(bool muted) { mMasterMute = muted; }
+
+                auto discontinuityForStandbyOrFlush() const { // call on threadLoop or with lock.
+                    return ((mType == DIRECT && !audio_is_linear_pcm(mFormat))
+                                    || mType == OFFLOAD)
+                            ? mTimestampVerifier.DISCONTINUITY_MODE_ZERO
+                            : mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS;
+                }
+
 protected:
     ActiveTracks<Track>     mActiveTracks;
 
@@ -1081,6 +1096,8 @@
     void        updateMetadata_l() final;
     virtual void sendMetadataToBackend_l(const StreamOutHalInterface::SourceMetadata& metadata);
 
+    void        collectTimestamps_l();
+
     // The Tracks class manages tracks added and removed from the Thread.
     template <typename T>
     class Tracks {
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index ee886d5..4353b3d 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -638,7 +638,6 @@
     mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(
             uid, attr, id(), streamType, opPackageName)),
     // mSinkTimestamp
-    mFrameCountToBeReady(frameCountToBeReady),
     mFastIndex(-1),
     mCachedVolume(1.0),
     /* The track might not play immediately after being active, similarly as if its volume was 0.
@@ -672,6 +671,7 @@
                 mFrameSize, sampleRate);
     }
     mServerProxy = mAudioTrackServerProxy;
+    mServerProxy->setStartThresholdInFrames(frameCountToBeReady); // update the Cblk value
 
     // only allocate a fast track index if we were able to allocate a normal track name
     if (flags & AUDIO_OUTPUT_FLAG_FAST) {
@@ -999,7 +999,10 @@
     }
 
     size_t bufferSizeInFrames = mServerProxy->getBufferSizeInFrames();
-    size_t framesToBeReady = std::min(mFrameCountToBeReady, bufferSizeInFrames);
+    // Note: mServerProxy->getStartThresholdInFrames() is clamped.
+    const size_t startThresholdInFrames = mServerProxy->getStartThresholdInFrames();
+    const size_t framesToBeReady = std::clamp(  // clamp again to validate client values.
+            std::min(startThresholdInFrames, bufferSizeInFrames), size_t(1), mFrameCount);
 
     if (framesReady() >= framesToBeReady || (mCblk->mFlags & CBLK_FORCEREADY)) {
         ALOGV("%s(%d): consider track ready with %zu/%zu, target was %zu)",
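
The start threshold now comes from the shared control block via getStartThresholdInFrames() and is re-validated server-side. A minimal sketch of that clamping, under the assumption that the client-supplied value can be arbitrary:

    // Standalone sketch: never trust the shared-memory value; cap it at the
    // current buffer size, then clamp into [1, frameCount].
    #include <algorithm>
    #include <cstddef>
    #include <cstdio>

    size_t framesToBeReady(size_t startThreshold, size_t bufferSizeInFrames, size_t frameCount) {
        return std::clamp(std::min(startThreshold, bufferSizeInFrames),
                          size_t(1), frameCount);
    }

    int main() {
        printf("%zu\n", framesToBeReady(0, 4096, 960));       // 1: zero is raised to 1
        printf("%zu\n", framesToBeReady(100000, 4096, 960));  // 960: capped at frameCount
        printf("%zu\n", framesToBeReady(480, 4096, 960));     // 480: passes through
        return 0;
    }
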
@@ -1038,6 +1041,11 @@
         // initial state-stopping. next state-pausing.
         // What if resume is called ?
 
+        if (state == FLUSHED) {
+            // avoid underrun glitches when starting after flush
+            reset();
+        }
+
         if (state == PAUSED || state == PAUSING) {
             if (mResumeToStopping) {
                 // happened we need to resume to STOPPING_1
@@ -1169,6 +1177,9 @@
             mState = PAUSING;
             ALOGV("%s(%d): ACTIVE/RESUMING => PAUSING on thread %d",
                     __func__, mId, (int)mThreadIoHandle);
+            if (isOffloadedOrDirect()) {
+                mPauseHwPending = true;
+            }
             playbackThread->broadcast_l();
             break;
 
@@ -1256,6 +1267,11 @@
     mFlushHwPending = false;
 }
 
+void AudioFlinger::PlaybackThread::Track::pauseAck()
+{
+    mPauseHwPending = false;
+}
+
 void AudioFlinger::PlaybackThread::Track::reset()
 {
     // Do not reset twice to avoid discarding data written just after a flush and before
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index ca29591..2038aa9 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -218,7 +218,9 @@
             add(devices);
             return size();
         }
-        return SortedVector::merge(devices);
+        ssize_t ret = SortedVector::merge(devices);
+        refreshTypes();
+        return ret;
     }
 
     /**
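
The merge() fix above refreshes the cached device-type information after the underlying SortedVector merge. The standalone sketch below illustrates why a container that caches derived state must recompute it on bulk mutation; DeviceVectorLike is an illustrative type, not the real DeviceVector.

    // Standalone sketch: the cached type mask goes stale unless it is
    // recomputed after a merge, which is exactly the bug being fixed.
    #include <cstdint>
    #include <cstdio>
    #include <set>

    struct DeviceVectorLike {
        std::set<int> devices;     // device ids
        uint32_t typeMask = 0;     // cached OR of per-device type bits

        void refreshTypes() {
            typeMask = 0;
            for (int d : devices) typeMask |= (1u << (d % 32));
        }
        void merge(const DeviceVectorLike& other) {
            devices.insert(other.devices.begin(), other.devices.end());
            refreshTypes();        // without this the cache goes stale
        }
    };

    int main() {
        DeviceVectorLike a, b;
        a.devices = {1}; a.refreshTypes();
        b.devices = {3}; b.refreshTypes();
        a.merge(b);
        printf("mask=0x%x\n", a.typeMask);  // 0xa: both types visible after merge
        return 0;
    }
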
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 8c7fb97..20c2c28 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "APM::AudioPolicyEngine/Base"
 //#define LOG_NDEBUG 0
 
+#include <sys/stat.h>
+
 #include "EngineBase.h"
 #include "EngineDefaultConfig.h"
 #include <TypeConverter.h>
@@ -147,8 +149,13 @@
         });
         return iter != end(volumeGroups);
     };
+    auto fileExists = [](const char* path) {
+        struct stat fileStat;
+        return stat(path, &fileStat) == 0 && S_ISREG(fileStat.st_mode);
+    };
 
-    auto result = engineConfig::parse();
+    auto result = fileExists(engineConfig::DEFAULT_PATH) ?
+            engineConfig::parse(engineConfig::DEFAULT_PATH) : engineConfig::ParsingResult{};
     if (result.parsedConfig == nullptr) {
         ALOGW("%s: No configuration found, using default matching phone experience.", __FUNCTION__);
         engineConfig::Config config = gDefaultEngineConfig;
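
The engine now checks that the configuration file exists and is a regular file before handing it to the parser, falling back to the built-in default otherwise. A hedged standalone sketch of that guard follows; parseConfigOrDefault and the returned strings are placeholders for engineConfig::parse() and gDefaultEngineConfig.

    // Standalone sketch of the stat()-based guard before parsing.
    #include <sys/stat.h>
    #include <cstdio>
    #include <string>

    static bool fileExists(const char* path) {
        struct stat st;
        return stat(path, &st) == 0 && S_ISREG(st.st_mode);
    }

    std::string parseConfigOrDefault(const char* path) {
        if (fileExists(path)) {
            return std::string("parsed: ") + path;   // stand-in for engineConfig::parse(path)
        }
        return "default config";                     // stand-in for gDefaultEngineConfig
    }

    int main() {
        printf("%s\n", parseConfigOrDefault("/nonexistent/engine.xml").c_str());
        return 0;
    }
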
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 5a042bd..5b53c0b 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -257,11 +257,7 @@
         } else {
             checkCloseOutputs();
         }
-
-        if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
-            DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
-            updateCallRouting(newDevices);
-        }
+        (void)updateCallRouting(false /*fromCache*/);
         const DeviceVector msdOutDevices = getMsdAudioOutDevices();
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
@@ -349,10 +345,7 @@
         // getDeviceForStrategy() cache
         updateDevicesAndOutputs();
 
-        if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
-            DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
-            updateCallRouting(newDevices);
-        }
+        (void)updateCallRouting(false /*fromCache*/);
         // Reconnect Audio Source
         for (const auto &strategy : mEngine->getOrderedProductStrategies()) {
             auto attributes = mEngine->getAllAttributesForProductStrategy(strategy).front();
@@ -517,23 +510,58 @@
     return status;
 }
 
-uint32_t AudioPolicyManager::updateCallRouting(const DeviceVector &rxDevices, uint32_t delayMs)
+DeviceVector AudioPolicyManager::selectBestRxSinkDevicesForCall(bool fromCache)
+{
+    DeviceVector rxSinkdevices{};
+    rxSinkdevices = mEngine->getOutputDevicesForAttributes(
+                attributes_initializer(AUDIO_USAGE_VOICE_COMMUNICATION), nullptr, fromCache);
+    if (!rxSinkdevices.isEmpty() && mAvailableOutputDevices.contains(rxSinkdevices.itemAt(0))) {
+        auto rxSinkDevice = rxSinkdevices.itemAt(0);
+        auto telephonyRxModule = mHwModules.getModuleForDeviceType(
+                    AUDIO_DEVICE_IN_TELEPHONY_RX, AUDIO_FORMAT_DEFAULT);
+        // retrieve Rx Source device descriptor
+        sp<DeviceDescriptor> rxSourceDevice = mAvailableInputDevices.getDevice(
+                    AUDIO_DEVICE_IN_TELEPHONY_RX, String8(), AUDIO_FORMAT_DEFAULT);
+
+        // RX Telephony and Rx sink devices are declared by Primary Audio HAL
+        if (isPrimaryModule(telephonyRxModule) && (telephonyRxModule->getHalVersionMajor() >= 3) &&
+                telephonyRxModule->supportsPatch(rxSourceDevice, rxSinkDevice)) {
+            ALOGW("%s() device %s using HW Bridge", __func__, rxSinkDevice->toString().c_str());
+            return DeviceVector(rxSinkDevice);
+        }
+    }
+    // Note that despite the fact that getNewOutputDevices() is called on the primary output,
+    // the device returned is not necessarily reachable via this output
+    // (filter later by setOutputDevices())
+    return getNewOutputDevices(mPrimaryOutput, fromCache);
+}
+
+status_t AudioPolicyManager::updateCallRouting(bool fromCache, uint32_t delayMs, uint32_t *waitMs)
+{
+    if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
+        DeviceVector rxDevices = selectBestRxSinkDevicesForCall(fromCache);
+        return updateCallRoutingInternal(rxDevices, delayMs, waitMs);
+    }
+    return INVALID_OPERATION;
+}
+
+status_t AudioPolicyManager::updateCallRoutingInternal(
+        const DeviceVector &rxDevices, uint32_t delayMs, uint32_t *waitMs)
 {
     bool createTxPatch = false;
     bool createRxPatch = false;
     uint32_t muteWaitMs = 0;
-
     if(!hasPrimaryOutput() ||
             mPrimaryOutput->devices().onlyContainsDevicesWithType(AUDIO_DEVICE_OUT_STUB)) {
-        return muteWaitMs;
+        return INVALID_OPERATION;
     }
-    ALOG_ASSERT(!rxDevices.isEmpty(), "updateCallRouting() no selected output device");
+    ALOG_ASSERT(!rxDevices.isEmpty(), "%s() no selected output device", __func__);
 
     audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
     auto txSourceDevice = mEngine->getInputDeviceForAttributes(attr);
-    ALOG_ASSERT(txSourceDevice != 0, "updateCallRouting() input selected device not available");
+    ALOG_ASSERT(txSourceDevice != 0, "%s() input selected device not available", __func__);
 
-    ALOGV("updateCallRouting device rxDevice %s txDevice %s",
+    ALOGV("%s device rxDevice %s txDevice %s", __func__,
           rxDevices.itemAt(0)->toString().c_str(), txSourceDevice->toString().c_str());
 
     disconnectTelephonyRxAudioSource();
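
updateCallRouting() changes calling convention in this hunk: it now returns a status_t and reports the mute wait time through an optional out-parameter, so callers such as updateCallAndOutputRouting() can branch on NO_ERROR. A standalone sketch of that pattern; Status and updateRouting are illustrative names, not the AudioPolicyManager API.

    // Standalone sketch of the status + optional out-parameter convention.
    #include <cstdint>
    #include <cstdio>

    enum Status { NO_ERROR = 0, INVALID_OPERATION = -38 };

    Status updateRouting(bool inCall, uint32_t delayMs, uint32_t* waitMs = nullptr) {
        if (!inCall) {
            return INVALID_OPERATION;       // nothing routed, caller keeps its defaults
        }
        uint32_t muteWaitMs = delayMs + 10; // stand-in for the computed mute wait
        if (waitMs != nullptr) {
            *waitMs = muteWaitMs;           // only written on success and when requested
        }
        return NO_ERROR;
    }

    int main() {
        uint32_t waitMs = 0;
        if (updateRouting(/*inCall=*/true, /*delayMs=*/50, &waitMs) == NO_ERROR) {
            printf("routed, waitMs=%u\n", waitMs);
        }
        // Callers that do not care about the wait time can omit the pointer.
        (void)updateRouting(false, 0);
        return 0;
    }
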
@@ -562,8 +590,8 @@
             (telephonyRxModule->getHalVersionMajor() >= 3)) {
         if (rxSourceDevice == 0 || txSinkDevice == 0) {
             // RX / TX Telephony device(s) is(are) not currently available
-            ALOGE("updateCallRouting() no telephony Tx and/or RX device");
-            return muteWaitMs;
+            ALOGE("%s() no telephony Tx and/or RX device", __func__);
+            return INVALID_OPERATION;
         }
         // createAudioPatchInternal now supports both HW / SW bridging
         createRxPatch = true;
@@ -601,8 +629,10 @@
         }
         mCallTxPatch = createTelephonyPatch(false /*isRx*/, txSourceDevice, delayMs);
     }
-
-    return muteWaitMs;
+    if (waitMs != nullptr) {
+        *waitMs = muteWaitMs;
+    }
+    return NO_ERROR;
 }
 
 sp<AudioPatch> AudioPolicyManager::createTelephonyPatch(
@@ -720,25 +750,22 @@
     }
 
     if (hasPrimaryOutput()) {
-        // Note that despite the fact that getNewOutputDevices() is called on the primary output,
-        // the device returned is not necessarily reachable via this output
-        DeviceVector rxDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
-        // force routing command to audio hardware when ending call
-        // even if no device change is needed
-        if (isStateInCall(oldState) && rxDevices.isEmpty()) {
-            rxDevices = mPrimaryOutput->devices();
-        }
-
         if (state == AUDIO_MODE_IN_CALL) {
-            updateCallRouting(rxDevices, delayMs);
-        } else if (oldState == AUDIO_MODE_IN_CALL) {
-            disconnectTelephonyRxAudioSource();
-            if (mCallTxPatch != 0) {
-                releaseAudioPatchInternal(mCallTxPatch->getHandle());
-                mCallTxPatch.clear();
-            }
-            setOutputDevices(mPrimaryOutput, rxDevices, force, 0);
+            (void)updateCallRouting(false /*fromCache*/, delayMs);
         } else {
+            DeviceVector rxDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
+            // force routing command to audio hardware when ending call
+            // even if no device change is needed
+            if (isStateInCall(oldState) && rxDevices.isEmpty()) {
+                rxDevices = mPrimaryOutput->devices();
+            }
+            if (oldState == AUDIO_MODE_IN_CALL) {
+                disconnectTelephonyRxAudioSource();
+                if (mCallTxPatch != 0) {
+                    releaseAudioPatchInternal(mCallTxPatch->getHandle());
+                    mCallTxPatch.clear();
+                }
+            }
             setOutputDevices(mPrimaryOutput, rxDevices, force, 0);
         }
     }
@@ -3236,9 +3263,7 @@
 void AudioPolicyManager::updateCallAndOutputRouting(bool forceVolumeReeval, uint32_t delayMs)
 {
     uint32_t waitMs = 0;
-    if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
-        DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, true /*fromCache*/);
-        waitMs = updateCallRouting(newDevices, delayMs);
+    if (updateCallRouting(true /*fromCache*/, delayMs, &waitMs) == NO_ERROR) {
         // Only apply special touch sound delay once
         delayMs = 0;
     }
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index ed5be5e..5533829 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -729,9 +729,22 @@
                     String8(devices.itemAt(0)->address().c_str()) : String8("");
         }
 
-        uint32_t updateCallRouting(const DeviceVector &rxDevices, uint32_t delayMs = 0);
+        status_t updateCallRouting(
+                bool fromCache, uint32_t delayMs = 0, uint32_t *waitMs = nullptr);
+        status_t updateCallRoutingInternal(
+                const DeviceVector &rxDevices, uint32_t delayMs, uint32_t *waitMs);
         sp<AudioPatch> createTelephonyPatch(bool isRx, const sp<DeviceDescriptor> &device,
                                             uint32_t delayMs);
+        /**
+         * @brief selectBestRxSinkDevicesForCall: if the primary module hosts both Telephony Rx/Tx
+         * devices, and it also declares support for a HW bridge between the Telephony Rx and the
+         * given sink device for Voice Call audio attributes, select this device in priority.
+         * Otherwise, getNewOutputDevices() is called on the primary output to select the sink device.
+         * @param fromCache true to retrieve devices from the engine cache instead of having the
+         * engine reconsider all product strategies.
+         * @return vector of devices, empty if none is found.
+         */
+        DeviceVector selectBestRxSinkDevicesForCall(bool fromCache);
         bool isDeviceOfModule(const sp<DeviceDescriptor>& devDesc, const char *moduleId) const;
 
         status_t startSource(const sp<SwAudioOutputDescriptor>& outputDesc,

diff --git a/services/audiopolicy/service/Android.mk b/services/audiopolicy/service/Android.mk
index 7015b7b..7be10c4 100644
--- a/services/audiopolicy/service/Android.mk
+++ b/services/audiopolicy/service/Android.mk
@@ -19,6 +19,7 @@
     libaudiopolicymanager_interface_headers
 
 LOCAL_SHARED_LIBRARIES := \
+    libactivitymanager_aidl \
     libcutils \
     libutils \
     liblog \
@@ -36,6 +37,7 @@
     capture_state_listener-aidl-cpp
 
 LOCAL_EXPORT_SHARED_LIBRARY_HEADERS := \
+    libactivitymanager_aidl \
     libsensorprivacy
 
 LOCAL_STATIC_LIBRARIES := \
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index b738633..5dac55b 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -970,7 +970,7 @@
     for (const auto& deviceEffectsIter : mDeviceEffects) {
         const auto& deviceEffects =  deviceEffectsIter.second;
         for (const auto& effectDesc : deviceEffects->mEffectDescriptors->mEffects) {
-            auto fx = std::make_unique<AudioEffect>(String16("android"));
+            sp<AudioEffect> fx = new AudioEffect(String16("android"));
             fx->set(EFFECT_UUID_NULL, &effectDesc->mUuid, 0, nullptr,
                     nullptr, AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
                     AudioDeviceTypeAddr{deviceEffects->getDeviceType(),
@@ -987,7 +987,7 @@
             ALOGV("%s(): create Fx %s added on port type=%d address=%s", __func__,
                   effectDesc->mName, deviceEffects->getDeviceType(),
                   deviceEffects->getDeviceAddress().c_str());
-            deviceEffects->mEffects.push_back(std::move(fx));
+            deviceEffects->mEffects.push_back(fx);
         }
     }
 }
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index 81c728d..13d5d0c 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -207,7 +207,7 @@
             mDeviceType(device), mDeviceAddress(address) {}
         /*virtual*/ ~DeviceEffects() = default;
 
-        std::vector<std::unique_ptr<AudioEffect>> mEffects;
+        std::vector< sp<AudioEffect> > mEffects;
         audio_devices_t getDeviceType() const { return mDeviceType; }
         std::string getDeviceAddress() const { return mDeviceAddress; }
         const std::unique_ptr<EffectDescVector> mEffectDescriptors;
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 11df5f3..d16ea1a 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -113,6 +113,8 @@
         "libutilscallstack",
         "libutils",
         "libbinder",
+        "libactivitymanager_aidl",
+        "libpermission",
         "libcutils",
         "libmedia",
         "libmediautils",
@@ -150,11 +152,14 @@
     ],
 
     static_libs: [
+        "libprocessinfoservice_aidl",
         "libbinderthreadstateutils",
     ],
 
     export_shared_lib_headers: [
         "libbinder",
+        "libactivitymanager_aidl",
+        "libpermission",
         "libcamera_client",
         "libfmq",
         "libsensorprivacy",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 3d9998a..91dda92 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -41,7 +41,6 @@
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
 #include <binder/PermissionController.h>
-#include <binder/ProcessInfoService.h>
 #include <binder/IResultReceiver.h>
 #include <binderthreadstate/CallerUtils.h>
 #include <cutils/atomic.h>
@@ -57,6 +56,7 @@
 #include <media/IMediaHTTPService.h>
 #include <media/mediaplayer.h>
 #include <mediautils/BatteryNotifier.h>
+#include <processinfo/ProcessInfoService.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
 #include <utils/String16.h>
@@ -252,10 +252,16 @@
     proxyBinder->pingForUserUpdate();
 }
 
-void CameraService::broadcastTorchModeStatus(const String8& cameraId, TorchModeStatus status) {
+void CameraService::broadcastTorchModeStatus(const String8& cameraId, TorchModeStatus status,
+        SystemCameraKind systemCameraKind) {
     Mutex::Autolock lock(mStatusListenerLock);
-
     for (auto& i : mListenerList) {
+        if (shouldSkipStatusUpdates(systemCameraKind, i->isVendorListener(), i->getListenerPid(),
+                i->getListenerUid())) {
+            ALOGV("Skipping torch callback for system-only camera device %s",
+                    cameraId.c_str());
+            continue;
+        }
         i->getListener()->onTorchStatusChanged(mapToInterface(status), String16{cameraId});
     }
 }
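
Torch callbacks are now filtered per listener so system-only cameras stay hidden from unprivileged clients. The standalone sketch below models that gating; Listener, CameraKind and shouldSkip are illustrative, not the CameraService types.

    // Standalone sketch of per-listener filtering before delivering callbacks.
    #include <cstdio>
    #include <string>
    #include <vector>

    enum class CameraKind { PUBLIC, SYSTEM_ONLY };

    struct Listener {
        std::string name;
        bool hasSystemCameraPermission;
    };

    static bool shouldSkip(CameraKind kind, const Listener& l) {
        return kind == CameraKind::SYSTEM_ONLY && !l.hasSystemCameraPermission;
    }

    void broadcastTorchStatus(const std::vector<Listener>& listeners,
                              const std::string& cameraId, CameraKind kind, bool on) {
        for (const auto& l : listeners) {
            if (shouldSkip(kind, l)) {
                continue;   // system-only camera: hide the callback from this listener
            }
            printf("%s <- torch %s on camera %s\n", l.name.c_str(), on ? "on" : "off",
                   cameraId.c_str());
        }
    }

    int main() {
        std::vector<Listener> listeners = {{"app", false}, {"system_ui", true}};
        broadcastTorchStatus(listeners, "7", CameraKind::SYSTEM_ONLY, true);  // only system_ui
        return 0;
    }
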
@@ -341,7 +347,7 @@
         Mutex::Autolock al(mTorchStatusMutex);
         mTorchStatusMap.add(id, TorchModeStatus::AVAILABLE_OFF);
 
-        broadcastTorchModeStatus(id, TorchModeStatus::AVAILABLE_OFF);
+        broadcastTorchModeStatus(id, TorchModeStatus::AVAILABLE_OFF, deviceKind);
     }
 
     updateCameraNumAndIds();
@@ -502,12 +508,19 @@
 
 void CameraService::onTorchStatusChanged(const String8& cameraId,
         TorchModeStatus newStatus) {
+    SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
+    status_t res = getSystemCameraKind(cameraId, &systemCameraKind);
+    if (res != OK) {
+        ALOGE("%s: Could not get system camera kind for camera id %s", __FUNCTION__,
+                cameraId.string());
+        return;
+    }
     Mutex::Autolock al(mTorchStatusMutex);
-    onTorchStatusChangedLocked(cameraId, newStatus);
+    onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
 }
 
 void CameraService::onTorchStatusChangedLocked(const String8& cameraId,
-        TorchModeStatus newStatus) {
+        TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
     ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d",
             __FUNCTION__, cameraId.string(), newStatus);
 
@@ -556,8 +569,7 @@
             }
         }
     }
-
-    broadcastTorchModeStatus(cameraId, newStatus);
+    broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
 }
 
 static bool hasPermissionsForSystemCamera(int callingPid, int callingUid) {
@@ -1864,6 +1876,10 @@
     String8 id = String8(cameraId.string());
     int uid = CameraThreadState::getCallingUid();
 
+    if (shouldRejectSystemCameraConnection(id)) {
+        return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to set torch mode"
+                " for system only device %s: ", id.string());
+    }
     // verify id is valid.
     auto state = getCameraState(id);
     if (state == nullptr) {
@@ -2220,6 +2236,11 @@
                     return shouldSkipStatusUpdates(deviceKind, isVendorListener, clientPid,
                             clientUid);}), cameraStatuses->end());
 
+    // cameraStatuses will have non-eligible camera ids removed.
+    std::set<String16> idsChosenForCallback;
+    for (const auto &s : *cameraStatuses) {
+        idsChosenForCallback.insert(String16(s.cameraId));
+    }
 
     /*
      * Immediately signal current torch status to this listener only
@@ -2229,7 +2250,11 @@
         Mutex::Autolock al(mTorchStatusMutex);
         for (size_t i = 0; i < mTorchStatusMap.size(); i++ ) {
             String16 id = String16(mTorchStatusMap.keyAt(i).string());
-            listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id);
+            // The camera id is visible to the client. Fine to send torch
+            // callback.
+            if (idsChosenForCallback.find(id) != idsChosenForCallback.end()) {
+                listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id);
+            }
         }
     }
 
@@ -3766,7 +3791,7 @@
                             TorchModeStatus::AVAILABLE_OFF :
                             TorchModeStatus::NOT_AVAILABLE;
                     if (torchStatus != newTorchStatus) {
-                        onTorchStatusChangedLocked(cameraId, newTorchStatus);
+                        onTorchStatusChangedLocked(cameraId, newTorchStatus, deviceKind);
                     }
                 }
             }
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 6771718..2853b0c 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -996,7 +996,8 @@
     // handle torch mode status change and invoke callbacks. mTorchStatusMutex
     // should be locked.
     void onTorchStatusChangedLocked(const String8& cameraId,
-            hardware::camera::common::V1_0::TorchModeStatus newStatus);
+            hardware::camera::common::V1_0::TorchModeStatus newStatus,
+            SystemCameraKind systemCameraKind);
 
     // get a camera's torch status. mTorchStatusMutex should be locked.
     status_t getTorchStatusLocked(const String8 &cameraId,
@@ -1085,7 +1086,8 @@
     static void pingCameraServiceProxy();
 
     void broadcastTorchModeStatus(const String8& cameraId,
-            hardware::camera::common::V1_0::TorchModeStatus status);
+            hardware::camera::common::V1_0::TorchModeStatus status,
+            SystemCameraKind systemCameraKind);
 
     void disconnectClient(const String8& id, sp<BasicClient> clientToDisconnect);
 
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index b2a0cda..f13ca74 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -61,6 +61,7 @@
     "-bugprone-unhandled-self-assignment", // found in TimeMachine.h
     "-bugprone-suspicious-string-compare", // found in TimeMachine.h
     "-cert-oop54-cpp", // found in TransactionLog.h
+    "-bugprone-narrowing-conversions", // b/182410845
 ]
 
 cc_defaults {
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index d78d1e3..2a52599 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -348,7 +348,7 @@
             } else {
                 ss << "Statsd atoms:\n" << statsd;
             }
-            ll -= n;
+            ll -= (int32_t)n;
         }
     }
 
@@ -737,7 +737,7 @@
         mA2dpConnectionServiceNs = 0;
         ++mA2dpConnectionSuccesses;
 
-        const auto connectionTimeMs = float(timeDiffNs * 1e-6);
+        const auto connectionTimeMs = float((double)timeDiffNs * 1e-6);
 
         const auto outputDeviceBits = types::lookup<types::OUTPUT_DEVICE, long_enum_type_t>(
                 "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP");
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 34be0b9..e584f12 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -174,8 +174,8 @@
         if (item_device == device && item_type == type) {
             int64_t final_duration_ns = item_duration_ns + duration_ns;
             double final_volume = (device & INPUT_DEVICE_BIT) ? 1.0:
-                            ((item_volume * item_duration_ns +
-                            average_vol * duration_ns) / final_duration_ns);
+                            ((item_volume * (double)item_duration_ns +
+                            average_vol * (double)duration_ns) / (double)final_duration_ns);
 
             item->setInt64(AUDIO_POWER_USAGE_PROP_DURATION_NS, final_duration_ns);
             item->setDouble(AUDIO_POWER_USAGE_PROP_VOLUME, final_volume);
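
These mediametrics hunks silence bugprone-narrowing-conversions (b/182410845) by promoting the int64_t nanosecond durations to double before the weighted-average arithmetic. A standalone sketch of the same computation:

    // Standalone sketch of the duration-weighted volume average with the
    // explicit double promotions added above; computing directly on int64_t
    // nanosecond durations risks overflow and narrowing warnings.
    #include <cstdint>
    #include <cstdio>

    double weightedVolume(double oldVolume, int64_t oldDurationNs,
                          double newVolume, int64_t newDurationNs) {
        const int64_t totalNs = oldDurationNs + newDurationNs;
        return (oldVolume * (double)oldDurationNs + newVolume * (double)newDurationNs)
                / (double)totalNs;
    }

    int main() {
        // 10 s at volume 0.2 followed by 30 s at volume 1.0 -> 0.8 average.
        printf("%.2f\n", weightedVolume(0.2, 10'000'000'000LL, 1.0, 30'000'000'000LL));
        return 0;
    }
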
@@ -289,7 +289,7 @@
         const int64_t durationNs = endCallNs - mDeviceTimeNs;
         if (durationNs > 0) {
             mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
-                    mVoiceVolume * double(endCallNs - mVolumeTimeNs)) / durationNs;
+                    mVoiceVolume * double(endCallNs - mVolumeTimeNs)) / (double)durationNs;
             saveAsItems_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
         }
     } else if (mode == "AUDIO_MODE_IN_CALL") { // entering call mode
@@ -317,7 +317,7 @@
         const int64_t durationNs = timeNs - mDeviceTimeNs;
         if (durationNs > 0) {
             mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
-                    mVoiceVolume * double(timeNs - mVolumeTimeNs)) / durationNs;
+                    mVoiceVolume * double(timeNs - mVolumeTimeNs)) / (double)durationNs;
             mVolumeTimeNs = timeNs;
         }
     }
@@ -348,7 +348,7 @@
         const int64_t durationNs = endDeviceNs - mDeviceTimeNs;
         if (durationNs > 0) {
             mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
-                    mVoiceVolume * double(endDeviceNs - mVolumeTimeNs)) / durationNs;
+                    mVoiceVolume * double(endDeviceNs - mVolumeTimeNs)) / (double)durationNs;
             saveAsItems_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
         }
         // reset statistics
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index bf6e428..0e89054 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -107,7 +107,7 @@
 {
     ALOGD("%s", __func__);
     // the class destructor clears anyhow, but we enforce clearing items first.
-    mItemsDiscarded += mItems.size();
+    mItemsDiscarded += (int64_t)mItems.size();
     mItems.clear();
 }
 
@@ -287,7 +287,7 @@
         std::lock_guard _l(mLock);
 
         if (clear) {
-            mItemsDiscarded += mItems.size();
+            mItemsDiscarded += (int64_t)mItems.size();
             mItems.clear();
             mAudioAnalytics.clear();
         } else {
@@ -416,10 +416,10 @@
 
     if (const size_t toErase = overlimit + expired;
             toErase > 0) {
-        mItemsDiscardedCount += overlimit;
-        mItemsDiscardedExpire += expired;
-        mItemsDiscarded += toErase;
-        mItems.erase(mItems.begin(), mItems.begin() + toErase); // erase from front
+        mItemsDiscardedCount += (int64_t)overlimit;
+        mItemsDiscardedExpire += (int64_t)expired;
+        mItemsDiscarded += (int64_t)toErase;
+        mItems.erase(mItems.begin(), mItems.begin() + (ptrdiff_t)toErase); // erase from front
     }
     return more;
 }
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index e503885..e67720c 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -13,6 +13,9 @@
     srcs: [
         "ResourceManagerService.cpp",
         "ServiceLog.cpp",
+
+        // TODO: convert to AIDL?
+        "IMediaResourceMonitor.cpp",
     ],
 
     shared_libs: [
diff --git a/services/mediaresourcemanager/IMediaResourceMonitor.cpp b/services/mediaresourcemanager/IMediaResourceMonitor.cpp
new file mode 100644
index 0000000..42d7feb
--- /dev/null
+++ b/services/mediaresourcemanager/IMediaResourceMonitor.cpp
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "IMediaResourceMonitor.h"
+#include <binder/Parcel.h>
+#include <utils/Errors.h>
+#include <sys/types.h>
+
+namespace android {
+
+// ----------------------------------------------------------------------
+
+class BpMediaResourceMonitor : public BpInterface<IMediaResourceMonitor> {
+public:
+    explicit BpMediaResourceMonitor(const sp<IBinder>& impl)
+        : BpInterface<IMediaResourceMonitor>(impl) {}
+
+    virtual void notifyResourceGranted(/*in*/ int32_t pid, /*in*/ const int32_t type)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaResourceMonitor::getInterfaceDescriptor());
+        data.writeInt32(pid);
+        data.writeInt32(type);
+        remote()->transact(NOTIFY_RESOURCE_GRANTED, data, &reply, IBinder::FLAG_ONEWAY);
+    }
+};
+
+IMPLEMENT_META_INTERFACE(MediaResourceMonitor, "android.media.IMediaResourceMonitor")
+
+// ----------------------------------------------------------------------
+
+// NOLINTNEXTLINE(google-default-arguments)
+status_t BnMediaResourceMonitor::onTransact( uint32_t code, const Parcel& data, Parcel* reply,
+        uint32_t flags) {
+    switch(code) {
+        case NOTIFY_RESOURCE_GRANTED: {
+            CHECK_INTERFACE(IMediaResourceMonitor, data, reply);
+            int32_t pid = data.readInt32();
+            const int32_t type = data.readInt32();
+            notifyResourceGranted(/*in*/ pid, /*in*/ type);
+            return NO_ERROR;
+        } break;
+        default:
+            return BBinder::onTransact(code, data, reply, flags);
+    }
+}
+
+// ----------------------------------------------------------------------
+
+} // namespace android
diff --git a/services/mediaresourcemanager/IMediaResourceMonitor.h b/services/mediaresourcemanager/IMediaResourceMonitor.h
new file mode 100644
index 0000000..f92d557
--- /dev/null
+++ b/services/mediaresourcemanager/IMediaResourceMonitor.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#ifndef __ANDROID_VNDK__
+
+#include <binder/IInterface.h>
+
+namespace android {
+
+// ----------------------------------------------------------------------
+
+class IMediaResourceMonitor : public IInterface {
+public:
+    DECLARE_META_INTERFACE(MediaResourceMonitor)
+
+    // Values should be in sync with Intent.EXTRA_MEDIA_RESOURCE_TYPE_XXX.
+    enum {
+        TYPE_VIDEO_CODEC = 0,
+        TYPE_AUDIO_CODEC = 1,
+    };
+
+    virtual void notifyResourceGranted(/*in*/ int32_t pid, /*in*/ const int32_t type) = 0;
+
+    enum {
+        NOTIFY_RESOURCE_GRANTED = IBinder::FIRST_CALL_TRANSACTION,
+    };
+};
+
+// ----------------------------------------------------------------------
+
+class BnMediaResourceMonitor : public BnInterface<IMediaResourceMonitor> {
+public:
+    // NOLINTNEXTLINE(google-default-arguments)
+    virtual status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply,
+            uint32_t flags = 0);
+};
+
+// ----------------------------------------------------------------------
+
+} // namespace android
+
+#else // __ANDROID_VNDK__
+#error "This header is not visible to vendors"
+#endif // __ANDROID_VNDK__
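
For reference, a hedged sketch of how a client might drive the re-homed interface from C++. The libbinder calls (defaultServiceManager(), checkService(), interface_cast<>) are standard, but the service name used here is an assumption; check ResourceManagerService.cpp for the actual registration name.

    // Hedged usage sketch of IMediaResourceMonitor. Assumption: the monitor
    // is registered under "media.resource_monitor" -- verify the real name in
    // ResourceManagerService.cpp before relying on it.
    #include <binder/IServiceManager.h>
    #include <utils/String16.h>

    #include "IMediaResourceMonitor.h"

    namespace android {

    void notifyCodecGranted(int32_t pid) {
        sp<IBinder> binder = defaultServiceManager()->checkService(
                String16("media.resource_monitor"));   // assumed service name
        if (binder == nullptr) {
            return;                                    // monitor not running, nothing to do
        }
        sp<IMediaResourceMonitor> monitor = interface_cast<IMediaResourceMonitor>(binder);
        // One-way transaction: the caller does not block on the remote side.
        monitor->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_VIDEO_CODEC);
    }

    }  // namespace android
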
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index db06a36..1695228 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -21,7 +21,7 @@
 
 #include <android/binder_manager.h>
 #include <android/binder_process.h>
-#include <binder/IMediaResourceMonitor.h>
+#include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
 #include <cutils/sched_policy.h>
 #include <dirent.h>
@@ -35,6 +35,7 @@
 #include <sys/time.h>
 #include <unistd.h>
 
+#include "IMediaResourceMonitor.h"
 #include "ResourceManagerService.h"
 #include "ServiceLog.h"
 
diff --git a/services/mediatranscoding/tests/Android.bp b/services/mediatranscoding/tests/Android.bp
index 86e047e..a856c05 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/services/mediatranscoding/tests/Android.bp
@@ -25,6 +25,7 @@
     ],
 
     shared_libs: [
+        "libactivitymanager_aidl",
         "libbinder",
         "libbinder_ndk",
         "liblog",