Merge "codec2: Fix VideoEncoderRoiTest cts failure" into main
diff --git a/apex/Android.bp b/apex/Android.bp
index b0d7c02..356bf03 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -173,6 +173,7 @@
         "mediaswcodec",
     ],
     native_shared_libs: [
+        "libapexcodecs",
         "libcodec2_hidl@1.0",
         "libcodec2_hidl@1.1",
         "libcodec2_hidl@1.2",
diff --git a/apex/ld.config.txt b/apex/ld.config.txt
index 4dc5fb1..c24d51f 100644
--- a/apex/ld.config.txt
+++ b/apex/ld.config.txt
@@ -33,7 +33,7 @@
 # TODO: replace the following when apex has a way to auto-generate this list
 # namespace.default.link.platform.shared_libs  = %LLNDK_LIBRARIES%
 # namespace.default.link.platform.shared_libs += %SANITIZER_RUNTIME_LIBRARIES%
-namespace.default.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libdl_android.so:libvulkan.so:libbinder_ndk.so
+namespace.default.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libdl_android.so:libvulkan.so:libbinder_ndk.so
 
 ###############################################################################
 # "platform" namespace
@@ -138,7 +138,7 @@
 # TODO: replace the following when apex has a way to auto-generate this list
 # namespace.sphal.link.platform.shared_libs  = %LLNDK_LIBRARIES%
 # namespace.sphal.link.platform.shared_libs += %SANITIZER_RUNTIME_LIBRARIES%
-namespace.sphal.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libvulkan.so:libbinder_ndk.so
+namespace.sphal.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libvulkan.so:libbinder_ndk.so
 
 # Add a link for libz.so which is llndk on devices where VNDK is not enforced.
 namespace.sphal.link.platform.shared_libs += libz.so
diff --git a/cmds/stagefright/AudioPlayer.cpp b/cmds/stagefright/AudioPlayer.cpp
index 6cddf47..54885ef 100644
--- a/cmds/stagefright/AudioPlayer.cpp
+++ b/cmds/stagefright/AudioPlayer.cpp
@@ -101,6 +101,10 @@
 
     CHECK(mFirstBuffer == NULL);
 
+    if (!mAudioPlayerWrapper) {
+        mAudioPlayerWrapper = sp<MediaPlayerBase::WeakWrapper<AudioPlayer>>::make(this);
+    }
+
     MediaSource::ReadOptions options;
     if (mSeeking) {
         options.setSeekTo(mSeekTimeUs);
@@ -203,7 +207,7 @@
                 mSampleRate, numChannels, channelMask, audioFormat,
                 DEFAULT_AUDIOSINK_BUFFERCOUNT,
                 &AudioPlayer::AudioSinkCallback,
-                this,
+                mAudioPlayerWrapper,
                 (audio_output_flags_t)flags,
                 useOffload() ? &offloadInfo : NULL);
 
@@ -430,10 +434,11 @@
 
 // static
 size_t AudioPlayer::AudioSinkCallback(
-        MediaPlayerBase::AudioSink * /* audioSink */,
-        void *buffer, size_t size, void *cookie,
+        const sp<MediaPlayerBase::AudioSink>& /* audioSink */,
+        void *buffer, size_t size, const wp<RefBase>& cookie,
         MediaPlayerBase::AudioSink::cb_event_t event) {
-    AudioPlayer *me = (AudioPlayer *)cookie;
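+    // Promote the weak cookie back to a strong AudioPlayer reference; if the player has
+    // already been destroyed, skip the callback instead of touching a dangling pointer.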
+    const auto me = MediaPlayerBase::WeakWrapper<AudioPlayer>::promoteFromRefBase(cookie);
+    if (!me) return 0;
 
     switch(event) {
     case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
diff --git a/cmds/stagefright/AudioPlayer.h b/cmds/stagefright/AudioPlayer.h
index 608f54b..877ac13 100644
--- a/cmds/stagefright/AudioPlayer.h
+++ b/cmds/stagefright/AudioPlayer.h
@@ -29,7 +29,7 @@
 
 struct AwesomePlayer;
 
-class AudioPlayer : AudioTrack::IAudioTrackCallback {
+class AudioPlayer : public AudioTrack::IAudioTrackCallback {
 public:
     enum {
         REACHED_EOS,
@@ -97,14 +97,15 @@
     MediaBufferBase *mFirstBuffer;
 
     sp<MediaPlayerBase::AudioSink> mAudioSink;
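+    // Weak wrapper handed to the AudioSink as the callback cookie (see AudioSinkCallback).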
+    sp<MediaPlayerBase::WeakWrapper<AudioPlayer>> mAudioPlayerWrapper;
 
     bool mPlaying;
     int64_t mStartPosUs;
     const uint32_t mCreateFlags;
 
     static size_t AudioSinkCallback(
-            MediaPlayerBase::AudioSink *audioSink,
-            void *data, size_t size, void *me,
+            const sp<MediaPlayerBase::AudioSink>& audioSink,
+            void *data, size_t size, const wp<RefBase>& me,
             MediaPlayerBase::AudioSink::cb_event_t event);
 
     size_t fillBuffer(void *data, size_t size);
diff --git a/drm/drmserver/Android.bp b/drm/drmserver/Android.bp
index 81c2003..cee44b9 100644
--- a/drm/drmserver/Android.bp
+++ b/drm/drmserver/Android.bp
@@ -78,9 +78,6 @@
         "libselinux",
         "libstagefright_foundation",
     ],
-    whole_static_libs: [
-        "libc++fs",
-    ],
 
     cflags: [
         "-Wall",
@@ -127,7 +124,6 @@
     ],
 
     static_libs: [
-        "libc++fs",
         "libmediautils",
         "liblog",
         "libdrmframeworkcommon",
diff --git a/drm/libdrmframework/plugins/passthru/Android.bp b/drm/libdrmframework/plugins/passthru/Android.bp
index 0a6cd47..6ac7188 100644
--- a/drm/libdrmframework/plugins/passthru/Android.bp
+++ b/drm/libdrmframework/plugins/passthru/Android.bp
@@ -45,9 +45,6 @@
         "libdl",
         "libdrmframeworkcommon",
     ],
-    whole_static_libs: [
-        "libc++fs",
-    ],
 
     local_include_dirs: ["include"],
 
diff --git a/drm/libmediadrm/DrmHalHidl.cpp b/drm/libmediadrm/DrmHalHidl.cpp
index c8c6e8e..33ea5ea 100644
--- a/drm/libmediadrm/DrmHalHidl.cpp
+++ b/drm/libmediadrm/DrmHalHidl.cpp
@@ -339,7 +339,7 @@
             DrmUtils::LOG2BI("makeDrmFactories: using default passthrough drm instance");
             factories.push_back(passthrough);
         } else {
-            DrmUtils::LOG2BE("Failed to find passthrough drm factories");
+            DrmUtils::LOG2BW("Failed to find passthrough drm factories");
         }
     }
     return factories;
diff --git a/drm/mediadrm/plugins/clearkey/aidl/Android.bp b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
index cce6338..f46409f 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
@@ -133,6 +133,7 @@
     required: [
         "com.android.hardware.drm.clearkey",
     ],
+    vendor: true,
 }
 
 cc_defaults {
diff --git a/media/aconfig/Android.bp b/media/aconfig/Android.bp
index 16beb28..1e5eafb 100644
--- a/media/aconfig/Android.bp
+++ b/media/aconfig/Android.bp
@@ -50,3 +50,22 @@
     ],
     aconfig_declarations: "aconfig_codec_fwk_flags",
 }
+
+aconfig_declarations {
+    name: "aconfig_media_swcodec_flags",
+    package: "android.media.swcodec.flags",
+    container: "com.android.media.swcodec",
+    srcs: ["swcodec_flags.aconfig"],
+}
+
+cc_aconfig_library {
+    name: "android.media.swcodec.flags-aconfig-cc",
+    aconfig_declarations: "aconfig_media_swcodec_flags",
+    min_sdk_version: "apex_inherit",
+    vendor_available: true,
+    double_loadable: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+}
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index ed1522b..c820a2c 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -13,6 +13,21 @@
 }
 
 flag {
+  name: "apv_support"
+  is_exported: true
+  namespace: "codec_fwk"
+  description: "Feature flag for Android support for APV Content"
+  bug: "375464302"
+}
+
+flag {
+  name: "codec_availability"
+  namespace: "codec_fwk"
+  description: "Feature flag for codec availability HAL API support"
+  bug: "363282971"
+}
+
+flag {
   name: "codec_buffer_state_cleanup"
   namespace: "codec_fwk"
   description: "Bugfix flag for more buffer state cleanup in MediaCodec"
@@ -103,6 +118,21 @@
 }
 
 flag {
+  name: "num_input_slots"
+  namespace: "codec_fwk"
+  description: "Feature flag for exposing number of input slots"
+  bug: "159891571"
+}
+
+flag {
+  name: "p210_format_support"
+  is_exported: true
+  namespace: "codec_fwk"
+  description: "Feature flag for Android support for P210 YCbCr format"
+  bug: "368395888"
+}
+
+flag {
   name: "region_of_interest"
   is_exported: true
   namespace: "codec_fwk"
@@ -118,6 +148,13 @@
 }
 
 flag {
+  name: "rendering_depth_removal"
+  namespace: "codec_fwk"
+  description: "Feature flag for removing rendering depth"
+  bug: "275527219"
+}
+
+flag {
   name: "secure_codecs_require_crypto"
   namespace: "codec_fwk"
   description: "Bugfix flag for requiring setting crypto for secure codecs"
@@ -158,6 +195,13 @@
 }
 
 flag {
+  name: "subsession_metrics"
+  namespace: "codec_fwk"
+  description: "Feature flag for subsession codec metrics"
+  bug: "363382811"
+}
+
+flag {
   name: "teamfood"
   namespace: "codec_fwk"
   description: "Feature flag to track teamfood population"
diff --git a/media/aconfig/swcodec_flags.aconfig b/media/aconfig/swcodec_flags.aconfig
new file mode 100644
index 0000000..a435a43
--- /dev/null
+++ b/media/aconfig/swcodec_flags.aconfig
@@ -0,0 +1,14 @@
+# Media SW Codec Flags.
+#
+# !!! Please add flags in alphabetical order. !!!
+package: "android.media.swcodec.flags"
+container: "com.android.media.swcodec"
+
+flag {
+  name: "apv_software_codec"
+  is_exported: true
+  is_fixed_read_only: true
+  namespace: "codec_fwk"
+  description: "Feature flag for APV Software C2 codec"
+  bug: "376770121"
+}
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index a5aeff2..5f4a6a1 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -116,7 +116,11 @@
     package: "android.media.audio",
     container: "system",
     srcs: ["audio_framework.aconfig"],
-    visibility: ["//frameworks/base/api"],
+    visibility: [
+        "//frameworks/base/api",
+        "//frameworks/base/core/res",
+    ],
+    exportable: true,
 }
 
 aconfig_declarations {
@@ -149,6 +153,20 @@
 }
 
 java_aconfig_library {
+    name: "android.media.audio-aconfig-exported-java",
+    aconfig_declarations: "android.media.audio-aconfig",
+    defaults: ["framework-minus-apex-aconfig-java-defaults"],
+    min_sdk_version: "Tiramisu",
+    mode: "exported",
+    apex_available: [
+        "com.android.btservices",
+    ],
+    visibility: [
+        "//packages/modules/Bluetooth:__subpackages__",
+    ],
+}
+
+java_aconfig_library {
     name: "android.media.audiopolicy-aconfig-java",
     aconfig_declarations: "android.media.audiopolicy-aconfig",
     defaults: ["framework-minus-apex-aconfig-java-defaults"],
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index ea5f26d..b155bac 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -23,6 +23,14 @@
 }
 
 flag {
+    name: "deprecate_stream_bt_sco"
+    namespace: "media_audio"
+    description: "Deprecate STREAM_BLUETOOTH_SCO"
+    is_exported: true
+    bug: "376756660"
+}
+
+flag {
     name: "feature_spatial_audio_headtracking_low_latency"
     is_exported: true
     namespace: "media_audio"
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 90996a3..40d5f5f 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -2315,6 +2315,15 @@
     audio_port_config_device_ext legacy{};
     RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
                     aidl.device, &legacy.type, legacy.address));
+    const bool isInput = false;  // speaker_layout_channel_mask only represents output.
+    if (aidl.speakerLayout.has_value()) {
+        legacy.speaker_layout_channel_mask =
+                VALUE_OR_RETURN(aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                        aidl.speakerLayout.value(), isInput));
+    } else {
+        // Default to none when the field is null in the AIDL.
+        legacy.speaker_layout_channel_mask = AUDIO_CHANNEL_NONE;
+    }
     return legacy;
 }
 
@@ -2323,6 +2332,14 @@
     AudioPortDeviceExt aidl;
     aidl.device = VALUE_OR_RETURN(
             legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
+    const bool isInput = false;  // speaker_layout_channel_mask only represents output.
+    // The AIDL speakerLayout is nullable and if set, can only be a layoutMask.
+    if (audio_channel_mask_is_valid(legacy.speaker_layout_channel_mask) &&
+        audio_channel_mask_get_representation(legacy.speaker_layout_channel_mask) ==
+                AUDIO_CHANNEL_REPRESENTATION_POSITION) {
+        aidl.speakerLayout = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                legacy.speaker_layout_channel_mask, isInput));
+    }
     return aidl;
 }
 
diff --git a/media/codec2/components/apv/Android.bp b/media/codec2/components/apv/Android.bp
new file mode 100644
index 0000000..f565978
--- /dev/null
+++ b/media/codec2/components/apv/Android.bp
@@ -0,0 +1,58 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+    enabled: true,
+    name: "libcodec2_soft_apvenc",
+    defaults: [
+        "libcodec2_soft-defaults",
+        "libcodec2_soft_sanitize_signed-defaults",
+        "libcodec2_soft_sanitize_cfi-defaults",
+    ],
+
+    static_libs: [
+        "libopenapv",
+        "android.media.swcodec.flags-aconfig-cc",
+    ],
+
+    srcs: ["C2SoftApvEnc.cpp"],
+
+    cflags: [
+        "-DOAPV_STATIC_DEFINE",
+        "-Wno-unused-variable",
+        "-Wno-unused-parameter",
+        "-Wno-unused-function",
+        "-Wno-reorder-ctor",
+    ],
+}
+
+cc_library {
+    enabled: true,
+    name: "libcodec2_soft_apvdec",
+    defaults: [
+        "libcodec2_soft-defaults",
+        "libcodec2_soft_sanitize_signed-defaults",
+        "libcodec2_soft_sanitize_cfi-defaults",
+    ],
+
+    static_libs: [
+        "libopenapv",
+        "android.media.swcodec.flags-aconfig-cc",
+    ],
+
+    srcs: ["C2SoftApvDec.cpp"],
+
+    cflags: [
+        "-DOAPV_STATIC_DEFINE",
+        "-Wno-unused-variable",
+        "-Wno-unused-parameter",
+        "-Wno-unused-function",
+        "-Wno-reorder-ctor",
+    ],
+}
diff --git a/media/codec2/components/apv/C2SoftApvCommon.h b/media/codec2/components/apv/C2SoftApvCommon.h
new file mode 100644
index 0000000..9325f28
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvCommon.h
@@ -0,0 +1,169 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_APV_COMMON_H__
+#define ANDROID_C2_SOFT_APV_COMMON_H__
+
+typedef enum {
+    PIX_CHROMA_NA = 0xFFFFFFFF,
+    PIX_YUV_420P = 0x1,
+    PIX_YUV_422P = 0x2,
+    PIX_420_UV_INTL = 0x3,
+    PIX_YUV_422IBE = 0x4,
+    PIX_YUV_422ILE = 0x5,
+    PIX_YUV_444P = 0x6,
+    PIX_YUV_411P = 0x7,
+    PIX_GRAY = 0x8,
+    PIX_RGB_565 = 0x9,
+    PIX_RGB_24 = 0xa,
+    PIX_YUV_420SP_UV = 0xb,
+    PIX_YUV_420SP_VU = 0xc,
+    PIX_YUV_422SP_UV = 0xd,
+    PIX_YUV_422SP_VU = 0xe
+} PIX_COLOR_FORMAT_T;
+
+#define CLIP_VAL(n, min, max) (((n) > (max)) ? (max) : (((n) < (min)) ? (min) : (n)))
+#define ALIGN_VAL(val, align) ((((val) + (align) - 1) / (align)) * (align))
+
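+// Note: despite their names, these helpers do plain, non-atomic updates of the image
+// buffer reference count; callers are assumed to use them from a single thread.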
+static int atomic_inc(volatile int* pcnt) {
+    int ret;
+    ret = *pcnt;
+    ret++;
+    *pcnt = ret;
+    return ret;
+}
+
+static int atomic_dec(volatile int* pcnt) {
+    int ret;
+    ret = *pcnt;
+    ret--;
+    *pcnt = ret;
+    return ret;
+}
+
+/* Function to allocate memory for a picture buffer:
+   this function may need to be modified for the target OS or CPU platform.
+*/
+static void* picbuf_alloc(int size) {
+    return malloc(size);
+}
+
+/* Function to free memory allocated for a picture buffer:
+   this function may need to be modified for the target OS or CPU platform.
+*/
+static void picbuf_free(void* p) {
+    if (p) {
+        free(p);
+    }
+}
+
+static int imgb_addref(oapv_imgb_t* imgb) {
+    return atomic_inc(&imgb->refcnt);
+}
+
+static int imgb_getref(oapv_imgb_t* imgb) {
+    return imgb->refcnt;
+}
+
+static int imgb_release(oapv_imgb_t* imgb) {
+    int refcnt, i;
+    refcnt = atomic_dec(&imgb->refcnt);
+    if (refcnt == 0) {
+        for (i = 0; i < OAPV_MAX_CC; i++) {
+            if (imgb->baddr[i]) picbuf_free(imgb->baddr[i]);
+        }
+        free(imgb);
+    }
+    return refcnt;
+}
+
+static oapv_imgb_t* imgb_create(int w, int h, int cs) {
+    int i, bd;
+    oapv_imgb_t* imgb;
+
+    imgb = (oapv_imgb_t*)malloc(sizeof(oapv_imgb_t));
+    if (imgb == NULL) goto ERR;
+    memset(imgb, 0, sizeof(oapv_imgb_t));
+
+    bd = OAPV_CS_GET_BYTE_DEPTH(cs); /* byte unit */
+
+    imgb->w[0] = w;
+    imgb->h[0] = h;
+    switch (OAPV_CS_GET_FORMAT(cs)) {
+        case OAPV_CF_YCBCR400:
+            imgb->w[1] = imgb->w[2] = w;
+            imgb->h[1] = imgb->h[2] = h;
+            imgb->np = 1;
+            break;
+        case OAPV_CF_YCBCR420:
+            imgb->w[1] = imgb->w[2] = (w + 1) >> 1;
+            imgb->h[1] = imgb->h[2] = (h + 1) >> 1;
+            imgb->np = 3;
+            break;
+        case OAPV_CF_YCBCR422:
+            imgb->w[1] = imgb->w[2] = (w + 1) >> 1;
+            imgb->h[1] = imgb->h[2] = h;
+            imgb->np = 3;
+            break;
+        case OAPV_CF_YCBCR444:
+            imgb->w[1] = imgb->w[2] = w;
+            imgb->h[1] = imgb->h[2] = h;
+            imgb->np = 3;
+            break;
+        case OAPV_CF_YCBCR4444:
+            imgb->w[1] = imgb->w[2] = imgb->w[3] = w;
+            imgb->h[1] = imgb->h[2] = imgb->h[3] = h;
+            imgb->np = 4;
+            break;
+        case OAPV_CF_PLANAR2:
+            imgb->w[1] = w;
+            imgb->h[1] = h;
+            imgb->np = 2;
+            break;
+        default:
+            goto ERR;
+    }
+
+    for (i = 0; i < imgb->np; i++) {
+        // width and height need to be aligned to macroblock size
+        imgb->aw[i] = ALIGN_VAL(imgb->w[i], OAPV_MB_W);
+        imgb->s[i] = imgb->aw[i] * bd;
+        imgb->ah[i] = ALIGN_VAL(imgb->h[i], OAPV_MB_H);
+        imgb->e[i] = imgb->ah[i];
+
+        imgb->bsize[i] = imgb->s[i] * imgb->e[i];
+        imgb->a[i] = imgb->baddr[i] = picbuf_alloc(imgb->bsize[i]);
+        memset(imgb->a[i], 0, imgb->bsize[i]);
+    }
+    imgb->cs = cs;
+    imgb->addref = imgb_addref;
+    imgb->getref = imgb_getref;
+    imgb->release = imgb_release;
+
+    imgb->addref(imgb); /* increase reference count */
+    return imgb;
+
+ERR:
+    if (imgb) {
+        for (int i = 0; i < OAPV_MAX_CC; i++) {
+            if (imgb->a[i]) picbuf_free(imgb->a[i]);
+        }
+        free(imgb);
+    }
+    return NULL;
+}
+
+#endif  // ANDROID_C2_SOFT_APV_COMMON_H__
\ No newline at end of file
diff --git a/media/codec2/components/apv/C2SoftApvDec.cpp b/media/codec2/components/apv/C2SoftApvDec.cpp
new file mode 100644
index 0000000..0064cec
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvDec.cpp
@@ -0,0 +1,1247 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftApvDec"
+#include <log/log.h>
+
+#include <android_media_swcodec_flags.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+#include "C2SoftApvDec.h"
+
+#include <cutils/properties.h>
+
+const char* MEDIA_MIMETYPE_VIDEO_APV = "video/apv";
+
+#define MAX_NUM_FRMS (1)  // supports only 1-frame output
+#define FRM_IDX (0)       // supports only 1-frame output
+// check whether the frame is a regular (non-auxiliary) frame
+#define IS_NON_AUX_FRM(frm)                              \
+    (((frm)->pbu_type == OAPV_PBU_TYPE_PRIMARY_FRAME) || \
+     ((frm)->pbu_type == OAPV_PBU_TYPE_NON_PRIMARY_FRAME))
+// check whether the frame is an auxiliary frame
+#define IS_AUX_FRM(frm) (!(IS_NON_AUX_FRM(frm)))
+#define OUTPUT_CSP_NATIVE (0)
+#define OUTPUT_CSP_P210 (1)
+
+namespace android {
+namespace {
+constexpr char COMPONENT_NAME[] = "c2.android.apv.decoder";
+constexpr uint32_t kDefaultOutputDelay = 8;
+constexpr uint32_t kMaxOutputDelay = 16;
+constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
+}  // namespace
+
+class C2SoftApvDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+  public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+        : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
+                                            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_APV) {
+        noPrivateBuffers();  // TODO: account for our buffers here.
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
+
+        addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                             .withConstValue(new C2ComponentAttributesSetting(
+                                     C2Component::ATTRIB_IS_TEMPORAL))
+                             .build());
+
+        addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+                             .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 4096),
+                                     C2F(mSize, height).inRange(2, 4096),
+                             })
+                             .withSetter(SizeSetter)
+                             .build());
+
+        addParameter(
+                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                        .withDefault(new C2StreamProfileLevelInfo::input(
+                                0u, C2Config::PROFILE_APV_422_10))
+                        .withFields(
+                                {C2F(mProfileLevel, profile).oneOf({C2Config::PROFILE_APV_422_10}),
+                                 C2F(mProfileLevel, level)
+                                         .oneOf({
+                                                C2Config::LEVEL_APV_1_BAND_0,
+                                                C2Config::LEVEL_APV_1_1_BAND_0,
+                                                C2Config::LEVEL_APV_2_BAND_0,
+                                                C2Config::LEVEL_APV_2_1_BAND_0,
+                                                C2Config::LEVEL_APV_3_BAND_0,
+                                                C2Config::LEVEL_APV_3_1_BAND_0,
+                                                C2Config::LEVEL_APV_4_BAND_0,
+                                                C2Config::LEVEL_APV_4_1_BAND_0,
+                                                C2Config::LEVEL_APV_5_BAND_0,
+                                                C2Config::LEVEL_APV_5_1_BAND_0,
+                                                C2Config::LEVEL_APV_6_BAND_0,
+                                                C2Config::LEVEL_APV_6_1_BAND_0,
+                                                C2Config::LEVEL_APV_7_BAND_0,
+                                                C2Config::LEVEL_APV_7_1_BAND_0,
+                                                C2Config::LEVEL_APV_1_BAND_1,
+                                                C2Config::LEVEL_APV_1_1_BAND_1,
+                                                C2Config::LEVEL_APV_2_BAND_1,
+                                                C2Config::LEVEL_APV_2_1_BAND_1,
+                                                C2Config::LEVEL_APV_3_BAND_1,
+                                                C2Config::LEVEL_APV_3_1_BAND_1,
+                                                C2Config::LEVEL_APV_4_BAND_1,
+                                                C2Config::LEVEL_APV_4_1_BAND_1,
+                                                C2Config::LEVEL_APV_5_BAND_1,
+                                                C2Config::LEVEL_APV_5_1_BAND_1,
+                                                C2Config::LEVEL_APV_6_BAND_1,
+                                                C2Config::LEVEL_APV_6_1_BAND_1,
+                                                C2Config::LEVEL_APV_7_BAND_1,
+                                                C2Config::LEVEL_APV_7_1_BAND_1,
+                                                C2Config::LEVEL_APV_1_BAND_2,
+                                                C2Config::LEVEL_APV_1_1_BAND_2,
+                                                C2Config::LEVEL_APV_2_BAND_2,
+                                                C2Config::LEVEL_APV_2_1_BAND_2,
+                                                C2Config::LEVEL_APV_3_BAND_2,
+                                                C2Config::LEVEL_APV_3_1_BAND_2,
+                                                C2Config::LEVEL_APV_4_BAND_2,
+                                                C2Config::LEVEL_APV_4_1_BAND_2,
+                                                C2Config::LEVEL_APV_5_BAND_2,
+                                                C2Config::LEVEL_APV_5_1_BAND_2,
+                                                C2Config::LEVEL_APV_6_BAND_2,
+                                                C2Config::LEVEL_APV_6_1_BAND_2,
+                                                C2Config::LEVEL_APV_7_BAND_2,
+                                                C2Config::LEVEL_APV_7_1_BAND_2,
+                                                C2Config::LEVEL_APV_1_BAND_3,
+                                                C2Config::LEVEL_APV_1_1_BAND_3,
+                                                C2Config::LEVEL_APV_2_BAND_3,
+                                                C2Config::LEVEL_APV_2_1_BAND_3,
+                                                C2Config::LEVEL_APV_3_BAND_3,
+                                                C2Config::LEVEL_APV_3_1_BAND_3,
+                                                C2Config::LEVEL_APV_4_BAND_3,
+                                                C2Config::LEVEL_APV_4_1_BAND_3,
+                                                C2Config::LEVEL_APV_5_BAND_3,
+                                                C2Config::LEVEL_APV_5_1_BAND_3,
+                                                C2Config::LEVEL_APV_6_BAND_3,
+                                                C2Config::LEVEL_APV_6_1_BAND_3,
+                                                C2Config::LEVEL_APV_7_BAND_3,
+                                                C2Config::LEVEL_APV_7_1_BAND_3,
+                                                 })})
+                        .withSetter(ProfileLevelSetter, mSize)
+                        .build());
+
+        mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoInput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoInput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoInputSetter)
+                             .build());
+
+        mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoOutput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoOutput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoOutputSetter)
+                             .build());
+
+        // default static info
+        C2HdrStaticMetadataStruct defaultStaticInfo{};
+        helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
+        addParameter(
+                DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
+                        .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
+                        .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
+                                     C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
+                                     C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
+                                     C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
+                        .withSetter(HdrStaticInfoSetter)
+                        .build());
+
+        addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+                             .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 4096, 2),
+                                     C2F(mSize, height).inRange(2, 4096, 2),
+                             })
+                             .withSetter(MaxPictureSizeSetter, mSize)
+                             .build());
+
+        addParameter(
+                DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+                        .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
+                        .withFields({
+                                C2F(mMaxInputSize, value).any(),
+                        })
+                        .calculatedAs(MaxInputSizeSetter, mMaxSize)
+                        .build());
+
+        C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
+        std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+                C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+        defaultColorInfo = C2StreamColorInfo::output::AllocShared(
+                {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        helper->addStructDescriptors<C2ChromaOffsetStruct>();
+        addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+                             .withConstValue(defaultColorInfo)
+                             .build());
+
+        addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsTuning::output(
+                                     0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mDefaultColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mDefaultColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mDefaultColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mDefaultColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(DefaultColorAspectsSetter)
+                             .build());
+
+        addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsInfo::input(
+                                     0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mCodedColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mCodedColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mCodedColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mCodedColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(CodedColorAspectsSetter)
+                             .build());
+
+        addParameter(
+                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                        .withDefault(new C2StreamColorAspectsInfo::output(
+                                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                        .withFields(
+                                {C2F(mColorAspects, range)
+                                         .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+                                 C2F(mColorAspects, primaries)
+                                         .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                  C2Color::PRIMARIES_OTHER),
+                                 C2F(mColorAspects, transfer)
+                                         .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                  C2Color::TRANSFER_OTHER),
+                                 C2F(mColorAspects, matrix)
+                                         .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                  C2Color::MATRIX_OTHER)})
+                        .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+                        .build());
+
+        // TODO: support more formats?
+        std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+        }
+        // If the surface color format isn't added to the supported formats, there is no way
+        // to know when the color format is configured for surface mode. This is necessary to
+        // be able to choose a 10-bit format while decoding 10-bit clips in surface mode.
+        pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+        addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+                             .withDefault(new C2StreamPixelFormatInfo::output(
+                                     0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                             .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+                             .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+                             .build());
+    }
+
+    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
+                          C2P<C2StreamPictureSizeInfo::output>& me) {
+        (void)mayBlock;
+        ALOGV("%s - %d x %d", __FUNCTION__, me.v.width, me.v.height);
+        C2R res = C2R::Ok();
+        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+            me.set().width = oldMe.v.width;
+        }
+        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+            me.set().height = oldMe.v.height;
+        }
+        return res;
+    }
+
+    static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
+                                    const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        ALOGV("%s - %d x %d", __FUNCTION__, me.v.width, me.v.height);
+        // TODO: get max width/height from the size's field helpers vs.
+        // hardcoding
+        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
+        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
+        return C2R::Ok();
+    }
+
+    static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
+                                  const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
+        (void)mayBlock;
+        ALOGV("%s", __FUNCTION__);
+        // assume compression ratio of 2, but enforce a floor
+        me.set().value =
+                c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
+                       kMinInputBufferSize);
+        return C2R::Ok();
+    }
+
+    static C2R DefaultColorAspectsSetter(bool mayBlock,
+                                         C2P<C2StreamColorAspectsTuning::output>& me) {
+        (void)mayBlock;
+        ALOGV("%s - range: %u, primary: %u, transfer: %u, matrix: %u", __FUNCTION__, me.v.range,
+              me.v.primaries, me.v.transfer, me.v.matrix);
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
+        (void)mayBlock;
+        ALOGV("%s - range: %u, primaries: %u, transfer: %u, matrix: %u", __func__, me.v.range,
+              me.v.primaries, me.v.transfer, me.v.matrix);
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+                                  const C2P<C2StreamColorAspectsTuning::output>& def,
+                                  const C2P<C2StreamColorAspectsInfo::input>& coded) {
+        (void)mayBlock;
+        ALOGV("%s", __FUNCTION__);
+        // take default values for all unspecified fields, and coded values for specified ones
+        me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+        me.set().primaries =
+                coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
+        me.set().transfer =
+                coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
+        me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+        ALOGV("%s - me.v.range = %u, me.v.primaries = %u, me.v.transfer = %u, me.v.matrix = %u",
+              __func__, me.v.range, me.v.primaries, me.v.transfer, me.v.matrix);
+        return C2R::Ok();
+    }
+
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
+                                  const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        ALOGV("%s", __FUNCTION__);
+        (void)size;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
+        ALOGV("%s - mDefaultColorAspects: %u", __FUNCTION__, mDefaultColorAspects->primaries);
+        return mDefaultColorAspects;
+    }
+
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
+        ALOGV("%s - mColorAspects: %u", __FUNCTION__, mColorAspects->primaries);
+        return mColorAspects;
+    }
+
+    static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
+        (void)mayBlock;
+        ALOGV("%s", __FUNCTION__);
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
+        (void)mayBlock;
+        ALOGV("%s", __FUNCTION__);
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    // unsafe getters
+    std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
+        return mPixelFormat;
+    }
+
+    static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
+        (void)mayBlock;
+        ALOGV("%s", __FUNCTION__);
+        if (me.v.mastering.red.x > 1) {
+            me.set().mastering.red.x = 1;
+        }
+        if (me.v.mastering.red.y > 1) {
+            me.set().mastering.red.y = 1;
+        }
+        if (me.v.mastering.green.x > 1) {
+            me.set().mastering.green.x = 1;
+        }
+        if (me.v.mastering.green.y > 1) {
+            me.set().mastering.green.y = 1;
+        }
+        if (me.v.mastering.blue.x > 1) {
+            me.set().mastering.blue.x = 1;
+        }
+        if (me.v.mastering.blue.y > 1) {
+            me.set().mastering.blue.y = 1;
+        }
+        if (me.v.mastering.white.x > 1) {
+            me.set().mastering.white.x = 1;
+        }
+        if (me.v.mastering.white.y > 1) {
+            me.set().mastering.white.y = 1;
+        }
+        if (me.v.mastering.maxLuminance > 65535.0) {
+            me.set().mastering.maxLuminance = 65535.0;
+        }
+        if (me.v.mastering.minLuminance > 6.5535) {
+            me.set().mastering.minLuminance = 6.5535;
+        }
+        if (me.v.maxCll > 65535.0) {
+            me.set().maxCll = 65535.0;
+        }
+        if (me.v.maxFall > 65535.0) {
+            me.set().maxFall = 65535.0;
+        }
+        return C2R::Ok();
+    }
+
+  private:
+    std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+    std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+    std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+    std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+    std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+    std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+    std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
+    std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
+    std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
+};
+
+static void ivd_aligned_free(void* ctxt, void* mem) {
+    (void)ctxt;
+    free(mem);
+}
+
+C2SoftApvDec::C2SoftApvDec(const char* name, c2_node_id_t id,
+                           const std::shared_ptr<IntfImpl>& intfImpl)
+    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+      mIntf(intfImpl),
+      mDecHandle(nullptr),
+      mOutBufferFlush(nullptr),
+      mIvColorformat(IV_YUV_420P),
+      mOutputDelay(kDefaultOutputDelay),
+      mHeaderDecoded(false),
+      mOutIndex(0u),
+      mHalPixelFormat(HAL_PIXEL_FORMAT_YV12),
+      mWidth(320),
+      mHeight(240),
+      mSignalledOutputEos(false),
+      mSignalledError(false) {
+    oapvdHandle = NULL;
+    oapvmHandle = NULL;
+    outputCsp = OUTPUT_CSP_NATIVE;
+}
+
+C2SoftApvDec::~C2SoftApvDec() {
+    onRelease();
+}
+
+c2_status_t C2SoftApvDec::onInit() {
+    ALOGV("%s", __FUNCTION__);
+    status_t err = initDecoder();
+    return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftApvDec::onStop() {
+    ALOGV("%s", __FUNCTION__);
+    if (OK != resetDecoder()) return C2_CORRUPTED;
+    resetPlugin();
+    return C2_OK;
+}
+
+void C2SoftApvDec::onReset() {
+    ALOGV("%s", __FUNCTION__);
+    (void)onStop();
+}
+
+status_t C2SoftApvDec::deleteDecoder() {
+    ALOGV("%s", __FUNCTION__);
+    if (oapvdHandle) {
+        oapvd_delete(oapvdHandle);
+        oapvdHandle = NULL;
+    }
+    if (oapvmHandle) {
+        oapvm_delete(oapvmHandle);
+        oapvmHandle = NULL;
+    }
+    for (int i = 0; i < ofrms.num_frms; i++) {
+        if (ofrms.frm[i].imgb != NULL) {
+            ofrms.frm[i].imgb->release(ofrms.frm[i].imgb);
+            ofrms.frm[i].imgb = NULL;
+        }
+    }
+    return OK;
+}
+
+void C2SoftApvDec::onRelease() {
+    ALOGV("%s", __FUNCTION__);
+    (void)deleteDecoder();
+    if (mOutBufferFlush) {
+        ivd_aligned_free(nullptr, mOutBufferFlush);
+        mOutBufferFlush = nullptr;
+    }
+    if (mOutBlock) {
+        mOutBlock.reset();
+    }
+}
+
+c2_status_t C2SoftApvDec::onFlush_sm() {
+    ALOGV("%s", __FUNCTION__);
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+    return C2_OK;
+}
+
+status_t C2SoftApvDec::createDecoder() {
+    ALOGV("%s", __FUNCTION__);
+    return OK;
+}
+
+status_t C2SoftApvDec::initDecoder() {
+    int ret;
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+
+    mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        mPixelFormatInfo = mIntf->getPixelFormat_l();
+        ALOGW("Hal pixel format = %d", mPixelFormatInfo->value);
+    }
+    memset(&cdesc, 0, sizeof(oapvd_cdesc_t));
+
+    cdesc.threads = 1;  // default
+    oapvdHandle = oapvd_create(&cdesc, &ret);
+    if (oapvdHandle == NULL) {
+        ALOGE("ERROR: cannot create APV decoder (err=%d)\n", ret);
+        return C2_NO_INIT;
+    }
+
+    memset(&ofrms, 0, sizeof(oapv_frms_t));
+
+    oapvmHandle = oapvm_create(&ret);
+    if (OAPV_FAILED(ret)) {
+        ALOGE("oapvm create failed");
+        oapvd_delete(oapvdHandle);
+        oapvdHandle = NULL;
+        return C2_NO_INIT;
+    }
+
+    ALOGV("oapvd init done");
+    return OK;
+}
+
+status_t C2SoftApvDec::setFlushMode() {
+    ALOGV("%s", __FUNCTION__);
+    return OK;
+}
+
+status_t C2SoftApvDec::resetDecoder() {
+    ALOGV("%s", __FUNCTION__);
+    return OK;
+}
+
+void C2SoftApvDec::resetPlugin() {
+    ALOGV("%s", __FUNCTION__);
+    mSignalledOutputEos = false;
+    if (mOutBlock) {
+        mOutBlock.reset();
+    }
+}
+
+void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+    uint32_t flags = 0;
+    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+        flags |= C2FrameData::FLAG_END_OF_STREAM;
+        ALOGV("signalling eos");
+    }
+    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->workletsProcessed = 1u;
+}
+
+void C2SoftApvDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                              const std::shared_ptr<C2GraphicBlock>& block) {
+    std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        buffer->setInfo(mIntf->getColorAspects_l());
+    }
+
+    class FillWork {
+      public:
+        FillWork(uint32_t flags, C2WorkOrdinalStruct ordinal,
+                 const std::shared_ptr<C2Buffer>& buffer)
+            : mFlags(flags), mOrdinal(ordinal), mBuffer(buffer) {}
+        ~FillWork() = default;
+
+        void operator()(const std::unique_ptr<C2Work>& work) {
+            work->worklets.front()->output.flags = (C2FrameData::flags_t)mFlags;
+            work->worklets.front()->output.buffers.clear();
+            work->worklets.front()->output.ordinal = mOrdinal;
+            work->workletsProcessed = 1u;
+            work->result = C2_OK;
+            if (mBuffer) {
+                work->worklets.front()->output.buffers.push_back(mBuffer);
+            }
+            ALOGV("timestamp = %lld, index = %lld, w/%s buffer", mOrdinal.timestamp.peekll(),
+                  mOrdinal.frameIndex.peekll(), mBuffer ? "" : "o");
+        }
+
+      private:
+        const uint32_t mFlags;
+        const C2WorkOrdinalStruct mOrdinal;
+        const std::shared_ptr<C2Buffer> mBuffer;
+    };
+
+    auto fillWork = [buffer](const std::unique_ptr<C2Work>& work) {
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.buffers.push_back(buffer);
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+    };
+
+    if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+        bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+        // TODO: Check if cloneAndSend can be avoided by tracking number of frames remaining
+        if (eos) {
+            if (buffer) {
+                mOutIndex = index;
+                C2WorkOrdinalStruct outOrdinal = work->input.ordinal;
+                cloneAndSend(mOutIndex, work,
+                             FillWork(C2FrameData::FLAG_INCOMPLETE, outOrdinal, buffer));
+                buffer.reset();
+            }
+        } else {
+            fillWork(work);
+        }
+    } else {
+        finish(index, fillWork);
+    }
+}
+
+static void copyBufferFromYUV420ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+                                       const uint8_t* srcY, const uint8_t* srcU,
+                                       const uint8_t* srcV, size_t srcYStride, size_t srcUStride,
+                                       size_t srcVStride, size_t dstYStride, size_t dstUStride,
+                                       size_t dstVStride, uint32_t width, uint32_t height) {
+    for (size_t i = 0; i < height; ++i) {
+        memcpy(dstY, srcY, width);
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        memcpy(dstU, srcU, width / 2);
+        memcpy(dstV, srcV, width / 2);
+        dstU += dstUStride;
+        srcU += srcUStride;
+        dstV += dstVStride;
+        srcV += srcVStride;
+    }
+}
+
+static void copyBufferFromYUV422ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+                                       const uint8_t* srcY, const uint8_t* srcU,
+                                       const uint8_t* srcV, size_t srcYStride, size_t srcUStride,
+                                       size_t srcVStride, size_t dstYStride, size_t dstUStride,
+                                       size_t dstVStride, uint32_t width, uint32_t height) {
+    for (size_t i = 0; i < height; ++i) {
+        memcpy(dstY, srcY, width);
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
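+    // The source is 4:2:2, so skip every other chroma row to match YV12's 4:2:0 subsampling.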
+    for (size_t i = 0; i < height / 2; ++i) {
+        memcpy(dstU, srcU, width / 2);
+        memcpy(dstV, srcV, width / 2);
+        dstU += dstUStride;
+        srcU += srcUStride * 2;
+        dstV += dstVStride;
+        srcV += srcVStride * 2;
+    }
+}
+
+static void copyBufferFromYUV42010bitToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+                                            const uint16_t* srcU, const uint16_t* srcV,
+                                            size_t srcYStride, size_t srcUStride, size_t srcVStride,
+                                            size_t dstYStride, size_t dstUVStride, size_t width,
+                                            size_t height) {
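+    // P010 stores 10-bit samples MSB-aligned in 16-bit words, hence the << 6 shift.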
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            dstY[x] = srcY[x] << 6;
+        }
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    for (size_t y = 0; y < height / 2; ++y) {
+        for (size_t x = 0; x < width / 2; ++x) {
+            dstUV[2 * x] = srcU[x] << 6;
+            dstUV[2 * x + 1] = srcV[x] << 6;
+        }
+        srcU += srcUStride;
+        srcV += srcVStride;
+        dstUV += dstUVStride;
+    }
+}
+
+static void copyBufferFromYUV42210bitToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+                                            const uint16_t* srcU, const uint16_t* srcV,
+                                            size_t srcYStride, size_t srcUStride, size_t srcVStride,
+                                            size_t dstYStride, size_t dstUVStride, size_t width,
+                                            size_t height) {
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            dstY[x] = srcY[x] << 6;
+        }
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    for (size_t y = 0; y < height / 2; ++y) {
+        for (size_t x = 0; x < width / 2; ++x) {
+            dstUV[2 * x] = srcU[x] << 6;
+            dstUV[2 * x + 1] = srcV[x] << 6;
+        }
+        srcU += srcUStride * 2;
+        srcV += srcVStride * 2;
+        dstUV += dstUVStride;
+    }
+}
+
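+// P210 and P010 share the same 16-bit, MSB-justified, interleaved-UV sample layout,
+// so the Y plane is copied verbatim and the chroma is downsampled vertically by
+// copying every other UV row.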
+static void copyBufferFromP210ToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+                                     const uint16_t* srcUV, size_t srcYStride, size_t srcUVStride,
+                                     size_t dstYStride, size_t dstUVStride, size_t width,
+                                     size_t height) {
+    for (size_t y = 0; y < height; ++y) {
+        memcpy(dstY, srcY, width * sizeof(uint16_t));
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    for (size_t y = 0; y < height / 2; ++y) {
+        memcpy(dstUV, srcUV, width * 2);
+        srcUV += srcUVStride * 2;
+        dstUV += dstUVStride;
+    }
+}
+
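+// Reduces 10-bit planar 4:2:0 to 8-bit planar output by keeping the 8 most
+// significant of the 10 valid bits (shift right by 2) in each plane.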
+static void copyBufferFromYUV42010bitToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+                                            const uint16_t* srcY, const uint16_t* srcU,
+                                            const uint16_t* srcV, size_t srcYStride,
+                                            size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                            size_t dstUStride, size_t dstVStride, uint32_t width,
+                                            uint32_t height) {
+    for (size_t i = 0; i < height; ++i) {
+        for (size_t j = 0; j < width; ++j) {
+            dstY[i * dstYStride + j] = (srcY[i * srcYStride + j] >> 2) & 0xFF;
+        }
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        for (size_t j = 0; j < width / 2; ++j) {
+            dstU[i * dstUStride + j] = (srcU[i * srcUStride + j] >> 2) & 0xFF;
+        }
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        for (size_t j = 0; j < width / 2; ++j) {
+            dstV[i * dstVStride + j] = (srcV[i * srcVStride + j] >> 2) & 0xFF;
+        }
+    }
+}
+
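+// Same 10-bit to 8-bit reduction as above, additionally dropping every other
+// chroma row to subsample the 4:2:2 source down to 4:2:0.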
+static void copyBufferFromYUV42210bitToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+                                            const uint16_t* srcY, const uint16_t* srcU,
+                                            const uint16_t* srcV, size_t srcYStride,
+                                            size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                            size_t dstUStride, size_t dstVStride, uint32_t width,
+                                            uint32_t height) {
+    for (size_t i = 0; i < height; ++i) {
+        for (size_t j = 0; j < width; ++j) {
+            dstY[i * dstYStride + j] = (srcY[i * srcYStride + j] >> 2) & 0xFF;
+        }
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        for (size_t j = 0; j < width / 2; ++j) {
+            dstU[i * dstUStride + j] = (srcU[i * srcUStride * 2 + j] >> 2) & 0xFF;
+        }
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        for (size_t j = 0; j < width / 2; ++j) {
+            dstV[i * dstVStride + j] = (srcV[i * srcVStride * 2 + j] >> 2) & 0xFF;
+        }
+    }
+}
+
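+// P210 samples are MSB-justified in 16 bits, so the top 8 bits are extracted with
+// a shift right by 8. The interleaved UV plane is split into separate chroma
+// planes and vertically subsampled by taking every other row.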
+static void copyBufferFromP210ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
+                                     const uint16_t* srcY, const uint16_t* srcUV, size_t srcYStride,
+                                     size_t srcUVStride, size_t dstYStride, size_t dstUStride,
+                                     size_t dstVStride, size_t width, size_t height) {
+    for (size_t i = 0; i < height; ++i) {
+        for (size_t j = 0; j < width; ++j) {
+            dstY[i * dstYStride + j] = (srcY[i * srcYStride + j] >> 8) & 0xFF;
+        }
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        for (size_t j = 0; j < width / 2; ++j) {
+            dstV[i * dstVStride + j] = (srcUV[i * srcUVStride * 2 + j * 2] >> 8) & 0xFF;
+            dstU[i * dstUStride + j] = (srcUV[i * srcUVStride * 2 + j * 2 + 1] >> 8) & 0xFF;
+        }
+    }
+}
+
+void C2SoftApvDec::process(const std::unique_ptr<C2Work>& work,
+                           const std::shared_ptr<C2BlockPool>& pool) {
+    // Initialize output work
+    work->result = C2_OK;
+    work->workletsProcessed = 0u;
+    work->worklets.front()->output.configUpdate.clear();
+    work->worklets.front()->output.flags = work->input.flags;
+    if (mSignalledError || mSignalledOutputEos) {
+        work->result = C2_BAD_VALUE;
+        return;
+    }
+
+    int ret = 0;
+    size_t inOffset = 0u;
+    size_t inSize = 0u;
+    C2ReadView rView = mDummyReadView;
+    if (!work->input.buffers.empty()) {
+        rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+        inSize = rView.capacity();
+        if (inSize && rView.error()) {
+            ALOGE("read view map failed %d", rView.error());
+            work->result = C2_CORRUPTED;
+            return;
+        }
+    }
+
+    bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
+    bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+
+    ALOGV("in buffer attr. size %zu timestamp %llu frameindex %d, flags %x", inSize,
+          work->input.ordinal.timestamp.peekull(), (int)work->input.ordinal.frameIndex.peeku(),
+          work->input.flags);
+
+    if (codecConfig) {
+        fillEmptyWork(work);
+        return;
+    }
+
+    if (inSize > 0) {
+        uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);
+        oapv_au_info_t aui;
+        oapv_bitb_t bitb;
+        bitb.addr = bitstream + 4;  // skip the 4-byte au size prefix
+        bitb.ssize = inSize - 4;
+
+        if (OAPV_FAILED(oapvd_info(bitb.addr, bitb.ssize, &aui))) {
+            ALOGE("cannot get information from bitstream");
+            return;
+        }
+
+        /* create decoding frame buffers */
+        ofrms.num_frms = aui.num_frms;
+        if (ofrms.num_frms <= 0) {
+            ALOGE("Parse error - no output frame(%d)", ofrms.num_frms);
+            fillEmptyWork(work);
+            return;
+        }
+        for (int i = 0; i < ofrms.num_frms; i++) {
+            oapv_frm_info_t* finfo = &aui.frm_info[FRM_IDX];
+            oapv_frm_t* frm = &ofrms.frm[i];
+
+            if (mWidth != finfo->w || mHeight != finfo->h) {
+                mWidth = finfo->w;
+                mHeight = finfo->h;
+            }
+
+            if (frm->imgb != NULL && (frm->imgb->w[0] != finfo->w || frm->imgb->h[0] != finfo->h)) {
+                frm->imgb->release(frm->imgb);
+                frm->imgb = NULL;
+            }
+
+            if (frm->imgb == NULL) {
+                if (outputCsp == OUTPUT_CSP_P210) {
+                    frm->imgb = imgb_create(finfo->w, finfo->h, OAPV_CS_P210);
+                } else {
+                    frm->imgb = imgb_create(finfo->w, finfo->h, finfo->cs);
+                }
+                if (frm->imgb == NULL) {
+                    ALOGE("cannot allocate image buffer (w:%d, h:%d, cs:%d)", finfo->w, finfo->h,
+                          finfo->cs);
+                    fillEmptyWork(work);
+                    return;
+                }
+            }
+        }
+
+        oapvd_stat_t stat;
+        ret = oapvd_decode(oapvdHandle, &bitb, &ofrms, oapvmHandle, &stat);
+        if (bitb.ssize != stat.read) {
+            ALOGW("decode done, input size: %d, processed size: %d", bitb.ssize, stat.read);
+        }
+
+        if (OAPV_FAILED(ret)) {
+            ALOGE("failed to decode bitstream\n");
+            fillEmptyWork(work);
+            return;
+        }
+
+        status_t err = outputBuffer(pool, work);
+        if (err == NOT_ENOUGH_DATA) {
+            if (inSize > 0) {
+                ALOGV("Maybe non-display frame at %lld.", work->input.ordinal.frameIndex.peekll());
+                // send the work back with empty buffer.
+                inSize = 0;
+            }
+        } else if (err != OK) {
+            ALOGD("Error while getting the output frame out");
+            // work->result would be already filled; do fillEmptyWork() below to
+            // send the work back.
+            inSize = 0;
+        }
+    }
+
+    if (eos) {
+        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+        mSignalledOutputEos = true;
+    } else if (!inSize) {
+        fillEmptyWork(work);
+    }
+}
+
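+// Fetches a graphic block from the pool and copies the first decoded frame into it.
+// For 10-bit content the output is P010 (or an RGBA1010102-capable format when the
+// color aspects allow it) unless the client requested an 8-bit YCbCr 420 888 format;
+// otherwise YV12 is used, with the matching copy helper above selected from the
+// decoder's chroma format and bit depth.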
+status_t C2SoftApvDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                                    const std::unique_ptr<C2Work>& work) {
+    if (!(work && pool)) return BAD_VALUE;
+
+    oapv_imgb_t* imgbOutput;
+    std::shared_ptr<C2GraphicBlock> block;
+
+    if (ofrms.num_frms > 0) {
+        oapv_frm_t* frm = &ofrms.frm[0];
+        imgbOutput = frm->imgb;
+    } else {
+        ALOGW("No output frames");
+        return NOT_ENOUGH_DATA;
+    }
+    bool isMonochrome = OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CS_YCBCR400;
+
+    uint32_t format = HAL_PIXEL_FORMAT_YV12;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
+    if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10 &&
+        mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+        IntfImpl::Lock lock = mIntf->lock();
+        codedColorAspects = mIntf->getColorAspects_l();
+        bool allowRGBA1010102 = false;
+        if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+            codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+            codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+            allowRGBA1010102 = true;
+        }
+        format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+    }
+
+    if (mHalPixelFormat != format) {
+        C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
+        } else {
+            ALOGE("Config update pixelFormat failed");
+            mSignalledError = true;
+            work->workletsProcessed = 1u;
+            work->result = C2_CORRUPTED;
+            return UNKNOWN_ERROR;
+        }
+        mHalPixelFormat = format;
+    }
+    ALOGV("mHalPixelFormat: %u, format: %d", mHalPixelFormat, format);
+
+    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+    // check: aligning the height only to a multiple of 2 does not work, so align both dimensions to 16.
+    c2_status_t err =
+            pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 16), format, usage, &block);
+
+    if (err != C2_OK) {
+        ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+        work->result = err;
+        return UNKNOWN_ERROR;
+    }
+
+    C2GraphicView wView = block->map().get();
+    if (wView.error()) {
+        ALOGE("graphic view map failed %d", wView.error());
+        work->result = C2_CORRUPTED;
+        return UNKNOWN_ERROR;
+    }
+
+    ALOGV("provided (%dx%d) required (%dx%d)", block->width(), block->height(), mWidth, mHeight);
+
+    uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
+    uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
+
+    C2PlanarLayout layout = wView.layout();
+    size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+    size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+    if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+        if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
+            const uint16_t* srcY = (const uint16_t*)imgbOutput->a[0];
+            const uint16_t* srcU = (const uint16_t*)imgbOutput->a[1];
+            const uint16_t* srcV = (const uint16_t*)imgbOutput->a[2];
+            size_t srcYStride = imgbOutput->s[0] / 2;
+            size_t srcUStride = imgbOutput->s[1] / 2;
+            size_t srcVStride = imgbOutput->s[2] / 2;
+            dstYStride /= 2;
+            dstUStride /= 2;
+            dstVStride /= 2;
+            if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420) {
+                ALOGV("OAPV_CS_YUV420 10bit to P010");
+                copyBufferFromYUV42010bitToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV,
+                                                srcYStride, srcUStride, srcVStride, dstYStride,
+                                                dstUStride, mWidth, mHeight);
+            } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422) {
+                ALOGV("OAPV_CS_YUV422 10bit to P010");
+                copyBufferFromYUV42210bitToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV,
+                                                srcYStride, srcUStride, srcVStride, dstYStride,
+                                                dstUStride, mWidth, mHeight);
+            } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_PLANAR2) {
+                ALOGV("OAPV_CS_P210 to P010");
+                copyBufferFromP210ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcYStride,
+                                         srcUStride, dstYStride, dstUStride, mWidth, mHeight);
+            } else {
+                ALOGE("Not supported convert format : %d", OAPV_CS_GET_FORMAT(imgbOutput->cs));
+            }
+        } else {
+            ALOGE("Not supported convder from bd:%d, format: %d(%s), to format: %d(%s)",
+                  OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs), OAPV_CS_GET_FORMAT(imgbOutput->cs),
+                  OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420
+                          ? "YUV420"
+                          : (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422 ? "YUV422"
+                                                                                    : "UNKNOWN"),
+                  format,
+                  format == HAL_PIXEL_FORMAT_YCBCR_P010
+                          ? "P010"
+                          : (format == HAL_PIXEL_FORMAT_YCBCR_420_888
+                                     ? "YUV420"
+                                     : (format == HAL_PIXEL_FORMAT_YV12 ? "YV12" : "UNKNOWN")));
+        }
+    } else {  // HAL_PIXEL_FORMAT_YV12
+        if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
+            const uint16_t* srcY = (const uint16_t*)imgbOutput->a[0];
+            const uint16_t* srcV = (const uint16_t*)imgbOutput->a[1];
+            const uint16_t* srcU = (const uint16_t*)imgbOutput->a[2];
+            size_t srcYStride = imgbOutput->s[0] / 2;
+            size_t srcVStride = imgbOutput->s[1] / 2;
+            size_t srcUStride = imgbOutput->s[2] / 2;
+            if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420) {
+                ALOGV("OAPV_CS_YUV420 10bit to YV12");
+                copyBufferFromYUV42010bitToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+                                                srcUStride, srcVStride, dstYStride, dstUStride,
+                                                dstVStride, mWidth, mHeight);
+            } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422) {
+                ALOGV("OAPV_CS_YUV422 10bit to YV12");
+                copyBufferFromYUV42210bitToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+                                                srcUStride, srcVStride, dstYStride, dstUStride,
+                                                dstVStride, mWidth, mHeight);
+            } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_PLANAR2) {
+                ALOGV("OAPV_CS_P210 to YV12");
+                copyBufferFromP210ToYV12(dstY, dstU, dstV, srcY, srcV, srcYStride, srcVStride,
+                                         dstYStride, dstUStride, dstVStride, mWidth, mHeight);
+            } else {
+                ALOGE("Not supported convert format : %d", OAPV_CS_GET_FORMAT(imgbOutput->cs));
+            }
+        } else if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 8) {
+            const uint8_t* srcY = (const uint8_t*)imgbOutput->a[0];
+            const uint8_t* srcV = (const uint8_t*)imgbOutput->a[1];
+            const uint8_t* srcU = (const uint8_t*)imgbOutput->a[2];
+            size_t srcYStride = imgbOutput->s[0];
+            size_t srcVStride = imgbOutput->s[1];
+            size_t srcUStride = imgbOutput->s[2];
+            if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420) {
+                ALOGV("OAPV_CS_YUV420 to YV12");
+                copyBufferFromYUV420ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+                                           srcUStride, srcVStride, dstYStride, dstUStride,
+                                           dstVStride, mWidth, mHeight);
+            } else if (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422) {
+                ALOGV("OAPV_CS_YUV422 to YV12");
+                copyBufferFromYUV422ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+                                           srcUStride, srcVStride, dstYStride, dstUStride,
+                                           dstVStride, mWidth, mHeight);
+            } else {
+                ALOGE("Not supported convert format : %d", OAPV_CS_GET_FORMAT(imgbOutput->cs));
+            }
+        } else {
+            ALOGE("Not supported convert from bd:%d, format: %d(%s), to format: %d(%s)",
+                  OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs), OAPV_CS_GET_FORMAT(imgbOutput->cs),
+                  OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420
+                          ? "YUV420"
+                          : (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422 ? "YUV422"
+                                                                                    : "UNKNOWN"),
+                  format,
+                  format == HAL_PIXEL_FORMAT_YCBCR_P010
+                          ? "P010"
+                          : (format == HAL_PIXEL_FORMAT_YCBCR_420_888
+                                     ? "YUV420"
+                                     : (format == HAL_PIXEL_FORMAT_YV12 ? "YV12" : "UNKNOWN")));
+        }
+    }
+
+    finishWork(work->input.ordinal.frameIndex.peekll(), work, std::move(block));
+    return OK;
+}
+
+c2_status_t C2SoftApvDec::drainInternal(uint32_t drainMode,
+                                        const std::shared_ptr<C2BlockPool>& pool,
+                                        const std::unique_ptr<C2Work>& work) {
+    if (drainMode == NO_DRAIN) {
+        ALOGW("drain with NO_DRAIN: no-op");
+        return C2_OK;
+    }
+    if (drainMode == DRAIN_CHAIN) {
+        ALOGW("DRAIN_CHAIN not supported");
+        return C2_OMITTED;
+    }
+
+    if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
+        fillEmptyWork(work);
+    }
+    return C2_OK;
+}
+
+c2_status_t C2SoftApvDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+    return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftApvDecFactory : public C2ComponentFactory {
+  public:
+    C2SoftApvDecFactory()
+        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+                  GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+    virtual c2_status_t createComponent(c2_node_id_t id,
+                                        std::shared_ptr<C2Component>* const component,
+                                        std::function<void(C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(
+                new C2SoftApvDec(COMPONENT_NAME, id,
+                                 std::make_shared<C2SoftApvDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            std::function<void(C2ComponentInterface*)> deleter) override {
+        *interface = std::shared_ptr<C2ComponentInterface>(
+                new SimpleInterface<C2SoftApvDec::IntfImpl>(
+                        COMPONENT_NAME, id, std::make_shared<C2SoftApvDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual ~C2SoftApvDecFactory() override = default;
+
+  private:
+    std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+}  // namespace android
+
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+    if (!android::media::swcodec::flags::apv_software_codec()) {
+        ALOGV("APV SW Codec is not enabled");
+        return nullptr;
+    }
+    return new ::android::C2SoftApvDecFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+        ::C2ComponentFactory* factory) {
+    delete factory;
+}
diff --git a/media/codec2/components/apv/C2SoftApvDec.h b/media/codec2/components/apv/C2SoftApvDec.h
new file mode 100644
index 0000000..f5beb8f
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvDec.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_APV_DEC_H_
+#define ANDROID_C2_SOFT_APV_DEC_H_
+
+#include <media/stagefright/foundation/ColorUtils.h>
+
+#include <SimpleC2Component.h>
+#include <inttypes.h>
+#include <atomic>
+
+#include "oapv.h"
+#include <C2SoftApvCommon.h>
+
+typedef unsigned int UWORD32;
+
+typedef enum {
+    IV_CHROMA_NA = 0xFFFFFFFF,
+    IV_YUV_420P = 0x1,
+    IV_YUV_422P = 0x2,
+    IV_420_UV_INTL = 0x3,
+    IV_YUV_422IBE = 0x4,
+    IV_YUV_422ILE = 0x5,
+    IV_YUV_444P = 0x6,
+    IV_YUV_411P = 0x7,
+    IV_GRAY = 0x8,
+    IV_RGB_565 = 0x9,
+    IV_RGB_24 = 0xa,
+    IV_YUV_420SP_UV = 0xb,
+    IV_YUV_420SP_VU = 0xc,
+    IV_YUV_422SP_UV = 0xd,
+    IV_YUV_422SP_VU = 0xe
+
+} IV_COLOR_FORMAT_T;
+
+typedef struct {
+    /**
+     * u4_size of the structure
+     */
+    UWORD32 u4_size;
+
+    /**
+     * Pointer to the API function pointer table of the codec
+     */
+    void* pv_fxns;
+
+    /**
+     * Pointer to the handle of the codec
+     */
+    void* pv_codec_handle;
+} iv_obj_t;
+
+namespace android {
+
+struct C2SoftApvDec final : public SimpleC2Component {
+    class IntfImpl;
+
+    C2SoftApvDec(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    virtual ~C2SoftApvDec();
+
+    // From SimpleC2Component
+    c2_status_t onInit() override;
+    c2_status_t onStop() override;
+    void onReset() override;
+    void onRelease() override;
+    c2_status_t onFlush_sm() override;
+    void process(const std::unique_ptr<C2Work>& work,
+                 const std::shared_ptr<C2BlockPool>& pool) override;
+    c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override;
+
+  private:
+    status_t createDecoder();
+    status_t initDecoder();
+    bool isConfigured() const;
+    void drainDecoder();
+    status_t setFlushMode();
+    status_t resetDecoder();
+    void resetPlugin();
+    status_t deleteDecoder();
+    void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                    const std::shared_ptr<C2GraphicBlock>& block);
+    void drainRingBuffer(const std::unique_ptr<C2Work>& work,
+                         const std::shared_ptr<C2BlockPool>& pool, bool eos);
+    c2_status_t drainInternal(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool,
+                              const std::unique_ptr<C2Work>& work);
+
+    status_t outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                          const std::unique_ptr<C2Work>& work);
+
+    std::shared_ptr<IntfImpl> mIntf;
+    iv_obj_t* mDecHandle;
+    uint8_t* mOutBufferFlush;
+    IV_COLOR_FORMAT_T mIvColorformat;
+    uint32_t mOutputDelay;
+    bool mHeaderDecoded;
+    std::atomic_uint64_t mOutIndex;
+    std::shared_ptr<C2GraphicBlock> mOutBlock;
+
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
+    std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+    uint32_t mHalPixelFormat;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    bool mSignalledOutputEos;
+    bool mSignalledError;
+
+    oapvd_t oapvdHandle;
+    oapvm_t oapvmHandle;
+    oapvd_cdesc_t cdesc;
+    oapv_frms_t ofrms;
+
+    int outputCsp;
+
+    C2_DO_NOT_COPY(C2SoftApvDec);
+};
+
+}  // namespace android
+
+#endif
diff --git a/media/codec2/components/apv/C2SoftApvEnc.cpp b/media/codec2/components/apv/C2SoftApvEnc.cpp
new file mode 100644
index 0000000..a61cfd6
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvEnc.cpp
@@ -0,0 +1,1285 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftApvEnc"
+#include <log/log.h>
+
+#include <android_media_swcodec_flags.h>
+
+#include <media/hardware/VideoAPI.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/AUtils.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+#include <media/stagefright/foundation/ABitReader.h>
+#include <util/C2InterfaceHelper.h>
+#include <cmath>
+#include "C2SoftApvEnc.h"
+
+namespace android {
+
+namespace {
+
+constexpr char COMPONENT_NAME[] = "c2.android.apv.encoder";
+constexpr uint32_t kMinOutBufferSize = 524288;
+constexpr uint32_t kMaxBitstreamBufSize = 16 * 1024 * 1024;
+constexpr int32_t kApvQpMin = 0;
+constexpr int32_t kApvQpMax = 51;
+constexpr int32_t kApvDefaultQP = 32;
+
+#define PROFILE_APV_DEFAULT 0
+#define LEVEL_APV_DEFAULT 0
+#define MAX_NUM_FRMS (1)  // supports only 1-frame input
+
+}  // namespace
+
+class C2SoftApvEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
+  public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+        : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_ENCODER,
+                                            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_APV) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noTimeStretch();
+        setDerivedInstance(this);
+
+        addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                             .withConstValue(new C2ComponentAttributesSetting(
+                                     C2Component::ATTRIB_IS_TEMPORAL))
+                             .build());
+
+        addParameter(DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
+                             .withConstValue(new C2StreamUsageTuning::input(
+                                     0u, (uint64_t)C2MemoryUsage::CPU_READ))
+                             .build());
+
+        // matches size limits in codec library
+        addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+                             .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 4096, 2),
+                                     C2F(mSize, height).inRange(2, 4096, 2),
+                             })
+                             .withSetter(SizeSetter)
+                             .build());
+
+        // matches limits in codec library
+        addParameter(DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+                             .withDefault(new C2StreamBitrateModeTuning::output(
+                                     0u, C2Config::BITRATE_VARIABLE))
+                             .withFields({C2F(mBitrateMode, value)
+                                                  .oneOf({C2Config::BITRATE_CONST,
+                                                          C2Config::BITRATE_VARIABLE,
+                                                          C2Config::BITRATE_IGNORE})})
+                             .withSetter(Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+                             .build());
+
+        addParameter(DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+                             .withDefault(new C2StreamBitrateInfo::output(0u, 512000))
+                             .withFields({C2F(mBitrate, value).inRange(512000, 240000000)})
+                             .withSetter(BitrateSetter)
+                             .build());
+
+        addParameter(DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
+                             .withDefault(new C2StreamFrameRateInfo::output(0u, 15.))
+                             .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+                             .withSetter(Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+                             .build());
+
+        addParameter(DefineParam(mQuality, C2_PARAMKEY_QUALITY)
+                             .withDefault(new C2StreamQualityTuning::output(0u, 40))
+                             .withFields({C2F(mQuality, value).inRange(0, 100)})
+                             .withSetter(Setter<decltype(*mQuality)>::NonStrictValueWithNoDeps)
+                             .build());
+
+        addParameter(
+                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                        .withDefault(new C2StreamProfileLevelInfo::output(
+                                0u, C2Config::PROFILE_APV_422_10, LEVEL_APV_1_BAND_0))
+                        .withFields({
+                                C2F(mProfileLevel, profile).oneOf({C2Config::PROFILE_APV_422_10}),
+                                C2F(mProfileLevel, level)
+                                        .oneOf({
+                                                C2Config::LEVEL_APV_1_BAND_0,
+                                                C2Config::LEVEL_APV_1_1_BAND_0,
+                                                C2Config::LEVEL_APV_2_BAND_0,
+                                                C2Config::LEVEL_APV_2_1_BAND_0,
+                                                C2Config::LEVEL_APV_3_BAND_0,
+                                                C2Config::LEVEL_APV_3_1_BAND_0,
+                                                C2Config::LEVEL_APV_4_BAND_0,
+                                                C2Config::LEVEL_APV_4_1_BAND_0,
+                                                C2Config::LEVEL_APV_5_BAND_0,
+                                                C2Config::LEVEL_APV_5_1_BAND_0,
+                                                C2Config::LEVEL_APV_6_BAND_0,
+                                                C2Config::LEVEL_APV_6_1_BAND_0,
+                                                C2Config::LEVEL_APV_7_BAND_0,
+                                                C2Config::LEVEL_APV_7_1_BAND_0,
+                                                C2Config::LEVEL_APV_1_BAND_1,
+                                                C2Config::LEVEL_APV_1_1_BAND_1,
+                                                C2Config::LEVEL_APV_2_BAND_1,
+                                                C2Config::LEVEL_APV_2_1_BAND_1,
+                                                C2Config::LEVEL_APV_3_BAND_1,
+                                                C2Config::LEVEL_APV_3_1_BAND_1,
+                                                C2Config::LEVEL_APV_4_BAND_1,
+                                                C2Config::LEVEL_APV_4_1_BAND_1,
+                                                C2Config::LEVEL_APV_5_BAND_1,
+                                                C2Config::LEVEL_APV_5_1_BAND_1,
+                                                C2Config::LEVEL_APV_6_BAND_1,
+                                                C2Config::LEVEL_APV_6_1_BAND_1,
+                                                C2Config::LEVEL_APV_7_BAND_1,
+                                                C2Config::LEVEL_APV_7_1_BAND_1,
+                                                C2Config::LEVEL_APV_1_BAND_2,
+                                                C2Config::LEVEL_APV_1_1_BAND_2,
+                                                C2Config::LEVEL_APV_2_BAND_2,
+                                                C2Config::LEVEL_APV_2_1_BAND_2,
+                                                C2Config::LEVEL_APV_3_BAND_2,
+                                                C2Config::LEVEL_APV_3_1_BAND_2,
+                                                C2Config::LEVEL_APV_4_BAND_2,
+                                                C2Config::LEVEL_APV_4_1_BAND_2,
+                                                C2Config::LEVEL_APV_5_BAND_2,
+                                                C2Config::LEVEL_APV_5_1_BAND_2,
+                                                C2Config::LEVEL_APV_6_BAND_2,
+                                                C2Config::LEVEL_APV_6_1_BAND_2,
+                                                C2Config::LEVEL_APV_7_BAND_2,
+                                                C2Config::LEVEL_APV_7_1_BAND_2,
+                                                C2Config::LEVEL_APV_1_BAND_3,
+                                                C2Config::LEVEL_APV_1_1_BAND_3,
+                                                C2Config::LEVEL_APV_2_BAND_3,
+                                                C2Config::LEVEL_APV_2_1_BAND_3,
+                                                C2Config::LEVEL_APV_3_BAND_3,
+                                                C2Config::LEVEL_APV_3_1_BAND_3,
+                                                C2Config::LEVEL_APV_4_BAND_3,
+                                                C2Config::LEVEL_APV_4_1_BAND_3,
+                                                C2Config::LEVEL_APV_5_BAND_3,
+                                                C2Config::LEVEL_APV_5_1_BAND_3,
+                                                C2Config::LEVEL_APV_6_BAND_3,
+                                                C2Config::LEVEL_APV_6_1_BAND_3,
+                                                C2Config::LEVEL_APV_7_BAND_3,
+                                                C2Config::LEVEL_APV_7_1_BAND_3,
+                                        }),
+                        })
+                        .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
+                        .build());
+
+        addParameter(DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsInfo::input(
+                                     0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(ColorAspectsSetter)
+                             .build());
+
+        addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsInfo::output(
+                                     0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mCodedColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mCodedColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mCodedColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mCodedColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(CodedColorAspectsSetter, mColorAspects)
+                             .build());
+        std::vector<uint32_t> pixelFormats = {
+                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+        };
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+        }
+        addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+                             .withDefault(new C2StreamPixelFormatInfo::input(
+                                     0u, HAL_PIXEL_FORMAT_YCBCR_P010))
+                             .withFields({C2F(mPixelFormat, value).oneOf({pixelFormats})})
+                             .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+                             .build());
+    }
+
+    static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me) {
+        (void)mayBlock;
+        C2R res = C2R::Ok();
+        if (me.v.value < 1000000) {
+            me.set().value = 1000000;
+        }
+        return res;
+    }
+
+    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input>& oldMe,
+                          C2P<C2StreamPictureSizeInfo::input>& me) {
+        (void)mayBlock;
+        C2R res = C2R::Ok();
+        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+            me.set().width = oldMe.v.width;
+        }
+        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+            me.set().height = oldMe.v.height;
+        }
+        return res;
+    }
+
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::output>& me,
+                                  const C2P<C2StreamPictureSizeInfo::input>& size,
+                                  const C2P<C2StreamFrameRateInfo::output>& frameRate,
+                                  const C2P<C2StreamBitrateInfo::output>& bitrate) {
+        (void)mayBlock;
+        if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+            me.set().profile = C2Config::PROFILE_APV_422_10;
+        }
+        if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+            me.set().level = LEVEL_APV_1_BAND_0;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+                                       const C2P<C2StreamColorAspectsInfo::input>& coded) {
+        (void)mayBlock;
+        me.set().range = coded.v.range;
+        me.set().primaries = coded.v.primaries;
+        me.set().transfer = coded.v.transfer;
+        me.set().matrix = coded.v.matrix;
+        return C2R::Ok();
+    }
+
+    uint32_t getProfile_l() const {
+        int32_t profile = PROFILE_UNUSED;
+
+        switch (mProfileLevel->profile) {
+            case C2Config::PROFILE_APV_422_10:
+                profile = 33;
+                break;
+            case C2Config::PROFILE_APV_422_12:
+                profile = 44;
+                break;
+            case C2Config::PROFILE_APV_444_10:
+                profile = 55;
+                break;
+            case C2Config::PROFILE_APV_444_12:
+                profile = 66;
+                break;
+            case C2Config::PROFILE_APV_4444_10:
+                profile = 77;
+                break;
+            case C2Config::PROFILE_APV_4444_12:
+                profile = 88;
+                break;
+            case C2Config::PROFILE_APV_400_10:
+                profile = 99;
+                break;
+            default:
+                ALOGD("Unrecognized profile: %x", mProfileLevel->profile);
+        }
+        return profile;
+    }
+
+    uint32_t getLevel_l() const {
+        int32_t level = LEVEL_UNUSED;
+
+        // TODO: Add Band settings
+        switch (mProfileLevel->level) {
+            case C2Config::LEVEL_APV_1_BAND_0:
+                level = 10;
+                break;
+            case C2Config::LEVEL_APV_1_1_BAND_0:
+                level = 11;
+                break;
+            case C2Config::LEVEL_APV_2_BAND_0:
+                level = 20;
+                break;
+            case C2Config::LEVEL_APV_2_1_BAND_0:
+                level = 21;
+                break;
+            case C2Config::LEVEL_APV_3_BAND_0:
+                level = 30;
+                break;
+            case C2Config::LEVEL_APV_3_1_BAND_0:
+                level = 31;
+                break;
+            case C2Config::LEVEL_APV_4_BAND_0:
+                level = 40;
+                break;
+            case C2Config::LEVEL_APV_4_1_BAND_0:
+                level = 41;
+                break;
+            case C2Config::LEVEL_APV_5_BAND_0:
+                level = 50;
+                break;
+            case C2Config::LEVEL_APV_5_1_BAND_0:
+                level = 51;
+                break;
+            case C2Config::LEVEL_APV_6_BAND_0:
+                level = 60;
+                break;
+            case C2Config::LEVEL_APV_6_1_BAND_0:
+                level = 61;
+                break;
+            case C2Config::LEVEL_APV_7_BAND_0:
+                level = 70;
+                break;
+            case C2Config::LEVEL_APV_7_1_BAND_0:
+                level = 71;
+                break;
+            default:
+                ALOGD("Unrecognized level: %x", mProfileLevel->level);
+        }
+        // Convert to APV level_idc according to APV spec
+        return level * 3;
+    }
+
+    int32_t getBitrateMode_l() const {
+        int32_t bitrateMode = C2Config::BITRATE_CONST;
+
+        switch (mBitrateMode->value) {
+            case C2Config::BITRATE_CONST:
+                bitrateMode = OAPV_RC_CQP;
+                break;
+            case C2Config::BITRATE_VARIABLE:
+                bitrateMode = OAPV_RC_ABR;
+                break;
+            case C2Config::BITRATE_IGNORE:
+                bitrateMode = 0;
+                break;
+            default:
+                ALOGE("Unrecognized bitrate mode: %x", mBitrateMode->value);
+        }
+        return bitrateMode;
+    }
+
+    std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
+    std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
+    std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
+    std::shared_ptr<C2StreamQualityTuning::output> getQuality_l() const { return mQuality; }
+    std::shared_ptr<C2StreamColorAspectsInfo::input> getColorAspects_l() const {
+        return mColorAspects;
+    }
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
+        return mCodedColorAspects;
+    }
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+        return mPictureQuantization;
+    }
+    std::shared_ptr<C2StreamProfileLevelInfo::output> getProfileLevel_l() const {
+        return mProfileLevel;
+    }
+    std::shared_ptr<C2StreamPixelFormatInfo::input> getPixelFormat_l() const {
+        return mPixelFormat;
+    }
+
+  private:
+    std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+    std::shared_ptr<C2StreamUsageTuning::input> mUsage;
+    std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+    std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+    std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
+    std::shared_ptr<C2StreamQualityTuning::output> mQuality;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
+    std::shared_ptr<C2StreamColorInfo::input> mColorFormat;
+    std::shared_ptr<C2StreamPixelFormatInfo::input> mPixelFormat;
+};
+
+C2SoftApvEnc::C2SoftApvEnc(const char* name, c2_node_id_t id,
+                           const std::shared_ptr<IntfImpl>& intfImpl)
+    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+      mIntf(intfImpl),
+      mColorFormat(OAPV_CF_PLANAR2),
+      mStarted(false),
+      mSignalledEos(false),
+      mSignalledError(false),
+      mOutBlock(nullptr) {
+    reset();
+}
+
+C2SoftApvEnc::~C2SoftApvEnc() {
+    onRelease();
+}
+
+c2_status_t C2SoftApvEnc::onInit() {
+    return C2_OK;
+}
+
+c2_status_t C2SoftApvEnc::onStop() {
+    return C2_OK;
+}
+
+void C2SoftApvEnc::onReset() {
+    releaseEncoder();
+    reset();
+}
+
+void C2SoftApvEnc::onRelease() {
+    releaseEncoder();
+}
+
+c2_status_t C2SoftApvEnc::onFlush_sm() {
+    return C2_OK;
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+    uint32_t flags = 0;
+    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+        flags |= C2FrameData::FLAG_END_OF_STREAM;
+        ALOGV("Signalling EOS");
+    }
+    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->workletsProcessed = 1u;
+}
+
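+// Maps the 0..100 quality scale linearly onto the APV QP range: quality 0 yields
+// kApvQpMax (51) and quality 100 yields kApvQpMin (0), clamped to [kApvQpMin, kApvQpMax].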
+int32_t C2SoftApvEnc::getQpFromQuality(int32_t quality) {
+    int32_t qp = ((kApvQpMin - kApvQpMax) * quality / 100) + kApvQpMax;
+    qp = std::min(qp, (int)kApvQpMax);
+    qp = std::max(qp, (int)kApvQpMin);
+    return qp;
+}
+
+c2_status_t C2SoftApvEnc::reset() {
+    ALOGV("reset");
+    mInitEncoder = false;
+    mStarted = false;
+    mSignalledEos = false;
+    mSignalledError = false;
+    mBitDepth = 10;
+    mMaxFrames = MAX_NUM_FRMS;
+    mReceivedFrames = 0;
+    mReceivedFirstFrame = false;
+    mColorFormat = OAPV_CF_PLANAR2;
+    memset(&mInputFrames, 0, sizeof(mInputFrames));
+    memset(&mReconFrames, 0, sizeof(mReconFrames));
+    return C2_OK;
+}
+
+c2_status_t C2SoftApvEnc::releaseEncoder() {
+    for (int32_t i = 0; i < MAX_NUM_FRMS; i++) {
+        if (mInputFrames.frm[i].imgb != nullptr) {
+            imgb_release(mInputFrames.frm[i].imgb);
+            mInputFrames.frm[i].imgb = nullptr;
+        }
+    }
+
+    if (mBitstreamBuf) {
+        delete[] mBitstreamBuf;
+        mBitstreamBuf = nullptr;
+    }
+    return C2_OK;
+}
+
+c2_status_t C2SoftApvEnc::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+    return drainInternal(drainMode, pool, nullptr);
+}
+
+void C2SoftApvEnc::showEncoderParams(oapve_cdesc_t* cdsc) {
+    std::string title = "APV encoder params:";
+    ALOGD("%s width = %d, height = %d", title.c_str(), cdsc->param[0].w, cdsc->param[0].h);
+    ALOGD("%s FrameRate = %f", title.c_str(),
+          (double)cdsc->param[0].fps_num / cdsc->param[0].fps_den);
+    ALOGD("%s BitRate = %d Kbps", title.c_str(), cdsc->param[0].bitrate);
+    ALOGD("%s QP = %d", title.c_str(), cdsc->param[0].qp);
+    ALOGD("%s profile_idc = %d, level_idc = %d, band_idc = %d", title.c_str(),
+          cdsc->param[0].profile_idc, cdsc->param[0].level_idc / 3, cdsc->param[0].band_idc);
+    ALOGD("%s Bitrate Mode: %d", title.c_str(), cdsc->param[0].rc_type);
+    ALOGD("%s mColorAspects primaries: %d, transfer: %d, matrix: %d, range: %d", title.c_str(),
+          mColorAspects->primaries, mColorAspects->transfer, mColorAspects->matrix,
+          mColorAspects->range);
+    ALOGD("%s mCodedColorAspects primaries: %d, transfer: %d, matrix: %d, range: %d", title.c_str(),
+          mCodedColorAspects->primaries, mCodedColorAspects->transfer, mCodedColorAspects->matrix,
+          mCodedColorAspects->range);
+    ALOGD("%s Input color format: %s", title.c_str(),
+          mColorFormat == OAPV_CF_YCBCR422 ? "YUV422P10LE" : "P210");
+    ALOGD("%s max_num_frms: %d", title.c_str(), cdsc->max_num_frms);
+}
+
+c2_status_t C2SoftApvEnc::initEncoder() {
+    if (mInitEncoder) {
+        return C2_OK;
+    }
+    ALOGV("initEncoder");
+
+    mSize = mIntf->getSize_l();
+    mFrameRate = mIntf->getFrameRate_l();
+    mBitrate = mIntf->getBitrate_l();
+    mQuality = mIntf->getQuality_l();
+    mColorAspects = mIntf->getColorAspects_l();
+    mCodedColorAspects = mIntf->getCodedColorAspects_l();
+    mProfileLevel = mIntf->getProfileLevel_l();
+    mPixelFormat = mIntf->getPixelFormat_l();
+
+    mCodecDesc = std::make_unique<oapve_cdesc_t>();
+    if (mCodecDesc == nullptr) {
+        ALOGE("Allocate ctx failed");
+        return C2_NO_INIT;
+    }
+    mCodecDesc->max_bs_buf_size = kMaxBitstreamBufSize;
+    mCodecDesc->max_num_frms = MAX_NUM_FRMS;
+    // TODO: Bound parameters to CPU count
+    mCodecDesc->threads = 4;
+
+    int32_t ret = C2_OK;
+    /* set params */
+    for (int32_t i = 0; i < mMaxFrames; i++) {
+        oapve_param_t* param = &mCodecDesc->param[i];
+        ret = oapve_param_default(param);
+        if (OAPV_FAILED(ret)) {
+            ALOGE("cannot set default parameter");
+            return C2_NO_INIT;
+        }
+        setParams(*param);
+    }
+
+    showEncoderParams(mCodecDesc.get());
+
+    /* create encoder */
+    mEncoderId = oapve_create(mCodecDesc.get(), NULL);
+    if (mEncoderId == NULL) {
+        ALOGE("cannot create APV encoder");
+        return C2_CORRUPTED;
+    }
+
+    /* create metadata */
+    mMetaId = oapvm_create(&ret);
+    if (mMetaId == NULL) {
+        ALOGE("cannot create APV encoder");
+        return C2_NO_MEMORY;
+    }
+
+    /* create image buffers */
+    for (int32_t i = 0; i < mMaxFrames; i++) {
+        if (mBitDepth == 10) {
+            mInputFrames.frm[i].imgb = imgb_create(mCodecDesc->param[0].w, mCodecDesc->param[0].h,
+                                                  OAPV_CS_SET(mColorFormat, mBitDepth, 0));
+            mReconFrames.frm[i].imgb = nullptr;
+        } else {
+            mInputFrames.frm[i].imgb = imgb_create(mCodecDesc->param[0].w, mCodecDesc->param[0].h,
+                                                  OAPV_CS_SET(mColorFormat, 10, 0));
+            mReconFrames.frm[i].imgb = nullptr;
+        }
+    }
+
+    /* allocate bitstream buffer */
+    mBitstreamBuf = new unsigned char[kMaxBitstreamBufSize];
+    if (mBitstreamBuf == nullptr) {
+        ALOGE("cannot allocate bitstream buffer, size= %d", kMaxBitstreamBufSize);
+        return C2_NO_MEMORY;
+    }
+
+    /* Calculate SDR to HDR mapping values */
+    mSdrToHdrMapping.clear();
+    for (int32_t i = 0; i < 256; i++) {
+        mSdrToHdrMapping.push_back((uint16_t)(i * 1023 / 255 + 0.5));
+    }
+
+    mStarted = true;
+    mInitEncoder = true;
+    return C2_OK;
+}
+
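+// Copies the configured picture size, frame rate (carried as fps_num/fps_den with a
+// fixed denominator of 100 to preserve fractional rates), bitrate in kbps, rate-control
+// mode, QP, profile and level into the encoder parameter struct, updating the level
+// if the configured one cannot carry the requested stream.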
+void C2SoftApvEnc::setParams(oapve_param_t& param) {
+    param.w = mSize->width;
+    param.h = mSize->height;
+    param.fps_num = (int)(mFrameRate->value * 100);
+    param.fps_den = 100;
+    param.bitrate = mBitrate->value / 1000;
+    param.rc_type = mIntf->getBitrateMode_l();
+
+    int ApvQP = kApvDefaultQP;
+    if (param.rc_type == OAPV_RC_CQP) {
+        ApvQP = getQpFromQuality(mQuality->value);
+        ALOGI("Bitrate mode is CQ, so QP value is derived from Quality. Quality is %d, QP is %d",
+              mQuality->value, ApvQP);
+    }
+    param.qp = ApvQP;
+    param.band_idc = 0;  // TODO: Get from the Level setting
+    param.profile_idc = mIntf->getProfile_l();
+    C2Config::level_t level = decisionApvLevel(
+            param.w, param.h, (int)(param.fps_num / param.fps_den), param.bitrate, param.band_idc);
+    if (mProfileLevel->level != level) {
+        mProfileLevel->level = level;
+        ALOGI("Need to update level to %d", mIntf->getLevel_l());
+    }
+    param.level_idc = mIntf->getLevel_l();
+}
+
+c2_status_t C2SoftApvEnc::setEncodeArgs(oapv_frms_t* inputFrames, const C2GraphicView* const input,
+                                        uint64_t workIndex) {
+    if (input->width() < mSize->width || input->height() < mSize->height) {
+        /* Expect width height to be configured */
+        ALOGW("unexpected Capacity Aspect %d(%d) x %d(%d)", input->width(), mSize->width,
+              input->height(), mSize->height);
+        return C2_BAD_VALUE;
+    }
+    const C2PlanarLayout& layout = input->layout();
+    uint8_t* yPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t* uPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_U]);
+    uint8_t* vPlane = const_cast<uint8_t*>(input->data()[C2PlanarLayout::PLANE_V]);
+    int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+    int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+    uint32_t width = mSize->width;
+    uint32_t height = mSize->height;
+
+    /* width and height must be even */
+    if (width & 1u || height & 1u) {
+        ALOGW("height(%u) and width(%u) must both be even", height, width);
+        return C2_BAD_VALUE;
+    }
+
+    /* Set num frames */
+    inputFrames->num_frms = MAX_NUM_FRMS;
+    inputFrames->frm[mReceivedFrames].group_id = 1;
+    inputFrames->frm[mReceivedFrames].pbu_type = OAPV_PBU_TYPE_PRIMARY_FRAME;
+
+    switch (layout.type) {
+        case C2PlanarLayout::TYPE_RGB:
+            [[fallthrough]];
+        case C2PlanarLayout::TYPE_RGBA: {
+            // TODO: Add RGBA1010102 support
+            ALOGE("Not supported RGB color format");
+            return C2_BAD_VALUE;
+        }
+        case C2PlanarLayout::TYPE_YUV: {
+            if (IsP010(*input)) {
+                if (mColorFormat == OAPV_CF_YCBCR422) {
+                    ColorConvertP010ToYUV422P10le(input, inputFrames->frm[0].imgb);
+                } else if (mColorFormat == OAPV_CF_PLANAR2) {
+                    ColorConvertP010ToP210(input, inputFrames->frm[0].imgb);
+                } else {
+                    ALOGE("Not supported color format. %d", mColorFormat);
+                    return C2_BAD_VALUE;
+                }
+            } else if (IsNV12(*input)) {
+                ColorConvertNv12ToP210(input, inputFrames->frm[0].imgb);
+            } else if (IsNV21(*input)) {
+                ColorConvertNv12ToP210(input, inputFrames->frm[0].imgb);
+            } else if (IsYUV420(*input)) {
+                return C2_BAD_VALUE;
+            } else if (IsI420(*input)) {
+                return C2_BAD_VALUE;
+            } else {
+                ALOGE("Not supported color format. %d", mColorFormat);
+                return C2_BAD_VALUE;
+            }
+            break;
+        }
+
+        default:
+            ALOGE("Unrecognized plane type: %d", layout.type);
+            return C2_BAD_VALUE;
+    }
+
+    return C2_OK;
+}
+
+void C2SoftApvEnc::ColorConvertNv12ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb) {
+    auto width = input->width();
+    auto height = input->height();
+
+    auto* yPlane = (uint8_t*)input->data()[0];
+    auto* uvPlane = (uint8_t*)input->data()[1];
+
+    auto* dst = (uint16_t*)imgb->a[0];
+    int32_t lumaOffset = 0;
+    for (int32_t y = 0; y < height; ++y) {
+        for (int32_t x = 0; x < width; ++x) {
+            lumaOffset = y * width + x;
+            dst[lumaOffset] = (mSdrToHdrMapping[yPlane[lumaOffset]] << 6) |
+                              ((mSdrToHdrMapping[yPlane[lumaOffset]] & 0x300) >> 3);
+        }
+    }
+
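+    // NV12 carries 4:2:0 interleaved CbCr; each source chroma row is widened to 10 bits via
+    // mSdrToHdrMapping and written twice, so the destination plane becomes 4:2:2 (P210-style).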
+    auto* dst_uv = (uint16_t*)imgb->a[1];
+    uint32_t uvDstStride = width;
+    int32_t srcOffset = 0;
+    int32_t dstOffset1 = 0, dstOffset2 = 0;
+    int32_t tmp1 = 0, tmp2 = 0;
+    for (int32_t y = 0; y < height / 2; ++y) {
+        for (int32_t x = 0; x < width; x += 2) {
+            srcOffset = y * width + x;
+            dstOffset1 = (y * 2) * width + x;
+            dstOffset2 = ((y * 2) + 1) * width + x;
+
+            tmp1 = (mSdrToHdrMapping[uvPlane[srcOffset]] << 6) |
+                   ((mSdrToHdrMapping[uvPlane[srcOffset]] & 0x300) >> 3);
+            tmp2 = (mSdrToHdrMapping[uvPlane[srcOffset + 1]] << 6) |
+                   ((mSdrToHdrMapping[uvPlane[srcOffset + 1]] & 0x300) >> 3);
+            dst_uv[dstOffset1] = (uint16_t)tmp1;
+            dst_uv[dstOffset1 + 1] = (uint16_t)tmp2;
+            dst_uv[dstOffset2] = (uint16_t)tmp1;
+            dst_uv[dstOffset2 + 1] = (uint16_t)tmp2;
+        }
+    }
+}
+
+C2Config::level_t C2SoftApvEnc::decisionApvLevel(int32_t width, int32_t height, int32_t fps,
+                                                 int32_t bitrate, int32_t band) {
+    C2Config::level_t level = C2Config::LEVEL_APV_1_BAND_0;
+
+    struct LevelLimits {
+        C2Config::level_t level;
+        uint64_t samplesPerSec;
+        uint32_t bitratesOfBand;
+    };
+
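+    // samplesPerSec is luma samples per second (width * height * fps); bitratesOfBand is in
+    // kbps, matching param.bitrate, which setParams() derives as bits per second / 1000.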
+    constexpr LevelLimits kLimitsBand0[] = {
+            {LEVEL_APV_1_BAND_0, 3'041'280, 7'000},
+            {LEVEL_APV_1_1_BAND_0, 6'082'560, 14'000},
+            {LEVEL_APV_2_BAND_0, 15'667'200, 36'000},
+            {LEVEL_APV_2_1_BAND_0, 31'334'400, 71'000},
+            {LEVEL_APV_3_BAND_0, 66'846'720, 101'000},
+            {LEVEL_APV_3_1_BAND_0, 133'693'440, 201'000},
+            {LEVEL_APV_4_BAND_0, 265'420'800, 401'000},
+            {LEVEL_APV_4_1_BAND_0, 530'841'600, 780'000},
+            {LEVEL_APV_5_BAND_0, 1'061'683'200, 1'560'000},
+            {LEVEL_APV_5_1_BAND_0, 2'123'366'400, 3'324'000},
+            {LEVEL_APV_6_BAND_0, 4'777'574'400, 6'648'000},
+            {LEVEL_APV_6_1_BAND_0, 8'493'465'600, 13'296'000},
+            {LEVEL_APV_7_BAND_0, 16'986'931'200, 26'592'000},
+            {LEVEL_APV_7_1_BAND_0, 33'973'862'400, 53'184'000},
+    };
+
+    constexpr LevelLimits kLimitsBand1[] = {
+            {LEVEL_APV_1_BAND_1, 3'041'280, 11'000},
+            {LEVEL_APV_1_1_BAND_1, 6'082'560, 21'000},
+            {LEVEL_APV_2_BAND_1, 15'667'200, 53'000},
+            {LEVEL_APV_2_1_BAND_1, 31'334'400, 106'000},
+            {LEVEL_APV_3_BAND_1, 66'846'720, 151'000},
+            {LEVEL_APV_3_1_BAND_1, 133'693'440, 301'000},
+            {LEVEL_APV_4_BAND_1, 265'420'800, 602'000},
+            {LEVEL_APV_4_1_BAND_1, 530'841'600, 1'170'000},
+            {LEVEL_APV_5_BAND_1, 1'061'683'200, 2'340'000},
+            {LEVEL_APV_5_1_BAND_1, 2'123'366'400, 4'986'000},
+            {LEVEL_APV_6_BAND_1, 4'777'574'400, 9'972'000},
+            {LEVEL_APV_6_1_BAND_1, 8'493'465'600, 19'944'000},
+            {LEVEL_APV_7_BAND_1, 16'986'931'200, 39'888'000},
+            {LEVEL_APV_7_1_BAND_1, 33'973'862'400, 79'776'000},
+    };
+
+    constexpr LevelLimits kLimitsBand2[] = {
+            {LEVEL_APV_1_BAND_2, 3'041'280, 14'000},
+            {LEVEL_APV_1_1_BAND_2, 6'082'560, 28'000},
+            {LEVEL_APV_2_BAND_2, 15'667'200, 71'000},
+            {LEVEL_APV_2_1_BAND_2, 31'334'400, 141'000},
+            {LEVEL_APV_3_BAND_2, 66'846'720, 201'000},
+            {LEVEL_APV_3_1_BAND_2, 133'693'440, 401'000},
+            {LEVEL_APV_4_BAND_2, 265'420'800, 780'000},
+            {LEVEL_APV_4_1_BAND_2, 530'841'600, 1'560'000},
+            {LEVEL_APV_5_BAND_2, 1'061'683'200, 3'324'000},
+            {LEVEL_APV_5_1_BAND_2, 2'123'366'400, 6'648'000},
+            {LEVEL_APV_6_BAND_2, 4'777'574'400, 13'296'000},
+            {LEVEL_APV_6_1_BAND_2, 8'493'465'600, 26'592'000},
+            {LEVEL_APV_7_BAND_2, 16'986'931'200, 53'184'000},
+            {LEVEL_APV_7_1_BAND_2, 33'973'862'400, 106'368'000},
+    };
+
+    constexpr LevelLimits kLimitsBand3[] = {
+            {LEVEL_APV_1_BAND_3, 3'041'280, 21'000},
+            {LEVEL_APV_1_1_BAND_3, 6'082'560, 42'000},
+            {LEVEL_APV_2_BAND_3, 15'667'200, 106'000},
+            {LEVEL_APV_2_1_BAND_3, 31'334'400, 212'000},
+            {LEVEL_APV_3_BAND_3, 66'846'720, 301'000},
+            {LEVEL_APV_3_1_BAND_3, 133'693'440, 602'000},
+            {LEVEL_APV_4_BAND_3, 265'420'800, 1'170'000},
+            {LEVEL_APV_4_1_BAND_3, 530'841'600, 2'340'000},
+            {LEVEL_APV_5_BAND_3, 1'061'683'200, 4'986'000},
+            {LEVEL_APV_5_1_BAND_3, 2'123'366'400, 9'972'000},
+            {LEVEL_APV_6_BAND_3, 4'777'574'400, 19'944'000},
+            {LEVEL_APV_6_1_BAND_3, 8'493'465'600, 39'888'000},
+            {LEVEL_APV_7_BAND_3, 16'986'931'200, 79'776'000},
+            {LEVEL_APV_7_1_BAND_3, 33'973'862'400, 159'552'000},
+    };
+
+    uint64_t samplesPerSec = width * height * fps;
+    if (band == 0) {
+        for (const LevelLimits& limit : kLimitsBand0) {
+            if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
+                level = limit.level;
+                break;
+            }
+        }
+    } else if (band == 1) {
+        for (const LevelLimits& limit : kLimitsBand1) {
+            if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
+                level = limit.level;
+                break;
+            }
+        }
+    } else if (band == 2) {
+        for (const LevelLimits& limit : kLimitsBand2) {
+            if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
+                level = limit.level;
+                break;
+            }
+        }
+    } else if (band == 3) {
+        for (const LevelLimits& limit : kLimitsBand3) {
+            if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
+                level = limit.level;
+                break;
+            }
+        }
+    } else {
+        ALOGE("Invalid band_idc on calculte level");
+    }
+
+    return level;
+}
+
+void C2SoftApvEnc::ColorConvertP010ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb) {
+    auto width = input->width();
+    auto height = input->height();
+
+    auto* yPlane = (uint8_t*)input->data()[0];
+    auto* uvPlane = (uint8_t*)input->data()[1];
+    uint32_t uvStride = width * 2;
+
+    auto* src = yPlane;
+    auto* dst = (uint8_t*)imgb->a[0];
+    std::memcpy(dst, src, width * height * 2);
+
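+    // The luma plane keeps the same 16-bit sample layout, so it was copied directly above;
+    // each interleaved CbCr row is written twice below to upsample 4:2:0 chroma to 4:2:2.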
+    auto* dst_uv = (uint8_t*)imgb->a[1];
+    int32_t offset1 = 0, offset2 = 0;
+    for (int32_t y = 0; y < height / 2; ++y) {
+        offset1 = (y * 2) * uvStride;
+        offset2 = (y * 2 + 1) * uvStride;
+        src = uvPlane + (y * uvStride);
+
+        std::memcpy(dst_uv + offset1, src, uvStride);
+        std::memcpy(dst_uv + offset2, src, uvStride);
+    }
+}
+
+void C2SoftApvEnc::ColorConvertP010ToYUV422P10le(const C2GraphicView* const input,
+                                                 oapv_imgb_t* imgb) {
+    uint32_t width = input->width();
+    uint32_t height = input->height();
+
+    uint8_t* yPlane = (uint8_t*)input->data()[0];
+    auto* uvPlane = (uint8_t*)input->data()[1];
+    uint32_t stride[3];
+    stride[0] = width * 2;
+    stride[1] = stride[2] = width;
+
+    uint8_t *dst, *src;
+    uint16_t tmp;
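+    // Repack each P010 luma sample ((src[x + 1] << 2) | (src[x] >> 6) extracts the 10-bit
+    // value) into a little-endian 10-bit sample of the planar YUV422P10LE destination.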
+    for (int32_t y = 0; y < height; ++y) {
+        src = yPlane + y * stride[0];
+        dst = (uint8_t*)imgb->a[0] + y * stride[0];
+        for (int32_t x = 0; x < stride[0]; x += 2) {
+            tmp = (src[x + 1] << 2) | (src[x] >> 6);
+            dst[x] = tmp & 0xFF;
+            dst[x + 1] = tmp >> 8;
+        }
+    }
+
+    uint8_t *dst_u, *dst_v;
+    for (int32_t y = 0; y < height / 2; ++y) {
+        src = uvPlane + y * stride[1] * 2;
+        dst_u = (uint8_t*)imgb->a[1] + (y * 2) * stride[1];
+        dst_v = (uint8_t*)imgb->a[2] + (y * 2) * stride[2];
+        for (int32_t x = 0; x < stride[1] * 2; x += 4) {
+            tmp = (src[x + 1] << 2) | (src[x] >> 6);  // cb
+            dst_u[x / 2] = tmp & 0xFF;
+            dst_u[x / 2 + 1] = tmp >> 8;
+            dst_u[x / 2 + stride[1]] = dst_u[x / 2];
+            dst_u[x / 2 + stride[1] + 1] = dst_u[x / 2 + 1];
+
+            tmp = (src[x + 3] << 2) | (src[x + 2] >> 6);  // cr
+            dst_v[x / 2] = tmp & 0xFF;
+            dst_v[x / 2 + 1] = tmp >> 8;
+            dst_v[x / 2 + stride[2]] = dst_v[x / 2];
+            dst_v[x / 2 + stride[2] + 1] = dst_v[x / 2 + 1];
+        }
+    }
+}
+
+void C2SoftApvEnc::finishWork(uint64_t workIndex, const std::unique_ptr<C2Work>& work,
+                              const std::shared_ptr<C2BlockPool>& pool, oapv_bitb_t* bitb,
+                              oapve_stat_t* stat) {
+    std::shared_ptr<C2LinearBlock> block;
+    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+    c2_status_t status = pool->fetchLinearBlock(stat->write, usage, &block);
+    if (C2_OK != status) {
+        ALOGE("fetchLinearBlock for Output failed with status 0x%x", status);
+        mSignalledError = true;
+        work->result = status;
+        work->workletsProcessed = 1u;
+        return;
+    }
+
+    C2WriteView wView = block->map().get();
+    if (C2_OK != wView.error()) {
+        ALOGE("write view map failed with status 0x%x", wView.error());
+        mSignalledError = true;
+        work->result = wView.error();
+        work->workletsProcessed = 1u;
+        return;
+    }
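+    // Codec-specific data is emitted once, as a config update attached to the first frame.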
+    if (!mReceivedFirstFrame) {
+        createCsdData(work, bitb, stat->write);
+        mReceivedFirstFrame = true;
+    }
+
+    memcpy(wView.data(), bitb->addr, stat->write);
+    std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block, 0, stat->write);
+
+    /* All frames are SYNC FRAME */
+    buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(0u /* stream id */,
+                                                                          C2Config::SYNC_FRAME));
+
+    auto fillWork = [buffer](const std::unique_ptr<C2Work>& work) {
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.buffers.push_back(buffer);
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+    };
+    if (work && c2_cntr64_t(workIndex) == work->input.ordinal.frameIndex) {
+        fillWork(work);
+        if (mSignalledEos) {
+            work->worklets.front()->output.flags = C2FrameData::FLAG_END_OF_STREAM;
+        }
+    } else {
+        finish(workIndex, fillWork);
+    }
+}
+
+void C2SoftApvEnc::createCsdData(const std::unique_ptr<C2Work>& work, oapv_bitb_t* bitb,
+                                 uint32_t encodedSize) {
+    uint32_t csdStart = 0, csdEnd = 0;
+    uint32_t bitOffset = 0;
+    uint8_t* buf = (uint8_t*)bitb->addr + csdStart;
+
+    if (encodedSize == 0) {
+        ALOGE("the first frame size is zero, so no csd data will be created.");
+        return;
+    }
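+    // Walk pbu_header(), frame_info() and frame_header() of the first PBU with a bit reader
+    // to locate the header bytes that are exposed to the framework as codec-specific data.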
+    ABitReader reader(buf, encodedSize);
+
+    /* pbu_header() */
+    reader.skipBits(32);
+    bitOffset += 32;  // pbu_size
+    reader.skipBits(32);
+    bitOffset += 32;  // currReadSize
+    csdStart = bitOffset / 8;
+
+    int32_t pbu_type = reader.getBits(8);
+    bitOffset += 8;  // pbu_type
+    reader.skipBits(16);
+    bitOffset += 16;  // group_id
+    reader.skipBits(8);
+    bitOffset += 8;  // reserved_zero_8bits
+
+    /* frame_info() */
+    int32_t profile_idc = reader.getBits(8);
+    bitOffset += 8;  // profile_idc
+    int32_t level_idc = reader.getBits(8);
+    bitOffset += 8;  // level_idc
+    int32_t band_idc = reader.getBits(3);
+    bitOffset += 3;  // band_idc
+    reader.skipBits(5);
+    bitOffset += 5;  // reserved_zero_5bits
+    int32_t width = reader.getBits(32);
+    bitOffset += 32;  // width
+    int32_t height = reader.getBits(32);
+    bitOffset += 32;  // height
+    int32_t chroma_idc = reader.getBits(4);
+    bitOffset += 4;  // chroma_format_idc
+    reader.skipBits(4);
+    bitOffset += 4;  // bit_depth
+    reader.skipBits(8);
+    bitOffset += 8;  // capture_time_distance
+    reader.skipBits(8);
+    bitOffset += 8;  // reserved_zero_8bits
+
+    /* frame_header() */
+    reader.skipBits(8);
+    bitOffset += 8;  // reserved_zero_8bit
+    bool color_description_present_flag = reader.getBits(1);
+    bitOffset += 1;  // color_description_present_flag
+    if (color_description_present_flag) {
+        reader.skipBits(8);
+        bitOffset += 8;  // color_primaries
+        reader.skipBits(8);
+        bitOffset += 8;  // transfer_characteristics
+        reader.skipBits(8);
+        bitOffset += 8;  // matrix_coefficients
+    }
+    bool use_q_matrix = reader.getBits(1);
+    bitOffset += 1;  // use_q_matrix
+    if (use_q_matrix) {
+        /* quantization_matrix() */
+        int32_t numComp = chroma_idc == 0   ? 1
+                          : chroma_idc == 2 ? 3
+                          : chroma_idc == 3 ? 3
+                          : chroma_idc == 4 ? 4
+                                            : -1;
+        int32_t needBitsForQ = 64 * 8 * numComp;
+        reader.skipBits(needBitsForQ);
+        bitOffset += needBitsForQ;
+    }
+
+    /* tile_info() */
+    int32_t tile_width_in_mbs_minus1 = reader.getBits(28);
+    bitOffset += 28;
+    int32_t tile_height_in_mbs_minus1 = reader.getBits(28);
+    bitOffset += 28;
+    bool tile_size_present_in_fh_flag = reader.getBits(1);
+    bitOffset += 1;
+    if (tile_size_present_in_fh_flag) {
+        int32_t numTiles = ceil((double)width / (double)tile_width_in_mbs_minus1) *
+                           ceil((double)height / (double)tile_height_in_mbs_minus1);
+        reader.skipBits(32 * numTiles);
+        bitOffset += (32 * numTiles);
+    }
+
+    reader.skipBits(8);
+    bitOffset += 8;  // reserved_zero_8bits
+
+    /* byte_alignment() */
+    while (bitOffset % 8) {
+        reader.skipBits(1);
+        bitOffset += 1;
+    }
+    csdEnd = bitOffset / 8;
+    int32_t csdSize = csdEnd - csdStart + 1;
+
+    std::unique_ptr<C2StreamInitDataInfo::output> csd =
+            C2StreamInitDataInfo::output::AllocUnique(csdSize, 0u);
+    if (!csd) {
+        ALOGE("CSD allocation failed");
+        mSignalledError = true;
+        work->result = C2_NO_MEMORY;
+        work->workletsProcessed = 1u;
+        return;
+    }
+
+    buf = buf + csdStart;
+    memcpy(csd->m.value, buf, csdSize);
+    work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+}
+
+c2_status_t C2SoftApvEnc::drainInternal(uint32_t drainMode,
+                                        const std::shared_ptr<C2BlockPool>& pool,
+                                        const std::unique_ptr<C2Work>& work) {
+    fillEmptyWork(work);
+    return C2_OK;
+}
+
+void C2SoftApvEnc::process(const std::unique_ptr<C2Work>& work,
+                           const std::shared_ptr<C2BlockPool>& pool) {
+    c2_status_t error;
+    work->result = C2_OK;
+    work->workletsProcessed = 0u;
+    work->worklets.front()->output.flags = work->input.flags;
+
+    nsecs_t timeDelay = 0;
+    uint64_t workIndex = work->input.ordinal.frameIndex.peekull();
+
+    mSignalledEos = false;
+    mOutBlock = nullptr;
+
+    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+        ALOGV("Got FLAG_END_OF_STREAM");
+        mSignalledEos = true;
+    }
+
+    /* Initialize encoder if not already initialized */
+    if (initEncoder() != C2_OK) {
+        ALOGE("[%s] Failed to initialize encoder", __func__);
+        mSignalledError = true;
+        work->result = C2_CORRUPTED;
+        work->workletsProcessed = 1u;
+        return;
+    }
+    if (mSignalledError) {
+        ALOGE("[%s] Received signalled error", __func__);
+        return;
+    }
+
+    if (mSignalledEos) {
+        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+        return;
+    }
+
+    if (work->input.buffers.empty()) {
+        return;
+    }
+
+    std::shared_ptr<C2GraphicView> view;
+    std::shared_ptr<C2Buffer> inputBuffer = nullptr;
+    if (!work->input.buffers.empty()) {
+        inputBuffer = work->input.buffers[0];
+        view = std::make_shared<C2GraphicView>(
+                inputBuffer->data().graphicBlocks().front().map().get());
+        if (view->error() != C2_OK) {
+            ALOGE("graphic view map err = %d", view->error());
+            work->workletsProcessed = 1u;
+            return;
+        }
+    }
+    if (!inputBuffer) {
+        fillEmptyWork(work);
+        return;
+    }
+
+    oapve_stat_t stat = {};  // zero-initialize so finishWork() never reads an uninitialized stat
+    auto outBufferSize =
+            mCodecDesc->param[mReceivedFrames].w * mCodecDesc->param[mReceivedFrames].h * 4;
+    if (!mOutBlock) {
+        C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+        c2_status_t err = pool->fetchLinearBlock(outBufferSize, usage, &mOutBlock);
+        if (err != C2_OK) {
+            work->result = err;
+            work->workletsProcessed = 1u;
+            ALOGE("fetchLinearBlock has failed. err = %d", err);
+            return;
+        }
+    }
+
+    C2WriteView wView = mOutBlock->map().get();
+    if (wView.error() != C2_OK) {
+        work->result = wView.error();
+        work->workletsProcessed = 1u;
+        return;
+    }
+
+    error = setEncodeArgs(&mInputFrames, view.get(), workIndex);
+    if (error != C2_OK) {
+        mSignalledError = true;
+        work->result = error;
+        work->workletsProcessed = 1u;
+        return;
+    }
+
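+    // Inputs are batched: encoding is deferred until mMaxFrames frames have been received and
+    // staged in mInputFrames, and they are then submitted to the encoder in one call.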
+    if (++mReceivedFrames < mMaxFrames) {
+        return;
+    }
+    mReceivedFrames = 0;
+
+    std::shared_ptr<oapv_bitb_t> bits = std::make_shared<oapv_bitb_t>();
+    std::memset(mBitstreamBuf, 0, kMaxBitstreamBufSize);
+    bits->addr = mBitstreamBuf;
+    bits->bsize = kMaxBitstreamBufSize;
+    bits->err = C2_OK;
+
+    if (mInputFrames.frm[0].imgb) {
+        int32_t status =
+                oapve_encode(mEncoderId, &mInputFrames, mMetaId, bits.get(), &stat, &mReconFrames);
+        if (status != C2_OK) {
+            mSignalledError = true;
+            work->result = C2_CORRUPTED;
+            work->workletsProcessed = 1u;
+            return;
+        }
+    } else if (!mSignalledEos) {
+        fillEmptyWork(work);
+    }
+    finishWork(workIndex, work, pool, bits.get(), &stat);
+}
+
+class C2SoftApvEncFactory : public C2ComponentFactory {
+  public:
+    C2SoftApvEncFactory()
+        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+                  GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+    virtual c2_status_t createComponent(c2_node_id_t id,
+                                        std::shared_ptr<C2Component>* const component,
+                                        std::function<void(C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(
+                new C2SoftApvEnc(COMPONENT_NAME, id,
+                                 std::make_shared<C2SoftApvEnc::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    c2_status_t createInterface(c2_node_id_t id,
+                                std::shared_ptr<C2ComponentInterface>* const interface,
+                                std::function<void(C2ComponentInterface*)> deleter) override {
+        *interface = std::shared_ptr<C2ComponentInterface>(
+                new SimpleInterface<C2SoftApvEnc::IntfImpl>(
+                        COMPONENT_NAME, id, std::make_shared<C2SoftApvEnc::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    ~C2SoftApvEncFactory() override = default;
+
+  private:
+    std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+}  // namespace android
+
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+    if (!android::media::swcodec::flags::apv_software_codec()) {
+        ALOGV("APV SW Codec is not enabled");
+        return nullptr;
+    }
+    return new ::android::C2SoftApvEncFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+        ::C2ComponentFactory* factory) {
+    delete factory;
+}
diff --git a/media/codec2/components/apv/C2SoftApvEnc.h b/media/codec2/components/apv/C2SoftApvEnc.h
new file mode 100644
index 0000000..441c663
--- /dev/null
+++ b/media/codec2/components/apv/C2SoftApvEnc.h
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_APV_ENC_H_
+#define ANDROID_C2_SOFT_APV_ENC_H_
+
+#include <SimpleC2Component.h>
+#include <utils/Vector.h>
+#include <map>
+#include "oapv.h"
+
+#include <C2SoftApvCommon.h>
+
+namespace android {
+
+#define CODEC_MAX_CORES 4
+
+#define APV_QP_MIN 1
+#define APV_QP_MAX 51
+
+struct C2SoftApvEnc final : public SimpleC2Component {
+    class IntfImpl;
+
+    C2SoftApvEnc(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    virtual ~C2SoftApvEnc();
+
+    // From SimpleC2Component
+    c2_status_t onInit() override;
+    c2_status_t onStop() override;
+    void onReset() override;
+    void onRelease() override;
+    c2_status_t onFlush_sm() override;
+    void process(const std::unique_ptr<C2Work>& work,
+                 const std::shared_ptr<C2BlockPool>& pool) override;
+    c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override;
+
+  private:
+    c2_status_t reset();
+    c2_status_t initEncoder();
+    c2_status_t releaseEncoder();
+    c2_status_t setEncodeArgs(oapv_frms_t* imgb_inp, const C2GraphicView* const input,
+                              uint64_t workIndex);
+    void finishWork(uint64_t workIndex, const std::unique_ptr<C2Work>& work,
+                    const std::shared_ptr<C2BlockPool>& pool, oapv_bitb_t* bitb,
+                    oapve_stat_t* stat);
+    c2_status_t drainInternal(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool,
+                              const std::unique_ptr<C2Work>& work);
+    void setParams(oapve_param_t& param);
+    int32_t getQpFromQuality(int quality);
+    C2Config::level_t decisionApvLevel(int32_t width, int32_t height, int32_t fps, int32_t bitrate,
+                                       int32_t band);
+
+    void showEncoderParams(oapve_cdesc_t* cdsc);
+
+    void ColorConvertNv12ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb);
+    void ColorConvertP010ToYUV422P10le(const C2GraphicView* const input, oapv_imgb_t* imgb);
+    void ColorConvertP010ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb);
+
+    void createCsdData(const std::unique_ptr<C2Work>& work, oapv_bitb_t* bitb,
+                       uint32_t encodedSize);
+
+    std::shared_ptr<IntfImpl> mIntf;
+    int32_t mBitDepth;
+    int32_t mColorFormat;
+
+    bool mStarted;
+    bool mSignalledEos;
+    bool mSignalledError;
+
+    std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+    std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+    std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+    std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
+    std::shared_ptr<C2StreamQualityTuning::output> mQuality;
+    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
+    std::shared_ptr<C2LinearBlock> mOutBlock;
+    std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
+    std::shared_ptr<C2StreamPixelFormatInfo::input> mPixelFormat;
+
+    std::map<const void*, std::shared_ptr<C2Buffer>> mBuffers;
+    MemoryBlockPool mConversionBuffers;
+    std::map<const void*, MemoryBlock> mConversionBuffersInUse;
+
+    bool mInitEncoder;
+    int32_t mMaxFrames;
+    int32_t mReceivedFrames;
+    std::unique_ptr<oapve_cdesc_t> mCodecDesc;
+    oapv_frms_t mInputFrames;
+    oapv_frms_t mReconFrames;
+    oapve_t mEncoderId;
+    oapvm_t mMetaId;
+    uint8_t* mBitstreamBuf = nullptr;
+    std::vector<uint16_t> mSdrToHdrMapping;
+    bool mReceivedFirstFrame = false;
+    C2_DO_NOT_COPY(C2SoftApvEnc);
+};
+}  // namespace android
+
+#endif  // ANDROID_C2_SOFT_APV_ENC_H_
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 562dcf5..52920c2 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -743,6 +743,25 @@
 }
 
 status_t C2SoftMpeg2Dec::deleteDecoder() {
+    // API call to IV_CMD_RETRIEVE_MEMREC not only retrieves the memory records
+    // but also joins active threads and destroys conditional thread variables and
+    // mutex locks for each thread.
+    iv_retrieve_mem_rec_ip_t s_retrieve_mem_ip;
+    iv_retrieve_mem_rec_op_t s_retrieve_mem_op;
+
+    s_retrieve_mem_ip.pv_mem_rec_location = (iv_mem_rec_t *)mMemRecords;
+    s_retrieve_mem_ip.e_cmd = IV_CMD_RETRIEVE_MEMREC;
+    s_retrieve_mem_ip.u4_size = sizeof(iv_retrieve_mem_rec_ip_t);
+    s_retrieve_mem_op.u4_size = sizeof(iv_retrieve_mem_rec_op_t);
+
+    IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+                                                    &s_retrieve_mem_ip,
+                                                    &s_retrieve_mem_op);
+    if (IV_SUCCESS != status) {
+        ALOGE("error in %s: 0x%x", __func__, s_retrieve_mem_op.u4_error_code);
+        return UNKNOWN_ERROR;
+    }
+
     if (mMemRecords) {
         iv_mem_rec_t *ps_mem_rec = mMemRecords;
 
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index e6782a9..069d6ad 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -430,6 +430,7 @@
     _C2_PL_AV1_BASE  = 0x9000,
     _C2_PL_VP8_BASE  = 0xA000,
     _C2_PL_MPEGH_BASE = 0xB000,     // MPEG-H 3D Audio
+    _C2_PL_APV_BASE = 0xC000,     // APV
 
     C2_PROFILE_LEVEL_VENDOR_START = 0x70000000,
 };
@@ -597,6 +598,15 @@
     PROFILE_MPEGH_HIGH,                         ///< MPEG-H High
     PROFILE_MPEGH_LC,                           ///< MPEG-H Low-complexity
     PROFILE_MPEGH_BASELINE,                     ///< MPEG-H Baseline
+
+    // Advanced Professional Video Codec (APV)
+    PROFILE_APV_422_10 = _C2_PL_APV_BASE,       ///< APV 422-10 Profile
+    PROFILE_APV_422_12,                         ///< APV 422-12 Profile
+    PROFILE_APV_444_10,                         ///< APV 444-10 Profile
+    PROFILE_APV_444_12,                         ///< APV 444-12 Profile
+    PROFILE_APV_4444_10,                        ///< APV 4444-10 Profile
+    PROFILE_APV_4444_12,                        ///< APV 4444-12 Profile
+    PROFILE_APV_400_10,                         ///< APV 400-10 Profile
 };
 
 enum C2Config::level_t : uint32_t {
@@ -752,6 +762,68 @@
     LEVEL_MPEGH_3,                              ///< MPEG-H L3
     LEVEL_MPEGH_4,                              ///< MPEG-H L4
     LEVEL_MPEGH_5,                              ///< MPEG-H L5
+
+    // Advanced Professional Video Codec (APV) levels/bands
+    LEVEL_APV_1_BAND_0 = _C2_PL_APV_BASE,            ///< APV L 1, BAND 0
+    LEVEL_APV_1_1_BAND_0,                            ///< APV L 1.1, BAND 0
+    LEVEL_APV_2_BAND_0,                              ///< APV L 2, BAND 0
+    LEVEL_APV_2_1_BAND_0,                            ///< APV L 2.1, BAND 0
+    LEVEL_APV_3_BAND_0,                              ///< APV L 3, BAND 0
+    LEVEL_APV_3_1_BAND_0,                            ///< APV L 3.1, BAND 0
+    LEVEL_APV_4_BAND_0,                              ///< APV L 4, BAND 0
+    LEVEL_APV_4_1_BAND_0,                            ///< APV L 4.1, BAND 0
+    LEVEL_APV_5_BAND_0,                              ///< APV L 5, BAND 0
+    LEVEL_APV_5_1_BAND_0,                            ///< APV L 5.1, BAND 0
+    LEVEL_APV_6_BAND_0,                              ///< APV L 6, BAND 0
+    LEVEL_APV_6_1_BAND_0,                            ///< APV L 6.1, BAND 0
+    LEVEL_APV_7_BAND_0,                              ///< APV L 7, BAND 0
+    LEVEL_APV_7_1_BAND_0,                            ///< APV L 7.1, BAND 0
+
+    LEVEL_APV_1_BAND_1 = _C2_PL_APV_BASE + 0x100,    ///< APV L 1, BAND 1
+    LEVEL_APV_1_1_BAND_1,                            ///< APV L 1.1, BAND 1
+    LEVEL_APV_2_BAND_1,                              ///< APV L 2, BAND 1
+    LEVEL_APV_2_1_BAND_1,                            ///< APV L 2.1, BAND 1
+    LEVEL_APV_3_BAND_1,                              ///< APV L 3, BAND 1
+    LEVEL_APV_3_1_BAND_1,                            ///< APV L 3.1, BAND 1
+    LEVEL_APV_4_BAND_1,                              ///< APV L 4, BAND 1
+    LEVEL_APV_4_1_BAND_1,                            ///< APV L 4.1, BAND 1
+    LEVEL_APV_5_BAND_1,                              ///< APV L 5, BAND 1
+    LEVEL_APV_5_1_BAND_1,                            ///< APV L 5.1, BAND 1
+    LEVEL_APV_6_BAND_1,                              ///< APV L 6, BAND 1
+    LEVEL_APV_6_1_BAND_1,                            ///< APV L 6.1, BAND 1
+    LEVEL_APV_7_BAND_1,                              ///< APV L 7, BAND 1
+    LEVEL_APV_7_1_BAND_1,                            ///< APV L 7.1, BAND 1
+
+    LEVEL_APV_1_BAND_2 = _C2_PL_APV_BASE + 0x200,    ///< APV L 1, BAND 2
+    LEVEL_APV_1_1_BAND_2,                            ///< APV L 1.1, BAND 2
+    LEVEL_APV_2_BAND_2,                              ///< APV L 2, BAND 2
+    LEVEL_APV_2_1_BAND_2,                            ///< APV L 2.1, BAND 2
+    LEVEL_APV_3_BAND_2,                              ///< APV L 3, BAND 2
+    LEVEL_APV_3_1_BAND_2,                            ///< APV L 3.1, BAND 2
+    LEVEL_APV_4_BAND_2,                              ///< APV L 4, BAND 2
+    LEVEL_APV_4_1_BAND_2,                            ///< APV L 4.1, BAND 2
+    LEVEL_APV_5_BAND_2,                              ///< APV L 5, BAND 2
+    LEVEL_APV_5_1_BAND_2,                            ///< APV L 5.1, BAND 2
+    LEVEL_APV_6_BAND_2,                              ///< APV L 6, BAND 2
+    LEVEL_APV_6_1_BAND_2,                            ///< APV L 6.1, BAND 2
+    LEVEL_APV_7_BAND_2,                              ///< APV L 7, BAND 2
+    LEVEL_APV_7_1_BAND_2,                            ///< APV L 7.1, BAND 2
+
+    LEVEL_APV_1_BAND_3 = _C2_PL_APV_BASE + 0x300,    ///< APV L 1, BAND 3
+    LEVEL_APV_1_1_BAND_3,                            ///< APV L 1.1, BAND 3
+    LEVEL_APV_2_BAND_3,                              ///< APV L 2, BAND 3
+    LEVEL_APV_2_1_BAND_3,                            ///< APV L 2.1, BAND 3
+    LEVEL_APV_3_BAND_3,                              ///< APV L 3, BAND 3
+    LEVEL_APV_3_1_BAND_3,                            ///< APV L 3.1, BAND 3
+    LEVEL_APV_4_BAND_3,                              ///< APV L 4, BAND 3
+    LEVEL_APV_4_1_BAND_3,                            ///< APV L 4.1, BAND 3
+    LEVEL_APV_5_BAND_3,                              ///< APV L 5, BAND 3
+    LEVEL_APV_5_1_BAND_3,                            ///< APV L 5.1, BAND 3
+    LEVEL_APV_6_BAND_3,                              ///< APV L 6, BAND 3
+    LEVEL_APV_6_1_BAND_3,                            ///< APV L 6.1, BAND 3
+    LEVEL_APV_7_BAND_3,                              ///< APV L 7, BAND 3
+    LEVEL_APV_7_1_BAND_3,                            ///< APV L 7.1, BAND 3
+
 };
 
 struct C2ProfileLevelStruct {
diff --git a/media/codec2/hal/common/include/codec2/common/BqPoolInvalidateHelper.h b/media/codec2/hal/common/include/codec2/common/BqPoolInvalidateHelper.h
new file mode 100644
index 0000000..859f703
--- /dev/null
+++ b/media/codec2/hal/common/include/codec2/common/BqPoolInvalidateHelper.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <C2BqBufferPriv.h>
+#include <C2PlatformSupport.h>
+
+namespace android {
+
+// filter fn from component's blockpool container to bqpool container
+static inline bool BqPoolFilterFn(
+        std::pair<const uint64_t, std::shared_ptr<C2BlockPool>> pool) {
+    return (pool.second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE);
+}
+
+// convert fn from component's blockpool container to bqpool container
+static inline std::shared_ptr<C2BufferQueueBlockPool> BqPoolConvertFn(
+        std::pair<const uint64_t, std::shared_ptr<C2BlockPool>> pool) {
+    return std::static_pointer_cast<C2BufferQueueBlockPool>(pool.second);
+}
+
+// This is similar to std::transform except that it takes an extra \pred functor parameter.
+// Only the elements for which \pred returns \true are transformed and added to the dest
+// container. (For portability std::ranges are not used.)
+template <class InputIt, class OutputIt, class Pred, class Fct>
+void transform_if(InputIt first, InputIt last, OutputIt dest, Pred pred, Fct transform)
+{
+   while (first != last) {
+      if (pred(*first)) {
+         *dest++ = transform(*first);
+      }
+      ++first;
+   }
+}
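+
+// Example (sketch): collect the BufferQueue-backed pools out of a component's pool map.
+//   std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
+//   transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+//                BqPoolFilterFn, BqPoolConvertFn);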
+
+}  // namespace android
diff --git a/media/codec2/hal/hidl/1.0/utils/Component.cpp b/media/codec2/hal/hidl/1.0/utils/Component.cpp
index 62f0e25..162a80e 100644
--- a/media/codec2/hal/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/Component.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "Codec2-Component"
 #include <android-base/logging.h>
 
+#include <codec2/common/BqPoolInvalidateHelper.h>
 #include <codec2/hidl/1.0/Component.h>
 #include <codec2/hidl/1.0/ComponentStore.h>
 #include <codec2/hidl/1.0/InputBufferManager.h>
@@ -30,6 +31,7 @@
 #include <utils/Timers.h>
 
 #include <C2BqBufferPriv.h>
+#include <C2BqPoolInvalidator.h>
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
@@ -270,16 +272,17 @@
 }
 
 void Component::onDeathReceived() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
     {
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mClientDied = true;
-        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
-            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
-                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
-                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
-                bqPool->invalidate();
-            }
-        }
+        transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                BqPoolFilterFn, BqPoolConvertFn);
+    }
+    if (!bqPools.empty()) {
+        std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem =
+                std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        bqInvalidateItem->invalidate();
     }
     release();
 }
@@ -549,7 +552,26 @@
 }
 
 Return<Status> Component::release() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        if (!mClientDied) {
+            transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                     BqPoolFilterFn, BqPoolConvertFn);
+        }
+    }
+    std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem;
+    if (!bqPools.empty()) {
+        // Handle the rare case where the process dies just after release() is called.
+        bqInvalidateItem = std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        C2BqPoolInvalidator::getInstance().queue(bqInvalidateItem);
+    }
     Status status = static_cast<Status>(mComponent->release());
+    if (bqInvalidateItem) {
+        // If release is not blocked,
+        // skip invalidation and finish ASAP.
+        bqInvalidateItem->skip();
+    }
     {
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
@@ -637,6 +659,18 @@
 }
 
 Component::~Component() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                     BqPoolFilterFn, BqPoolConvertFn);
+    }
+    if (!bqPools.empty()) {
+        LOG(ERROR) << "blockpools are not cleared yet at dtor";
+        std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem =
+                std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        C2BqPoolInvalidator::getInstance().queue(bqInvalidateItem);
+    }
     InputBufferManager::unregisterFrameData(mListener);
     mStore->reportComponentDeath(this);
 }
diff --git a/media/codec2/hal/hidl/1.1/utils/Component.cpp b/media/codec2/hal/hidl/1.1/utils/Component.cpp
index 7f2c4dd..1c2a49a 100644
--- a/media/codec2/hal/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/Component.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "Codec2-Component@1.1"
 #include <android-base/logging.h>
 
+#include <codec2/common/BqPoolInvalidateHelper.h>
 #include <codec2/hidl/1.1/Component.h>
 #include <codec2/hidl/1.1/ComponentStore.h>
 #include <codec2/hidl/1.1/InputBufferManager.h>
@@ -32,6 +33,7 @@
 #include <codec2/common/MultiAccessUnitHelper.h>
 
 #include <C2BqBufferPriv.h>
+#include <C2BqPoolInvalidator.h>
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
@@ -274,16 +276,17 @@
 }
 
 void Component::onDeathReceived() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
     {
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mClientDied = true;
-        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
-            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
-                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
-                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
-                bqPool->invalidate();
-            }
-        }
+        transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                BqPoolFilterFn, BqPoolConvertFn);
+    }
+    if (!bqPools.empty()) {
+        std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem =
+                std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        bqInvalidateItem->invalidate();
     }
     release();
 }
@@ -555,7 +558,26 @@
 }
 
 Return<Status> Component::release() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        if (!mClientDied) {
+            transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                    BqPoolFilterFn, BqPoolConvertFn);
+        }
+    }
+    std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem;
+    if (!bqPools.empty()) {
+        // Handle the rare case where the process dies just after release() is called.
+        bqInvalidateItem = std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        C2BqPoolInvalidator::getInstance().queue(bqInvalidateItem);
+    }
     Status status = static_cast<Status>(mComponent->release());
+    if (bqInvalidateItem) {
+        // If release is not blocked,
+        // skip invalidation and finish ASAP.
+        bqInvalidateItem->skip();
+    }
     {
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
@@ -649,6 +671,18 @@
 }
 
 Component::~Component() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                BqPoolFilterFn, BqPoolConvertFn);
+    }
+    if (!bqPools.empty()) {
+        LOG(ERROR) << "blockpools are not cleared yet at dtor";
+        std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem =
+                std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        C2BqPoolInvalidator::getInstance().queue(bqInvalidateItem);
+    }
     InputBufferManager::unregisterFrameData(mListener);
     mStore->reportComponentDeath(this);
 }
diff --git a/media/codec2/hal/hidl/1.2/utils/Component.cpp b/media/codec2/hal/hidl/1.2/utils/Component.cpp
index 7b0aa9b..a15febe 100644
--- a/media/codec2/hal/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/Component.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "Codec2-Component@1.2"
 #include <android-base/logging.h>
 
+#include <codec2/common/BqPoolInvalidateHelper.h>
 #include <codec2/hidl/1.2/Component.h>
 #include <codec2/hidl/1.2/ComponentStore.h>
 #include <codec2/hidl/1.2/InputBufferManager.h>
@@ -30,6 +31,7 @@
 #include <utils/Timers.h>
 
 #include <C2BqBufferPriv.h>
+#include <C2BqPoolInvalidator.h>
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
@@ -272,16 +274,17 @@
 }
 
 void Component::onDeathReceived() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
     {
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mClientDied = true;
-        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
-            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
-                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
-                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
-                bqPool->invalidate();
-            }
-        }
+        transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                BqPoolFilterFn, BqPoolConvertFn);
+    }
+    if (!bqPools.empty()) {
+        std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem =
+                std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        bqInvalidateItem->invalidate();
     }
     release();
 }
@@ -551,7 +554,26 @@
 }
 
 Return<Status> Component::release() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        if (!mClientDied) {
+            transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                    BqPoolFilterFn, BqPoolConvertFn);
+        }
+    }
+    std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem;
+    if (!bqPools.empty()) {
+        // Handle the rare case where the process dies just after release() is called.
+        bqInvalidateItem = std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        C2BqPoolInvalidator::getInstance().queue(bqInvalidateItem);
+    }
     Status status = static_cast<Status>(mComponent->release());
+    if (bqInvalidateItem) {
+        // If release is not blocked,
+        // skip invalidation and finish ASAP.
+        bqInvalidateItem->skip();
+    }
     {
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
@@ -676,6 +698,18 @@
 }
 
 Component::~Component() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> bqPools;
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        transform_if(mBlockPools.begin(), mBlockPools.end(), std::back_inserter(bqPools),
+                BqPoolFilterFn, BqPoolConvertFn);
+    }
+    if (!bqPools.empty()) {
+        LOG(ERROR) << "blockpools are not cleared yet at dtor";
+        std::shared_ptr<C2BqPoolInvalidateItem> bqInvalidateItem =
+                std::make_shared<C2BqPoolInvalidateItem>(std::move(bqPools));
+        C2BqPoolInvalidator::getInstance().queue(bqInvalidateItem);
+    }
     InputBufferManager::unregisterFrameData(mListener);
     mStore->reportComponentDeath(this);
 }
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 3c8c1b7..cc5d10c 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -16,6 +16,10 @@
 cc_library_shared {
     name: "libsfplugin_ccodec",
 
+    defaults: [
+        "android.hardware.graphics.common-ndk_shared",
+    ],
+
     export_include_dirs: ["include"],
 
     srcs: [
@@ -46,6 +50,7 @@
     ],
 
     static_libs: [
+        "libPlatformProperties",
         "libSurfaceFlingerProperties",
         "aconfig_mediacodec_flags_c_lib",
         "android.media.codec-aconfig-cc",
@@ -56,7 +61,6 @@
         "android.hardware.drm@1.0",
         "android.hardware.media.c2@1.0",
         "android.hardware.media.omx@1.0",
-        "android.hardware.graphics.common-V5-ndk",
         "graphicbuffersource-aidl-ndk",
         "libbase",
         "libbinder",
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 632eaed..72b5a61 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -2877,6 +2877,7 @@
 
             // handle configuration changes in work done
             std::shared_ptr<const C2StreamInitDataInfo::output> initData;
+            sp<AMessage> inputFormat = nullptr;
             sp<AMessage> outputFormat = nullptr;
             {
                 Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
@@ -2964,10 +2965,12 @@
                             initData->m.value, initData->flexCount(), config->mCodingMediaType,
                             config->mOutputFormat);
                 }
+                inputFormat = config->mInputFormat;
                 outputFormat = config->mOutputFormat;
             }
             mChannel->onWorkDone(
-                    std::move(work), outputFormat, initData ? initData.get() : nullptr);
+                    std::move(work), inputFormat, outputFormat,
+                    initData ? initData.get() : nullptr);
             // log metrics to MediaCodec
             if (mMetrics->countEntries() == 0) {
                 Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 3ef2f84..d67a876 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -38,8 +38,10 @@
 
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/drm/1.0/types.h>
+#include <android/sysprop/MediaProperties.sysprop.h>
 #include <android-base/parseint.h>
 #include <android-base/properties.h>
+#include <android-base/no_destructor.h>
 #include <android-base/stringprintf.h>
 #include <binder/MemoryBase.h>
 #include <binder/MemoryDealer.h>
@@ -114,6 +116,109 @@
             });
 }
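+
+// Serializes onBufferReleased / onBufferAttached surface callbacks onto a dedicated worker
+// thread, so callers only enqueue an item instead of calling back into the component
+// synchronously.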
 
+class SurfaceCallbackHandler {
+public:
+    enum callback_type_t {
+        ON_BUFFER_RELEASED = 0,
+        ON_BUFFER_ATTACHED
+    };
+
+    void post(callback_type_t callback,
+            std::shared_ptr<Codec2Client::Component> component,
+            uint32_t generation) {
+        if (!component) {
+            ALOGW("surface callback psoted for invalid component");
+        }
+        std::shared_ptr<SurfaceCallbackItem> item =
+                std::make_shared<SurfaceCallbackItem>(callback, component, generation);
+        std::unique_lock<std::mutex> lock(mMutex);
+        mItems.emplace_back(std::move(item));
+        mCv.notify_one();
+    }
+
+    ~SurfaceCallbackHandler() {
+        {
+            std::unique_lock<std::mutex> lock(mMutex);
+            mDone = true;
+            mCv.notify_all();
+        }
+        if (mThread.joinable()) {
+            mThread.join();
+        }
+    }
+
+    static SurfaceCallbackHandler& GetInstance() {
+        static base::NoDestructor<SurfaceCallbackHandler> sSurfaceCallbackHandler{};
+        return *sSurfaceCallbackHandler;
+    }
+
+private:
+    struct SurfaceCallbackItem {
+        callback_type_t mCallback;
+        std::weak_ptr<Codec2Client::Component> mComp;
+        uint32_t mGeneration;
+
+        SurfaceCallbackItem(
+                callback_type_t callback,
+                std::shared_ptr<Codec2Client::Component> comp,
+                uint32_t generation)
+                : mCallback(callback), mComp(comp), mGeneration(generation) {}
+    };
+
+    SurfaceCallbackHandler() { mThread = std::thread(&SurfaceCallbackHandler::run, this); }
+
+    void run() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mDone) {
+            while (!mItems.empty()) {
+                std::deque<std::shared_ptr<SurfaceCallbackItem>> items = std::move(mItems);
+                mItems.clear();
+                lock.unlock();
+                handle(items);
+                lock.lock();
+            }
+            mCv.wait(lock);
+        }
+    }
+
+    void handle(std::deque<std::shared_ptr<SurfaceCallbackItem>> &items) {
+        while (!items.empty()) {
+            std::shared_ptr<SurfaceCallbackItem> item = items.front();
+            items.pop_front();
+            switch (item->mCallback) {
+                case ON_BUFFER_RELEASED: {
+                    std::shared_ptr<Codec2Client::Component> comp = item->mComp.lock();
+                    if (comp) {
+                        comp->onBufferReleasedFromOutputSurface(item->mGeneration);
+                    }
+                    break;
+                }
+                case ON_BUFFER_ATTACHED: {
+                    std::shared_ptr<Codec2Client::Component> comp = item->mComp.lock();
+                    if (comp) {
+                        comp->onBufferAttachedToOutputSurface(item->mGeneration);
+                    }
+                    break;
+                }
+                default:
+                    ALOGE("Non defined surface callback message");
+                    break;
+            }
+        }
+    }
+
+    std::thread mThread;
+    bool mDone = false;
+    std::deque<std::shared_ptr<SurfaceCallbackItem>> mItems;
+    std::mutex mMutex;
+    std::condition_variable mCv;
+
+    friend class base::NoDestructor<SurfaceCallbackHandler>;
+
+    DISALLOW_EVIL_CONSTRUCTORS(SurfaceCallbackHandler);
+};
+
 }  // namespace
 
 CCodecBufferChannel::QueueGuard::QueueGuard(
@@ -207,8 +312,18 @@
         Mutexed<BlockPools>::Locked pools(mBlockPools);
         pools->outputPoolId = C2BlockPool::BASIC_LINEAR;
     }
-    std::string value = GetServerConfigurableFlag("media_native", "ccodec_rendering_depth", "3");
-    android::base::ParseInt(value, &mRenderingDepth);
+    if (android::media::codec::provider_->rendering_depth_removal()) {
+        constexpr int kAndroidApi202404 = 202404;
+        int vendorVersion = ::android::base::GetIntProperty("ro.vendor.api_level", -1);
+        using ::android::sysprop::MediaProperties::codec2_remove_rendering_depth;
+        if (vendorVersion > kAndroidApi202404 || codec2_remove_rendering_depth().value_or(false)) {
+            mRenderingDepth = 0;
+        }
+    } else {
+        std::string value = GetServerConfigurableFlag(
+                "media_native", "ccodec_rendering_depth", "3");
+        android::base::ParseInt(value, &mRenderingDepth);
+    }
     mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor + mRenderingDepth;
 }
 
@@ -220,7 +335,7 @@
 
 void CCodecBufferChannel::setComponent(
         const std::shared_ptr<Codec2Client::Component> &component) {
-    mComponent = component;
+    std::atomic_store(&mComponent, component);
     mComponentName = component->getName() + StringPrintf("#%d", int(uintptr_t(component.get()) % 997));
     mName = mComponentName.c_str();
 }
@@ -236,7 +351,7 @@
     inputSurface->numProcessingBuffersBalance = 0;
     inputSurface->surface = surface;
     mHasInputSurface = true;
-    return inputSurface->surface->connect(mComponent);
+    return inputSurface->surface->connect(std::atomic_load(&mComponent));
 }
 
 status_t CCodecBufferChannel::signalEndOfInputStream() {
@@ -432,7 +547,7 @@
                         now);
             }
         }
-        err = mComponent->queue(&items);
+        err = std::atomic_load(&mComponent)->queue(&items);
     }
     if (err != C2_OK) {
         Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
@@ -1342,7 +1457,7 @@
     qbi.setSurfaceDamage(Region::INVALID_REGION); // we don't have dirty regions
     qbi.getFrameTimestamps = true; // we need to know when a frame is rendered
     IGraphicBufferProducer::QueueBufferOutput qbo;
-    status_t result = mComponent->queueToOutputSurface(block, qbi, &qbo);
+    status_t result = std::atomic_load(&mComponent)->queueToOutputSurface(block, qbi, &qbo);
     if (result != OK) {
         ALOGI("[%s] queueBuffer failed: %d", mName, result);
         if (result == NO_INIT) {
@@ -1481,7 +1596,7 @@
 
 void CCodecBufferChannel::pollForRenderedBuffers() {
     FrameEventHistoryDelta delta;
-    mComponent->pollForRenderedFrames(&delta);
+    std::atomic_load(&mComponent)->pollForRenderedFrames(&delta);
     processRenderedFrames(delta);
 }
 
@@ -1490,9 +1605,10 @@
     // knowing the internal state of CCodec/CCodecBufferChannel,
     // prevent mComponent from being destroyed by holding the shared reference
     // during this interface being executed.
-    std::shared_ptr<Codec2Client::Component> comp = mComponent;
+    std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
     if (comp) {
-        comp->onBufferReleasedFromOutputSurface(generation);
+      SurfaceCallbackHandler::GetInstance().post(
+                SurfaceCallbackHandler::ON_BUFFER_RELEASED, comp, generation);
     }
 }
 
@@ -1501,9 +1617,10 @@
     // knowing the internal state of CCodec/CCodecBufferChannel,
     // prevent mComponent from being destroyed by holding the shared reference
     // during this interface being executed.
-    std::shared_ptr<Codec2Client::Component> comp = mComponent;
+    std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
     if (comp) {
-        comp->onBufferAttachedToOutputSurface(generation);
+      SurfaceCallbackHandler::GetInstance().post(
+                SurfaceCallbackHandler::ON_BUFFER_ATTACHED, comp, generation);
     }
 }
 
@@ -1574,7 +1691,7 @@
     C2ActualPipelineDelayTuning pipelineDelay(0);
     C2SecureModeTuning secureMode(C2Config::SM_UNPROTECTED);
 
-    c2_status_t err = mComponent->query(
+    c2_status_t err = std::atomic_load(&mComponent)->query(
             {
                 &iStreamFormat,
                 &oStreamFormat,
@@ -1605,7 +1722,7 @@
     size_t numOutputSlots = outputDelayValue + kSmoothnessFactor;
 
     // TODO: get this from input format
-    bool secure = mComponent->getName().find(".secure") != std::string::npos;
+    bool secure = std::atomic_load(&mComponent)->getName().find(".secure") != std::string::npos;
 
     // secure mode is a static parameter (shall not change in the executing state)
     mSendEncryptedInfoBuffer = secureMode.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED;
@@ -1651,7 +1768,7 @@
                 channelCount.invalidate();
                 pcmEncoding.invalidate();
             }
-            err = mComponent->query(stackParams,
+            err = std::atomic_load(&mComponent)->query(stackParams,
                                     { C2PortAllocatorsTuning::input::PARAM_TYPE },
                                     C2_DONT_BLOCK,
                                     &params);
@@ -1719,6 +1836,9 @@
                     channelCount.value,
                     pcmEncoding ? pcmEncoding.value : C2Config::PCM_16);
         }
+        if (!buffersBoundToCodec) {
+            inputFormat->setInt32(KEY_NUM_SLOTS, numInputSlots);
+        }
         bool conforming = (apiFeatures & API_SAME_INPUT_BUFFER);
         // For encrypted content, framework decrypts source buffer (ashmem) into
         // C2Buffers. Thus non-conforming codecs can process these.
@@ -1790,6 +1910,7 @@
             outputSurface = output->surface ?
                     output->surface->getIGraphicBufferProducer() : nullptr;
             if (outputSurface) {
+                (void)SurfaceCallbackHandler::GetInstance();
                 output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
             }
             outputGeneration = output->generation;
@@ -1811,7 +1932,7 @@
             // query C2PortAllocatorsTuning::output from component, or use default allocator if
             // unsuccessful.
             std::vector<std::unique_ptr<C2Param>> params;
-            err = mComponent->query({ },
+            err = std::atomic_load(&mComponent)->query({ },
                                     { C2PortAllocatorsTuning::output::PARAM_TYPE },
                                     C2_DONT_BLOCK,
                                     &params);
@@ -1839,7 +1960,7 @@
             // if unsuccessful.
             if (outputSurface) {
                 params.clear();
-                err = mComponent->query({ },
+                err = std::atomic_load(&mComponent)->query({ },
                                         { C2PortSurfaceAllocatorTuning::output::PARAM_TYPE },
                                         C2_DONT_BLOCK,
                                         &params);
@@ -1870,7 +1991,7 @@
             }
 
             if ((poolMask >> pools->outputAllocatorId) & 1) {
-                err = mComponent->createBlockPool(
+                err = std::atomic_load(&mComponent)->createBlockPool(
                         pools->outputAllocatorId, &pools->outputPoolId, &pools->outputPoolIntf);
                 ALOGI("[%s] Created output block pool with allocatorID %u => poolID %llu - %s",
                         mName, pools->outputAllocatorId,
@@ -1891,7 +2012,8 @@
                     C2PortBlockPoolsTuning::output::AllocUnique({ pools->outputPoolId });
 
             std::vector<std::unique_ptr<C2SettingResult>> failures;
-            err = mComponent->config({ poolIdsTuning.get() }, C2_MAY_BLOCK, &failures);
+            err = std::atomic_load(&mComponent)->config(
+                    { poolIdsTuning.get() }, C2_MAY_BLOCK, &failures);
             ALOGD("[%s] Configured output block pool ids %llu => %s",
                     mName, (unsigned long long)poolIdsTuning->m.values[0], asString(err));
             outputPoolId_ = pools->outputPoolId;
@@ -1899,7 +2021,7 @@
 
         if (prevOutputPoolId != C2BlockPool::BASIC_LINEAR
                 && prevOutputPoolId != C2BlockPool::BASIC_GRAPHIC) {
-            c2_status_t err = mComponent->destroyBlockPool(prevOutputPoolId);
+            c2_status_t err = std::atomic_load(&mComponent)->destroyBlockPool(prevOutputPoolId);
             if (err != C2_OK) {
                 ALOGW("Failed to clean up previous block pool %llu - %s (%d)\n",
                         (unsigned long long) prevOutputPoolId, asString(err), err);
@@ -1931,7 +2053,7 @@
 
         // Try to set output surface to created block pool if given.
         if (outputSurface) {
-            mComponent->setOutputSurface(
+            std::atomic_load(&mComponent)->setOutputSurface(
                     outputPoolId_,
                     outputSurface,
                     outputGeneration,
@@ -1940,7 +2062,7 @@
             // configure CPU read consumer usage
             C2StreamUsageTuning::output outputUsage{0u, C2MemoryUsage::CPU_READ};
             std::vector<std::unique_ptr<C2SettingResult>> failures;
-            err = mComponent->config({ &outputUsage }, C2_MAY_BLOCK, &failures);
+            err = std::atomic_load(&mComponent)->config({ &outputUsage }, C2_MAY_BLOCK, &failures);
             // do not print error message for now as most components may not yet
             // support this setting
             ALOGD_IF(err != C2_BAD_INDEX, "[%s] Configured output usage [%#llx]",
@@ -2052,9 +2174,18 @@
 
 status_t CCodecBufferChannel::requestInitialInputBuffers(
         std::map<size_t, sp<MediaCodecBuffer>> &&clientInputBuffers) {
+    std::optional<QueueGuard> guard;
+    if (android::media::codec::provider_->codec_buffer_state_cleanup()) {
+        guard.emplace(mSync);
+        if (!guard->isRunning()) {
+            ALOGD("[%s] skip requestInitialInputBuffers when not running", mName);
+            return OK;
+        }
+    }
     C2StreamBufferTypeSetting::output oStreamFormat(0u);
     C2PrependHeaderModeSetting prepend(PREPEND_HEADER_TO_NONE);
-    c2_status_t err = mComponent->query({ &oStreamFormat, &prepend }, {}, C2_DONT_BLOCK, nullptr);
+    c2_status_t err = std::atomic_load(&mComponent)->query(
+            { &oStreamFormat, &prepend }, {}, C2_DONT_BLOCK, nullptr);
     if (err != C2_OK && err != C2_BAD_INDEX) {
         return UNKNOWN_ERROR;
     }
@@ -2072,7 +2203,7 @@
                         now);
             }
         }
-        err = mComponent->queue(&flushedConfigs);
+        err = std::atomic_load(&mComponent)->queue(&flushedConfigs);
         if (err != C2_OK) {
             ALOGW("[%s] Error while queueing a flushed config", mName);
             return UNKNOWN_ERROR;
@@ -2123,7 +2254,8 @@
             Mutexed<BlockPools>::Locked pools(mBlockPools);
             outputPoolId = pools->outputPoolId;
         }
-        if (mComponent) mComponent->stopUsingOutputSurface(outputPoolId);
+        std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
+        if (comp) comp->stopUsingOutputSurface(outputPoolId);
 
         if (pushBlankBuffer) {
             sp<ANativeWindow> anw = static_cast<ANativeWindow *>(surface.get());
@@ -2157,7 +2289,8 @@
 
 void CCodecBufferChannel::release() {
     mInfoBuffers.clear();
-    mComponent.reset();
+    std::shared_ptr<Codec2Client::Component> nullComp;
+    std::atomic_store(&mComponent, nullComp);
     mInputAllocator.reset();
     mOutputSurface.lock()->surface.clear();
     {
@@ -2226,9 +2359,11 @@
 }
 
 void CCodecBufferChannel::onWorkDone(
-        std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
+        std::unique_ptr<C2Work> work,
+        const sp<AMessage> &inputFormat,
+        const sp<AMessage> &outputFormat,
         const C2StreamInitDataInfo::output *initData) {
-    if (handleWork(std::move(work), outputFormat, initData)) {
+    if (handleWork(std::move(work), inputFormat, outputFormat, initData)) {
         feedInputBufferIfAvailable();
     }
 }
@@ -2258,6 +2393,7 @@
 
 bool CCodecBufferChannel::handleWork(
         std::unique_ptr<C2Work> work,
+        const sp<AMessage> &inputFormat,
         const sp<AMessage> &outputFormat,
         const C2StreamInitDataInfo::output *initData) {
     {
@@ -2431,6 +2567,9 @@
         } else {
             input->numSlots = newNumSlots;
         }
+        if (inputFormat->contains(KEY_NUM_SLOTS)) {
+            inputFormat->setInt32(KEY_NUM_SLOTS, input->numSlots);
+        }
     }
     size_t numOutputSlots = 0;
     uint32_t reorderDepth = 0;
@@ -2479,7 +2618,7 @@
             }
         }
         if (maxDequeueCount > 0) {
-            mComponent->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
+            std::atomic_load(&mComponent)->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
         }
     }
 
@@ -2708,6 +2847,7 @@
         oldSurface = outputSurface->surface;
     }
     if (newSurface) {
+        (void)SurfaceCallbackHandler::GetInstance();
         newSurface->setScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
         newSurface->setDequeueTimeout(kDequeueTimeoutNs);
         newSurface->setMaxDequeuedBufferCount(maxDequeueCount);
@@ -2726,7 +2866,7 @@
     }
 
     if (outputPoolIntf) {
-        if (mComponent->setOutputSurface(
+        if (std::atomic_load(&mComponent)->setOutputSurface(
                 outputPoolId,
                 producer,
                 generation,
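
The CCodecBufferChannel changes above replace plain reads and writes of the mComponent shared_ptr with std::atomic_load / std::atomic_store, so setComponent() and release() cannot race against the many call sites that dereference the component. Below is a minimal, self-contained sketch of that pattern with hypothetical Component and Channel types (not the actual CCodec code); note that these free-function shared_ptr overloads are deprecated in C++20 in favor of std::atomic<std::shared_ptr<T>>.

    #include <memory>
    #include <string>

    // Hypothetical stand-ins for Codec2Client::Component and CCodecBufferChannel.
    struct Component {
        std::string name() const { return "c2.example.decoder"; }
    };

    class Channel {
    public:
        void setComponent(const std::shared_ptr<Component> &comp) {
            std::atomic_store(&mComponent, comp);  // publish the new component
        }
        void release() {
            std::atomic_store(&mComponent, std::shared_ptr<Component>());  // clear it
        }
        std::string componentName() const {
            // Take a snapshot; the returned shared_ptr keeps the component alive
            // even if setComponent()/release() runs concurrently on another thread.
            std::shared_ptr<Component> comp = std::atomic_load(&mComponent);
            return comp ? comp->name() : "<none>";
        }
    private:
        std::shared_ptr<Component> mComponent;
    };
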
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 4d296fd..6493b87 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -193,12 +193,15 @@
     /**
      * Notify input client about work done.
      *
-     * @param workItems   finished work item.
+     * @param workItems    finished work item.
+     * @param inputFormat  input format
      * @param outputFormat new output format if it has changed, otherwise nullptr
-     * @param initData    new init data (CSD) if it has changed, otherwise nullptr
+     * @param initData     new init data (CSD) if it has changed, otherwise nullptr
      */
     void onWorkDone(
-            std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
+            std::unique_ptr<C2Work> work,
+            const sp<AMessage> &inputFormat,
+            const sp<AMessage> &outputFormat,
             const C2StreamInitDataInfo::output *initData);
 
     /**
@@ -311,7 +314,9 @@
                                       std::shared_ptr<C2LinearBlock> encryptedBlock = nullptr,
                                       size_t blockSize = 0);
     bool handleWork(
-            std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
+            std::unique_ptr<C2Work> work,
+            const sp<AMessage> &inputFormat,
+            const sp<AMessage> &outputFormat,
             const C2StreamInitDataInfo::output *initData);
     void sendOutputBuffers();
     void ensureDecryptDestination(size_t size);
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 9297520..3841831 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -436,6 +436,86 @@
     { C2Config::hdr_format_t::HDR10_PLUS, AV1ProfileMain10HDR10Plus },
 };
 
+// APV
+ALookup<C2Config::profile_t, int32_t> sApvProfiles = {
+    { C2Config::PROFILE_APV_422_10, APVProfile422_10 },
+    { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10 },
+    { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10Plus },
+};
+
+ALookup<C2Config::profile_t, int32_t> sApvHdrProfiles = {
+    { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10 },
+};
+
+ALookup<C2Config::profile_t, int32_t> sApvHdr10PlusProfiles = {
+    { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10Plus },
+};
+
+ALookup<C2Config::level_t, int32_t> sApvLevels = {
+    { C2Config::LEVEL_APV_1_BAND_0, APVLevel1Band0 },
+    { C2Config::LEVEL_APV_1_BAND_1, APVLevel1Band1 },
+    { C2Config::LEVEL_APV_1_BAND_2, APVLevel1Band2 },
+    { C2Config::LEVEL_APV_1_BAND_3, APVLevel1Band3 },
+    { C2Config::LEVEL_APV_1_1_BAND_0, APVLevel11Band0 },
+    { C2Config::LEVEL_APV_1_1_BAND_1, APVLevel11Band1 },
+    { C2Config::LEVEL_APV_1_1_BAND_2, APVLevel11Band2 },
+    { C2Config::LEVEL_APV_1_1_BAND_3, APVLevel11Band3 },
+    { C2Config::LEVEL_APV_2_BAND_0, APVLevel2Band0 },
+    { C2Config::LEVEL_APV_2_BAND_1, APVLevel2Band1 },
+    { C2Config::LEVEL_APV_2_BAND_2, APVLevel2Band2 },
+    { C2Config::LEVEL_APV_2_BAND_3, APVLevel2Band3 },
+    { C2Config::LEVEL_APV_2_1_BAND_0, APVLevel21Band0 },
+    { C2Config::LEVEL_APV_2_1_BAND_1, APVLevel21Band1 },
+    { C2Config::LEVEL_APV_2_1_BAND_2, APVLevel21Band2 },
+    { C2Config::LEVEL_APV_2_1_BAND_3, APVLevel21Band3 },
+    { C2Config::LEVEL_APV_3_BAND_0, APVLevel3Band0 },
+    { C2Config::LEVEL_APV_3_BAND_1, APVLevel3Band1 },
+    { C2Config::LEVEL_APV_3_BAND_2, APVLevel3Band2 },
+    { C2Config::LEVEL_APV_3_BAND_3, APVLevel3Band3 },
+    { C2Config::LEVEL_APV_3_1_BAND_0, APVLevel31Band0 },
+    { C2Config::LEVEL_APV_3_1_BAND_1, APVLevel31Band1 },
+    { C2Config::LEVEL_APV_3_1_BAND_2, APVLevel31Band2 },
+    { C2Config::LEVEL_APV_3_1_BAND_3, APVLevel31Band3 },
+    { C2Config::LEVEL_APV_4_BAND_0, APVLevel4Band0 },
+    { C2Config::LEVEL_APV_4_BAND_1, APVLevel4Band1 },
+    { C2Config::LEVEL_APV_4_BAND_2, APVLevel4Band2 },
+    { C2Config::LEVEL_APV_4_BAND_3, APVLevel4Band3 },
+    { C2Config::LEVEL_APV_4_1_BAND_0, APVLevel41Band0 },
+    { C2Config::LEVEL_APV_4_1_BAND_1, APVLevel41Band1 },
+    { C2Config::LEVEL_APV_4_1_BAND_2, APVLevel41Band2 },
+    { C2Config::LEVEL_APV_4_1_BAND_3, APVLevel41Band3 },
+    { C2Config::LEVEL_APV_5_BAND_0, APVLevel5Band0 },
+    { C2Config::LEVEL_APV_5_BAND_1, APVLevel5Band1 },
+    { C2Config::LEVEL_APV_5_BAND_2, APVLevel5Band2 },
+    { C2Config::LEVEL_APV_5_BAND_3, APVLevel5Band3 },
+    { C2Config::LEVEL_APV_5_1_BAND_0, APVLevel51Band0 },
+    { C2Config::LEVEL_APV_5_1_BAND_1, APVLevel51Band1 },
+    { C2Config::LEVEL_APV_5_1_BAND_2, APVLevel51Band2 },
+    { C2Config::LEVEL_APV_5_1_BAND_3, APVLevel51Band3 },
+    { C2Config::LEVEL_APV_6_BAND_0, APVLevel6Band0 },
+    { C2Config::LEVEL_APV_6_BAND_1, APVLevel6Band1 },
+    { C2Config::LEVEL_APV_6_BAND_2, APVLevel6Band2 },
+    { C2Config::LEVEL_APV_6_BAND_3, APVLevel6Band3 },
+    { C2Config::LEVEL_APV_6_1_BAND_0, APVLevel61Band0 },
+    { C2Config::LEVEL_APV_6_1_BAND_1, APVLevel61Band1 },
+    { C2Config::LEVEL_APV_6_1_BAND_2, APVLevel61Band2 },
+    { C2Config::LEVEL_APV_6_1_BAND_3, APVLevel61Band3 },
+    { C2Config::LEVEL_APV_7_BAND_0, APVLevel7Band0 },
+    { C2Config::LEVEL_APV_7_BAND_1, APVLevel7Band1 },
+    { C2Config::LEVEL_APV_7_BAND_2, APVLevel7Band2 },
+    { C2Config::LEVEL_APV_7_BAND_3, APVLevel7Band3 },
+    { C2Config::LEVEL_APV_7_1_BAND_0, APVLevel71Band0 },
+    { C2Config::LEVEL_APV_7_1_BAND_1, APVLevel71Band1 },
+    { C2Config::LEVEL_APV_7_1_BAND_2, APVLevel71Band2 },
+    { C2Config::LEVEL_APV_7_1_BAND_3, APVLevel71Band3 },
+};
+
+ALookup<C2Config::hdr_format_t, int32_t> sApvHdrFormats = {
+    { C2Config::hdr_format_t::HLG, APVProfile422_10 },
+    { C2Config::hdr_format_t::HDR10, APVProfile422_10HDR10 },
+    { C2Config::hdr_format_t::HDR10_PLUS, APVProfile422_10HDR10Plus },
+};
+
 // HAL_PIXEL_FORMAT_* -> COLOR_Format*
 ALookup<uint32_t, int32_t> sPixelFormats = {
     { HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, COLOR_FormatSurface },
@@ -720,6 +800,37 @@
     int32_t mBitDepth;
 };
 
+// APV
+struct ApvProfileLevelMapper : ProfileLevelMapperHelper {
+    ApvProfileLevelMapper(bool isHdr = false, bool isHdr10Plus = false) :
+        ProfileLevelMapperHelper(),
+        mIsHdr(isHdr), mIsHdr10Plus(isHdr10Plus) {}
+
+    virtual bool simpleMap(C2Config::level_t from, int32_t *to) {
+        return sApvLevels.map(from, to);
+    }
+    virtual bool simpleMap(int32_t from, C2Config::level_t *to) {
+        return sApvLevels.map(from, to);
+    }
+    virtual bool simpleMap(C2Config::profile_t from, int32_t *to) {
+        return mIsHdr10Plus ? sApvHdr10PlusProfiles.map(from, to) :
+                     mIsHdr ? sApvHdrProfiles.map(from, to) :
+                              sApvProfiles.map(from, to);
+    }
+    virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
+        return mIsHdr10Plus ? sApvHdr10PlusProfiles.map(from, to) :
+                     mIsHdr ? sApvHdrProfiles.map(from, to) :
+                              sApvProfiles.map(from, to);
+    }
+    virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+        return sApvHdrFormats.map(from, to);
+    }
+
+private:
+    bool mIsHdr;
+    bool mIsHdr10Plus;
+};
+
 } // namespace
 
 // the default mapper is used for media types that do not support HDR
@@ -753,6 +864,8 @@
         return std::make_shared<Vp9ProfileLevelMapper>();
     } else if (mediaType == MIMETYPE_VIDEO_AV1) {
         return std::make_shared<Av1ProfileLevelMapper>();
+    } else if (mediaType == MIMETYPE_VIDEO_APV) {
+        return std::make_shared<ApvProfileLevelMapper>();
     }
     return nullptr;
 }
@@ -767,6 +880,8 @@
         return std::make_shared<Vp9ProfileLevelMapper>(true, isHdr10Plus);
     } else if (mediaType == MIMETYPE_VIDEO_AV1) {
         return std::make_shared<Av1ProfileLevelMapper>(true, isHdr10Plus);
+    } else if (mediaType == MIMETYPE_VIDEO_APV) {
+        return std::make_shared<ApvProfileLevelMapper>(true, isHdr10Plus);
     }
     return nullptr;
 }
@@ -779,6 +894,8 @@
         return GetProfileLevelMapper(mediaType);
     } else if (mediaType == MIMETYPE_VIDEO_AV1 && bitDepth == 10) {
         return std::make_shared<Av1ProfileLevelMapper>(false, false, bitDepth);
+    } else if (mediaType == MIMETYPE_VIDEO_APV) {
+        return std::make_shared<ApvProfileLevelMapper>();
     }
     return nullptr;
 }
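
The new APV tables above follow the same ALookup pattern as the other codecs: every SDK APV profile constant maps back to the single C2 profile PROFILE_APV_422_10, while the forward mapping consults the HDR10+ or HDR10 table first when the mapper was created for an HDR session. A compressed sketch of that selection logic follows; the type and the SDK constant values are assumptions for illustration, not the real Codec2Mapper/ALookup API.

    #include <cstdint>

    // Hypothetical sketch; the real code uses ALookup tables and the
    // MediaCodecInfo APVProfile* constants, whose values are assumed here.
    struct ApvMapperSketch {
        bool isHdr = false;
        bool isHdr10Plus = false;

        static constexpr int32_t kProfile422_10          = 0x01;    // assumed value
        static constexpr int32_t kProfile422_10HDR10     = 0x1000;  // assumed value
        static constexpr int32_t kProfile422_10HDR10Plus = 0x2000;  // assumed value

        // Forward map for C2Config::PROFILE_APV_422_10: the HDR10+ table wins over
        // the HDR10 table, which wins over the base table, mirroring simpleMap() above.
        int32_t sdkProfileFor422_10() const {
            return isHdr10Plus ? kProfile422_10HDR10Plus
                 : isHdr       ? kProfile422_10HDR10
                               : kProfile422_10;
        }
    };
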
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index dc06ee6..9d1cbff 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -53,7 +53,7 @@
     ],
 
     defaults: [
-	"aconfig_lib_cc_static_link.defaults",
+        "aconfig_lib_cc_static_link.defaults",
         "libcodec2_hal_selection",
     ],
 
@@ -68,6 +68,7 @@
         "C2PlatformStorePluginLoader.cpp",
         "C2Store.cpp",
         "platform/C2BqBuffer.cpp",
+        "platform/C2BqPoolInvalidator.cpp",
         "platform/C2SurfaceSyncObj.cpp",
         "platform/C2IgbaBuffer.cpp",
         "types.cpp",
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index 0987da2..6ec9d6b 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -1206,7 +1206,8 @@
     emplace("libcodec2_soft_vp8enc.so");
     emplace("libcodec2_soft_vp9dec.so");
     emplace("libcodec2_soft_vp9enc.so");
-
+    emplace("libcodec2_soft_apvenc.so");
+    emplace("libcodec2_soft_apvdec.so");
 }
 
 // For testing only
diff --git a/media/codec2/vndk/include/C2BqPoolInvalidator.h b/media/codec2/vndk/include/C2BqPoolInvalidator.h
new file mode 100644
index 0000000..612d023
--- /dev/null
+++ b/media/codec2/vndk/include/C2BqPoolInvalidator.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/no_destructor.h>
+#include <media/stagefright/foundation/ABase.h>
+
+#include <condition_variable>
+#include <deque>
+#include <list>
+#include <memory>
+#include <thread>
+
+class C2BufferQueueBlockPool;
+
+namespace android {
+
+/**
+ * Container class used to invalidate C2BufferQueueBlockPool(s) and their resources
+ * when the client process dies abruptly.
+ */
+class C2BqPoolInvalidateItem {
+public:
+
+    /**
+     * invalidate contained C2BufferQueueBlockPool(s) and their resources
+     */
+    void invalidate();
+
+    /**
+     * skip invalidate() if it is scheduled and has not yet run.
+     */
+    void skip();
+
+    /**
+     * returns whether invalidate() is required or not.
+     */
+    bool needsInvalidate();
+
+    C2BqPoolInvalidateItem(std::list<std::shared_ptr<C2BufferQueueBlockPool>> &&pools);
+
+    ~C2BqPoolInvalidateItem() = default;
+private:
+
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>>  mPools;
+    bool mNeedsInvalidate;
+    std::mutex mLock;
+
+    DISALLOW_EVIL_CONSTRUCTORS(C2BqPoolInvalidateItem);
+};
+
+/**
+ * Asynchronous C2BufferQueueBlockPool invalidator.
+ *
+ * This holds C2BqPoolInvalidateItem objects and calls invalidate() on them
+ * asynchronously from a separate thread.
+ */
+class C2BqPoolInvalidator {
+public:
+    /**
+     * This gets the singleton instance of the class.
+     */
+    static C2BqPoolInvalidator &getInstance();
+
+    /**
+     * queues an invalidation item. the item will be invalidated from a
+     * separate thread after a certain delay.
+     */
+    void queue(std::shared_ptr<C2BqPoolInvalidateItem> &item);
+
+    ~C2BqPoolInvalidator();
+private:
+
+    C2BqPoolInvalidator();
+
+    void run();
+
+    std::thread mThread;
+    bool mDone;
+
+    std::mutex mMutex;
+    std::condition_variable mCv;
+
+    std::deque<std::pair<int64_t, std::shared_ptr<C2BqPoolInvalidateItem>>> mItems;
+
+    friend class ::android::base::NoDestructor<C2BqPoolInvalidator>;
+
+    DISALLOW_EVIL_CONSTRUCTORS(C2BqPoolInvalidator);
+};
+
+}  // namespace android
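
Based only on the declarations in this header, a hedged sketch of the intended usage: wrap the block pools that belonged to the dead client in a C2BqPoolInvalidateItem and hand it to the singleton C2BqPoolInvalidator, which invalidates them asynchronously after its built-in delay. The helper function name is hypothetical.

    #include <list>
    #include <memory>

    #include <C2BqPoolInvalidator.h>

    // Hypothetical helper; uses only the API declared in this header.
    void scheduleBqPoolCleanup(std::list<std::shared_ptr<C2BufferQueueBlockPool>> pools) {
        auto item = std::make_shared<android::C2BqPoolInvalidateItem>(std::move(pools));
        android::C2BqPoolInvalidator::getInstance().queue(item);
        // If the pools are torn down normally before the delay expires, the owner
        // can call item->skip() so the pending invalidation becomes a no-op.
    }
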
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 665f9fc..17dfe9c 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -997,7 +997,7 @@
         return -1;
     }
 
-    if (toUsage != graphicBuffer->getUsage()) {
+    if ((toUsage & graphicBuffer->getUsage()) != toUsage) {
         sp<GraphicBuffer> newBuffer = new GraphicBuffer(
             graphicBuffer->handle, GraphicBuffer::CLONE_HANDLE,
             graphicBuffer->width, graphicBuffer->height, graphicBuffer->format,
diff --git a/media/codec2/vndk/platform/C2BqPoolInvalidator.cpp b/media/codec2/vndk/platform/C2BqPoolInvalidator.cpp
new file mode 100644
index 0000000..2666cd3
--- /dev/null
+++ b/media/codec2/vndk/platform/C2BqPoolInvalidator.cpp
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2BqPoolInvalidator"
+#include <utils/Log.h>
+#include <utils/SystemClock.h>
+
+#include <C2BqBufferPriv.h>
+#include <C2BqPoolInvalidator.h>
+
+namespace android {
+
+namespace {
+    static constexpr int64_t kBqPoolInvalidateDelayMs = 1000;
+} // anonymous namespace
+
+C2BqPoolInvalidateItem::C2BqPoolInvalidateItem(
+        std::list<std::shared_ptr<C2BufferQueueBlockPool>> &&pools) : mPools(std::move(pools)) {
+    if (!mPools.empty()) {
+        mNeedsInvalidate = true;
+    } else {
+        mNeedsInvalidate = false;
+    }
+}
+
+void C2BqPoolInvalidateItem::invalidate() {
+    std::list<std::shared_ptr<C2BufferQueueBlockPool>> pools;
+    {
+        std::unique_lock<std::mutex> l(mLock);
+        if (!mNeedsInvalidate) {
+            return;
+        }
+        pools = std::move(mPools);
+        mNeedsInvalidate = false;
+    }
+    for(auto it = pools.begin(); it != pools.end(); ++it) {
+        (*it)->invalidate();
+    }
+}
+
+void C2BqPoolInvalidateItem::skip() {
+    std::unique_lock<std::mutex> l(mLock);
+    mNeedsInvalidate = false;
+    mPools.clear();
+}
+
+bool C2BqPoolInvalidateItem::needsInvalidate() {
+    std::unique_lock<std::mutex> l(mLock);
+    return mNeedsInvalidate;
+}
+
+C2BqPoolInvalidator &C2BqPoolInvalidator::getInstance() {
+    static android::base::NoDestructor<C2BqPoolInvalidator> sInvalidator;
+    return *sInvalidator;
+}
+
+C2BqPoolInvalidator::C2BqPoolInvalidator() : mDone(false) {
+    mThread = std::thread(&C2BqPoolInvalidator::run, this);
+}
+
+C2BqPoolInvalidator::~C2BqPoolInvalidator() {
+    {
+        std::unique_lock<std::mutex> l(mMutex);
+        mDone = true;
+        mCv.notify_one();
+    }
+    if (mThread.joinable()) {
+        mThread.join();
+    }
+}
+
+void C2BqPoolInvalidator::queue(std::shared_ptr<C2BqPoolInvalidateItem> &item) {
+    std::unique_lock<std::mutex> l(mMutex);
+    std::pair<int64_t, std::shared_ptr<C2BqPoolInvalidateItem>> p =
+            std::make_pair(::android::elapsedRealtime() + kBqPoolInvalidateDelayMs, item);
+    mItems.push_back(p);
+    mCv.notify_one();
+}
+
+void C2BqPoolInvalidator::run() {
+    while(true) {
+        int64_t nowMs = ::android::elapsedRealtime();
+        std::unique_lock<std::mutex> l(mMutex);
+        if (mDone) {
+            break;
+        }
+        std::list<std::shared_ptr<C2BqPoolInvalidateItem>> items;
+        while (!mItems.empty()) {
+            if (mItems.front().first <= nowMs) {
+                items.push_back(mItems.front().second);
+                mItems.pop_front();
+            } else {
+                break;
+            }
+        }
+        if (items.empty()) {
+            if (mItems.empty()) {
+                mCv.wait(l);
+            } else {
+                int64_t nextMs = mItems.front().first;
+                if (nextMs > nowMs) {
+                    mCv.wait_for(l, std::chrono::milliseconds(nextMs - nowMs));
+                }
+            }
+        } else {
+            l.unlock();
+            int invalidated = 0;
+            for (auto it = items.begin(); it != items.end(); ++it, ++invalidated) {
+                (*it)->invalidate();
+            }
+            ALOGD("invalidated %d bqpool items", invalidated);
+        }
+    }
+}
+
+}  // namespace android
diff --git a/media/janitors/media_solutions_OWNERS b/media/janitors/media_solutions_OWNERS
index 17bc7dd..004fa30 100644
--- a/media/janitors/media_solutions_OWNERS
+++ b/media/janitors/media_solutions_OWNERS
@@ -1,22 +1,24 @@
 # Bug component: 1344
 # go/android-fwk-media-solutions for info on areas of ownership.
 
-# MediaRouter and native mirroring only:
-adadukin@google.com
-aquilescanta@google.com
-bishoygendy@google.com
-ivanbuper@google.com
-
-# MediaMuxer, MediaRecorder, and seamless transcoding only:
 andrewlewis@google.com
-claincly@google.com
-
-# Everything in go/android-fwk-media-solutions not covered above:
 bachinger@google.com
-christosts@google.com
+claincly@google.com
+dancho@google.com
 ibaker@google.com
+ivanbuper@google.com
 jbibik@google.com
 michaelkatz@google.com
 rohks@google.com
+sheenachhabra@google.com
+simakova@google.com
 tianyifeng@google.com
 tonihei@google.com
+
+# MediaRouter and native mirroring only:
+aquilescanta@google.com
+
+# Emergency rollbacks and fixes outside LON timezone
+jmtrivi@google.com # US-MTV
+lajos@google.com # US-MTV
+scottnien@google.com # TW-NTC
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index ba231c1..67c6715 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -58,14 +58,13 @@
         "libaudioclient",
         "libaudioutils",
         "libbase_ndk",
-        "libcgrouprc",
-        "libcgrouprc_format",
         "libcutils",
         "libjsoncpp",
         "liblog",
         "libmedia_helper",
         "libmediametrics",
         "libprocessgroup",
+        "libprocessgroup_util",
         "mediametricsservice-aidl-cpp",
         "shared-file-region-aidl-cpp",
     ],
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index a1d5b2b..8c330d4 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -1141,11 +1141,10 @@
             // start of lock scope
             AutoMutex lock(mLock);
 
-            uint32_t newSequence = mSequence;
             // did previous obtainBuffer() fail due to media server death or voluntary invalidation?
             if (status == DEAD_OBJECT) {
                 // re-create track, unless someone else has already done so
-                if (newSequence == oldSequence) {
+                if (mSequence == oldSequence) {
                     if (!audio_is_linear_pcm(mFormat)) {
                         // If compressed capture, don't attempt to restore the track.
                         // Return a DEAD_OBJECT error and let the caller recreate.
@@ -1161,7 +1160,7 @@
                     }
                 }
             }
-            oldSequence = newSequence;
+            oldSequence = mSequence;
 
             // Keep the extra references
             proxy = mProxy;
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index 61d5ccd..14e528f 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -24,12 +24,11 @@
         "android.hardware.audio.common@7.0-enums",
         "audiopermissioncontroller",
         "libaudiomockhal",
-        "libcgrouprc",
-        "libcgrouprc_format",
         "libfakeservicemanager",
         "libjsoncpp",
         "libmediametricsservice",
         "libprocessgroup",
+        "libprocessgroup_util",
         "shared-file-region-aidl-cpp",
     ],
     shared_libs: [
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index a95c700..65ada70 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -37,8 +37,6 @@
         "effect-aidl-cpp",
         "libaudioclient",
         "libbase",
-        "libcgrouprc",
-        "libcgrouprc_format",
         "libcutils",
         "libjsoncpp",
         "liblog",
@@ -46,6 +44,7 @@
         "libmediametrics",
         "libmediametricsservice",
         "libprocessgroup",
+        "libprocessgroup_util",
         "shared-file-region-aidl-cpp",
     ],
     shared_libs: [
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index ddf14a3..3941280 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -107,7 +107,6 @@
         "framework-permission-aidl-cpp",
         "libaudioutils",
         "libbase",
-        "libcgrouprc",
         "libdl",
         "libmedia",
         "libmedia_helper",
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index 7f55e48..aa6cb0d 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -670,6 +670,25 @@
                                          AudioEncapsulationMetadataType::FRAMEWORK_TUNER,
                                          AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR));
 
+TEST(AudioPortDeviceExt_speakerLayoutRoundTripTest, Aidl2Legacy2Aidl_layoutMask) {
+    AudioPortDeviceExt initial{};
+    initial.speakerLayout = make_ACL_Stereo();
+    auto conv = aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+
+TEST(AudioPortDeviceExt_speakerLayoutRoundTripTest, Aidl2Legacy2Aidl_null) {
+    const AudioPortDeviceExt initial{};  // speakerLayout is null
+    auto conv = aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+
 class AudioGainModeRoundTripTest : public testing::TestWithParam<AudioGainMode> {};
 TEST_P(AudioGainModeRoundTripTest, Aidl2Legacy2Aidl) {
     const auto initial = GetParam();
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index 75e2c11..74a64bf 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -23,7 +23,6 @@
     ],
 
     required: [
-        "libaudiohal@5.0",
         "libaudiohal@6.0",
         "libaudiohal@7.0",
         "libaudiohal@7.1",
diff --git a/media/libaudiohal/FactoryHal.cpp b/media/libaudiohal/FactoryHal.cpp
index 15cb297..2c30693 100644
--- a/media/libaudiohal/FactoryHal.cpp
+++ b/media/libaudiohal/FactoryHal.cpp
@@ -50,12 +50,11 @@
  * This list need to keep sync with AudioHalVersionInfo.VERSIONS in
  * media/java/android/media/AudioHalVersionInfo.java.
  */
-static const std::array<AudioHalVersionInfo, 5> sAudioHALVersions = {
+static const std::array<AudioHalVersionInfo, 4> sAudioHALVersions = {
     AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, 1, 0),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 0),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 6, 0),
-    AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 5, 0),
 };
 
 static const std::map<AudioHalVersionInfo::Type, InterfaceName> sDevicesHALInterfaces = {
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index 1a6b949..f5dec56 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -83,32 +83,6 @@
 }
 
 cc_library_shared {
-    name: "libaudiohal@5.0",
-    defaults: [
-        "libaudiohal_default",
-        "libaudiohal_hidl_default",
-    ],
-    srcs: [
-        ":audio_core_hal_client_sources",
-        ":audio_effect_hidl_hal_client_sources",
-        "EffectsFactoryHalEntry.cpp",
-    ],
-    shared_libs: [
-        "android.hardware.audio.common@5.0",
-        "android.hardware.audio.common@5.0-util",
-        "android.hardware.audio.effect@5.0",
-        "android.hardware.audio.effect@5.0-util",
-        "android.hardware.audio@5.0",
-        "android.hardware.audio@5.0-util",
-    ],
-    cflags: [
-        "-DMAJOR_VERSION=5",
-        "-DMINOR_VERSION=0",
-        "-include common/all-versions/VersionMacro.h",
-    ],
-}
-
-cc_library_shared {
     name: "libaudiohal@6.0",
     defaults: [
         "libaudiohal_default",
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index c4e4ae8..e138cea 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -232,7 +232,9 @@
             RETURN_STATUS_IF_ERROR(pause(&reply));
             if (reply.state != StreamDescriptor::State::PAUSED &&
                     reply.state != StreamDescriptor::State::DRAIN_PAUSED &&
-                    reply.state != StreamDescriptor::State::TRANSFER_PAUSED) {
+                    reply.state != StreamDescriptor::State::TRANSFER_PAUSED &&
+                    (state != StreamDescriptor::State::DRAINING ||
+                        reply.state != StreamDescriptor::State::IDLE)) {
                 AUGMENT_LOG(E, "unexpected stream state: %s (expected PAUSED)",
                             toString(reply.state).c_str());
                 return INVALID_OPERATION;
@@ -367,8 +369,12 @@
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
     RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions));
-    *frames = std::max<int64_t>(0, reply.observable.frames);
-    *timestamp = std::max<int64_t>(0, reply.observable.timeNs);
+    if (reply.observable.frames == StreamDescriptor::Position::UNKNOWN ||
+        reply.observable.timeNs == StreamDescriptor::Position::UNKNOWN) {
+        return INVALID_OPERATION;
+    }
+    *frames = reply.observable.frames;
+    *timestamp = reply.observable.timeNs;
     return OK;
 }
 
@@ -377,8 +383,12 @@
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
     RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
-    *frames = std::max<int64_t>(0, reply.hardware.frames);
-    *timestamp = std::max<int64_t>(0, reply.hardware.timeNs);
+    if (reply.hardware.frames == StreamDescriptor::Position::UNKNOWN ||
+        reply.hardware.timeNs == StreamDescriptor::Position::UNKNOWN) {
+        return INVALID_OPERATION;
+    }
+    *frames = reply.hardware.frames;
+    *timestamp = reply.hardware.timeNs;
     return OK;
 }
 
@@ -387,7 +397,10 @@
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
     RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
-    *frames = std::max<int32_t>(0, reply.xrunFrames);
+    if (reply.xrunFrames == StreamDescriptor::Position::UNKNOWN) {
+        return INVALID_OPERATION;
+    }
+    *frames = reply.xrunFrames;
     return OK;
 }
 
@@ -577,7 +590,9 @@
         // For compatibility with HIDL behavior, apply a "soft" position reset
         // after receiving the "drain ready" callback.
         std::lock_guard l(mLock);
-        mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
+        if (mLastReply.observable.frames != StreamDescriptor::Position::UNKNOWN) {
+            mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
+        }
     } else {
         AUGMENT_LOG(W, "unexpected onDrainReady in the state %s", toString(state).c_str());
     }
@@ -670,7 +685,8 @@
             }
             mLastReply = *reply;
             mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
-            if (!mIsInput && reply->status == STATUS_OK) {
+            if (!mIsInput && reply->status == STATUS_OK &&
+                    reply->observable.frames != StreamDescriptor::Position::UNKNOWN) {
                 if (command.getTag() == StreamDescriptor::Command::standby &&
                         reply->state == StreamDescriptor::State::STANDBY) {
                     mStatePositions.framesAtStandby = reply->observable.frames;
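
The StreamHalAidl changes above stop clamping UNKNOWN positions to zero and instead report INVALID_OPERATION, so callers can distinguish "no position available yet" from a genuine zero. A distilled illustration of that behavior follows; the helper and the UNKNOWN sentinel value are stand-ins, not the real StreamDescriptor API.

    #include <cstdint>

    #include <utils/Errors.h>

    // Stand-in for StreamDescriptor::Position::UNKNOWN (assumed sentinel).
    constexpr int64_t kUnknownPosition = -1;

    // Hypothetical helper mirroring the new behavior: propagate an error instead of
    // clamping, so "no position yet" is distinguishable from a genuine zero.
    android::status_t extractPosition(int64_t reportedFrames, int64_t reportedTimeNs,
                                      int64_t *frames, int64_t *timeNs) {
        if (reportedFrames == kUnknownPosition || reportedTimeNs == kUnknownPosition) {
            return android::INVALID_OPERATION;
        }
        *frames = reportedFrames;
        *timeNs = reportedTimeNs;
        return android::OK;
    }
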
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index fd4e615..dd14ac2 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -52,6 +52,7 @@
     if (mDpFreq != nullptr) {
         mDpFreq->reset();
     }
+    mEngineInited = false;
     return RetCode::SUCCESS;
 }
 
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index 44b7d97..d791fab 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -55,8 +55,8 @@
     defaults: ["libaudiopreprocessing-defaults"],
     relative_install_path: "soundfx",
     srcs: ["PreProcessing.cpp"],
-    header_libs: [
-        "libwebrtc_absl_headers",
+    static_libs: [
+        "libabsl",
     ],
 }
 
@@ -77,6 +77,7 @@
         "libutils",
     ],
     static_libs: [
+        "libabsl",
         "webrtc_audio_processing",
     ],
     header_libs: [
diff --git a/media/libmedia/AudioCapabilities.cpp b/media/libmedia/AudioCapabilities.cpp
index e8cf517..1a92307 100644
--- a/media/libmedia/AudioCapabilities.cpp
+++ b/media/libmedia/AudioCapabilities.cpp
@@ -26,7 +26,7 @@
 
 namespace android {
 
-const Range<int>& AudioCapabilities::getBitrateRange() const {
+const Range<int32_t>& AudioCapabilities::getBitrateRange() const {
     return mBitrateRange;
 }
 
@@ -86,7 +86,7 @@
 }
 
 void AudioCapabilities::initWithPlatformLimits() {
-    mBitrateRange = Range<int>(0, INT_MAX);
+    mBitrateRange = Range<int>(0, INT32_MAX);
     mInputChannelRanges.push_back(Range<int>(1, MAX_INPUT_CHANNEL_COUNT));
 
     const int minSampleRate = base::GetIntProperty("ro.mediacodec.min_sample_rate", 7350);
@@ -94,30 +94,31 @@
     mSampleRateRanges.push_back(Range<int>(minSampleRate, maxSampleRate));
 }
 
-bool AudioCapabilities::supports(int sampleRate, int inputChannels) {
+bool AudioCapabilities::supports(std::optional<int> sampleRate,
+        std::optional<int> inputChannels) {
     // channels and sample rates are checked orthogonally
-    if (inputChannels != 0
+    if (inputChannels
             && !std::any_of(mInputChannelRanges.begin(), mInputChannelRanges.end(),
-            [inputChannels](const Range<int> &a) { return a.contains(inputChannels); })) {
+            [inputChannels](const Range<int> &a) { return a.contains(inputChannels.value()); })) {
         return false;
     }
-    if (sampleRate != 0
+    if (sampleRate
             && !std::any_of(mSampleRateRanges.begin(), mSampleRateRanges.end(),
-            [sampleRate](const Range<int> &a) { return a.contains(sampleRate); })) {
+            [sampleRate](const Range<int> &a) { return a.contains(sampleRate.value()); })) {
         return false;
     }
     return true;
 }
 
 bool AudioCapabilities::isSampleRateSupported(int sampleRate) {
-    return supports(sampleRate, 0);
+    return supports(std::make_optional<int>(sampleRate), std::nullopt);
 }
 
 void AudioCapabilities::limitSampleRates(std::vector<int> rates) {
     std::vector<Range<int>> sampleRateRanges;
     std::sort(rates.begin(), rates.end());
     for (int rate : rates) {
-        if (supports(rate, 0 /* channels */)) {
+        if (supports(std::make_optional<int>(rate), std::nullopt /* channels */)) {
             sampleRateRanges.push_back(Range<int>(rate, rate));
         }
     }
@@ -280,7 +281,7 @@
 
 void AudioCapabilities::applyLimits(
         const std::vector<Range<int>> &inputChannels,
-        const std::optional<Range<int>> &bitRates) {
+        const std::optional<Range<int32_t>> &bitRates) {
     // clamp & make a local copy
     std::vector<Range<int>> inputChannelsCopy(inputChannels.size());
     for (int i = 0; i < inputChannels.size(); i++) {
@@ -301,7 +302,7 @@
 void AudioCapabilities::parseFromInfo(const sp<AMessage> &format) {
     int maxInputChannels = MAX_INPUT_CHANNEL_COUNT;
     std::vector<Range<int>> channels = { Range<int>(1, maxInputChannels) };
-    std::optional<Range<int>> bitRates = POSITIVE_INTEGERS;
+    std::optional<Range<int32_t>> bitRates = POSITIVE_INT32;
 
     AString rateAString;
     if (format->findString("sample-rate-ranges", &rateAString)) {
@@ -348,7 +349,7 @@
     }
 
     if (format->findString("bitrate-range", &valueStr)) {
-        std::optional<Range<int>> parsedBitrate = ParseIntRange(valueStr.c_str());
+        std::optional<Range<int32_t>> parsedBitrate = ParseIntRange(valueStr.c_str());
         if (parsedBitrate) {
             bitRates = bitRates.value().intersect(parsedBitrate.value());
         }
@@ -372,10 +373,12 @@
 }
 
 bool AudioCapabilities::supportsFormat(const sp<AMessage> &format) {
-    int32_t sampleRate;
-    format->findInt32(KEY_SAMPLE_RATE, &sampleRate);
-    int32_t channels;
-    format->findInt32(KEY_CHANNEL_COUNT, &channels);
+    int32_t sampleRateValue;
+    std::optional<int> sampleRate = format->findInt32(KEY_SAMPLE_RATE, &sampleRateValue)
+            ? std::make_optional<int>(sampleRateValue) : std::nullopt;
+    int32_t channelsValue;
+    std::optional<int> channels = format->findInt32(KEY_CHANNEL_COUNT, &channelsValue)
+            ? std::make_optional<int>(channelsValue) : std::nullopt;
 
     if (!supports(sampleRate, channels)) {
         return false;
diff --git a/media/libmedia/CodecCapabilities.cpp b/media/libmedia/CodecCapabilities.cpp
index 5bed1c4..87eb4bc 100644
--- a/media/libmedia/CodecCapabilities.cpp
+++ b/media/libmedia/CodecCapabilities.cpp
@@ -25,7 +25,7 @@
 
 namespace android {
 
-bool CodecCapabilities::SupportsBitrate(Range<int> bitrateRange,
+bool CodecCapabilities::SupportsBitrate(Range<int32_t> bitrateRange,
         const sp<AMessage> &format) {
     // consider max bitrate over average bitrate for support
     int32_t maxBitrate = 0;
diff --git a/media/libmedia/include/media/AudioCapabilities.h b/media/libmedia/include/media/AudioCapabilities.h
index 2bc3335..d2bd9d7 100644
--- a/media/libmedia/include/media/AudioCapabilities.h
+++ b/media/libmedia/include/media/AudioCapabilities.h
@@ -37,7 +37,7 @@
     /**
      * Returns the range of supported bitrates in bits/second.
      */
-    const Range<int>& getBitrateRange() const;
+    const Range<int32_t>& getBitrateRange() const;
 
     /**
      * Returns the array of supported sample rates if the codec
@@ -110,7 +110,7 @@
     std::string mMediaType;
     std::vector<ProfileLevel> mProfileLevels;
 
-    Range<int> mBitrateRange;
+    Range<int32_t> mBitrateRange;
 
     std::vector<int> mSampleRates;
     std::vector<Range<int>> mSampleRateRanges;
@@ -121,13 +121,13 @@
     void init(std::string mediaType, std::vector<ProfileLevel> profLevs,
             const sp<AMessage> &format);
     void initWithPlatformLimits();
-    bool supports(int sampleRate, int inputChannels);
+    bool supports(std::optional<int> sampleRate, std::optional<int> inputChannels);
     void limitSampleRates(std::vector<int> rates);
     void createDiscreteSampleRates();
     void limitSampleRates(std::vector<Range<int>> rateRanges);
     void applyLevelLimits();
     void applyLimits(const std::vector<Range<int>> &inputChannels,
-            const std::optional<Range<int>> &bitRates);
+            const std::optional<Range<int32_t>> &bitRates);
     void parseFromInfo(const sp<AMessage> &format);
 
     friend struct CodecCapabilities;
diff --git a/media/libmedia/include/media/CodecCapabilities.h b/media/libmedia/include/media/CodecCapabilities.h
index 9d1c4ea..570c8b5 100644
--- a/media/libmedia/include/media/CodecCapabilities.h
+++ b/media/libmedia/include/media/CodecCapabilities.h
@@ -34,7 +34,7 @@
 
 struct CodecCapabilities {
 
-    static bool SupportsBitrate(Range<int> bitrateRange,
+    static bool SupportsBitrate(Range<int32_t> bitrateRange,
             const sp<AMessage> &format);
 
     /**
diff --git a/media/libmedia/include/media/CodecCapabilitiesUtils.h b/media/libmedia/include/media/CodecCapabilitiesUtils.h
index 2bf822a..89a452c 100644
--- a/media/libmedia/include/media/CodecCapabilitiesUtils.h
+++ b/media/libmedia/include/media/CodecCapabilitiesUtils.h
@@ -118,7 +118,7 @@
     T upper_;
 };
 
-static const Range<int> POSITIVE_INTEGERS = Range<int>(1, INT_MAX);
+static const Range<int32_t> POSITIVE_INT32 = Range<int32_t>(1, INT32_MAX);
 
 // found stuff that is not supported by framework (=> this should not happen)
 constexpr int ERROR_CAPABILITIES_UNRECOGNIZED   = (1 << 0);
diff --git a/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp b/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp
index 89c9739..02e43a4 100644
--- a/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp
+++ b/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp
@@ -64,7 +64,7 @@
 };
 
 TEST_F(AudioCapsAacTest, AudioCaps_Aac_Bitrate) {
-    const Range<int>& bitrateRange = audioCaps->getBitrateRange();
+    const Range<int32_t>& bitrateRange = audioCaps->getBitrateRange();
     EXPECT_EQ(bitrateRange.lower(), 8000) << "bitrate range1 does not match. lower: "
             << bitrateRange.lower();
     EXPECT_EQ(bitrateRange.upper(), 510000) << "bitrate range1 does not match. upper: "
@@ -114,7 +114,7 @@
 };
 
 TEST_F(AudioCapsRawTest, AudioCaps_Raw_Bitrate) {
-    const Range<int>& bitrateRange = audioCaps->getBitrateRange();
+    const Range<int32_t>& bitrateRange = audioCaps->getBitrateRange();
     EXPECT_EQ(bitrateRange.lower(), 1);
     EXPECT_EQ(bitrateRange.upper(), 10000000);
 }
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 761137e..b267c08 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1816,8 +1816,6 @@
         const sp<AudioSystem::AudioDeviceCallback>& deviceCallback)
     : mCachedPlayerIId(PLAYER_PIID_INVALID),
       mCallback(NULL),
-      mCallbackCookie(NULL),
-      mCallbackData(NULL),
       mStreamType(AUDIO_STREAM_MUSIC),
       mLeftVolume(1.0),
       mRightVolume(1.0),
@@ -2085,7 +2083,7 @@
 status_t MediaPlayerService::AudioOutput::open(
         uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
         audio_format_t format, int bufferCount,
-        AudioCallback cb, void *cookie,
+        AudioCallback cb, const wp<RefBase>& cookie,
         audio_output_flags_t flags,
         const audio_offload_info_t *offloadInfo,
         bool doNotReconnect,
@@ -2714,7 +2712,7 @@
         return 0;
     }
     size_t actualSize = (*me->mCallback)(
-            me.get(), buffer.data(), buffer.size(), me->mCallbackCookie,
+            me, buffer.data(), buffer.size(), me->mCallbackCookie,
             CB_EVENT_FILL_BUFFER);
 
     // Log when no data is returned from the callback.
@@ -2739,7 +2737,7 @@
         return;
     }
     ALOGV("callbackwrapper: deliver EVENT_STREAM_END");
-    (*me->mCallback)(me.get(), NULL /* buffer */, 0 /* size */,
+    (*me->mCallback)(me, nullptr /* buffer */, 0 /* size */,
             me->mCallbackCookie, CB_EVENT_STREAM_END);
     unlock();
 }
@@ -2753,7 +2751,7 @@
         return;
     }
     ALOGV("callbackwrapper: deliver EVENT_TEAR_DOWN");
-    (*me->mCallback)(me.get(),  NULL /* buffer */, 0 /* size */,
+    (*me->mCallback)(me, nullptr /* buffer */, 0 /* size */,
             me->mCallbackCookie, CB_EVENT_TEAR_DOWN);
     unlock();
 }
@@ -2803,7 +2801,7 @@
 struct CallbackThread : public Thread {
     CallbackThread(const wp<MediaPlayerBase::AudioSink> &sink,
                    MediaPlayerBase::AudioSink::AudioCallback cb,
-                   void *cookie);
+                   const wp<RefBase>& cookie);
 
 protected:
     virtual ~CallbackThread();
@@ -2813,7 +2811,7 @@
 private:
     wp<MediaPlayerBase::AudioSink> mSink;
     MediaPlayerBase::AudioSink::AudioCallback mCallback;
-    void *mCookie;
+    wp<RefBase> mCookie;
     void *mBuffer;
     size_t mBufferSize;
 
@@ -2824,7 +2822,7 @@
 CallbackThread::CallbackThread(
         const wp<MediaPlayerBase::AudioSink> &sink,
         MediaPlayerBase::AudioSink::AudioCallback cb,
-        void *cookie)
+        const wp<RefBase>& cookie)
     : mSink(sink),
       mCallback(cb),
       mCookie(cookie),
@@ -2851,7 +2849,7 @@
     }
 
     size_t actualSize =
-        (*mCallback)(sink.get(), mBuffer, mBufferSize, mCookie,
+        (*mCallback)(sink, mBuffer, mBufferSize, mCookie,
                 MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER);
 
     if (actualSize > 0) {
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index cb544bd..76b7bcf 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -104,14 +104,14 @@
         virtual int64_t         getBufferDurationInUs() const;
         virtual audio_output_flags_t getFlags() const { return mFlags; }
 
-        virtual status_t        open(
+        status_t open(
                 uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
                 audio_format_t format, int bufferCount,
-                AudioCallback cb, void *cookie,
+                AudioCallback cb, const wp<RefBase>& cookie,
                 audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
                 const audio_offload_info_t *offloadInfo = NULL,
                 bool doNotReconnect = false,
-                uint32_t suggestedFrameCount = 0);
+                uint32_t suggestedFrameCount = 0) override;
 
         virtual void            setPlayerIId(int32_t playerIId);
 
@@ -164,7 +164,7 @@
         sp<AudioOutput>         mNextOutput;
         int                     mCachedPlayerIId;
         AudioCallback           mCallback;
-        void *                  mCallbackCookie;
+        wp<RefBase>             mCallbackCookie;
         sp<CallbackData>        mCallbackData;
         audio_stream_type_t     mStreamType;
         audio_attributes_t *    mAttributes;
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index be1aa00..495cf00 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -76,6 +76,33 @@
         virtual ~Listener() {}
     };
 
+    // For the AudioCallback, we provide a WeakWrapper class that wraps a
+    // RefBase-derived object so it can be passed into the AudioCallback as a weak
+    // reference. This is not used by NuPlayer::Renderer, only by the legacy AudioPlayer.
+    template <typename T>
+    class WeakWrapper : public RefBase {
+    public:
+        explicit WeakWrapper(const sp<T>& object)
+                : mObject(object) {}
+
+        sp<T> promote() const {
+            if (mObject == nullptr) return {};
+            return mObject.promote();
+        }
+
+        static sp<T> promoteFromRefBase(const wp<RefBase>& weakWrapper) {
+            if (weakWrapper == nullptr) return {};
+            const auto refBase = weakWrapper.promote();
+            if (!refBase) return {};
+            const auto wrapper = sp<WeakWrapper<T>>::fromExisting(
+                    static_cast<WeakWrapper<T>*>(refBase.get()));
+            return wrapper->promote();
+        }
+
+    private:
+        const wp<T> mObject;
+    };
+
     // AudioSink: abstraction layer for audio output
     class AudioSink : public RefBase {
     public:
@@ -89,8 +116,8 @@
 
         // Callback returns the number of bytes actually written to the buffer.
         typedef size_t (*AudioCallback)(
-                AudioSink *audioSink, void *buffer, size_t size, void *cookie,
-                        cb_event_t event);
+                const sp<AudioSink>& audioSink, void *buffer, size_t size,
+                const wp<RefBase>& cookie, cb_event_t event);
 
         virtual             ~AudioSink() {}
         virtual bool        ready() const = 0; // audio output is open and ready
@@ -117,7 +144,7 @@
                 audio_format_t format=AUDIO_FORMAT_PCM_16_BIT,
                 int bufferCount=DEFAULT_AUDIOSINK_BUFFERCOUNT,
                 AudioCallback cb = NULL,
-                void *cookie = NULL,
+                const wp<RefBase>& cookie = {},
                 audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
                 const audio_offload_info_t *offloadInfo = NULL,
                 bool doNotReconnect = false,
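
Given the WeakWrapper declaration above, here is a hedged sketch of how a legacy AudioSink callback can recover its strongly typed owner from the wp<RefBase> cookie. LegacyPlayer and its fill() method are hypothetical, and the include path is assumed.

    #include <cstddef>

    #include <utils/RefBase.h>
    #include <MediaPlayerInterface.h>  // export path assumed

    // Hypothetical legacy player object; fill() is a placeholder.
    struct LegacyPlayer : public android::RefBase {
        size_t fill(void * /* buffer */, size_t size) { return size; }
    };

    size_t audioSinkCallback(
            const android::sp<android::MediaPlayerBase::AudioSink> & /* audioSink */,
            void *buffer, size_t size, const android::wp<android::RefBase> &cookie,
            android::MediaPlayerBase::AudioSink::cb_event_t event) {
        // Recover the strongly typed player; if it is already gone, report no data.
        android::sp<LegacyPlayer> player =
                android::MediaPlayerBase::WeakWrapper<LegacyPlayer>::promoteFromRefBase(cookie);
        if (player == nullptr
                || event != android::MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER) {
            return 0;
        }
        return player->fill(buffer, size);
    }
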
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 3d4e955..e434a3d 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -102,6 +102,10 @@
     switch (pcmEncoding) {
     case kAudioEncodingPcmFloat:
         return AUDIO_FORMAT_PCM_FLOAT;
+    case kAudioEncodingPcm32bit:
+        return AUDIO_FORMAT_PCM_32_BIT;
+    case kAudioEncodingPcm24bitPacked:
+        return AUDIO_FORMAT_PCM_24_BIT_PACKED;
     case kAudioEncodingPcm16bit:
         return AUDIO_FORMAT_PCM_16_BIT;
     case kAudioEncodingPcm8bit:
@@ -904,12 +908,15 @@
 
 // static
 size_t NuPlayer::Renderer::AudioSinkCallback(
-        MediaPlayerBase::AudioSink * /* audioSink */,
+        const sp<MediaPlayerBase::AudioSink>& /* audioSink */,
         void *buffer,
         size_t size,
-        void *cookie,
+        const wp<RefBase>& cookie,
         MediaPlayerBase::AudioSink::cb_event_t event) {
-    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
+    if (cookie == nullptr) return 0;
+    const auto ref = cookie.promote();
+    if (!ref) return 0;
+    const auto me = static_cast<NuPlayer::Renderer*>(ref.get()); // we already hold a sp.
 
     switch (event) {
         case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
@@ -2025,7 +2032,12 @@
     if (offloadingAudio()) {
         AString mime;
         CHECK(format->findString("mime", &mime));
-        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
+        status_t err = OK;
+        if (audioFormat == AUDIO_FORMAT_PCM_16_BIT) {
+            // audioFormat is still the default PCM 16-bit, so the format message probably
+            // carried no pcm-encoding; fall back to mapping the format from its mime type.
+            err = mapMimeToAudioFormat(audioFormat, mime.c_str());
+        }
 
         if (err != OK) {
             ALOGE("Couldn't map mime \"%s\" to a valid "
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
index 574ad3d..cfa742e 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
@@ -43,8 +43,8 @@
              uint32_t flags = 0);
 
     static size_t AudioSinkCallback(
-            MediaPlayerBase::AudioSink *audioSink,
-            void *data, size_t size, void *me,
+            const sp<MediaPlayerBase::AudioSink>& audioSink,
+            void *data, size_t size, const wp<RefBase>& me,
             MediaPlayerBase::AudioSink::cb_event_t event);
 
     void queueBuffer(
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 3aa0107..cb3c185 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -54,6 +54,7 @@
 #include "include/HevcUtils.h"
 
 #include <com_android_media_editing_flags.h>
+namespace editing_flags = com::android::media::editing::flags;
 
 #ifndef __predict_false
 #define __predict_false(exp) __builtin_expect((exp) != 0, 0)
@@ -160,6 +161,7 @@
     bool isAvc() const { return mIsAvc; }
     bool isHevc() const { return mIsHevc; }
     bool isAv1() const { return mIsAv1; }
+    bool isApv() const { return mIsApv; }
     bool isHeic() const { return mIsHeic; }
     bool isAvif() const { return mIsAvif; }
     bool isHeif() const { return mIsHeif; }
@@ -328,6 +330,7 @@
     bool mIsAvc;
     bool mIsHevc;
     bool mIsAv1;
+    bool mIsApv;
     bool mIsDovi;
     bool mIsAudio;
     bool mIsVideo;
@@ -479,6 +482,7 @@
     void writeAvccBox();
     void writeHvccBox();
     void writeAv1cBox();
+    void writeApvcBox();
     void writeDoviConfigBox();
     void writeUrlBox();
     void writeDrefBox();
@@ -680,6 +684,9 @@
             return "hvc1";
         } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime)) {
             return "av01";
+        } else if (editing_flags::muxer_mp4_enable_apv() &&
+                   !strcasecmp(MEDIA_MIMETYPE_VIDEO_APV, mime)) {
+            return "apv1";
         }
     } else if (!strncasecmp(mime, "application/", 12)) {
         return "mett";
@@ -2266,6 +2273,7 @@
     mIsAvc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
     mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
     mIsAv1 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1);
+    mIsApv = editing_flags::muxer_mp4_enable_apv() && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_APV);
     mIsDovi = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
     mIsAudio = !strncasecmp(mime, "audio/", 6);
     mIsVideo = !strncasecmp(mime, "video/", 6);
@@ -2708,6 +2716,9 @@
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1) ||
                !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
         mMeta->findData(kKeyAV1C, &type, &data, &size);
+    } else if (editing_flags::muxer_mp4_enable_apv() &&
+               !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_APV)) {
+        mMeta->findData(kKeyAPVC, &type, &data, &size);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
         getDolbyVisionProfile();
         if (!mMeta->findData(kKeyAVCC, &type, &data, &size) &&
@@ -3609,7 +3620,7 @@
                             (const uint8_t *)buffer->data()
                                 + buffer->range_offset(),
                             buffer->range_length());
-                } else if (mIsMPEG4 || mIsAv1) {
+                } else if (mIsMPEG4 || mIsAv1 || mIsApv) {
                     err = copyCodecSpecificData((const uint8_t *)buffer->data() + buffer->range_offset(),
                             buffer->range_length());
                 }
@@ -4338,6 +4349,7 @@
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime) ||
+        (editing_flags::muxer_mp4_enable_apv() && !strcasecmp(MEDIA_MIMETYPE_VIDEO_APV, mime)) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_IMAGE_AVIF, mime)) {
@@ -4512,6 +4524,9 @@
         writeHvccBox();
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime)) {
         writeAv1cBox();
+    } else if (editing_flags::muxer_mp4_enable_apv() &&
+               !strcasecmp(MEDIA_MIMETYPE_VIDEO_APV, mime)) {
+        writeApvcBox();
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime)) {
         if (mDoviProfile <= DolbyVisionProfileDvheSt) {
             writeHvccBox();
@@ -5103,6 +5118,15 @@
     mOwner->endBox();  // av1C
 }
 
+void MPEG4Writer::Track::writeApvcBox() {
+    CHECK(mCodecSpecificData);
+    CHECK_GE(mCodecSpecificDataSize, 4u);
+
+    mOwner->beginBox("apvC");
+    mOwner->write(mCodecSpecificData, mCodecSpecificDataSize);
+    mOwner->endBox();  // apvC
+}
+
 void MPEG4Writer::Track::writeDoviConfigBox() {
     CHECK_NE(mDoviProfile, 0u);
 
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 7d47837..8cb1674 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -1189,6 +1189,19 @@
     return new PersistentSurface(bufferProducer, bufferSource);
 }
 
+//static
+status_t MediaCodec::getGloballyAvailableResources(std::vector<GlobalResourceInfo>& resources) {
+    resources.clear();
+    // Make sure codec availability feature is on.
+    if (!android::media::codec::codec_availability()) {
+        return ERROR_UNSUPPORTED;
+    }
+    // TODO: For now this is just an empty function.
+    // The actual implementation should use the component store to query the
+    // available resources from the HAL and fill them into |resources|.
+    return ERROR_UNSUPPORTED;
+}
+
 // GenerateCodecId generates a 64bit Random ID for each codec that is created.
 // The Codec ID is generated as:
 //   - A process-unique random high 32bits
@@ -1295,7 +1308,12 @@
     CHECK_EQ(mState, UNINITIALIZED);
     mResourceManagerProxy->removeClient();
 
-    flushMediametrics();
+    flushMediametrics();  // this clears mMetricsHandle (stashing it in mLastMetricsHandle)
+    // don't keep the last metrics handle around
+    if (mLastMetricsHandle != 0) {
+        mediametrics_delete(mLastMetricsHandle);
+        mLastMetricsHandle = 0;
+    }
 
     // clean any saved metrics info we stored as part of configure()
     if (mConfigureMsg != nullptr) {
@@ -1306,7 +1324,7 @@
     }
 }
 
-// except for in constructor, called from the looper thread (and therefore mutexed)
+// except for in the constructor, called from the looper thread (and therefore not mutexed)
 void MediaCodec::initMediametrics() {
     if (mMetricsHandle == 0) {
         mMetricsHandle = mediametrics_create(kCodecKeyName);
@@ -1332,6 +1350,7 @@
         mInputBufferCounter = 0;
     }
 
+    mSubsessionCount = 0;
     mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
     resetMetricsFields();
 }
@@ -1343,6 +1362,17 @@
     mReliabilityContextMetrics = ReliabilityContextMetrics();
 }
 
+// always called from the looper thread (and therefore not mutexed)
+void MediaCodec::resetSubsessionMetricsFields() {
+    mBytesEncoded = 0;
+    mFramesEncoded = 0;
+    mFramesInput = 0;
+    mBytesInput = 0;
+    mEarliestEncodedPtsUs = INT64_MAX;
+    mLatestEncodedPtsUs = INT64_MIN;
+}
+
+// always called from the looper thread
 void MediaCodec::updateMediametrics() {
     if (mMetricsHandle == 0) {
         ALOGV("no metrics handle found");
@@ -1707,6 +1737,7 @@
     }
 }
 
+// except for in the destructor, called from the looper thread
 void MediaCodec::flushMediametrics() {
     ALOGV("flushMediametrics");
 
@@ -1720,7 +1751,14 @@
         if (mMetricsToUpload && mediametrics_count(mMetricsHandle) > 0) {
             mediametrics_selfRecord(mMetricsHandle);
         }
-        mediametrics_delete(mMetricsHandle);
+        // keep previous metrics handle for subsequent getMetrics() calls.
+        // NOTE: There could be multiple error events, each flushing the metrics.
+        // We keep the last non-empty metrics handle, so getMetrics() in the
+        // next call will get the latest metrics prior to the errors.
+        if (mLastMetricsHandle != 0) {
+            mediametrics_delete(mLastMetricsHandle);
+        }
+        mLastMetricsHandle = mMetricsHandle;
         mMetricsHandle = 0;
     }
     // we no longer have anything pending upload
@@ -1885,7 +1923,10 @@
         });
     }
 
-    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
+    // NOTE: these were erroneously restricted to video encoders, but we want them for all
+    // codecs.
+    if (android::media::codec::provider_->subsession_metrics()
+            || (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder))) {
         mBytesInput += buffer->size();
         mFramesInput++;
     }
@@ -1907,12 +1948,15 @@
     ++mInputBufferCounter;
 }
 
-// when we get a buffer back from the codec
+// when we get a buffer back from the codec, always called from the looper thread
 void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
 
     CHECK_NE(mState, UNINITIALIZED);
 
-    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
+    // NOTE: these were erroneously restricted to video encoders, but we want them for all
+    // codecs.
+    if (android::media::codec::provider_->subsession_metrics()
+            || (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder))) {
         int32_t flags = 0;
         (void) buffer->meta()->findInt32("flags", &flags);
 
@@ -2460,12 +2504,8 @@
             mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
         }
     } else if (mFlags & kFlagIsSecure) {
-        if (android::media::codec::provider_->secure_codecs_require_crypto()) {
-            mErrorLog.log(LOG_TAG, "Crypto or descrambler must be given for secure codec");
-            return INVALID_OPERATION;
-        } else {
-            ALOGW("Crypto or descrambler should be given for secure codec");
-        }
+        // We'll catch this later when we process the buffers.
+        ALOGW("Crypto or descrambler should be given for secure codec");
     }
 
     if (mConfigureMsg != nullptr) {
@@ -2526,6 +2566,31 @@
     return err;
 }
 
+status_t MediaCodec::getRequiredResources(std::vector<InstanceResourceInfo>& resources) {
+    resources.clear();
+    // Make sure codec availability feature is on.
+    if (!android::media::codec::codec_availability()) {
+        return ERROR_UNSUPPORTED;
+    }
+    // Make sure that the codec was configured already.
+    if (mState != CONFIGURED && mState != STARTING && mState != STARTED &&
+        mState != FLUSHING && mState != FLUSHED) {
+        ALOGE("Codec wasn't configured yet!");
+        return INVALID_OPERATION;
+    }
+
+    if (!mRequiredResourceInfo.empty()) {
+        resources = mRequiredResourceInfo;
+        return OK;
+    }
+
+    // TODO: For now this is just an empty function.
+    // The actual implementation should use the component interface
+    // (for example, through mCodec->getRequiredDeviceResources) to query the
+    // required resources for this configuration and fill them into |resources|.
+    return ERROR_UNSUPPORTED;
+}
+
 // Media Format Shaping support
 //
 
@@ -3614,6 +3679,10 @@
         updateMediametrics();
         results = mediametrics_dup(mMetricsHandle);
         updateEphemeralMediametrics(results);
+    } else if (mLastMetricsHandle != 0) {
+        // After error, mMetricsHandle is cleared, but we keep the last
+        // metrics around so that it can be queried by getMetrics().
+        results = mediametrics_dup(mLastMetricsHandle);
     } else {
         results = mediametrics_dup(mMetricsHandle);
     }
@@ -3883,6 +3952,7 @@
     return true;
 }
 
+// always called from the looper thread
 MediaCodec::DequeueOutputResult MediaCodec::handleDequeueOutputBuffer(
         const sp<AReplyToken> &replyID, bool newRequest) {
     if (!isExecuting()) {
@@ -3938,6 +4008,9 @@
 
         response->setInt32("flags", flags);
 
+        // NOTE: we must account the stats for an output buffer only after we
+        // already handled a potential output format change that could have
+        // started a new subsession.
         statsBufferReceived(timeUs, buffer);
 
         response->postReply(replyID);
@@ -5842,6 +5915,7 @@
     }
 }
 
+// always called from the looper thread
 void MediaCodec::handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer) {
     sp<AMessage> format = buffer->format();
     if (mOutputFormat == format) {
@@ -5925,6 +5999,24 @@
             }
         }
     }
+
+    // Update the width and the height.
+    int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
+    bool newSubsession = false;
+    if (android::media::codec::provider_->subsession_metrics()
+            && mOutputFormat->findInt32("width", &width)
+            && mOutputFormat->findInt32("height", &height)
+            && (width != mWidth || height != mHeight)) {
+        // consider a new subsession if the width or height changes.
+        newSubsession = true;
+    }
+    // TODO: properly detect new audio subsession
+
+    // Only consider a new subsession if we already have output (from a previous subsession).
+    if (newSubsession && mMetricsToUpload && mBytesEncoded > 0) {
+        handleStartingANewSubsession();
+    }
+
     if (mFlags & kFlagIsAsync) {
         onOutputFormatChanged();
     } else {
@@ -5932,8 +6024,6 @@
         postActivityNotificationIfPossible();
     }
 
-    // Update the width and the height.
-    int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
     bool resolutionChanged = false;
     if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
         mWidth = right - left + 1;
@@ -5960,6 +6050,35 @@
     updateHdrMetrics(false /* isConfig */);
 }
 
+// always called from the looper thread (and therefore not mutexed)
+void MediaCodec::handleStartingANewSubsession() {
+    // create a new metrics item for the subsession with the new resolution.
+    // TODO: properly account input counts for the previous and the new
+    // subsessions. We only find out that a new subsession started from the
+    // output format, but by that time we already accounted the input counts
+    // to the previous subsession.
+    flushMediametrics(); // this deletes mMetricsHandle, but stores it in mLastMetricsHandle
+
+    // hence mLastMetricsHandle has the metrics item for the previous subsession.
+    if ((mFlags & kFlagIsAsync) && mCallback != nullptr) {
+        sp<AMessage> msg = mCallback->dup();
+        msg->setInt32("callbackID", CB_METRICS_FLUSHED);
+        std::unique_ptr<mediametrics::Item> flushedMetrics(
+                mediametrics::Item::convert(mediametrics_dup(mLastMetricsHandle)));
+        msg->setObject("metrics", new WrapperObject<std::unique_ptr<mediametrics::Item>>(
+                std::move(flushedMetrics)));
+        msg->post();
+    }
+
+    // reuse/continue old metrics item for the new subsession.
+    mMetricsHandle = mediametrics_dup(mLastMetricsHandle);
+    mMetricsToUpload = true;
+    // TODO: configured width/height for the new subsession should be the
+    // previous width/height.
+    mSubsessionCount++;
+    resetSubsessionMetricsFields();
+}
+
 void MediaCodec::extractCSD(const sp<AMessage> &format) {
     mCSD.clear();
 
@@ -6210,6 +6329,12 @@
     CryptoPlugin::SubSample ss;
     CryptoPlugin::Pattern pattern;
 
+    if (android::media::codec::provider_->secure_codecs_require_crypto()
+            && (mFlags & kFlagIsSecure) && !hasCryptoOrDescrambler()) {
+        mErrorLog.log(LOG_TAG, "Crypto or descrambler must be given for secure codec");
+        return INVALID_OPERATION;
+    }
+
     if (msg->findSize("size", &size)) {
         if (hasCryptoOrDescrambler()) {
             ss.mNumBytesOfClearData = size;
@@ -6954,6 +7079,18 @@
     }
 }
 
+void MediaCodec::onRequiredResourcesChanged(
+        const std::vector<InstanceResourceInfo>& resourceInfo) {
+    mRequiredResourceInfo = resourceInfo;
+    // Make sure codec availability feature is on.
+    if (mCallback != nullptr && android::media::codec::codec_availability()) {
+        // Post the callback
+        sp<AMessage> msg = mCallback->dup();
+        msg->setInt32("callbackID", CB_REQUIRED_RESOURCES_CHANGED);
+        msg->post();
+    }
+}
+
 void MediaCodec::postActivityNotificationIfPossible() {
     if (mActivityNotify == NULL) {
         return;
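
When a resolution change starts a new metrics subsession, async clients receive the new CB_METRICS_FLUSHED callback carrying the flushed item under the "metrics" key (see handleStartingANewSubsession() above). A rough sketch of how a client-side handler for the codec's callback message might consume it; onCodecNotify() is an illustrative name:

    // Sketch only: a hypothetical client-side handler for the async callback message.
    void onCodecNotify(const sp<AMessage> &msg) {
        int32_t cbID = 0;
        if (!msg->findInt32("callbackID", &cbID)) {
            return;
        }
        if (cbID == MediaCodec::CB_METRICS_FLUSHED) {
            sp<RefBase> obj;
            if (msg->findObject("metrics", &obj) && obj != nullptr) {
                // "metrics" holds the flushed subsession item, wrapped as documented
                // for CB_METRICS_FLUSHED in MediaCodec.h; hand it to the app's
                // metrics pipeline here.
            }
        }
        // ... other callback IDs ...
    }
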
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index 137b282..72a2551 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -95,6 +95,14 @@
             <Feature name="adaptive-playback" />
             <Attribute name="software-codec" />
         </MediaCodec>
+        <MediaCodec name="c2.android.apv.decoder" type="video/apv" enabled="false" minsdk="36" variant="!slow-cpu">
+            <Limit name="size" min="16x16" max="1920x1920"/>
+            <Limit name="alignment" value="2x2"/>
+            <Limit name="bitrate" range="1-240000000"/>
+            <Limit name="block-size" value="16x16" />
+            <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+            <Attribute name="software-codec"/>
+        </MediaCodec>
     </Decoders>
 
     <Encoders>
@@ -160,5 +168,13 @@
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="bitrate-modes" value="VBR,CBR" />
         </MediaCodec>
+        <MediaCodec name="c2.android.apv.encoder" type="video/apv" enabled="false" minsdk="36" variant="!slow-cpu">
+            <Limit name="size" min="2x2" max="1920x1920" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+            <Limit name="bitrate" range="1-240000000" />
+            <Feature name="bitrate-modes" value="VBR,CBR" />
+        </MediaCodec>
     </Encoders>
 </Included>
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index c18ab94..4601831 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -255,6 +255,14 @@
             <Feature name="adaptive-playback" />
             <Attribute name="software-codec" />
         </MediaCodec>
+        <MediaCodec name="c2.android.apv.decoder" type="video/apv" enabled="false" minsdk="36" variant="!slow-cpu">
+            <Limit name="size" min="16x16" max="1920x1920"/>
+            <Limit name="alignment" value="2x2"/>
+            <Limit name="bitrate" range="1-240000000"/>
+            <Limit name="block-size" value="16x16" />
+            <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+            <Attribute name="software-codec"/>
+        </MediaCodec>
     </Decoders>
     <Encoders>
         <MediaCodec name="c2.android.aac.encoder" type="audio/mp4a-latm">
@@ -409,5 +417,14 @@
             <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
+        <MediaCodec name="c2.android.apv.encoder" type="video/apv" enabled="false" minsdk="36" variant="!slow-cpu">
+            <Limit name="size" min="2x2" max="1920x1920" />
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+            <Limit name="bitrate" range="1-240000000" />
+            <Feature name="bitrate-modes" value="VBR,CBR" />
+            <Attribute name="software-codec" />
+        </MediaCodec>
     </Encoders>
 </MediaCodecs>
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 7169b1e..df1ebd7 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -123,6 +123,18 @@
         CB_RESOURCE_RECLAIMED = 5,
         CB_CRYPTO_ERROR = 6,
         CB_LARGE_FRAME_OUTPUT_AVAILABLE = 7,
+
+        /** Callback ID for when the metrics for this codec have been flushed
+         * due to the start of a new subsession. The associated AMessage will
+         * contain an sp<WrapperObject<std::unique_ptr<mediametrics::Item>>>
+         * object at the "metrics" key.
+         */
+        CB_METRICS_FLUSHED = 8,
+
+        /** Callback ID to notify a change in the resource requirements
+         * of the codec component.
+         */
+        CB_REQUIRED_RESOURCES_CHANGED = 9,
     };
 
     static const pid_t kNoPid = -1;
@@ -142,6 +154,73 @@
 
     static sp<PersistentSurface> CreatePersistentInputSurface();
 
+    /**
+     * Abstraction for the Global Codec resources.
+     * This encapsulates all the available codec resources on the device.
+     */
+    struct GlobalResourceInfo {
+        /**
+         * Name of the Resource type.
+         */
+        std::string mName;
+        /**
+         * Total count/capacity of resources of this type.
+         */
+        int mCapacity;
+        /**
+         * Available count of this resource type.
+         */
+        int mAvailable;
+
+        GlobalResourceInfo(const std::string& name, int capacity, int available) :
+                mName(name),
+                mCapacity(capacity),
+                mAvailable(available) {}
+
+        GlobalResourceInfo(const GlobalResourceInfo& info) :
+                mName(info.mName),
+                mCapacity(info.mCapacity),
+                mAvailable(info.mAvailable) {}
+    };
+
+    /**
+     * Abstraction for the resources associated with a codec instance.
+     * This encapsulates the required codec resources for a configured codec instance.
+     */
+    struct InstanceResourceInfo {
+        /**
+         * Name of the Resource type.
+         */
+        std::string mName;
+        /**
+         * Required resource count of this type.
+         */
+        int mStaticCount;
+        /**
+         * Per frame resource requirement of this resource type.
+         */
+        int mPerFrameCount;
+
+        InstanceResourceInfo(const std::string& name, int staticCount, int perFrameCount) :
+                mName(name),
+                mStaticCount(staticCount),
+                mPerFrameCount(perFrameCount) {}
+
+        InstanceResourceInfo(const InstanceResourceInfo& info) :
+                mName(info.mName),
+                mStaticCount(info.mStaticCount),
+                mPerFrameCount(info.mPerFrameCount) {}
+    };
+
+    /**
+     * Get a list of globally available device codec resources.
+     *
+     * It will return INVALID_OPERATION if:
+     *  - the HAL does not implement the codec availability API, or
+     *  - the codec_availability feature flag isn't defined.
+     */
+    static status_t getGloballyAvailableResources(std::vector<GlobalResourceInfo>& resources);
+
     status_t configure(
             const sp<AMessage> &format,
             const sp<Surface> &nativeWindow,
@@ -155,6 +234,19 @@
             const sp<IDescrambler> &descrambler,
             uint32_t flags);
 
+    /**
+     * Get a list of required codec resources.
+     *
+     * This may only be called after configuring the codec.
+     *
+     * Calling this before the codec has been configured results in an
+     * INVALID_OPERATION error code.
+     * It will also return INVALID_OPERATION if:
+     *  - the HAL does not implement the codec availability API, or
+     *  - the codec_availability feature flag isn't defined.
+     */
+    status_t getRequiredResources(std::vector<InstanceResourceInfo>& resources);
+
     status_t releaseCrypto();
 
     status_t setCallback(const sp<AMessage> &callback);
@@ -484,12 +576,21 @@
 
     Mutex mMetricsLock;
     mediametrics_handle_t mMetricsHandle = 0;
+    mediametrics_handle_t mLastMetricsHandle = 0; // only accessed from the looper or destructor
     bool mMetricsToUpload = false;
     nsecs_t mLifetimeStartNs = 0;
     void initMediametrics();
     void updateMediametrics();
     void flushMediametrics();
     void resetMetricsFields();
+
+    // Reset the metrics fields for a new subsession.
+    void resetSubsessionMetricsFields();
+
+    // Start a new subsession (for metrics). This includes flushing the current
+    // metrics, notifying the client and resetting the session fields.
+    void handleStartingANewSubsession();
+
     void updateEphemeralMediametrics(mediametrics_handle_t item);
     void updateLowLatency(const sp<AMessage> &msg);
     void updateCodecImportance(const sp<AMessage>& msg);
@@ -551,6 +652,7 @@
         int32_t setOutputSurfaceCount;
         int32_t resolutionChangeCount;
     } mReliabilityContextMetrics;
+    int32_t mSubsessionCount;
 
     // initial create parameters
     AString mInitName;
@@ -671,6 +773,7 @@
     void onCryptoError(const sp<AMessage> &msg);
     void onError(status_t err, int32_t actionCode, const char *detail = NULL);
     void onOutputFormatChanged();
+    void onRequiredResourcesChanged(const std::vector<InstanceResourceInfo>& resourceInfo);
 
     status_t onSetParameters(const sp<AMessage> &params);
 
@@ -770,6 +873,8 @@
     friend class MediaTestHelper;
 
     CodecErrorLog mErrorLog;
+    // Required resource info for this codec.
+    std::vector<InstanceResourceInfo> mRequiredResourceInfo;
 
     DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
 };
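
A short usage sketch for the two new resource queries; until the HAL-backed implementation lands, both calls return ERROR_UNSUPPORTED (see the TODOs in MediaCodec.cpp above), so callers should treat the data as optional. "codec" here is assumed to be an already-configured sp<MediaCodec>:

    // Sketch only.
    std::vector<MediaCodec::GlobalResourceInfo> global;
    if (MediaCodec::getGloballyAvailableResources(global) == OK) {
        // global[i].mName / mCapacity / mAvailable describe device-wide capacity.
    }

    std::vector<MediaCodec::InstanceResourceInfo> required;
    if (codec->getRequiredResources(required) == OK) {
        // required[i].mName / mStaticCount / mPerFrameCount describe this instance's needs.
    }
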
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index b1cf665..8f2f162 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -586,6 +586,139 @@
     }
 }
 
+// APV ProfileLevel
+inline constexpr int32_t APVProfile422_10           = 0x01;
+inline constexpr int32_t APVProfile422_10HDR10      = 0x1000;
+inline constexpr int32_t APVProfile422_10HDR10Plus  = 0x2000;
+
+inline static const char *asString_APVProfile(int32_t i, const char *def = "??") {
+    switch (i) {
+        case APVProfile422_10:           return "APVProfile422_10";
+        case APVProfile422_10HDR10:      return "APVProfile422_10HDR10";
+        case APVProfile422_10HDR10Plus:  return "APVProfile422_10HDR10Plus";
+        default:                        return def;
+    }
+}
+
+inline constexpr int32_t APVLevel1Band0 = 0x101;
+inline constexpr int32_t APVLevel1Band1 = 0x102;
+inline constexpr int32_t APVLevel1Band2 = 0x104;
+inline constexpr int32_t APVLevel1Band3 = 0x108;
+inline constexpr int32_t APVLevel11Band0 = 0x201;
+inline constexpr int32_t APVLevel11Band1 = 0x202;
+inline constexpr int32_t APVLevel11Band2 = 0x204;
+inline constexpr int32_t APVLevel11Band3 = 0x208;
+inline constexpr int32_t APVLevel2Band0 = 0x401;
+inline constexpr int32_t APVLevel2Band1 = 0x402;
+inline constexpr int32_t APVLevel2Band2 = 0x404;
+inline constexpr int32_t APVLevel2Band3 = 0x408;
+inline constexpr int32_t APVLevel21Band0 = 0x801;
+inline constexpr int32_t APVLevel21Band1 = 0x802;
+inline constexpr int32_t APVLevel21Band2 = 0x804;
+inline constexpr int32_t APVLevel21Band3 = 0x808;
+inline constexpr int32_t APVLevel3Band0 = 0x1001;
+inline constexpr int32_t APVLevel3Band1 = 0x1002;
+inline constexpr int32_t APVLevel3Band2 = 0x1004;
+inline constexpr int32_t APVLevel3Band3 = 0x1008;
+inline constexpr int32_t APVLevel31Band0 = 0x2001;
+inline constexpr int32_t APVLevel31Band1 = 0x2002;
+inline constexpr int32_t APVLevel31Band2 = 0x2004;
+inline constexpr int32_t APVLevel31Band3 = 0x2008;
+inline constexpr int32_t APVLevel4Band0 = 0x4001;
+inline constexpr int32_t APVLevel4Band1 = 0x4002;
+inline constexpr int32_t APVLevel4Band2 = 0x4004;
+inline constexpr int32_t APVLevel4Band3 = 0x4008;
+inline constexpr int32_t APVLevel41Band0 = 0x8001;
+inline constexpr int32_t APVLevel41Band1 = 0x8002;
+inline constexpr int32_t APVLevel41Band2 = 0x8004;
+inline constexpr int32_t APVLevel41Band3 = 0x8008;
+inline constexpr int32_t APVLevel5Band0 = 0x10001;
+inline constexpr int32_t APVLevel5Band1 = 0x10002;
+inline constexpr int32_t APVLevel5Band2 = 0x10004;
+inline constexpr int32_t APVLevel5Band3 = 0x10008;
+inline constexpr int32_t APVLevel51Band0 = 0x20001;
+inline constexpr int32_t APVLevel51Band1 = 0x20002;
+inline constexpr int32_t APVLevel51Band2 = 0x20004;
+inline constexpr int32_t APVLevel51Band3 = 0x20008;
+inline constexpr int32_t APVLevel6Band0 = 0x40001;
+inline constexpr int32_t APVLevel6Band1 = 0x40002;
+inline constexpr int32_t APVLevel6Band2 = 0x40004;
+inline constexpr int32_t APVLevel6Band3 = 0x40008;
+inline constexpr int32_t APVLevel61Band0 = 0x80001;
+inline constexpr int32_t APVLevel61Band1 = 0x80002;
+inline constexpr int32_t APVLevel61Band2 = 0x80004;
+inline constexpr int32_t APVLevel61Band3 = 0x80008;
+inline constexpr int32_t APVLevel7Band0 = 0x100001;
+inline constexpr int32_t APVLevel7Band1 = 0x100002;
+inline constexpr int32_t APVLevel7Band2 = 0x100004;
+inline constexpr int32_t APVLevel7Band3 = 0x100008;
+inline constexpr int32_t APVLevel71Band0 = 0x200001;
+inline constexpr int32_t APVLevel71Band1 = 0x200002;
+inline constexpr int32_t APVLevel71Band2 = 0x200004;
+inline constexpr int32_t APVLevel71Band3 = 0x200008;
+
+inline static const char *asString_APVBandLevel(int32_t i, const char *def = "??") {
+    switch (i) {
+        case APVLevel1Band0:     return "Level 1, Band 0";
+        case APVLevel1Band1:     return "Level 1, Band 1";
+        case APVLevel1Band2:     return "Level 1, Band 2";
+        case APVLevel1Band3:     return "Level 1, Band 3";
+        case APVLevel11Band0:     return "Level 1.1, Band 0";
+        case APVLevel11Band1:     return "Level 1.1, Band 1";
+        case APVLevel11Band2:     return "Level 1.1, Band 2";
+        case APVLevel11Band3:     return "Level 1.1, Band 3";
+        case APVLevel2Band0:     return "Level 2, Band 0";
+        case APVLevel2Band1:     return "Level 2, Band 1";
+        case APVLevel2Band2:     return "Level 2, Band 2";
+        case APVLevel2Band3:     return "Level 2, Band 3";
+        case APVLevel21Band0:     return "Level 2.1, Band 0";
+        case APVLevel21Band1:     return "Level 2.1, Band 1";
+        case APVLevel21Band2:     return "Level 2.1, Band 2";
+        case APVLevel21Band3:     return "Level 2.1, Band 3";
+        case APVLevel3Band0:     return "Level 3, Band 0";
+        case APVLevel3Band1:     return "Level 3, Band 1";
+        case APVLevel3Band2:     return "Level 3, Band 2";
+        case APVLevel3Band3:     return "Level 3, Band 3";
+        case APVLevel31Band0:     return "Level 3.1, Band 0";
+        case APVLevel31Band1:     return "Level 3.1, Band 1";
+        case APVLevel31Band2:     return "Level 3.1, Band 2";
+        case APVLevel31Band3:     return "Level 3.1, Band 3";
+        case APVLevel4Band0:     return "Level 4, Band 0";
+        case APVLevel4Band1:     return "Level 4, Band 1";
+        case APVLevel4Band2:     return "Level 4, Band 2";
+        case APVLevel4Band3:     return "Level 4, Band 3";
+        case APVLevel41Band0:     return "Level 4.1, Band 0";
+        case APVLevel41Band1:     return "Level 4.1, Band 1";
+        case APVLevel41Band2:     return "Level 4.1, Band 2";
+        case APVLevel41Band3:     return "Level 4.1, Band 3";
+        case APVLevel5Band0:     return "Level 5, Band 0";
+        case APVLevel5Band1:     return "Level 5, Band 1";
+        case APVLevel5Band2:     return "Level 5, Band 2";
+        case APVLevel5Band3:     return "Level 5, Band 3";
+        case APVLevel51Band0:     return "Level 5.1, Band 0";
+        case APVLevel51Band1:     return "Level 5.1, Band 1";
+        case APVLevel51Band2:     return "Level 5.1, Band 2";
+        case APVLevel51Band3:     return "Level 5.1, Band 3";
+        case APVLevel6Band0:     return "Level 6, Band 0";
+        case APVLevel6Band1:     return "Level 6, Band 1";
+        case APVLevel6Band2:     return "Level 6, Band 2";
+        case APVLevel6Band3:     return "Level 6, Band 3";
+        case APVLevel61Band0:     return "Level 6.1, Band 0";
+        case APVLevel61Band1:     return "Level 6.1, Band 1";
+        case APVLevel61Band2:     return "Level 6.1, Band 2";
+        case APVLevel61Band3:     return "Level 6.1, Band 3";
+        case APVLevel7Band0:     return "Level 7, Band 0";
+        case APVLevel7Band1:     return "Level 7, Band 1";
+        case APVLevel7Band2:     return "Level 7, Band 2";
+        case APVLevel7Band3:     return "Level 7, Band 3";
+        case APVLevel71Band0:     return "Level 7.1, Band 0";
+        case APVLevel71Band1:     return "Level 7.1, Band 1";
+        case APVLevel71Band2:     return "Level 7.1, Band 2";
+        case APVLevel71Band3:     return "Level 7.1, Band 3";
+        default:                return def;
+    }
+}
+
 inline constexpr int32_t BITRATE_MODE_CBR = 2;
 inline constexpr int32_t BITRATE_MODE_CBR_FD = 3;
 inline constexpr int32_t BITRATE_MODE_CQ = 0;
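
The APV level/band constants above follow a simple one-hot scheme: the band occupies a single bit in the low nibble and the level a single bit starting at bit 8. A hypothetical helper (not part of this change) makes the encoding explicit:

    // Sketch only: levelIndex 0 = Level 1, 1 = Level 1.1, ..., 13 = Level 7.1; band is 0..3.
    constexpr int32_t makeApvBandLevel(int levelIndex, int band) {
        return (0x100 << levelIndex) | (1 << band);
    }
    static_assert(makeApvBandLevel(0, 0) == APVLevel1Band0);
    static_assert(makeApvBandLevel(3, 2) == APVLevel21Band2);
    static_assert(makeApvBandLevel(13, 3) == APVLevel71Band3);
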
@@ -654,6 +787,7 @@
 inline constexpr int32_t COLOR_FormatYUV444Flexible          = 0x7F444888;
 inline constexpr int32_t COLOR_FormatYUV444Interleaved       = 29;
 inline constexpr int32_t COLOR_FormatYUVP010                 = 54;
+inline constexpr int32_t COLOR_FormatYUVP210                 = 60;
 inline constexpr int32_t COLOR_QCOM_FormatYUV420SemiPlanar   = 0x7fa30c00;
 inline constexpr int32_t COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100;
 
@@ -712,6 +846,7 @@
         case COLOR_FormatYUV444Flexible:            return "YUV444Flexible";
         case COLOR_FormatYUV444Interleaved:         return "YUV444Interleaved";
         case COLOR_FormatYUVP010:                   return "YUVP010";
+        case COLOR_FormatYUVP210:                   return "YUVP210";
         case COLOR_QCOM_FormatYUV420SemiPlanar:     return "QCOM_YUV420SemiPlanar";
         case COLOR_TI_FormatYUV420PackedSemiPlanar: return "TI_YUV420PackedSemiPlanar";
         default:                                    return def;
@@ -731,6 +866,7 @@
 inline constexpr char MIMETYPE_VIDEO_VP8[] = "video/x-vnd.on2.vp8";
 inline constexpr char MIMETYPE_VIDEO_VP9[] = "video/x-vnd.on2.vp9";
 inline constexpr char MIMETYPE_VIDEO_AV1[] = "video/av01";
+inline constexpr char MIMETYPE_VIDEO_APV[] = "video/apv";
 inline constexpr char MIMETYPE_VIDEO_AVC[] = "video/avc";
 inline constexpr char MIMETYPE_VIDEO_HEVC[] = "video/hevc";
 inline constexpr char MIMETYPE_VIDEO_MPEG4[] = "video/mp4v-es";
@@ -847,6 +983,7 @@
 inline constexpr char KEY_MAX_PTS_GAP_TO_ENCODER[] = "max-pts-gap-to-encoder";
 inline constexpr char KEY_MAX_WIDTH[] = "max-width";
 inline constexpr char KEY_MIME[] = "mime";
+inline constexpr char KEY_NUM_SLOTS[] = "num-slots";
 inline constexpr char KEY_OPERATING_RATE[] = "operating-rate";
 inline constexpr char KEY_OUTPUT_REORDER_DEPTH[] = "output-reorder-depth";
 inline constexpr char KEY_PCM_ENCODING[] = "pcm-encoding";
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index a7d2eb9..9dce55b 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -63,6 +63,7 @@
     kKeyDVVC              = 'dvvc',  // raw data
     kKeyDVWC              = 'dvwc',  // raw data
     kKeyAV1C              = 'av1c',  // raw data
+    kKeyAPVC              = 'apvc',  // raw data
     kKeyThumbnailHVCC     = 'thvc',  // raw data
     kKeyThumbnailAV1C     = 'tav1',  // raw data
     kKeyD263              = 'd263',  // raw data
diff --git a/media/libstagefright/xmlparser/vts/Android.bp b/media/libstagefright/xmlparser/vts/Android.bp
index 1e36c8f..527230c 100644
--- a/media/libstagefright/xmlparser/vts/Android.bp
+++ b/media/libstagefright/xmlparser/vts/Android.bp
@@ -15,6 +15,7 @@
 //
 
 package {
+    default_team: "trendy_team_android_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
diff --git a/media/module/aidlpersistentsurface/Android.bp b/media/module/aidlpersistentsurface/Android.bp
index 5c1a010..8b273f3 100644
--- a/media/module/aidlpersistentsurface/Android.bp
+++ b/media/module/aidlpersistentsurface/Android.bp
@@ -3,6 +3,9 @@
     unstable: true,
     local_include_dir: "aidl",
     min_sdk_version: "29",
+    defaults: [
+        "android.hardware.graphics.common-latest"
+    ],
     srcs: [
         "aidl/android/media/AidlColorAspects.aidl",
         "aidl/android/media/IAidlGraphicBufferSource.aidl",
@@ -12,9 +15,6 @@
     headers: [
         "HardwareBuffer_aidl",
     ],
-    imports: [
-        "android.hardware.graphics.common-V5",
-    ],
     include_dirs: [
         "frameworks/native/aidl/gui",
     ],
@@ -41,6 +41,9 @@
 cc_library_shared {
     name: "libstagefright_graphicbuffersource_aidl",
     min_sdk_version: "29",
+    defaults: [
+        "android.hardware.graphics.common-ndk_shared",
+    ],
     srcs: [
         "AidlGraphicBufferSource.cpp",
         "wrapper/WAidlGraphicBufferSource.cpp",
@@ -56,7 +59,6 @@
         "media_plugin_headers",
     ],
     shared_libs: [
-        "android.hardware.graphics.common-V5-ndk",
         "graphicbuffersource-aidl-ndk",
         "libbinder_ndk",
         "libcutils",
diff --git a/media/module/codecs/amrnb/common/Android.bp b/media/module/codecs/amrnb/common/Android.bp
index 0bc6ed2..35937cb 100644
--- a/media/module/codecs/amrnb/common/Android.bp
+++ b/media/module/codecs/amrnb/common/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_codec_framework",
     default_applicable_licenses: [
         "frameworks_av_media_codecs_amrnb_common_license",
     ],
@@ -42,8 +43,8 @@
         "src/gains_tbl.cpp",
         "src/gc_pred.cpp",
         "src/gmed_n.cpp",
-        "src/grid_tbl.cpp",
         "src/gray_tbl.cpp",
+        "src/grid_tbl.cpp",
         "src/int_lpc.cpp",
         "src/inv_sqrt.cpp",
         "src/inv_sqrt_tbl.cpp",
@@ -91,9 +92,9 @@
     export_include_dirs: ["include"],
 
     cflags: [
-        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
-        "-DOSCL_IMPORT_REF=",
         "-DOSCL_EXPORT_REF=",
+        "-DOSCL_IMPORT_REF=",
+        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
 
         "-Werror",
     ],
diff --git a/media/module/codecs/amrnb/dec/Android.bp b/media/module/codecs/amrnb/dec/Android.bp
index 70741d2..a28500a 100644
--- a/media/module/codecs/amrnb/dec/Android.bp
+++ b/media/module/codecs/amrnb/dec/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_codec_framework",
     default_applicable_licenses: [
         "frameworks_av_media_codecs_amrnb_dec_license",
     ],
@@ -47,12 +48,12 @@
         "src/b_cn_cod.cpp",
         "src/bgnscd.cpp",
         "src/c_g_aver.cpp",
-        "src/d1035pf.cpp",
-        "src/d2_11pf.cpp",
         "src/d2_9pf.cpp",
+        "src/d2_11pf.cpp",
         "src/d3_14pf.cpp",
         "src/d4_17pf.cpp",
         "src/d8_31pf.cpp",
+        "src/d1035pf.cpp",
         "src/d_gain_c.cpp",
         "src/d_gain_p.cpp",
         "src/d_plsf.cpp",
@@ -81,8 +82,8 @@
     export_include_dirs: ["src"],
 
     cflags: [
-        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
         "-DOSCL_IMPORT_REF=",
+        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
 
         "-Werror",
     ],
@@ -94,8 +95,8 @@
     //},
 
     shared_libs: [
-        "libstagefright_amrnb_common",
         "liblog",
+        "libstagefright_amrnb_common",
     ],
 
     target: {
@@ -113,19 +114,22 @@
 
     srcs: ["test/amrnbdec_test.cpp"],
 
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
 
     local_include_dirs: ["src"],
 
     static_libs: [
-        "libstagefright_amrnbdec",
         "libsndfile",
+        "libstagefright_amrnbdec",
     ],
 
     shared_libs: [
-        "libstagefright_amrnb_common",
         "libaudioutils",
         "liblog",
+        "libstagefright_amrnb_common",
     ],
 
     target: {
diff --git a/media/module/codecs/amrnb/enc/Android.bp b/media/module/codecs/amrnb/enc/Android.bp
index 3c6566e..13bb29c 100644
--- a/media/module/codecs/amrnb/enc/Android.bp
+++ b/media/module/codecs/amrnb/enc/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_codec_framework",
     default_applicable_licenses: [
         "frameworks_av_media_codecs_amrnb_enc_license",
     ],
@@ -42,12 +43,12 @@
     srcs: [
         "src/amrencode.cpp",
         "src/autocorr.cpp",
-        "src/c1035pf.cpp",
-        "src/c2_11pf.cpp",
         "src/c2_9pf.cpp",
+        "src/c2_11pf.cpp",
         "src/c3_14pf.cpp",
         "src/c4_17pf.cpp",
         "src/c8_31pf.cpp",
+        "src/c1035pf.cpp",
         "src/calc_cor.cpp",
         "src/calc_en.cpp",
         "src/cbsearch.cpp",
@@ -132,7 +133,10 @@
 
     srcs: ["test/amrnb_enc_test.cpp"],
 
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
 
     local_include_dirs: ["src"],
 
diff --git a/media/module/codecs/amrnb/enc/fuzzer/Android.bp b/media/module/codecs/amrnb/enc/fuzzer/Android.bp
index bcbcee2..1b2ec87 100644
--- a/media/module/codecs/amrnb/enc/fuzzer/Android.bp
+++ b/media/module/codecs/amrnb/enc/fuzzer/Android.bp
@@ -19,6 +19,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_codecs_amrnb_enc_license"
@@ -39,8 +40,8 @@
 
     static_libs: [
         "liblog",
-        "libstagefright_amrnbenc",
         "libstagefright_amrnb_common",
+        "libstagefright_amrnbenc",
     ],
 
     fuzz_config: {
diff --git a/media/module/codecs/amrnb/fuzzer/Android.bp b/media/module/codecs/amrnb/fuzzer/Android.bp
index 3f29267..c5cbbe2 100644
--- a/media/module/codecs/amrnb/fuzzer/Android.bp
+++ b/media/module/codecs/amrnb/fuzzer/Android.bp
@@ -19,6 +19,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -34,9 +35,9 @@
         "amrnb_dec_fuzzer.cpp",
     ],
     static_libs: [
-        "libstagefright_amrnbdec",
-        "libstagefright_amrnb_common",
         "liblog",
+        "libstagefright_amrnb_common",
+        "libstagefright_amrnbdec",
     ],
     target: {
         darwin: {
diff --git a/media/module/codecserviceregistrant/Android.bp b/media/module/codecserviceregistrant/Android.bp
index 56cd8b8..1d1dd71 100644
--- a/media/module/codecserviceregistrant/Android.bp
+++ b/media/module/codecserviceregistrant/Android.bp
@@ -43,6 +43,8 @@
         "libcodec2_soft_flacdec",
         "libcodec2_soft_flacenc",
         "libcodec2_soft_gsmdec",
+        "libcodec2_soft_apvenc",
+        "libcodec2_soft_apvdec",
     ],
 }
 
diff --git a/media/module/extractors/Android.bp b/media/module/extractors/Android.bp
index e29d3e6..cbaabe3 100644
--- a/media/module/extractors/Android.bp
+++ b/media/module/extractors/Android.bp
@@ -81,6 +81,12 @@
     srcs: ["extractor.aconfig"],
 }
 
+java_aconfig_library {
+    name: "android.media.extractor.flags-aconfig-java",
+    aconfig_declarations: "android.media.extractor.flags-aconfig",
+    defaults: ["framework-minus-apex-aconfig-java-defaults"],
+}
+
 cc_aconfig_library {
     name: "android.media.extractor.flags-aconfig-cc",
     aconfig_declarations: "android.media.extractor.flags-aconfig",
diff --git a/media/module/extractors/extractor.aconfig b/media/module/extractors/extractor.aconfig
index c61efa4..a7d3397 100644
--- a/media/module/extractors/extractor.aconfig
+++ b/media/module/extractors/extractor.aconfig
@@ -7,7 +7,17 @@
 flag {
     name: "extractor_sniff_midi_optimizations"
     is_exported: true
-    namespace: "media_extractor"
+    is_fixed_read_only: true
+    namespace: "media_solutions"
     description: "Enable SniffMidi optimizations."
     bug: "359920208"
 }
+
+flag {
+    name: "extractor_mp4_enable_apv"
+    is_exported: true
+    is_fixed_read_only: true
+    namespace: "media_solutions"
+    description: "Enable APV support in mp4 extractor."
+    bug: "370061501"
+}
diff --git a/media/module/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
index 3da1589..f3da389 100644
--- a/media/module/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -129,12 +129,18 @@
     ],
 
     static_libs: [
+        "android.media.extractor.flags-aconfig-cc",
+        "libaconfig_storage_read_api_cc",
         "libstagefright_id3",
         "libstagefright_esds",
         "libmp4extractor",
         "libstagefright_metadatautils",
     ],
 
+    shared_libs: [
+        "server_configurable_flags",
+    ],
+
     dictionary: "mp4_extractor_fuzzer.dict",
 
     corpus: ["corpus_mp4/*"],
diff --git a/media/module/extractors/mp4/Android.bp b/media/module/extractors/mp4/Android.bp
index 8072002..effd24a 100644
--- a/media/module/extractors/mp4/Android.bp
+++ b/media/module/extractors/mp4/Android.bp
@@ -42,12 +42,18 @@
     ],
 
     static_libs: [
+        "android.media.extractor.flags-aconfig-cc",
+        "libaconfig_storage_read_api_cc",
         "libstagefright_esds",
         "libstagefright_foundation",
         "libstagefright_id3",
         "libutils",
     ],
 
+    shared_libs: [
+        "server_configurable_flags",
+    ],
+
     host_supported: true,
 
     target: {
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index 12c0aaf..f062491 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -33,6 +33,7 @@
 #include "SampleTable.h"
 #include "ItemTable.h"
 
+#include <com_android_media_extractor_flags.h>
 #include <media/esds/ESDS.h>
 #include <ID3.h>
 #include <media/stagefright/DataSourceBase.h>
@@ -147,6 +148,7 @@
 
     bool mIsAVC;
     bool mIsHEVC;
+    bool mIsAPV;
     bool mIsDolbyVision;
     bool mIsAC4;
     bool mIsMpegH = false;
@@ -366,6 +368,13 @@
         case FOURCC("hev1"):
             return MEDIA_MIMETYPE_VIDEO_HEVC;
 
+        case FOURCC("apv1"):
+            if (!com::android::media::extractor::flags::extractor_mp4_enable_apv()) {
+                ALOGV("APV support not enabled");
+                return "application/octet-stream";
+            }
+            return MEDIA_MIMETYPE_VIDEO_APV;
+
         case FOURCC("dvav"):
         case FOURCC("dva1"):
         case FOURCC("dvhe"):
@@ -2106,6 +2115,7 @@
         case FOURCC("dav1"):
         case FOURCC("av01"):
         case FOURCC("vp09"):
+        case FOURCC("apv1"):
         {
             uint8_t buffer[78];
             if (chunk_data_size < (ssize_t)sizeof(buffer)) {
@@ -2623,8 +2633,16 @@
             break;
         }
 
+        case FOURCC("apvC"):
         case FOURCC("av1C"):
         {
+            if (!com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+                chunk_type == FOURCC("apvC")) {
+                ALOGV("APV support not enabled");
+                *offset += chunk_size;
+                break;
+            }
+
             auto buffer = heapbuffer<uint8_t>(chunk_data_size);
 
             if (buffer.get() == NULL) {
@@ -5145,6 +5163,7 @@
       mCurrentSampleInfoOffsets(NULL),
       mIsAVC(false),
       mIsHEVC(false),
+      mIsAPV(false),
       mIsDolbyVision(false),
       mIsAC4(false),
       mIsPcm(false),
@@ -5187,6 +5206,8 @@
     mIsAVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
     mIsHEVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC) ||
               !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
+    mIsAPV = com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+             !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_APV);
     mIsAC4 = !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC4);
     mIsDolbyVision = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
     mIsHeif = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC) && mItemTable != NULL;
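
Taken together with the MPEG4Writer changes earlier in this patch, APV video is expected to round-trip through MP4 as an 'apv1' sample entry whose codec-specific data lives in an 'apvC' child box: the writer emits them from kKeyAPVC when muxer_mp4_enable_apv is on, and the extractor above maps them back to MEDIA_MIMETYPE_VIDEO_APV when extractor_mp4_enable_apv is on. A hedged sketch of the track metadata a source might hand to the writer; the config buffer and the type value passed to setData() are placeholders:

    // Sketch only: apvConfig/apvConfigSize stand in for a real APV decoder
    // configuration record obtained from the encoder.
    sp<MetaData> meta = new MetaData;
    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_APV);
    meta->setInt32(kKeyWidth, 1920);
    meta->setInt32(kKeyHeight, 1080);
    meta->setData(kKeyAPVC, 0 /* type */, apvConfig, apvConfigSize);
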
diff --git a/media/module/extractors/mpeg2/Android.bp b/media/module/extractors/mpeg2/Android.bp
index aa59a0c..63dbcda 100644
--- a/media/module/extractors/mpeg2/Android.bp
+++ b/media/module/extractors/mpeg2/Android.bp
@@ -44,7 +44,6 @@
 
     shared_libs: [
         "libbase",
-        "libcgrouprc#29",
     ],
 
     header_libs: [
diff --git a/media/module/extractors/tests/Android.bp b/media/module/extractors/tests/Android.bp
index d6e79c7..5f0f4fa 100644
--- a/media/module/extractors/tests/Android.bp
+++ b/media/module/extractors/tests/Android.bp
@@ -21,6 +21,7 @@
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
     default_applicable_licenses: ["frameworks_av_license"],
+    default_team: "trendy_team_android_media_solutions_playback",
 }
 
 cc_test {
@@ -31,6 +32,8 @@
     srcs: ["ExtractorUnitTest.cpp"],
 
     static_libs: [
+        "android.media.extractor.flags-aconfig-cc",
+        "libaconfig_storage_read_api_cc",
         "libaacextractor",
         "libamrextractor",
         "libmp3extractor",
@@ -77,6 +80,7 @@
         "libhidlmemory",
         "libhidlbase",
         "libbase",
+        "server_configurable_flags",
     ],
 
     compile_multilib: "first",
diff --git a/media/module/foundation/MediaDefs.cpp b/media/module/foundation/MediaDefs.cpp
index 7abab63..a890696 100644
--- a/media/module/foundation/MediaDefs.cpp
+++ b/media/module/foundation/MediaDefs.cpp
@@ -25,6 +25,7 @@
 const char *MEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
 const char *MEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
 const char *MEDIA_MIMETYPE_VIDEO_AV1 = "video/av01";
+const char *MEDIA_MIMETYPE_VIDEO_APV = "video/apv";
 const char *MEDIA_MIMETYPE_VIDEO_AVC = "video/avc";
 const char *MEDIA_MIMETYPE_VIDEO_HEVC = "video/hevc";
 const char *MEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es";
diff --git a/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
index 05ee7fc..2b3f446 100644
--- a/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
+++ b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -27,6 +27,7 @@
 extern const char *MEDIA_MIMETYPE_VIDEO_VP8;
 extern const char *MEDIA_MIMETYPE_VIDEO_VP9;
 extern const char *MEDIA_MIMETYPE_VIDEO_AV1;
+extern const char *MEDIA_MIMETYPE_VIDEO_APV;
 extern const char *MEDIA_MIMETYPE_VIDEO_AVC;
 extern const char *MEDIA_MIMETYPE_VIDEO_HEVC;
 extern const char *MEDIA_MIMETYPE_VIDEO_MPEG4;
diff --git a/media/module/libapexcodecs/Android.bp b/media/module/libapexcodecs/Android.bp
new file mode 100644
index 0000000..790b749
--- /dev/null
+++ b/media/module/libapexcodecs/Android.bp
@@ -0,0 +1,71 @@
+//
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package {
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "libapexcodecs-defaults",
+    header_libs: [
+        "libbase_headers",
+    ],
+
+    srcs: ["ApexCodecs.cpp"],
+
+    shared_libs: [
+        "libbase",
+        "libnativewindow",
+    ],
+
+    export_include_dirs: ["include"],
+
+    export_shared_lib_headers: [
+        "libbase",
+        "libnativewindow",
+    ],
+
+}
+
+cc_library {
+    name: "libapexcodecs-testing",
+    defaults: ["libapexcodecs-defaults"],
+
+    visibility: [
+        ":__subpackages__",
+    ],
+}
+
+cc_library {
+    name: "libapexcodecs",
+    defaults: ["libapexcodecs-defaults"],
+
+    visibility: [
+        "//frameworks/av/apex:__subpackages__",
+        "//frameworks/av/media/codec2/hal/client",
+    ],
+
+    min_sdk_version: "apex_inherit",
+    version_script: "libapexcodecs.map.txt",
+    stubs: {
+        symbol_file: "libapexcodecs.map.txt",
+        versions: ["36"],
+    },
+
+    apex_available: [
+        "com.android.media.swcodec",
+    ],
+}
diff --git a/media/module/libapexcodecs/ApexCodecs.cpp b/media/module/libapexcodecs/ApexCodecs.cpp
new file mode 100644
index 0000000..7101677
--- /dev/null
+++ b/media/module/libapexcodecs/ApexCodecs.cpp
@@ -0,0 +1,148 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <new>
+
+#include <android-base/no_destructor.h>
+#include <apex/ApexCodecs.h>
+
+// TODO: remove when we have real implementations
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wunused-parameter"
+
+struct ApexCodec_ComponentStore {
+    ApexCodec_ComponentStore() = default;
+};
+
+ApexCodec_ComponentStore *ApexCodec_GetComponentStore() {
+    ::android::base::NoDestructor<ApexCodec_ComponentStore> store;
+    return store.get();
+}
+
+ApexCodec_ComponentTraits *ApexCodec_Traits_get(
+        ApexCodec_ComponentStore *store, size_t index) {
+    return nullptr;
+}
+
+ApexCodec_Status ApexCodec_Component_create(
+        ApexCodec_ComponentStore *store, const char *name, ApexCodec_Component **comp) {
+    *comp = nullptr;
+    return APEXCODEC_STATUS_NOT_FOUND;
+}
+
+void ApexCodec_Component_destroy(ApexCodec_Component *comp) {}
+
+ApexCodec_Status ApexCodec_Component_start(ApexCodec_Component *comp) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Component_flush(ApexCodec_Component *comp) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Component_reset(ApexCodec_Component *comp) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Configurable *ApexCodec_Component_getConfigurable(
+        ApexCodec_Component *comp) {
+    return nullptr;
+}
+
+ApexCodec_Status ApexCodec_SupportedValues_getTypeAndValues(
+        ApexCodec_SupportedValues *supportedValues,
+        ApexCodec_SupportedValuesType *type,
+        ApexCodec_SupportedValuesNumberType *numberType,
+        ApexCodec_Value **values,
+        uint32_t *numValues) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+void ApexCodec_SupportedValues_release(ApexCodec_SupportedValues *values) {}
+
+ApexCodec_Status ApexCodec_SettingResults_getResultAtIndex(
+        ApexCodec_SettingResults *results,
+        size_t index,
+        ApexCodec_SettingResultFailure *failure,
+        ApexCodec_ParamFieldValues *field,
+        ApexCodec_ParamFieldValues **conflicts,
+        size_t *numConflicts) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+void ApexCodec_SettingResults_release(ApexCodec_SettingResults *results) {}
+
+ApexCodec_Status ApexCodec_Component_process(
+        ApexCodec_Component *comp,
+        const ApexCodec_Buffer *input,
+        ApexCodec_Buffer *output,
+        size_t *consumed,
+        size_t *produced) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_config(
+        ApexCodec_Configurable *comp,
+        ApexCodec_LinearBuffer *config,
+        ApexCodec_SettingResults **results) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_query(
+        ApexCodec_Configurable *comp,
+        uint32_t indices[],
+        size_t numIndices,
+        ApexCodec_LinearBuffer *config,
+        size_t *written) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_getIndices(
+        ApexCodec_ParamDescriptors *descriptors,
+        uint32_t **indices,
+        size_t *numIndices) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_getDescriptor(
+        ApexCodec_ParamDescriptors *descriptors,
+        uint32_t index,
+        ApexCodec_ParamAttribute *attr,
+        const char **name,
+        uint32_t **dependencies,
+        size_t *numDependencies) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_release(
+        ApexCodec_ParamDescriptors *descriptors) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_querySupportedParams(
+        ApexCodec_Configurable *comp,
+        ApexCodec_ParamDescriptors **descriptors) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_querySupportedValues(
+        ApexCodec_Configurable *comp,
+        ApexCodec_SupportedValuesQuery *queries,
+        size_t numQueries) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+#pragma clang diagnostic pop
\ No newline at end of file
diff --git a/media/module/libapexcodecs/include/apex/ApexCodecs.h b/media/module/libapexcodecs/include/apex/ApexCodecs.h
new file mode 100644
index 0000000..b9f2e83
--- /dev/null
+++ b/media/module/libapexcodecs/include/apex/ApexCodecs.h
@@ -0,0 +1,768 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/cdefs.h>
+#include <errno.h>
+#include <stdint.h>
+
+#include <android/api-level.h>
+#include <android/hardware_buffer.h>
+#include <android/versioning.h>
+
+__BEGIN_DECLS
+
+/**
+ * An API to access and operate codecs implemented within an APEX module,
+ * used only by the OS when using the codecs within a client process
+ * (instead of via a HAL).
+ *
+ * NOTE: Many of the constants and types mirror the ones in the Codec 2.0 API.
+ */
+
+/**
+ * Error code for ApexCodec APIs.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_Status : int32_t {
+    APEXCODEC_STATUS_OK        = 0,
+
+    /* bad input */
+    APEXCODEC_STATUS_BAD_VALUE = EINVAL,
+    APEXCODEC_STATUS_BAD_INDEX = ENXIO,
+    APEXCODEC_STATUS_CANNOT_DO = ENOTSUP,
+
+    /* bad sequencing of events */
+    APEXCODEC_STATUS_DUPLICATE = EEXIST,
+    APEXCODEC_STATUS_NOT_FOUND = ENOENT,
+    APEXCODEC_STATUS_BAD_STATE = EPERM,
+    APEXCODEC_STATUS_BLOCKING  = EWOULDBLOCK,
+    APEXCODEC_STATUS_CANCELED  = EINTR,
+
+    /* bad environment */
+    APEXCODEC_STATUS_NO_MEMORY = ENOMEM,
+    APEXCODEC_STATUS_REFUSED   = EACCES,
+
+    APEXCODEC_STATUS_TIMED_OUT = ETIMEDOUT,
+
+    /* bad versioning */
+    APEXCODEC_STATUS_OMITTED   = ENOSYS,
+
+    /* unknown fatal */
+    APEXCODEC_STATUS_CORRUPTED = EFAULT,
+    APEXCODEC_STATUS_NO_INIT   = ENODEV,
+} ApexCodec_Status;
+
+/**
+ * Enum that represents the kind of component
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_Kind : uint32_t {
+    /**
+     * The component is of a kind that is not listed below.
+     */
+    APEXCODEC_KIND_OTHER = 0x0,
+    /**
+     * The component is a decoder, which decodes coded bitstream
+     * into raw buffers.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_KIND_DECODER = 0x1,
+    /**
+     * The component is an encoder, which encodes raw buffers
+     * into coded bitstream.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_KIND_ENCODER = 0x2,
+} ApexCodec_Kind;
+
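+/**
+ * Enum that represents the domain on which a component operates.
+ *
+ * Introduced in API 36.
+ */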
+typedef enum ApexCodec_Domain : uint32_t {
+    /**
+     * A component domain that is not listed below.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_DOMAIN_OTHER = 0x0,
+    /**
+     * A component domain that operates on video.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_DOMAIN_VIDEO = 0x1,
+    /**
+     * A component domain that operates on audio.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_DOMAIN_AUDIO = 0x2,
+    /**
+     * A component domain that operates on image.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_DOMAIN_IMAGE = 0x3,
+} ApexCodec_Domain;
+
+/**
+ * Handle for component traits such as name, media type, kind (decoder/encoder),
+ * domain (audio/video/image), etc.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ComponentTraits {
+    /**
+     * The name of the component.
+     */
+    const char *name;
+    /**
+     * The supported media type of the component.
+     */
+    const char *mediaType;
+    /**
+     * The kind of the component.
+     */
+    ApexCodec_Kind kind;
+    /**
+     * The domain on which the component operates.
+     */
+    ApexCodec_Domain domain;
+} ApexCodec_ComponentTraits;
+
+/**
+ * An opaque struct that represents a component store.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ComponentStore ApexCodec_ComponentStore;
+
+/**
+ * Get the component store object. This function never fails.
+ *
+ * \return component store object.
+ */
+ApexCodec_ComponentStore *ApexCodec_GetComponentStore()
+        __INTRODUCED_IN(36);
+
+/**
+ * Get the traits object of a component at the given index. The fields of the returned
+ * ApexCodec_ComponentTraits object describe the component (name, media type, kind and domain).
+ *
+ * Returns nullptr if index is out of bounds. The returned object is owned by
+ * ApexCodec_ComponentStore object and the client should not delete it.
+ *
+ * The client can iterate through the traits objects by calling this function
+ * with an index incrementing from 0 until it gets a nullptr.
+ *
+ * \param store the component store
+ * \param index index of the traits object to query
+ * \return traits object at the index, or nullptr if the index is out of bounds.
+ */
+ApexCodec_ComponentTraits *ApexCodec_Traits_get(
+        ApexCodec_ComponentStore *store, size_t index) __INTRODUCED_IN(36);
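+
+// Illustrative usage (non-normative sketch; error handling elided): enumerate the
+// available components by walking the traits list until nullptr is returned.
+//
+//     ApexCodec_ComponentStore *store = ApexCodec_GetComponentStore();
+//     for (size_t i = 0; ; ++i) {
+//         ApexCodec_ComponentTraits *traits = ApexCodec_Traits_get(store, i);
+//         if (traits == NULL) break;
+//         // traits->name, traits->mediaType, traits->kind and traits->domain stay
+//         // owned by the store and must not be freed.
+//     }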
+
+/**
+ * An opaque struct that represents a codec.
+ */
+typedef struct ApexCodec_Component ApexCodec_Component;
+
+/**
+ * Create a component by the name.
+ *
+ * \param store the component store
+ * \param name the name of the component
+ * \param comp out-param to be filled with the component; must not be null
+ * \return  APEXCODEC_STATUS_OK         if successful
+ *          APEXCODEC_STATUS_NOT_FOUND  if the name is not found
+ */
+ApexCodec_Status ApexCodec_Component_create(
+        ApexCodec_ComponentStore *store, const char *name, ApexCodec_Component **comp)
+        __INTRODUCED_IN(36);
+
+/**
+ * Destroy the component by the handle. It is invalid to call component methods on the handle
+ * after calling this method. Calling this method with |comp| == nullptr is a no-op.
+ *
+ * \param comp the handle for the component
+ */
+void ApexCodec_Component_destroy(ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Start the component. The component is ready to process buffers after this call.
+ *
+ * \param comp the handle for the component
+ */
+ApexCodec_Status ApexCodec_Component_start(
+        ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Flush the component's internal states. This operation preserves the existing configurations.
+ *
+ * \param comp the handle for the component
+ */
+ApexCodec_Status ApexCodec_Component_flush(
+        ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Resets the component to its initial state, as if right after creation. Note that the
+ * configuration also reverts to the initial state, so any required configuration has to be
+ * applied again before using the component.
+ *
+ * \param comp the handle for the component
+ */
+ApexCodec_Status ApexCodec_Component_reset(
+        ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * An opaque struct that represents a configurable part of the component.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_Configurable ApexCodec_Configurable;
+
+/**
+ * Return the configurable object for the given ApexCodec_Component.
+ * The returned object has the same lifecycle as |comp|.
+ *
+ * \param comp the handle for the component
+ * \return the configurable object handle
+ */
+ApexCodec_Configurable *ApexCodec_Component_getConfigurable(
+        ApexCodec_Component *comp) __INTRODUCED_IN(36);
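+
+// Illustrative lifecycle sketch (non-normative; "c2.example.decoder" is a hypothetical
+// component name used only for illustration):
+//
+//     ApexCodec_Component *comp = NULL;
+//     ApexCodec_Status status = ApexCodec_Component_create(
+//             ApexCodec_GetComponentStore(), "c2.example.decoder", &comp);
+//     if (status == APEXCODEC_STATUS_OK) {
+//         ApexCodec_Configurable *configurable = ApexCodec_Component_getConfigurable(comp);
+//         // ... configure via |configurable|, then:
+//         status = ApexCodec_Component_start(comp);
+//         // ... process buffers, then:
+//         ApexCodec_Component_destroy(comp);
+//     }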
+
+/**
+ * Enum that represents the flags for ApexCodec_Buffer.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_BufferFlags : uint32_t {
+    APEXCODEC_FLAG_DROP_FRAME    = (1 << 0),
+    APEXCODEC_FLAG_END_OF_STREAM = (1 << 1),
+    APEXCODEC_FLAG_DISCARD_FRAME = (1 << 2),
+    APEXCODEC_FLAG_INCOMPLETE    = (1 << 3),
+    APEXCODEC_FLAG_CORRECTED     = (1 << 4),
+    APEXCODEC_FLAG_CORRUPT       = (1 << 5),
+    APEXCODEC_FLAG_CODEC_CONFIG  = (1u << 31),
+} ApexCodec_BufferFlags;
+
+/**
+ * Enum that represents the type of buffer.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_BufferType : uint32_t {
+    APEXCODEC_BUFFER_TYPE_INVALID,
+    APEXCODEC_BUFFER_TYPE_LINEAR,
+    APEXCODEC_BUFFER_TYPE_LINEAR_CHUNKS,
+    APEXCODEC_BUFFER_TYPE_GRAPHIC,
+    APEXCODEC_BUFFER_TYPE_GRAPHIC_CHUNKS,
+} ApexCodec_BufferType;
+
+/**
+ * Struct that represents the memory for ApexCodec_Buffer.
+ *
+ * All memory regions have the simple 1D representation.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_LinearBuffer {
+    /**
+     * A pointer to the start of the buffer. No particular alignment is guaranteed.
+     */
+    uint8_t *data;
+    /**
+     * Size of the buffer. The memory region between |data| (inclusive) and
+     * |data + size| (exclusive) is assumed to be valid for read/write.
+     */
+    size_t size;
+} ApexCodec_LinearBuffer;
+
+/**
+ * Struct that represents a buffer for ApexCodec_Component.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_Buffer {
+    /**
+     * Flags associated with the buffer.
+     */
+    ApexCodec_BufferFlags flags;
+    /**
+     * For input buffers the client assigns a unique, sequential index to each buffer. For output
+     * buffers it is the same as the frame index of the associated input buffer.
+     */
+    uint64_t frameIndex;
+    /**
+     * A timestamp associated with the buffer in microseconds.
+     */
+    uint64_t timestampUs;
+    /**
+     * The type of the buffer. The component may reject a request to process a buffer with the
+     * wrong type. For example, a video decoder will reject an input buffer of type
+     * APEXCODEC_BUFFER_TYPE_GRAPHIC, or an output buffer of type APEXCODEC_BUFFER_TYPE_LINEAR.
+     */
+    ApexCodec_BufferType type;
+    /**
+     * The actual memory for the buffer.
+     */
+    union {
+        ApexCodec_LinearBuffer linear;
+        AHardwareBuffer *graphic;
+    } memory;
+    /**
+     * Config updates associated with the buffer. For input buffers these are sent to the component
+     * at the specific input frame. For output buffers these are config updates as a result of
+     * processing the buffer.
+     */
+    ApexCodec_LinearBuffer configUpdates;
+} ApexCodec_Buffer;
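+
+// Illustrative sketch (non-normative): populating a linear input buffer; |data|, |size|
+// and |ptsUs| are assumed to be provided by the caller.
+//
+//     ApexCodec_Buffer input;
+//     input.flags = (ApexCodec_BufferFlags)0;
+//     input.frameIndex = 0;                        // unique, sequential per input buffer
+//     input.timestampUs = ptsUs;
+//     input.type = APEXCODEC_BUFFER_TYPE_LINEAR;
+//     input.memory.linear.data = data;
+//     input.memory.linear.size = size;
+//     input.configUpdates.data = NULL;             // no per-frame config updates
+//     input.configUpdates.size = 0;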
+
+/**
+ * Enum that represents the query type for the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesQueryType : uint32_t {
+    /** Query all possible supported values regardless of current configuration */
+    APEXCODEC_SUPPORTED_VALUES_QUERY_POSSIBLE,
+    /** Query supported values at current configuration */
+    APEXCODEC_SUPPORTED_VALUES_QUERY_CURRENT,
+} ApexCodec_SupportedValuesQueryType;
+
+/**
+ * Enum that represents the type of the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesType : uint32_t {
+    /** The supported values are empty. */
+    APEXCODEC_SUPPORTED_VALUES_EMPTY,
+    /**
+     * The supported values are represented by a range defined with {min, max, step, num, den}.
+     *
+     * If step is 0 and num and den are both 1, the supported values are any value for which
+     * min <= value <= max.
+     *
+     * Otherwise, the range represents a geometric/arithmetic/multiply-accumulate series, where
+     * successive supported values can be derived from previous values (starting at min), using the
+     * following formula:
+     *  v[0] = min
+     *  v[i] = v[i-1] * num / den + step for i >= 1, while min < v[i] <= max.
+     */
+    APEXCODEC_SUPPORTED_VALUES_RANGE,
+    /** The supported values are represented by a list of values. */
+    APEXCODEC_SUPPORTED_VALUES_VALUES,
+    /** The supported values are represented by a list of flags. */
+    APEXCODEC_SUPPORTED_VALUES_FLAGS,
+} ApexCodec_SupportedValuesType;
+
+/**
+ * Enum that represents numeric types of the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesNumberType : uint32_t {
+    APEXCODEC_SUPPORTED_VALUES_TYPE_NONE   = 0,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_INT32  = 1,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_UINT32 = 2,
+    // RESERVED                            = 3,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_INT64  = 4,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_UINT64 = 5,
+    // RESERVED                            = 6,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_FLOAT  = 7,
+} ApexCodec_SupportedValuesNumberType;
+
+/**
+ * Union of primitive types.
+ *
+ * Introduced in API 36.
+ */
+typedef union {
+    int32_t i32;
+    uint32_t u32;
+    int64_t i64;
+    uint64_t u64;
+    float f;
+} ApexCodec_Value;
+
+/**
+ * An opaque struct that represents the supported values of a parameter.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_SupportedValues ApexCodec_SupportedValues;
+
+/**
+ * Extract information from ApexCodec_SupportedValues object.
+ *
+ * \param [in] supportedValues the supported values object
+ * \param [out] type        pointer to be filled with the type of the supported values
+ * \param [out] numberType  pointer to be filled with the numeric type of the supported values
+ * \param [out] values      pointer to be filled with the array of the actual supported values.
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_EMPTY: nullptr
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_RANGE: {min, max, step, num, den}
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_VALUES/_FLAGS:
+ *                              the array of supported values/flags
+ *                          the array is owned by the |supportedValues| object and the client
+ *                          should not free it.
+ * \param [out] numValues   pointer to be filled with the number of values.
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_EMPTY: 0
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_RANGE: 5
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_VALUES/_FLAGS: varies
+ */
+ApexCodec_Status ApexCodec_SupportedValues_getTypeAndValues(
+        ApexCodec_SupportedValues *supportedValues,
+        ApexCodec_SupportedValuesType *type,
+        ApexCodec_SupportedValuesNumberType *numberType,
+        ApexCodec_Value **values,
+        uint32_t *numValues) __INTRODUCED_IN(36);
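+
+// Illustrative sketch (non-normative): reading an INT32 RANGE result; |supportedValues| is
+// assumed to have been obtained from a query. The five range entries follow the
+// {min, max, step, num, den} layout described above.
+//
+//     ApexCodec_SupportedValuesType type;
+//     ApexCodec_SupportedValuesNumberType numberType;
+//     ApexCodec_Value *values = NULL;
+//     uint32_t numValues = 0;
+//     if (ApexCodec_SupportedValues_getTypeAndValues(
+//                 supportedValues, &type, &numberType, &values, &numValues)
+//                 == APEXCODEC_STATUS_OK
+//             && type == APEXCODEC_SUPPORTED_VALUES_RANGE
+//             && numberType == APEXCODEC_SUPPORTED_VALUES_TYPE_INT32) {
+//         int32_t min = values[0].i32;
+//         int32_t max = values[1].i32;
+//         // values[2], values[3] and values[4] hold step, num and den respectively.
+//     }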
+
+/**
+ * Release the supported values object.
+ *
+ * \param values the supported values object
+ */
+void ApexCodec_SupportedValues_release(
+        ApexCodec_SupportedValues *values) __INTRODUCED_IN(36);
+
+/**
+ * An opaque struct that represents the results of ApexCodec_Configurable_config.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_SettingResults ApexCodec_SettingResults;
+
+/**
+ * Enum that represents the failure code of ApexCodec_SettingResults.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SettingResultFailure : uint32_t {
+    /** parameter type is not supported */
+    APEXCODEC_SETTING_RESULT_BAD_TYPE,
+    /** parameter is not supported on the specific port */
+    APEXCODEC_SETTING_RESULT_BAD_PORT,
+    /** parameter is not supported on the specific stream */
+    APEXCODEC_SETTING_RESULT_BAD_INDEX,
+    /** parameter is read-only */
+    APEXCODEC_SETTING_RESULT_READ_ONLY,
+    /** parameter mismatches input data */
+    APEXCODEC_SETTING_RESULT_MISMATCH,
+    /** strict parameter does not accept value for the field at all */
+    APEXCODEC_SETTING_RESULT_BAD_VALUE,
+    /** strict parameter field value conflicts with other settings */
+    APEXCODEC_SETTING_RESULT_CONFLICT,
+    /** strict parameter field is out of range due to other settings */
+    APEXCODEC_SETTING_RESULT_UNSUPPORTED,
+    /**
+     * field does not accept the requested parameter value at all. It has been corrected to
+     * the closest supported value. This failure mode is provided to give guidance as to the
+     * currently supported values for this field (which may be a subset of all potentially
+     * supported values).
+     */
+    APEXCODEC_SETTING_RESULT_INFO_BAD_VALUE,
+    /**
+     * requested parameter value is in conflict with one or more other settings and has been
+     * corrected to the closest supported value. This failure mode is given to provide guidance
+     * as to the currently supported values, and optionally to suggest to the client how to
+     * enable the requested parameter value.
+     */
+    APEXCODEC_SETTING_RESULT_INFO_CONFLICT,
+} ApexCodec_SettingResultFailure;
+
+/**
+ * Struct that represents a field and its supported values of a parameter.
+ *
+ * The offset and size of the field are where the field is located in the blob representation of
+ * the parameter, as used in the ApexCodec_Configurable_query() and ApexCodec_Configurable_config(),
+ * for example.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ParamFieldValues {
+    /** index of the param */
+    uint32_t index;
+    /** offset of the param field */
+    uint32_t offset;
+    /** size of the param field */
+    uint32_t size;
+    /** currently supported values of the param field */
+    ApexCodec_SupportedValues *values;
+} ApexCodec_ParamFieldValues;
+
+/**
+ * Extract the result of ApexCodec_Configurable_config.
+ * The client can iterate through the results with index starting from 0 until this function returns
+ * APEXCODEC_STATUS_NOT_FOUND.
+ *
+ * \param [in]  results the results object
+ * \param [in]  index   the index of the result to extract, starts from 0.
+ * \param [out] failure pointer to be filled with the failure code
+ * \param [out] field   pointer to be filled with the field that failed.
+ *                      |field->values| is owned by the |results| object and the client should not
+ *                      free it.
+ * \param [out] conflicts   pointer to be filled with the array of conflicts.
+ *                          nullptr if |numConflicts| is 0.
+ *                          the array and its contents are owned by the |results| object and the client
+ *                          should not free it.
+ * \param [out] numConflicts pointer to be filled with the number of conflicts
+ *                          may be 0 if there are no conflicts
+ * \return APEXCODEC_STATUS_OK         if successful
+ * \return APEXCODEC_STATUS_NOT_FOUND  if index is out of range
+ */
+ApexCodec_Status ApexCodec_SettingResults_getResultAtIndex(
+        ApexCodec_SettingResults *results,
+        size_t index,
+        ApexCodec_SettingResultFailure *failure,
+        ApexCodec_ParamFieldValues *field,
+        ApexCodec_ParamFieldValues **conflicts,
+        size_t *numConflicts) __INTRODUCED_IN(36);
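+
+// Illustrative sketch (non-normative): walk all setting results, then release the object;
+// |results| is assumed to have come from ApexCodec_Configurable_config().
+//
+//     for (size_t i = 0; ; ++i) {
+//         ApexCodec_SettingResultFailure failure;
+//         ApexCodec_ParamFieldValues field;
+//         ApexCodec_ParamFieldValues *conflicts = NULL;
+//         size_t numConflicts = 0;
+//         if (ApexCodec_SettingResults_getResultAtIndex(
+//                 results, i, &failure, &field, &conflicts, &numConflicts)
+//                 != APEXCODEC_STATUS_OK) {
+//             break;  // APEXCODEC_STATUS_NOT_FOUND once the index is out of range
+//         }
+//         // |field.values| and |conflicts| are owned by |results|; do not free them.
+//     }
+//     ApexCodec_SettingResults_release(results);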
+
+/**
+ * Release the setting result object.
+ *
+ * \param results the setting results object
+ */
+void ApexCodec_SettingResults_release(
+        ApexCodec_SettingResults *results) __INTRODUCED_IN(36);
+
+/**
+ * Process one frame from |input|, and produce one frame to |output| if possible.
+ * When successfully filled, |output->memory.linear| has the size adjusted to the produced
+ * output size, in case of linear buffers. |input->configUpdates| is applied with the input
+ * buffer; |output->configUpdates| contains config updates as a result of processing the frame.
+ *
+ * \param comp      the component to process the buffers
+ * \param input     the input buffer; when nullptr, the component should fill |output| if there are
+ *                  any pending output buffers.
+ * \param output    the output buffer, should not be nullptr.
+ * \param consumed  the number of consumed bytes from the input buffer
+ *                  set to 0 if no input buffer has been consumed, including when |input| is nullptr.
+ *                  for graphic buffers, any non-zero value means that the input buffer is consumed.
+ * \param produced  the number of bytes produced on the output buffer
+ *                  set to 0 if no output buffer has been produced.
+ *                  for graphic buffers, any non-zero value means that the output buffer is filled.
+ * \return APEXCODEC_STATUS_OK         if successful
+ * \return APEXCODEC_STATUS_NO_MEMORY  if the output buffer is not suitable to hold the output
+ *                                     frame; the client should retry with a new output buffer.
+ *                                     |output->configUpdates| should have the information to
+ *                                     update the buffer size.
+ * \return APEXCODEC_STATUS_BAD_VALUE  if the parameters are bad
+ * \return APEXCODEC_STATUS_BAD_STATE  if the component is not in the right state
+ *                                     to process the frame
+ * \return APEXCODEC_STATUS_CORRUPTED  if unexpected error has occurred
+ */
+ApexCodec_Status ApexCodec_Component_process(
+        ApexCodec_Component *comp,
+        const ApexCodec_Buffer *input,
+        ApexCodec_Buffer *output,
+        size_t *consumed,
+        size_t *produced) __INTRODUCED_IN(36);
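+
+// Illustrative sketch (non-normative): one processing call with the NO_MEMORY retry path
+// described above; |comp|, |input| and |output| are assumed to be prepared by the caller.
+//
+//     size_t consumed = 0;
+//     size_t produced = 0;
+//     ApexCodec_Status status = ApexCodec_Component_process(
+//             comp, input, output, &consumed, &produced);
+//     if (status == APEXCODEC_STATUS_NO_MEMORY) {
+//         // Grow the output buffer using the returned config updates and retry.
+//     } else if (status == APEXCODEC_STATUS_OK && produced > 0) {
+//         // |output| now holds one produced frame of |produced| bytes (linear buffers).
+//     }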
+
+/**
+ * Configure the component with the given config.
+ *
+ * Configurations are Codec 2.0 configs in binary blobs,
+ * concatenated if there are multiple configs.
+ *
+ * frameworks/av/media/codec2/core/include/C2Param.h contains more details about the configuration
+ * blob layout.
+ *
+ * The component may correct the configured parameters to the closest supported values, and could
+ * fail in case there are no values that the component can auto-correct to. |results| contains the
+ * information about the failures. See ApexCodec_SettingResultFailure and ApexCodec_SettingResults
+ * for more details.
+ *
+ * \param [in]    comp   the handle for the component
+ * \param [inout] config the config blob; after the call, the config blob is updated to the actual
+ *                       config by the component.
+ * \param [out]   results the results of the configuration.
+ *                       the client should call ApexCodec_SettingResults_getResultAtIndex()
+ *                       to extract the results. The results object is owned by the client and
+ *                       should be released with ApexCodec_SettingResults_release().
+ *                       |results| may be nullptr if empty.
+ * \return APEXCODEC_STATUS_OK         if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE  if the config is invalid
+ * \return APEXCODEC_STATUS_BAD_STATE  if the component is not in the right state to be configured
+ * \return APEXCODEC_STATUS_CORRUPTED  if unexpected error has occurred
+ */
+ApexCodec_Status ApexCodec_Configurable_config(
+        ApexCodec_Configurable *comp,
+        ApexCodec_LinearBuffer *config,
+        ApexCodec_SettingResults **results) __INTRODUCED_IN(36);
+
+/**
+ * Query the component for the given indices.
+ *
+ * Parameter indices are defined in frameworks/av/media/codec2/core/include/C2Config.h.
+ *
+ * \param [in] comp         the handle for the component
+ * \param [in] indices      the array of indices to query
+ * \param [in] numIndices   the size of the indices array
+ * \param [inout] config    the output buffer for the config blob, allocated by the client.
+ *                          if the |config->size| was insufficient, it is set to the required size
+ *                          and |config->data| remains unchanged.
+ * \param [out] written     the number of bytes written to |config|.
+ * \return APEXCODEC_STATUS_OK          if successful
+ * \return APEXCODEC_STATUS_NO_MEMORY   if |config->size| is too small; |config->size| is updated
+ *                                      to the required buffer size.
+ * \return APEXCODEC_STATUS_BAD_VALUE   if the parameters are bad. e.g. |indices|, |config|,
+ *                                      |config->data| or |written| is nullptr.
+ */
+ApexCodec_Status ApexCodec_Configurable_query(
+        ApexCodec_Configurable *comp,
+        uint32_t indices[],
+        size_t numIndices,
+        ApexCodec_LinearBuffer *config,
+        size_t *written) __INTRODUCED_IN(36);
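+
+// Illustrative sketch (non-normative): query with a client-allocated buffer and grow it
+// if needed; |configurable|, |indices| and |numIndices| are assumed to exist.
+//
+//     uint8_t storage[1024];
+//     ApexCodec_LinearBuffer config = { storage, sizeof(storage) };
+//     size_t written = 0;
+//     ApexCodec_Status status = ApexCodec_Configurable_query(
+//             configurable, indices, numIndices, &config, &written);
+//     if (status == APEXCODEC_STATUS_NO_MEMORY) {
+//         // config.size now holds the required size; retry with a larger buffer.
+//     }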
+
+/**
+ * An opaque struct that represents a collection of parameter descriptors.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ParamDescriptors ApexCodec_ParamDescriptors;
+
+/**
+ * Enum that represents the attributes of a parameter.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_ParamAttribute : uint32_t {
+    /** parameter is required to be specified */
+    APEXCODEC_PARAM_IS_REQUIRED   = 1u << 0,
+    /** parameter retains its value */
+    APEXCODEC_PARAM_IS_PERSISTENT = 1u << 1,
+    /** parameter is strict */
+    APEXCODEC_PARAM_IS_STRICT     = 1u << 2,
+    /** parameter is read-only */
+    APEXCODEC_PARAM_IS_READ_ONLY  = 1u << 3,
+    /** parameter shall not be visible to clients */
+    APEXCODEC_PARAM_IS_HIDDEN     = 1u << 4,
+    /** parameter shall not be used by framework (other than testing) */
+    APEXCODEC_PARAM_IS_INTERNAL   = 1u << 5,
+    /** parameter is publicly const (hence read-only) */
+    APEXCODEC_PARAM_IS_CONST      = 1u << 6 | APEXCODEC_PARAM_IS_READ_ONLY,
+} ApexCodec_ParamAttribute;
+
+/**
+ * Get the parameter indices of the param descriptors.
+ *
+ * \param [in] descriptors the param descriptors object
+ * \param [out] indices the pointer to be filled with the array of the indices;
+ *                      the array is owned by |descriptors| and should not be freed by the client.
+ * \param [out] numIndices the size of the indices array
+ * \return APEXCODEC_STATUS_OK          if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE   if parameters are bad. e.g. |descriptors|, |indices| or
+ *                                      |numIndices| is nullptr.
+ */
+ApexCodec_Status ApexCodec_ParamDescriptors_getIndices(
+        ApexCodec_ParamDescriptors *descriptors,
+        uint32_t **indices,
+        size_t *numIndices) __INTRODUCED_IN(36);
+
+/**
+ * Get the descriptor of the param.
+ *
+ * \param [in] descriptors the param descriptors object
+ * \param [in] index the index of the param
+ * \param [out] attr the attribute of the param
+ * \param [out] name    the pointer to be filled with the name of the param
+ *                      the string is owned by |descriptors| and should not be freed by the client.
+ * \param [out] dependencies the pointer to be filled with an array of the parameter indices
+ *                        that the parameter with |index| depends on.
+ *                        may be nullptr if empty.
+ *                        the array is owned by |descriptors| and should not be freed by the client.
+ * \param [out] numDependencies the number of dependencies
+ * \return APEXCODEC_STATUS_OK          if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE   if parameters are bad. e.g. |descriptors|, |attr|, |name|,
+ *                                      |dependencies| or |numDependencies| is nullptr.
+ * \return APEXCODEC_STATUS_BAD_INDEX   if the index is not included in the param descriptors.
+ */
+ApexCodec_Status ApexCodec_ParamDescriptors_getDescriptor(
+        ApexCodec_ParamDescriptors *descriptors,
+        uint32_t index,
+        ApexCodec_ParamAttribute *attr,
+        const char **name,
+        uint32_t **dependencies,
+        size_t *numDependencies) __INTRODUCED_IN(36);
+
+/**
+ * Release the param descriptors object.
+ *
+ * \param descriptors the param descriptors object
+ */
+ApexCodec_Status ApexCodec_ParamDescriptors_release(
+        ApexCodec_ParamDescriptors *descriptors) __INTRODUCED_IN(36);
+
+/**
+ * Query the component for the supported parameters.
+ *
+ * \param comp the handle for the component
+ * \param descriptors   the pointer to be filled with the param descriptors object
+ *                      the object should be released with ApexCodec_ParamDescriptors_release().
+ * \return APEXCODEC_STATUS_OK          if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE   if parameters are bad. e.g. |descriptors| is nullptr.
+ */
+ApexCodec_Status ApexCodec_Configurable_querySupportedParams(
+        ApexCodec_Configurable *comp,
+        ApexCodec_ParamDescriptors **descriptors) __INTRODUCED_IN(36);
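+
+// Illustrative sketch (non-normative): list the supported parameter indices; |configurable|
+// is assumed to have been obtained from a component.
+//
+//     ApexCodec_ParamDescriptors *descriptors = NULL;
+//     if (ApexCodec_Configurable_querySupportedParams(configurable, &descriptors)
+//             == APEXCODEC_STATUS_OK) {
+//         uint32_t *indices = NULL;
+//         size_t numIndices = 0;
+//         if (ApexCodec_ParamDescriptors_getIndices(descriptors, &indices, &numIndices)
+//                 == APEXCODEC_STATUS_OK) {
+//             // |indices| has |numIndices| entries and is owned by |descriptors|.
+//         }
+//         ApexCodec_ParamDescriptors_release(descriptors);
+//     }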
+
+/**
+ * Struct that represents the query for the supported values of a parameter.
+ *
+ * The offset of the field can be found in the layout of the parameter blob.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_SupportedValuesQuery {
+    /* in-params */
+
+    /** index of the param */
+    uint32_t index;
+    /** offset to the param field */
+    size_t offset;
+    /** query type */
+    ApexCodec_SupportedValuesQueryType type;
+
+    /* out-params */
+
+    /** status of the query */
+    ApexCodec_Status status;
+
+    /** supported values. must be released with ApexCodec_SupportedValues_release(). */
+    ApexCodec_SupportedValues *values;
+} ApexCodec_SupportedValuesQuery;
+
+/**
+ * Query the component for the supported values of the given indices.
+ *
+ * \param comp the handle for the component
+ * \param queries the array of queries
+ * \param numQueries the size of the queries array
+ * \return  APEXCODEC_STATUS_OK         if successful
+ *          APEXCODEC_STATUS_CORRUPTED  if unexpected error has occurred
+ */
+ApexCodec_Status ApexCodec_Configurable_querySupportedValues(
+        ApexCodec_Configurable *comp,
+        ApexCodec_SupportedValuesQuery *queries,
+        size_t numQueries) __INTRODUCED_IN(36);
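+
+// Illustrative sketch (non-normative): query the currently supported values of a single
+// field; |configurable|, |paramIndex| and |fieldOffset| are assumed to come from the
+// parameter blob layout.
+//
+//     ApexCodec_SupportedValuesQuery query;
+//     query.index = paramIndex;
+//     query.offset = fieldOffset;
+//     query.type = APEXCODEC_SUPPORTED_VALUES_QUERY_CURRENT;
+//     if (ApexCodec_Configurable_querySupportedValues(configurable, &query, 1)
+//             == APEXCODEC_STATUS_OK && query.status == APEXCODEC_STATUS_OK) {
+//         // ... inspect |query.values| ...
+//         ApexCodec_SupportedValues_release(query.values);
+//     }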
+
+__END_DECLS
\ No newline at end of file
diff --git a/media/module/libapexcodecs/libapexcodecs.map.txt b/media/module/libapexcodecs/libapexcodecs.map.txt
new file mode 100644
index 0000000..672cf89
--- /dev/null
+++ b/media/module/libapexcodecs/libapexcodecs.map.txt
@@ -0,0 +1,26 @@
+LIBAPEXCODECS_36 { # introduced=36
+  global:
+    ApexCodec_Component_create; # apex
+    ApexCodec_Component_destroy; # apex
+    ApexCodec_Component_flush; # apex
+    ApexCodec_Component_getConfigurable; # apex
+    ApexCodec_Component_process; # apex
+    ApexCodec_Component_start; # apex
+    ApexCodec_Component_reset; # apex
+    ApexCodec_Configurable_config; # apex
+    ApexCodec_Configurable_query; # apex
+    ApexCodec_Configurable_querySupportedParams; # apex
+    ApexCodec_Configurable_querySupportedValues; # apex
+    ApexCodec_GetComponentStore; # apex
+    ApexCodec_ParamDescriptors_getDescriptor; # apex
+    ApexCodec_ParamDescriptors_getIndices; # apex
+    ApexCodec_ParamDescriptors_release; # apex
+    ApexCodec_SettingResults_getResultAtIndex; # apex
+    ApexCodec_SettingResults_release; # apex
+    ApexCodec_SupportedValues_getTypeAndValues; # apex
+    ApexCodec_SupportedValues_release; # apex
+    ApexCodec_Traits_get; # apex
+
+  local:
+    *;
+};
\ No newline at end of file
diff --git a/media/module/libapexcodecs/tests/Android.bp b/media/module/libapexcodecs/tests/Android.bp
new file mode 100644
index 0000000..162d12c
--- /dev/null
+++ b/media/module/libapexcodecs/tests/Android.bp
@@ -0,0 +1,30 @@
+//
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package {
+    default_team: "trendy_team_android_media_codec_framework",
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+    name: "libapexcodecs_tests",
+    shared_libs: [
+        "libapexcodecs-testing",
+        "libcodec2",
+    ],
+
+    srcs: ["ApexCodecsTest.cpp"],
+}
diff --git a/media/module/libapexcodecs/tests/ApexCodecsTest.cpp b/media/module/libapexcodecs/tests/ApexCodecsTest.cpp
new file mode 100644
index 0000000..cd5ebba
--- /dev/null
+++ b/media/module/libapexcodecs/tests/ApexCodecsTest.cpp
@@ -0,0 +1,100 @@
+#include <C2.h>
+#include <C2Component.h>
+
+#include <apex/ApexCodecs.h>
+
+// static_asserts for enum values match
+static_assert((uint32_t)APEXCODEC_STATUS_OK        == (uint32_t)C2_OK);
+static_assert((uint32_t)APEXCODEC_STATUS_BAD_VALUE == (uint32_t)C2_BAD_VALUE);
+static_assert((uint32_t)APEXCODEC_STATUS_BAD_INDEX == (uint32_t)C2_BAD_INDEX);
+static_assert((uint32_t)APEXCODEC_STATUS_CANNOT_DO == (uint32_t)C2_CANNOT_DO);
+static_assert((uint32_t)APEXCODEC_STATUS_DUPLICATE == (uint32_t)C2_DUPLICATE);
+static_assert((uint32_t)APEXCODEC_STATUS_NOT_FOUND == (uint32_t)C2_NOT_FOUND);
+static_assert((uint32_t)APEXCODEC_STATUS_BAD_STATE == (uint32_t)C2_BAD_STATE);
+static_assert((uint32_t)APEXCODEC_STATUS_BLOCKING  == (uint32_t)C2_BLOCKING);
+static_assert((uint32_t)APEXCODEC_STATUS_CANCELED  == (uint32_t)C2_CANCELED);
+static_assert((uint32_t)APEXCODEC_STATUS_NO_MEMORY == (uint32_t)C2_NO_MEMORY);
+static_assert((uint32_t)APEXCODEC_STATUS_REFUSED   == (uint32_t)C2_REFUSED);
+static_assert((uint32_t)APEXCODEC_STATUS_TIMED_OUT == (uint32_t)C2_TIMED_OUT);
+static_assert((uint32_t)APEXCODEC_STATUS_OMITTED   == (uint32_t)C2_OMITTED);
+static_assert((uint32_t)APEXCODEC_STATUS_CORRUPTED == (uint32_t)C2_CORRUPTED);
+static_assert((uint32_t)APEXCODEC_STATUS_NO_INIT   == (uint32_t)C2_NO_INIT);
+
+static_assert((uint32_t)APEXCODEC_KIND_OTHER   == (uint32_t)C2Component::KIND_OTHER);
+static_assert((uint32_t)APEXCODEC_KIND_DECODER == (uint32_t)C2Component::KIND_DECODER);
+static_assert((uint32_t)APEXCODEC_KIND_ENCODER == (uint32_t)C2Component::KIND_ENCODER);
+
+static_assert((uint32_t)APEXCODEC_DOMAIN_OTHER == (uint32_t)C2Component::DOMAIN_OTHER);
+static_assert((uint32_t)APEXCODEC_DOMAIN_VIDEO == (uint32_t)C2Component::DOMAIN_VIDEO);
+static_assert((uint32_t)APEXCODEC_DOMAIN_AUDIO == (uint32_t)C2Component::DOMAIN_AUDIO);
+static_assert((uint32_t)APEXCODEC_DOMAIN_IMAGE == (uint32_t)C2Component::DOMAIN_IMAGE);
+
+static_assert((uint32_t)APEXCODEC_FLAG_DROP_FRAME    == (uint32_t)C2FrameData::FLAG_DROP_FRAME);
+static_assert((uint32_t)APEXCODEC_FLAG_END_OF_STREAM == (uint32_t)C2FrameData::FLAG_END_OF_STREAM);
+static_assert((uint32_t)APEXCODEC_FLAG_DISCARD_FRAME == (uint32_t)C2FrameData::FLAG_DISCARD_FRAME);
+static_assert((uint32_t)APEXCODEC_FLAG_INCOMPLETE    == (uint32_t)C2FrameData::FLAG_INCOMPLETE);
+static_assert((uint32_t)APEXCODEC_FLAG_CORRECTED     == (uint32_t)C2FrameData::FLAG_CORRECTED);
+static_assert((uint32_t)APEXCODEC_FLAG_CORRUPT       == (uint32_t)C2FrameData::FLAG_CORRUPT);
+static_assert((uint32_t)APEXCODEC_FLAG_CODEC_CONFIG  == (uint32_t)C2FrameData::FLAG_CODEC_CONFIG);
+
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_INVALID        ==
+              (uint32_t)C2BufferData::INVALID);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_LINEAR         ==
+              (uint32_t)C2BufferData::LINEAR);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_LINEAR_CHUNKS  ==
+              (uint32_t)C2BufferData::LINEAR_CHUNKS);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_GRAPHIC        ==
+              (uint32_t)C2BufferData::GRAPHIC);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_GRAPHIC_CHUNKS ==
+              (uint32_t)C2BufferData::GRAPHIC_CHUNKS);
+
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_QUERY_CURRENT  ==
+              (uint32_t)C2FieldSupportedValuesQuery::CURRENT);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_QUERY_POSSIBLE ==
+              (uint32_t)C2FieldSupportedValuesQuery::POSSIBLE);
+
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_EMPTY  ==
+              (uint32_t)C2FieldSupportedValues::EMPTY);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_RANGE  ==
+              (uint32_t)C2FieldSupportedValues::RANGE);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_VALUES ==
+              (uint32_t)C2FieldSupportedValues::VALUES);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_FLAGS  ==
+              (uint32_t)C2FieldSupportedValues::FLAGS);
+
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_NONE   == (uint32_t)C2Value::NO_INIT);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_INT32  == (uint32_t)C2Value::INT32);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_UINT32 == (uint32_t)C2Value::UINT32);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_INT64  == (uint32_t)C2Value::INT64);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_UINT64 == (uint32_t)C2Value::UINT64);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_FLOAT  == (uint32_t)C2Value::FLOAT);
+
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_TYPE       ==
+              (uint32_t)C2SettingResult::BAD_TYPE);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_PORT       ==
+              (uint32_t)C2SettingResult::BAD_PORT);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_INDEX      ==
+              (uint32_t)C2SettingResult::BAD_INDEX);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_READ_ONLY      ==
+              (uint32_t)C2SettingResult::READ_ONLY);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_MISMATCH       ==
+              (uint32_t)C2SettingResult::MISMATCH);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_VALUE      ==
+              (uint32_t)C2SettingResult::BAD_VALUE);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_CONFLICT       ==
+              (uint32_t)C2SettingResult::CONFLICT);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_UNSUPPORTED    ==
+              (uint32_t)C2SettingResult::UNSUPPORTED);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_INFO_BAD_VALUE ==
+              (uint32_t)C2SettingResult::INFO_BAD_VALUE);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_INFO_CONFLICT  ==
+              (uint32_t)C2SettingResult::INFO_CONFLICT);
+
+static_assert((uint32_t)APEXCODEC_PARAM_IS_REQUIRED   == (uint32_t)C2ParamDescriptor::IS_REQUIRED);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_PERSISTENT ==
+              (uint32_t)C2ParamDescriptor::IS_PERSISTENT);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_STRICT     == (uint32_t)C2ParamDescriptor::IS_STRICT);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_READ_ONLY  == (uint32_t)C2ParamDescriptor::IS_READ_ONLY);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_HIDDEN     == (uint32_t)C2ParamDescriptor::IS_HIDDEN);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_INTERNAL   == (uint32_t)C2ParamDescriptor::IS_INTERNAL);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_CONST      == (uint32_t)C2ParamDescriptor::IS_CONST);
\ No newline at end of file
diff --git a/media/mtp/OWNERS b/media/mtp/OWNERS
index 6b5336e..bdb6cdb 100644
--- a/media/mtp/OWNERS
+++ b/media/mtp/OWNERS
@@ -1,10 +1,9 @@
 set noparent
 
-aprasath@google.com
 anothermark@google.com
-kumarashishg@google.com
-sarup@google.com
+febinthattil@google.com
+aprasath@google.com
 jsharkey@android.com
 jameswei@google.com
 rmojumder@google.com
-
+kumarashishg@google.com
\ No newline at end of file
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 3de9968..83cd024 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -73,7 +73,7 @@
     bool isDirect() const final
                                 { return (mFlags & AUDIO_INPUT_FLAG_DIRECT) != 0; }
 
-    void setSilenced(bool silenced) final { if (!isPatchTrack()) mSilenced = silenced; }
+    void setSilenced(bool silenced) final;
     bool isSilenced() const final { return mSilenced; }
 
     status_t getActiveMicrophones(
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 147a5d6..b7c0bb3 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -7731,6 +7731,7 @@
 
 ssize_t DuplicatingThread::threadLoop_write()
 {
+    ATRACE_BEGIN("write");
     for (size_t i = 0; i < outputTracks.size(); i++) {
         const ssize_t actualWritten = outputTracks[i]->write(mSinkBuffer, writeFrames);
 
@@ -7749,6 +7750,7 @@
 
         // TODO: Report correction for the other output tracks and show in the dump.
     }
+    ATRACE_END();
     if (mStandby) {
         mThreadMetrics.logBeginInterval();
         mThreadSnapshot.onBegin();
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index f5f11cc..3d4e771 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -3136,6 +3136,14 @@
     *backInserter++ = metadata;
 }
 
+void RecordTrack::setSilenced(bool silenced) {
+    if (!isPatchTrack() && mSilenced != silenced) {
+        mSilenced = silenced;
+        ALOGD("%s: track with port id: %d, (%s)", __func__, mPortId,
+              mSilenced ? "silenced" : "unsilenced");
+    }
+}
+
 // ----------------------------------------------------------------------------
 #undef LOG_TAG
 #define LOG_TAG "AF::PatchRecord"
diff --git a/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp b/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
index a1b4470..b72e517 100644
--- a/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
+++ b/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
@@ -122,18 +122,24 @@
 }
 
 ConversionResult<std::string> aidl2legacy_AudioHalCapCriterionV2Type_CapCriterionValue(
-        const AudioHalCapCriterionV2::Type& aidl) {
+        const AudioHalCapCriterionV2& aidl) {
     switch (aidl.getTag()) {
-        case AudioHalCapCriterionV2::Type::availableDevicesType:
+        case AudioHalCapCriterionV2::availableInputDevices:
             return aidl2legacy_AudioHalCapCriterionV2TypeDevice_CapCriterionValue(
-                    aidl.get<AudioHalCapCriterionV2::Type::availableDevicesType>());
-        case AudioHalCapCriterionV2::Type::availableDevicesAddressesType:
-            return aidl.get<AudioHalCapCriterionV2::Type::availableDevicesAddressesType>().template
-                    get<AudioDeviceAddress::id>();
-        case AudioHalCapCriterionV2::Type::telephonyModeType:
-            return toString(aidl.get<AudioHalCapCriterionV2::Type::telephonyModeType>());
-        case AudioHalCapCriterionV2::Type::forcedConfigType:
-            return toString(aidl.get<AudioHalCapCriterionV2::Type::forcedConfigType>());
+                    aidl.get<AudioHalCapCriterionV2::availableInputDevices>().values[0]);
+        case AudioHalCapCriterionV2::availableOutputDevices:
+            return aidl2legacy_AudioHalCapCriterionV2TypeDevice_CapCriterionValue(
+                    aidl.get<AudioHalCapCriterionV2::availableOutputDevices>().values[0]);
+        case AudioHalCapCriterionV2::availableInputDevicesAddresses:
+            return aidl.get<AudioHalCapCriterionV2::availableInputDevicesAddresses>().values[0].
+                    template get<AudioDeviceAddress::id>();
+        case AudioHalCapCriterionV2::availableOutputDevicesAddresses:
+            return aidl.get<AudioHalCapCriterionV2::availableOutputDevicesAddresses>().values[0].
+                    template get<AudioDeviceAddress::id>();
+        case AudioHalCapCriterionV2::telephonyMode:
+            return toString(aidl.get<AudioHalCapCriterionV2::telephonyMode>().values[0]);
+        case AudioHalCapCriterionV2::forceConfigForUse:
+            return toString(aidl.get<AudioHalCapCriterionV2::forceConfigForUse>().values[0]);
         default:
             return unexpected(BAD_VALUE);
     }
@@ -168,11 +174,11 @@
         }
         isFirstCriterionRule = false;
         std::string selectionCriterion = VALUE_OR_RETURN(
-                aidl2legacy_AudioHalCapCriterionV2_CapName(criterionRule.criterion));
+                aidl2legacy_AudioHalCapCriterionV2_CapName(criterionRule.criterionAndValue));
         std::string matchesWhen;
         std::string value = VALUE_OR_RETURN(
                 aidl2legacy_AudioHalCapCriterionV2Type_CapCriterionValue(
-                        criterionRule.criterionTypeValue));
+                        criterionRule.criterionAndValue));
 
         switch (criterionRule.matchingRule) {
             case AudioHalCapRule::MatchingRule::IS:
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index 5a5966b..c6b58fb 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -216,8 +216,8 @@
   std::mutex mLock;
   std::deque<std::unique_ptr<ProcessCaptureRequestTask>> mQueue GUARDED_BY(mLock);
   std::condition_variable mCondVar;
-  volatile bool mTextureUpdateRequested GUARDED_BY(mLock);
-  volatile bool mPendingExit GUARDED_BY(mLock);
+  volatile bool GUARDED_BY(mLock) mTextureUpdateRequested = false;
+  volatile bool GUARDED_BY(mLock) mPendingExit = false;
 
   // Acquisition timestamp of last frame.
   std::atomic<uint64_t> mLastAcquisitionTimestampNanoseconds;