Merge "Refactored libaaudio_fuzzer" into main
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index 664647a..4b189b4 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -42,6 +42,10 @@
         "libnativewindow_headers",
     ],
 
+    static_libs: [
+        "libyuv_static", // for conversion routines
+    ],
+
     shared_libs: [
         "libcutils", // for properties
         "liblog", // for ALOG
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 55a1164..06a21f6 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -21,8 +21,10 @@
 #include <android/hardware_buffer.h>
 #include <cutils/properties.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
 
 #include <inttypes.h>
+#include <libyuv.h>
 
 #include <C2Config.h>
 #include <C2Debug.h>
@@ -32,6 +34,15 @@
 #include <SimpleC2Component.h>
 
 namespace android {
+
+// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
+#if LIBYUV_VERSION >= 1780
+#include <algorithm>
+#define HAVE_LIBYUV_I410_I210_TO_AB30 1
+#else
+#define HAVE_LIBYUV_I410_I210_TO_AB30 0
+#endif
+
 constexpr uint8_t kNeutralUVBitDepth8 = 128;
 constexpr uint16_t kNeutralUVBitDepth10 = 512;
 
@@ -506,6 +517,120 @@
     }
 }
 
+void convertPlanar16ToY410OrRGBA1010102(uint8_t* dst, const uint16_t* srcY, const uint16_t* srcU,
+                                        const uint16_t* srcV, size_t srcYStride, size_t srcUStride,
+                                        size_t srcVStride, size_t dstStride, size_t width,
+                                        size_t height,
+                                        std::shared_ptr<const C2ColorAspectsStruct> aspects,
+                                        CONV_FORMAT_T format) {
+    bool processed = false;
+#if HAVE_LIBYUV_I410_I210_TO_AB30
+    if (format == CONV_FORMAT_I444) {
+        libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dst,
+                                 dstStride, &libyuv::kYuvV2020Constants, width, height);
+        processed = true;
+    } else if (format == CONV_FORMAT_I422) {
+        libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dst,
+                                 dstStride, &libyuv::kYuvV2020Constants, width, height);
+        processed = true;
+    }
+#endif  // HAVE_LIBYUV_I410_I210_TO_AB30
+    if (!processed) {
+        convertYUV420Planar16ToY410OrRGBA1010102(
+                (uint32_t*)dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                dstStride / sizeof(uint32_t), width, height,
+                std::static_pointer_cast<const C2ColorAspectsStruct>(aspects));
+    }
+}
+
+void convertPlanar16ToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+                           const uint16_t* srcU, const uint16_t* srcV, size_t srcYStride,
+                           size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                           size_t dstUStride, size_t dstVStride, size_t width, size_t height,
+                           bool isMonochrome, CONV_FORMAT_T format, uint16_t* tmpFrameBuffer,
+                           size_t tmpFrameBufferSize) {
+#if LIBYUV_VERSION >= 1779
+    if ((format == CONV_FORMAT_I444) || (format == CONV_FORMAT_I422)) {
+        // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010
+        // and libyuv::I210ToP010 when they are available. Note it may be safe to alias dstY
+        // in I010ToP010, but the libyuv API doesn't make any guarantees.
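+        // These tmp planes form an intermediate I010 image: a full-size Y plane of
+        // dstYStride * height samples followed by two half-height chroma planes of
+        // dstUStride * align(height, 2) / 2 samples each.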
+        const size_t tmpSize = dstYStride * height + dstUStride * align(height, 2);
+        CHECK(tmpSize <= tmpFrameBufferSize);
+
+        uint16_t* const tmpY = tmpFrameBuffer;
+        uint16_t* const tmpU = tmpY + dstYStride * height;
+        uint16_t* const tmpV = tmpU + dstUStride * align(height, 2) / 2;
+        if (format == CONV_FORMAT_I444) {
+            libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                               dstYStride, tmpU, dstUStride, tmpV, dstUStride, width, height);
+        } else {
+            libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                               dstYStride, tmpU, dstUStride, tmpV, dstUStride, width, height);
+        }
+        libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride, dstY, dstYStride,
+                           dstUV, dstUStride, width, height);
+    } else {
+        convertYUV420Planar16ToP010(dstY, dstUV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                    srcVStride, dstYStride, dstUStride, width, height,
+                                    isMonochrome);
+    }
+#else   // LIBYUV_VERSION < 1779
+    convertYUV420Planar16ToP010(dstY, dstUV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                                dstYStride, dstUStride, width, height, isMonochrome);
+#endif  // LIBYUV_VERSION >= 1779
+}
+
+void convertPlanar16ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV, const uint16_t* srcY,
+                           const uint16_t* srcU, const uint16_t* srcV, size_t srcYStride,
+                           size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                           size_t dstUStride, size_t dstVStride, size_t width, size_t height,
+                           bool isMonochrome, CONV_FORMAT_T format, uint16_t* tmpFrameBuffer,
+                           size_t tmpFrameBufferSize) {
+#if LIBYUV_VERSION >= 1779
+    if (format == CONV_FORMAT_I444) {
+        // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420
+        // when it's available.
+        const size_t tmpSize = dstYStride * height + dstUStride * align(height, 2);
+        CHECK(tmpSize <= tmpFrameBufferSize);
+
+        uint16_t* const tmpY = tmpFrameBuffer;
+        uint16_t* const tmpU = tmpY + dstYStride * height;
+        uint16_t* const tmpV = tmpU + dstUStride * align(height, 2) / 2;
+        libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY, dstYStride,
+                           tmpU, dstUStride, tmpV, dstUStride, width, height);
+        libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride, dstY, dstYStride,
+                           dstU, dstUStride, dstV, dstVStride, width, height);
+    } else if (format == CONV_FORMAT_I422) {
+        libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY, dstYStride,
+                           dstU, dstUStride, dstV, dstVStride, width, height);
+    } else {
+        convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                    srcVStride, dstYStride, dstUStride, width, height,
+                                    isMonochrome);
+    }
+#else   // LIBYUV_VERSION < 1779
+    convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                srcVStride, dstYStride, dstUStride, width, height, isMonochrome);
+#endif  // LIBYUV_VERSION >= 1779
+}
+
+void convertPlanar8ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV, const uint8_t* srcY,
+                          const uint8_t* srcU, const uint8_t* srcV, size_t srcYStride,
+                          size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                          size_t dstUStride, size_t dstVStride, uint32_t width, uint32_t height,
+                          bool isMonochrome, CONV_FORMAT_T format) {
+    if (format == CONV_FORMAT_I444) {
+        libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY, dstYStride,
+                           dstU, dstUStride, dstV, dstVStride, width, height);
+    } else if (format == CONV_FORMAT_I422) {
+        libyuv::I422ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY, dstYStride,
+                           dstU, dstUStride, dstV, dstVStride, width, height);
+    } else {
+        convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                   srcVStride, dstYStride, dstUStride, dstVStride, width, height,
+                                   isMonochrome);
+    }
+}
 std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
     std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
     mQueue.pop_front();
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index bc27474..b28c47e 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -31,6 +31,12 @@
 
 namespace android {
 
+typedef enum {
+    CONV_FORMAT_I420,
+    CONV_FORMAT_I422,
+    CONV_FORMAT_I444,
+} CONV_FORMAT_T;
+
 void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
                                 const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
@@ -66,6 +72,30 @@
                                         const uint32_t* srcRGBA, size_t srcRGBStride, size_t width,
                                         size_t height, C2Color::matrix_t colorMatrix,
                                         C2Color::range_t colorRange);
+void convertPlanar16ToY410OrRGBA1010102(uint8_t* dst, const uint16_t* srcY, const uint16_t* srcU,
+                                        const uint16_t* srcV, size_t srcYStride, size_t srcUStride,
+                                        size_t srcVStride, size_t dstStride, size_t width,
+                                        size_t height,
+                                        std::shared_ptr<const C2ColorAspectsStruct> aspects,
+                                        CONV_FORMAT_T format);
+
+void convertPlanar16ToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+                           const uint16_t* srcU, const uint16_t* srcV, size_t srcYStride,
+                           size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                           size_t dstUStride, size_t dstVStride, size_t width, size_t height,
+                           bool isMonochrome, CONV_FORMAT_T format, uint16_t* tmpFrameBuffer,
+                           size_t tmpFrameBufferSize);
+void convertPlanar16ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV, const uint16_t* srcY,
+                           const uint16_t* srcU, const uint16_t* srcV, size_t srcYStride,
+                           size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                           size_t dstUStride, size_t dstVStride, size_t width, size_t height,
+                           bool isMonochrome, CONV_FORMAT_T format, uint16_t* tmpFrameBuffer,
+                           size_t tmpFrameBufferSize);
+void convertPlanar8ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV, const uint8_t* srcY,
+                          const uint8_t* srcU, const uint8_t* srcV, size_t srcYStride,
+                          size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                          size_t dstUStride, size_t dstVStride, uint32_t width, uint32_t height,
+                          bool isMonochrome, CONV_FORMAT_T format);
 
 class SimpleC2Component
         : public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
diff --git a/media/codec2/components/dav1d/Android.bp b/media/codec2/components/dav1d/Android.bp
new file mode 100644
index 0000000..d549ccb
--- /dev/null
+++ b/media/codec2/components/dav1d/Android.bp
@@ -0,0 +1,28 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+    name: "libcodec2_soft_av1dec_dav1d",
+
+    defaults: [
+        "libcodec2_soft-defaults",
+        "libcodec2_soft_sanitize_all-defaults",
+        "libcodec2_soft_sanitize_cfi-defaults",
+    ],
+
+    cflags: [
+        "-DCODECNAME=\"c2.android.av1-dav1d.decoder\"",
+        "-Wno-unused-variable",
+    ],
+
+    srcs: ["C2SoftDav1dDec.cpp", "C2SoftDav1dDump.cpp"],
+    static_libs: [
+        "libdav1d",
+    ],
+}
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
new file mode 100644
index 0000000..3f96cb3
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -0,0 +1,1235 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftDav1dDec"
+#include <android-base/properties.h>
+#include <cutils/properties.h>
+#include <thread>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+#include <log/log.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include "C2SoftDav1dDec.h"
+
+namespace android {
+
+// The number of threads used for the dav1d decoder.
+static const int NUM_THREADS_DAV1D_DEFAULT = 0;
+static const char NUM_THREADS_DAV1D_PROPERTY[] = "debug.dav1d.numthreads";
+
+// CODECNAME is set and passed in as a compile flag from Android.bp.
+constexpr char COMPONENT_NAME[] = CODECNAME;
+
+constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
+
+class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+  public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+        : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
+                                            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
+
+        addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                             .withConstValue(new C2ComponentAttributesSetting(
+                                     C2Component::ATTRIB_IS_TEMPORAL))
+                             .build());
+
+        addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+                             .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 4096),
+                                     C2F(mSize, height).inRange(2, 4096),
+                             })
+                             .withSetter(SizeSetter)
+                             .build());
+
+        addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                             .withDefault(new C2StreamProfileLevelInfo::input(
+                                     0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
+                             .withFields({C2F(mProfileLevel, profile)
+                                                  .oneOf({C2Config::PROFILE_AV1_0,
+                                                          C2Config::PROFILE_AV1_1}),
+                                          C2F(mProfileLevel, level)
+                                                  .oneOf({
+                                                          C2Config::LEVEL_AV1_2,
+                                                          C2Config::LEVEL_AV1_2_1,
+                                                          C2Config::LEVEL_AV1_2_2,
+                                                          C2Config::LEVEL_AV1_2_3,
+                                                          C2Config::LEVEL_AV1_3,
+                                                          C2Config::LEVEL_AV1_3_1,
+                                                          C2Config::LEVEL_AV1_3_2,
+                                                          C2Config::LEVEL_AV1_3_3,
+                                                          C2Config::LEVEL_AV1_4,
+                                                          C2Config::LEVEL_AV1_4_1,
+                                                          C2Config::LEVEL_AV1_4_2,
+                                                          C2Config::LEVEL_AV1_4_3,
+                                                          C2Config::LEVEL_AV1_5,
+                                                          C2Config::LEVEL_AV1_5_1,
+                                                          C2Config::LEVEL_AV1_5_2,
+                                                          C2Config::LEVEL_AV1_5_3,
+                                                  })})
+                             .withSetter(ProfileLevelSetter, mSize)
+                             .build());
+
+        mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoInput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoInput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoInputSetter)
+                             .build());
+
+        mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoOutput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoOutput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoOutputSetter)
+                             .build());
+
+        // default static info
+        C2HdrStaticMetadataStruct defaultStaticInfo{};
+        helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
+        addParameter(
+                DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
+                        .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
+                        .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
+                                     C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
+                                     C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
+                                     C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
+                        .withSetter(HdrStaticInfoSetter)
+                        .build());
+
+        addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+                             .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 2048, 2),
+                                     C2F(mSize, height).inRange(2, 2048, 2),
+                             })
+                             .withSetter(MaxPictureSizeSetter, mSize)
+                             .build());
+
+        addParameter(
+                DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+                        .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
+                        .withFields({
+                                C2F(mMaxInputSize, value).any(),
+                        })
+                        .calculatedAs(MaxInputSizeSetter, mMaxSize)
+                        .build());
+
+        C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
+        std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+                C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+        defaultColorInfo = C2StreamColorInfo::output::AllocShared(
+                {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        helper->addStructDescriptors<C2ChromaOffsetStruct>();
+
+        addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+                             .withConstValue(defaultColorInfo)
+                             .build());
+
+        addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsTuning::output(
+                                     0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mDefaultColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mDefaultColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mDefaultColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mDefaultColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(DefaultColorAspectsSetter)
+                             .build());
+
+        addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsInfo::input(
+                                     0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mCodedColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mCodedColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mCodedColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mCodedColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(CodedColorAspectsSetter)
+                             .build());
+
+        addParameter(
+                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                        .withDefault(new C2StreamColorAspectsInfo::output(
+                                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                        .withFields(
+                                {C2F(mColorAspects, range)
+                                         .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+                                 C2F(mColorAspects, primaries)
+                                         .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                  C2Color::PRIMARIES_OTHER),
+                                 C2F(mColorAspects, transfer)
+                                         .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                  C2Color::TRANSFER_OTHER),
+                                 C2F(mColorAspects, matrix)
+                                         .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                  C2Color::MATRIX_OTHER)})
+                        .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+                        .build());
+
+        std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+        }
+        // If color format surface isn't added to supported formats, there is no way to know
+        // when the color-format is configured to surface. This is necessary to be able to
+        // choose 10-bit format while decoding 10-bit clips in surface mode.
+        pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
+        // TODO: support more formats?
+        addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+                             .withDefault(new C2StreamPixelFormatInfo::output(
+                                     0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                             .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+                             .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+                             .build());
+    }
+
+    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
+                          C2P<C2StreamPictureSizeInfo::output>& me) {
+        (void)mayBlock;
+        C2R res = C2R::Ok();
+        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+            me.set().width = oldMe.v.width;
+        }
+        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+            me.set().height = oldMe.v.height;
+        }
+        return res;
+    }
+
+    static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
+                                    const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        // TODO: get max width/height from the size's field helpers vs.
+        // hardcoding
+        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
+        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
+        return C2R::Ok();
+    }
+
+    static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
+                                  const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
+        (void)mayBlock;
+        // assume compression ratio of 2, but enforce a floor
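+        // (3072 bytes per 64x64 block is half of the 6144 bytes an uncompressed
+        // 8-bit 4:2:0 64x64 block would occupy.)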
+        me.set().value =
+                c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
+                       kMinInputBufferSize);
+        return C2R::Ok();
+    }
+
+    static C2R DefaultColorAspectsSetter(bool mayBlock,
+                                         C2P<C2StreamColorAspectsTuning::output>& me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+                                  const C2P<C2StreamColorAspectsTuning::output>& def,
+                                  const C2P<C2StreamColorAspectsInfo::input>& coded) {
+        (void)mayBlock;
+        // take default values for all unspecified fields, and coded values for specified ones
+        me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+        me.set().primaries =
+                coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
+        me.set().transfer =
+                coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
+        me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+        return C2R::Ok();
+    }
+
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
+                                  const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        (void)size;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
+        return mDefaultColorAspects;
+    }
+
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() { return mColorAspects; }
+
+    static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
+        (void)mayBlock;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
+        (void)mayBlock;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    // unsafe getters
+    std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
+        return mPixelFormat;
+    }
+
+    static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
+        (void)mayBlock;
+        if (me.v.mastering.red.x > 1) {
+            me.set().mastering.red.x = 1;
+        }
+        if (me.v.mastering.red.y > 1) {
+            me.set().mastering.red.y = 1;
+        }
+        if (me.v.mastering.green.x > 1) {
+            me.set().mastering.green.x = 1;
+        }
+        if (me.v.mastering.green.y > 1) {
+            me.set().mastering.green.y = 1;
+        }
+        if (me.v.mastering.blue.x > 1) {
+            me.set().mastering.blue.x = 1;
+        }
+        if (me.v.mastering.blue.y > 1) {
+            me.set().mastering.blue.y = 1;
+        }
+        if (me.v.mastering.white.x > 1) {
+            me.set().mastering.white.x = 1;
+        }
+        if (me.v.mastering.white.y > 1) {
+            me.set().mastering.white.y = 1;
+        }
+        if (me.v.mastering.maxLuminance > 65535.0) {
+            me.set().mastering.maxLuminance = 65535.0;
+        }
+        if (me.v.mastering.minLuminance > 6.5535) {
+            me.set().mastering.minLuminance = 6.5535;
+        }
+        if (me.v.maxCll > 65535.0) {
+            me.set().maxCll = 65535.0;
+        }
+        if (me.v.maxFall > 65535.0) {
+            me.set().maxFall = 65535.0;
+        }
+        return C2R::Ok();
+    }
+
+  private:
+    std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+    std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+    std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+    std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+    std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+    std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+    std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
+    std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
+    std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
+};
+
+C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
+                               const std::shared_ptr<IntfImpl>& intfImpl)
+    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+      mIntf(intfImpl) {
+    mTimeStart = mTimeEnd = systemTime();
+}
+
+C2SoftDav1dDec::~C2SoftDav1dDec() {
+    onRelease();
+}
+
+c2_status_t C2SoftDav1dDec::onInit() {
+    return initDecoder() ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftDav1dDec::onStop() {
+    // TODO: b/277797541 - investigate if the decoder needs to be flushed.
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+    return C2_OK;
+}
+
+void C2SoftDav1dDec::onReset() {
+    (void)onStop();
+    c2_status_t err = onFlush_sm();
+    if (err != C2_OK) {
+        ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
+        destroyDecoder();
+        if (!initDecoder()) {
+            ALOGE("Hard reset failed.");
+        }
+    }
+}
+
+void C2SoftDav1dDec::flushDav1d() {
+    if (mDav1dCtx) {
+        Dav1dPicture p;
+
+        while (mDecodedPictures.size() > 0) {
+            p = mDecodedPictures.front();
+            mDecodedPictures.pop_front();
+
+            dav1d_picture_unref(&p);
+        }
+
+        int res = 0;
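+        // Drain any pictures still buffered inside dav1d and release them;
+        // DAV1D_ERR(EAGAIN) means the decoder has nothing left to output.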
+        while (true) {
+            memset(&p, 0, sizeof(p));
+
+            if ((res = dav1d_get_picture(mDav1dCtx, &p)) < 0) {
+                if (res != DAV1D_ERR(EAGAIN)) {
+                    ALOGE("Error decoding frame: %s\n", strerror(DAV1D_ERR(res)));
+                    break;
+                } else {
+                    res = 0;
+                    break;
+                }
+            } else {
+                dav1d_picture_unref(&p);
+            }
+        }
+
+        dav1d_flush(mDav1dCtx);
+    }
+}
+
+void C2SoftDav1dDec::onRelease() {
+    destroyDecoder();
+}
+
+c2_status_t C2SoftDav1dDec::onFlush_sm() {
+    flushDav1d();
+
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+
+    return C2_OK;
+}
+
+static int GetCPUCoreCount() {
+    int cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+    // _SC_NPROC_ONLN must be defined...
+    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+    CHECK(cpuCoreCount >= 1);
+    ALOGV("Number of CPU cores: %d", cpuCoreCount);
+    return cpuCoreCount;
+}
+
+bool C2SoftDav1dDec::initDecoder() {
+#ifdef FILE_DUMP_ENABLE
+    mC2SoftDav1dDump.initDumping();
+#endif
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+    mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        mPixelFormatInfo = mIntf->getPixelFormat_l();
+    }
+
+    const char* version = dav1d_version();
+
+    Dav1dSettings lib_settings;
+    dav1d_default_settings(&lib_settings);
+    int cpu_count = GetCPUCoreCount();
+    lib_settings.n_threads = std::max(cpu_count / 2, 1);  // use up to half the cores by default.
+
+    int32_t numThreads =
+            android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
+    if (numThreads > 0) lib_settings.n_threads = numThreads;
+
+    int res = 0;
+    if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
+        ALOGE("dav1d_open failed. status: %d.", res);
+        return false;
+    } else {
+        ALOGD("dav1d_open succeeded(n_threads=%d,version=%s).", lib_settings.n_threads, version);
+    }
+
+    return true;
+}
+
+void C2SoftDav1dDec::destroyDecoder() {
+    if (mDav1dCtx) {
+        Dav1dPicture p;
+        while (mDecodedPictures.size() > 0) {
+            memset(&p, 0, sizeof(p));
+            p = mDecodedPictures.front();
+            mDecodedPictures.pop_front();
+
+            dav1d_picture_unref(&p);
+        }
+
+        dav1d_close(&mDav1dCtx);
+        mDav1dCtx = nullptr;
+        mOutputBufferIndex = 0;
+        mInputBufferIndex = 0;
+    }
+#ifdef FILE_DUMP_ENABLE
+    mC2SoftDav1dDump.destroyDumping();
+#endif
+}
+
+void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+    uint32_t flags = 0;
+    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+        flags |= C2FrameData::FLAG_END_OF_STREAM;
+        ALOGV("signalling end_of_stream.");
+    }
+    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->workletsProcessed = 1u;
+}
+
+void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                                const std::shared_ptr<C2GraphicBlock>& block) {
+    std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        buffer->setInfo(mIntf->getColorAspects_l());
+    }
+    auto fillWork = [buffer, index](const std::unique_ptr<C2Work>& work) {
+        uint32_t flags = 0;
+        if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
+            (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
+            flags |= C2FrameData::FLAG_END_OF_STREAM;
+            ALOGV("signalling end_of_stream.");
+        }
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.buffers.push_back(buffer);
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+    };
+    if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+        fillWork(work);
+    } else {
+        finish(index, fillWork);
+    }
+}
+
+static void freeCallback(const uint8_t */*data*/, void */*cookie*/) {
+    return;
+}
+
+void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
+                             const std::shared_ptr<C2BlockPool>& pool) {
+    work->result = C2_OK;
+    work->workletsProcessed = 0u;
+    work->worklets.front()->output.configUpdate.clear();
+    work->worklets.front()->output.flags = work->input.flags;
+    if (mSignalledError || mSignalledOutputEos) {
+        work->result = C2_BAD_VALUE;
+        return;
+    }
+
+    size_t inOffset = 0u;
+    size_t inSize = 0u;
+    C2ReadView rView = mDummyReadView;
+    if (!work->input.buffers.empty()) {
+        rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+        inSize = rView.capacity();
+        if (inSize && rView.error()) {
+            ALOGE("read view map failed %d", rView.error());
+            work->result = C2_CORRUPTED;
+            return;
+        }
+    }
+
+    bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
+    bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+
+    if (codecConfig) {
+        fillEmptyWork(work);
+        return;
+    }
+
+    int64_t in_frameIndex = work->input.ordinal.frameIndex.peekll();
+    if (inSize) {
+        mInputBufferIndex = in_frameIndex;
+
+        uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);
+
+        mTimeStart = systemTime();
+        nsecs_t delay = mTimeStart - mTimeEnd;
+
+        // Send the bitstream data (inputBuffer) to dav1d.
+        if (mDav1dCtx) {
+            int i_ret = 0;
+
+            Dav1dSequenceHeader seq;
+            int res = dav1d_parse_sequence_header(&seq, bitstream, inSize);
+            if (res == 0) {
+                ALOGV("dav1d found a sequenceHeader (%dx%d) for in_frameIndex=%ld.", seq.max_width,
+                      seq.max_height, (long)in_frameIndex);
+            }
+
+            Dav1dData data;
+
+            res = dav1d_data_wrap(&data, bitstream, inSize, freeCallback, nullptr);
+            if (res != 0) {
+                ALOGE("Decoder wrap error %s!", strerror(DAV1D_ERR(res)));
+                i_ret = -1;
+            } else {
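+                // Carry the C2 frame index through dav1d in the data timestamp so the
+                // decoded picture (img.m.timestamp) can be matched back to its input work.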
+                data.m.timestamp = in_frameIndex;
+                // ALOGV("inSize=%ld, in_frameIndex=%ld, timestamp=%ld",
+                //       inSize, in_frameIndex, data.m.timestamp);
+
+                // Dump the bitstream data (inputBuffer) if dumping is enabled.
+#ifdef FILE_DUMP_ENABLE
+                mC2SoftDav1dDump.dumpInput(bitstream, inSize);
+#endif
+
+                bool b_draining = false;
+
+                do {
+                    res = dav1d_send_data(mDav1dCtx, &data);
+                    if (res < 0 && res != DAV1D_ERR(EAGAIN)) {
+                        ALOGE("Decoder feed error %s!", strerror(DAV1D_ERR(res)));
+                        /* bitstream decoding errors (typically DAV1D_ERR(EINVAL)) are assumed
+                         * to be recoverable. Other errors returned from this function are
+                         * either unexpected, or considered critical failures.
+                         */
+                        i_ret = res == DAV1D_ERR(EINVAL) ? 0 : -1;
+                        break;
+                    }
+
+                    bool b_output_error = false;
+
+                    do {
+                        Dav1dPicture img;
+                        memset(&img, 0, sizeof(img));
+
+                        res = dav1d_get_picture(mDav1dCtx, &img);
+                        if (res == 0) {
+                            mDecodedPictures.push_back(img);
+
+                            if (!end_of_stream) break;
+                        } else if (res == DAV1D_ERR(EAGAIN)) {
+                            /* the decoder needs more data to be able to output something.
+                             * if there is more data pending, continue the loop below or
+                             * otherwise break */
+                            if (data.sz != 0) res = 0;
+                            break;
+                        } else {
+                            ALOGE("warning! Decoder error %d!", res);
+                            b_output_error = true;
+                            break;
+                        }
+                    } while (res == 0);
+
+                    if (b_output_error) break;
+
+                    /* on drain, we must ignore the 1st EAGAIN */
+                    if (!b_draining && (res == DAV1D_ERR(EAGAIN) || res == 0) &&
+                        (end_of_stream)) {
+                        b_draining = true;
+                        res = 0;
+                    }
+                } while (res == 0 && ((data.sz != 0) || b_draining));
+
+                if (data.sz > 0) {
+                    ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
+                    dav1d_data_unref(&data);
+                }
+            }
+
+            mTimeEnd = systemTime();
+            nsecs_t decodeTime = mTimeEnd - mTimeStart;
+            // ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);
+
+            if (i_ret != 0) {
+                ALOGE("av1 decoder failed to decode frame. status: %d.", i_ret);
+                work->result = C2_CORRUPTED;
+                work->workletsProcessed = 1u;
+                mSignalledError = true;
+                return;
+            }
+        }
+    }
+
+    (void)outputBuffer(pool, work);
+
+    if (end_of_stream) {
+        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+        mSignalledOutputEos = true;
+    } else if (!inSize) {
+        fillEmptyWork(work);
+    }
+}
+
+void C2SoftDav1dDec::getHDRStaticParams(Dav1dPicture* picture,
+                                        const std::unique_ptr<C2Work>& work) {
+    C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
+    bool infoPresent = false;
+
+    if (picture != nullptr) {
+        if (picture->mastering_display != nullptr) {
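+            // In the AV1 metadata_hdr_mdcv payload the primaries and white point are
+            // 0.16 fixed point, max_luminance is 24.8 and min_luminance is 18.14 fixed
+            // point; the divisions below convert them to the float values C2 expects.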
+            hdrStaticMetadataInfo.mastering.red.x =
+                    picture->mastering_display->primaries[0][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.red.y =
+                    picture->mastering_display->primaries[0][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.green.x =
+                    picture->mastering_display->primaries[1][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.green.y =
+                    picture->mastering_display->primaries[1][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.blue.x =
+                    picture->mastering_display->primaries[2][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.blue.y =
+                    picture->mastering_display->primaries[2][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.white.x =
+                    picture->mastering_display->white_point[0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.white.y =
+                    picture->mastering_display->white_point[1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.maxLuminance =
+                    picture->mastering_display->max_luminance / 256.0;
+            hdrStaticMetadataInfo.mastering.minLuminance =
+                    picture->mastering_display->min_luminance / 16384.0;
+
+            infoPresent = true;
+        }
+
+        if (picture->content_light != nullptr) {
+            hdrStaticMetadataInfo.maxCll = picture->content_light->max_content_light_level;
+            hdrStaticMetadataInfo.maxFall = picture->content_light->max_frame_average_light_level;
+            infoPresent = true;
+        }
+    }
+
+    // if (infoPresent) {
+    //   ALOGD("received a hdrStaticMetadataInfo (mastering.red=%f,%f mastering.green=%f,%f
+    //   mastering.blue=%f,%f mastering.white=%f,%f mastering.maxLuminance=%f
+    //   mastering.minLuminance=%f maxCll=%f maxFall=%f) at mOutputBufferIndex=%d.",
+    //   hdrStaticMetadataInfo.mastering.red.x,hdrStaticMetadataInfo.mastering.red.y,
+    //   hdrStaticMetadataInfo.mastering.green.x,hdrStaticMetadataInfo.mastering.green.y,
+    //   hdrStaticMetadataInfo.mastering.blue.x,hdrStaticMetadataInfo.mastering.blue.y,
+    //   hdrStaticMetadataInfo.mastering.white.x,hdrStaticMetadataInfo.mastering.white.y,
+    //   hdrStaticMetadataInfo.mastering.maxLuminance,hdrStaticMetadataInfo.mastering.minLuminance,
+    //   hdrStaticMetadataInfo.maxCll,
+    //   hdrStaticMetadataInfo.maxFall,
+    //   mOutputBufferIndex);
+    // }
+
+    // config if static info has changed
+    if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
+        mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
+        work->worklets.front()->output.configUpdate.push_back(
+                C2Param::Copy(mHdrStaticMetadataInfo));
+    }
+}
+
+void C2SoftDav1dDec::getHDR10PlusInfoData(Dav1dPicture* picture,
+                                          const std::unique_ptr<C2Work>& work) {
+    if (picture != nullptr) {
+        if (picture->itut_t35 != nullptr) {
+            std::vector<uint8_t> payload;
+            size_t payloadSize = picture->itut_t35->payload_size;
+            if (payloadSize > 0) {
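+                // Reassemble the ITU-T T.35 message: country code, the extension byte
+                // when the country code is 0xFF, then the raw payload bytes.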
+                payload.push_back(picture->itut_t35->country_code);
+                if (picture->itut_t35->country_code == 0xFF) {
+                    payload.push_back(picture->itut_t35->country_code_extension_byte);
+                }
+                payload.insert(payload.end(), picture->itut_t35->payload,
+                               picture->itut_t35->payload + picture->itut_t35->payload_size);
+            }
+
+            std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
+                    C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
+            if (!hdr10PlusInfo) {
+                ALOGE("Hdr10PlusInfo allocation failed");
+                mSignalledError = true;
+                work->result = C2_NO_MEMORY;
+                return;
+            }
+            memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());
+
+            // ALOGD("Received a hdr10PlusInfo from picture->itut_t32
+            // (payload_size=%ld,country_code=%d) at mOutputBufferIndex=%d.",
+            // picture->itut_t35->payload_size,
+            // picture->itut_t35->country_code,
+            // mOutputBufferIndex);
+
+            // config if hdr10Plus info has changed
+            if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
+                mHdr10PlusInfo = std::move(hdr10PlusInfo);
+                work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
+            }
+        }
+    }
+}
+
+void C2SoftDav1dDec::getVuiParams(Dav1dPicture* picture) {
+    VuiColorAspects vuiColorAspects;
+
+    if (picture) {
+        vuiColorAspects.primaries = picture->seq_hdr->pri;
+        vuiColorAspects.transfer = picture->seq_hdr->trc;
+        vuiColorAspects.coeffs = picture->seq_hdr->mtrx;
+        vuiColorAspects.fullRange = picture->seq_hdr->color_range;
+
+        // ALOGD("Received a vuiColorAspects from dav1d
+        //       (primaries = % d, transfer = % d, coeffs = % d, fullRange = % d)
+        //               at mOutputBufferIndex = % d,
+        //       out_frameIndex = % ld.",
+        //                          vuiColorAspects.primaries,
+        //       vuiColorAspects.transfer, vuiColorAspects.coeffs, vuiColorAspects.fullRange,
+        //       mOutputBufferIndex, picture->m.timestamp);
+    }
+
+    // convert vui aspects to C2 values if changed
+    if (!(vuiColorAspects == mBitstreamColorAspects)) {
+        mBitstreamColorAspects = vuiColorAspects;
+        ColorAspects sfAspects;
+        C2StreamColorAspectsInfo::input codedAspects = {0u};
+        ColorUtils::convertIsoColorAspectsToCodecAspects(
+                vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+                vuiColorAspects.fullRange, sfAspects);
+        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+        }
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+    }
+}
+
+void C2SoftDav1dDec::setError(const std::unique_ptr<C2Work>& work, c2_status_t error) {
+    mSignalledError = true;
+    work->result = error;
+    work->workletsProcessed = 1u;
+}
+
+bool C2SoftDav1dDec::allocTmpFrameBuffer(size_t size) {
+    if (size > mTmpFrameBufferSize) {
+        mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
+        if (mTmpFrameBuffer == nullptr) {
+            mTmpFrameBufferSize = 0;
+            return false;
+        }
+        mTmpFrameBufferSize = size;
+    }
+    return true;
+}
+
+bool C2SoftDav1dDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                                  const std::unique_ptr<C2Work>& work) {
+    if (!(work && pool)) return false;
+    if (mDav1dCtx == nullptr) return false;
+
+    // Get a decoded picture from dav1d if it is enabled.
+    Dav1dPicture img;
+    memset(&img, 0, sizeof(img));
+
+    int res = 0;
+    if (mDecodedPictures.size() > 0) {
+        img = mDecodedPictures.front();
+        mDecodedPictures.pop_front();
+        // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from the deque for
+        // outputBuffer.",img.m.timestamp,img.m.timestamp);
+    } else {
+        res = dav1d_get_picture(mDav1dCtx, &img);
+        if (res == 0) {
+            // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from dav1d for
+            // outputBuffer.",img.m.timestamp,img.m.timestamp);
+        } else {
+            ALOGE("failed to get a picture from dav1d for outputBuffer.");
+        }
+    }
+
+    if (res == DAV1D_ERR(EAGAIN)) {
+        ALOGD("Not enough data to output a picture.");
+        return false;
+    }
+    if (res != 0) {
+        ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
+        return false;
+    }
+
+    const int width = img.p.w;
+    const int height = img.p.h;
+    if (width != mWidth || height != mHeight) {
+        mWidth = width;
+        mHeight = height;
+
+        C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
+        } else {
+            ALOGE("Config update size failed");
+            mSignalledError = true;
+            work->result = C2_CORRUPTED;
+            work->workletsProcessed = 1u;
+            return false;
+        }
+    }
+
+    getVuiParams(&img);
+    getHDRStaticParams(&img, work);
+    getHDR10PlusInfoData(&img, work);
+
+    // out_frameIndex that the decoded picture returns from dav1d.
+    int64_t out_frameIndex = img.m.timestamp;
+
+    const bool isMonochrome = img.p.layout == DAV1D_PIXEL_LAYOUT_I400;
+
+    int bitdepth = img.p.bpc;
+
+    std::shared_ptr<C2GraphicBlock> block;
+    uint32_t format = HAL_PIXEL_FORMAT_YV12;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
+    if (bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+        IntfImpl::Lock lock = mIntf->lock();
+        codedColorAspects = mIntf->getColorAspects_l();
+        bool allowRGBA1010102 = false;
+        if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+            codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+            codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+            allowRGBA1010102 = true;
+        }
+        format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+    }
+
+    if (mHalPixelFormat != format) {
+        C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
+        } else {
+            ALOGE("Config update pixelFormat failed");
+            mSignalledError = true;
+            work->workletsProcessed = 1u;
+            work->result = C2_CORRUPTED;
+            return false;
+        }
+        mHalPixelFormat = format;
+    }
+
+    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+    // We always create a graphic block that is width aligned to 16 and height
+    // aligned to 2. We set the correct "crop" value of the image in the call to
+    // createGraphicBuffer() by setting the correct image dimensions.
+    c2_status_t err =
+            pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 2), format, usage, &block);
+
+    if (err != C2_OK) {
+        ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+        work->result = err;
+        return false;
+    }
+
+    C2GraphicView wView = block->map().get();
+
+    if (wView.error()) {
+        ALOGE("graphic view map failed %d", wView.error());
+        work->result = C2_CORRUPTED;
+        return false;
+    }
+
+    // ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
+    //       block->height(), mWidth, mHeight, (int)out_frameIndex);
+
+    mOutputBufferIndex = out_frameIndex;
+
+    uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
+    uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
+
+    C2PlanarLayout layout = wView.layout();
+    size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+    size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+    CONV_FORMAT_T convFormat;
+    switch (img.p.layout) {
+        case DAV1D_PIXEL_LAYOUT_I444:
+            convFormat = CONV_FORMAT_I444;
+            break;
+        case DAV1D_PIXEL_LAYOUT_I422:
+            convFormat = CONV_FORMAT_I422;
+            break;
+        default:
+            convFormat = CONV_FORMAT_I420;
+            break;
+    }
+
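+    // 10-bit pictures are converted to RGBA1010102, P010, or 8-bit YV12 depending on the
+    // negotiated HAL pixel format; 8-bit pictures always go to YV12.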
+    if (bitdepth == 10) {
+        // TODO: b/277797541 - Investigate if we can ask DAV1D to output the required format during
+        // decompression to avoid color conversion.
+        const uint16_t* srcY = (const uint16_t*)img.data[0];
+        const uint16_t* srcU = (const uint16_t*)img.data[1];
+        const uint16_t* srcV = (const uint16_t*)img.data[2];
+        size_t srcYStride = img.stride[0] / 2;
+        size_t srcUStride = img.stride[1] / 2;
+        size_t srcVStride = img.stride[1] / 2;
+
+        if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+            if (isMonochrome) {
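+                // Monochrome input has no chroma planes: point U/V at a single zero-stride
+                // line filled with the 10-bit neutral chroma value (512).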
+                const size_t tmpSize = mWidth;
+                const bool needFill = tmpSize > mTmpFrameBufferSize;
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+                srcU = srcV = mTmpFrameBuffer.get();
+                srcUStride = srcVStride = 0;
+                if (needFill) {
+                    std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
+                }
+            }
+            convertPlanar16ToY410OrRGBA1010102(
+                    dstY, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                    dstYStride, mWidth, mHeight,
+                    std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects),
+                    convFormat);
+        } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+            dstYStride /= 2;
+            dstUStride /= 2;
+            dstVStride /= 2;
+            size_t tmpSize = 0;
+            if ((img.p.layout == DAV1D_PIXEL_LAYOUT_I444) ||
+                (img.p.layout == DAV1D_PIXEL_LAYOUT_I422)) {
+                tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+            }
+            convertPlanar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV, srcYStride,
+                                  srcUStride, srcVStride, dstYStride, dstUStride, dstVStride,
+                                  mWidth, mHeight, isMonochrome, convFormat, mTmpFrameBuffer.get(),
+                                  tmpSize);
+        } else {
+            size_t tmpSize = 0;
+            if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+                tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+            }
+            convertPlanar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                  srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
+                                  isMonochrome, convFormat, mTmpFrameBuffer.get(), tmpSize);
+        }
+
+        // if(mOutputBufferIndex % 100 == 0)
+        ALOGV("output a 10bit picture %dx%d from dav1d "
+              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
+              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
+
+        // Dump the output buffer if dumping is enabled (debug only).
+#ifdef FILE_DUMP_ENABLE
+        mC2SoftDav1dDump.dumpOutput<uint16_t>(srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                                              mWidth, mHeight);
+#endif
+    } else {
+        const uint8_t* srcY = (const uint8_t*)img.data[0];
+        const uint8_t* srcU = (const uint8_t*)img.data[1];
+        const uint8_t* srcV = (const uint8_t*)img.data[2];
+
+        size_t srcYStride = img.stride[0];
+        size_t srcUStride = img.stride[1];
+        size_t srcVStride = img.stride[1];
+
+        // if(mOutputBufferIndex % 100 == 0)
+        ALOGV("output a 8bit picture %dx%d from dav1d "
+              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
+              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
+
+        // Dump the output buffer if dumping is enabled (debug only).
+#ifdef FILE_DUMP_ENABLE
+        mC2SoftDav1dDump.dumpOutput<uint8_t>(srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                                             mWidth, mHeight);
+#endif
+        convertPlanar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                             dstYStride, dstUStride, dstVStride, mWidth, mHeight, isMonochrome,
+                             convFormat);
+    }
+
+    dav1d_picture_unref(&img);
+
+    finishWork(out_frameIndex, work, std::move(block));
+    block = nullptr;
+    return true;
+}
+
+c2_status_t C2SoftDav1dDec::drainInternal(uint32_t drainMode,
+                                          const std::shared_ptr<C2BlockPool>& pool,
+                                          const std::unique_ptr<C2Work>& work) {
+    if (drainMode == NO_DRAIN) {
+        ALOGW("drain with NO_DRAIN: no-op");
+        return C2_OK;
+    }
+    if (drainMode == DRAIN_CHAIN) {
+        ALOGW("DRAIN_CHAIN not supported");
+        return C2_OMITTED;
+    }
+
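+    // Drain every decoded picture that is still pending; outputBuffer() returns false once
+    // dav1d has nothing left to output.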
+    while (outputBuffer(pool, work)) {
+    }
+
+    if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
+        fillEmptyWork(work);
+    }
+
+    return C2_OK;
+}
+
+c2_status_t C2SoftDav1dDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+    return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftDav1dFactory : public C2ComponentFactory {
+  public:
+    C2SoftDav1dFactory()
+        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+                  GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+    virtual c2_status_t createComponent(c2_node_id_t id,
+                                        std::shared_ptr<C2Component>* const component,
+                                        std::function<void(C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(
+                new C2SoftDav1dDec(COMPONENT_NAME, id,
+                                   std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            std::function<void(C2ComponentInterface*)> deleter) override {
+        *interface = std::shared_ptr<C2ComponentInterface>(
+                new SimpleInterface<C2SoftDav1dDec::IntfImpl>(
+                        COMPONENT_NAME, id, std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual ~C2SoftDav1dFactory() override = default;
+
+  private:
+    std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+}  // namespace android
+
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+    ALOGV("in %s", __func__);
+    return new ::android::C2SoftDav1dFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+        ::C2ComponentFactory* factory) {
+    ALOGV("in %s", __func__);
+    delete factory;
+}
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.h b/media/codec2/components/dav1d/C2SoftDav1dDec.h
new file mode 100644
index 0000000..e3d2a93
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_DAV1D_DEC_H_
+#define ANDROID_C2_SOFT_DAV1D_DEC_H_
+
+#include <inttypes.h>
+
+#include <memory>
+
+#include <media/stagefright/foundation/ColorUtils.h>
+
+#include <C2Config.h>
+#include <SimpleC2Component.h>
+
+#include <dav1d/dav1d.h>
+#include <deque>
+#include <C2SoftDav1dDump.h>
+
+//#define FILE_DUMP_ENABLE 1
+
+namespace android {
+
+struct C2SoftDav1dDec : public SimpleC2Component {
+    class IntfImpl;
+
+    C2SoftDav1dDec(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    ~C2SoftDav1dDec();
+
+    // Begin SimpleC2Component overrides.
+    c2_status_t onInit() override;
+    c2_status_t onStop() override;
+    void onReset() override;
+    void onRelease() override;
+    c2_status_t onFlush_sm() override;
+    void process(const std::unique_ptr<C2Work>& work,
+                 const std::shared_ptr<C2BlockPool>& pool) override;
+    c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override;
+    // End SimpleC2Component overrides.
+
+  private:
+    std::shared_ptr<IntfImpl> mIntf;
+
+    int mInputBufferIndex = 0;
+    int mOutputBufferIndex = 0;
+
+    Dav1dContext* mDav1dCtx = nullptr;
+    std::deque<Dav1dPicture> mDecodedPictures;
+
+    // configurations used by component in process
+    // (TODO: keep this in intf but make them internal only)
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
+    uint32_t mHalPixelFormat;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    bool mSignalledOutputEos;
+    bool mSignalledError;
+    // Used during 10-bit I444/I422 to 10-bit P010 & 8-bit I420 conversions.
+    std::unique_ptr<uint16_t[]> mTmpFrameBuffer;
+    size_t mTmpFrameBufferSize = 0;
+
+    C2StreamHdrStaticMetadataInfo::output mHdrStaticMetadataInfo;
+    std::unique_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfo = nullptr;
+
+    // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
+    // converting them to C2 values for each frame
+    struct VuiColorAspects {
+        uint8_t primaries;
+        uint8_t transfer;
+        uint8_t coeffs;
+        uint8_t fullRange;
+
+        // default color aspects
+        VuiColorAspects()
+            : primaries(C2Color::PRIMARIES_UNSPECIFIED),
+              transfer(C2Color::TRANSFER_UNSPECIFIED),
+              coeffs(C2Color::MATRIX_UNSPECIFIED),
+              fullRange(C2Color::RANGE_UNSPECIFIED) {}
+
+        bool operator==(const VuiColorAspects& o) {
+            return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs &&
+                   fullRange == o.fullRange;
+        }
+    } mBitstreamColorAspects;
+
+    nsecs_t mTimeStart = 0;  // Time at the start of decode()
+    nsecs_t mTimeEnd = 0;    // Time at the end of decode()
+
+    bool initDecoder();
+    void getHDRStaticParams(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getHDR10PlusInfoData(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getVuiParams(Dav1dPicture* picture);
+    void destroyDecoder();
+    void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                    const std::shared_ptr<C2GraphicBlock>& block);
+    // Sets |work->result| and mSignalledError.
+    void setError(const std::unique_ptr<C2Work>& work, c2_status_t error);
+    bool allocTmpFrameBuffer(size_t size);
+    bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                      const std::unique_ptr<C2Work>& work);
+
+    c2_status_t drainInternal(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool,
+                              const std::unique_ptr<C2Work>& work);
+
+    void flushDav1d();
+
+#ifdef FILE_DUMP_ENABLE
+    C2SoftDav1dDump mC2SoftDav1dDump;
+#endif
+
+    C2_DO_NOT_COPY(C2SoftDav1dDec);
+};
+
+}  // namespace android
+
+#endif  // ANDROID_C2_SOFT_DAV1D_DEC_H_
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDump.cpp b/media/codec2/components/dav1d/C2SoftDav1dDump.cpp
new file mode 100644
index 0000000..ec8d6cd
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDump.cpp
@@ -0,0 +1,191 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftDav1dDump"
+#include "C2SoftDav1dDump.h"
+
+namespace android {
+
+// Flag to enable dumping the bitstream and the decoded pictures to files.
+static const bool ENABLE_DUMPING_FILES_DEFAULT = true;
+static const char ENABLE_DUMPING_FILES_PROPERTY[] = "debug.dav1d.enabledumping";
+
+// The number of frames to dump to a file
+static const int NUM_FRAMES_TO_DUMP_DEFAULT = INT_MAX;
+static const char NUM_FRAMES_TO_DUMP_PROPERTY[] = "debug.dav1d.numframestodump";
+
+// start dumping from this frame
+static const int STARTING_FRAME_TO_DUMP_DEFAULT = 0;
+static const char STARTING_FRAME_TO_DUMP_PROPERTY[] = "debug.dav1d.startingframetodump";
+
+void C2SoftDav1dDump::initDumping() {
+    nsecs_t now = systemTime();
+    snprintf(mInDataFileName, kFileNameLength, "%s_%" PRId64 ".%s", DUMP_FILE_PATH, now,
+             INPUT_DATA_DUMP_EXT);
+    snprintf(mInSizeFileName, kFileNameLength, "%s_%" PRId64 ".%s", DUMP_FILE_PATH, now,
+             INPUT_SIZE_DUMP_EXT);
+    snprintf(mDav1dOutYuvFileName, kFileNameLength, "%s_%" PRId64 "x.%s", DUMP_FILE_PATH, now,
+             OUTPUT_YUV_DUMP_EXT);
+
+    mFramesToDump =
+            android::base::GetIntProperty(NUM_FRAMES_TO_DUMP_PROPERTY, NUM_FRAMES_TO_DUMP_DEFAULT);
+    mFirstFrameToDump = android::base::GetIntProperty(STARTING_FRAME_TO_DUMP_PROPERTY,
+                                                      STARTING_FRAME_TO_DUMP_DEFAULT);
+    bool enableDumping = android::base::GetBoolProperty(ENABLE_DUMPING_FILES_PROPERTY,
+                                                        ENABLE_DUMPING_FILES_DEFAULT);
+    ALOGD("enableDumping = %d, mFramesToDump = %d", enableDumping, mFramesToDump);
+
+    if (enableDumping) {
+        mInDataFile = fopen(mInDataFileName, "wb");
+        if (mInDataFile == nullptr) {
+            ALOGD("Could not open file %s", mInDataFileName);
+        }
+
+        mInSizeFile = fopen(mInSizeFileName, "wb");
+        if (mInSizeFile == nullptr) {
+            ALOGD("Could not open file %s", mInSizeFileName);
+        }
+
+        mDav1dOutYuvFile = fopen(mDav1dOutYuvFileName, "wb");
+        if (mDav1dOutYuvFile == nullptr) {
+            ALOGD("Could not open file %s", mDav1dOutYuvFileName);
+        }
+    }
+}
+
+void C2SoftDav1dDump::destroyDumping() {
+    if (mInDataFile != nullptr) {
+        fclose(mInDataFile);
+        mInDataFile = nullptr;
+    }
+
+    if (mInSizeFile != nullptr) {
+        fclose(mInSizeFile);
+        mInSizeFile = nullptr;
+    }
+
+    if (mDav1dOutYuvFile != nullptr) {
+        fclose(mDav1dOutYuvFile);
+        mDav1dOutYuvFile = nullptr;
+    }
+}
+
+void C2SoftDav1dDump::dumpInput(uint8_t* ptr, int size) {
+    if (mInDataFile) {
+        int ret = fwrite(ptr, 1, size, mInDataFile);
+
+        if (ret != size) {
+            ALOGE("Error in fwrite %s, requested %d, returned %d", mInDataFileName, size, ret);
+        }
+    }
+
+    // Dump the size per inputBuffer if dumping is enabled.
+    if (mInSizeFile) {
+        int ret = fwrite(&size, 1, 4, mInSizeFile);
+
+        if (ret != 4) {
+            ALOGE("Error in fwrite %s, requested %d, returned %d", mInSizeFileName, 4, ret);
+        }
+    }
+}
+
+template <typename T>
+void C2SoftDav1dDump::dumpOutput(const T* srcY, const T* srcU, const T* srcV, size_t srcYStride,
+                                 size_t srcUStride, size_t srcVStride, int width, int height) {
+    mOutputCount++;
+    FILE* fp_out = mDav1dOutYuvFile;
+    int typeSize = sizeof(T);
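+    // Only write frames whose output index falls inside the configured dump window.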
+    if (fp_out && mOutputCount >= mFirstFrameToDump &&
+        mOutputCount <= (mFirstFrameToDump + mFramesToDump - 1)) {
+        for (int i = 0; i < height; i++) {
+            int ret =
+                    fwrite((uint8_t*)srcY + i * srcYStride * typeSize, 1, width * typeSize, fp_out);
+            if (ret != width * typeSize) {
+                ALOGE("Error in fwrite, requested %d, returned %d", width * typeSize, ret);
+                break;
+            }
+        }
+
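+        // Chroma planes are dumped assuming 4:2:0 subsampling (half width, half height).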
+        for (int i = 0; i < height / 2; i++) {
+            int ret = fwrite((uint8_t*)srcU + i * srcUStride * typeSize, 1, width * typeSize / 2,
+                             fp_out);
+            if (ret != width * typeSize / 2) {
+                ALOGE("Error in fwrite, requested %d, returned %d", width * typeSize / 2, ret);
+                break;
+            }
+        }
+
+        for (int i = 0; i < height / 2; i++) {
+            int ret = fwrite((uint8_t*)srcV + i * srcVStride * typeSize, 1, width * typeSize / 2,
+                             fp_out);
+            if (ret != width * typeSize / 2) {
+                ALOGE("Error in fwrite, requested %d, returned %d", width * typeSize / 2, ret);
+                break;
+            }
+        }
+    }
+}
+
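+// Writes one dav1d picture to the YUV dump file as tightly packed planes (stride padding is
+// not written).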
+void C2SoftDav1dDump::writeDav1dOutYuvFile(const Dav1dPicture& p) {
+    if (mDav1dOutYuvFile != NULL) {
+        uint8_t* ptr;
+        const int hbd = p.p.bpc > 8;
+
+        ptr = (uint8_t*)p.data[0];
+        for (int y = 0; y < p.p.h; y++) {
+            int iSize = p.p.w << hbd;
+            int ret = fwrite(ptr, 1, iSize, mDav1dOutYuvFile);
+            if (ret != iSize) {
+                ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName, iSize,
+                      ret);
+                break;
+            }
+
+            ptr += p.stride[0];
+        }
+
+        if (p.p.layout != DAV1D_PIXEL_LAYOUT_I400) {
+            // u/v
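+            // Chroma subsampling shifts derived from the layout: I420 halves both axes,
+            // I422 only the horizontal axis, I444 neither.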
+            const int ss_ver = p.p.layout == DAV1D_PIXEL_LAYOUT_I420;
+            const int ss_hor = p.p.layout != DAV1D_PIXEL_LAYOUT_I444;
+            const int cw = (p.p.w + ss_hor) >> ss_hor;
+            const int ch = (p.p.h + ss_ver) >> ss_ver;
+            for (int pl = 1; pl <= 2; pl++) {
+                ptr = (uint8_t*)p.data[pl];
+                for (int y = 0; y < ch; y++) {
+                    int iSize = cw << hbd;
+                    int ret = fwrite(ptr, 1, cw << hbd, mDav1dOutYuvFile);
+                    if (ret != iSize) {
+                        ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName,
+                              iSize, ret);
+                        break;
+                    }
+                    ptr += p.stride[1];
+                }
+            }
+        }
+    }
+}
+
+template void C2SoftDav1dDump::dumpOutput<uint8_t>(const uint8_t* srcY, const uint8_t* srcU,
+                                                   const uint8_t* srcV, size_t srcYStride,
+                                                   size_t srcUStride, size_t srcVStride, int width,
+                                                   int height);
+template void C2SoftDav1dDump::dumpOutput<uint16_t>(const uint16_t* srcY, const uint16_t* srcU,
+                                                    const uint16_t* srcV, size_t srcYStride,
+                                                    size_t srcUStride, size_t srcVStride, int width,
+                                                    int height);
+}  // namespace android
\ No newline at end of file
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDump.h b/media/codec2/components/dav1d/C2SoftDav1dDump.h
new file mode 100644
index 0000000..ea7a48a
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDump.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android-base/properties.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <dav1d/dav1d.h>
+
+#define DUMP_FILE_PATH "/data/local/tmp/dump"
+#define INPUT_DATA_DUMP_EXT "av1"
+#define INPUT_SIZE_DUMP_EXT "size"
+#define OUTPUT_YUV_DUMP_EXT "yuv"
+
+namespace android {
+constexpr size_t kFileNameLength = 256;
+
+class C2SoftDav1dDump {
+  public:
+    void initDumping();
+    void destroyDumping();
+    void dumpInput(uint8_t* ptr, int new_size);
+    template <typename T>
+    void dumpOutput(const T* srcY, const T* srcU, const T* srcV, size_t srcYStride,
+                    size_t srcUStride, size_t srcVStride, int width, int height);
+    void writeDav1dOutYuvFile(const Dav1dPicture& p);
+
+  private:
+    int mFramesToDump = 0;
+    int mFirstFrameToDump = 0;
+    int mOutputCount = 0;
+
+    char mInDataFileName[kFileNameLength];
+    char mInSizeFileName[kFileNameLength];
+    char mDav1dOutYuvFileName[kFileNameLength];
+
+    FILE* mInDataFile = nullptr;
+    FILE* mInSizeFile = nullptr;
+    FILE* mDav1dOutYuvFile = nullptr;
+};
+}  // namespace android
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 3e4247b..f056759 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "C2SoftGav1Dec"
 #include "C2SoftGav1Dec.h"
 
+#include <android-base/properties.h>
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 #include <Codec2BufferUtils.h>
@@ -39,6 +40,9 @@
 
 namespace android {
 
+// Property used to control the number of threads used in the gav1 decoder.
+constexpr char kNumThreadsProperty[] = "debug.c2.gav1.numthreads";
+
 // codecname set and passed in as a compile flag from Android.bp
 constexpr char COMPONENT_NAME[] = CODECNAME;
 
@@ -506,6 +510,10 @@
 
   libgav1::DecoderSettings settings = {};
   settings.threads = GetCPUCoreCount();
+  int32_t numThreads = android::base::GetIntProperty(kNumThreadsProperty, 0);
+  if (numThreads > 0 && numThreads < settings.threads) {
+    settings.threads = numThreads;
+  }
 
   ALOGV("Using libgav1 AV1 software decoder.");
   Libgav1StatusCode status = mCodecCtx->Init(&settings);
diff --git a/media/codec2/hal/aidl/Android.bp b/media/codec2/hal/aidl/Android.bp
index a6a6b77..73cae93 100644
--- a/media/codec2/hal/aidl/Android.bp
+++ b/media/codec2/hal/aidl/Android.bp
@@ -36,6 +36,7 @@
     ],
 
     static_libs: [
+        "libPlatformProperties",
         "libaidlcommonsupport",
     ],
 
@@ -97,6 +98,7 @@
     ],
 
     static_libs: [
+        "libPlatformProperties",
         "libaidlcommonsupport",
     ],
 
@@ -163,18 +165,15 @@
     defaults: [
         "libcodec2-aidl-defaults",
         "service_fuzzer_defaults",
+        "libcodec2-runtime-libs",
     ],
     shared_libs: [
         "libcodec2_vndk",
-
-        "libcodec2_soft_avcdec",
-        "libcodec2_soft_avcenc",
-        "libcodec2_soft_aacdec",
-        "libcodec2_soft_aacenc",
     ],
     fuzz_config: {
         cc: [
             "wonsik@google.com",
         ],
+        triage_assignee: "waghpawan@google.com",
     },
 }
diff --git a/media/codec2/hal/aidl/Component.cpp b/media/codec2/hal/aidl/Component.cpp
index 2e0859b..4f5b899 100644
--- a/media/codec2/hal/aidl/Component.cpp
+++ b/media/codec2/hal/aidl/Component.cpp
@@ -292,7 +292,7 @@
     static constexpr IComponent::BlockPoolAllocator::Tag ALLOCATOR_ID =
         IComponent::BlockPoolAllocator::allocatorId;
     static constexpr IComponent::BlockPoolAllocator::Tag IGBA =
-        IComponent::BlockPoolAllocator::igba;
+        IComponent::BlockPoolAllocator::allocator;
     c2_status_t status = C2_OK;
     switch (allocator.getTag()) {
         case ALLOCATOR_ID:
@@ -411,7 +411,7 @@
 
         mDeathRecipient = ::ndk::ScopedAIBinder_DeathRecipient(
                 AIBinder_DeathRecipient_new(OnBinderDied));
-        mDeathContext = new DeathContext{weak_from_this()};
+        mDeathContext = new DeathContext{ref<Component>()};
         AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(), OnBinderUnlinked);
         AIBinder_linkToDeath(mListener->asBinder().get(), mDeathRecipient.get(), mDeathContext);
     } else {
diff --git a/media/codec2/hal/aidl/ComponentStore.cpp b/media/codec2/hal/aidl/ComponentStore.cpp
index 2489683..58407d1 100644
--- a/media/codec2/hal/aidl/ComponentStore.cpp
+++ b/media/codec2/hal/aidl/ComponentStore.cpp
@@ -216,7 +216,7 @@
 #endif
         onInterfaceLoaded(c2component->intf());
         std::shared_ptr<Component> comp =
-            SharedRefBase::make<Component>(c2component, listener, shared_from_this(), pool);
+            SharedRefBase::make<Component>(c2component, listener, ref<ComponentStore>(), pool);
         *component = comp;
         if (!component) {
             status = C2_CORRUPTED;
diff --git a/media/codec2/hal/aidl/ParamTypes.cpp b/media/codec2/hal/aidl/ParamTypes.cpp
index 7026f4c..41e6f50 100644
--- a/media/codec2/hal/aidl/ParamTypes.cpp
+++ b/media/codec2/hal/aidl/ParamTypes.cpp
@@ -18,6 +18,8 @@
 #define LOG_TAG "Codec2-AIDL-ParamTypes"
 #include <android-base/logging.h>
 
+#include <android/binder_manager.h>
+#include <android/sysprop/MediaProperties.sysprop.h>
 #include <codec2/aidl/ParamTypes.h>
 #include <codec2/common/ParamTypes.h>
 
@@ -157,8 +159,30 @@
 namespace c2 {
 namespace utils {
 
-// TODO: read it from aconfig flags
-bool IsEnabled() { return false; }
+bool IsSelected() {
+    // TODO: read from aconfig flags
+    const bool enabled = false;
+
+    if (!enabled) {
+        // Cannot select AIDL if not enabled
+        return false;
+    }
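+    // Select between the AIDL and HIDL Codec2 HALs based on the codec2_hal_selection
+    // sysprop; HIDL is used when the property is not set.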
+    using ::android::sysprop::MediaProperties::codec2_hal_selection;
+    using ::android::sysprop::MediaProperties::codec2_hal_selection_values;
+    constexpr codec2_hal_selection_values AIDL = codec2_hal_selection_values::AIDL;
+    constexpr codec2_hal_selection_values HIDL = codec2_hal_selection_values::HIDL;
+    codec2_hal_selection_values selection = codec2_hal_selection().value_or(HIDL);
+    switch (selection) {
+    case AIDL:
+        return true;
+    case HIDL:
+        return false;
+    default:
+        LOG(FATAL) << "Unexpected codec2 HAL selection value: " << (int)selection;
+    }
+
+    return false;
+}
 
 const char* asString(Status status, const char* def) {
     return asString(static_cast<c2_status_t>(status.status), def);
diff --git a/media/codec2/hal/aidl/fuzzer.cpp b/media/codec2/hal/aidl/fuzzer.cpp
index c1a2762..111ef26 100644
--- a/media/codec2/hal/aidl/fuzzer.cpp
+++ b/media/codec2/hal/aidl/fuzzer.cpp
@@ -29,6 +29,7 @@
     std::shared_ptr<C2ComponentStore> store = ::android::GetCodec2PlatformComponentStore();
     std::shared_ptr<ComponentStore> binder = SharedRefBase::make<ComponentStore>(store);
 
+    signal(SIGPIPE, SIG_IGN);
     fuzzService(binder->asBinder().get(), FuzzedDataProvider(data, size));
 
     return 0;
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/Component.h b/media/codec2/hal/aidl/include/codec2/aidl/Component.h
index d5ea92b..4a090e9 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/Component.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/Component.h
@@ -46,8 +46,7 @@
 
 struct ComponentStore;
 
-struct Component : public BnComponent,
-                   public std::enable_shared_from_this<Component> {
+struct Component : public BnComponent {
     Component(
             const std::shared_ptr<C2Component>&,
             const std::shared_ptr<IComponentListener>& listener,
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
index 7fc5d2f..b3c97d5 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
@@ -50,8 +50,7 @@
 
 using ::aidl::android::hardware::media::bufferpool2::IClientManager;
 
-struct ComponentStore : public BnComponentStore,
-                        public std::enable_shared_from_this<ComponentStore> {
+struct ComponentStore : public BnComponentStore {
     ComponentStore(const std::shared_ptr<C2ComponentStore>& store);
     virtual ~ComponentStore();
 
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ParamTypes.h b/media/codec2/hal/aidl/include/codec2/aidl/ParamTypes.h
index 3f82ee3..7c31a06 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ParamTypes.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ParamTypes.h
@@ -37,8 +37,8 @@
 namespace c2 {
 namespace utils {
 
-// Returns true iff AIDL c2 HAL is enabled
-bool IsEnabled();
+// Returns true iff AIDL c2 HAL is selected for the system
+bool IsSelected();
 
 // Make asString() and operator<< work with Status as well as c2_status_t.
 C2_DECLARE_AS_STRING_AND_DEFINE_STREAM_OUT(Status);
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index e3f8b1c..ab6505e 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -1438,35 +1438,35 @@
 std::vector<std::string> Codec2Client::CacheServiceNames() {
     std::vector<std::string> names;
 
-    if (c2_aidl::utils::IsEnabled()) {
+    if (c2_aidl::utils::IsSelected()) {
         // Get AIDL service names
         AServiceManager_forEachDeclaredInstance(
                 AidlBase::descriptor, &names, [](const char *name, void *context) {
                     std::vector<std::string> *names = (std::vector<std::string> *)context;
                     names->emplace_back(name);
                 });
-    }
+    } else {
+        // Get HIDL service names
+        using ::android::hardware::media::c2::V1_0::IComponentStore;
+        using ::android::hidl::manager::V1_2::IServiceManager;
+        while (true) {
+            sp<IServiceManager> serviceManager = IServiceManager::getService();
+            CHECK(serviceManager) << "Hardware service manager is not running.";
 
-    // Get HIDL service names
-    using ::android::hardware::media::c2::V1_0::IComponentStore;
-    using ::android::hidl::manager::V1_2::IServiceManager;
-    while (true) {
-        sp<IServiceManager> serviceManager = IServiceManager::getService();
-        CHECK(serviceManager) << "Hardware service manager is not running.";
-
-        Return<void> transResult;
-        transResult = serviceManager->listManifestByInterface(
-                IComponentStore::descriptor,
-                [&names](
-                        hidl_vec<hidl_string> const& instanceNames) {
-                    names.insert(names.end(), instanceNames.begin(), instanceNames.end());
-                });
-        if (transResult.isOk()) {
-            break;
+            Return<void> transResult;
+            transResult = serviceManager->listManifestByInterface(
+                    IComponentStore::descriptor,
+                    [&names](
+                            hidl_vec<hidl_string> const& instanceNames) {
+                        names.insert(names.end(), instanceNames.begin(), instanceNames.end());
+                    });
+            if (transResult.isOk()) {
+                break;
+            }
+            LOG(ERROR) << "Could not retrieve the list of service instances of "
+                       << IComponentStore::descriptor
+                       << ". Retrying...";
         }
-        LOG(ERROR) << "Could not retrieve the list of service instances of "
-                   << IComponentStore::descriptor
-                   << ". Retrying...";
     }
     // Sort service names in each category.
     std::stable_sort(
@@ -1545,7 +1545,7 @@
     std::string const& name = GetServiceNames()[index];
     LOG(VERBOSE) << "Creating a Codec2 client to service \"" << name << "\"";
 
-    if (c2_aidl::utils::IsEnabled()) {
+    if (c2_aidl::utils::IsSelected()) {
         std::string instanceName =
             ::android::base::StringPrintf("%s/%s", AidlBase::descriptor, name.c_str());
         if (AServiceManager_isDeclared(instanceName.c_str())) {
@@ -1559,20 +1559,23 @@
             CHECK(transStatus.isOk()) << "Codec2 AIDL service \"" << name << "\""
                                         "does not have IConfigurable.";
             return std::make_shared<Codec2Client>(baseStore, configurable, index);
+        } else {
+            LOG(ERROR) << "Codec2 AIDL service \"" << name << "\" is not declared";
         }
+    } else {
+        std::string instanceName = "android.hardware.media.c2/" + name;
+        sp<HidlBase> baseStore = HidlBase::getService(name);
+        CHECK(baseStore) << "Codec2 service \"" << name << "\""
+                            " inaccessible for unknown reasons.";
+        LOG(VERBOSE) << "Client to Codec2 service \"" << name << "\" created";
+        Return<sp<c2_hidl::IConfigurable>> transResult = baseStore->getConfigurable();
+        CHECK(transResult.isOk()) << "Codec2 service \"" << name << "\""
+                                    "does not have IConfigurable.";
+        sp<c2_hidl::IConfigurable> configurable =
+            static_cast<sp<c2_hidl::IConfigurable>>(transResult);
+        return std::make_shared<Codec2Client>(baseStore, configurable, index);
     }
-
-    std::string instanceName = "android.hardware.media.c2/" + name;
-    sp<HidlBase> baseStore = HidlBase::getService(name);
-    CHECK(baseStore) << "Codec2 service \"" << name << "\""
-                        " inaccessible for unknown reasons.";
-    LOG(VERBOSE) << "Client to Codec2 service \"" << name << "\" created";
-    Return<sp<c2_hidl::IConfigurable>> transResult = baseStore->getConfigurable();
-    CHECK(transResult.isOk()) << "Codec2 service \"" << name << "\""
-                                "does not have IConfigurable.";
-    sp<c2_hidl::IConfigurable> configurable =
-        static_cast<sp<c2_hidl::IConfigurable>>(transResult);
-    return std::make_shared<Codec2Client>(baseStore, configurable, index);
+    return nullptr;
 }
 
 c2_status_t Codec2Client::ForAllServices(
diff --git a/media/codec2/hal/client/output.cpp b/media/codec2/hal/client/output.cpp
index 4eebd1c..48f9756 100644
--- a/media/codec2/hal/client/output.cpp
+++ b/media/codec2/hal/client/output.cpp
@@ -336,9 +336,25 @@
 }
 
 void OutputBufferQueue::stop() {
-    std::scoped_lock<std::mutex> l(mMutex);
-    mStopped = true;
-    mOwner.reset(); // destructor of the block will not triger IGBP::cancel()
+    std::shared_ptr<C2SurfaceSyncMemory> oldMem;
+    {
+        std::scoped_lock<std::mutex> l(mMutex);
+        if (mStopped) {
+            return;
+        }
+        mStopped = true;
+        mOwner.reset(); // destructor of the block will not trigger IGBP::cancel()
+        // basically configuring null surface
+        oldMem = mSyncMem;
+        mSyncMem.reset();
+        mIgbp.clear();
+        mGeneration = 0;
+        mBqId = 0;
+    }
+    {
+        std::scoped_lock<std::mutex> l(mOldMutex);
+        mOldMem = oldMem;
+    }
 }
 
 bool OutputBufferQueue::registerBuffer(const C2ConstGraphicBlock& block) {
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 86fd8ab..8082dd7 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -1915,8 +1915,16 @@
         }
         comp = state->comp;
     }
-    status_t err = comp->stop();
+
+    // Note: Logically mChannel->stopUseOutputSurface() should be after comp->stop().
+    // However, some HAL implementations hang forever in comp->stop() (the HAL keeps waiting
+    // on a C2Fence until fetchGraphicBlock() unblocks, so stop() never completes).
+    // So we reverse the order and call stopUseOutputSurface() first to notify C2Fence waiters
+    // before comp->stop().
+    // See also b/300350761.
     mChannel->stopUseOutputSurface(pushBlankBuffer);
+    status_t err = comp->stop();
     if (err != C2_OK) {
         // TODO: convert err into status_t
         mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
@@ -2004,8 +2012,15 @@
         }
         comp = state->comp;
     }
-    comp->release();
+    // Note: Logically mChannel->stopUseOutputSurface() should be after comp->release().
+    // However, some HAL implementations hang forever in comp->release() (the HAL keeps waiting
+    // on a C2Fence until fetchGraphicBlock() unblocks, so release() never completes).
+    // So we reverse the order and call stopUseOutputSurface() first to notify C2Fence waiters
+    // before comp->release().
+    // See also b/300350761.
     mChannel->stopUseOutputSurface(pushBlankBuffer);
+    comp->release();
 
     {
         Mutexed<State>::Locked state(mState);
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index b91ac6d..4c385f1 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -378,7 +378,7 @@
         struct timespec ts;
         if (timeoutNs >= 0) {
             ts.tv_sec = int(timeoutNs / 1000000000);
-            ts.tv_nsec = timeoutNs;
+            ts.tv_nsec = timeoutNs % 1000000000;
         } else {
             ALOGD("polling for indefinite duration requested, but changed to wait for %d sec",
                   kPipeFenceWaitLimitSecs);
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index f6f97da..61aafa7 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -1081,6 +1081,7 @@
     emplace("libcodec2_soft_amrwbenc.so");
     //emplace("libcodec2_soft_av1dec_aom.so"); // deprecated for the gav1 implementation
     emplace("libcodec2_soft_av1dec_gav1.so");
+    emplace("libcodec2_soft_av1dec_dav1d.so");
     emplace("libcodec2_soft_av1enc.so");
     emplace("libcodec2_soft_avcdec.so");
     emplace("libcodec2_soft_avcenc.so");
diff --git a/media/janitors/media_reliability_OWNERS b/media/janitors/media_reliability_OWNERS
new file mode 100644
index 0000000..cced19c
--- /dev/null
+++ b/media/janitors/media_reliability_OWNERS
@@ -0,0 +1,5 @@
+# Bug component: 1051309
+# go/android-media-reliability
+
+essick@google.com
+nchalko@google.com
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 4bd12b8..01edf72 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -898,6 +898,22 @@
     return NO_ERROR;
 }
 
+status_t AudioFlingerClientAdapter::getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                                    struct audio_port_v7 *mixPort) const {
+    if (devicePort == nullptr || mixPort == nullptr) {
+        return BAD_VALUE;
+    }
+    media::AudioPortFw devicePortAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_v7_AudioPortFw(*devicePort));
+    media::AudioPortFw mixPortAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_v7_AudioPortFw(*mixPort));
+    media::AudioPortFw aidlRet;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->getAudioMixPort(devicePortAidl, mixPortAidl, &aidlRet)));
+    *mixPort = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPortFw_audio_port_v7(aidlRet));
+    return OK;
+}
+
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
 AudioFlingerServerAdapter::AudioFlingerServerAdapter(
@@ -1444,4 +1460,16 @@
     return Status::fromStatusT(mDelegate->getAudioPolicyConfig(_aidl_return));
 }
 
+Status AudioFlingerServerAdapter::getAudioMixPort(const media::AudioPortFw &devicePort,
+                                                  const media::AudioPortFw &mixPort,
+                                                  media::AudioPortFw *_aidl_return) {
+    audio_port_v7 devicePortLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioPortFw_audio_port_v7(devicePort));
+    audio_port_v7 mixPortLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioPortFw_audio_port_v7(mixPort));
+    RETURN_BINDER_IF_ERROR(mDelegate->getAudioMixPort(&devicePortLegacy, &mixPortLegacy));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_port_v7_AudioPortFw(mixPortLegacy));
+    return Status::ok();
+}
+
 } // namespace android
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 6412810..31d3af5 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -288,6 +288,11 @@
      */
     AudioPolicyConfig getAudioPolicyConfig();
 
+    /**
+     * Get the attributes of the mix port when connecting to the given device port.
+     */
+    AudioPortFw getAudioMixPort(in AudioPortFw devicePort, in AudioPortFw mixPort);
+
     // When adding a new method, please review and update
     // IAudioFlinger.h AudioFlingerServerAdapter::Delegate::TransactionCode
     // AudioFlinger.cpp AudioFlinger::onTransactWrapper()
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 3c96862..eb27e25 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -384,6 +384,9 @@
     virtual status_t supportsBluetoothVariableLatency(bool* support) const = 0;
 
     virtual status_t getAudioPolicyConfig(media::AudioPolicyConfig* output) = 0;
+
+    virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                     struct audio_port_v7 *mixPort) const = 0;
 };
 
 /**
@@ -498,6 +501,8 @@
                                    sp<media::ISoundDose>* soundDose) const override;
     status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
     status_t getAudioPolicyConfig(media::AudioPolicyConfig* output) override;
+    status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
+                             struct audio_port_v7 *mixPort) const override;
 
 private:
     const sp<media::IAudioFlingerService> mDelegate;
@@ -599,6 +604,7 @@
             INVALIDATE_TRACKS = media::BnAudioFlingerService::TRANSACTION_invalidateTracks,
             GET_AUDIO_POLICY_CONFIG =
                     media::BnAudioFlingerService::TRANSACTION_getAudioPolicyConfig,
+            GET_AUDIO_MIX_PORT = media::BnAudioFlingerService::TRANSACTION_getAudioMixPort,
         };
 
     protected:
@@ -732,6 +738,9 @@
                                  sp<media::ISoundDose>* _aidl_return) override;
     Status invalidateTracks(const std::vector<int32_t>& portIds) override;
     Status getAudioPolicyConfig(media::AudioPolicyConfig* _aidl_return) override;
+    Status getAudioMixPort(const media::AudioPortFw& devicePort,
+                           const media::AudioPortFw& mixPort,
+                           media::AudioPortFw* _aidl_return) override;
 private:
     const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
 };
diff --git a/media/libaudioclient/tests/audioeffect_tests.cpp b/media/libaudioclient/tests/audioeffect_tests.cpp
index e6149e4..26121cd 100644
--- a/media/libaudioclient/tests/audioeffect_tests.cpp
+++ b/media/libaudioclient/tests/audioeffect_tests.cpp
@@ -250,8 +250,10 @@
             if (!isEffectDefaultOnRecord(&descriptors[i].type, &descriptors[i].uuid,
                                          capture->getAudioRecordHandle())) {
                 selectedEffect = i;
+                EXPECT_EQ(OK, capture->stop());
                 break;
             }
+            EXPECT_EQ(OK, capture->stop());
         }
     }
     if (selectedEffect == -1) GTEST_SKIP() << " expected at least one preprocessing effect";
diff --git a/media/libaudiofoundation/AudioContainers.cpp b/media/libaudiofoundation/AudioContainers.cpp
index 202a400..e1265cf 100644
--- a/media/libaudiofoundation/AudioContainers.cpp
+++ b/media/libaudiofoundation/AudioContainers.cpp
@@ -119,4 +119,115 @@
     return ss.str();
 }
 
+std::string dumpMixerBehaviors(const MixerBehaviorSet& mixerBehaviors) {
+    std::stringstream ss;
+    for (auto it = mixerBehaviors.begin(); it != mixerBehaviors.end(); ++it) {
+        if (it != mixerBehaviors.begin()) {
+            ss << ", ";
+        }
+        ss << (*it);
+    }
+    return ss.str();
+}
+
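+// Builds a multimap keyed by audio format from profiles[first, last), pairing each entry's
+// sample rates with its channel masks.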
+AudioProfileAttributesMultimap createAudioProfilesAttrMap(audio_profile profiles[],
+                                                          uint32_t first,
+                                                          uint32_t last) {
+    AudioProfileAttributesMultimap result;
+    for (uint32_t i = first; i < last; ++i) {
+        SampleRateSet sampleRates(profiles[i].sample_rates,
+                                  profiles[i].sample_rates + profiles[i].num_sample_rates);
+        ChannelMaskSet channelMasks(profiles[i].channel_masks,
+                                    profiles[i].channel_masks + profiles[i].num_channel_masks);
+        result.emplace(profiles[i].format, std::make_pair(sampleRates, channelMasks));
+    }
+    return result;
+}
+
+namespace {
+
+void populateAudioProfile(audio_format_t format,
+                          const ChannelMaskSet& channelMasks,
+                          const SampleRateSet& samplingRates,
+                          audio_profile* profile) {
+    profile->format = format;
+    profile->num_channel_masks = 0;
+    for (auto it = channelMasks.begin();
+         it != channelMasks.end() && profile->num_channel_masks < AUDIO_PORT_MAX_CHANNEL_MASKS;
+         ++it) {
+        profile->channel_masks[profile->num_channel_masks++] = *it;
+    }
+    profile->num_sample_rates = 0;
+    for (auto it = samplingRates.begin();
+         it != samplingRates.end() && profile->num_sample_rates < AUDIO_PORT_MAX_SAMPLING_RATES;
+         ++it) {
+        profile->sample_rates[profile->num_sample_rates++] = *it;
+    }
+}
+
+} // namespace
+
+void populateAudioProfiles(const AudioProfileAttributesMultimap& profileAttrs,
+                           audio_format_t format,
+                           ChannelMaskSet allChannelMasks,
+                           SampleRateSet allSampleRates,
+                           audio_profile audioProfiles[],
+                           uint32_t* numAudioProfiles,
+                           uint32_t maxAudioProfiles) {
+    if (*numAudioProfiles >= maxAudioProfiles) {
+        return;
+    }
+
+    const auto lower = profileAttrs.lower_bound(format);
+    const auto upper = profileAttrs.upper_bound(format);
+    SampleRateSet sampleRatesPresent;
+    ChannelMaskSet channelMasksPresent;
+    for (auto it = lower; it != upper && *numAudioProfiles < maxAudioProfiles; ++it) {
+        SampleRateSet srs;
+        std::set_intersection(it->second.first.begin(), it->second.first.end(),
+                              allSampleRates.begin(), allSampleRates.end(),
+                              std::inserter(srs, srs.begin()));
+        if (srs.empty()) {
+            continue;
+        }
+        ChannelMaskSet cms;
+        std::set_intersection(it->second.second.begin(), it->second.second.end(),
+                              allChannelMasks.begin(), allChannelMasks.end(),
+                              std::inserter(cms, cms.begin()));
+        if (cms.empty()) {
+            continue;
+        }
+        sampleRatesPresent.insert(srs.begin(), srs.end());
+        channelMasksPresent.insert(cms.begin(), cms.end());
+        populateAudioProfile(it->first, cms, srs,
+                             &audioProfiles[(*numAudioProfiles)++]);
+    }
+    if (*numAudioProfiles >= maxAudioProfiles) {
+        ALOGW("%s, too many audio profiles", __func__);
+        return;
+    }
+
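+    // Add catch-all profiles for any sample rates or channel masks of the format that were not
+    // covered by the attribute entries above.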
+    SampleRateSet srs;
+    std::set_difference(allSampleRates.begin(), allSampleRates.end(),
+                        sampleRatesPresent.begin(), sampleRatesPresent.end(),
+                        std::inserter(srs, srs.begin()));
+    if (!srs.empty()) {
+        populateAudioProfile(format, allChannelMasks, srs,
+                             &audioProfiles[(*numAudioProfiles)++]);
+    }
+    if (*numAudioProfiles >= maxAudioProfiles) {
+        ALOGW("%s, too many audio profiles", __func__);
+        return;
+    }
+    ChannelMaskSet cms;
+    std::set_difference(allChannelMasks.begin(), allChannelMasks.end(),
+                        channelMasksPresent.begin(), channelMasksPresent.end(),
+                        std::inserter(cms, cms.begin()));
+    if (!cms.empty()) {
+        populateAudioProfile(format, cms, allSampleRates,
+                             &audioProfiles[(*numAudioProfiles)++]);
+    }
+}
+
 } // namespace android
diff --git a/media/libaudiofoundation/AudioProfile.cpp b/media/libaudiofoundation/AudioProfile.cpp
index 999e263..4a5fb96 100644
--- a/media/libaudiofoundation/AudioProfile.cpp
+++ b/media/libaudiofoundation/AudioProfile.cpp
@@ -383,6 +383,16 @@
     }
 }
 
+ChannelMaskSet AudioProfileVector::getSupportedChannelMasks() const {
+    ChannelMaskSet channelMasks;
+    for (const auto& profile : *this) {
+        if (profile->isValid()) {
+            channelMasks.insert(profile->getChannels().begin(), profile->getChannels().end());
+        }
+    }
+    return channelMasks;
+}
+
 ConversionResult<AudioProfileVector>
 aidl2legacy_AudioProfileVector(const AudioProfileVector::Aidl& aidl, bool isInput) {
     return convertContainers<AudioProfileVector>(aidl.first, aidl.second,
diff --git a/media/libaudiofoundation/TEST_MAPPING b/media/libaudiofoundation/TEST_MAPPING
index a4e271e..f7e5b12 100644
--- a/media/libaudiofoundation/TEST_MAPPING
+++ b/media/libaudiofoundation/TEST_MAPPING
@@ -20,5 +20,10 @@
         }
       ]
     }
+  ],
+  "postsubmit": [
+    {
+      "name": "audiofoundation_containers_test"
+    }
   ]
 }
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 88dcee9..46fd620 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -19,6 +19,7 @@
 #include <algorithm>
 #include <functional>
 #include <iterator>
+#include <map>
 #include <set>
 #include <vector>
 
@@ -34,6 +35,8 @@
 using MixerBehaviorSet = std::set<audio_mixer_behavior_t>;
 
 using FormatVector = std::vector<audio_format_t>;
+using AudioProfileAttributesMultimap =
+        std::multimap<audio_format_t, std::pair<SampleRateSet, ChannelMaskSet>>;
 
 const DeviceTypeSet& getAudioDeviceOutAllSet();
 const DeviceTypeSet& getAudioDeviceOutAllA2dpSet();
@@ -126,6 +129,8 @@
 
 std::string dumpDeviceTypes(const DeviceTypeSet& deviceTypes);
 
+std::string dumpMixerBehaviors(const MixerBehaviorSet& mixerBehaviors);
+
 /**
  * Return human readable string for device types.
  */
@@ -133,5 +138,49 @@
     return deviceTypesToString(deviceTypes);
 }
 
+/**
+ * Create an audio profile attributes map from the given audio profile array over the range
+ * [first, last).
+ *
+ * @param profiles the array of audio profiles.
+ * @param first the index of the first profile to include.
+ * @param last the index one past the last profile to include.
+ * @return a multimap from audio format to the pair of corresponding sample rate and channel mask
+ *         sets.
+ */
+AudioProfileAttributesMultimap createAudioProfilesAttrMap(audio_profile profiles[],
+                                                          uint32_t first,
+                                                          uint32_t last);
+
+/**
+ * Populate audio profiles according to the given profile attributes, format, channel masks and
+ * sample rates.
+ *
+ * The function first goes over all pairs of channel masks and sample rates that are present in
+ * the profile attributes of the given map. Channel masks and sample rates that are not present
+ * in the collections of all valid channel masks and all valid sample rates are excluded. After
+ * that, any channel masks and sample rates that are present in the all-values collections but
+ * not in the profile attributes are also placed in new audio profiles in the profile array.
+ *
+ * Note that if the resulting index of the audio profile exceeds the maximum, no new audio profiles
+ * will be placed in the array.
+ *
+ * @param profileAttrs a multimap that contains format and its corresponding channel masks and
+ *                     sample rates.
+ * @param format the targeted audio format.
+ * @param allChannelMasks all valid channel masks for the format.
+ * @param allSampleRates all valid sample rates for the format.
+ * @param audioProfiles the audio profile array.
+ * @param numAudioProfiles the start index at which to place audio profiles in the array. The
+ *                         value is updated as new audio profiles are placed.
+ * @param maxAudioProfiles the maximum number of audio profiles.
+ */
+void populateAudioProfiles(const AudioProfileAttributesMultimap& profileAttrs,
+                           audio_format_t format,
+                           ChannelMaskSet allChannelMasks,
+                           SampleRateSet allSampleRates,
+                           audio_profile audioProfiles[],
+                           uint32_t* numAudioProfiles,
+                           uint32_t maxAudioProfiles = AUDIO_PORT_MAX_AUDIO_PROFILES);
+
 
 } // namespace android
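A minimal usage sketch for the two helpers declared above (illustrative only; the sample rates and channel masks are arbitrary, and it assumes the audio_port_v7/audio_profile layout from system/audio.h):

// Rebuild the profiles of a port for one format, restricted to the given
// rates and masks, using the helpers declared in AudioContainers.h above.
#include <media/AudioContainers.h>
#include <system/audio.h>

void rebuildPcm16Profiles(struct audio_port_v7& port) {
    using namespace android;
    // Group the reported sample rates and channel masks by format.
    AudioProfileAttributesMultimap attrs = createAudioProfilesAttrMap(
            port.audio_profiles, 0 /*first*/, port.num_audio_profiles);
    const SampleRateSet allSampleRates = {44100, 48000};
    const ChannelMaskSet allChannelMasks = {AUDIO_CHANNEL_OUT_STEREO};
    port.num_audio_profiles = 0;
    // Writes at most AUDIO_PORT_MAX_AUDIO_PROFILES entries and updates the count.
    populateAudioProfiles(attrs, AUDIO_FORMAT_PCM_16_BIT, allChannelMasks, allSampleRates,
                          port.audio_profiles, &port.num_audio_profiles);
}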
diff --git a/media/libaudiofoundation/include/media/AudioProfile.h b/media/libaudiofoundation/include/media/AudioProfile.h
index a668afe..bcde1fe 100644
--- a/media/libaudiofoundation/include/media/AudioProfile.h
+++ b/media/libaudiofoundation/include/media/AudioProfile.h
@@ -149,6 +149,8 @@
     bool equals(const AudioProfileVector& other) const;
     void addAllValidProfiles(const AudioProfileVector& audioProfiles);
 
+    ChannelMaskSet getSupportedChannelMasks() const;
+
     using Aidl = std::pair<
             std::vector<media::audio::common::AudioProfile>,
             std::vector<media::AudioProfileSys>>;
diff --git a/media/libaudiofoundation/tests/Android.bp b/media/libaudiofoundation/tests/Android.bp
index 2f4aee0..82c7db7 100644
--- a/media/libaudiofoundation/tests/Android.bp
+++ b/media/libaudiofoundation/tests/Android.bp
@@ -43,3 +43,30 @@
 
     test_suites: ["device-tests"],
 }
+
+cc_test {
+    name: "audiofoundation_containers_test",
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    static_libs: [
+        "libaudiofoundation",
+    ],
+
+    header_libs: [
+        "libaudio_system_headers",
+    ],
+
+    srcs: [
+        "audiofoundation_containers_test.cpp",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    test_suites: ["device-tests"],
+}
diff --git a/media/libaudiofoundation/tests/audiofoundation_containers_test.cpp b/media/libaudiofoundation/tests/audiofoundation_containers_test.cpp
new file mode 100644
index 0000000..967e2ee
--- /dev/null
+++ b/media/libaudiofoundation/tests/audiofoundation_containers_test.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include <media/AudioContainers.h>
+
+namespace android {
+
+const static AudioProfileAttributesMultimap AUDIO_PROFILE_ATTRIBUTES = {
+        {AUDIO_FORMAT_PCM_16_BIT, {{44100, 48000},
+                                   {AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1}}},
+        {AUDIO_FORMAT_PCM_16_BIT, {{96000},
+                                   {AUDIO_CHANNEL_OUT_STEREO}}},
+        {AUDIO_FORMAT_PCM_8_24_BIT, {{48000},
+                                     {AUDIO_CHANNEL_OUT_STEREO}}}
+};
+
+TEST(PopulateAudioProfilesTest, AllAttributesMatches) {
+    const AudioProfileAttributesMultimap expected = {
+            {AUDIO_FORMAT_PCM_16_BIT, {{44100, 48000},
+                                       {AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1}}},
+            {AUDIO_FORMAT_PCM_16_BIT, {{96000},
+                                       {AUDIO_CHANNEL_OUT_STEREO}}}
+    };
+    const audio_format_t format = AUDIO_FORMAT_PCM_16_BIT;
+    const SampleRateSet allSampleRates = {44100, 48000, 96000};
+    const ChannelMaskSet allChannelMasks = {AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1};
+
+    audio_profile profiles[AUDIO_PORT_MAX_AUDIO_PROFILES];
+    uint32_t numProfiles = 0;
+    populateAudioProfiles(AUDIO_PROFILE_ATTRIBUTES, format, allChannelMasks, allSampleRates,
+                          profiles, &numProfiles);
+    ASSERT_EQ(expected, createAudioProfilesAttrMap(profiles, 0, numProfiles));
+}
+
+TEST(PopulateAudioProfilesTest, AttributesNotInAllValues) {
+    const AudioProfileAttributesMultimap expected = {
+            {AUDIO_FORMAT_PCM_16_BIT, {{48000},
+                                       {AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1}}},
+            {AUDIO_FORMAT_PCM_16_BIT, {{96000},
+                                       {AUDIO_CHANNEL_OUT_STEREO}}}
+    };
+    const audio_format_t format = AUDIO_FORMAT_PCM_16_BIT;
+    const SampleRateSet allSampleRates = {48000, 96000};
+    const ChannelMaskSet allChannelMasks = {AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1};
+
+    audio_profile profiles[AUDIO_PORT_MAX_AUDIO_PROFILES];
+    uint32_t numProfiles = 0;
+    populateAudioProfiles(AUDIO_PROFILE_ATTRIBUTES, format, allChannelMasks, allSampleRates,
+            profiles, &numProfiles);
+    ASSERT_EQ(expected, createAudioProfilesAttrMap(profiles, 0, numProfiles));
+}
+
+TEST(PopulateAudioProfilesTest, AllValuesNotInAttributes) {
+    const AudioProfileAttributesMultimap expected = {
+            {AUDIO_FORMAT_PCM_16_BIT, {{48000},
+                                       {AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1}}},
+            {AUDIO_FORMAT_PCM_16_BIT, {{96000},
+                                       {AUDIO_CHANNEL_OUT_STEREO}}},
+            {AUDIO_FORMAT_PCM_16_BIT, {{88200},
+                                       {AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO,
+                                        AUDIO_CHANNEL_OUT_7POINT1}}},
+            {AUDIO_FORMAT_PCM_16_BIT, {{48000, 88200, 96000},
+                                       {AUDIO_CHANNEL_OUT_MONO}}}
+    };
+    const audio_format_t format = AUDIO_FORMAT_PCM_16_BIT;
+    const SampleRateSet allSampleRates = {48000, 88200, 96000};
+    const ChannelMaskSet allChannelMasks =
+            {AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1};
+
+    audio_profile profiles[AUDIO_PORT_MAX_AUDIO_PROFILES];
+    uint32_t numProfiles = 0;
+    populateAudioProfiles(AUDIO_PROFILE_ATTRIBUTES, format, allChannelMasks, allSampleRates,
+            profiles, &numProfiles);
+    ASSERT_EQ(expected, createAudioProfilesAttrMap(profiles, 0, numProfiles));
+}
+
+TEST(PopulateAudioProfilesTest, NoOverflow) {
+    const audio_format_t format = AUDIO_FORMAT_PCM_16_BIT;
+    const SampleRateSet allSampleRates = {48000, 88200, 96000};
+    const ChannelMaskSet allChannelMasks =
+            {AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1};
+
+    audio_profile profiles[AUDIO_PORT_MAX_AUDIO_PROFILES];
+    const uint32_t expectedNumProfiles = 4;
+    for (uint32_t i = 0; i <= AUDIO_PORT_MAX_AUDIO_PROFILES; ++i) {
+        uint32_t numProfiles = 0;
+        populateAudioProfiles(AUDIO_PROFILE_ATTRIBUTES, format, allChannelMasks, allSampleRates,
+                              profiles, &numProfiles, i);
+        ASSERT_EQ(std::min(i, expectedNumProfiles), numProfiles);
+    }
+}
+
+} // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index b18e64b..e2233c7 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -836,6 +836,33 @@
     return OK;
 }
 
+status_t DeviceHalAidl::getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                        struct audio_port_v7 *mixPort) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    if (devicePort->type != AUDIO_PORT_TYPE_DEVICE) {
+        return BAD_VALUE;
+    }
+    if (mixPort->type != AUDIO_PORT_TYPE_MIX) {
+        return BAD_VALUE;
+    }
+    const int32_t aidlHandle = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_io_handle_t_int32_t(mixPort->ext.mix.handle));
+    auto it = findPortConfig(std::nullopt /*config*/, std::nullopt/*flags*/, aidlHandle);
+    if (it == mPortConfigs.end()) {
+        ALOGE("%s, cannot find mix port config for handle=%u", __func__, aidlHandle);
+        return BAD_VALUE;
+    }
+    AudioPort port;
+    if (status_t status = getAudioPort(it->second.portId, &port); status != NO_ERROR) {
+        return status;
+    }
+    const bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
+            mixPort->role, mixPort->type)) == ::aidl::android::AudioPortDirection::INPUT;
+    *mixPort = VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_AudioPort_audio_port_v7(
+            port, isInput));
+    return NO_ERROR;
+}
+
 status_t DeviceHalAidl::setAudioPortConfig(const struct audio_port_config* config) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
@@ -1010,7 +1037,7 @@
     return mModule->dump(fd, Args(args).args(), args.size());
 }
 
-int32_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports) {
+status_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports) {
     TIME_CHECK();
     if (!mModule) return NO_INIT;
     if (supports == nullptr) {
@@ -1194,10 +1221,15 @@
     TIME_CHECK();
     if (String8 key = String8(AudioParameter::keyReconfigA2dpSupported); keys.containsKey(key)) {
         keys.remove(key);
-        bool supports;
-        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-                        mBluetoothA2dp->supportsOffloadReconfiguration(&supports)));
-        result->addInt(key, supports ? 1 : 0);
+        if (mBluetoothA2dp != nullptr) {
+            bool supports;
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                            mBluetoothA2dp->supportsOffloadReconfiguration(&supports)));
+            result->addInt(key, supports ? 1 : 0);
+        } else {
+            ALOGI("%s: no IBluetoothA2dp on %s", __func__, mInstance.c_str());
+            result->addInt(key, 0);
+        }
     }
     return OK;
 }
@@ -1702,7 +1734,9 @@
 }
 
 DeviceHalAidl::PortConfigs::iterator DeviceHalAidl::findPortConfig(
-            const AudioConfig& config, const std::optional<AudioIoFlags>& flags, int32_t ioHandle) {
+            const std::optional<AudioConfig>& config,
+            const std::optional<AudioIoFlags>& flags,
+            int32_t ioHandle) {
     using Tag = AudioPortExt::Tag;
     return std::find_if(mPortConfigs.begin(), mPortConfigs.end(),
             [&](const auto& pair) {
@@ -1713,7 +1747,8 @@
                         "%s: stored mix port config is not fully specified: %s",
                         __func__, p.toString().c_str());
                 return p.ext.getTag() == Tag::mix &&
-                        isConfigEqualToPortConfig(config, p) &&
+                        (!config.has_value() ||
+                                isConfigEqualToPortConfig(config.value(), p)) &&
                         (!flags.has_value() || p.flags.value() == flags.value()) &&
                         p.ext.template get<Tag::mix>().handle == ioHandle; });
 }
@@ -1840,6 +1875,25 @@
     return OK;
 }
 
+status_t DeviceHalAidl::getAudioPort(int32_t portId, AudioPort* port) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mModule) {
+        return NO_INIT;
+    }
+    const status_t status = statusTFromBinderStatus(mModule->getAudioPort(portId, port));
+    if (status == OK) {
+        auto portIt = mPorts.find(portId);
+        if (portIt != mPorts.end()) {
+            portIt->second = *port;
+        } else {
+            ALOGW("%s, port(%d) returned successfully from the HAL but it is not cached",
+                  __func__, portId);
+        }
+    }
+    return status;
+}
+
 void DeviceHalAidl::clearCallbacks(void* cookie) {
     std::lock_guard l(mLock);
     mCallbacks.erase(cookie);
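The relaxed findPortConfig() matcher above skips the config comparison when no AudioConfig is supplied. A self-contained sketch of the same optional-filter pattern (the PortConfig type and field names here are stand-ins, not the HAL types):

#include <algorithm>
#include <map>
#include <optional>

struct PortConfig { int ioHandle; int sampleRate; };

// Returns the first entry with the matching handle; the sample-rate filter is
// only applied when a value is provided, mirroring the optional config above.
std::map<int, PortConfig>::iterator findConfig(std::map<int, PortConfig>& configs,
                                               std::optional<int> sampleRate, int ioHandle) {
    return std::find_if(configs.begin(), configs.end(), [&](const auto& pair) {
        const PortConfig& c = pair.second;
        return (!sampleRate.has_value() || c.sampleRate == *sampleRate) &&
                c.ioHandle == ioHandle;
    });
}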
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index 20cf88c..e1fe4d1 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -165,7 +165,7 @@
 
     error::Result<audio_hw_sync_t> getHwAvSync() override;
 
-    int32_t supportsBluetoothVariableLatency(bool* supports __unused) override;
+    status_t supportsBluetoothVariableLatency(bool* supports __unused) override;
 
     status_t getSoundDoseInterface(const std::string& module,
                                    ::ndk::SpAIBinder* soundDoseBinder) override;
@@ -176,6 +176,9 @@
 
     status_t setSimulateDeviceConnections(bool enabled) override;
 
+    status_t getAudioMixPort(const struct audio_port_v7* devicePort,
+                             struct audio_port_v7* mixPort) override;
+
     status_t dump(int __unused, const Vector<String16>& __unused) override;
 
   private:
@@ -260,7 +263,7 @@
     PortConfigs::iterator findPortConfig(
             const ::aidl::android::media::audio::common::AudioDevice& device);
     PortConfigs::iterator findPortConfig(
-            const ::aidl::android::media::audio::common::AudioConfig& config,
+            const std::optional<::aidl::android::media::audio::common::AudioConfig>& config,
             const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
             int32_t ioHandle);
     bool isPortHeldByAStream(int32_t portId);
@@ -280,6 +283,7 @@
     void resetUnusedPatchesAndPortConfigs();
     void resetUnusedPortConfigs();
     status_t updateRoutes();
+    status_t getAudioPort(int32_t portId, ::aidl::android::media::audio::common::AudioPort* port);
 
     // CallbackBroker implementation
     void clearCallbacks(void* cookie) override;
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index f96d419..e8e1f46 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -304,7 +304,12 @@
                 }
                 HidlUtils::audioConfigToHal(suggestedConfig, config);
             });
-    return processReturn("openOutputStream", ret, retval);
+    const status_t status = processReturn("openOutputStream", ret, retval);
+    cleanupStreams();
+    if (status == NO_ERROR) {
+        mStreams.insert({handle, *outStream});
+    }
+    return status;
 }
 
 status_t DeviceHalHidl::openInputStream(
@@ -377,7 +382,12 @@
                 }
                 HidlUtils::audioConfigToHal(suggestedConfig, config);
             });
-    return processReturn("openInputStream", ret, retval);
+    const status_t status = processReturn("openInputStream", ret, retval);
+    cleanupStreams();
+    if (status == NO_ERROR) {
+        mStreams.insert({handle, *inStream});
+    }
+    return status;
 }
 
 status_t DeviceHalHidl::supportsAudioPatches(bool *supportsPatches) {
@@ -684,4 +694,148 @@
 }
 #endif
 
+status_t DeviceHalHidl::supportsBluetoothVariableLatency(bool* supports) {
+    if (supports == nullptr) {
+        return BAD_VALUE;
+    }
+    *supports = false;
+
+    String8 reply;
+    status_t status = getParameters(
+            String8(AUDIO_PARAMETER_BT_VARIABLE_LATENCY_SUPPORTED), &reply);
+    if (status != NO_ERROR) {
+        return status;
+    }
+    AudioParameter replyParams(reply);
+    String8 trueOrFalse;
+    status = replyParams.get(
+            String8(AUDIO_PARAMETER_BT_VARIABLE_LATENCY_SUPPORTED), trueOrFalse);
+    if (status != NO_ERROR) {
+        return status;
+    }
+    *supports = trueOrFalse == AudioParameter::valueTrue;
+    return NO_ERROR;
+}
+
+namespace {
+
+status_t getParametersFromStream(
+        sp<StreamHalInterface> stream,
+        const char* parameters,
+        const char* extraParameters,
+        String8* reply) {
+    String8 request(parameters);
+    if (extraParameters != nullptr) {
+        request.append(";");
+        request.append(extraParameters);
+    }
+    status_t status = stream->getParameters(request, reply);
+    if (status != NO_ERROR) {
+        ALOGW("%s, failed to query %s, status=%d", __func__, parameters, status);
+        return status;
+    }
+    AudioParameter repliedParameters(*reply);
+    status = repliedParameters.get(String8(parameters), *reply);
+    if (status != NO_ERROR) {
+        ALOGW("%s: failed to retrieve %s, bailing out", __func__, parameters);
+    }
+    return status;
+}
+
+} // namespace
+
+status_t DeviceHalHidl::getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                        struct audio_port_v7 *mixPort) {
+    // The HIDL HAL does not support querying mix port information directly. If the HAL supports
+    // the `getAudioPort` API for querying device port attributes, use the structured audio
+    // profiles whose attributes match those reported by the `getParameters` API. Otherwise, use
+    // only the attributes reported by the `getParameters` API.
+    struct audio_port_v7 temp = *devicePort;
+    AudioProfileAttributesMultimap attrsFromDevice;
+    status_t status = getAudioPort(&temp);
+    if (status == NO_ERROR) {
+        attrsFromDevice = createAudioProfilesAttrMap(temp.audio_profiles, 0 /*first*/,
+                                                     temp.num_audio_profiles);
+    }
+    auto streamIt = mStreams.find(mixPort->ext.mix.handle);
+    if (streamIt == mStreams.end()) {
+        return BAD_VALUE;
+    }
+    auto stream = streamIt->second.promote();
+    if (stream == nullptr) {
+        return BAD_VALUE;
+    }
+
+    String8 formatsStr;
+    status = getParametersFromStream(
+            stream, AudioParameter::keyStreamSupportedFormats, nullptr /*extraParameters*/,
+            &formatsStr);
+    if (status != NO_ERROR) {
+        return status;
+    }
+    FormatVector formats = formatsFromString(formatsStr.c_str());
+
+    mixPort->num_audio_profiles = 0;
+    for (audio_format_t format : formats) {
+        if (mixPort->num_audio_profiles >= AUDIO_PORT_MAX_AUDIO_PROFILES) {
+            ALOGW("%s, too many audio profiles", __func__);
+            break;
+        }
+        AudioParameter formatParameter;
+        formatParameter.addInt(String8(AudioParameter::keyFormat), format);
+
+        String8 samplingRatesStr;
+        status = getParametersFromStream(
+                stream, AudioParameter::keyStreamSupportedSamplingRates,
+                formatParameter.toString(), &samplingRatesStr);
+        if (status != NO_ERROR) {
+            // Failed to query supported sample rate for current format, may succeed with
+            // other formats.
+            ALOGW("Skip adding format=%#x, status=%d", format, status);
+            continue;
+        }
+        SampleRateSet sampleRatesFromStream = samplingRatesFromString(samplingRatesStr.c_str());
+        if (sampleRatesFromStream.empty()) {
+            ALOGW("Skip adding format=%#x as the returned sampling rates are empty", format);
+            continue;
+        }
+        String8 channelMasksStr;
+        status = getParametersFromStream(
+                stream, AudioParameter::keyStreamSupportedChannels,
+                formatParameter.toString(), &channelMasksStr);
+        if (status != NO_ERROR) {
+            // Failed to query supported channel masks for current format, may succeed with
+            // other formats.
+            ALOGW("Skip adding format=%#x, status=%d", format, status);
+            continue;
+        }
+        ChannelMaskSet channelMasksFromStream = channelMasksFromString(channelMasksStr.c_str());
+        if (channelMasksFromStream.empty()) {
+            ALOGW("Skip adding format=%#x as the returned channel masks are empty", format);
+            continue;
+        }
+
+        // For an audio format, all audio profiles from the device port with the same format are
+        // added to the mix port after filtering their sample rates and channel masks according
+        // to the reply of the getParameters API. If any sample rate or channel mask is reported
+        // by the getParameters API but not reported by the device, additional audio profiles
+        // will be added.
+        populateAudioProfiles(attrsFromDevice, format, channelMasksFromStream,
+                              sampleRatesFromStream, mixPort->audio_profiles,
+                              &mixPort->num_audio_profiles);
+    }
+
+    return NO_ERROR;
+}
+
+void DeviceHalHidl::cleanupStreams() {
+    for (auto it = mStreams.begin(); it != mStreams.end();) {
+        if (it->second.promote() == nullptr) {
+            it = mStreams.erase(it);
+        } else {
+            ++it;
+        }
+    }
+}
+
 } // namespace android
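A sketch of the per-format request string that getParametersFromStream() composes in getAudioMixPort() above; the reply shown in the comment is only an assumed example of what a HAL might return:

#include <media/AudioParameter.h>
#include <system/audio.h>
#include <utils/String8.h>

using namespace android;

// Build the query for the supported sampling rates of one format, in the same
// "<key>;<format parameter>" form used by getParametersFromStream() above.
String8 buildSamplingRateQuery(audio_format_t format) {
    AudioParameter formatParameter;
    formatParameter.addInt(String8(AudioParameter::keyFormat), format);
    String8 request(AudioParameter::keyStreamSupportedSamplingRates);
    request.append(";");
    request.append(formatParameter.toString());
    // A reply might look like "sup_sampling_rates=44100|48000" (assumed format),
    // which samplingRatesFromString() would convert into a SampleRateSet.
    return request;
}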
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 989c1f5..7a712df 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -21,6 +21,7 @@
 #include PATH(android/hardware/audio/FILE_VERSION/IPrimaryDevice.h)
 #include <media/audiohal/DeviceHalInterface.h>
 #include <media/audiohal/EffectHalInterface.h>
+#include <media/audiohal/StreamHalInterface.h>
 
 #include "CoreConversionHelperHidl.h"
 
@@ -127,10 +128,7 @@
         return INVALID_OPERATION;
     }
 
-    int32_t supportsBluetoothVariableLatency(bool* supports __unused) override {
-        // TODO: Implement the HAL query when moving to AIDL HAL.
-        return INVALID_OPERATION;
-    }
+    status_t supportsBluetoothVariableLatency(bool* supports) override;
 
     status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
 
@@ -148,6 +146,9 @@
 
     status_t prepareToDisconnectExternalDevice(const struct audio_port_v7* port) override;
 
+    status_t getAudioMixPort(const struct audio_port_v7* devicePort,
+                             struct audio_port_v7* mixPort) override;
+
   private:
     friend class DevicesFactoryHalHidl;
     sp<::android::hardware::audio::CPP_VERSION::IDevice> mDevice;
@@ -157,12 +158,15 @@
     class SoundDoseWrapper;
     const std::unique_ptr<SoundDoseWrapper> mSoundDoseWrapper;
     std::set<audio_port_handle_t> mDeviceDisconnectionNotified;
+    std::map<audio_io_handle_t, wp<StreamHalInterface>> mStreams;
 
     // Can not be constructed directly by clients.
     explicit DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device);
     explicit DeviceHalHidl(
             const sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice>& device);
 
+    void cleanupStreams();
+
     // The destructor automatically closes the device.
     virtual ~DeviceHalHidl();
 
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 87aaeac..7d807b2 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -243,7 +243,7 @@
                                 [&](const auto& desc) { return desc.common.id.uuid == uuid; });
     if (matchIt == list.end()) {
         ALOGE("%s UUID not found in HAL and proxy list %s", __func__, toString(uuid).c_str());
-        return BAD_VALUE;
+        return NAME_NOT_FOUND;
     }
     ALOGI("%s UUID impl found %s", __func__, toString(uuid).c_str());
 
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index a965709..bb5f851 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -142,7 +142,7 @@
     virtual int32_t getAAudioMixerBurstCount() = 0;
     virtual int32_t getAAudioHardwareBurstMinUsec() = 0;
 
-    virtual int32_t supportsBluetoothVariableLatency(bool* supports) = 0;
+    virtual status_t supportsBluetoothVariableLatency(bool* supports) = 0;
 
     // Update the connection status of an external device.
     virtual status_t setConnectedState(const struct audio_port_v7* port, bool connected) = 0;
@@ -160,6 +160,9 @@
 
     virtual status_t prepareToDisconnectExternalDevice(const struct audio_port_v7* port) = 0;
 
+    virtual status_t getAudioMixPort(const struct audio_port_v7* devicePort,
+                                     struct audio_port_v7* mixPort) = 0;
+
   protected:
     // Subclasses can not be constructed directly by clients.
     DeviceHalInterface() {}
diff --git a/media/libaudioprocessing/BufferProviders.cpp b/media/libaudioprocessing/BufferProviders.cpp
index 9f19f7b..fbc7f90 100644
--- a/media/libaudioprocessing/BufferProviders.cpp
+++ b/media/libaudioprocessing/BufferProviders.cpp
@@ -185,6 +185,8 @@
      mDownmixConfig.inputCfg.mask = EFFECT_CONFIG_SMP_RATE | EFFECT_CONFIG_CHANNELS |
              EFFECT_CONFIG_FORMAT | EFFECT_CONFIG_ACC_MODE;
      mDownmixConfig.outputCfg.mask = mDownmixConfig.inputCfg.mask;
+     mDownmixConfig.inputCfg.buffer.frameCount = bufferFrameCount;
+     mDownmixConfig.outputCfg.buffer.frameCount = bufferFrameCount;
 
      mInFrameSize =
              audio_bytes_per_sample(format) * audio_channel_count_from_out_mask(inputChannelMask);
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index 1fed9a5..63cb48d 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -112,7 +112,7 @@
         DynamicsProcessing::EqBandConfig({.channel = std::numeric_limits<int>::max(),
                                           .band = std::numeric_limits<int>::max(),
                                           .enable = true,
-                                          .cutoffFrequencyHz = 20000,
+                                          .cutoffFrequencyHz = 20000.1,
                                           .gainDb = 200});
 
 static const Range::DynamicsProcessingRange kPreEqBandConfigRange = {
@@ -144,7 +144,7 @@
                         {.channel = std::numeric_limits<int>::max(),
                          .band = std::numeric_limits<int>::max(),
                          .enable = true,
-                         .cutoffFrequencyHz = 20000,
+                         .cutoffFrequencyHz = 20000.1,
                          .attackTimeMs = 60000,
                          .releaseTimeMs = 60000,
                          .ratio = 50,
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index 9d77135..57c873b 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -416,14 +416,25 @@
 template <typename T>
 bool DynamicsProcessingContext::validateBandConfig(const std::vector<T>& bands, int maxChannel,
                                                    int maxBand) {
-    std::vector<float> freqs(bands.size(), -1);
+    std::map<int, float> freqs;
     for (auto band : bands) {
-        if (!validateChannel(band.channel, maxChannel)) return false;
-        if (!validateBand(band.band, maxBand)) return false;
+        if (!validateChannel(band.channel, maxChannel)) {
+            LOG(ERROR) << __func__ << " " << band.toString() << " invalid, maxCh " << maxChannel;
+            return false;
+        }
+        if (!validateBand(band.band, maxBand)) {
+            LOG(ERROR) << __func__ << " " << band.toString() << " invalid, maxBand " << maxBand;
+            return false;
+        }
+        if (freqs.find(band.band) != freqs.end()) {
+            LOG(ERROR) << __func__ << " " << band.toString() << " found duplicate";
+            return false;
+        }
         freqs[band.band] = band.cutoffFrequencyHz;
     }
-    if (std::count(freqs.begin(), freqs.end(), -1)) return false;
-    return std::is_sorted(freqs.begin(), freqs.end());
+    return std::is_sorted(freqs.begin(), freqs.end(), [](const auto& a, const auto& b) {
+        return a.second <= b.second; // band index is already sorted as the map key
+    });
 }
 
 bool DynamicsProcessingContext::validateLimiterConfig(
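The rewritten validateBandConfig() above keys cutoff frequencies by band index, so duplicate bands are rejected and the per-band ordering can be checked. A standalone sketch of the same checks with stand-in types (BandConfig here is illustrative, not the AIDL type):

#include <algorithm>
#include <map>
#include <vector>

struct BandConfig { int band; float cutoffFrequencyHz; };

// Reject duplicate band indices, then require cutoff frequencies to increase
// with the band index (std::map keeps the keys ordered).
bool validateBands(const std::vector<BandConfig>& bands) {
    std::map<int, float> freqs;
    for (const auto& band : bands) {
        if (!freqs.emplace(band.band, band.cutoffFrequencyHz).second) {
            return false;  // duplicate band index
        }
    }
    return std::adjacent_find(freqs.begin(), freqs.end(), [](const auto& a, const auto& b) {
        return a.second >= b.second;  // a later band must have a higher cutoff
    }) == freqs.end();
}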
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index 665ceee..ee41867 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -218,6 +218,24 @@
             <Feature name="adaptive-playback" />
             <Attribute name="software-codec" />
         </MediaCodec>
+        <MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
+            <Limit name="alignment" value="1x1" />
+            <Limit name="block-size" value="16x16" />
+            <Variant name="!slow-cpu">
+                <Limit name="size" min="2x2" max="2048x2048" />
+                <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
+                <Limit name="blocks-per-second" range="1-245760" />
+                <Limit name="bitrate" range="1-40000000" />
+            </Variant>
+            <Variant name="slow-cpu">
+                <Limit name="size" min="2x2" max="1280x1280" />
+                <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
+                <Limit name="blocks-per-second" range="1-108000" />
+                <Limit name="bitrate" range="1-5000000" />
+            </Variant>
+            <Feature name="adaptive-playback" />
+            <Attribute name="software-codec" />
+        </MediaCodec>
         <MediaCodec name="c2.android.mpeg2.decoder" type="video/mpeg2" domain="tv">
             <Alias name="OMX.google.mpeg2.decoder" />
             <!-- profiles and levels:  ProfileMain : LevelHL -->
diff --git a/media/module/codecs/m4v_h263/dec/src/vop.cpp b/media/module/codecs/m4v_h263/dec/src/vop.cpp
index abc0861..2c937c3 100644
--- a/media/module/codecs/m4v_h263/dec/src/vop.cpp
+++ b/media/module/codecs/m4v_h263/dec/src/vop.cpp
@@ -136,6 +136,7 @@
                 case 0x05:
                 case 0x06:
                 case 0x08:
+                case 0x09:
                 case 0x10:
                 case 0x11:
                 case 0x12:
diff --git a/media/module/codecserviceregistrant/Android.bp b/media/module/codecserviceregistrant/Android.bp
index f3a1723..9ee81a4 100644
--- a/media/module/codecserviceregistrant/Android.bp
+++ b/media/module/codecserviceregistrant/Android.bp
@@ -6,34 +6,8 @@
     //   SPDX-license-identifier-Apache-2.0
 }
 
-cc_library {
-    name: "libmedia_codecserviceregistrant",
-    vendor_available: true,
-    min_sdk_version: "29",
-    apex_available: [
-        "//apex_available:platform",
-        "com.android.media.swcodec",
-    ],
-
-    srcs: [
-        "CodecServiceRegistrant.cpp",
-    ],
-
-    header_libs: [
-        "libmedia_headers",
-    ],
-
-    defaults: [
-        "libcodec2-hidl-defaults",
-    ],
-    shared_libs: [
-        "libbase",
-        "libcodec2_hidl@1.0",
-        "libcodec2_vndk",
-        "libhidlbase",
-        "libutils",
-    ],
-
+cc_defaults {
+    name: "libcodec2-runtime-libs",
     // Codecs
     runtime_libs: [
         "libcodec2_soft_avcdec",
@@ -61,6 +35,7 @@
         "libcodec2_soft_vp9dec",
         // "libcodec2_soft_av1dec_aom",  // replaced by the gav1 implementation
         "libcodec2_soft_av1dec_gav1",
+        "libcodec2_soft_av1dec_dav1d",
         "libcodec2_soft_av1enc",
         "libcodec2_soft_vp8enc",
         "libcodec2_soft_vp9enc",
@@ -70,3 +45,34 @@
         "libcodec2_soft_gsmdec",
     ],
 }
+
+cc_library {
+    name: "libmedia_codecserviceregistrant",
+    vendor_available: true,
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
+    srcs: [
+        "CodecServiceRegistrant.cpp",
+    ],
+
+    header_libs: [
+        "libmedia_headers",
+    ],
+
+    defaults: [
+        "libcodec2-hidl-defaults",
+        "libcodec2-runtime-libs",
+    ],
+    shared_libs: [
+        "libbase",
+        "libcodec2_hidl@1.0",
+        "libcodec2_vndk",
+        "libhidlbase",
+        "libutils",
+    ],
+
+}
diff --git a/media/mtp/tests/MtpFuzzer/MtpPacketFuzzerUtils.h b/media/mtp/tests/MtpFuzzer/MtpPacketFuzzerUtils.h
index 87fea9f..9be53a2 100644
--- a/media/mtp/tests/MtpFuzzer/MtpPacketFuzzerUtils.h
+++ b/media/mtp/tests/MtpFuzzer/MtpPacketFuzzerUtils.h
@@ -28,6 +28,7 @@
 constexpr size_t kMinSize = 0;
 constexpr size_t kMaxSize = 1000;
 constexpr size_t kMaxLength = 1000;
+constexpr size_t kMaxPathLength = 64;
 
 class MtpPacketFuzzerUtils {
   protected:
@@ -43,7 +44,7 @@
     };
 
     void fillFilePath(FuzzedDataProvider* fdp) {
-       mPath= fdp->ConsumeRandomLengthString(kMaxLength);
+       mPath= fdp->ConsumeRandomLengthString(kMaxPathLength);
     };
 
     void fillUsbDevFsUrb(FuzzedDataProvider* fdp) {
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index 2fb5728..b230df5 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -672,7 +672,7 @@
         if (out_size != NULL) {
             *out_size = abuf->capacity();
         }
-        return abuf->base();
+        return abuf->data();
     }
 
     android::Vector<android::sp<android::MediaCodecBuffer> > abufs;
@@ -689,7 +689,7 @@
         if (out_size != NULL) {
             *out_size = abufs[idx]->capacity();
         }
-        return abufs[idx]->base();
+        return abufs[idx]->data();
     }
     ALOGE("couldn't get input buffers");
     return NULL;
@@ -707,7 +707,7 @@
         if (out_size != NULL) {
             *out_size = abuf->capacity();
         }
-        return abuf->base();
+        return abuf->data();
     }
 
     android::Vector<android::sp<android::MediaCodecBuffer> > abufs;
@@ -720,7 +720,7 @@
         if (out_size != NULL) {
             *out_size = abufs[idx]->capacity();
         }
-        return abufs[idx]->base();
+        return abufs[idx]->data();
     }
     ALOGE("couldn't get output buffers");
     return NULL;
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 0d539c0..a277ad1 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -189,6 +189,7 @@
 BINDER_METHOD_ENTRY(supportsBluetoothVariableLatency) \
 BINDER_METHOD_ENTRY(getSoundDoseInterface) \
 BINDER_METHOD_ENTRY(getAudioPolicyConfig) \
+BINDER_METHOD_ENTRY(getAudioMixPort) \
 
 // singleton for Binder Method Statistics for IAudioFlinger
 static auto& getIAudioFlingerStatistics() {
@@ -4607,6 +4608,24 @@
     return mPatchPanel->listAudioPatches_l(num_patches, patches);
 }
 
+/**
+ * Get the attributes of the mix port when connecting to the given device port.
+ */
+status_t AudioFlinger::getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                       struct audio_port_v7 *mixPort) const {
+    if (status_t status = AudioValidator::validateAudioPort(*devicePort); status != NO_ERROR) {
+        ALOGE("%s, invalid device port, status=%d", __func__, status);
+        return status;
+    }
+    if (status_t status = AudioValidator::validateAudioPort(*mixPort); status != NO_ERROR) {
+        ALOGE("%s, invalid mix port, status=%d", __func__, status);
+        return status;
+    }
+
+    audio_utils::lock_guard _l(mutex());
+    return mPatchPanel->getAudioMixPort_l(devicePort, mixPort);
+}
+
 // ----------------------------------------------------------------------------
 
 status_t AudioFlinger::onTransactWrapper(TransactionCode code,
@@ -4640,6 +4659,7 @@
         case TransactionCode::GET_SUPPORTED_LATENCY_MODES:
         case TransactionCode::INVALIDATE_TRACKS:
         case TransactionCode::GET_AUDIO_POLICY_CONFIG:
+        case TransactionCode::GET_AUDIO_MIX_PORT:
             ALOGW("%s: transaction %d received from PID %d",
                   __func__, code, IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 2c34144..e7f9255 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -255,6 +255,10 @@
     status_t getAudioPolicyConfig(media::AudioPolicyConfig* config) final
             EXCLUDES_AudioFlinger_Mutex;
 
+    // Get the attributes of the mix port when connecting to the given device port.
+    status_t getAudioMixPort(const struct audio_port_v7* devicePort,
+                             struct audio_port_v7* mixPort) const final EXCLUDES_AudioFlinger_Mutex;
+
     status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
             const std::function<status_t()>& delegate) final EXCLUDES_AudioFlinger_Mutex;
 
diff --git a/services/audioflinger/IAfPatchPanel.h b/services/audioflinger/IAfPatchPanel.h
index 5a6621e..6110e4c 100644
--- a/services/audioflinger/IAfPatchPanel.h
+++ b/services/audioflinger/IAfPatchPanel.h
@@ -302,6 +302,13 @@
 
     virtual void closeThreadInternal_l(const sp<IAfThreadBase>& thread) const
             REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
+
+    /**
+     * Get the attributes of the mix port when connecting to the given device port.
+     */
+    virtual status_t getAudioMixPort_l(
+            const struct audio_port_v7* devicePort,
+            struct audio_port_v7* mixPort) REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 };
 
 }  // namespace android
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 7d3900b..17591dd 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -447,6 +447,24 @@
     return status;
 }
 
+status_t PatchPanel::getAudioMixPort_l(const audio_port_v7 *devicePort,
+                                       audio_port_v7 *mixPort) {
+    if (devicePort->type != AUDIO_PORT_TYPE_DEVICE) {
+        ALOGE("%s the type of given device port is not DEVICE", __func__);
+        return INVALID_OPERATION;
+    }
+    if (mixPort->type != AUDIO_PORT_TYPE_MIX) {
+        ALOGE("%s the type of given mix port is not MIX", __func__);
+        return INVALID_OPERATION;
+    }
+    AudioHwDevice* hwDevice = findAudioHwDeviceByModule_l(devicePort->ext.device.hw_module);
+    if (hwDevice == nullptr) {
+        ALOGW("%s cannot find hw module %d", __func__, devicePort->ext.device.hw_module);
+        return BAD_VALUE;
+    }
+    return hwDevice->getAudioMixPort(devicePort, mixPort);
+}
+
 PatchPanel::Patch::~Patch()
 {
     ALOGE_IF(isSoftware(), "Software patch connections leaked %d %d",
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index 1ff8fff..b107eb0 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -73,6 +73,12 @@
     void closeThreadInternal_l(const sp<IAfThreadBase>& thread) const final
             REQUIRES(audio_utils::AudioFlinger_Mutex);
 
+    /**
+     * Get the attributes of the mix port when connecting to the given device port.
+     */
+    status_t getAudioMixPort_l(const audio_port_v7* devicePort, audio_port_v7* mixPort) final
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
+
 private:
     AudioHwDevice* findAudioHwDeviceByModule_l(audio_module_handle_t module)
             REQUIRES(audio_utils::AudioFlinger_Mutex);
diff --git a/services/audioflinger/datapath/AudioHwDevice.cpp b/services/audioflinger/datapath/AudioHwDevice.cpp
index 9ff316c..67e9991 100644
--- a/services/audioflinger/datapath/AudioHwDevice.cpp
+++ b/services/audioflinger/datapath/AudioHwDevice.cpp
@@ -118,5 +118,10 @@
     return mHwDevice->getAAudioHardwareBurstMinUsec();
 }
 
+status_t AudioHwDevice::getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                        struct audio_port_v7 *mixPort) const {
+    return mHwDevice->getAudioMixPort(devicePort, mixPort);
+}
+
 
 }; // namespace android
diff --git a/services/audioflinger/datapath/AudioHwDevice.h b/services/audioflinger/datapath/AudioHwDevice.h
index f9cb80e..cfb6fbd 100644
--- a/services/audioflinger/datapath/AudioHwDevice.h
+++ b/services/audioflinger/datapath/AudioHwDevice.h
@@ -101,6 +101,9 @@
 
     [[nodiscard]] int32_t getAAudioHardwareBurstMinUsec() const;
 
+    [[nodiscard]] status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                           struct audio_port_v7 *mixPort) const;
+
 private:
     const audio_module_handle_t mHandle;
     const char * const          mModuleName;
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index da0df5f..d49a002 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -574,6 +574,10 @@
                                              media::DeviceConnectedState state) = 0;
 
     virtual status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) = 0;
+
+    // Get the attributes of the mix port when connecting to the given device port.
+    virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                     struct audio_port_v7 *mixPort) = 0;
 };
 
     // These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index c489eed..f3a9518 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -63,13 +63,7 @@
         if (getRole() == AUDIO_PORT_ROLE_SINK && (flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
             maxActiveCount = 0;
         }
-        if (getRole() == AUDIO_PORT_ROLE_SOURCE) {
-            mMixerBehaviors.clear();
-            mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_DEFAULT);
-            if (mFlags.output & AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
-                mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_BIT_PERFECT);
-            }
-        }
+        refreshMixerBehaviors();
     }
 
     const MixerBehaviorSet& getMixerBehaviors() const {
@@ -222,6 +216,10 @@
 
     void toSupportedMixerAttributes(std::vector<audio_mixer_attributes_t>* mixerAttributes) const;
 
+    status_t readFromParcelable(const media::AudioPortFw& parcelable);
+
+    void importAudioPort(const audio_port_v7& port) override;
+
     // Number of streams currently opened for this profile.
     uint32_t     curOpenCount;
     // Number of streams currently active for this profile. This is not the number of active clients
@@ -229,6 +227,8 @@
     uint32_t     curActiveCount;
 
 private:
+    void refreshMixerBehaviors();
+
     DeviceVector mSupportedDevices; // supported devices: this input/output can be routed from/to
 
     MixerBehaviorSet mMixerBehaviors;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 4877166..475059c 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -322,7 +322,7 @@
     mOutput1(0), mOutput2(0), mDirectOpenCount(0),
     mDirectClientSession(AUDIO_SESSION_NONE)
 {
-    if (profile != NULL) {
+    if (profile != nullptr) {
         // By default, opening the output without immutable flags, the bit-perfect flags should be
         // applied when the apps explicitly request.
         mFlags = (audio_output_flags_t)(profile->getFlags() & (~AUDIO_OUTPUT_FLAG_BIT_PERFECT));
@@ -376,7 +376,10 @@
         supportedDevices.merge(mOutput2->supportedDevices());
         return supportedDevices;
     }
-    return mProfile->getSupportedDevices();
+    if (mProfile != nullptr) {
+        return mProfile->getSupportedDevices();
+    }
+    return DeviceVector();
 }
 
 bool SwAudioOutputDescriptor::supportsDevice(const sp<DeviceDescriptor> &device) const
@@ -407,9 +410,10 @@
     if (isDuplicated()) {
         return (mOutput1->devicesSupportEncodedFormats(deviceTypes)
                     || mOutput2->devicesSupportEncodedFormats(deviceTypes));
-    } else {
+    } else if (mProfile != nullptr) {
        return mProfile->devicesSupportEncodedFormats(deviceTypes);
     }
+    return false;
 }
 
 bool SwAudioOutputDescriptor::containsSingleDeviceSupportingEncodedFormats(
@@ -419,7 +423,10 @@
         return (mOutput1->containsSingleDeviceSupportingEncodedFormats(device) &&
                 mOutput2->containsSingleDeviceSupportingEncodedFormats(device));
     }
-    return mProfile->containsSingleDeviceSupportingEncodedFormats(device);
+    if (mProfile != nullptr) {
+        return mProfile->containsSingleDeviceSupportingEncodedFormats(device);
+    }
+    return false;
 }
 
 uint32_t SwAudioOutputDescriptor::latency()
@@ -578,6 +585,11 @@
                         "with the requested devices, all device types: %s",
                         __func__, dumpDeviceTypes(devices.types()).c_str());
 
+    if (mProfile == nullptr) {
+        ALOGE("%s : Cannot open descriptor without a profile ", __func__);
+        return INVALID_OPERATION;
+    }
+
     audio_config_t lHalConfig;
     if (halConfig == nullptr) {
         lHalConfig = AUDIO_CONFIG_INITIALIZER;
@@ -662,7 +674,7 @@
         }
         return NO_ERROR;
     }
-    if (!isActive()) {
+    if (mProfile != nullptr && !isActive()) {
         if (!mProfile->canStartNewIo()) {
             return INVALID_OPERATION;
         }
@@ -679,7 +691,7 @@
         return;
     }
 
-    if (!isActive()) {
+    if (mProfile != nullptr && !isActive()) {
         LOG_ALWAYS_FATAL_IF(mProfile->curActiveCount < 1,
                             "%s invalid profile active count %u",
                             __func__, mProfile->curActiveCount);
@@ -702,10 +714,11 @@
         }
 
         mClientInterface->closeOutput(mIoHandle);
-
-        LOG_ALWAYS_FATAL_IF(mProfile->curOpenCount < 1, "%s profile open count %u",
-                            __FUNCTION__, mProfile->curOpenCount);
-        mProfile->curOpenCount--;
+        if (mProfile != nullptr) {
+            LOG_ALWAYS_FATAL_IF(mProfile->curOpenCount < 1, "%s profile open count %u",
+                                __FUNCTION__, mProfile->curOpenCount);
+            mProfile->curOpenCount--;
+        }
         mIoHandle = AUDIO_IO_HANDLE_NONE;
     }
 }
@@ -740,7 +753,10 @@
         return std::max(mOutput1->getRecommendedMuteDurationMs(),
                 mOutput2->getRecommendedMuteDurationMs());
     }
-    return mProfile->recommendedMuteDurationMs;
+    if (mProfile != nullptr) {
+        return mProfile->recommendedMuteDurationMs;
+    }
+    return 0;
 }
 
 void SwAudioOutputDescriptor::setTracksInvalidatedStatusByStrategy(product_strategy_t strategy) {
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 62e5bd4..514601c 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -225,8 +225,7 @@
 {
     bool added = false;
     for (const auto& device : devices) {
-        ALOG_ASSERT(device != nullptr, "Null pointer found when adding DeviceVector");
-        if (indexOf(device) < 0 && SortedVector::add(device) >= 0) {
+        if (device && indexOf(device) < 0 && SortedVector::add(device) >= 0) {
             added = true;
         }
     }
@@ -238,7 +237,10 @@
 
 ssize_t DeviceVector::add(const sp<DeviceDescriptor>& item)
 {
-    ALOG_ASSERT(item != nullptr, "Adding null pointer to DeviceVector");
+    if (!item) {
+        ALOGW("DeviceVector::%s() null device", __func__);
+        return -1;
+    }
     ssize_t ret = indexOf(item);
 
     if (ret < 0) {
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index 03ab3f8..dd222de 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -171,6 +171,49 @@
     }
 }
 
+void IOProfile::refreshMixerBehaviors() {
+    if (getRole() == AUDIO_PORT_ROLE_SOURCE) {
+        mMixerBehaviors.clear();
+        mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_DEFAULT);
+        if (mFlags.output & AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+            mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_BIT_PERFECT);
+        }
+    }
+}
+
+status_t IOProfile::readFromParcelable(const media::AudioPortFw &parcelable) {
+    status_t status = AudioPort::readFromParcelable(parcelable);
+    if (status == OK) {
+        refreshMixerBehaviors();
+    }
+    return status;
+}
+
+void IOProfile::importAudioPort(const audio_port_v7 &port) {
+    if (mProfiles.hasDynamicFormat()) {
+        std::set<audio_format_t> formats;
+        for (size_t i = 0; i < port.num_audio_profiles; ++i) {
+            formats.insert(port.audio_profiles[i].format);
+        }
+        addProfilesForFormats(mProfiles, FormatVector(formats.begin(), formats.end()));
+    }
+    for (audio_format_t format : mProfiles.getSupportedFormats()) {
+        for (size_t i = 0; i < port.num_audio_profiles; ++i) {
+            if (port.audio_profiles[i].format == format) {
+                ChannelMaskSet channelMasks(port.audio_profiles[i].channel_masks,
+                        port.audio_profiles[i].channel_masks +
+                                port.audio_profiles[i].num_channel_masks);
+                SampleRateSet sampleRates(port.audio_profiles[i].sample_rates,
+                        port.audio_profiles[i].sample_rates +
+                                port.audio_profiles[i].num_sample_rates);
+                addDynamicAudioProfileAndSort(
+                        mProfiles, sp<AudioProfile>::make(
+                                format, channelMasks, sampleRates));
+            }
+        }
+    }
+}
+
 void IOProfile::dump(String8 *dst, int spaces) const
 {
     String8 extraInfo;
@@ -195,6 +238,10 @@
             spaces - 2, "", maxActiveCount, curActiveCount);
     dst->appendFormat("%*s- recommendedMuteDurationMs: %u ms\n",
             spaces - 2, "", recommendedMuteDurationMs);
+    if (hasDynamicAudioProfile() && !mMixerBehaviors.empty()) {
+        dst->appendFormat("%*s- mixerBehaviors: %s\n",
+                spaces - 2, "", dumpMixerBehaviors(mMixerBehaviors).c_str());
+    }
 }
 
 void IOProfile::log()
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 8793085..58fcb5c 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -661,7 +661,9 @@
 }
 
 AudioPolicyManagerFuzzerDPPlaybackReRouting::~AudioPolicyManagerFuzzerDPPlaybackReRouting() {
-    mManager->stopInput(mPortId);
+    if (mManager) {
+        mManager->stopInput(mPortId);
+    }
 }
 
 bool AudioPolicyManagerFuzzerDPPlaybackReRouting::initialize() {
@@ -773,7 +775,9 @@
 }
 
 AudioPolicyManagerFuzzerDPMixRecordInjection::~AudioPolicyManagerFuzzerDPMixRecordInjection() {
-    mManager->stopOutput(mPortId);
+    if (mManager) {
+        mManager->stopOutput(mPortId);
+    }
 }
 
 bool AudioPolicyManagerFuzzerDPMixRecordInjection::initialize() {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index cc32aec..e70af52 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1736,7 +1736,8 @@
     // Compressed formats for MSD module, ordered from most preferred to least preferred.
     static const std::vector<audio_format_t> formatsOrder = {{
             AUDIO_FORMAT_IEC60958, AUDIO_FORMAT_MAT_2_1, AUDIO_FORMAT_MAT_2_0, AUDIO_FORMAT_E_AC3,
-            AUDIO_FORMAT_AC3, AUDIO_FORMAT_PCM_16_BIT }};
+            AUDIO_FORMAT_AC3, AUDIO_FORMAT_PCM_FLOAT, AUDIO_FORMAT_PCM_32_BIT,
+            AUDIO_FORMAT_PCM_8_24_BIT, AUDIO_FORMAT_PCM_24_BIT_PACKED, AUDIO_FORMAT_PCM_16_BIT }};
     static const std::vector<audio_channel_mask_t> channelMasksOrder = [](){
         // Channel position masks for MSD module, 3D > 2D > 1D ordering (most preferred to least
         // preferred).
@@ -6519,7 +6520,7 @@
                     mpClientInterface->setParameters(input, String8(param));
                     free(param);
                 }
-                updateAudioProfiles(device, input, profile->getAudioProfiles());
+                updateAudioProfiles(device, input, profile);
                 if (!profile->hasValidAudioProfile()) {
                     ALOGW("checkInputsForDevice() direct input missing param");
                     desc->close();
@@ -8049,77 +8050,54 @@
 
 void AudioPolicyManager::updateAudioProfiles(const sp<DeviceDescriptor>& devDesc,
                                              audio_io_handle_t ioHandle,
-                                             AudioProfileVector &profiles)
-{
-    String8 reply;
-    audio_devices_t device = devDesc->type();
-
-    // Format MUST be checked first to update the list of AudioProfile
-    if (profiles.hasDynamicFormat()) {
-        reply = mpClientInterface->getParameters(
-                ioHandle, String8(AudioParameter::keyStreamSupportedFormats));
-        ALOGV("%s: supported formats %d, %s", __FUNCTION__, ioHandle, reply.c_str());
-        AudioParameter repliedParameters(reply);
-        FormatVector formats;
-        if (repliedParameters.get(
-                String8(AudioParameter::keyStreamSupportedFormats), reply) == NO_ERROR) {
-            formats = formatsFromString(reply.c_str());
-        } else if (devDesc->hasValidAudioProfile()) {
-            ALOGD("%s: using the device profiles", __func__);
-            formats = devDesc->getAudioProfiles().getSupportedFormats();
-        } else {
-            ALOGE("%s: failed to retrieve format, bailing out", __func__);
-            return;
-        }
-        mReportedFormatsMap[devDesc] = formats;
-        if (device == AUDIO_DEVICE_OUT_HDMI
-                || isDeviceOfModule(devDesc, AUDIO_HARDWARE_MODULE_ID_MSD)) {
-            modifySurroundFormats(devDesc, &formats);
-        }
-        addProfilesForFormats(profiles, formats);
+                                             const sp<IOProfile>& profile) {
+    if (!profile->hasDynamicAudioProfile()) {
+        return;
     }
 
-    for (audio_format_t format : profiles.getSupportedFormats()) {
-        std::optional<ChannelMaskSet> channelMasks;
-        SampleRateSet samplingRates;
-        AudioParameter requestedParameters;
-        requestedParameters.addInt(String8(AudioParameter::keyFormat), format);
+    audio_port_v7 devicePort;
+    devDesc->toAudioPort(&devicePort);
 
-        if (profiles.hasDynamicRateFor(format)) {
-            reply = mpClientInterface->getParameters(
-                    ioHandle,
-                    requestedParameters.toString() + ";" +
-                    AudioParameter::keyStreamSupportedSamplingRates);
-            ALOGV("%s: supported sampling rates %s", __FUNCTION__, reply.c_str());
-            AudioParameter repliedParameters(reply);
-            if (repliedParameters.get(
-                    String8(AudioParameter::keyStreamSupportedSamplingRates), reply) == NO_ERROR) {
-                samplingRates = samplingRatesFromString(reply.c_str());
-            } else {
-                samplingRates = devDesc->getAudioProfiles().getSampleRatesFor(format);
-            }
-        }
-        if (profiles.hasDynamicChannelsFor(format)) {
-            reply = mpClientInterface->getParameters(ioHandle,
-                                                     requestedParameters.toString() + ";" +
-                                                     AudioParameter::keyStreamSupportedChannels);
-            ALOGV("%s: supported channel masks %s", __FUNCTION__, reply.c_str());
-            AudioParameter repliedParameters(reply);
-            if (repliedParameters.get(
-                    String8(AudioParameter::keyStreamSupportedChannels), reply) == NO_ERROR) {
-                channelMasks = channelMasksFromString(reply.c_str());
-            } else {
-                channelMasks = devDesc->getAudioProfiles().getChannelMasksFor(format);
-            }
-            if (channelMasks.has_value() && (device == AUDIO_DEVICE_OUT_HDMI
-                    || isDeviceOfModule(devDesc, AUDIO_HARDWARE_MODULE_ID_MSD))) {
-                modifySurroundChannelMasks(&channelMasks.value());
-            }
-        }
-        addDynamicAudioProfileAndSort(
-                profiles, new AudioProfile(
-                        format, channelMasks.value_or(ChannelMaskSet()), samplingRates));
+    audio_port_v7 mixPort;
+    profile->toAudioPort(&mixPort);
+    mixPort.ext.mix.handle = ioHandle;
+
+    status_t status = mpClientInterface->getAudioMixPort(&devicePort, &mixPort);
+    if (status != NO_ERROR) {
+        ALOGE("%s failed to query the attributes of the mix port", __func__);
+        return;
     }
+
+    std::set<audio_format_t> supportedFormats;
+    for (size_t i = 0; i < mixPort.num_audio_profiles; ++i) {
+        supportedFormats.insert(mixPort.audio_profiles[i].format);
+    }
+    FormatVector formats(supportedFormats.begin(), supportedFormats.end());
+    mReportedFormatsMap[devDesc] = formats;
+
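+    // For HDMI and MSD outputs, apply the user's surround sound preferences: drop profiles whose
+    // formats were removed and adjust the channel masks of the remaining ones.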
+    if (devDesc->type() == AUDIO_DEVICE_OUT_HDMI ||
+        isDeviceOfModule(devDesc, AUDIO_HARDWARE_MODULE_ID_MSD)) {
+        modifySurroundFormats(devDesc, &formats);
+        size_t modifiedNumProfiles = 0;
+        for (size_t i = 0; i < mixPort.num_audio_profiles; ++i) {
+            if (std::find(formats.begin(), formats.end(), mixPort.audio_profiles[i].format) ==
+                formats.end()) {
+                // Skip the format that is not present after modifying surround formats.
+                continue;
+            }
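+            // Keep this profile: compact it towards the front of the array and rewrite its
+            // channel masks after the surround adjustment.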
+            memcpy(&mixPort.audio_profiles[modifiedNumProfiles], &mixPort.audio_profiles[i],
+                   sizeof(struct audio_profile));
+            ChannelMaskSet channels(mixPort.audio_profiles[modifiedNumProfiles].channel_masks,
+                    mixPort.audio_profiles[modifiedNumProfiles].channel_masks +
+                            mixPort.audio_profiles[modifiedNumProfiles].num_channel_masks);
+            modifySurroundChannelMasks(&channels);
+            std::copy(channels.begin(), channels.end(),
+                      std::begin(mixPort.audio_profiles[modifiedNumProfiles].channel_masks));
+            mixPort.audio_profiles[modifiedNumProfiles++].num_channel_masks = channels.size();
+        }
+        mixPort.num_audio_profiles = modifiedNumProfiles;
+    }
+    profile->importAudioPort(mixPort);
 }
 
 status_t AudioPolicyManager::installPatch(const char *caller,
@@ -8246,7 +8224,7 @@
         mpClientInterface->setParameters(output, String8(param));
         free(param);
     }
-    updateAudioProfiles(device, output, profile->getAudioProfiles());
+    updateAudioProfiles(device, output, profile);
     if (!profile->hasValidAudioProfile()) {
         ALOGW("%s() missing param", __func__);
         desc->close();
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 88bafef..440fd01 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -1040,9 +1040,9 @@
         void modifySurroundFormats(const sp<DeviceDescriptor>& devDesc, FormatVector *formatsPtr);
         void modifySurroundChannelMasks(ChannelMaskSet *channelMasksPtr);
 
-        // If any, resolve any "dynamic" fields of an Audio Profiles collection
+        // Resolve the "dynamic" fields, if any, of an IOProfile's Audio Profiles collection
         void updateAudioProfiles(const sp<DeviceDescriptor>& devDesc, audio_io_handle_t ioHandle,
-                AudioProfileVector &profiles);
+                const sp<IOProfile> &profile);
 
         // Notify the policy client to prepare for disconnecting external device.
         void prepareToDisconnectExternalDevice(const sp<DeviceDescriptor> &device);
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 2874824..7584632 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -340,4 +340,14 @@
     return af->invalidateTracks(portIds);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::getAudioMixPort(
+        const struct audio_port_v7 *devicePort,
+        struct audio_port_v7 *port) {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == 0) {
+        return PERMISSION_DENIED;
+    }
+    return af->getAudioMixPort(devicePort, port);
+}
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index d0cde64..8d5628f 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -856,6 +856,9 @@
 
         status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
 
+        status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                 struct audio_port_v7 *port) override;
+
      private:
         AudioPolicyService *mAudioPolicyService;
     };
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 3629c16..7ef0266 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -201,6 +201,26 @@
         return mAudioParameters.toString();
     }
 
+    status_t getAudioMixPort(const struct audio_port_v7 *devicePort __unused,
+                             struct audio_port_v7 *mixPort) override {
+        mixPort->num_audio_profiles = 0;
+        for (auto format : mSupportedFormats) {
+            const int i = mixPort->num_audio_profiles;
+            mixPort->audio_profiles[i].format = format;
+            mixPort->audio_profiles[i].num_sample_rates = 1;
+            mixPort->audio_profiles[i].sample_rates[0] = 48000;
+            mixPort->audio_profiles[i].num_channel_masks = 0;
+            for (const auto& cm : mSupportedChannelMasks) {
+                if (audio_channel_mask_is_valid(cm)) {
+                    mixPort->audio_profiles[i].channel_masks[
+                            mixPort->audio_profiles[i].num_channel_masks++] = cm;
+                }
+            }
+            mixPort->num_audio_profiles++;
+        }
+        return NO_ERROR;
+    }
+
     void addSupportedFormat(audio_format_t format) {
         mSupportedFormats.insert(format);
     }
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index 2ae0e97..e55e935 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -106,6 +106,10 @@
     status_t invalidateTracks(const std::vector<audio_port_handle_t>& /*portIds*/) override {
         return NO_INIT;
     }
+    status_t getAudioMixPort(const struct audio_port_v7 *devicePort __unused,
+                             struct audio_port_v7 *mixPort __unused) override {
+        return INVALID_OPERATION;
+    }
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 5e58dbb..378255d 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -1423,6 +1423,7 @@
     ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTest::SetUp());
     mClient->addSupportedFormat(AUDIO_FORMAT_AC3);
     mClient->addSupportedFormat(AUDIO_FORMAT_E_AC3);
+    mClient->addSupportedChannelMask(AUDIO_CHANNEL_OUT_STEREO);
     mManager->setDeviceConnectionState(
             AUDIO_DEVICE_OUT_HDMI, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
             "" /*address*/, "" /*name*/, AUDIO_FORMAT_DEFAULT);
@@ -1548,13 +1549,13 @@
     mManager->setForceUse(
             AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL);
 
-    ASSERT_EQ(NO_ERROR, mManager->setSurroundFormatEnabled(GetParam(), false /*enabled*/));
-    auto formats = getFormatsFromPorts();
-    ASSERT_EQ(0, formats.count(GetParam()));
-
     ASSERT_EQ(NO_ERROR, mManager->setSurroundFormatEnabled(GetParam(), true /*enabled*/));
-    formats = getFormatsFromPorts();
+    auto formats = getFormatsFromPorts();
     ASSERT_EQ(1, formats.count(GetParam()));
+
+    ASSERT_EQ(NO_ERROR, mManager->setSurroundFormatEnabled(GetParam(), false /*enabled*/));
+    formats = getFormatsFromPorts();
+    ASSERT_EQ(0, formats.count(GetParam()));
 }
 
 TEST_P(AudioPolicyManagerTestForHdmi,