Merge "libaudiohal@aidl: Work around use of unsupported mix port configurations" into main
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
index 3f96cb3..10e4b79 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -42,6 +42,8 @@
 
 constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
 
+constexpr uint32_t kOutputDelay = 4;
+
 class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
   public:
     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
@@ -239,6 +241,13 @@
                              .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
                              .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
                              .build());
+
+        addParameter(
+                DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
+                .withDefault(new C2PortActualDelayTuning::output(kOutputDelay))
+                .withFields({C2F(mActualOutputDelay, value).inRange(0, kOutputDelay)})
+                .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
+                .build());
     }
 
     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
@@ -450,13 +459,6 @@
     if (mDav1dCtx) {
         Dav1dPicture p;
 
-        while (mDecodedPictures.size() > 0) {
-            p = mDecodedPictures.front();
-            mDecodedPictures.pop_front();
-
-            dav1d_picture_unref(&p);
-        }
-
         int res = 0;
         while (true) {
             memset(&p, 0, sizeof(p));
@@ -527,6 +529,8 @@
             android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
     if (numThreads > 0) lib_settings.n_threads = numThreads;
 
+    lib_settings.max_frame_delay = kOutputDelay;
+
     int res = 0;
     if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
         ALOGE("dav1d_open failed. status: %d.", res);
@@ -540,15 +544,6 @@
 
 void C2SoftDav1dDec::destroyDecoder() {
     if (mDav1dCtx) {
-        Dav1dPicture p;
-        while (mDecodedPictures.size() > 0) {
-            memset(&p, 0, sizeof(p));
-            p = mDecodedPictures.front();
-            mDecodedPictures.pop_front();
-
-            dav1d_picture_unref(&p);
-        }
-
         dav1d_close(&mDav1dCtx);
         mDav1dCtx = nullptr;
         mOutputBufferIndex = 0;
@@ -572,19 +567,24 @@
 }
 
 void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
-                                const std::shared_ptr<C2GraphicBlock>& block) {
+                                const std::shared_ptr<C2GraphicBlock>& block,
+                                const Dav1dPicture &img) {
     std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
     {
         IntfImpl::Lock lock = mIntf->lock();
         buffer->setInfo(mIntf->getColorAspects_l());
     }
-    auto fillWork = [buffer, index](const std::unique_ptr<C2Work>& work) {
+
+    auto fillWork = [buffer, index, img, this](const std::unique_ptr<C2Work>& work) {
         uint32_t flags = 0;
         if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
             (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
             flags |= C2FrameData::FLAG_END_OF_STREAM;
             ALOGV("signalling end_of_stream.");
         }
+        getHDRStaticParams(&img, work);
+        getHDR10PlusInfoData(&img, work);
+
         work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
         work->worklets.front()->output.buffers.clear();
         work->worklets.front()->output.buffers.push_back(buffer);
@@ -598,10 +598,6 @@
     }
 }
 
-static void freeCallback(const uint8_t */*data*/, void */*cookie*/) {
-    return;
-}
-
 void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
                              const std::shared_ptr<C2BlockPool>& pool) {
     work->result = C2_OK;
@@ -654,17 +650,40 @@
                       seq.max_height, (long)in_frameIndex);
             }
 
+            // Insert an OBU temporal delimiter (TD) if one is not already present.
+            // TODO: b/286852962
+            uint8_t obu_type = (bitstream[0] >> 3) & 0xf;
             Dav1dData data;
 
-            res = dav1d_data_wrap(&data, bitstream, inSize, freeCallback, nullptr);
-            if (res != 0) {
-                ALOGE("Decoder wrap error %s!", strerror(DAV1D_ERR(res)));
+            uint8_t* ptr = (obu_type == DAV1D_OBU_TD) ? dav1d_data_create(&data, inSize)
+                                                      : dav1d_data_create(&data, inSize + 2);
+            if (ptr == nullptr) {
+                ALOGE("dav1d_data_create failed!");
                 i_ret = -1;
+
             } else {
                 data.m.timestamp = in_frameIndex;
-                // ALOGV("inSize=%ld, in_frameIndex=%ld, timestamp=%ld",
-                //       inSize, frameIndex, data.m.timestamp);
 
+                int new_Size;
+                if (obu_type != DAV1D_OBU_TD) {
+                    new_Size = (int)(inSize + 2);
+
+                    // OBU TD
+                    ptr[0] = 0x12;
+                    ptr[1] = 0;
+
+                    memcpy(ptr + 2, bitstream, inSize);
+                } else {
+                    new_Size = (int)(inSize);
+                    // TODO: b/277797541 - investigate how to wrap this pointer in Dav1dData to
+                    // avoid memcopy operations.
+                    memcpy(ptr, bitstream, new_Size);
+                }
+
+                // ALOGV("memcpy(ptr,bitstream,inSize=%ld,new_Size=%d,in_frameIndex=%ld,timestamp=%ld,"
+                //       "ptr[0,1,2,3,4]=%x,%x,%x,%x,%x)",
+                //       inSize, new_Size, frameIndex, data.m.timestamp, ptr[0], ptr[1], ptr[2],
+                //       ptr[3], ptr[4]);
 
                 // Dump the bitstream data (inputBuffer) if dumping is enabled.
 #ifdef FILE_DUMP_ENABLE
@@ -672,6 +691,7 @@
 #endif
 
                 bool b_draining = false;
+                int res;
 
                 do {
                     res = dav1d_send_data(mDav1dCtx, &data);
@@ -685,39 +705,9 @@
                         break;
                     }
 
-                    bool b_output_error = false;
+                    outputBuffer(pool, work);
 
-                    do {
-                        Dav1dPicture img;
-                        memset(&img, 0, sizeof(img));
-
-                        res = dav1d_get_picture(mDav1dCtx, &img);
-                        if (res == 0) {
-                            mDecodedPictures.push_back(img);
-
-                            if (!end_of_stream) break;
-                        } else if (res == DAV1D_ERR(EAGAIN)) {
-                            /* the decoder needs more data to be able to output something.
-                             * if there is more data pending, continue the loop below or
-                             * otherwise break */
-                            if (data.sz != 0) res = 0;
-                            break;
-                        } else {
-                            ALOGE("warning! Decoder error %d!", res);
-                            b_output_error = true;
-                            break;
-                        }
-                    } while (res == 0);
-
-                    if (b_output_error) break;
-
-                    /* on drain, we must ignore the 1st EAGAIN */
-                    if (!b_draining && (res == DAV1D_ERR(EAGAIN) || res == 0) &&
-                        (end_of_stream)) {
-                        b_draining = true;
-                        res = 0;
-                    }
-                } while (res == 0 && ((data.sz != 0) || b_draining));
+                } while (res == DAV1D_ERR(EAGAIN));
 
                 if (data.sz > 0) {
                     ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
@@ -739,8 +729,6 @@
         }
     }
 
-    (void)outputBuffer(pool, work);
-
     if (end_of_stream) {
         drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
         mSignalledOutputEos = true;
@@ -749,7 +737,7 @@
     }
 }
 
-void C2SoftDav1dDec::getHDRStaticParams(Dav1dPicture* picture,
+void C2SoftDav1dDec::getHDRStaticParams(const Dav1dPicture* picture,
                                         const std::unique_ptr<C2Work>& work) {
     C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
     bool infoPresent = false;
@@ -813,7 +801,7 @@
     }
 }
 
-void C2SoftDav1dDec::getHDR10PlusInfoData(Dav1dPicture* picture,
+void C2SoftDav1dDec::getHDR10PlusInfoData(const Dav1dPicture* picture,
                                           const std::unique_ptr<C2Work>& work) {
     if (picture != nullptr) {
         if (picture->itut_t35 != nullptr) {
@@ -853,7 +841,7 @@
     }
 }
 
-void C2SoftDav1dDec::getVuiParams(Dav1dPicture* picture) {
+void C2SoftDav1dDec::getVuiParams(const Dav1dPicture* picture) {
     VuiColorAspects vuiColorAspects;
 
     if (picture) {
@@ -924,26 +912,11 @@
     memset(&img, 0, sizeof(img));
 
     int res = 0;
-    if (mDecodedPictures.size() > 0) {
-        img = mDecodedPictures.front();
-        mDecodedPictures.pop_front();
-        // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from the deque for
-        // outputBuffer.",img.m.timestamp,img.m.timestamp);
-    } else {
-        res = dav1d_get_picture(mDav1dCtx, &img);
-        if (res == 0) {
-            // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from dav1d for
-            // outputBuffer.",img.m.timestamp,img.m.timestamp);
-        } else {
-            ALOGE("failed to get a picture from dav1d for outputBuffer.");
-        }
-    }
-
+    res = dav1d_get_picture(mDav1dCtx, &img);
     if (res == DAV1D_ERR(EAGAIN)) {
-        ALOGD("Not enough data to output a picture.");
+        ALOGV("Not enough data to output a picture.");
         return false;
-    }
-    if (res != 0) {
+    } else if (res != 0) {
         ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
         return false;
     }
@@ -969,8 +942,6 @@
     }
 
     getVuiParams(&img);
-    getHDRStaticParams(&img, work);
-    getHDR10PlusInfoData(&img, work);
 
     // out_frameIndex that the decoded picture returns from dav1d.
     int64_t out_frameIndex = img.m.timestamp;
@@ -1156,9 +1127,8 @@
                              convFormat);
     }
 
+    finishWork(out_frameIndex, work, std::move(block), img);
     dav1d_picture_unref(&img);
-
-    finishWork(out_frameIndex, work, std::move(block));
     block = nullptr;
     return true;
 }
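
The C2SoftDav1dDec.cpp hunks above prepend a 2-byte temporal delimiter (TD) OBU when the first OBU of the input access unit is not already a TD. A minimal standalone sketch of that check, assuming only the AV1 OBU header layout (obu_type in bits 6..3, type 2 = OBU_TEMPORAL_DELIMITER) and not the component's dav1d_data_create()-based buffer handling:

#include <cstddef>
#include <cstdint>
#include <vector>

constexpr uint8_t kObuTypeTemporalDelimiter = 2;  // AV1 OBU_TEMPORAL_DELIMITER

// Return a copy of the access unit, prefixed with a TD OBU if one is missing.
std::vector<uint8_t> prependTemporalDelimiterIfMissing(const uint8_t* bitstream, size_t inSize) {
    std::vector<uint8_t> out;
    const uint8_t obuType = (bitstream[0] >> 3) & 0xf;  // obu_type lives in header bits 6..3
    if (obuType != kObuTypeTemporalDelimiter) {
        out.reserve(inSize + 2);
        out.push_back(0x12);  // OBU header: type = TD, has_size_field = 1
        out.push_back(0x00);  // obu_size = 0
    } else {
        out.reserve(inSize);
    }
    out.insert(out.end(), bitstream, bitstream + inSize);
    return out;
}

In the actual patch the destination buffer comes from dav1d_data_create(), so the copy lands directly in dav1d's own allocation rather than a std::vector.
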
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.h b/media/codec2/components/dav1d/C2SoftDav1dDec.h
index e3d2a93..5d2a725 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.h
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.h
@@ -58,7 +58,6 @@
     int mOutputBufferIndex = 0;
 
     Dav1dContext* mDav1dCtx = nullptr;
-    std::deque<Dav1dPicture> mDecodedPictures;
 
     // configurations used by component in process
     // (TODO: keep this in intf but make them internal only)
@@ -101,12 +100,13 @@
     nsecs_t mTimeEnd = 0;    // Time at the end of decode()
 
     bool initDecoder();
-    void getHDRStaticParams(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
-    void getHDR10PlusInfoData(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
-    void getVuiParams(Dav1dPicture* picture);
+    void getHDRStaticParams(const Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getHDR10PlusInfoData(const Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getVuiParams(const Dav1dPicture* picture);
     void destroyDecoder();
     void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
-                    const std::shared_ptr<C2GraphicBlock>& block);
+                    const std::shared_ptr<C2GraphicBlock>& block,
+                    const Dav1dPicture &img);
     // Sets |work->result| and mSignalledError. Returns false.
     void setError(const std::unique_ptr<C2Work>& work, c2_status_t error);
     bool allocTmpFrameBuffer(size_t size);
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 9a3399d..785cdf2 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -160,6 +160,10 @@
     kParamIndexSecureMode,
     kParamIndexEncryptedBuffer, // info-buffer, used with SM_READ_PROTECTED_WITH_ENCRYPTED
 
+    /* multiple access unit support */
+    kParamIndexLargeFrame,
+    kParamIndexAccessUnitInfos, // struct
+
     // deprecated
     kParamIndexDelayRequest = kParamIndexDelay | C2Param::CoreIndex::IS_REQUEST_FLAG,
 
@@ -1114,6 +1118,36 @@
 constexpr char C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE[] = "input.buffers.max-size";
 constexpr char C2_PARAMKEY_OUTPUT_MAX_BUFFER_SIZE[] = "output.buffers.max-size";
 
+/**
+ * Large frame struct
+ *
+ * This structure describes the size limits for large frames (frames with multiple
+ * access units).
+ */
+struct C2LargeFrameStruct {
+    uint32_t maxSize;         ///< maximum size of the buffer in bytes
+    uint32_t thresholdSize;   ///< size threshold for the buffer in bytes. The buffer is considered
+                              ///< full as soon as its size reaches or surpasses this limit.
+    C2LargeFrameStruct()
+        : maxSize(0),
+          thresholdSize(0) {}
+
+    C2LargeFrameStruct(uint32_t maxSize_, uint32_t thresholdSize_)
+        : maxSize(maxSize_), thresholdSize(thresholdSize_) {}
+
+    DEFINE_AND_DESCRIBE_C2STRUCT(LargeFrame)
+    C2FIELD(maxSize, "max-size")
+    C2FIELD(thresholdSize, "threshold-size")
+};
+
+/**
+ * This tuning controls the size limits for large output frames for the component.
+ * The default value for this tuning is platform specific.
+ */
+typedef C2StreamParam<C2Tuning, C2LargeFrameStruct, kParamIndexLargeFrame>
+        C2LargeFrame;
+constexpr char C2_PARAMKEY_OUTPUT_LARGE_FRAME[] = "output.large-frame";
+
 /* ---------------------------------------- misc. state ---------------------------------------- */
 
 /**
@@ -2146,6 +2180,49 @@
         C2StreamAudioFrameSizeInfo;
 constexpr char C2_PARAMKEY_AUDIO_FRAME_SIZE[] = "raw.audio-frame-size";
 
+/**
+ * Information for an access unit in a large frame (containing multiple access units)
+ */
+struct C2AccessUnitInfosStruct {
+
+    inline C2AccessUnitInfosStruct() {
+        memset(this, 0, sizeof(*this));
+    }
+
+    inline C2AccessUnitInfosStruct(
+            uint32_t flags_,
+            uint32_t size_,
+            int64_t timestamp_)
+        : flags(flags_),
+          size(size_),
+          timestamp(timestamp_) { }
+
+    uint32_t flags;     ///< flags for the access unit
+    uint32_t size;      ///< size of the access unit
+    int64_t timestamp;  ///< timestamp in us for the access unit
+
+    DEFINE_AND_DESCRIBE_C2STRUCT(AccessUnitInfos)
+    C2FIELD(flags, "flags")
+    C2FIELD(size, "size")
+    C2FIELD(timestamp, "timestamp")
+};
+
+/**
+ * Multiple access unit support (e.g., large audio frames)
+ *
+ * If supported by a component, multiple access units may be contained
+ * in a single work item. For now this is only defined for linear buffers.
+ * The metadata indicates the access-unit boundaries in a single buffer.
+ * The boundary of each access unit is marked by its size; the next access
+ * unit immediately follows.
+ */
+typedef C2StreamParam<C2Info, C2SimpleArrayStruct<C2AccessUnitInfosStruct>,
+                kParamIndexAccessUnitInfos>
+        C2AccessUnitInfos;
+
+constexpr char C2_PARAMKEY_INPUT_ACCESS_UNIT_INFOS[] = "input.access-unit-infos";
+constexpr char C2_PARAMKEY_OUTPUT_ACCESS_UNIT_INFOS[] = "output.access-unit-infos";
+
 /* --------------------------------------- AAC components --------------------------------------- */
 
 /**
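
To make the packing described by C2AccessUnitInfosStruct concrete: access units sit back-to-back in one linear buffer, and each entry's size gives the length of the next unit, so byte offsets are the running sum of the sizes. A minimal sketch in plain C++ using a stand-in struct (not the real C2 array parameter types):

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

struct AccessUnitInfo {
    uint32_t flags;     // per-access-unit flags
    uint32_t size;      // length of this access unit in bytes
    int64_t timestamp;  // presentation timestamp in microseconds
};

// Walk a large frame: each access unit starts where the previous one ended.
void walkLargeFrame(const uint8_t* buffer, const std::vector<AccessUnitInfo>& infos) {
    size_t offset = 0;
    for (const AccessUnitInfo& info : infos) {
        const uint8_t* accessUnit = buffer + offset;
        std::printf("access unit at offset %zu, %u bytes, ts=%lld us\n",
                    offset, info.size, static_cast<long long>(info.timestamp));
        (void)accessUnit;  // decode or repackage the unit here
        offset += info.size;
    }
}
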
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 7575a6f..fc3f699 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -903,15 +903,39 @@
 }
 
 status_t DeviceHalAidl::prepareToDisconnectExternalDevice(const struct audio_port_v7* port) {
-    // There is not AIDL API defined for `prepareToDisconnectExternalDevice`.
-    // Call `setConnectedState` instead.
-    // TODO(b/279824103): call prepareToDisconnectExternalDevice when it is added.
-    RETURN_STATUS_IF_ERROR(setConnectedState(port, false /*connected*/));
-    std::lock_guard l(mLock);
-    mDeviceDisconnectionNotified.insert(port->id);
-    // Return that there was no error as otherwise the disconnection procedure will not be
-    // considered complete for upper layers, and 'setConnectedState' will not be called again
-    return OK;
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (mModule == nullptr) return NO_INIT;
+    if (port == nullptr) {
+        return BAD_VALUE;
+    }
+    const bool isInput = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::portDirection(port->role, port->type)) ==
+                    ::aidl::android::AudioPortDirection::INPUT;
+    AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
+    if (aidlPort.ext.getTag() != AudioPortExt::device) {
+        ALOGE("%s: provided port is not a device port (module %s): %s",
+              __func__, mInstance.c_str(), aidlPort.toString().c_str());
+        return BAD_VALUE;
+    }
+    status_t status = NO_ERROR;
+    {
+        std::lock_guard l(mLock);
+        status = mMapper.prepareToDisconnectExternalDevice(aidlPort);
+    }
+    if (status == UNKNOWN_TRANSACTION) {
+        // There is no AIDL API defined for `prepareToDisconnectExternalDevice` in this
+        // HAL version, so call `setConnectedState` instead.
+        RETURN_STATUS_IF_ERROR(setConnectedState(port, false /*connected*/));
+        std::lock_guard l(mLock);
+        mDeviceDisconnectionNotified.insert(port->id);
+        // Return that there was no error, as otherwise the disconnection procedure will not be
+        // considered complete for upper layers, and 'setConnectedState' will not be called again.
+        return OK;
+    } else {
+        return status;
+    }
 }
 
 status_t DeviceHalAidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
@@ -925,11 +949,10 @@
         std::lock_guard l(mLock);
         if (mDeviceDisconnectionNotified.erase(port->id) > 0) {
             // For device disconnection, APM will first call `prepareToDisconnectExternalDevice`
-            // and then call `setConnectedState`. However, there is no API for
-            // `prepareToDisconnectExternalDevice` yet. In that case, `setConnectedState` will be
-            // called when calling `prepareToDisconnectExternalDevice`. Do not call to the HAL if
-            // previous call is successful. Also remove the cache here to avoid a large cache after
-            // a long run.
+            // and then call `setConnectedState`. If `prepareToDisconnectExternalDevice` doesn't
+            // exist in the HAL, `setConnectedState` has already been called while handling
+            // `prepareToDisconnectExternalDevice`. Do not call into the HAL if that previous call
+            // was successful. Also remove the cache here to avoid a large cache after a long run.
             return OK;
         }
     }
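
The rewritten DeviceHalAidl::prepareToDisconnectExternalDevice() above tries the new AIDL call first and only falls back to the legacy setConnectedState() path when the HAL returns UNKNOWN_TRANSACTION, i.e. the method does not exist in that interface version. A generic sketch of this fallback shape, with placeholder status values standing in for Android's status_t constants:

#include <cstdint>
#include <functional>

using status_t = int32_t;                      // stand-in for Android's status_t
constexpr status_t UNKNOWN_TRANSACTION = -74;  // illustrative value only

// Prefer the newer call; run the legacy path only when the remote side lacks the method.
status_t callWithLegacyFallback(const std::function<status_t()>& newCall,
                                const std::function<status_t()>& legacyFallback) {
    const status_t status = newCall();
    if (status == UNKNOWN_TRANSACTION) {
        return legacyFallback();  // older HAL: method not implemented
    }
    return status;
}

The mDeviceDisconnectionNotified bookkeeping inside the fallback branch is what lets the later setConnectedState() call be skipped, as the updated comment in the second hunk explains.
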
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index 05ddf87..63ace8c 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -688,6 +688,14 @@
     return false;
 }
 
+status_t Hal2AidlMapper::prepareToDisconnectExternalDevice(const AudioPort& devicePort) {
+    auto portsIt = findPort(devicePort.ext.get<AudioPortExt::device>().device);
+    if (portsIt == mPorts.end()) {
+        return BAD_VALUE;
+    }
+    return statusTFromBinderStatus(mModule->prepareToDisconnectExternalDevice(portsIt->second.id));
+}
+
 status_t Hal2AidlMapper::prepareToOpenStream(
         int32_t ioHandle, const AudioDevice& device, const AudioIoFlags& flags,
         AudioSource source, Cleanups* cleanups, AudioConfig* config,
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.h b/media/libaudiohal/impl/Hal2AidlMapper.h
index a6ce7d1..21cfd5a 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.h
+++ b/media/libaudiohal/impl/Hal2AidlMapper.h
@@ -72,6 +72,8 @@
         return ::aidl::android::convertContainer(mRoutes, routes, converter);
     }
     status_t initialize();
+    status_t prepareToDisconnectExternalDevice(
+            const ::aidl::android::media::audio::common::AudioPort& devicePort);
     // If the resulting 'mixPortConfig->id' is 0, that means the stream was not created,
     // and 'config' is a suggested config.
     status_t prepareToOpenStream(
diff --git a/media/libeffects/downmix/aidl/DownmixContext.cpp b/media/libeffects/downmix/aidl/DownmixContext.cpp
index 0e76d1d..13e0e5a 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.cpp
+++ b/media/libeffects/downmix/aidl/DownmixContext.cpp
@@ -20,12 +20,60 @@
 
 #include "DownmixContext.h"
 
-using aidl::android::hardware::audio::effect::IEffect;
 using aidl::android::hardware::audio::common::getChannelCount;
+using aidl::android::hardware::audio::effect::IEffect;
 using aidl::android::media::audio::common::AudioChannelLayout;
+using aidl::android::media::audio::common::AudioConfig;
 
 namespace aidl::android::hardware::audio::effect {
 
+namespace {
+
+inline bool isChannelMaskValid(const AudioChannelLayout& channelMask) {
+    if (channelMask.getTag() != AudioChannelLayout::layoutMask) return false;
+    int chMask = channelMask.get<AudioChannelLayout::layoutMask>();
+    // check against unsupported channels (up to FCC_26)
+    constexpr uint32_t MAXIMUM_CHANNEL_MASK = AudioChannelLayout::LAYOUT_22POINT2 |
+                                              AudioChannelLayout::CHANNEL_FRONT_WIDE_LEFT |
+                                              AudioChannelLayout::CHANNEL_FRONT_WIDE_RIGHT;
+    if (chMask & ~MAXIMUM_CHANNEL_MASK) {
+        LOG(ERROR) << "Unsupported channels in " << (chMask & ~MAXIMUM_CHANNEL_MASK);
+        return false;
+    }
+    return true;
+}
+
+inline bool isStereoChannelMask(const AudioChannelLayout& channelMask) {
+    if (channelMask.getTag() != AudioChannelLayout::layoutMask) return false;
+
+    return channelMask.get<AudioChannelLayout::layoutMask>() == AudioChannelLayout::LAYOUT_STEREO;
+}
+
+}  // namespace
+
+bool DownmixContext::validateCommonConfig(const Parameter::Common& common) {
+    const AudioConfig& input = common.input;
+    const AudioConfig& output = common.output;
+    if (input.base.sampleRate != output.base.sampleRate) {
+        LOG(ERROR) << __func__ << ": SRC not supported, input: " << input.toString()
+                   << " output: " << output.toString();
+        return false;
+    }
+
+    if (!isStereoChannelMask(output.base.channelMask)) {
+        LOG(ERROR) << __func__ << ": output should be stereo, not "
+                   << output.base.channelMask.toString();
+        return false;
+    }
+
+    if (!isChannelMaskValid(input.base.channelMask)) {
+        LOG(ERROR) << __func__ << ": invalid input channel, " << input.base.channelMask.toString();
+        return false;
+    }
+
+    return true;
+}
+
 DownmixContext::DownmixContext(int statusDepth, const Parameter::Common& common)
     : EffectContext(statusDepth, common) {
     LOG(DEBUG) << __func__;
@@ -62,7 +110,7 @@
     resetBuffer();
 }
 
-IEffect::Status DownmixContext::lvmProcess(float* in, float* out, int samples) {
+IEffect::Status DownmixContext::downmixProcess(float* in, float* out, int samples) {
     LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
     IEffect::Status status = {EX_ILLEGAL_ARGUMENT, 0, 0};
 
@@ -122,18 +170,4 @@
     }
 }
 
-bool DownmixContext::isChannelMaskValid(AudioChannelLayout channelMask) {
-    if (channelMask.getTag() != AudioChannelLayout::layoutMask) return false;
-    int chMask = channelMask.get<AudioChannelLayout::layoutMask>();
-    // check against unsupported channels (up to FCC_26)
-    constexpr uint32_t MAXIMUM_CHANNEL_MASK = AudioChannelLayout::LAYOUT_22POINT2 |
-                                              AudioChannelLayout::CHANNEL_FRONT_WIDE_LEFT |
-                                              AudioChannelLayout::CHANNEL_FRONT_WIDE_RIGHT;
-    if (chMask & ~MAXIMUM_CHANNEL_MASK) {
-        LOG(ERROR) << "Unsupported channels in " << (chMask & ~MAXIMUM_CHANNEL_MASK);
-        return false;
-    }
-    return true;
-}
-
 }  // namespace aidl::android::hardware::audio::effect
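
The validation moved into DownmixContext::validateCommonConfig() boils down to three checks: input and output sample rates must match (no SRC), the output layout must be stereo, and every bit of the input layout mask must be covered by the supported channel set. A standalone sketch of the mask check alone, with a placeholder constant rather than the real AudioChannelLayout values:

#include <cstdint>
#include <cstdio>

constexpr uint32_t kSupportedChannelBits = 0x00FFFFFF;  // placeholder for the supported-layout union

// Accept the layout only if no bit falls outside the supported set.
bool hasOnlySupportedChannels(uint32_t layoutMask) {
    const uint32_t unsupported = layoutMask & ~kSupportedChannelBits;
    if (unsupported != 0) {
        std::printf("unsupported channel bits: 0x%x\n", unsupported);
        return false;
    }
    return true;
}
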
diff --git a/media/libeffects/downmix/aidl/DownmixContext.h b/media/libeffects/downmix/aidl/DownmixContext.h
index 1571c38..a381d7f 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.h
+++ b/media/libeffects/downmix/aidl/DownmixContext.h
@@ -50,7 +50,9 @@
         return RetCode::SUCCESS;
     }
 
-    IEffect::Status lvmProcess(float* in, float* out, int samples);
+    IEffect::Status downmixProcess(float* in, float* out, int samples);
+
+    static bool validateCommonConfig(const Parameter::Common& common);
 
   private:
     DownmixState mState;
@@ -60,7 +62,6 @@
 
     // Common Params
     void init_params(const Parameter::Common& common);
-    bool isChannelMaskValid(::aidl::android::media::audio::common::AudioChannelLayout channelMask);
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
index 7068c5c..702a6f0 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.cpp
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -193,6 +193,8 @@
         return mContext;
     }
 
+    if (!DownmixContext::validateCommonConfig(common)) return nullptr;
+
     mContext = std::make_shared<DownmixContext>(1 /* statusFmqDepth */, common);
     return mContext;
 }
@@ -210,7 +212,7 @@
         LOG(ERROR) << __func__ << " nullContext";
         return {EX_NULL_POINTER, 0, 0};
     }
-    return mContext->lvmProcess(in, out, sampleToProcess);
+    return mContext->downmixProcess(in, out, sampleToProcess);
 }
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 4c6b02c..01e7b0d 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -185,7 +185,7 @@
 // Minimum amount of time between checking to see if the timestamp is advancing
 // for underrun detection. If we check too frequently, we may not detect a
 // timestamp update and will falsely detect underrun.
-static const nsecs_t kMinimumTimeBetweenTimestampChecksNs = 150 /* ms */ * 1000;
+static constexpr nsecs_t kMinimumTimeBetweenTimestampChecksNs = 150 /* ms */ * 1'000'000;
 
 // The universal constant for ubiquitous 20ms value. The value of 20ms seems to provide a good
 // balance between power consumption and latency, and allows threads to be scheduled reliably
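
The Threads.cpp change fixes a unit error: multiplying 150 /* ms */ by 1000 yields only 150'000 ns (150 microseconds) in a nanosecond-typed constant, while the intended interval is 150 ms. A compile-time check of the corrected arithmetic, assuming nsecs_t is a signed 64-bit nanosecond count as on Android:

#include <cstdint>

using nsecs_t = int64_t;  // assumption: matches Android's nanosecond type

static constexpr nsecs_t kMinimumTimeBetweenTimestampChecksNs = 150 /* ms */ * 1'000'000;
static_assert(kMinimumTimeBetweenTimestampChecksNs == 150'000'000, "150 ms expressed in ns");
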