Audio: Add remote submix stream implementation

Bug: 286914845
Test: atest VtsHalAudioCoreTargetTest
Change-Id: Ia477458193ade9068eaf56e953ab670fee53cc7d
diff --git a/audio/aidl/default/Android.bp b/audio/aidl/default/Android.bp
index bda0de2..e9294cf 100644
--- a/audio/aidl/default/Android.bp
+++ b/audio/aidl/default/Android.bp
@@ -18,6 +18,7 @@
         "libbinder_ndk",
         "libcutils",
         "libfmq",
+        "libnbaio_mono",
         "libstagefright_foundation",
         "libtinyalsav2",
         "libutils",
@@ -76,6 +77,10 @@
         "Stream.cpp",
         "StreamStub.cpp",
         "Telephony.cpp",
+        "r_submix/ModuleRemoteSubmix.cpp",
+        "r_submix/RemoteSubmixUtils.cpp",
+        "r_submix/SubmixRoute.cpp",
+        "r_submix/StreamRemoteSubmix.cpp",
         "usb/ModuleUsb.cpp",
         "usb/StreamUsb.cpp",
         "usb/UsbAlsaMixerControl.cpp",
diff --git a/audio/aidl/default/Module.cpp b/audio/aidl/default/Module.cpp
index 6f89d4b..48d1458 100644
--- a/audio/aidl/default/Module.cpp
+++ b/audio/aidl/default/Module.cpp
@@ -27,6 +27,7 @@
 
 #include "core-impl/Bluetooth.h"
 #include "core-impl/Module.h"
+#include "core-impl/ModuleRemoteSubmix.h"
 #include "core-impl/ModuleUsb.h"
 #include "core-impl/SoundDose.h"
 #include "core-impl/StreamStub.h"
@@ -111,8 +112,9 @@
     switch (type) {
         case Module::Type::USB:
             return ndk::SharedRefBase::make<ModuleUsb>(type);
-        case Type::DEFAULT:
         case Type::R_SUBMIX:
+            return ndk::SharedRefBase::make<ModuleRemoteSubmix>(type);
+        case Type::DEFAULT:
         default:
             return ndk::SharedRefBase::make<Module>(type);
     }
@@ -181,8 +183,8 @@
         StreamContext temp(
                 std::make_unique<StreamContext::CommandMQ>(1, true /*configureEventFlagWord*/),
                 std::make_unique<StreamContext::ReplyMQ>(1, true /*configureEventFlagWord*/),
-                portConfigIt->format.value(), portConfigIt->channelMask.value(),
-                portConfigIt->sampleRate.value().value, flags,
+                portConfigIt->portId, portConfigIt->format.value(),
+                portConfigIt->channelMask.value(), portConfigIt->sampleRate.value().value, flags,
                 portConfigIt->ext.get<AudioPortExt::mix>().handle,
                 std::make_unique<StreamContext::DataMQ>(frameSize * in_bufferSizeFrames),
                 asyncCallback, outEventCallback, params);
@@ -490,6 +492,17 @@
         return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
     }
 
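+    // Reject connected device ports that have profiles without channel masks or sample rates,
+    // since such profiles cannot be used to open a stream.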
+    for (const auto& profile : connectedPort.profiles) {
+        if (profile.channelMasks.empty()) {
+            LOG(ERROR) << __func__ << ": the profile " << profile.name << " has no channel masks";
+            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+        }
+        if (profile.sampleRates.empty()) {
+            LOG(ERROR) << __func__ << ": the profile " << profile.name << " has no sample rates";
+            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+        }
+    }
+
     connectedPort.id = ++getConfig().nextPortId;
     auto [connectedPortsIt, _] =
             mConnectedDevicePorts.insert(std::pair(connectedPort.id, std::vector<int32_t>()));
diff --git a/audio/aidl/default/include/core-impl/ModuleRemoteSubmix.h b/audio/aidl/default/include/core-impl/ModuleRemoteSubmix.h
new file mode 100644
index 0000000..7b1d375
--- /dev/null
+++ b/audio/aidl/default/include/core-impl/ModuleRemoteSubmix.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "core-impl/Module.h"
+
+namespace aidl::android::hardware::audio::core {
+
+class ModuleRemoteSubmix : public Module {
+  public:
+    explicit ModuleRemoteSubmix(Module::Type type) : Module(type) {}
+
+  private:
+    // IModule interfaces
+    ndk::ScopedAStatus getTelephony(std::shared_ptr<ITelephony>* _aidl_return) override;
+    ndk::ScopedAStatus getBluetooth(std::shared_ptr<IBluetooth>* _aidl_return) override;
+    ndk::ScopedAStatus getMicMute(bool* _aidl_return) override;
+    ndk::ScopedAStatus setMicMute(bool in_mute) override;
+
+    // Module interfaces
+    ndk::ScopedAStatus createInputStream(
+            const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata,
+            StreamContext&& context,
+            const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones,
+            std::shared_ptr<StreamIn>* result) override;
+    ndk::ScopedAStatus createOutputStream(
+            const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata,
+            StreamContext&& context,
+            const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>&
+                    offloadInfo,
+            std::shared_ptr<StreamOut>* result) override;
+    ndk::ScopedAStatus populateConnectedDevicePort(
+            ::aidl::android::media::audio::common::AudioPort* audioPort) override;
+    ndk::ScopedAStatus checkAudioPatchEndpointsMatch(
+            const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sources,
+            const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sinks)
+            override;
+    void onExternalDeviceConnectionChanged(
+            const ::aidl::android::media::audio::common::AudioPort& audioPort,
+            bool connected) override;
+    ndk::ScopedAStatus onMasterMuteChanged(bool mute) override;
+    ndk::ScopedAStatus onMasterVolumeChanged(float volume) override;
+};
+
+}  // namespace aidl::android::hardware::audio::core
diff --git a/audio/aidl/default/include/core-impl/Stream.h b/audio/aidl/default/include/core-impl/Stream.h
index c20a421..e9f4fd4 100644
--- a/audio/aidl/default/include/core-impl/Stream.h
+++ b/audio/aidl/default/include/core-impl/Stream.h
@@ -78,6 +78,7 @@
 
     StreamContext() = default;
     StreamContext(std::unique_ptr<CommandMQ> commandMQ, std::unique_ptr<ReplyMQ> replyMQ,
+                  int portId,
                   const ::aidl::android::media::audio::common::AudioFormatDescription& format,
                   const ::aidl::android::media::audio::common::AudioChannelLayout& channelLayout,
                   int sampleRate, const ::aidl::android::media::audio::common::AudioIoFlags& flags,
@@ -88,6 +89,7 @@
         : mCommandMQ(std::move(commandMQ)),
           mInternalCommandCookie(std::rand()),
           mReplyMQ(std::move(replyMQ)),
+          mPortId(portId),
           mFormat(format),
           mChannelLayout(channelLayout),
           mSampleRate(sampleRate),
@@ -101,6 +103,7 @@
         : mCommandMQ(std::move(other.mCommandMQ)),
           mInternalCommandCookie(other.mInternalCommandCookie),
           mReplyMQ(std::move(other.mReplyMQ)),
+          mPortId(other.mPortId),
           mFormat(other.mFormat),
           mChannelLayout(other.mChannelLayout),
           mSampleRate(other.mSampleRate),
@@ -114,6 +117,7 @@
         mCommandMQ = std::move(other.mCommandMQ);
         mInternalCommandCookie = other.mInternalCommandCookie;
         mReplyMQ = std::move(other.mReplyMQ);
+        mPortId = other.mPortId;
         mFormat = std::move(other.mFormat);
         mChannelLayout = std::move(other.mChannelLayout);
         mSampleRate = other.mSampleRate;
@@ -145,6 +149,7 @@
     std::shared_ptr<IStreamOutEventCallback> getOutEventCallback() const {
         return mOutEventCallback;
     }
+    int getPortId() const { return mPortId; }
     ReplyMQ* getReplyMQ() const { return mReplyMQ.get(); }
     int getTransientStateDelayMs() const { return mDebugParameters.transientStateDelayMs; }
     int getSampleRate() const { return mSampleRate; }
@@ -155,6 +160,7 @@
     std::unique_ptr<CommandMQ> mCommandMQ;
     int mInternalCommandCookie;  // The value used to confirm that the command was posted internally
     std::unique_ptr<ReplyMQ> mReplyMQ;
+    int mPortId;
     ::aidl::android::media::audio::common::AudioFormatDescription mFormat;
     ::aidl::android::media::audio::common::AudioChannelLayout mChannelLayout;
     int mSampleRate;
diff --git a/audio/aidl/default/include/core-impl/StreamRemoteSubmix.h b/audio/aidl/default/include/core-impl/StreamRemoteSubmix.h
new file mode 100644
index 0000000..c1194ab
--- /dev/null
+++ b/audio/aidl/default/include/core-impl/StreamRemoteSubmix.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+#include <mutex>
+#include <vector>
+
+#include "core-impl/Stream.h"
+#include "r_submix/SubmixRoute.h"
+
+namespace aidl::android::hardware::audio::core {
+
+using aidl::android::hardware::audio::core::r_submix::AudioConfig;
+using aidl::android::hardware::audio::core::r_submix::SubmixRoute;
+
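+// A remote submix stream attaches to a SubmixRoute, looked up by the stream's port id in
+// sSubmixRoutes. The route owns the MonoPipe: output streams write into its sink and input
+// streams read from its source.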
+class StreamRemoteSubmix : public StreamCommonImpl {
+  public:
+    StreamRemoteSubmix(const Metadata& metadata, StreamContext&& context);
+
+    ::android::status_t init() override;
+    ::android::status_t drain(StreamDescriptor::DrainMode) override;
+    ::android::status_t flush() override;
+    ::android::status_t pause() override;
+    ::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount,
+                                 int32_t* latencyMs) override;
+    ::android::status_t standby() override;
+    void shutdown() override;
+
+    // Overridden methods of 'StreamCommonImpl', called on a Binder thread.
+    ndk::ScopedAStatus prepareToClose() override;
+
+  private:
+    size_t getPipeSizeInFrames();
+    size_t getStreamPipeSizeInFrames();
+    ::android::status_t outWrite(void* buffer, size_t frameCount, size_t* actualFrameCount);
+    ::android::status_t inRead(void* buffer, size_t frameCount, size_t* actualFrameCount);
+
+    const int mPortId;
+    const bool mIsInput;
+    AudioConfig mStreamConfig;
+    std::shared_ptr<SubmixRoute> mCurrentRoute = nullptr;
+    ::android::status_t mStatus = ::android::NO_INIT;
+
+    // Mutex protecting the static map of submix routes. Each route also has its own lock;
+    // the two locks must never be held at the same time.
+    static std::mutex sSubmixRoutesLock;
+    static std::map<int32_t, std::shared_ptr<SubmixRoute>> sSubmixRoutes
+            GUARDED_BY(sSubmixRoutesLock);
+
+    // limit for number of read error log entries to avoid spamming the logs
+    static constexpr int kMaxReadErrorLogs = 5;
+    // The duration of kMaxReadFailureAttempts * kReadAttemptSleepUs must be strictly less than
+    // the duration of a record buffer at the current record sample rate (of the device, not of
+    // the recording itself). Here: 3 * 5 ms = 15 ms < 1024 frames * 1000 / 48000 = 21.333 ms.
+    static constexpr int kMaxReadFailureAttempts = 3;
+    // 5ms between two read attempts when pipe is empty
+    static constexpr int kReadAttemptSleepUs = 5000;
+};
+
+class StreamInRemoteSubmix final : public StreamRemoteSubmix, public StreamIn {
+  public:
+    friend class ndk::SharedRefBase;
+    StreamInRemoteSubmix(
+            const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata,
+            StreamContext&& context,
+            const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones);
+
+  private:
+    ndk::ScopedAStatus getActiveMicrophones(
+            std::vector<::aidl::android::media::audio::common::MicrophoneDynamicInfo>* _aidl_return)
+            override;
+};
+
+class StreamOutRemoteSubmix final : public StreamRemoteSubmix, public StreamOut {
+  public:
+    friend class ndk::SharedRefBase;
+    StreamOutRemoteSubmix(
+            const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata,
+            StreamContext&& context,
+            const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>&
+                    offloadInfo);
+};
+
+}  // namespace aidl::android::hardware::audio::core
diff --git a/audio/aidl/default/r_submix/ModuleRemoteSubmix.cpp b/audio/aidl/default/r_submix/ModuleRemoteSubmix.cpp
new file mode 100644
index 0000000..2b79f51
--- /dev/null
+++ b/audio/aidl/default/r_submix/ModuleRemoteSubmix.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AHAL_ModuleRemoteSubmix"
+
+#include <vector>
+
+#include <android-base/logging.h>
+
+#include "RemoteSubmixUtils.h"
+#include "core-impl/ModuleRemoteSubmix.h"
+#include "core-impl/StreamRemoteSubmix.h"
+
+using aidl::android::hardware::audio::common::SinkMetadata;
+using aidl::android::hardware::audio::common::SourceMetadata;
+using aidl::android::media::audio::common::AudioOffloadInfo;
+using aidl::android::media::audio::common::AudioPort;
+using aidl::android::media::audio::common::AudioPortConfig;
+using aidl::android::media::audio::common::MicrophoneInfo;
+
+namespace aidl::android::hardware::audio::core {
+
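+// ModuleRemoteSubmix reuses the generic Module implementation for port and patch bookkeeping.
+// The overrides below create submix streams, validate submix-specific capabilities, and report
+// the features (telephony, Bluetooth, mic mute, master volume/mute) this module does not support.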
+ndk::ScopedAStatus ModuleRemoteSubmix::getTelephony(std::shared_ptr<ITelephony>* _aidl_return) {
+    *_aidl_return = nullptr;
+    LOG(DEBUG) << __func__ << ": returning null";
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::getBluetooth(std::shared_ptr<IBluetooth>* _aidl_return) {
+    *_aidl_return = nullptr;
+    LOG(DEBUG) << __func__ << ": returning null";
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::getMicMute(bool* _aidl_return __unused) {
+    LOG(DEBUG) << __func__ << ": is not supported";
+    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::setMicMute(bool in_mute __unused) {
+    LOG(DEBUG) << __func__ << ": is not supported";
+    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::createInputStream(
+        const SinkMetadata& sinkMetadata, StreamContext&& context,
+        const std::vector<MicrophoneInfo>& microphones, std::shared_ptr<StreamIn>* result) {
+    return createStreamInstance<StreamInRemoteSubmix>(result, sinkMetadata, std::move(context),
+                                                      microphones);
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::createOutputStream(
+        const SourceMetadata& sourceMetadata, StreamContext&& context,
+        const std::optional<AudioOffloadInfo>& offloadInfo, std::shared_ptr<StreamOut>* result) {
+    return createStreamInstance<StreamOutRemoteSubmix>(result, sourceMetadata, std::move(context),
+                                                       offloadInfo);
+}
+
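+// For r_submix the connected device port profiles are statically defined in the module
+// configuration, so this override only validates that they are supported by the submix pipe.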
+ndk::ScopedAStatus ModuleRemoteSubmix::populateConnectedDevicePort(AudioPort* audioPort) {
+    LOG(VERBOSE) << __func__ << ": Profiles already populated by Configuration";
+    for (const auto& profile : audioPort->profiles) {
+        for (const auto& channelMask : profile.channelMasks) {
+            if (!r_submix::isChannelMaskSupported(channelMask)) {
+                LOG(ERROR) << __func__ << ": the profile " << profile.name
+                           << " has an unsupported channel mask: " << channelMask.toString();
+                return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+            }
+        }
+        for (auto sampleRate : profile.sampleRates) {
+            if (!r_submix::isSampleRateSupported(sampleRate)) {
+                LOG(ERROR) << __func__ << ": the profile " << profile.name
+                           << " has an unsupported sample rate: " << sampleRate;
+                return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+            }
+        }
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::checkAudioPatchEndpointsMatch(
+        const std::vector<AudioPortConfig*>& sources, const std::vector<AudioPortConfig*>& sinks) {
+    for (const auto& source : sources) {
+        for (const auto& sink : sinks) {
+            if (source->sampleRate != sink->sampleRate ||
+                source->channelMask != sink->channelMask || source->format != sink->format) {
+                LOG(ERROR) << __func__
+                           << ": mismatched port configuration, source=" << source->toString()
+                           << ", sink=" << sink->toString();
+                return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+            }
+        }
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+void ModuleRemoteSubmix::onExternalDeviceConnectionChanged(
+        const ::aidl::android::media::audio::common::AudioPort& audioPort __unused,
+        bool connected __unused) {
+    LOG(DEBUG) << __func__ << ": do nothing and return";
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::onMasterMuteChanged(bool __unused) {
+    LOG(DEBUG) << __func__ << ": is not supported";
+    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::onMasterVolumeChanged(float __unused) {
+    LOG(DEBUG) << __func__ << ": is not supported";
+    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+}
+
+}  // namespace aidl::android::hardware::audio::core
diff --git a/audio/aidl/default/r_submix/RemoteSubmixUtils.cpp b/audio/aidl/default/r_submix/RemoteSubmixUtils.cpp
new file mode 100644
index 0000000..2f5d17d
--- /dev/null
+++ b/audio/aidl/default/r_submix/RemoteSubmixUtils.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <algorithm>
+#include <vector>
+
+#include "RemoteSubmixUtils.h"
+
+namespace aidl::android::hardware::audio::core::r_submix {
+
+bool isChannelMaskSupported(const AudioChannelLayout& channelMask) {
+    static const std::vector<AudioChannelLayout> kSupportedChannelMasks = {
+            AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                    AudioChannelLayout::LAYOUT_MONO),
+            AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                    AudioChannelLayout::LAYOUT_STEREO)};
+
+    return std::find(kSupportedChannelMasks.begin(), kSupportedChannelMasks.end(), channelMask) !=
+           kSupportedChannelMasks.end();
+}
+
+bool isSampleRateSupported(int sampleRate) {
+    static const std::vector<int> kSupportedSampleRates = {8000,  11025, 12000, 16000, 22050,
+                                                           24000, 32000, 44100, 48000};
+
+    return std::find(kSupportedSampleRates.begin(), kSupportedSampleRates.end(), sampleRate) !=
+           kSupportedSampleRates.end();
+}
+
+}  // namespace aidl::android::hardware::audio::core::r_submix
diff --git a/audio/aidl/default/r_submix/RemoteSubmixUtils.h b/audio/aidl/default/r_submix/RemoteSubmixUtils.h
new file mode 100644
index 0000000..952a992
--- /dev/null
+++ b/audio/aidl/default/r_submix/RemoteSubmixUtils.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/media/audio/common/AudioChannelLayout.h>
+#include <aidl/android/media/audio/common/AudioFormatDescription.h>
+
+using aidl::android::media::audio::common::AudioChannelLayout;
+
+namespace aidl::android::hardware::audio::core::r_submix {
+
+bool isChannelMaskSupported(const AudioChannelLayout& channelMask);
+
+bool isSampleRateSupported(int sampleRate);
+
+}  // namespace aidl::android::hardware::audio::core::r_submix
diff --git a/audio/aidl/default/r_submix/StreamRemoteSubmix.cpp b/audio/aidl/default/r_submix/StreamRemoteSubmix.cpp
new file mode 100644
index 0000000..9cc6fb8
--- /dev/null
+++ b/audio/aidl/default/r_submix/StreamRemoteSubmix.cpp
@@ -0,0 +1,354 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AHAL_StreamRemoteSubmix"
+#include <android-base/logging.h>
+
+#include <cmath>
+
+#include "core-impl/StreamRemoteSubmix.h"
+
+using aidl::android::hardware::audio::common::SinkMetadata;
+using aidl::android::hardware::audio::common::SourceMetadata;
+using aidl::android::media::audio::common::AudioOffloadInfo;
+using aidl::android::media::audio::common::MicrophoneDynamicInfo;
+using aidl::android::media::audio::common::MicrophoneInfo;
+
+namespace aidl::android::hardware::audio::core {
+
+StreamRemoteSubmix::StreamRemoteSubmix(const Metadata& metadata, StreamContext&& context)
+    : StreamCommonImpl(metadata, std::move(context)),
+      mPortId(context.getPortId()),
+      mIsInput(isInput(metadata)) {
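+    // Note: 'context' was passed to the base class as an rvalue above. Reading it here still
+    // works because StreamContext's move constructor (see Stream.h) copies, rather than moves,
+    // the port id, format, channel layout and sample rate fields used below.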
+    mStreamConfig.frameSize = context.getFrameSize();
+    mStreamConfig.format = context.getFormat();
+    mStreamConfig.channelLayout = context.getChannelLayout();
+    mStreamConfig.sampleRate = context.getSampleRate();
+}
+
+std::mutex StreamRemoteSubmix::sSubmixRoutesLock;
+std::map<int32_t, std::shared_ptr<SubmixRoute>> StreamRemoteSubmix::sSubmixRoutes;
+
+::android::status_t StreamRemoteSubmix::init() {
+    {
+        std::lock_guard guard(sSubmixRoutesLock);
+        if (sSubmixRoutes.find(mPortId) != sSubmixRoutes.end()) {
+            mCurrentRoute = sSubmixRoutes[mPortId];
+        }
+    }
+    // If route is not available for this port, add it.
+    if (mCurrentRoute == nullptr) {
+        // Initialize the pipe.
+        mCurrentRoute = std::make_shared<SubmixRoute>();
+        if (::android::OK != mCurrentRoute->createPipe(mStreamConfig)) {
+            LOG(ERROR) << __func__ << ": create pipe failed";
+            return mStatus;
+        }
+        {
+            std::lock_guard guard(sSubmixRoutesLock);
+            sSubmixRoutes.emplace(mPortId, mCurrentRoute);
+        }
+    } else {
+        if (!mCurrentRoute->isStreamConfigValid(mIsInput, mStreamConfig)) {
+            LOG(ERROR) << __func__ << ": invalid stream config";
+            return mStatus;
+        }
+        sp<MonoPipe> sink = mCurrentRoute->getSink();
+        if (sink == nullptr) {
+            LOG(ERROR) << __func__ << ": nullptr sink when opening stream";
+            return mStatus;
+        }
+        // If the sink has been shut down, delete the pipe and recreate it.
+        if (sink->isShutdown()) {
+            LOG(DEBUG) << __func__ << ": sink has been shut down, recreating the pipe";
+            if (::android::OK != mCurrentRoute->resetPipe()) {
+                LOG(ERROR) << __func__ << ": reset pipe failed";
+                return mStatus;
+            }
+        }
+    }
+
+    mCurrentRoute->openStream(mIsInput);
+    mStatus = ::android::OK;
+    return mStatus;
+}
+
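+// The submix pipe has no device state to drain, flush or pause; these operations only sleep
+// briefly before reporting success.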
+::android::status_t StreamRemoteSubmix::drain(StreamDescriptor::DrainMode) {
+    usleep(1000);
+    return ::android::OK;
+}
+
+::android::status_t StreamRemoteSubmix::flush() {
+    usleep(1000);
+    return ::android::OK;
+}
+
+::android::status_t StreamRemoteSubmix::pause() {
+    usleep(1000);
+    return ::android::OK;
+}
+
+ndk::ScopedAStatus StreamRemoteSubmix::prepareToClose() {
+    if (!mIsInput) {
+        std::shared_ptr<SubmixRoute> route = nullptr;
+        {
+            std::lock_guard guard(sSubmixRoutesLock);
+            if (sSubmixRoutes.find(mPortId) != sSubmixRoutes.end()) {
+                route = sSubmixRoutes[mPortId];
+            }
+        }
+        if (route != nullptr) {
+            sp<MonoPipe> sink = route->getSink();
+            if (sink == nullptr) {
+                return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+            }
+            LOG(DEBUG) << __func__ << ": shutting down MonoPipe sink";
+
+            sink->shutdown(true);
+        } else {
+            LOG(DEBUG) << __func__ << ": stream already closed.";
+            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+        }
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+// Remove the reference to the input or output stream from the route. When the route no longer
+// references any open stream, destroy the associated pipe and drop the route for this port.
+void StreamRemoteSubmix::shutdown() {
+    mCurrentRoute->closeStream(mIsInput);
+    // If all stream instances are closed, we can remove route information for this port.
+    if (!mCurrentRoute->hasAtleastOneStreamOpen()) {
+        mCurrentRoute->releasePipe();
+        LOG(DEBUG) << __func__ << ": pipe destroyed";
+
+        std::lock_guard guard(sSubmixRoutesLock);
+        sSubmixRoutes.erase(mPortId);
+        mStatus = ::android::NO_INIT;
+    }
+}
+
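+// Transfer a buffer in either direction: an output stream writes the client's buffer into the
+// route's MonoPipe sink, an input stream reads from the MonoPipeReader source. The reported
+// latency corresponds to the size of the pipe at the stream's sample rate.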
+::android::status_t StreamRemoteSubmix::transfer(void* buffer, size_t frameCount,
+                                                 size_t* actualFrameCount, int32_t* latencyMs) {
+    if (mStatus != ::android::OK) {
+        LOG(ERROR) << __func__ << ": failed, not configured";
+        return ::android::NO_INIT;
+    }
+
+    *latencyMs = (getStreamPipeSizeInFrames() * MILLIS_PER_SECOND) / mStreamConfig.sampleRate;
+    LOG(VERBOSE) << __func__ << ": Latency " << *latencyMs << "ms";
+
+    sp<MonoPipe> sink = mCurrentRoute->getSink();
+    if (sink != nullptr) {
+        if (sink->isShutdown()) {
+            sink.clear();
+            LOG(VERBOSE) << __func__ << ": pipe shutdown, ignoring the transfer.";
+            // the pipe has already been shut down, this buffer will be lost but we must simulate
+            // timing so we don't drain the output faster than realtime
+            const size_t delayUs = static_cast<size_t>(
+                    std::roundf(frameCount * MICROS_PER_SECOND / mStreamConfig.sampleRate));
+            usleep(delayUs);
+
+            *actualFrameCount = frameCount;
+            return ::android::OK;
+        }
+    } else {
+        LOG(ERROR) << __func__ << ": transfer without a pipe!";
+        return ::android::UNEXPECTED_NULL;
+    }
+
+    mCurrentRoute->exitStandby(mIsInput);
+    return (mIsInput ? inRead(buffer, frameCount, actualFrameCount)
+                     : outWrite(buffer, frameCount, actualFrameCount));
+}
+
+// Calculate the maximum size of the pipe buffer in frames for the specified stream.
+size_t StreamRemoteSubmix::getStreamPipeSizeInFrames() {
+    auto pipeConfig = mCurrentRoute->mPipeConfig;
+    const size_t maxFrameSize = std::max(mStreamConfig.frameSize, pipeConfig.frameSize);
+    return (pipeConfig.frameCount * pipeConfig.frameSize) / maxFrameSize;
+}
+
+::android::status_t StreamRemoteSubmix::outWrite(void* buffer, size_t frameCount,
+                                                 size_t* actualFrameCount) {
+    sp<MonoPipe> sink = mCurrentRoute->getSink();
+    if (sink != nullptr) {
+        if (sink->isShutdown()) {
+            sink.clear();
+            LOG(VERBOSE) << __func__ << ": pipe shutdown, ignoring the write.";
+            // the pipe has already been shut down, this buffer will be lost but we must
+            // simulate timing so we don't drain the output faster than realtime
+            const size_t delayUs = static_cast<size_t>(
+                    std::roundf(frameCount * MICROS_PER_SECOND / mStreamConfig.sampleRate));
+            usleep(delayUs);
+            *actualFrameCount = frameCount;
+            return ::android::OK;
+        }
+    } else {
+        LOG(FATAL) << __func__ << ": without a pipe!";
+        return ::android::UNKNOWN_ERROR;
+    }
+
+    const size_t availableToWrite = sink->availableToWrite();
+    // NOTE: sink has been checked above and sink and source life cycles are synchronized
+    sp<MonoPipeReader> source = mCurrentRoute->getSource();
+    // If the write to the sink should not block, flush enough frames from the pipe to make space
+    // to write the most recent data.
+    if (!mCurrentRoute->shouldBlockWrite() && availableToWrite < frameCount) {
+        static uint8_t flushBuffer[64];
+        const size_t flushBufferSizeFrames = sizeof(flushBuffer) / mStreamConfig.frameSize;
+        size_t framesToFlushFromSource = frameCount - availableToWrite;
+        LOG(VERBOSE) << __func__ << ": flushing " << framesToFlushFromSource
+                     << " frames from the pipe to avoid blocking";
+        while (framesToFlushFromSource) {
+            const size_t flushSize = std::min(framesToFlushFromSource, flushBufferSizeFrames);
+            framesToFlushFromSource -= flushSize;
+            // read does not block
+            source->read(flushBuffer, flushSize);
+        }
+    }
+
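+    // The pipe was created with writeCanBlock == true, so this write blocks until space is
+    // available unless frames were flushed above. NEGOTIATE is treated as an error for this sink;
+    // any other negative result is retried once below.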
+    ssize_t writtenFrames = sink->write(buffer, frameCount);
+    if (writtenFrames < 0) {
+        if (writtenFrames == (ssize_t)::android::NEGOTIATE) {
+            LOG(ERROR) << __func__ << ": write to pipe returned NEGOTIATE";
+            sink.clear();
+            *actualFrameCount = 0;
+            return ::android::UNKNOWN_ERROR;
+        } else {
+            // write() returned UNDERRUN or WOULD_BLOCK, retry
+            LOG(ERROR) << __func__ << ": write to pipe returned unexpected " << writtenFrames;
+            writtenFrames = sink->write(buffer, frameCount);
+        }
+    }
+    sink.clear();
+
+    if (writtenFrames < 0) {
+        LOG(ERROR) << __func__ << ": failed writing to pipe with " << writtenFrames;
+        *actualFrameCount = 0;
+        return ::android::UNKNOWN_ERROR;
+    }
+    LOG(VERBOSE) << __func__ << ": wrote " << writtenFrames << " frames";
+    *actualFrameCount = writtenFrames;
+    return ::android::OK;
+}
+
+::android::status_t StreamRemoteSubmix::inRead(void* buffer, size_t frameCount,
+                                               size_t* actualFrameCount) {
+    // about to read from audio source
+    sp<MonoPipeReader> source = mCurrentRoute->getSource();
+    if (source == nullptr) {
+        int readErrorCount = mCurrentRoute->notifyReadError();
+        if (readErrorCount < kMaxReadErrorLogs) {
+            LOG(ERROR)
+                    << __func__
+                    << ": no audio pipe yet we're trying to read! (not all errors will be logged)";
+        } else {
+            LOG(ERROR) << __func__ << ": Read errors " << readErrorCount;
+        }
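+        // No pipe source yet: sleep for the duration of the requested buffer and return silence
+        // so that the client still observes real-time pacing.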
+        const size_t delayUs = static_cast<size_t>(
+                std::roundf(frameCount * MICROS_PER_SECOND / mStreamConfig.sampleRate));
+        usleep(delayUs);
+        memset(buffer, 0, mStreamConfig.frameSize * frameCount);
+        *actualFrameCount = frameCount;
+        return ::android::OK;
+    }
+
+    // read the data from the pipe
+    int attempts = 0;
+    const size_t delayUs = kReadAttemptSleepUs;
+    char* buff = (char*)buffer;
+    size_t remainingFrames = frameCount;
+
+    while ((remainingFrames > 0) && (attempts < kMaxReadFailureAttempts)) {
+        LOG(VERBOSE) << __func__ << ": frames available to read " << source->availableToRead();
+
+        ssize_t framesRead = source->read(buff, remainingFrames);
+
+        LOG(VERBOSE) << __func__ << ": frames read " << framesRead;
+
+        if (framesRead > 0) {
+            remainingFrames -= framesRead;
+            buff += framesRead * mStreamConfig.frameSize;
+            LOG(VERBOSE) << __func__ << ": (attempts = " << attempts << ") got " << framesRead
+                         << " frames, remaining=" << remainingFrames;
+        } else {
+            attempts++;
+            LOG(WARNING) << __func__ << ": read returned " << framesRead
+                         << ", read failure attempts = " << attempts;
+            usleep(delayUs);
+        }
+    }
+    // done using the source
+    source.clear();
+
+    if (remainingFrames > 0) {
+        const size_t remainingBytes = remainingFrames * mStreamConfig.frameSize;
+        LOG(VERBOSE) << __func__ << ": clearing remainingFrames = " << remainingFrames;
+        memset(((char*)buffer) + (mStreamConfig.frameSize * frameCount) - remainingBytes, 0,
+               remainingBytes);
+    }
+
+    long readCounterFrames = mCurrentRoute->updateReadCounterFrames(frameCount);
+    *actualFrameCount = frameCount;
+
+    // Compute how long to sleep after reading the data, by comparing the wall clock with the
+    // projected time at which we should return.
+    // Wall clock time spent recording so far, in microseconds.
+    const auto recordDurationUs = std::chrono::duration_cast<std::chrono::microseconds>(
+            std::chrono::steady_clock::now() - mCurrentRoute->getRecordStartTime());
+
+    // readCounterFrames contains the number of frames that have been read since the beginning of
+    // recording (including this call): it's converted to usec and compared to how long we've been
+    // recording for, which gives us how long we must wait to sync the projected recording time
+    // with the observed recording time.
+    static constexpr float kScaleFactor = .8f;
+    const int64_t projectedVsObservedOffsetUs =
+            kScaleFactor * (readCounterFrames * MICROS_PER_SECOND / mStreamConfig.sampleRate -
+                            recordDurationUs.count());
+
+    LOG(VERBOSE) << __func__ << ": record duration " << recordDurationUs.count()
+                 << " microseconds, will wait: " << projectedVsObservedOffsetUs << " microseconds";
+    if (projectedVsObservedOffsetUs > 0) {
+        usleep(projectedVsObservedOffsetUs);
+    }
+    return ::android::OK;
+}
+
+::android::status_t StreamRemoteSubmix::standby() {
+    mCurrentRoute->standby(mIsInput);
+    return ::android::OK;
+}
+
+StreamInRemoteSubmix::StreamInRemoteSubmix(const SinkMetadata& sinkMetadata,
+                                           StreamContext&& context,
+                                           const std::vector<MicrophoneInfo>& microphones)
+    : StreamRemoteSubmix(sinkMetadata, std::move(context)), StreamIn(microphones) {}
+
+ndk::ScopedAStatus StreamInRemoteSubmix::getActiveMicrophones(
+        std::vector<MicrophoneDynamicInfo>* _aidl_return) {
+    LOG(DEBUG) << __func__ << ": not supported";
+    *_aidl_return = std::vector<MicrophoneDynamicInfo>();
+    return ndk::ScopedAStatus::ok();
+}
+
+StreamOutRemoteSubmix::StreamOutRemoteSubmix(const SourceMetadata& sourceMetadata,
+                                             StreamContext&& context,
+                                             const std::optional<AudioOffloadInfo>& offloadInfo)
+    : StreamRemoteSubmix(sourceMetadata, std::move(context)), StreamOut(offloadInfo) {}
+
+}  // namespace aidl::android::hardware::audio::core
diff --git a/audio/aidl/default/r_submix/SubmixRoute.cpp b/audio/aidl/default/r_submix/SubmixRoute.cpp
new file mode 100644
index 0000000..8f5b8cb
--- /dev/null
+++ b/audio/aidl/default/r_submix/SubmixRoute.cpp
@@ -0,0 +1,230 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AHAL_SubmixRoute"
+#include <android-base/logging.h>
+#include <media/AidlConversionCppNdk.h>
+
+#include <Utils.h>
+
+#include "SubmixRoute.h"
+
+using aidl::android::hardware::audio::common::getChannelCount;
+
+namespace aidl::android::hardware::audio::core::r_submix {
+
+// Verify a submix input or output stream can be opened.
+bool SubmixRoute::isStreamConfigValid(bool isInput, const AudioConfig& streamConfig) {
+    // An output stream can only be opened once per route. Multiple input streams may be open at
+    // the same time (the legacy ENABLE_LEGACY_INPUT_OPEN behaviour is the default here).
+    if (!isInput && isStreamOutOpen()) {
+        LOG(ERROR) << __func__ << ": output stream already open.";
+        return false;
+    }
+    // If either stream is open, verify the existing pipe config matches the stream config.
+    if (hasAtleastOneStreamOpen() && !isStreamConfigCompatible(streamConfig)) {
+        return false;
+    }
+    return true;
+}
+
+// Compare this stream config with existing pipe config, returning false if they do *not*
+// match, true otherwise.
+bool SubmixRoute::isStreamConfigCompatible(const AudioConfig& streamConfig) {
+    if (streamConfig.channelLayout != mPipeConfig.channelLayout) {
+        LOG(ERROR) << __func__ << ": channel count mismatch, stream channels = "
+                   << streamConfig.channelLayout.toString()
+                   << " pipe config channels = " << mPipeConfig.channelLayout.toString();
+        return false;
+    }
+    if (streamConfig.sampleRate != mPipeConfig.sampleRate) {
+        LOG(ERROR) << __func__
+                   << ": sample rate mismatch, stream sample rate = " << streamConfig.sampleRate
+                   << " pipe config sample rate = " << mPipeConfig.sampleRate;
+        return false;
+    }
+    if (streamConfig.format != mPipeConfig.format) {
+        LOG(ERROR) << __func__
+                   << ": format mismatch, stream format = " << streamConfig.format.toString()
+                   << " pipe config format = " << mPipeConfig.format.toString();
+        return false;
+    }
+    return true;
+}
+
+bool SubmixRoute::hasAtleastOneStreamOpen() {
+    std::lock_guard guard(mLock);
+    return (mStreamInOpen || mStreamOutOpen);
+}
+
+// We DO NOT block if:
+// - no peer input stream is present
+// - the peer input is in standby AFTER having been active.
+// We DO block if:
+// - the input was never activated to avoid discarding first frames in the pipe in case capture
+// start was delayed
+bool SubmixRoute::shouldBlockWrite() {
+    std::lock_guard guard(mLock);
+    return (mStreamInOpen || (mStreamInStandby && (mReadCounterFrames != 0)));
+}
+
+int SubmixRoute::notifyReadError() {
+    std::lock_guard guard(mLock);
+    return ++mReadErrorCount;
+}
+
+long SubmixRoute::updateReadCounterFrames(size_t frameCount) {
+    std::lock_guard guard(mLock);
+    mReadCounterFrames += frameCount;
+    return mReadCounterFrames;
+}
+
+void SubmixRoute::openStream(bool isInput) {
+    std::lock_guard guard(mLock);
+    if (isInput) {
+        if (mStreamInOpen) {
+            mInputRefCount++;
+        } else {
+            mInputRefCount = 1;
+            mStreamInOpen = true;
+        }
+        mStreamInStandby = true;
+        mReadCounterFrames = 0;
+        mReadErrorCount = 0;
+    } else {
+        mStreamOutOpen = true;
+    }
+}
+
+void SubmixRoute::closeStream(bool isInput) {
+    std::lock_guard guard(mLock);
+    if (isInput) {
+        mInputRefCount--;
+        if (mInputRefCount == 0) {
+            mStreamInOpen = false;
+            if (mSink != nullptr) {
+                mSink->shutdown(true);
+            }
+        }
+    } else {
+        mStreamOutOpen = false;
+    }
+}
+
+// Create the pipe for the submix audio device and store its configuration as the pipe config of
+// this route. The pipe size is derived from kDefaultPipeSizeInFrames, scaled to the sample rate.
+::android::status_t SubmixRoute::createPipe(const AudioConfig& streamConfig) {
+    const int channelCount = getChannelCount(streamConfig.channelLayout);
+    const audio_format_t audioFormat = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioFormatDescription_audio_format_t(streamConfig.format));
+    const ::android::NBAIO_Format format =
+            ::android::Format_from_SR_C(streamConfig.sampleRate, channelCount, audioFormat);
+    const ::android::NBAIO_Format offers[1] = {format};
+    size_t numCounterOffers = 0;
+
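+    // Scale the default pipe size (defined at kDefaultSampleRateHz) so that the pipe holds
+    // roughly the same duration of audio at the requested sample rate.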
+    const size_t pipeSizeInFrames =
+            r_submix::kDefaultPipeSizeInFrames *
+            ((float)streamConfig.sampleRate / r_submix::kDefaultSampleRateHz);
+    LOG(VERBOSE) << __func__ << ": creating pipe, rate : " << streamConfig.sampleRate
+                 << ", pipe size : " << pipeSizeInFrames;
+
+    // Create a MonoPipe with optional blocking set to true.
+    sp<MonoPipe> sink = sp<MonoPipe>::make(pipeSizeInFrames, format, true /*writeCanBlock*/);
+    if (sink == nullptr) {
+        LOG(FATAL) << __func__ << ": sink is null";
+        return ::android::UNEXPECTED_NULL;
+    }
+
+    // Negotiation between the source and sink cannot fail as the device open operation
+    // creates both ends of the pipe using the same audio format.
+    ssize_t index = sink->negotiate(offers, 1, nullptr, numCounterOffers);
+    if (index != 0) {
+        LOG(FATAL) << __func__ << ": Negotiation for the sink failed, index = " << index;
+        return ::android::BAD_INDEX;
+    }
+    sp<MonoPipeReader> source = sp<MonoPipeReader>::make(sink.get());
+    if (source == nullptr) {
+        LOG(FATAL) << __func__ << ": source is null";
+        return ::android::UNEXPECTED_NULL;
+    }
+    numCounterOffers = 0;
+    index = source->negotiate(offers, 1, nullptr, numCounterOffers);
+    if (index != 0) {
+        LOG(FATAL) << __func__ << ": Negotiation for the source failed, index = " << index;
+        return ::android::BAD_INDEX;
+    }
+    LOG(VERBOSE) << __func__ << ": created pipe";
+
+    mPipeConfig = streamConfig;
+    mPipeConfig.frameCount = sink->maxFrames();
+
+    LOG(VERBOSE) << __func__ << ": Pipe frame size : " << mPipeConfig.frameSize
+                 << ", pipe frames : " << mPipeConfig.frameCount;
+
+    // Save references to the source and sink.
+    {
+        std::lock_guard guard(mLock);
+        mSink = std::move(sink);
+        mSource = std::move(source);
+    }
+
+    return ::android::OK;
+}
+
+// Release references to the sink and source.
+void SubmixRoute::releasePipe() {
+    std::lock_guard guard(mLock);
+    mSink.clear();
+    mSource.clear();
+}
+
+::android::status_t SubmixRoute::resetPipe() {
+    releasePipe();
+    return createPipe(mPipeConfig);
+}
+
+void SubmixRoute::standby(bool isInput) {
+    std::lock_guard guard(mLock);
+
+    if (isInput) {
+        mStreamInStandby = true;
+    } else {
+        mStreamOutStandby = true;
+        mStreamOutStandbyTransition = !mStreamOutStandbyTransition;
+    }
+}
+
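+// Leaving standby on the input side (or after the output has gone through a standby cycle,
+// tracked by mStreamOutStandbyTransition) resets the recording start time and the read counter
+// that inRead() uses to pace reads against the wall clock.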
+void SubmixRoute::exitStandby(bool isInput) {
+    std::lock_guard guard(mLock);
+
+    if (isInput) {
+        if (mStreamInStandby || mStreamOutStandbyTransition) {
+            mStreamInStandby = false;
+            mStreamOutStandbyTransition = false;
+            // keep track of when we exit input standby (== first read == start "real recording")
+            // or when we start recording silence, and reset projected time
+            mRecordStartTime = std::chrono::steady_clock::now();
+            mReadCounterFrames = 0;
+        }
+    } else {
+        if (mStreamOutStandby) {
+            mStreamOutStandby = false;
+            mStreamOutStandbyTransition = true;
+        }
+    }
+}
+
+}  // namespace aidl::android::hardware::audio::core::r_submix
diff --git a/audio/aidl/default/r_submix/SubmixRoute.h b/audio/aidl/default/r_submix/SubmixRoute.h
new file mode 100644
index 0000000..5f7ea75
--- /dev/null
+++ b/audio/aidl/default/r_submix/SubmixRoute.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <chrono>
+#include <mutex>
+
+#include <audio_utils/clock.h>
+
+#include <media/nbaio/MonoPipe.h>
+#include <media/nbaio/MonoPipeReader.h>
+
+#include <aidl/android/media/audio/common/AudioChannelLayout.h>
+
+#include "core-impl/Stream.h"
+
+using aidl::android::media::audio::common::AudioChannelLayout;
+using aidl::android::media::audio::common::AudioFormatDescription;
+using aidl::android::media::audio::common::AudioFormatType;
+using aidl::android::media::audio::common::PcmType;
+using ::android::MonoPipe;
+using ::android::MonoPipeReader;
+using ::android::sp;
+
+namespace aidl::android::hardware::audio::core::r_submix {
+
+static constexpr int kDefaultSampleRateHz = 48000;
+// The default pipe size in frames, defined at the default sample rate.
+// NOTE: This value will be rounded up to the nearest power of 2 by MonoPipe().
+static constexpr int kDefaultPipeSizeInFrames = (1024 * 4);
+
+// Configuration of the audio stream.
+struct AudioConfig {
+    int sampleRate = kDefaultSampleRateHz;
+    AudioFormatDescription format =
+            AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT};
+    AudioChannelLayout channelLayout =
+            AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                    AudioChannelLayout::LAYOUT_STEREO);
+    size_t frameSize = 0;
+    size_t frameCount = 0;
+};
+
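+// A SubmixRoute ties together the streams opened on one remote submix port: the playback side
+// writes into the MonoPipe sink (mSink) and the capture side reads from the MonoPipeReader
+// source (mSource), along with the bookkeeping needed to pace reads and writes.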
+class SubmixRoute {
+  public:
+    AudioConfig mPipeConfig;
+
+    bool isStreamInOpen() {
+        std::lock_guard guard(mLock);
+        return mStreamInOpen;
+    }
+    bool getStreamInStandby() {
+        std::lock_guard guard(mLock);
+        return mStreamInStandby;
+    }
+    bool isStreamOutOpen() {
+        std::lock_guard guard(mLock);
+        return mStreamOutOpen;
+    }
+    bool getStreamOutStandby() {
+        std::lock_guard guard(mLock);
+        return mStreamOutStandby;
+    }
+    long getReadCounterFrames() {
+        std::lock_guard guard(mLock);
+        return mReadCounterFrames;
+    }
+    int getReadErrorCount() {
+        std::lock_guard guard(mLock);
+        return mReadErrorCount;
+    }
+    std::chrono::time_point<std::chrono::steady_clock> getRecordStartTime() {
+        std::lock_guard guard(mLock);
+        return mRecordStartTime;
+    }
+    sp<MonoPipe> getSink() {
+        std::lock_guard guard(mLock);
+        return mSink;
+    }
+    sp<MonoPipeReader> getSource() {
+        std::lock_guard guard(mLock);
+        return mSource;
+    }
+
+    bool isStreamConfigValid(bool isInput, const AudioConfig& streamConfig);
+    void closeStream(bool isInput);
+    ::android::status_t createPipe(const AudioConfig& streamConfig);
+    void exitStandby(bool isInput);
+    bool hasAtleastOneStreamOpen();
+    int notifyReadError();
+    void openStream(bool isInput);
+    void releasePipe();
+    ::android::status_t resetPipe();
+    bool shouldBlockWrite();
+    void standby(bool isInput);
+    long updateReadCounterFrames(size_t frameCount);
+
+  private:
+    bool isStreamConfigCompatible(const AudioConfig& streamConfig);
+
+    std::mutex mLock;
+
+    bool mStreamInOpen GUARDED_BY(mLock) = false;
+    int mInputRefCount GUARDED_BY(mLock) = 0;
+    bool mStreamInStandby GUARDED_BY(mLock) = true;
+    bool mStreamOutStandbyTransition GUARDED_BY(mLock) = false;
+    bool mStreamOutOpen GUARDED_BY(mLock) = false;
+    bool mStreamOutStandby GUARDED_BY(mLock) = true;
+    // how many frames have been requested to be read since standby
+    long mReadCounterFrames GUARDED_BY(mLock) = 0;
+    int mReadErrorCount GUARDED_BY(mLock) = 0;
+    // wall clock when recording starts
+    std::chrono::time_point<std::chrono::steady_clock> mRecordStartTime GUARDED_BY(mLock);
+
+    // Pipe variables: they handle the ring buffer that "pipes" audio:
+    //  - from the submix virtual audio output == what needs to be played
+    //    remotely, seen as an output for the client
+    //  - to the virtual audio source == what is captured by the component
+    //    which "records" the submix / virtual audio source, and handles it as needed.
+    // A use case example is one where the component capturing the audio is then sending it over
+    // Wifi for presentation on a remote Wifi Display device (e.g. a dongle attached to a TV, or a
+    // TV with Wifi Display capabilities), or to a wireless audio player.
+    sp<MonoPipe> mSink GUARDED_BY(mLock);
+    sp<MonoPipeReader> mSource GUARDED_BY(mLock);
+};
+
+}  // namespace aidl::android::hardware::audio::core::r_submix