Merge changes from topic "cp-aosp-2647304" into udc-qpr-dev
* changes:
audio: Add DriverInterface::start method
audio: Add remote submix stream implementation
diff --git a/audio/aidl/default/Android.bp b/audio/aidl/default/Android.bp
index bda0de2..e9294cf 100644
--- a/audio/aidl/default/Android.bp
+++ b/audio/aidl/default/Android.bp
@@ -18,6 +18,7 @@
"libbinder_ndk",
"libcutils",
"libfmq",
+ "libnbaio_mono",
"libstagefright_foundation",
"libtinyalsav2",
"libutils",
@@ -76,6 +77,10 @@
"Stream.cpp",
"StreamStub.cpp",
"Telephony.cpp",
+ "r_submix/ModuleRemoteSubmix.cpp",
+ "r_submix/RemoteSubmixUtils.cpp",
+ "r_submix/SubmixRoute.cpp",
+ "r_submix/StreamRemoteSubmix.cpp",
"usb/ModuleUsb.cpp",
"usb/StreamUsb.cpp",
"usb/UsbAlsaMixerControl.cpp",
diff --git a/audio/aidl/default/Module.cpp b/audio/aidl/default/Module.cpp
index 6f89d4b..48d1458 100644
--- a/audio/aidl/default/Module.cpp
+++ b/audio/aidl/default/Module.cpp
@@ -27,6 +27,7 @@
#include "core-impl/Bluetooth.h"
#include "core-impl/Module.h"
+#include "core-impl/ModuleRemoteSubmix.h"
#include "core-impl/ModuleUsb.h"
#include "core-impl/SoundDose.h"
#include "core-impl/StreamStub.h"
@@ -111,8 +112,9 @@
switch (type) {
case Module::Type::USB:
return ndk::SharedRefBase::make<ModuleUsb>(type);
- case Type::DEFAULT:
case Type::R_SUBMIX:
+ return ndk::SharedRefBase::make<ModuleRemoteSubmix>(type);
+ case Type::DEFAULT:
default:
return ndk::SharedRefBase::make<Module>(type);
}
@@ -181,8 +183,8 @@
StreamContext temp(
std::make_unique<StreamContext::CommandMQ>(1, true /*configureEventFlagWord*/),
std::make_unique<StreamContext::ReplyMQ>(1, true /*configureEventFlagWord*/),
- portConfigIt->format.value(), portConfigIt->channelMask.value(),
- portConfigIt->sampleRate.value().value, flags,
+ portConfigIt->portId, portConfigIt->format.value(),
+ portConfigIt->channelMask.value(), portConfigIt->sampleRate.value().value, flags,
portConfigIt->ext.get<AudioPortExt::mix>().handle,
std::make_unique<StreamContext::DataMQ>(frameSize * in_bufferSizeFrames),
asyncCallback, outEventCallback, params);
@@ -490,6 +492,17 @@
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
}
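+    // A connected device port must expose fully specified profiles: each profile must list at
+    // least one channel mask and one sample rate.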
+ for (auto profile : connectedPort.profiles) {
+ if (profile.channelMasks.empty()) {
+ LOG(ERROR) << __func__ << ": the profile " << profile.name << " has no channel masks";
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+ }
+ if (profile.sampleRates.empty()) {
+ LOG(ERROR) << __func__ << ": the profile " << profile.name << " has no sample rates";
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+ }
+ }
+
connectedPort.id = ++getConfig().nextPortId;
auto [connectedPortsIt, _] =
mConnectedDevicePorts.insert(std::pair(connectedPort.id, std::vector<int32_t>()));
diff --git a/audio/aidl/default/Stream.cpp b/audio/aidl/default/Stream.cpp
index 73f1293..251dea0 100644
--- a/audio/aidl/default/Stream.cpp
+++ b/audio/aidl/default/Stream.cpp
@@ -166,10 +166,15 @@
case Tag::start:
if (mState == StreamDescriptor::State::STANDBY ||
mState == StreamDescriptor::State::DRAINING) {
- populateReply(&reply, mIsConnected);
- mState = mState == StreamDescriptor::State::STANDBY
- ? StreamDescriptor::State::IDLE
- : StreamDescriptor::State::ACTIVE;
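+                    // Only transition out of STANDBY / DRAINING if the driver accepts the start
+                    // request; otherwise the stream goes to ERROR.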
+ if (::android::status_t status = mDriver->start(); status == ::android::OK) {
+ populateReply(&reply, mIsConnected);
+ mState = mState == StreamDescriptor::State::STANDBY
+ ? StreamDescriptor::State::IDLE
+ : StreamDescriptor::State::ACTIVE;
+ } else {
+ LOG(ERROR) << __func__ << ": start failed: " << status;
+ mState = StreamDescriptor::State::ERROR;
+ }
} else {
populateReplyWrongState(&reply, command);
}
@@ -377,26 +382,36 @@
populateReply(&reply, mIsConnected);
break;
case Tag::start: {
- bool commandAccepted = true;
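+                // Defer the state transition until the driver confirms that it has started.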
+ std::optional<StreamDescriptor::State> nextState;
switch (mState) {
case StreamDescriptor::State::STANDBY:
- mState = StreamDescriptor::State::IDLE;
+ nextState = StreamDescriptor::State::IDLE;
break;
case StreamDescriptor::State::PAUSED:
- mState = StreamDescriptor::State::ACTIVE;
+ nextState = StreamDescriptor::State::ACTIVE;
break;
case StreamDescriptor::State::DRAIN_PAUSED:
- switchToTransientState(StreamDescriptor::State::DRAINING);
+ nextState = StreamDescriptor::State::DRAINING;
break;
case StreamDescriptor::State::TRANSFER_PAUSED:
- switchToTransientState(StreamDescriptor::State::TRANSFERRING);
+ nextState = StreamDescriptor::State::TRANSFERRING;
break;
default:
populateReplyWrongState(&reply, command);
- commandAccepted = false;
}
- if (commandAccepted) {
- populateReply(&reply, mIsConnected);
+ if (nextState.has_value()) {
+ if (::android::status_t status = mDriver->start(); status == ::android::OK) {
+ populateReply(&reply, mIsConnected);
+ if (*nextState == StreamDescriptor::State::IDLE ||
+ *nextState == StreamDescriptor::State::ACTIVE) {
+ mState = *nextState;
+ } else {
+ switchToTransientState(*nextState);
+ }
+ } else {
+ LOG(ERROR) << __func__ << ": start failed: " << status;
+ mState = StreamDescriptor::State::ERROR;
+ }
}
} break;
case Tag::burst:
diff --git a/audio/aidl/default/StreamStub.cpp b/audio/aidl/default/StreamStub.cpp
index d88dfbc..2dcf4d4 100644
--- a/audio/aidl/default/StreamStub.cpp
+++ b/audio/aidl/default/StreamStub.cpp
@@ -33,33 +33,67 @@
StreamStub::StreamStub(const Metadata& metadata, StreamContext&& context)
: StreamCommonImpl(metadata, std::move(context)),
- mFrameSizeBytes(context.getFrameSize()),
- mSampleRate(context.getSampleRate()),
- mIsAsynchronous(!!context.getAsyncCallback()),
+ mFrameSizeBytes(getContext().getFrameSize()),
+ mSampleRate(getContext().getSampleRate()),
+ mIsAsynchronous(!!getContext().getAsyncCallback()),
mIsInput(isInput(metadata)) {}
::android::status_t StreamStub::init() {
+ mIsInitialized = true;
usleep(500);
return ::android::OK;
}
::android::status_t StreamStub::drain(StreamDescriptor::DrainMode) {
+ if (!mIsInitialized) {
+ LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver";
+ }
usleep(500);
return ::android::OK;
}
::android::status_t StreamStub::flush() {
+ if (!mIsInitialized) {
+ LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver";
+ }
usleep(500);
return ::android::OK;
}
::android::status_t StreamStub::pause() {
+ if (!mIsInitialized) {
+ LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver";
+ }
usleep(500);
return ::android::OK;
}
+::android::status_t StreamStub::standby() {
+ if (!mIsInitialized) {
+ LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver";
+ }
+ usleep(500);
+ mIsStandby = true;
+ return ::android::OK;
+}
+
+::android::status_t StreamStub::start() {
+ if (!mIsInitialized) {
+ LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver";
+ }
+ usleep(500);
+ mIsStandby = false;
+ return ::android::OK;
+}
+
::android::status_t StreamStub::transfer(void* buffer, size_t frameCount, size_t* actualFrameCount,
int32_t* latencyMs) {
+ if (!mIsInitialized) {
+ LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver";
+ }
+ if (mIsStandby) {
+ LOG(FATAL) << __func__ << ": must not happen while in standby";
+ }
static constexpr float kMicrosPerSecond = MICROS_PER_SECOND;
static constexpr float kScaleFactor = .8f;
if (mIsAsynchronous) {
@@ -80,13 +114,10 @@
return ::android::OK;
}
-::android::status_t StreamStub::standby() {
- usleep(500);
- return ::android::OK;
+void StreamStub::shutdown() {
+ mIsInitialized = false;
}
-void StreamStub::shutdown() {}
-
StreamInStub::StreamInStub(const SinkMetadata& sinkMetadata, StreamContext&& context,
const std::vector<MicrophoneInfo>& microphones)
: StreamStub(sinkMetadata, std::move(context)), StreamIn(microphones) {}
diff --git a/audio/aidl/default/include/core-impl/ModuleRemoteSubmix.h b/audio/aidl/default/include/core-impl/ModuleRemoteSubmix.h
new file mode 100644
index 0000000..7b1d375
--- /dev/null
+++ b/audio/aidl/default/include/core-impl/ModuleRemoteSubmix.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "core-impl/Module.h"
+
+namespace aidl::android::hardware::audio::core {
+
+class ModuleRemoteSubmix : public Module {
+ public:
+ explicit ModuleRemoteSubmix(Module::Type type) : Module(type) {}
+
+ private:
+ // IModule interfaces
+ ndk::ScopedAStatus getTelephony(std::shared_ptr<ITelephony>* _aidl_return) override;
+ ndk::ScopedAStatus getBluetooth(std::shared_ptr<IBluetooth>* _aidl_return) override;
+ ndk::ScopedAStatus getMicMute(bool* _aidl_return) override;
+ ndk::ScopedAStatus setMicMute(bool in_mute) override;
+
+ // Module interfaces
+ ndk::ScopedAStatus createInputStream(
+ const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata,
+ StreamContext&& context,
+ const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones,
+ std::shared_ptr<StreamIn>* result) override;
+ ndk::ScopedAStatus createOutputStream(
+ const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata,
+ StreamContext&& context,
+ const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>&
+ offloadInfo,
+ std::shared_ptr<StreamOut>* result) override;
+ ndk::ScopedAStatus populateConnectedDevicePort(
+ ::aidl::android::media::audio::common::AudioPort* audioPort) override;
+ ndk::ScopedAStatus checkAudioPatchEndpointsMatch(
+ const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sources,
+ const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sinks)
+ override;
+ void onExternalDeviceConnectionChanged(
+ const ::aidl::android::media::audio::common::AudioPort& audioPort,
+ bool connected) override;
+ ndk::ScopedAStatus onMasterMuteChanged(bool mute) override;
+ ndk::ScopedAStatus onMasterVolumeChanged(float volume) override;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/audio/aidl/default/include/core-impl/Stream.h b/audio/aidl/default/include/core-impl/Stream.h
index c20a421..aaf5860 100644
--- a/audio/aidl/default/include/core-impl/Stream.h
+++ b/audio/aidl/default/include/core-impl/Stream.h
@@ -78,6 +78,7 @@
StreamContext() = default;
StreamContext(std::unique_ptr<CommandMQ> commandMQ, std::unique_ptr<ReplyMQ> replyMQ,
+ int portId,
const ::aidl::android::media::audio::common::AudioFormatDescription& format,
const ::aidl::android::media::audio::common::AudioChannelLayout& channelLayout,
int sampleRate, const ::aidl::android::media::audio::common::AudioIoFlags& flags,
@@ -88,6 +89,7 @@
: mCommandMQ(std::move(commandMQ)),
mInternalCommandCookie(std::rand()),
mReplyMQ(std::move(replyMQ)),
+ mPortId(portId),
mFormat(format),
mChannelLayout(channelLayout),
mSampleRate(sampleRate),
@@ -101,6 +103,7 @@
: mCommandMQ(std::move(other.mCommandMQ)),
mInternalCommandCookie(other.mInternalCommandCookie),
mReplyMQ(std::move(other.mReplyMQ)),
+ mPortId(other.mPortId),
mFormat(other.mFormat),
mChannelLayout(other.mChannelLayout),
mSampleRate(other.mSampleRate),
@@ -114,6 +117,7 @@
mCommandMQ = std::move(other.mCommandMQ);
mInternalCommandCookie = other.mInternalCommandCookie;
mReplyMQ = std::move(other.mReplyMQ);
+        mPortId = other.mPortId;
mFormat = std::move(other.mFormat);
mChannelLayout = std::move(other.mChannelLayout);
mSampleRate = other.mSampleRate;
@@ -145,6 +149,7 @@
std::shared_ptr<IStreamOutEventCallback> getOutEventCallback() const {
return mOutEventCallback;
}
+ int getPortId() const { return mPortId; }
ReplyMQ* getReplyMQ() const { return mReplyMQ.get(); }
int getTransientStateDelayMs() const { return mDebugParameters.transientStateDelayMs; }
int getSampleRate() const { return mSampleRate; }
@@ -155,6 +160,7 @@
std::unique_ptr<CommandMQ> mCommandMQ;
int mInternalCommandCookie; // The value used to confirm that the command was posted internally
std::unique_ptr<ReplyMQ> mReplyMQ;
+ int mPortId;
::aidl::android::media::audio::common::AudioFormatDescription mFormat;
::aidl::android::media::audio::common::AudioChannelLayout mChannelLayout;
int mSampleRate;
@@ -174,9 +180,10 @@
virtual ::android::status_t drain(StreamDescriptor::DrainMode mode) = 0;
virtual ::android::status_t flush() = 0;
virtual ::android::status_t pause() = 0;
+ virtual ::android::status_t standby() = 0;
+ virtual ::android::status_t start() = 0;
virtual ::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount,
int32_t* latencyMs) = 0;
- virtual ::android::status_t standby() = 0;
virtual void shutdown() = 0; // This function is only called once.
};
diff --git a/audio/aidl/default/include/core-impl/StreamRemoteSubmix.h b/audio/aidl/default/include/core-impl/StreamRemoteSubmix.h
new file mode 100644
index 0000000..2253ec7
--- /dev/null
+++ b/audio/aidl/default/include/core-impl/StreamRemoteSubmix.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+#include <mutex>
+#include <vector>
+
+#include "core-impl/Stream.h"
+#include "r_submix/SubmixRoute.h"
+
+namespace aidl::android::hardware::audio::core {
+
+using aidl::android::hardware::audio::core::r_submix::AudioConfig;
+using aidl::android::hardware::audio::core::r_submix::SubmixRoute;
+
+class StreamRemoteSubmix : public StreamCommonImpl {
+ public:
+ StreamRemoteSubmix(const Metadata& metadata, StreamContext&& context);
+
+ ::android::status_t init() override;
+ ::android::status_t drain(StreamDescriptor::DrainMode) override;
+ ::android::status_t flush() override;
+ ::android::status_t pause() override;
+ ::android::status_t standby() override;
+ ::android::status_t start() override;
+ ::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount,
+ int32_t* latencyMs) override;
+ void shutdown() override;
+
+ // Overridden methods of 'StreamCommonImpl', called on a Binder thread.
+ ndk::ScopedAStatus prepareToClose() override;
+
+ private:
+ size_t getPipeSizeInFrames();
+ size_t getStreamPipeSizeInFrames();
+ ::android::status_t outWrite(void* buffer, size_t frameCount, size_t* actualFrameCount);
+ ::android::status_t inRead(void* buffer, size_t frameCount, size_t* actualFrameCount);
+
+ const int mPortId;
+ const bool mIsInput;
+ AudioConfig mStreamConfig;
+ std::shared_ptr<SubmixRoute> mCurrentRoute = nullptr;
+
+    // Mutex that guards the map of submix routes. Each submix route has its own internal lock,
+    // and none of these locks should be held at the same time as this one.
+ static std::mutex sSubmixRoutesLock;
+ static std::map<int32_t, std::shared_ptr<SubmixRoute>> sSubmixRoutes
+ GUARDED_BY(sSubmixRoutesLock);
+
+    // Limit on the number of read error log entries, to avoid spamming the logs.
+    static constexpr int kMaxReadErrorLogs = 5;
+    // The duration of kMaxReadFailureAttempts * kReadAttemptSleepUs must be strictly less than
+    // the duration of a record buffer at the current record sample rate (of the device, not of
+    // the recording itself). Here: 3 * 5 ms = 15 ms < 1024 frames * 1000 / 48000 = 21.333 ms.
+ static constexpr int kMaxReadFailureAttempts = 3;
+ // 5ms between two read attempts when pipe is empty
+ static constexpr int kReadAttemptSleepUs = 5000;
+};
+
+class StreamInRemoteSubmix final : public StreamRemoteSubmix, public StreamIn {
+ public:
+ friend class ndk::SharedRefBase;
+ StreamInRemoteSubmix(
+ const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata,
+ StreamContext&& context,
+ const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones);
+
+ private:
+ ndk::ScopedAStatus getActiveMicrophones(
+ std::vector<::aidl::android::media::audio::common::MicrophoneDynamicInfo>* _aidl_return)
+ override;
+};
+
+class StreamOutRemoteSubmix final : public StreamRemoteSubmix, public StreamOut {
+ public:
+ friend class ndk::SharedRefBase;
+ StreamOutRemoteSubmix(
+ const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata,
+ StreamContext&& context,
+ const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>&
+ offloadInfo);
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/audio/aidl/default/include/core-impl/StreamStub.h b/audio/aidl/default/include/core-impl/StreamStub.h
index c8900f3..6b1b2dd 100644
--- a/audio/aidl/default/include/core-impl/StreamStub.h
+++ b/audio/aidl/default/include/core-impl/StreamStub.h
@@ -28,9 +28,10 @@
::android::status_t drain(StreamDescriptor::DrainMode) override;
::android::status_t flush() override;
::android::status_t pause() override;
+ ::android::status_t standby() override;
+ ::android::status_t start() override;
::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount,
int32_t* latencyMs) override;
- ::android::status_t standby() override;
void shutdown() override;
private:
@@ -38,6 +39,8 @@
const int mSampleRate;
const bool mIsAsynchronous;
const bool mIsInput;
+ bool mIsInitialized = false; // Used for validating the state machine logic.
+ bool mIsStandby = true; // Used for validating the state machine logic.
};
class StreamInStub final : public StreamStub, public StreamIn {
diff --git a/audio/aidl/default/include/core-impl/StreamUsb.h b/audio/aidl/default/include/core-impl/StreamUsb.h
index 5e55cd8..8c40782 100644
--- a/audio/aidl/default/include/core-impl/StreamUsb.h
+++ b/audio/aidl/default/include/core-impl/StreamUsb.h
@@ -16,7 +16,10 @@
#pragma once
+#include <atomic>
+#include <functional>
#include <mutex>
+#include <optional>
#include <vector>
#include <aidl/android/media/audio/common/AudioChannelLayout.h>
@@ -38,9 +41,10 @@
::android::status_t drain(StreamDescriptor::DrainMode) override;
::android::status_t flush() override;
::android::status_t pause() override;
+ ::android::status_t standby() override;
+ ::android::status_t start() override;
::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount,
int32_t* latencyMs) override;
- ::android::status_t standby() override;
void shutdown() override;
// Overridden methods of 'StreamCommonImpl', called on a Binder thread.
@@ -48,15 +52,20 @@
ndk::ScopedAStatus setConnectedDevices(const ConnectedDevices& devices) override;
private:
- ::android::status_t exitStandby();
+ using AlsaDeviceProxyDeleter = std::function<void(alsa_device_proxy*)>;
+ using AlsaDeviceProxy = std::unique_ptr<alsa_device_proxy, AlsaDeviceProxyDeleter>;
+
+ static std::optional<struct pcm_config> maybePopulateConfig(const StreamContext& context,
+ bool isInput);
mutable std::mutex mLock;
const size_t mFrameSizeBytes;
- std::optional<struct pcm_config> mConfig;
const bool mIsInput;
- std::vector<std::shared_ptr<alsa_device_proxy>> mAlsaDeviceProxies GUARDED_BY(mLock);
- bool mIsStandby = true;
+ const std::optional<struct pcm_config> mConfig;
+ std::atomic<bool> mConnectedDevicesUpdated = false;
+ // All fields below are only used on the worker thread.
+ std::vector<AlsaDeviceProxy> mAlsaDeviceProxies;
};
class StreamInUsb final : public StreamUsb, public StreamIn {
diff --git a/audio/aidl/default/r_submix/ModuleRemoteSubmix.cpp b/audio/aidl/default/r_submix/ModuleRemoteSubmix.cpp
new file mode 100644
index 0000000..2b79f51
--- /dev/null
+++ b/audio/aidl/default/r_submix/ModuleRemoteSubmix.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AHAL_ModuleRemoteSubmix"
+
+#include <vector>
+
+#include <android-base/logging.h>
+
+#include "RemoteSubmixUtils.h"
+#include "core-impl/ModuleRemoteSubmix.h"
+#include "core-impl/StreamRemoteSubmix.h"
+
+using aidl::android::hardware::audio::common::SinkMetadata;
+using aidl::android::hardware::audio::common::SourceMetadata;
+using aidl::android::media::audio::common::AudioOffloadInfo;
+using aidl::android::media::audio::common::AudioPort;
+using aidl::android::media::audio::common::AudioPortConfig;
+using aidl::android::media::audio::common::MicrophoneInfo;
+
+namespace aidl::android::hardware::audio::core {
+
+ndk::ScopedAStatus ModuleRemoteSubmix::getTelephony(std::shared_ptr<ITelephony>* _aidl_return) {
+ *_aidl_return = nullptr;
+ LOG(DEBUG) << __func__ << ": returning null";
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::getBluetooth(std::shared_ptr<IBluetooth>* _aidl_return) {
+ *_aidl_return = nullptr;
+ LOG(DEBUG) << __func__ << ": returning null";
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::getMicMute(bool* _aidl_return __unused) {
+ LOG(DEBUG) << __func__ << ": is not supported";
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::setMicMute(bool in_mute __unused) {
+ LOG(DEBUG) << __func__ << ": is not supported";
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::createInputStream(
+ const SinkMetadata& sinkMetadata, StreamContext&& context,
+ const std::vector<MicrophoneInfo>& microphones, std::shared_ptr<StreamIn>* result) {
+ return createStreamInstance<StreamInRemoteSubmix>(result, sinkMetadata, std::move(context),
+ microphones);
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::createOutputStream(
+ const SourceMetadata& sourceMetadata, StreamContext&& context,
+ const std::optional<AudioOffloadInfo>& offloadInfo, std::shared_ptr<StreamOut>* result) {
+ return createStreamInstance<StreamOutRemoteSubmix>(result, sourceMetadata, std::move(context),
+ offloadInfo);
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::populateConnectedDevicePort(AudioPort* audioPort) {
+ LOG(VERBOSE) << __func__ << ": Profiles already populated by Configuration";
+ for (auto profile : audioPort->profiles) {
+ for (auto channelMask : profile.channelMasks) {
+ if (!r_submix::isChannelMaskSupported(channelMask)) {
+ LOG(ERROR) << __func__ << ": the profile " << profile.name
+ << " has unsupported channel mask : " << channelMask.toString();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+ }
+ }
+ for (auto sampleRate : profile.sampleRates) {
+ if (!r_submix::isSampleRateSupported(sampleRate)) {
+ LOG(ERROR) << __func__ << ": the profile " << profile.name
+ << " has unsupported sample rate : " << sampleRate;
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+ }
+ }
+ }
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::checkAudioPatchEndpointsMatch(
+ const std::vector<AudioPortConfig*>& sources, const std::vector<AudioPortConfig*>& sinks) {
+ for (const auto& source : sources) {
+ for (const auto& sink : sinks) {
+ if (source->sampleRate != sink->sampleRate ||
+ source->channelMask != sink->channelMask || source->format != sink->format) {
+ LOG(ERROR) << __func__
+ << ": mismatch port configuration, source=" << source->toString()
+ << ", sink=" << sink->toString();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ }
+ }
+ return ndk::ScopedAStatus::ok();
+}
+
+void ModuleRemoteSubmix::onExternalDeviceConnectionChanged(
+ const ::aidl::android::media::audio::common::AudioPort& audioPort __unused,
+ bool connected __unused) {
+ LOG(DEBUG) << __func__ << ": do nothing and return";
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::onMasterMuteChanged(bool __unused) {
+ LOG(DEBUG) << __func__ << ": is not supported";
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+}
+
+ndk::ScopedAStatus ModuleRemoteSubmix::onMasterVolumeChanged(float __unused) {
+ LOG(DEBUG) << __func__ << ": is not supported";
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+}
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/audio/aidl/default/r_submix/RemoteSubmixUtils.cpp b/audio/aidl/default/r_submix/RemoteSubmixUtils.cpp
new file mode 100644
index 0000000..2f5d17d
--- /dev/null
+++ b/audio/aidl/default/r_submix/RemoteSubmixUtils.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <algorithm>
+#include <vector>
+
+#include "RemoteSubmixUtils.h"
+
+namespace aidl::android::hardware::audio::core::r_submix {
+
+bool isChannelMaskSupported(const AudioChannelLayout& channelMask) {
+ const static std::vector<AudioChannelLayout> kSupportedChannelMask = {
+ AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+ AudioChannelLayout::LAYOUT_MONO),
+ AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+ AudioChannelLayout::LAYOUT_STEREO)};
+
+ if (std::find(kSupportedChannelMask.begin(), kSupportedChannelMask.end(), channelMask) !=
+ kSupportedChannelMask.end()) {
+ return true;
+ }
+ return false;
+}
+
+bool isSampleRateSupported(int sampleRate) {
+ const static std::vector<int> kSupportedSampleRates = {8000, 11025, 12000, 16000, 22050,
+ 24000, 32000, 44100, 48000};
+
+ if (std::find(kSupportedSampleRates.begin(), kSupportedSampleRates.end(), sampleRate) !=
+ kSupportedSampleRates.end()) {
+ return true;
+ }
+ return false;
+}
+
+} // namespace aidl::android::hardware::audio::core::r_submix
diff --git a/audio/aidl/default/r_submix/RemoteSubmixUtils.h b/audio/aidl/default/r_submix/RemoteSubmixUtils.h
new file mode 100644
index 0000000..952a992
--- /dev/null
+++ b/audio/aidl/default/r_submix/RemoteSubmixUtils.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/media/audio/common/AudioChannelLayout.h>
+#include <aidl/android/media/audio/common/AudioFormatDescription.h>
+
+using aidl::android::media::audio::common::AudioChannelLayout;
+
+namespace aidl::android::hardware::audio::core::r_submix {
+
+bool isChannelMaskSupported(const AudioChannelLayout& channelMask);
+
+bool isSampleRateSupported(int sampleRate);
+
+} // namespace aidl::android::hardware::audio::core::r_submix
diff --git a/audio/aidl/default/r_submix/StreamRemoteSubmix.cpp b/audio/aidl/default/r_submix/StreamRemoteSubmix.cpp
new file mode 100644
index 0000000..5af0d91
--- /dev/null
+++ b/audio/aidl/default/r_submix/StreamRemoteSubmix.cpp
@@ -0,0 +1,352 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AHAL_StreamRemoteSubmix"
+#include <android-base/logging.h>
+
+#include <cmath>
+
+#include "core-impl/StreamRemoteSubmix.h"
+
+using aidl::android::hardware::audio::common::SinkMetadata;
+using aidl::android::hardware::audio::common::SourceMetadata;
+using aidl::android::media::audio::common::AudioOffloadInfo;
+using aidl::android::media::audio::common::MicrophoneDynamicInfo;
+using aidl::android::media::audio::common::MicrophoneInfo;
+
+namespace aidl::android::hardware::audio::core {
+
+StreamRemoteSubmix::StreamRemoteSubmix(const Metadata& metadata, StreamContext&& context)
+ : StreamCommonImpl(metadata, std::move(context)),
+ mPortId(context.getPortId()),
+ mIsInput(isInput(metadata)) {
+ mStreamConfig.frameSize = context.getFrameSize();
+ mStreamConfig.format = context.getFormat();
+ mStreamConfig.channelLayout = context.getChannelLayout();
+ mStreamConfig.sampleRate = context.getSampleRate();
+}
+
+std::mutex StreamRemoteSubmix::sSubmixRoutesLock;
+std::map<int32_t, std::shared_ptr<SubmixRoute>> StreamRemoteSubmix::sSubmixRoutes;
+
+::android::status_t StreamRemoteSubmix::init() {
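+    // All streams opened on the same mix port share a single SubmixRoute, and thus one pipe.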
+ {
+ std::lock_guard guard(sSubmixRoutesLock);
+ if (sSubmixRoutes.find(mPortId) != sSubmixRoutes.end()) {
+ mCurrentRoute = sSubmixRoutes[mPortId];
+ }
+ }
+ // If route is not available for this port, add it.
+ if (mCurrentRoute == nullptr) {
+ // Initialize the pipe.
+ mCurrentRoute = std::make_shared<SubmixRoute>();
+ if (::android::OK != mCurrentRoute->createPipe(mStreamConfig)) {
+ LOG(ERROR) << __func__ << ": create pipe failed";
+ return ::android::NO_INIT;
+ }
+ {
+ std::lock_guard guard(sSubmixRoutesLock);
+ sSubmixRoutes.emplace(mPortId, mCurrentRoute);
+ }
+ } else {
+ if (!mCurrentRoute->isStreamConfigValid(mIsInput, mStreamConfig)) {
+ LOG(ERROR) << __func__ << ": invalid stream config";
+ return ::android::NO_INIT;
+ }
+ sp<MonoPipe> sink = mCurrentRoute->getSink();
+ if (sink == nullptr) {
+ LOG(ERROR) << __func__ << ": nullptr sink when opening stream";
+ return ::android::NO_INIT;
+ }
+ // If the sink has been shutdown or pipe recreation is forced, delete the pipe and
+ // recreate it.
+ if (sink->isShutdown()) {
+ LOG(DEBUG) << __func__ << ": Non-nullptr shut down sink when opening stream";
+ if (::android::OK != mCurrentRoute->resetPipe()) {
+ LOG(ERROR) << __func__ << ": reset pipe failed";
+ return ::android::NO_INIT;
+ }
+ }
+ }
+
+ mCurrentRoute->openStream(mIsInput);
+ return ::android::OK;
+}
+
+::android::status_t StreamRemoteSubmix::drain(StreamDescriptor::DrainMode) {
+ usleep(1000);
+ return ::android::OK;
+}
+
+::android::status_t StreamRemoteSubmix::flush() {
+ usleep(1000);
+ return ::android::OK;
+}
+
+::android::status_t StreamRemoteSubmix::pause() {
+ usleep(1000);
+ return ::android::OK;
+}
+
+::android::status_t StreamRemoteSubmix::standby() {
+ mCurrentRoute->standby(mIsInput);
+ return ::android::OK;
+}
+
+::android::status_t StreamRemoteSubmix::start() {
+ mCurrentRoute->exitStandby(mIsInput);
+ return ::android::OK;
+}
+
+ndk::ScopedAStatus StreamRemoteSubmix::prepareToClose() {
+ if (!mIsInput) {
+ std::shared_ptr<SubmixRoute> route = nullptr;
+ {
+ std::lock_guard guard(sSubmixRoutesLock);
+ if (sSubmixRoutes.find(mPortId) != sSubmixRoutes.end()) {
+ route = sSubmixRoutes[mPortId];
+ }
+ }
+ if (route != nullptr) {
+ sp<MonoPipe> sink = route->getSink();
+ if (sink == nullptr) {
+                return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+ }
+ LOG(DEBUG) << __func__ << ": shutting down MonoPipe sink";
+
+ sink->shutdown(true);
+ } else {
+ LOG(DEBUG) << __func__ << ": stream already closed.";
+            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+ }
+ }
+ return ndk::ScopedAStatus::ok();
+}
+
+// Remove references to the specified input and output streams. When the route no longer
+// references any input or output stream, destroy the associated pipe.
+void StreamRemoteSubmix::shutdown() {
+ mCurrentRoute->closeStream(mIsInput);
+ // If all stream instances are closed, we can remove route information for this port.
+ if (!mCurrentRoute->hasAtleastOneStreamOpen()) {
+ mCurrentRoute->releasePipe();
+ LOG(DEBUG) << __func__ << ": pipe destroyed";
+
+ std::lock_guard guard(sSubmixRoutesLock);
+ sSubmixRoutes.erase(mPortId);
+ }
+ mCurrentRoute.reset();
+}
+
+::android::status_t StreamRemoteSubmix::transfer(void* buffer, size_t frameCount,
+ size_t* actualFrameCount, int32_t* latencyMs) {
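+    // Report the latency as the time it takes to drain the entire pipe at the stream sample rate.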
+ *latencyMs = (getStreamPipeSizeInFrames() * MILLIS_PER_SECOND) / mStreamConfig.sampleRate;
+ LOG(VERBOSE) << __func__ << ": Latency " << *latencyMs << "ms";
+
+ sp<MonoPipe> sink = mCurrentRoute->getSink();
+ if (sink != nullptr) {
+ if (sink->isShutdown()) {
+ sink.clear();
+ LOG(VERBOSE) << __func__ << ": pipe shutdown, ignoring the transfer.";
+ // the pipe has already been shutdown, this buffer will be lost but we must simulate
+ // timing so we don't drain the output faster than realtime
+ const size_t delayUs = static_cast<size_t>(
+ std::roundf(frameCount * MICROS_PER_SECOND / mStreamConfig.sampleRate));
+ usleep(delayUs);
+
+ *actualFrameCount = frameCount;
+ return ::android::OK;
+ }
+ } else {
+ LOG(ERROR) << __func__ << ": transfer without a pipe!";
+ return ::android::UNEXPECTED_NULL;
+ }
+
+ return (mIsInput ? inRead(buffer, frameCount, actualFrameCount)
+ : outWrite(buffer, frameCount, actualFrameCount));
+}
+
+// Calculate the maximum size of the pipe buffer in frames for the specified stream.
+size_t StreamRemoteSubmix::getStreamPipeSizeInFrames() {
+ auto pipeConfig = mCurrentRoute->mPipeConfig;
+ const size_t maxFrameSize = std::max(mStreamConfig.frameSize, pipeConfig.frameSize);
+ return (pipeConfig.frameCount * pipeConfig.frameSize) / maxFrameSize;
+}
+
+::android::status_t StreamRemoteSubmix::outWrite(void* buffer, size_t frameCount,
+ size_t* actualFrameCount) {
+ sp<MonoPipe> sink = mCurrentRoute->getSink();
+ if (sink != nullptr) {
+ if (sink->isShutdown()) {
+ sink.clear();
+ LOG(VERBOSE) << __func__ << ": pipe shutdown, ignoring the write.";
+ // the pipe has already been shutdown, this buffer will be lost but we must
+ // simulate timing so we don't drain the output faster than realtime
+ const size_t delayUs = static_cast<size_t>(
+ std::roundf(frameCount * MICROS_PER_SECOND / mStreamConfig.sampleRate));
+ usleep(delayUs);
+ *actualFrameCount = frameCount;
+ return ::android::OK;
+ }
+ } else {
+        LOG(FATAL) << __func__ << ": write without a pipe!";
+ return ::android::UNKNOWN_ERROR;
+ }
+
+ const size_t availableToWrite = sink->availableToWrite();
+ // NOTE: sink has been checked above and sink and source life cycles are synchronized
+ sp<MonoPipeReader> source = mCurrentRoute->getSource();
+ // If the write to the sink should be blocked, flush enough frames from the pipe to make space
+ // to write the most recent data.
+ if (!mCurrentRoute->shouldBlockWrite() && availableToWrite < frameCount) {
+ static uint8_t flushBuffer[64];
+ const size_t flushBufferSizeFrames = sizeof(flushBuffer) / mStreamConfig.frameSize;
+ size_t framesToFlushFromSource = frameCount - availableToWrite;
+ LOG(VERBOSE) << __func__ << ": flushing " << framesToFlushFromSource
+ << " frames from the pipe to avoid blocking";
+ while (framesToFlushFromSource) {
+ const size_t flushSize = std::min(framesToFlushFromSource, flushBufferSizeFrames);
+ framesToFlushFromSource -= flushSize;
+ // read does not block
+ source->read(flushBuffer, flushSize);
+ }
+ }
+
+ ssize_t writtenFrames = sink->write(buffer, frameCount);
+ if (writtenFrames < 0) {
+ if (writtenFrames == (ssize_t)::android::NEGOTIATE) {
+ LOG(ERROR) << __func__ << ": write to pipe returned NEGOTIATE";
+ sink.clear();
+ *actualFrameCount = 0;
+ return ::android::UNKNOWN_ERROR;
+ } else {
+ // write() returned UNDERRUN or WOULD_BLOCK, retry
+ LOG(ERROR) << __func__ << ": write to pipe returned unexpected " << writtenFrames;
+ writtenFrames = sink->write(buffer, frameCount);
+ }
+ }
+ sink.clear();
+
+ if (writtenFrames < 0) {
+ LOG(ERROR) << __func__ << ": failed writing to pipe with " << writtenFrames;
+ *actualFrameCount = 0;
+ return ::android::UNKNOWN_ERROR;
+ }
+    LOG(VERBOSE) << __func__ << ": wrote " << writtenFrames << " frames";
+ *actualFrameCount = writtenFrames;
+ return ::android::OK;
+}
+
+::android::status_t StreamRemoteSubmix::inRead(void* buffer, size_t frameCount,
+ size_t* actualFrameCount) {
+ // about to read from audio source
+ sp<MonoPipeReader> source = mCurrentRoute->getSource();
+ if (source == nullptr) {
+ int readErrorCount = mCurrentRoute->notifyReadError();
+ if (readErrorCount < kMaxReadErrorLogs) {
+ LOG(ERROR)
+ << __func__
+ << ": no audio pipe yet we're trying to read! (not all errors will be logged)";
+ } else {
+ LOG(ERROR) << __func__ << ": Read errors " << readErrorCount;
+ }
+ const size_t delayUs = static_cast<size_t>(
+ std::roundf(frameCount * MICROS_PER_SECOND / mStreamConfig.sampleRate));
+ usleep(delayUs);
+ memset(buffer, 0, mStreamConfig.frameSize * frameCount);
+ *actualFrameCount = frameCount;
+ return ::android::OK;
+ }
+
+ // read the data from the pipe
+ int attempts = 0;
+    const size_t delayUs = kReadAttemptSleepUs;
+ char* buff = (char*)buffer;
+ size_t remainingFrames = frameCount;
+
+ while ((remainingFrames > 0) && (attempts < kMaxReadFailureAttempts)) {
+ LOG(VERBOSE) << __func__ << ": frames available to read " << source->availableToRead();
+
+ ssize_t framesRead = source->read(buff, remainingFrames);
+
+ LOG(VERBOSE) << __func__ << ": frames read " << framesRead;
+
+ if (framesRead > 0) {
+ remainingFrames -= framesRead;
+ buff += framesRead * mStreamConfig.frameSize;
+ LOG(VERBOSE) << __func__ << ": (attempts = " << attempts << ") got " << framesRead
+ << " frames, remaining=" << remainingFrames;
+ } else {
+ attempts++;
+ LOG(WARNING) << __func__ << ": read returned " << framesRead
+                         << ", read failure attempts = " << attempts;
+ usleep(delayUs);
+ }
+ }
+ // done using the source
+ source.clear();
+
+ if (remainingFrames > 0) {
+ const size_t remainingBytes = remainingFrames * mStreamConfig.frameSize;
+ LOG(VERBOSE) << __func__ << ": clearing remaining_frames = " << remainingFrames;
+ memset(((char*)buffer) + (mStreamConfig.frameSize * frameCount) - remainingBytes, 0,
+ remainingBytes);
+ }
+
+ long readCounterFrames = mCurrentRoute->updateReadCounterFrames(frameCount);
+ *actualFrameCount = frameCount;
+
+ // compute how much we need to sleep after reading the data by comparing the wall clock with
+ // the projected time at which we should return.
+ // wall clock after reading from the pipe
+    auto recordDurationUs = std::chrono::duration_cast<std::chrono::microseconds>(
+            std::chrono::steady_clock::now() - mCurrentRoute->getRecordStartTime());
+
+    // readCounterFrames contains the number of frames that have been read since the beginning of
+    // recording (including this call): it is converted to microseconds and compared to how long
+    // we have been recording, which gives the time we must wait to bring the projected and
+    // observed recording times back in sync.
+ static constexpr float kScaleFactor = .8f;
+    const int64_t projectedVsObservedOffsetUs = static_cast<int64_t>(
+            kScaleFactor * (readCounterFrames * MICROS_PER_SECOND / mStreamConfig.sampleRate -
+                            recordDurationUs.count()));
+
+ LOG(VERBOSE) << __func__ << ": record duration " << recordDurationUs.count()
+ << " microseconds, will wait: " << projectedVsObservedOffsetUs << " microseconds";
+ if (projectedVsObservedOffsetUs > 0) {
+ usleep(projectedVsObservedOffsetUs);
+ }
+ return ::android::OK;
+}
+
+StreamInRemoteSubmix::StreamInRemoteSubmix(const SinkMetadata& sinkMetadata,
+ StreamContext&& context,
+ const std::vector<MicrophoneInfo>& microphones)
+ : StreamRemoteSubmix(sinkMetadata, std::move(context)), StreamIn(microphones) {}
+
+ndk::ScopedAStatus StreamInRemoteSubmix::getActiveMicrophones(
+ std::vector<MicrophoneDynamicInfo>* _aidl_return) {
+ LOG(DEBUG) << __func__ << ": not supported";
+ *_aidl_return = std::vector<MicrophoneDynamicInfo>();
+ return ndk::ScopedAStatus::ok();
+}
+
+StreamOutRemoteSubmix::StreamOutRemoteSubmix(const SourceMetadata& sourceMetadata,
+ StreamContext&& context,
+ const std::optional<AudioOffloadInfo>& offloadInfo)
+ : StreamRemoteSubmix(sourceMetadata, std::move(context)), StreamOut(offloadInfo) {}
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/audio/aidl/default/r_submix/SubmixRoute.cpp b/audio/aidl/default/r_submix/SubmixRoute.cpp
new file mode 100644
index 0000000..8f5b8cb
--- /dev/null
+++ b/audio/aidl/default/r_submix/SubmixRoute.cpp
@@ -0,0 +1,230 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AHAL_SubmixRoute"
+#include <android-base/logging.h>
+#include <media/AidlConversionCppNdk.h>
+
+#include <Utils.h>
+
+#include "SubmixRoute.h"
+
+using aidl::android::hardware::audio::common::getChannelCount;
+
+namespace aidl::android::hardware::audio::core::r_submix {
+
+// Verify a submix input or output stream can be opened.
+bool SubmixRoute::isStreamConfigValid(bool isInput, const AudioConfig streamConfig) {
+ // If the stream is already open, don't open it again.
+    // ENABLE_LEGACY_INPUT_OPEN is the default behavior.
+ if (!isInput && isStreamOutOpen()) {
+ LOG(ERROR) << __func__ << ": output stream already open.";
+ return false;
+ }
+ // If either stream is open, verify the existing pipe config matches the stream config.
+ if (hasAtleastOneStreamOpen() && !isStreamConfigCompatible(streamConfig)) {
+ return false;
+ }
+ return true;
+}
+
+// Compare this stream config with existing pipe config, returning false if they do *not*
+// match, true otherwise.
+bool SubmixRoute::isStreamConfigCompatible(const AudioConfig streamConfig) {
+ if (streamConfig.channelLayout != mPipeConfig.channelLayout) {
+        LOG(ERROR) << __func__ << ": channel layout mismatch, stream layout = "
+                   << streamConfig.channelLayout.toString()
+                   << " pipe config layout = " << mPipeConfig.channelLayout.toString();
+ return false;
+ }
+ if (streamConfig.sampleRate != mPipeConfig.sampleRate) {
+ LOG(ERROR) << __func__
+ << ": sample rate mismatch, stream sample rate = " << streamConfig.sampleRate
+ << " pipe config sample rate = " << mPipeConfig.sampleRate;
+ return false;
+ }
+ if (streamConfig.format != mPipeConfig.format) {
+ LOG(ERROR) << __func__
+ << ": format mismatch, stream format = " << streamConfig.format.toString()
+ << " pipe config format = " << mPipeConfig.format.toString();
+ return false;
+ }
+ return true;
+}
+
+bool SubmixRoute::hasAtleastOneStreamOpen() {
+ std::lock_guard guard(mLock);
+ return (mStreamInOpen || mStreamOutOpen);
+}
+
+// We DO NOT block if:
+// - no peer input stream is present
+// - the peer input is in standby AFTER having been active.
+// We DO block if:
+// - the input was never activated, to avoid discarding the first frames in the pipe in case
+//   capture start was delayed
+bool SubmixRoute::shouldBlockWrite() {
+ std::lock_guard guard(mLock);
+ return (mStreamInOpen || (mStreamInStandby && (mReadCounterFrames != 0)));
+}
+
+int SubmixRoute::notifyReadError() {
+ std::lock_guard guard(mLock);
+ return ++mReadErrorCount;
+}
+
+long SubmixRoute::updateReadCounterFrames(size_t frameCount) {
+ std::lock_guard guard(mLock);
+ mReadCounterFrames += frameCount;
+ return mReadCounterFrames;
+}
+
+void SubmixRoute::openStream(bool isInput) {
+ std::lock_guard guard(mLock);
+ if (isInput) {
+ if (mStreamInOpen) {
+ mInputRefCount++;
+ } else {
+ mInputRefCount = 1;
+ mStreamInOpen = true;
+ }
+ mStreamInStandby = true;
+ mReadCounterFrames = 0;
+ mReadErrorCount = 0;
+ } else {
+ mStreamOutOpen = true;
+ }
+}
+
+void SubmixRoute::closeStream(bool isInput) {
+ std::lock_guard guard(mLock);
+ if (isInput) {
+ mInputRefCount--;
+ if (mInputRefCount == 0) {
+ mStreamInOpen = false;
+ if (mSink != nullptr) {
+ mSink->shutdown(true);
+ }
+ }
+ } else {
+ mStreamOutOpen = false;
+ }
+}
+
+// If SubmixRoute doesn't exist for a port, create a pipe for the submix audio device of size
+// buffer_size_frames and store config of the submix audio device.
+::android::status_t SubmixRoute::createPipe(const AudioConfig streamConfig) {
+ const int channelCount = getChannelCount(streamConfig.channelLayout);
+ const audio_format_t audioFormat = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioFormatDescription_audio_format_t(streamConfig.format));
+ const ::android::NBAIO_Format format =
+ ::android::Format_from_SR_C(streamConfig.sampleRate, channelCount, audioFormat);
+ const ::android::NBAIO_Format offers[1] = {format};
+ size_t numCounterOffers = 0;
+
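+    // Scale the default pipe size, defined for kDefaultSampleRateHz, to the actual sample rate.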
+ const size_t pipeSizeInFrames =
+ r_submix::kDefaultPipeSizeInFrames *
+ ((float)streamConfig.sampleRate / r_submix::kDefaultSampleRateHz);
+ LOG(VERBOSE) << __func__ << ": creating pipe, rate : " << streamConfig.sampleRate
+ << ", pipe size : " << pipeSizeInFrames;
+
+ // Create a MonoPipe with optional blocking set to true.
+ sp<MonoPipe> sink = sp<MonoPipe>::make(pipeSizeInFrames, format, true /*writeCanBlock*/);
+ if (sink == nullptr) {
+ LOG(FATAL) << __func__ << ": sink is null";
+ return ::android::UNEXPECTED_NULL;
+ }
+
+ // Negotiation between the source and sink cannot fail as the device open operation
+ // creates both ends of the pipe using the same audio format.
+ ssize_t index = sink->negotiate(offers, 1, nullptr, numCounterOffers);
+ if (index != 0) {
+ LOG(FATAL) << __func__ << ": Negotiation for the sink failed, index = " << index;
+ return ::android::BAD_INDEX;
+ }
+ sp<MonoPipeReader> source = sp<MonoPipeReader>::make(sink.get());
+ if (source == nullptr) {
+ LOG(FATAL) << __func__ << ": source is null";
+ return ::android::UNEXPECTED_NULL;
+ }
+ numCounterOffers = 0;
+ index = source->negotiate(offers, 1, nullptr, numCounterOffers);
+ if (index != 0) {
+ LOG(FATAL) << __func__ << ": Negotiation for the source failed, index = " << index;
+ return ::android::BAD_INDEX;
+ }
+ LOG(VERBOSE) << __func__ << ": created pipe";
+
+ mPipeConfig = streamConfig;
+ mPipeConfig.frameCount = sink->maxFrames();
+
+ LOG(VERBOSE) << __func__ << ": Pipe frame size : " << mPipeConfig.frameSize
+ << ", pipe frames : " << mPipeConfig.frameCount;
+
+ // Save references to the source and sink.
+ {
+ std::lock_guard guard(mLock);
+ mSink = std::move(sink);
+ mSource = std::move(source);
+ }
+
+ return ::android::OK;
+}
+
+// Release references to the sink and source.
+void SubmixRoute::releasePipe() {
+ std::lock_guard guard(mLock);
+ mSink.clear();
+ mSource.clear();
+}
+
+::android::status_t SubmixRoute::resetPipe() {
+ releasePipe();
+ return createPipe(mPipeConfig);
+}
+
+void SubmixRoute::standby(bool isInput) {
+ std::lock_guard guard(mLock);
+
+ if (isInput) {
+ mStreamInStandby = true;
+ } else {
+ mStreamOutStandby = true;
+ mStreamOutStandbyTransition = !mStreamOutStandbyTransition;
+ }
+}
+
+void SubmixRoute::exitStandby(bool isInput) {
+ std::lock_guard guard(mLock);
+
+ if (isInput) {
+ if (mStreamInStandby || mStreamOutStandbyTransition) {
+ mStreamInStandby = false;
+ mStreamOutStandbyTransition = false;
+ // keep track of when we exit input standby (== first read == start "real recording")
+ // or when we start recording silence, and reset projected time
+ mRecordStartTime = std::chrono::steady_clock::now();
+ mReadCounterFrames = 0;
+ }
+ } else {
+ if (mStreamOutStandby) {
+ mStreamOutStandby = false;
+ mStreamOutStandbyTransition = true;
+ }
+ }
+}
+
+} // namespace aidl::android::hardware::audio::core::r_submix
diff --git a/audio/aidl/default/r_submix/SubmixRoute.h b/audio/aidl/default/r_submix/SubmixRoute.h
new file mode 100644
index 0000000..5f7ea75
--- /dev/null
+++ b/audio/aidl/default/r_submix/SubmixRoute.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <chrono>
+#include <mutex>
+
+#include <audio_utils/clock.h>
+
+#include <media/nbaio/MonoPipe.h>
+#include <media/nbaio/MonoPipeReader.h>
+
+#include <aidl/android/media/audio/common/AudioChannelLayout.h>
+
+#include "core-impl/Stream.h"
+
+using aidl::android::media::audio::common::AudioChannelLayout;
+using aidl::android::media::audio::common::AudioFormatDescription;
+using aidl::android::media::audio::common::AudioFormatType;
+using aidl::android::media::audio::common::PcmType;
+using ::android::MonoPipe;
+using ::android::MonoPipeReader;
+using ::android::sp;
+
+namespace aidl::android::hardware::audio::core::r_submix {
+
+static constexpr int kDefaultSampleRateHz = 48000;
+// Size at default sample rate
+// NOTE: This value will be rounded up to the nearest power of 2 by MonoPipe().
+static constexpr int kDefaultPipeSizeInFrames = (1024 * 4);
+
+// Configuration of the audio stream.
+struct AudioConfig {
+ int sampleRate = kDefaultSampleRateHz;
+ AudioFormatDescription format =
+ AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT};
+ AudioChannelLayout channelLayout =
+ AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+ AudioChannelLayout::LAYOUT_STEREO);
+ size_t frameSize;
+ size_t frameCount;
+};
+
+class SubmixRoute {
+ public:
+ AudioConfig mPipeConfig;
+
+ bool isStreamInOpen() {
+ std::lock_guard guard(mLock);
+ return mStreamInOpen;
+ }
+ bool getStreamInStandby() {
+ std::lock_guard guard(mLock);
+ return mStreamInStandby;
+ }
+ bool isStreamOutOpen() {
+ std::lock_guard guard(mLock);
+ return mStreamOutOpen;
+ }
+ bool getStreamOutStandby() {
+ std::lock_guard guard(mLock);
+ return mStreamOutStandby;
+ }
+ long getReadCounterFrames() {
+ std::lock_guard guard(mLock);
+ return mReadCounterFrames;
+ }
+ int getReadErrorCount() {
+ std::lock_guard guard(mLock);
+ return mReadErrorCount;
+ }
+ std::chrono::time_point<std::chrono::steady_clock> getRecordStartTime() {
+ std::lock_guard guard(mLock);
+ return mRecordStartTime;
+ }
+ sp<MonoPipe> getSink() {
+ std::lock_guard guard(mLock);
+ return mSink;
+ }
+ sp<MonoPipeReader> getSource() {
+ std::lock_guard guard(mLock);
+ return mSource;
+ }
+
+ bool isStreamConfigValid(bool isInput, const AudioConfig streamConfig);
+ void closeStream(bool isInput);
+ ::android::status_t createPipe(const AudioConfig streamConfig);
+ void exitStandby(bool isInput);
+ bool hasAtleastOneStreamOpen();
+ int notifyReadError();
+ void openStream(bool isInput);
+ void releasePipe();
+ ::android::status_t resetPipe();
+ bool shouldBlockWrite();
+ void standby(bool isInput);
+ long updateReadCounterFrames(size_t frameCount);
+
+ private:
+ bool isStreamConfigCompatible(const AudioConfig streamConfig);
+
+ std::mutex mLock;
+
+ bool mStreamInOpen GUARDED_BY(mLock) = false;
+ int mInputRefCount GUARDED_BY(mLock) = 0;
+ bool mStreamInStandby GUARDED_BY(mLock) = true;
+ bool mStreamOutStandbyTransition GUARDED_BY(mLock) = false;
+ bool mStreamOutOpen GUARDED_BY(mLock) = false;
+ bool mStreamOutStandby GUARDED_BY(mLock) = true;
+ // how many frames have been requested to be read since standby
+ long mReadCounterFrames GUARDED_BY(mLock) = 0;
+ int mReadErrorCount GUARDED_BY(mLock) = 0;
+ // wall clock when recording starts
+ std::chrono::time_point<std::chrono::steady_clock> mRecordStartTime GUARDED_BY(mLock);
+
+ // Pipe variables: they handle the ring buffer that "pipes" audio:
+ // - from the submix virtual audio output == what needs to be played
+ // remotely, seen as an output for the client
+ // - to the virtual audio source == what is captured by the component
+ // which "records" the submix / virtual audio source, and handles it as needed.
+    // An example use case is one where the component capturing the audio then sends it over
+    // Wi-Fi for presentation on a remote Wi-Fi Display device (e.g. a dongle attached to a TV,
+    // or a TV with Wi-Fi Display capabilities), or to a wireless audio player.
+ sp<MonoPipe> mSink GUARDED_BY(mLock);
+ sp<MonoPipeReader> mSource GUARDED_BY(mLock);
+};
+
+} // namespace aidl::android::hardware::audio::core::r_submix
diff --git a/audio/aidl/default/usb/StreamUsb.cpp b/audio/aidl/default/usb/StreamUsb.cpp
index 49bc1d6..17e1ab4 100644
--- a/audio/aidl/default/usb/StreamUsb.cpp
+++ b/audio/aidl/default/usb/StreamUsb.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <limits>
+
#define LOG_TAG "AHAL_StreamUsb"
#include <android-base/logging.h>
@@ -45,25 +47,30 @@
StreamUsb::StreamUsb(const Metadata& metadata, StreamContext&& context)
: StreamCommonImpl(metadata, std::move(context)),
- mFrameSizeBytes(context.getFrameSize()),
- mIsInput(isInput(metadata)) {
+ mFrameSizeBytes(getContext().getFrameSize()),
+ mIsInput(isInput(metadata)),
+ mConfig(maybePopulateConfig(getContext(), mIsInput)) {}
+
+// static
+std::optional<struct pcm_config> StreamUsb::maybePopulateConfig(const StreamContext& context,
+ bool isInput) {
struct pcm_config config;
- config.channels = usb::getChannelCountFromChannelMask(context.getChannelLayout(), mIsInput);
+ config.channels = usb::getChannelCountFromChannelMask(context.getChannelLayout(), isInput);
if (config.channels == 0) {
LOG(ERROR) << __func__ << ": invalid channel=" << context.getChannelLayout().toString();
- return;
+ return std::nullopt;
}
config.format = usb::aidl2legacy_AudioFormatDescription_pcm_format(context.getFormat());
if (config.format == PCM_FORMAT_INVALID) {
LOG(ERROR) << __func__ << ": invalid format=" << context.getFormat().toString();
- return;
+ return std::nullopt;
}
config.rate = context.getSampleRate();
if (config.rate == 0) {
LOG(ERROR) << __func__ << ": invalid sample rate=" << config.rate;
- return;
+ return std::nullopt;
}
- mConfig = config;
+ return config;
}
::android::status_t StreamUsb::init() {
@@ -89,8 +96,8 @@
}
}
std::lock_guard guard(mLock);
- mAlsaDeviceProxies.clear();
RETURN_STATUS_IF_ERROR(StreamCommonImpl::setConnectedDevices(connectedDevices));
+ mConnectedDevicesUpdated.store(true, std::memory_order_release);
return ndk::ScopedAStatus::ok();
}
@@ -111,59 +118,53 @@
::android::status_t StreamUsb::transfer(void* buffer, size_t frameCount, size_t* actualFrameCount,
int32_t* latencyMs) {
- {
- std::lock_guard guard(mLock);
- if (!mConfig.has_value() || mConnectedDevices.empty()) {
- LOG(ERROR) << __func__ << ": failed, has config: " << mConfig.has_value()
- << ", has connected devices: " << mConnectedDevices.empty();
- return ::android::NO_INIT;
- }
- }
- if (mIsStandby) {
- if (::android::status_t status = exitStandby(); status != ::android::OK) {
- LOG(ERROR) << __func__ << ": failed to exit standby, status=" << status;
- return status;
- }
- }
- std::vector<std::shared_ptr<alsa_device_proxy>> alsaDeviceProxies;
- {
- std::lock_guard guard(mLock);
- alsaDeviceProxies = mAlsaDeviceProxies;
+ if (mConnectedDevicesUpdated.load(std::memory_order_acquire)) {
+ // 'setConnectedDevices' has been called. I/O will be restarted.
+ *actualFrameCount = 0;
+ *latencyMs = StreamDescriptor::LATENCY_UNKNOWN;
+ return ::android::OK;
}
const size_t bytesToTransfer = frameCount * mFrameSizeBytes;
+ unsigned maxLatency = 0;
if (mIsInput) {
+ if (mAlsaDeviceProxies.empty()) {
+ LOG(FATAL) << __func__ << ": no input devices";
+ return ::android::NO_INIT;
+ }
// For input case, only support single device.
- proxy_read(alsaDeviceProxies[0].get(), buffer, bytesToTransfer);
+ proxy_read(mAlsaDeviceProxies[0].get(), buffer, bytesToTransfer);
+ maxLatency = proxy_get_latency(mAlsaDeviceProxies[0].get());
} else {
- for (auto& proxy : alsaDeviceProxies) {
+ for (auto& proxy : mAlsaDeviceProxies) {
proxy_write(proxy.get(), buffer, bytesToTransfer);
+ maxLatency = std::max(maxLatency, proxy_get_latency(proxy.get()));
}
}
*actualFrameCount = frameCount;
- *latencyMs = Module::kLatencyMs;
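+    // proxy_get_latency() returns an unsigned value; clamp it so it fits into the int32_t
+    // latency reported back to the client.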
+ maxLatency = std::min(maxLatency, static_cast<unsigned>(std::numeric_limits<int32_t>::max()));
+ *latencyMs = maxLatency;
return ::android::OK;
}
::android::status_t StreamUsb::standby() {
- if (!mIsStandby) {
- std::lock_guard guard(mLock);
- mAlsaDeviceProxies.clear();
- mIsStandby = true;
- }
+ mAlsaDeviceProxies.clear();
return ::android::OK;
}
-void StreamUsb::shutdown() {}
+void StreamUsb::shutdown() {
+ mAlsaDeviceProxies.clear();
+}
-::android::status_t StreamUsb::exitStandby() {
+::android::status_t StreamUsb::start() {
std::vector<AudioDeviceAddress> connectedDevices;
{
std::lock_guard guard(mLock);
std::transform(mConnectedDevices.begin(), mConnectedDevices.end(),
std::back_inserter(connectedDevices),
[](const auto& device) { return device.address; });
+ mConnectedDevicesUpdated.store(false, std::memory_order_release);
}
- std::vector<std::shared_ptr<alsa_device_proxy>> alsaDeviceProxies;
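+    // Build the list of proxies locally; 'mAlsaDeviceProxies' is only used on the worker thread
+    // and is replaced in one step at the end.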
+ decltype(mAlsaDeviceProxies) alsaDeviceProxies;
for (const auto& device : connectedDevices) {
alsa_device_profile profile;
profile_init(&profile, mIsInput ? PCM_IN : PCM_OUT);
@@ -175,16 +176,16 @@
return ::android::UNKNOWN_ERROR;
}
- auto proxy = std::shared_ptr<alsa_device_proxy>(new alsa_device_proxy(),
- [](alsa_device_proxy* proxy) {
- proxy_close(proxy);
- free(proxy);
- });
+ AlsaDeviceProxy proxy(new alsa_device_proxy, [](alsa_device_proxy* proxy) {
+ proxy_close(proxy);
+ free(proxy);
+ });
// Always ask for alsa configure as required since the configuration should be supported
// by the connected device. That is guaranteed by `setAudioPortConfig` and
// `setAudioPatch`.
- if (int err =
- proxy_prepare(proxy.get(), &profile, &mConfig.value(), true /*is_bit_perfect*/);
+ if (int err = proxy_prepare(proxy.get(), &profile,
+ const_cast<struct pcm_config*>(&mConfig.value()),
+ true /*is_bit_perfect*/);
err != 0) {
LOG(ERROR) << __func__ << ": fail to prepare for device address=" << device.toString()
<< " error=" << err;
@@ -197,11 +198,7 @@
}
alsaDeviceProxies.push_back(std::move(proxy));
}
- {
- std::lock_guard guard(mLock);
- mAlsaDeviceProxies = alsaDeviceProxies;
- }
- mIsStandby = false;
+ mAlsaDeviceProxies = std::move(alsaDeviceProxies);
return ::android::OK;
}