audio: Add microphone settings to IModule, IStreamIn
Add 'MicrophoneInfo' and 'MicrophoneDynamicInfo' parcelables.
Add the IModule.getMicrophones method.
Add the following methods to IStreamIn:
- getActiveMicrophones;
- get/setMicrophoneDirection;
- get/setMicrophoneFieldDimension.
Provide trivial implementations and VTS.
Also slightly refactor port retrieval from ModuleConfig
to unify common queries.
Bug: 205884982
Test: atest VtsHalAudioCoreTargetTest
Change-Id: I472c7733e2a331a67cea613cd9218889eff06a43
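
For reference, a minimal client-side sketch of the new API surface (assumptions: NDK backend, the 'default' IModule instance name, an already-opened input stream; error handling trimmed):

    #include <memory>
    #include <vector>

    #include <aidl/android/hardware/audio/core/IModule.h>
    #include <aidl/android/hardware/audio/core/IStreamIn.h>
    #include <android/binder_manager.h>

    using aidl::android::hardware::audio::core::IModule;
    using aidl::android::hardware::audio::core::IStreamIn;
    using aidl::android::hardware::audio::core::MicrophoneDynamicInfo;
    using aidl::android::hardware::audio::core::MicrophoneInfo;

    void queryMicrophones(const std::shared_ptr<IStreamIn>& streamIn) {
        auto module = IModule::fromBinder(ndk::SpAIBinder(
                AServiceManager_waitForService("android.hardware.audio.core.IModule/default")));
        if (module == nullptr) return;
        std::vector<MicrophoneInfo> mics;
        if (!module->getMicrophones(&mics).isOk()) return;  // e.g. EX_UNSUPPORTED_OPERATION
        std::vector<MicrophoneDynamicInfo> activeMics;
        (void)streamIn->getActiveMicrophones(&activeMics);  // empty until the stream is routed
        (void)streamIn->setMicrophoneDirection(IStreamIn::MicrophoneDirection::FRONT);
        (void)streamIn->setMicrophoneFieldDimension(IStreamIn::MIC_FIELD_DIMENSION_NO_ZOOM);
    }
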
diff --git a/audio/aidl/Android.bp b/audio/aidl/Android.bp
index 563ee62..691cf34 100644
--- a/audio/aidl/Android.bp
+++ b/audio/aidl/Android.bp
@@ -117,6 +117,8 @@
"android/hardware/audio/core/IStreamIn.aidl",
"android/hardware/audio/core/IStreamOut.aidl",
"android/hardware/audio/core/ITelephony.aidl",
+ "android/hardware/audio/core/MicrophoneDynamicInfo.aidl",
+ "android/hardware/audio/core/MicrophoneInfo.aidl",
"android/hardware/audio/core/MmapBufferDescriptor.aidl",
"android/hardware/audio/core/ModuleDebug.aidl",
"android/hardware/audio/core/StreamDescriptor.aidl",
diff --git a/audio/aidl/aidl_api/android.hardware.audio.core/current/android/hardware/audio/core/IModule.aidl b/audio/aidl/aidl_api/android.hardware.audio.core/current/android/hardware/audio/core/IModule.aidl
index 7f960e0..0c7ca27 100644
--- a/audio/aidl/aidl_api/android.hardware.audio.core/current/android/hardware/audio/core/IModule.aidl
+++ b/audio/aidl/aidl_api/android.hardware.audio.core/current/android/hardware/audio/core/IModule.aidl
@@ -56,6 +56,7 @@
void setMasterVolume(float volume);
boolean getMicMute();
void setMicMute(boolean mute);
+ android.hardware.audio.core.MicrophoneInfo[] getMicrophones();
void updateAudioMode(android.hardware.audio.core.AudioMode mode);
void updateScreenRotation(android.hardware.audio.core.IModule.ScreenRotation rotation);
void updateScreenState(boolean isTurnedOn);
diff --git a/audio/aidl/aidl_api/android.hardware.audio.core/current/android/hardware/audio/core/IStreamIn.aidl b/audio/aidl/aidl_api/android.hardware.audio.core/current/android/hardware/audio/core/IStreamIn.aidl
index d5ab3e8..e9c727f 100644
--- a/audio/aidl/aidl_api/android.hardware.audio.core/current/android/hardware/audio/core/IStreamIn.aidl
+++ b/audio/aidl/aidl_api/android.hardware.audio.core/current/android/hardware/audio/core/IStreamIn.aidl
@@ -35,5 +35,20 @@
@VintfStability
interface IStreamIn {
void close();
+ android.hardware.audio.core.MicrophoneDynamicInfo[] getActiveMicrophones();
+ android.hardware.audio.core.IStreamIn.MicrophoneDirection getMicrophoneDirection();
+ void setMicrophoneDirection(android.hardware.audio.core.IStreamIn.MicrophoneDirection direction);
+ float getMicrophoneFieldDimension();
+ void setMicrophoneFieldDimension(float zoom);
void updateMetadata(in android.hardware.audio.common.SinkMetadata sinkMetadata);
+ const int MIC_FIELD_DIMENSION_WIDE_ANGLE = -1;
+ const int MIC_FIELD_DIMENSION_NO_ZOOM = 0;
+ const int MIC_FIELD_DIMENSION_MAX_ZOOM = 1;
+ @Backing(type="int") @VintfStability
+ enum MicrophoneDirection {
+ UNSPECIFIED = 0,
+ FRONT = 1,
+ BACK = 2,
+ EXTERNAL = 3,
+ }
}
diff --git a/audio/aidl/aidl_api/android.hardware.audio.core/current/android/hardware/audio/core/MicrophoneDynamicInfo.aidl b/audio/aidl/aidl_api/android.hardware.audio.core/current/android/hardware/audio/core/MicrophoneDynamicInfo.aidl
new file mode 100644
index 0000000..50a5528
--- /dev/null
+++ b/audio/aidl/aidl_api/android.hardware.audio.core/current/android/hardware/audio/core/MicrophoneDynamicInfo.aidl
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.audio.core;
+@JavaDerive(equals=true, toString=true) @VintfStability
+parcelable MicrophoneDynamicInfo {
+ @utf8InCpp String id;
+ android.hardware.audio.core.MicrophoneDynamicInfo.ChannelMapping[] channelMapping;
+ @Backing(type="int") @VintfStability
+ enum ChannelMapping {
+ UNUSED = 0,
+ DIRECT = 1,
+ PROCESSED = 2,
+ }
+}
diff --git a/audio/aidl/aidl_api/android.hardware.audio.core/current/android/hardware/audio/core/MicrophoneInfo.aidl b/audio/aidl/aidl_api/android.hardware.audio.core/current/android/hardware/audio/core/MicrophoneInfo.aidl
new file mode 100644
index 0000000..68c7f88
--- /dev/null
+++ b/audio/aidl/aidl_api/android.hardware.audio.core/current/android/hardware/audio/core/MicrophoneInfo.aidl
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.audio.core;
+@JavaDerive(equals=true, toString=true) @VintfStability
+parcelable MicrophoneInfo {
+ @utf8InCpp String id;
+ android.media.audio.common.AudioDevice device;
+ android.hardware.audio.core.MicrophoneInfo.Location location = android.hardware.audio.core.MicrophoneInfo.Location.UNKNOWN;
+ int group = -1;
+ int indexInTheGroup = -1;
+ @nullable android.hardware.audio.core.MicrophoneInfo.Sensitivity sensitivity;
+ android.hardware.audio.core.MicrophoneInfo.Directionality directionality = android.hardware.audio.core.MicrophoneInfo.Directionality.UNKNOWN;
+ android.hardware.audio.core.MicrophoneInfo.FrequencyResponsePoint[] frequencyResponse;
+ @nullable android.hardware.audio.core.MicrophoneInfo.Coordinate position;
+ @nullable android.hardware.audio.core.MicrophoneInfo.Coordinate orientation;
+ const int GROUP_UNKNOWN = -1;
+ const int INDEX_IN_THE_GROUP_UNKNOWN = -1;
+ @Backing(type="int") @VintfStability
+ enum Location {
+ UNKNOWN = 0,
+ MAINBODY = 1,
+ MAINBODY_MOVABLE = 2,
+ PERIPHERAL = 3,
+ }
+ @VintfStability
+ parcelable Sensitivity {
+ float leveldBFS;
+ float maxSpldB;
+ float minSpldB;
+ }
+ @Backing(type="int") @VintfStability
+ enum Directionality {
+ UNKNOWN = 0,
+ OMNI = 1,
+ BI_DIRECTIONAL = 2,
+ CARDIOID = 3,
+ HYPER_CARDIOID = 4,
+ SUPER_CARDIOID = 5,
+ }
+ @VintfStability
+ parcelable FrequencyResponsePoint {
+ float frequencyHz;
+ float leveldB;
+ }
+ @VintfStability
+ parcelable Coordinate {
+ float x;
+ float y;
+ float z;
+ }
+}
diff --git a/audio/aidl/android/hardware/audio/core/IModule.aidl b/audio/aidl/android/hardware/audio/core/IModule.aidl
index 974e7e8..786d5ee 100644
--- a/audio/aidl/android/hardware/audio/core/IModule.aidl
+++ b/audio/aidl/android/hardware/audio/core/IModule.aidl
@@ -25,6 +25,7 @@
import android.hardware.audio.core.IStreamIn;
import android.hardware.audio.core.IStreamOut;
import android.hardware.audio.core.ITelephony;
+import android.hardware.audio.core.MicrophoneInfo;
import android.hardware.audio.core.ModuleDebug;
import android.hardware.audio.core.StreamDescriptor;
import android.media.audio.common.AudioOffloadInfo;
@@ -605,6 +606,22 @@
void setMicMute(boolean mute);
/**
+ * Provide information describing built-in microphones of the HAL module.
+ *
+ * If there are no built-in microphones in the HAL module, it must return an
+ * empty vector. If there are microphones, but the HAL module does not
+ * possess the required information about them, EX_UNSUPPORTED_OPERATION
+ * must be thrown.
+ *
+ * If this method is supported by the HAL module, it must also support the
+ * 'IStreamIn.getActiveMicrophones' method.
+ *
+ * @return The vector with information about each microphone.
+ * @throws EX_UNSUPPORTED_OPERATION If the information is unavailable.
+ */
+ MicrophoneInfo[] getMicrophones();
+
+ /**
* Notify the HAL module on the change of the current audio mode.
*
* The current audio mode is always controlled by the client. This is an
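
The contract above distinguishes "no built-in microphones" (empty vector) from "information unavailable" (EX_UNSUPPORTED_OPERATION); a minimal sketch of that decision, where the 'hasBuiltInMics' and 'described' inputs are hypothetical stand-ins for vendor-specific queries:

    #include <optional>
    #include <vector>

    #include <aidl/android/hardware/audio/core/IModule.h>

    using aidl::android::hardware::audio::core::MicrophoneInfo;

    ndk::ScopedAStatus fillMicrophones(bool hasBuiltInMics,
                                       const std::optional<std::vector<MicrophoneInfo>>& described,
                                       std::vector<MicrophoneInfo>* _aidl_return) {
        if (!hasBuiltInMics) {
            _aidl_return->clear();     // no built-in microphones: empty vector, OK status
            return ndk::ScopedAStatus::ok();
        }
        if (!described.has_value()) {  // microphones exist, but cannot be described
            return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
        }
        *_aidl_return = *described;    // static info for each built-in microphone
        return ndk::ScopedAStatus::ok();
    }
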
diff --git a/audio/aidl/android/hardware/audio/core/IStreamIn.aidl b/audio/aidl/android/hardware/audio/core/IStreamIn.aidl
index 0c3e3d1..0b6e02c 100644
--- a/audio/aidl/android/hardware/audio/core/IStreamIn.aidl
+++ b/audio/aidl/android/hardware/audio/core/IStreamIn.aidl
@@ -17,6 +17,7 @@
package android.hardware.audio.core;
import android.hardware.audio.common.SinkMetadata;
+import android.hardware.audio.core.MicrophoneDynamicInfo;
/**
* This interface provides means for receiving audio data from input devices.
@@ -39,6 +40,92 @@
void close();
/**
+ * Provides information on the microphones that are active for this stream.
+ *
+ * The returned array contains dynamic information on the active microphones.
+ * Each entry in the returned array must have a corresponding entry (matched
+ * by the 'MicrophoneInfo.id' field value) in the list of all available
+ * microphones provided by the 'IModule.getMicrophones' method.
+ *
+ * This method must be supported by the HAL module if
+ * 'IModule.getMicrophones' is supported.
+ *
+ * @return The vector with dynamic information on the microphones.
+ * @throws EX_ILLEGAL_STATE If the stream has already been closed.
+ * @throws EX_UNSUPPORTED_OPERATION If the information is unavailable.
+ */
+ MicrophoneDynamicInfo[] getActiveMicrophones();
+
+ @VintfStability
+ @Backing(type="int")
+ enum MicrophoneDirection {
+ /**
+ * Don't do any directionality processing of the activated microphone(s).
+ */
+ UNSPECIFIED = 0,
+ /**
+ * Optimize capture for audio coming from the screen-side of the device.
+ */
+ FRONT = 1,
+ /**
+ * Optimize capture for audio coming from the side of the device opposite the screen.
+ */
+ BACK = 2,
+ /**
+ * Optimize capture for audio coming from an off-device microphone.
+ */
+ EXTERNAL = 3,
+ }
+ /**
+ * Get the current logical microphone direction.
+ *
+ * @return The current logical microphone direction.
+ * @throws EX_ILLEGAL_STATE If the stream has already been closed.
+ * @throws EX_UNSUPPORTED_OPERATION If the information is unavailable.
+ */
+ MicrophoneDirection getMicrophoneDirection();
+ /**
+ * Set the current logical microphone direction.
+ *
+ * The client sets this parameter in order to specify its preference for
+ * optimizing the direction of capture when multiple microphones are in use.
+ *
+ * @param direction The preferred capture direction.
+ * @throws EX_ILLEGAL_STATE If the stream has already been closed.
+ * @throws EX_UNSUPPORTED_OPERATION If the operation is not supported.
+ */
+ void setMicrophoneDirection(MicrophoneDirection direction);
+
+ const int MIC_FIELD_DIMENSION_WIDE_ANGLE = -1;
+ const int MIC_FIELD_DIMENSION_NO_ZOOM = 0;
+ const int MIC_FIELD_DIMENSION_MAX_ZOOM = 1;
+ /**
+ * Get the "zoom factor" for the logical microphone.
+ *
+ * The returned value must be within the range of [-1.0, 1.0] (see
+ * MIC_FIELD_DIMENSION_* constants).
+ *
+ * @throws EX_ILLEGAL_STATE If the stream has already been closed.
+ * @throws EX_UNSUPPORTED_OPERATION If the information is unavailable.
+ */
+ float getMicrophoneFieldDimension();
+ /**
+ * Set the "zoom factor" for the logical microphone.
+ *
+ * If multiple microphones are in use, the provided zoom factor must be
+ * treated as a preference for their combined field dimension. The zoom
+ * factor must be within the range of [-1.0, 1.0] (see MIC_FIELD_DIMENSION_*
+ * constants).
+ *
+ * @param zoom The preferred field dimension of the microphone capture.
+ * @throws EX_ILLEGAL_ARGUMENT If the dimension value is outside of the range.
+ * @throws EX_ILLEGAL_STATE If the stream has already been closed.
+ * @throws EX_UNSUPPORTED_OPERATION If the operation is not supported.
+ */
+ void setMicrophoneFieldDimension(float zoom);
+
+ /**
* Update stream metadata.
*
* Updates the metadata initially provided at the stream creation.
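
A minimal sketch of the field dimension range check implied by the MIC_FIELD_DIMENSION_* constants; the client-side wrapper and its names are illustrative, and the HAL may still return EX_UNSUPPORTED_OPERATION:

    #include <cmath>
    #include <memory>

    #include <aidl/android/hardware/audio/core/IStreamIn.h>

    using aidl::android::hardware::audio::core::IStreamIn;

    // Returns true when 'zoom' lies within the documented [-1.0, 1.0] range.
    bool isValidFieldDimension(float zoom) {
        return !std::isnan(zoom) && zoom >= IStreamIn::MIC_FIELD_DIMENSION_WIDE_ANGLE &&
               zoom <= IStreamIn::MIC_FIELD_DIMENSION_MAX_ZOOM;
    }

    ndk::ScopedAStatus setZoomChecked(const std::shared_ptr<IStreamIn>& stream, float zoom) {
        if (!isValidFieldDimension(zoom)) {
            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
        }
        return stream->setMicrophoneFieldDimension(zoom);
    }
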
diff --git a/audio/aidl/android/hardware/audio/core/MicrophoneDynamicInfo.aidl b/audio/aidl/android/hardware/audio/core/MicrophoneDynamicInfo.aidl
new file mode 100644
index 0000000..36cc51f
--- /dev/null
+++ b/audio/aidl/android/hardware/audio/core/MicrophoneDynamicInfo.aidl
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.core;
+
+/**
+ * Structure providing dynamic information on a microphone. This information
+ * changes between recording sessions.
+ */
+@JavaDerive(equals=true, toString=true)
+@VintfStability
+parcelable MicrophoneDynamicInfo {
+ /**
+ * Unique alphanumeric id for the microphone. It must match the id of one of
+ * the 'MicrophoneInfo' entries returned by 'IModule.getMicrophones'.
+ */
+ @utf8InCpp String id;
+
+ @VintfStability
+ @Backing(type="int")
+ enum ChannelMapping {
+ /** Channel not used. */
+ UNUSED = 0,
+ /** Channel is used and the signal is not processed. */
+ DIRECT = 1,
+ /** Channel is used and the signal has some processing. */
+ PROCESSED = 2,
+ }
+ /**
+ * The vector is indexed by the zero-based channels of the microphone, thus
+ * element '0' corresponds to the first channel, '1' to the second, etc. The
+ * vector must contain at least 1 element.
+ */
+ ChannelMapping[] channelMapping;
+}
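
A minimal sketch of how this parcelable might be filled for a two-channel microphone; the 'bottom' id and the mapping values are illustrative:

    #include <aidl/android/hardware/audio/core/MicrophoneDynamicInfo.h>

    using aidl::android::hardware::audio::core::MicrophoneDynamicInfo;

    MicrophoneDynamicInfo makeStereoMicInfo() {
        MicrophoneDynamicInfo info;
        info.id = "bottom";  // must match a MicrophoneInfo.id from IModule.getMicrophones
        // Indexed by channel: channel 0 carries the unprocessed signal, channel 1 is unused.
        info.channelMapping = {MicrophoneDynamicInfo::ChannelMapping::DIRECT,
                               MicrophoneDynamicInfo::ChannelMapping::UNUSED};
        return info;
    }
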
diff --git a/audio/aidl/android/hardware/audio/core/MicrophoneInfo.aidl b/audio/aidl/android/hardware/audio/core/MicrophoneInfo.aidl
new file mode 100644
index 0000000..3b8c7f3
--- /dev/null
+++ b/audio/aidl/android/hardware/audio/core/MicrophoneInfo.aidl
@@ -0,0 +1,146 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.core;
+
+import android.media.audio.common.AudioDevice;
+
+/**
+ * Structure providing static information on a microphone. This information
+ * never changes during the lifetime of the IModule which owns the microphone.
+ * The information presented in this structure indicates the location and
+ * orientation of the microphone on the device as well as useful information
+ * like frequency response and sensitivity.
+ */
+@JavaDerive(equals=true, toString=true)
+@VintfStability
+parcelable MicrophoneInfo {
+ /**
+ * Unique alphanumeric id for the microphone. It must remain the same across
+ * device reboots. The client must never attempt to parse the value of this
+ * field.
+ */
+ @utf8InCpp String id;
+ /**
+ * Describes the location of the microphone in terms of managed audio devices.
+ */
+ AudioDevice device;
+
+ @VintfStability
+ @Backing(type="int")
+ enum Location {
+ /** Microphone location is unknown. */
+ UNKNOWN = 0,
+ /** The microphone is located on the main body of the device. */
+ MAINBODY = 1,
+ /** The microphone is located on a movable main body of the device. */
+ MAINBODY_MOVABLE = 2,
+ /** The microphone is located on a peripheral. */
+ PERIPHERAL = 3,
+ }
+ /** Location of the microphone relative to the body of the device. */
+ Location location = Location.UNKNOWN;
+
+ /**
+ * This value is used when the group of the microphone is unknown.
+ */
+ const int GROUP_UNKNOWN = -1;
+ /**
+ * An identifier to group related microphones together, for example,
+ * microphones of a microphone array should all belong to the same group.
+ * Note that microphones assigned to 'GROUP_UNKNOWN' do not form a group.
+ */
+ int group = GROUP_UNKNOWN;
+ /**
+ * This value is used when the index in the group of the microphone is
+ * unknown.
+ */
+ const int INDEX_IN_THE_GROUP_UNKNOWN = -1;
+ /**
+ * Index of this microphone within the group. The pair (group, index) must
+ * be unique within the same HAL module, except the pair
+ * (GROUP_UNKNOWN, INDEX_IN_THE_GROUP_UNKNOWN).
+ */
+ int indexInTheGroup = INDEX_IN_THE_GROUP_UNKNOWN;
+
+ @VintfStability
+ parcelable Sensitivity {
+ /** Level in dBFS produced by a 1000 Hz tone at 94 dB SPL. */
+ float leveldBFS;
+ /** Level in dB of the max SPL supported at 1000 Hz. */
+ float maxSpldB;
+ /** Level in dB of the min SPL supported at 1000 Hz. */
+ float minSpldB;
+ }
+ /**
+ * If provided, must describe acceptable sound pressure levels (SPL)
+ * for a 1 kHz sine wave, and the resulting level in dBFS.
+ */
+ @nullable Sensitivity sensitivity;
+
+ @VintfStability
+ @Backing(type="int")
+ enum Directionality {
+ UNKNOWN = 0,
+ OMNI = 1,
+ BI_DIRECTIONAL = 2,
+ CARDIOID = 3,
+ HYPER_CARDIOID = 4,
+ SUPER_CARDIOID = 5,
+ }
+ /**
+ * The standard polar pattern of the microphone.
+ */
+ Directionality directionality = Directionality.UNKNOWN;
+
+ /**
+ * A (frequency, level) pair. Used to represent frequency response.
+ */
+ @VintfStability
+ parcelable FrequencyResponsePoint {
+ float frequencyHz;
+ float leveldB;
+ }
+ /**
+ * The frequency response of the microphone as a vector of points, ordered
+ * from low to high frequencies. Levels are in dB, relative to the level
+ * at 1000 Hz.
+ */
+ FrequencyResponsePoint[] frequencyResponse;
+
+ /**
+ * A 3D point used to represent position or orientation of a microphone.
+ */
+ @VintfStability
+ parcelable Coordinate {
+ float x;
+ float y;
+ float z;
+ }
+ /**
+ * If provided, must specify the distances of the microphone's capsule, in
+ * meters, from the bottom-left-back corner of the bounding box of the device
+ * in its natural orientation (PORTRAIT for phones, LANDSCAPE for tablets,
+ * TVs, etc).
+ */
+ @nullable Coordinate position;
+ /**
+ * If provided, describes the normalized point which defines the main
+ * orientation of the microphone's capsule.
+ * Magnitude = sqrt(x^2 + y^2 + z^2) = 1.
+ */
+ @nullable Coordinate orientation;
+}
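
A minimal sketch of how the static description might be filled for a hypothetical bottom-edge microphone; all numeric values are illustrative:

    #include <aidl/android/hardware/audio/core/MicrophoneInfo.h>

    using aidl::android::hardware::audio::core::MicrophoneInfo;
    using aidl::android::media::audio::common::AudioDevice;

    MicrophoneInfo makeBottomMicInfo(const AudioDevice& device) {
        MicrophoneInfo mic;
        mic.id = "bottom";                                 // stable across reboots, opaque to clients
        mic.device = device;                               // address of the corresponding device port
        mic.location = MicrophoneInfo::Location::MAINBODY;
        mic.group = 0;
        mic.indexInTheGroup = 0;
        MicrophoneInfo::Sensitivity sens;
        sens.leveldBFS = -37.0f;                           // output level for 1 kHz at 94 dB SPL
        sens.maxSpldB = 132.5f;
        sens.minSpldB = 28.5f;
        mic.sensitivity = sens;
        mic.directionality = MicrophoneInfo::Directionality::OMNI;
        MicrophoneInfo::FrequencyResponsePoint p;
        p.frequencyHz = 1000.0f;
        p.leveldB = 0.0f;                                  // reference point at 1 kHz
        mic.frequencyResponse = {p};
        MicrophoneInfo::Coordinate position;
        position.x = 0.01f;                                // meters from the bottom-left-back corner
        position.y = 0.0f;
        position.z = 0.005f;
        mic.position = position;
        MicrophoneInfo::Coordinate orientation;
        orientation.x = 0.0f;
        orientation.y = 0.0f;
        orientation.z = 1.0f;                              // unit vector
        mic.orientation = orientation;
        return mic;
    }
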
diff --git a/audio/aidl/default/Configuration.cpp b/audio/aidl/default/Configuration.cpp
index a3e5ff7..280814f 100644
--- a/audio/aidl/default/Configuration.cpp
+++ b/audio/aidl/default/Configuration.cpp
@@ -243,6 +243,13 @@
AudioChannelLayout::LAYOUT_MONO, 48000, 0, true,
createDeviceExt(AudioDeviceType::IN_MICROPHONE, 0)));
+ MicrophoneInfo mic;
+ mic.id = "zero";
+ mic.device = zeroInDevice.ext.get<AudioPortExt::Tag::device>().device;
+ mic.group = 0;
+ mic.indexInTheGroup = 0;
+ c.microphones = std::vector<MicrophoneInfo>{mic};
+
AudioPort primaryInMix =
createPort(c.nextPortId++, "primary input", 0, true, createPortMixExt(2, 2));
primaryInMix.profiles.push_back(
diff --git a/audio/aidl/default/Module.cpp b/audio/aidl/default/Module.cpp
index 9dbd61c..1e561d4 100644
--- a/audio/aidl/default/Module.cpp
+++ b/audio/aidl/default/Module.cpp
@@ -31,6 +31,7 @@
using aidl::android::hardware::audio::common::SinkMetadata;
using aidl::android::hardware::audio::common::SourceMetadata;
using aidl::android::media::audio::common::AudioChannelLayout;
+using aidl::android::media::audio::common::AudioDevice;
using aidl::android::media::audio::common::AudioFormatDescription;
using aidl::android::media::audio::common::AudioFormatType;
using aidl::android::media::audio::common::AudioInputFlags;
@@ -136,7 +137,8 @@
StreamContext temp(
std::make_unique<StreamContext::CommandMQ>(1, true /*configureEventFlagWord*/),
std::make_unique<StreamContext::ReplyMQ>(1, true /*configureEventFlagWord*/),
- frameSize, std::make_unique<StreamContext::DataMQ>(frameSize * in_bufferSizeFrames),
+ portConfigIt->format.value(), portConfigIt->channelMask.value(),
+ std::make_unique<StreamContext::DataMQ>(frameSize * in_bufferSizeFrames),
asyncCallback, mDebug.streamTransientStateDelayMs);
if (temp.isValid()) {
*out_context = std::move(temp);
@@ -149,6 +151,39 @@
return ndk::ScopedAStatus::ok();
}
+std::vector<AudioDevice> Module::findConnectedDevices(int32_t portConfigId) {
+ std::vector<AudioDevice> result;
+ auto& ports = getConfig().ports;
+ auto portIds = portIdsFromPortConfigIds(findConnectedPortConfigIds(portConfigId));
+ for (auto it = portIds.begin(); it != portIds.end(); ++it) {
+ auto portIt = findById<AudioPort>(ports, *it);
+ if (portIt != ports.end() && portIt->ext.getTag() == AudioPortExt::Tag::device) {
+ result.push_back(portIt->ext.template get<AudioPortExt::Tag::device>().device);
+ }
+ }
+ return result;
+}
+
+std::set<int32_t> Module::findConnectedPortConfigIds(int32_t portConfigId) {
+ std::set<int32_t> result;
+ auto patchIdsRange = mPatches.equal_range(portConfigId);
+ auto& patches = getConfig().patches;
+ for (auto it = patchIdsRange.first; it != patchIdsRange.second; ++it) {
+ auto patchIt = findById<AudioPatch>(patches, it->second);
+ if (patchIt == patches.end()) {
+ LOG(FATAL) << __func__ << ": patch with id " << it->second << " taken from mPatches "
+ << "not found in the configuration";
+ }
+ if (std::find(patchIt->sourcePortConfigIds.begin(), patchIt->sourcePortConfigIds.end(),
+ portConfigId) != patchIt->sourcePortConfigIds.end()) {
+ result.insert(patchIt->sinkPortConfigIds.begin(), patchIt->sinkPortConfigIds.end());
+ } else {
+ result.insert(patchIt->sourcePortConfigIds.begin(), patchIt->sourcePortConfigIds.end());
+ }
+ }
+ return result;
+}
+
ndk::ScopedAStatus Module::findPortIdForNewStream(int32_t in_portConfigId, AudioPort** port) {
auto& configs = getConfig().portConfigs;
auto portConfigIt = findById<AudioPortConfig>(configs, in_portConfigId);
@@ -187,6 +222,19 @@
return ndk::ScopedAStatus::ok();
}
+template <typename C>
+std::set<int32_t> Module::portIdsFromPortConfigIds(C portConfigIds) {
+ std::set<int32_t> result;
+ auto& portConfigs = getConfig().portConfigs;
+ for (auto it = portConfigIds.begin(); it != portConfigIds.end(); ++it) {
+ auto portConfigIt = findById<AudioPortConfig>(portConfigs, *it);
+ if (portConfigIt != portConfigs.end()) {
+ result.insert(portConfigIt->portId);
+ }
+ }
+ return result;
+}
+
internal::Configuration& Module::getConfig() {
if (!mConfig) {
mConfig.reset(new internal::Configuration(internal::getNullPrimaryConfiguration()));
@@ -223,12 +271,16 @@
idsToConnect.insert(newPatch.sinkPortConfigIds.begin(), newPatch.sinkPortConfigIds.end());
std::for_each(idsToDisconnect.begin(), idsToDisconnect.end(), [&](const auto& portConfigId) {
if (idsToConnect.count(portConfigId) == 0) {
- mStreams.setStreamIsConnected(portConfigId, false);
+ LOG(DEBUG) << "The stream on port config id " << portConfigId << " is not connected";
+ mStreams.setStreamIsConnected(portConfigId, {});
}
});
std::for_each(idsToConnect.begin(), idsToConnect.end(), [&](const auto& portConfigId) {
if (idsToDisconnect.count(portConfigId) == 0) {
- mStreams.setStreamIsConnected(portConfigId, true);
+ const auto connectedDevices = findConnectedDevices(portConfigId);
+ LOG(DEBUG) << "The stream on port config id " << portConfigId
+ << " is connected to: " << ::android::internal::ToString(connectedDevices);
+ mStreams.setStreamIsConnected(portConfigId, connectedDevices);
}
});
}
@@ -468,14 +520,15 @@
return status;
}
context.fillDescriptor(&_aidl_return->desc);
- auto stream = ndk::SharedRefBase::make<StreamIn>(in_args.sinkMetadata, std::move(context));
+ auto stream = ndk::SharedRefBase::make<StreamIn>(in_args.sinkMetadata, std::move(context),
+ mConfig->microphones);
if (auto status = stream->init(); !status.isOk()) {
return status;
}
StreamWrapper streamWrapper(stream);
auto patchIt = mPatches.find(in_args.portConfigId);
if (patchIt != mPatches.end()) {
- streamWrapper.setStreamIsConnected(true);
+ streamWrapper.setStreamIsConnected(findConnectedDevices(in_args.portConfigId));
}
mStreams.insert(port->id, in_args.portConfigId, std::move(streamWrapper));
_aidl_return->stream = std::move(stream);
@@ -525,7 +578,7 @@
StreamWrapper streamWrapper(stream);
auto patchIt = mPatches.find(in_args.portConfigId);
if (patchIt != mPatches.end()) {
- streamWrapper.setStreamIsConnected(true);
+ streamWrapper.setStreamIsConnected(findConnectedDevices(in_args.portConfigId));
}
mStreams.insert(port->id, in_args.portConfigId, std::move(streamWrapper));
_aidl_return->stream = std::move(stream);
@@ -844,6 +897,12 @@
return ndk::ScopedAStatus::ok();
}
+ndk::ScopedAStatus Module::getMicrophones(std::vector<MicrophoneInfo>* _aidl_return) {
+ *_aidl_return = mConfig->microphones;
+ LOG(DEBUG) << __func__ << ": returning " << ::android::internal::ToString(*_aidl_return);
+ return ndk::ScopedAStatus::ok();
+}
+
ndk::ScopedAStatus Module::updateAudioMode(AudioMode in_mode) {
// No checks for supported audio modes here, it's an informative notification.
LOG(DEBUG) << __func__ << ": " << toString(in_mode);
diff --git a/audio/aidl/default/Stream.cpp b/audio/aidl/default/Stream.cpp
index d7c352f..be5887c 100644
--- a/audio/aidl/default/Stream.cpp
+++ b/audio/aidl/default/Stream.cpp
@@ -18,12 +18,17 @@
#include <android-base/logging.h>
#include <utils/SystemClock.h>
+#include <Utils.h>
+
#include "core-impl/Module.h"
#include "core-impl/Stream.h"
using aidl::android::hardware::audio::common::SinkMetadata;
using aidl::android::hardware::audio::common::SourceMetadata;
+using aidl::android::media::audio::common::AudioDevice;
using aidl::android::media::audio::common::AudioOffloadInfo;
+using android::hardware::audio::common::getChannelCount;
+using android::hardware::audio::common::getFrameSizeInBytes;
namespace aidl::android::hardware::audio::core {
@@ -35,13 +40,17 @@
desc->reply = mReplyMQ->dupeDesc();
}
if (mDataMQ) {
- desc->frameSizeBytes = mFrameSize;
- desc->bufferSizeFrames =
- mDataMQ->getQuantumCount() * mDataMQ->getQuantumSize() / mFrameSize;
+ const size_t frameSize = getFrameSize();
+ desc->frameSizeBytes = frameSize;
+ desc->bufferSizeFrames = mDataMQ->getQuantumCount() * mDataMQ->getQuantumSize() / frameSize;
desc->audio.set<StreamDescriptor::AudioBuffer::Tag::fmq>(mDataMQ->dupeDesc());
}
}
+size_t StreamContext::getFrameSize() const {
+ return getFrameSizeInBytes(mFormat, mChannelLayout);
+}
+
bool StreamContext::isValid() const {
if (mCommandMQ && !mCommandMQ->isValid()) {
LOG(ERROR) << "command FMQ is invalid";
@@ -51,8 +60,8 @@
LOG(ERROR) << "reply FMQ is invalid";
return false;
}
- if (mFrameSize == 0) {
- LOG(ERROR) << "frame size is not set";
+ if (getFrameSize() == 0) {
+ LOG(ERROR) << "frame size is invalid";
return false;
}
if (mDataMQ && !mDataMQ->isValid()) {
@@ -530,11 +539,64 @@
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
}
-StreamIn::StreamIn(const SinkMetadata& sinkMetadata, StreamContext context)
- : StreamCommon<SinkMetadata, StreamInWorker>(sinkMetadata, std::move(context)) {
+namespace {
+static std::map<AudioDevice, std::string> transformMicrophones(
+ const std::vector<MicrophoneInfo>& microphones) {
+ std::map<AudioDevice, std::string> result;
+ std::transform(microphones.begin(), microphones.end(), std::inserter(result, result.begin()),
+ [](const auto& mic) { return std::make_pair(mic.device, mic.id); });
+ return result;
+}
+} // namespace
+
+StreamIn::StreamIn(const SinkMetadata& sinkMetadata, StreamContext context,
+ const std::vector<MicrophoneInfo>& microphones)
+ : StreamCommon<SinkMetadata, StreamInWorker>(sinkMetadata, std::move(context)),
+ mMicrophones(transformMicrophones(microphones)) {
LOG(DEBUG) << __func__;
}
+ndk::ScopedAStatus StreamIn::getActiveMicrophones(
+ std::vector<MicrophoneDynamicInfo>* _aidl_return) {
+ std::vector<MicrophoneDynamicInfo> result;
+ std::vector<MicrophoneDynamicInfo::ChannelMapping> channelMapping{
+ getChannelCount(mContext.getChannelLayout()),
+ MicrophoneDynamicInfo::ChannelMapping::DIRECT};
+ for (auto it = mConnectedDevices.begin(); it != mConnectedDevices.end(); ++it) {
+ if (auto micIt = mMicrophones.find(*it); micIt != mMicrophones.end()) {
+ MicrophoneDynamicInfo dynMic;
+ dynMic.id = micIt->second;
+ dynMic.channelMapping = channelMapping;
+ result.push_back(std::move(dynMic));
+ }
+ }
+ *_aidl_return = std::move(result);
+ LOG(DEBUG) << __func__ << ": returning " << ::android::internal::ToString(*_aidl_return);
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamIn::getMicrophoneDirection(MicrophoneDirection* _aidl_return) {
+ LOG(DEBUG) << __func__;
+ (void)_aidl_return;
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+}
+
+ndk::ScopedAStatus StreamIn::setMicrophoneDirection(MicrophoneDirection in_direction) {
+ LOG(DEBUG) << __func__ << ": direction " << toString(in_direction);
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+}
+
+ndk::ScopedAStatus StreamIn::getMicrophoneFieldDimension(float* _aidl_return) {
+ LOG(DEBUG) << __func__;
+ (void)_aidl_return;
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+}
+
+ndk::ScopedAStatus StreamIn::setMicrophoneFieldDimension(float in_zoom) {
+ LOG(DEBUG) << __func__ << ": zoom " << in_zoom;
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+}
+
StreamOut::StreamOut(const SourceMetadata& sourceMetadata, StreamContext context,
const std::optional<AudioOffloadInfo>& offloadInfo)
: StreamCommon<SourceMetadata, StreamOutWorker>(sourceMetadata, std::move(context)),
diff --git a/audio/aidl/default/include/core-impl/Configuration.h b/audio/aidl/default/include/core-impl/Configuration.h
index d5cd30b..95adcd7 100644
--- a/audio/aidl/default/include/core-impl/Configuration.h
+++ b/audio/aidl/default/include/core-impl/Configuration.h
@@ -21,12 +21,14 @@
#include <aidl/android/hardware/audio/core/AudioPatch.h>
#include <aidl/android/hardware/audio/core/AudioRoute.h>
+#include <aidl/android/hardware/audio/core/MicrophoneInfo.h>
#include <aidl/android/media/audio/common/AudioPort.h>
#include <aidl/android/media/audio/common/AudioPortConfig.h>
namespace aidl::android::hardware::audio::core::internal {
struct Configuration {
+ std::vector<MicrophoneInfo> microphones;
std::vector<::aidl::android::media::audio::common::AudioPort> ports;
std::vector<::aidl::android::media::audio::common::AudioPortConfig> portConfigs;
std::vector<::aidl::android::media::audio::common::AudioPortConfig> initialConfigs;
diff --git a/audio/aidl/default/include/core-impl/Module.h b/audio/aidl/default/include/core-impl/Module.h
index f7b85ed..3509327 100644
--- a/audio/aidl/default/include/core-impl/Module.h
+++ b/audio/aidl/default/include/core-impl/Module.h
@@ -77,6 +77,7 @@
ndk::ScopedAStatus setMasterVolume(float in_volume) override;
ndk::ScopedAStatus getMicMute(bool* _aidl_return) override;
ndk::ScopedAStatus setMicMute(bool in_mute) override;
+ ndk::ScopedAStatus getMicrophones(std::vector<MicrophoneInfo>* _aidl_return) override;
ndk::ScopedAStatus updateAudioMode(
::aidl::android::hardware::audio::core::AudioMode in_mode) override;
ndk::ScopedAStatus updateScreenRotation(
@@ -88,9 +89,14 @@
int32_t in_portConfigId, int64_t in_bufferSizeFrames,
std::shared_ptr<IStreamCallback> asyncCallback,
::aidl::android::hardware::audio::core::StreamContext* out_context);
+ std::vector<::aidl::android::media::audio::common::AudioDevice> findConnectedDevices(
+ int32_t portConfigId);
+ std::set<int32_t> findConnectedPortConfigIds(int32_t portConfigId);
ndk::ScopedAStatus findPortIdForNewStream(
int32_t in_portConfigId, ::aidl::android::media::audio::common::AudioPort** port);
internal::Configuration& getConfig();
+ template <typename C>
+ std::set<int32_t> portIdsFromPortConfigIds(C portConfigIds);
void registerPatch(const AudioPatch& patch);
void updateStreamsConnectedState(const AudioPatch& oldPatch, const AudioPatch& newPatch);
diff --git a/audio/aidl/default/include/core-impl/Stream.h b/audio/aidl/default/include/core-impl/Stream.h
index 3c96973..7a07eeb 100644
--- a/audio/aidl/default/include/core-impl/Stream.h
+++ b/audio/aidl/default/include/core-impl/Stream.h
@@ -30,7 +30,9 @@
#include <aidl/android/hardware/audio/core/BnStreamIn.h>
#include <aidl/android/hardware/audio/core/BnStreamOut.h>
#include <aidl/android/hardware/audio/core/IStreamCallback.h>
+#include <aidl/android/hardware/audio/core/MicrophoneInfo.h>
#include <aidl/android/hardware/audio/core/StreamDescriptor.h>
+#include <aidl/android/media/audio/common/AudioDevice.h>
#include <aidl/android/media/audio/common/AudioOffloadInfo.h>
#include <fmq/AidlMessageQueue.h>
#include <system/thread_defs.h>
@@ -61,12 +63,15 @@
StreamContext() = default;
StreamContext(std::unique_ptr<CommandMQ> commandMQ, std::unique_ptr<ReplyMQ> replyMQ,
- size_t frameSize, std::unique_ptr<DataMQ> dataMQ,
- std::shared_ptr<IStreamCallback> asyncCallback, int transientStateDelayMs)
+ const ::aidl::android::media::audio::common::AudioFormatDescription& format,
+ const ::aidl::android::media::audio::common::AudioChannelLayout& channelLayout,
+ std::unique_ptr<DataMQ> dataMQ, std::shared_ptr<IStreamCallback> asyncCallback,
+ int transientStateDelayMs)
: mCommandMQ(std::move(commandMQ)),
mInternalCommandCookie(std::rand()),
mReplyMQ(std::move(replyMQ)),
- mFrameSize(frameSize),
+ mFormat(format),
+ mChannelLayout(channelLayout),
mDataMQ(std::move(dataMQ)),
mAsyncCallback(asyncCallback),
mTransientStateDelayMs(transientStateDelayMs) {}
@@ -74,7 +79,8 @@
: mCommandMQ(std::move(other.mCommandMQ)),
mInternalCommandCookie(other.mInternalCommandCookie),
mReplyMQ(std::move(other.mReplyMQ)),
- mFrameSize(other.mFrameSize),
+ mFormat(other.mFormat),
+ mChannelLayout(other.mChannelLayout),
mDataMQ(std::move(other.mDataMQ)),
mAsyncCallback(other.mAsyncCallback),
mTransientStateDelayMs(other.mTransientStateDelayMs) {}
@@ -82,7 +88,8 @@
mCommandMQ = std::move(other.mCommandMQ);
mInternalCommandCookie = other.mInternalCommandCookie;
mReplyMQ = std::move(other.mReplyMQ);
- mFrameSize = other.mFrameSize;
+ mFormat = std::move(other.mFormat);
+ mChannelLayout = std::move(other.mChannelLayout);
mDataMQ = std::move(other.mDataMQ);
mAsyncCallback = other.mAsyncCallback;
mTransientStateDelayMs = other.mTransientStateDelayMs;
@@ -91,9 +98,15 @@
void fillDescriptor(StreamDescriptor* desc);
std::shared_ptr<IStreamCallback> getAsyncCallback() const { return mAsyncCallback; }
+ ::aidl::android::media::audio::common::AudioChannelLayout getChannelLayout() const {
+ return mChannelLayout;
+ }
CommandMQ* getCommandMQ() const { return mCommandMQ.get(); }
DataMQ* getDataMQ() const { return mDataMQ.get(); }
- size_t getFrameSize() const { return mFrameSize; }
+ ::aidl::android::media::audio::common::AudioFormatDescription getFormat() const {
+ return mFormat;
+ }
+ size_t getFrameSize() const;
int getInternalCommandCookie() const { return mInternalCommandCookie; }
ReplyMQ* getReplyMQ() const { return mReplyMQ.get(); }
int getTransientStateDelayMs() const { return mTransientStateDelayMs; }
@@ -104,7 +117,8 @@
std::unique_ptr<CommandMQ> mCommandMQ;
int mInternalCommandCookie; // The value used to confirm that the command was posted internally
std::unique_ptr<ReplyMQ> mReplyMQ;
- size_t mFrameSize;
+ ::aidl::android::media::audio::common::AudioFormatDescription mFormat;
+ ::aidl::android::media::audio::common::AudioChannelLayout mChannelLayout;
std::unique_ptr<DataMQ> mDataMQ;
std::shared_ptr<IStreamCallback> mAsyncCallback;
int mTransientStateDelayMs;
@@ -193,7 +207,11 @@
: ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
}
bool isClosed() const { return mWorker.isClosed(); }
- void setIsConnected(bool connected) { mWorker.setIsConnected(connected); }
+ void setIsConnected(
+ const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) {
+ mWorker.setIsConnected(!devices.empty());
+ mConnectedDevices = devices;
+ }
ndk::ScopedAStatus updateMetadata(const Metadata& metadata);
protected:
@@ -205,6 +223,7 @@
Metadata mMetadata;
StreamContext mContext;
StreamWorker mWorker;
+ std::vector<::aidl::android::media::audio::common::AudioDevice> mConnectedDevices;
};
class StreamIn
@@ -214,6 +233,12 @@
return StreamCommon<::aidl::android::hardware::audio::common::SinkMetadata,
StreamInWorker>::close();
}
+ ndk::ScopedAStatus getActiveMicrophones(
+ std::vector<MicrophoneDynamicInfo>* _aidl_return) override;
+ ndk::ScopedAStatus getMicrophoneDirection(MicrophoneDirection* _aidl_return) override;
+ ndk::ScopedAStatus setMicrophoneDirection(MicrophoneDirection in_direction) override;
+ ndk::ScopedAStatus getMicrophoneFieldDimension(float* _aidl_return) override;
+ ndk::ScopedAStatus setMicrophoneFieldDimension(float in_zoom) override;
ndk::ScopedAStatus updateMetadata(const ::aidl::android::hardware::audio::common::SinkMetadata&
in_sinkMetadata) override {
return StreamCommon<::aidl::android::hardware::audio::common::SinkMetadata,
@@ -222,7 +247,10 @@
public:
StreamIn(const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata,
- StreamContext context);
+ StreamContext context, const std::vector<MicrophoneInfo>& microphones);
+
+ private:
+ const std::map<::aidl::android::media::audio::common::AudioDevice, std::string> mMicrophones;
};
class StreamOut : public StreamCommon<::aidl::android::hardware::audio::common::SourceMetadata,
@@ -261,11 +289,12 @@
},
mStream);
}
- void setStreamIsConnected(bool connected) {
+ void setStreamIsConnected(
+ const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) {
std::visit(
[&](auto&& ws) {
auto s = ws.lock();
- if (s) s->setIsConnected(connected);
+ if (s) s->setIsConnected(devices);
},
mStream);
}
@@ -288,9 +317,11 @@
mStreams.insert(std::pair{portConfigId, sw});
mStreams.insert(std::pair{portId, std::move(sw)});
}
- void setStreamIsConnected(int32_t portConfigId, bool connected) {
+ void setStreamIsConnected(
+ int32_t portConfigId,
+ const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) {
if (auto it = mStreams.find(portConfigId); it != mStreams.end()) {
- it->second.setStreamIsConnected(connected);
+ it->second.setStreamIsConnected(devices);
}
}
diff --git a/audio/aidl/vts/AudioHalBinderServiceUtil.h b/audio/aidl/vts/AudioHalBinderServiceUtil.h
index c8d81b1..b4b4632 100644
--- a/audio/aidl/vts/AudioHalBinderServiceUtil.h
+++ b/audio/aidl/vts/AudioHalBinderServiceUtil.h
@@ -31,7 +31,7 @@
public:
ndk::SpAIBinder connectToService(const std::string& serviceName) {
mServiceName = serviceName;
- mBinder = ndk::SpAIBinder(AServiceManager_getService(serviceName.c_str()));
+ mBinder = ndk::SpAIBinder(AServiceManager_waitForService(serviceName.c_str()));
if (mBinder == nullptr) {
LOG(ERROR) << "Failed to get service " << serviceName;
} else {
diff --git a/audio/aidl/vts/ModuleConfig.cpp b/audio/aidl/vts/ModuleConfig.cpp
index c081402..7e4b148 100644
--- a/audio/aidl/vts/ModuleConfig.cpp
+++ b/audio/aidl/vts/ModuleConfig.cpp
@@ -28,6 +28,7 @@
using aidl::android::hardware::audio::core::IModule;
using aidl::android::media::audio::common::AudioChannelLayout;
+using aidl::android::media::audio::common::AudioDeviceType;
using aidl::android::media::audio::common::AudioEncapsulationMode;
using aidl::android::media::audio::common::AudioFormatDescription;
using aidl::android::media::audio::common::AudioFormatType;
@@ -62,6 +63,18 @@
return {};
}
+// static
+std::vector<aidl::android::media::audio::common::AudioPort> ModuleConfig::getBuiltInMicPorts(
+ const std::vector<aidl::android::media::audio::common::AudioPort>& ports) {
+ std::vector<AudioPort> result;
+ std::copy_if(ports.begin(), ports.end(), std::back_inserter(result), [](const auto& port) {
+ const auto type = port.ext.template get<AudioPortExt::Tag::device>().device.type;
+ return type.connection.empty() && (type.type == AudioDeviceType::IN_MICROPHONE ||
+ type.type == AudioDeviceType::IN_MICROPHONE_BACK);
+ });
+ return result;
+}
+
template <typename T>
auto findById(const std::vector<T>& v, int32_t id) {
return std::find_if(v.begin(), v.end(), [&](const auto& p) { return p.id == id; });
@@ -107,38 +120,45 @@
return result;
}
-std::vector<AudioPort> ModuleConfig::getInputMixPorts() const {
+std::vector<AudioPort> ModuleConfig::getInputMixPorts(bool attachedOnly) const {
std::vector<AudioPort> result;
- std::copy_if(mPorts.begin(), mPorts.end(), std::back_inserter(result), [](const auto& port) {
+ std::copy_if(mPorts.begin(), mPorts.end(), std::back_inserter(result), [&](const auto& port) {
return port.ext.getTag() == AudioPortExt::Tag::mix &&
- port.flags.getTag() == AudioIoFlags::Tag::input;
+ port.flags.getTag() == AudioIoFlags::Tag::input &&
+ (!attachedOnly || !getAttachedSourceDevicesPortsForMixPort(port).empty());
});
return result;
}
-std::vector<AudioPort> ModuleConfig::getOutputMixPorts() const {
+std::vector<AudioPort> ModuleConfig::getOutputMixPorts(bool attachedOnly) const {
std::vector<AudioPort> result;
- std::copy_if(mPorts.begin(), mPorts.end(), std::back_inserter(result), [](const auto& port) {
+ std::copy_if(mPorts.begin(), mPorts.end(), std::back_inserter(result), [&](const auto& port) {
return port.ext.getTag() == AudioPortExt::Tag::mix &&
- port.flags.getTag() == AudioIoFlags::Tag::output;
+ port.flags.getTag() == AudioIoFlags::Tag::output &&
+ (!attachedOnly || !getAttachedSinkDevicesPortsForMixPort(port).empty());
});
return result;
}
std::vector<AudioPort> ModuleConfig::getNonBlockingMixPorts(bool attachedOnly,
bool singlePort) const {
- return findMixPorts(false /*isInput*/, singlePort, [&](const AudioPort& port) {
+ return findMixPorts(false /*isInput*/, attachedOnly, singlePort, [&](const AudioPort& port) {
return isBitPositionFlagSet(port.flags.get<AudioIoFlags::Tag::output>(),
- AudioOutputFlags::NON_BLOCKING) &&
- (!attachedOnly || !getAttachedSinkDevicesPortsForMixPort(port).empty());
+ AudioOutputFlags::NON_BLOCKING);
});
}
std::vector<AudioPort> ModuleConfig::getOffloadMixPorts(bool attachedOnly, bool singlePort) const {
- return findMixPorts(false /*isInput*/, singlePort, [&](const AudioPort& port) {
+ return findMixPorts(false /*isInput*/, attachedOnly, singlePort, [&](const AudioPort& port) {
return isBitPositionFlagSet(port.flags.get<AudioIoFlags::Tag::output>(),
- AudioOutputFlags::COMPRESS_OFFLOAD) &&
- (!attachedOnly || !getAttachedSinkDevicesPortsForMixPort(port).empty());
+ AudioOutputFlags::COMPRESS_OFFLOAD);
+ });
+}
+
+std::vector<AudioPort> ModuleConfig::getPrimaryMixPorts(bool attachedOnly, bool singlePort) const {
+ return findMixPorts(false /*isInput*/, attachedOnly, singlePort, [&](const AudioPort& port) {
+ return isBitPositionFlagSet(port.flags.get<AudioIoFlags::Tag::output>(),
+ AudioOutputFlags::PRIMARY);
});
}
@@ -193,7 +213,7 @@
std::optional<ModuleConfig::SrcSinkPair> ModuleConfig::getNonRoutableSrcSinkPair(
bool isInput) const {
- const auto mixPorts = getMixPorts(isInput);
+ const auto mixPorts = getMixPorts(isInput, false /*attachedOnly*/);
std::set<std::pair<int32_t, int32_t>> allowedRoutes;
for (const auto& route : mRoutes) {
for (const auto srcPortId : route.sourcePortIds) {
@@ -344,9 +364,10 @@
}
std::vector<AudioPort> ModuleConfig::findMixPorts(
- bool isInput, bool singlePort, std::function<bool(const AudioPort&)> pred) const {
+ bool isInput, bool attachedOnly, bool singlePort,
+ const std::function<bool(const AudioPort&)>& pred) const {
std::vector<AudioPort> result;
- const auto mixPorts = getMixPorts(isInput);
+ const auto mixPorts = getMixPorts(isInput, attachedOnly);
for (auto mixPortIt = mixPorts.begin(); mixPortIt != mixPorts.end();) {
mixPortIt = std::find_if(mixPortIt, mixPorts.end(), pred);
if (mixPortIt == mixPorts.end()) break;
diff --git a/audio/aidl/vts/ModuleConfig.h b/audio/aidl/vts/ModuleConfig.h
index a85aa7f..7247f3b 100644
--- a/audio/aidl/vts/ModuleConfig.h
+++ b/audio/aidl/vts/ModuleConfig.h
@@ -37,22 +37,32 @@
static std::optional<aidl::android::media::audio::common::AudioOffloadInfo>
generateOffloadInfoIfNeeded(
const aidl::android::media::audio::common::AudioPortConfig& portConfig);
+ static std::vector<aidl::android::media::audio::common::AudioPort> getBuiltInMicPorts(
+ const std::vector<aidl::android::media::audio::common::AudioPort>& ports);
explicit ModuleConfig(aidl::android::hardware::audio::core::IModule* module);
const ndk::ScopedAStatus& getStatus() const { return mStatus; }
std::string getError() const { return mStatus.getMessage(); }
std::vector<aidl::android::media::audio::common::AudioPort> getAttachedDevicePorts() const;
+ std::vector<aidl::android::media::audio::common::AudioPort> getAttachedMicrophonePorts() const {
+ return getBuiltInMicPorts(getAttachedDevicePorts());
+ }
std::vector<aidl::android::media::audio::common::AudioPort> getExternalDevicePorts() const;
- std::vector<aidl::android::media::audio::common::AudioPort> getInputMixPorts() const;
- std::vector<aidl::android::media::audio::common::AudioPort> getOutputMixPorts() const;
- std::vector<aidl::android::media::audio::common::AudioPort> getMixPorts(bool isInput) const {
- return isInput ? getInputMixPorts() : getOutputMixPorts();
+ std::vector<aidl::android::media::audio::common::AudioPort> getInputMixPorts(
+ bool attachedOnly) const;
+ std::vector<aidl::android::media::audio::common::AudioPort> getOutputMixPorts(
+ bool attachedOnly) const;
+ std::vector<aidl::android::media::audio::common::AudioPort> getMixPorts(
+ bool isInput, bool attachedOnly) const {
+ return isInput ? getInputMixPorts(attachedOnly) : getOutputMixPorts(attachedOnly);
}
std::vector<aidl::android::media::audio::common::AudioPort> getNonBlockingMixPorts(
bool attachedOnly, bool singlePort) const;
std::vector<aidl::android::media::audio::common::AudioPort> getOffloadMixPorts(
bool attachedOnly, bool singlePort) const;
+ std::vector<aidl::android::media::audio::common::AudioPort> getPrimaryMixPorts(
+ bool attachedOnly, bool singlePort) const;
std::vector<aidl::android::media::audio::common::AudioPort> getAttachedDevicesPortsForMixPort(
bool isInput, const aidl::android::media::audio::common::AudioPort& mixPort) const {
@@ -81,14 +91,17 @@
}
std::vector<aidl::android::media::audio::common::AudioPortConfig> getPortConfigsForMixPorts()
const {
- auto inputs = generateAudioMixPortConfigs(getInputMixPorts(), true, false);
- auto outputs = generateAudioMixPortConfigs(getOutputMixPorts(), false, false);
+ auto inputs =
+ generateAudioMixPortConfigs(getInputMixPorts(false /*attachedOnly*/), true, false);
+ auto outputs = generateAudioMixPortConfigs(getOutputMixPorts(false /*attachedOnly*/), false,
+ false);
inputs.insert(inputs.end(), outputs.begin(), outputs.end());
return inputs;
}
std::vector<aidl::android::media::audio::common::AudioPortConfig> getPortConfigsForMixPorts(
bool isInput) const {
- return generateAudioMixPortConfigs(getMixPorts(isInput), isInput, false);
+ return generateAudioMixPortConfigs(getMixPorts(isInput, false /*attachedOnly*/), isInput,
+ false);
}
std::vector<aidl::android::media::audio::common::AudioPortConfig> getPortConfigsForMixPorts(
bool isInput, const aidl::android::media::audio::common::AudioPort& port) const {
@@ -96,7 +109,8 @@
}
std::optional<aidl::android::media::audio::common::AudioPortConfig> getSingleConfigForMixPort(
bool isInput) const {
- const auto config = generateAudioMixPortConfigs(getMixPorts(isInput), isInput, true);
+ const auto config = generateAudioMixPortConfigs(
+ getMixPorts(isInput, false /*attachedOnly*/), isInput, true);
if (!config.empty()) {
return *config.begin();
}
@@ -125,8 +139,9 @@
private:
std::vector<aidl::android::media::audio::common::AudioPort> findMixPorts(
- bool isInput, bool singlePort,
- std::function<bool(const aidl::android::media::audio::common::AudioPort&)> pred) const;
+ bool isInput, bool attachedOnly, bool singlePort,
+ const std::function<bool(const aidl::android::media::audio::common::AudioPort&)>& pred)
+ const;
std::vector<aidl::android::media::audio::common::AudioPortConfig> generateAudioMixPortConfigs(
const std::vector<aidl::android::media::audio::common::AudioPort>& ports, bool isInput,
bool singleProfile) const;
diff --git a/audio/aidl/vts/VtsHalAudioCoreTargetTest.cpp b/audio/aidl/vts/VtsHalAudioCoreTargetTest.cpp
index 79b20fe..c0c04f4 100644
--- a/audio/aidl/vts/VtsHalAudioCoreTargetTest.cpp
+++ b/audio/aidl/vts/VtsHalAudioCoreTargetTest.cpp
@@ -59,6 +59,8 @@
using aidl::android::hardware::audio::core::IStreamIn;
using aidl::android::hardware::audio::core::IStreamOut;
using aidl::android::hardware::audio::core::ITelephony;
+using aidl::android::hardware::audio::core::MicrophoneDynamicInfo;
+using aidl::android::hardware::audio::core::MicrophoneInfo;
using aidl::android::hardware::audio::core::ModuleDebug;
using aidl::android::hardware::audio::core::StreamDescriptor;
using aidl::android::hardware::common::fmq::SynchronizedReadWrite;
@@ -192,6 +194,7 @@
*isSupported = false;
return;
}
+ ASSERT_TRUE(status.isOk()) << "Unexpected status from a getter: " << status;
*isSupported = true;
for (const auto v : validValues) {
EXPECT_IS_OK((inst->*setter)(v)) << "for valid value: " << v;
@@ -1567,6 +1570,42 @@
// TODO: Test that mic mute actually mutes input.
}
+TEST_P(AudioCoreModule, GetMicrophones) {
+ ASSERT_NO_FATAL_FAILURE(SetUpModuleConfig());
+ const std::vector<AudioPort> builtInMicPorts = moduleConfig->getAttachedMicrophonePorts();
+ std::vector<MicrophoneInfo> micInfos;
+ ScopedAStatus status = module->getMicrophones(&micInfos);
+ if (!status.isOk()) {
+ EXPECT_EQ(EX_UNSUPPORTED_OPERATION, status.getExceptionCode());
+ ASSERT_FALSE(builtInMicPorts.empty())
+ << "When the HAL module does not have built-in microphones, IModule.getMicrophones"
+ << " must complete with no error and return an empty list";
+ GTEST_SKIP() << "Microphone info is not supported";
+ }
+ std::set<int32_t> micPortIdsWithInfo;
+ for (const auto& micInfo : micInfos) {
+ const auto& micDevice = micInfo.device;
+ const auto it =
+ std::find_if(builtInMicPorts.begin(), builtInMicPorts.end(), [&](const auto& port) {
+ return port.ext.template get<AudioPortExt::Tag::device>().device == micDevice;
+ });
+ if (it != builtInMicPorts.end()) {
+ micPortIdsWithInfo.insert(it->id);
+ } else {
+ ADD_FAILURE() << "No device port found with a device specified for the microphone \""
+ << micInfo.id << "\": " << micDevice.toString();
+ }
+ }
+ if (micPortIdsWithInfo.size() != builtInMicPorts.size()) {
+ std::vector<AudioPort> micPortsNoInfo;
+ std::copy_if(builtInMicPorts.begin(), builtInMicPorts.end(),
+ std::back_inserter(micPortsNoInfo),
+ [&](const auto& port) { return micPortIdsWithInfo.count(port.id) == 0; });
+ ADD_FAILURE() << "No MicrophoneInfo is provided for the following microphone device ports: "
+ << ::android::internal::ToString(micPortsNoInfo);
+ }
+}
+
TEST_P(AudioCoreModule, UpdateAudioMode) {
for (const auto mode : ::ndk::enum_range<AudioMode>()) {
EXPECT_IS_OK(module->updateAudioMode(mode)) << toString(mode);
@@ -1747,13 +1786,11 @@
void OpenOverMaxCount() {
constexpr bool isInput = IOTraits<Stream>::is_input;
- auto ports = moduleConfig->getMixPorts(isInput);
+ auto ports = moduleConfig->getMixPorts(isInput, true /*attachedOnly*/);
bool hasSingleRun = false;
for (const auto& port : ports) {
const size_t maxStreamCount = port.ext.get<AudioPortExt::Tag::mix>().maxOpenStreamCount;
- if (maxStreamCount == 0 ||
- moduleConfig->getAttachedDevicesPortsForMixPort(isInput, port).empty()) {
- // No restrictions or no permanently attached devices.
+ if (maxStreamCount == 0) {
continue;
}
auto portConfigs = moduleConfig->getPortConfigsForMixPorts(isInput, port);
@@ -1878,20 +1915,127 @@
TEST_IN_AND_OUT_STREAM(ResetPortConfigWithOpenStream);
TEST_IN_AND_OUT_STREAM(SendInvalidCommand);
+namespace aidl::android::hardware::audio::core {
+std::ostream& operator<<(std::ostream& os, const IStreamIn::MicrophoneDirection& md) {
+ os << toString(md);
+ return os;
+}
+} // namespace aidl::android::hardware::audio::core
+
+TEST_P(AudioStreamIn, ActiveMicrophones) {
+ std::vector<MicrophoneInfo> micInfos;
+ ScopedAStatus status = module->getMicrophones(&micInfos);
+ if (!status.isOk()) {
+ GTEST_SKIP() << "Microphone info is not supported";
+ }
+ const auto ports = moduleConfig->getInputMixPorts(true /*attachedOnly*/);
+ if (ports.empty()) {
+ GTEST_SKIP() << "No input mix ports for attached devices";
+ }
+ for (const auto& port : ports) {
+ const auto portConfig = moduleConfig->getSingleConfigForMixPort(true, port);
+ ASSERT_TRUE(portConfig.has_value()) << "No profiles specified for input mix port";
+ WithStream<IStreamIn> stream(portConfig.value());
+ ASSERT_NO_FATAL_FAILURE(stream.SetUp(module.get(), kDefaultBufferSizeFrames));
+ {
+ // The port of the stream is not connected, thus the list of active mics must be empty.
+ std::vector<MicrophoneDynamicInfo> activeMics;
+ EXPECT_IS_OK(stream.get()->getActiveMicrophones(&activeMics));
+ EXPECT_TRUE(activeMics.empty()) << "a stream on an unconnected port returns a "
+ "non-empty list of active microphones";
+ }
+ if (auto micDevicePorts = ModuleConfig::getBuiltInMicPorts(
+ moduleConfig->getAttachedSourceDevicesPortsForMixPort(port));
+ !micDevicePorts.empty()) {
+ auto devicePortConfig = moduleConfig->getSingleConfigForDevicePort(micDevicePorts[0]);
+ WithAudioPatch patch(true /*isInput*/, stream.getPortConfig(), devicePortConfig);
+ ASSERT_NO_FATAL_FAILURE(patch.SetUp(module.get()));
+ std::vector<MicrophoneDynamicInfo> activeMics;
+ EXPECT_IS_OK(stream.get()->getActiveMicrophones(&activeMics));
+ for (const auto& mic : activeMics) {
+ EXPECT_NE(micInfos.end(),
+ std::find_if(micInfos.begin(), micInfos.end(),
+ [&](const auto& micInfo) { return micInfo.id == mic.id; }))
+ << "active microphone \"" << mic.id << "\" is not listed in "
+ << "microphone infos returned by the module: "
+ << ::android::internal::ToString(micInfos);
+ EXPECT_NE(0UL, mic.channelMapping.size())
+ << "No channels specified for the microphone \"" << mic.id << "\"";
+ }
+ }
+ {
+ // Now the port of the stream is not connected again, re-check that there are no
+ // active microphones.
+ std::vector<MicrophoneDynamicInfo> activeMics;
+ EXPECT_IS_OK(stream.get()->getActiveMicrophones(&activeMics));
+ EXPECT_TRUE(activeMics.empty()) << "a stream on an unconnected port returns a "
+ "non-empty list of active microphones";
+ }
+ }
+}
+
+TEST_P(AudioStreamIn, MicrophoneDirection) {
+ using MD = IStreamIn::MicrophoneDirection;
+ const auto ports = moduleConfig->getInputMixPorts(true /*attachedOnly*/);
+ if (ports.empty()) {
+ GTEST_SKIP() << "No input mix ports for attached devices";
+ }
+ bool isSupported = false;
+ for (const auto& port : ports) {
+ const auto portConfig = moduleConfig->getSingleConfigForMixPort(true, port);
+ ASSERT_TRUE(portConfig.has_value()) << "No profiles specified for input mix port";
+ WithStream<IStreamIn> stream(portConfig.value());
+ ASSERT_NO_FATAL_FAILURE(stream.SetUp(module.get(), kDefaultBufferSizeFrames));
+ EXPECT_NO_FATAL_FAILURE(
+ TestAccessors<MD>(stream.get(), &IStreamIn::getMicrophoneDirection,
+ &IStreamIn::setMicrophoneDirection,
+ std::vector<MD>(enum_range<MD>().begin(), enum_range<MD>().end()),
+ {}, &isSupported));
+ if (!isSupported) break;
+ }
+ if (!isSupported) {
+ GTEST_SKIP() << "Microphone direction is not supported";
+ }
+}
+
+TEST_P(AudioStreamIn, MicrophoneFieldDimension) {
+ const auto ports = moduleConfig->getInputMixPorts(true /*attachedOnly*/);
+ if (ports.empty()) {
+ GTEST_SKIP() << "No input mix ports for attached devices";
+ }
+ bool isSupported = false;
+ for (const auto& port : ports) {
+ const auto portConfig = moduleConfig->getSingleConfigForMixPort(true, port);
+ ASSERT_TRUE(portConfig.has_value()) << "No profiles specified for input mix port";
+ WithStream<IStreamIn> stream(portConfig.value());
+ ASSERT_NO_FATAL_FAILURE(stream.SetUp(module.get(), kDefaultBufferSizeFrames));
+ EXPECT_NO_FATAL_FAILURE(TestAccessors<float>(
+ stream.get(), &IStreamIn::getMicrophoneFieldDimension,
+ &IStreamIn::setMicrophoneFieldDimension,
+ {IStreamIn::MIC_FIELD_DIMENSION_WIDE_ANGLE,
+ IStreamIn::MIC_FIELD_DIMENSION_WIDE_ANGLE / 2.0f,
+ IStreamIn::MIC_FIELD_DIMENSION_NO_ZOOM,
+ IStreamIn::MIC_FIELD_DIMENSION_MAX_ZOOM / 2.0f,
+ IStreamIn::MIC_FIELD_DIMENSION_MAX_ZOOM},
+ {IStreamIn::MIC_FIELD_DIMENSION_WIDE_ANGLE * 2,
+ IStreamIn::MIC_FIELD_DIMENSION_MAX_ZOOM * 2,
+ IStreamIn::MIC_FIELD_DIMENSION_WIDE_ANGLE * 1.1f,
+ IStreamIn::MIC_FIELD_DIMENSION_MAX_ZOOM * 1.1f, -INFINITY, INFINITY, -NAN, NAN},
+ &isSupported));
+ if (!isSupported) break;
+ }
+ if (!isSupported) {
+ GTEST_SKIP() << "Microphone direction is not supported";
+ }
+}
+
TEST_P(AudioStreamOut, OpenTwicePrimary) {
- const auto mixPorts = moduleConfig->getMixPorts(false);
- auto primaryPortIt = std::find_if(mixPorts.begin(), mixPorts.end(), [](const AudioPort& port) {
- return port.flags.getTag() == AudioIoFlags::Tag::output &&
- isBitPositionFlagSet(port.flags.get<AudioIoFlags::Tag::output>(),
- AudioOutputFlags::PRIMARY);
- });
- if (primaryPortIt == mixPorts.end()) {
- GTEST_SKIP() << "No primary mix port";
+ const auto mixPorts =
+ moduleConfig->getPrimaryMixPorts(true /*attachedOnly*/, true /*singlePort*/);
+ if (mixPorts.empty()) {
+ GTEST_SKIP() << "No primary mix port which could be routed to attached devices";
}
- if (moduleConfig->getAttachedSinkDevicesPortsForMixPort(*primaryPortIt).empty()) {
- GTEST_SKIP() << "Primary mix port can not be routed to any of attached devices";
- }
- const auto portConfig = moduleConfig->getSingleConfigForMixPort(false, *primaryPortIt);
+ const auto portConfig = moduleConfig->getSingleConfigForMixPort(false, *mixPorts.begin());
ASSERT_TRUE(portConfig.has_value()) << "No profiles specified for the primary mix port";
EXPECT_NO_FATAL_FAILURE(OpenTwiceSamePortConfigImpl(portConfig.value()));
}