Merge "Update EmulatedUserHal to use UserHalHelper native library."
diff --git a/audio/7.0/Android.bp b/audio/7.0/Android.bp
new file mode 100644
index 0000000..d07ce12
--- /dev/null
+++ b/audio/7.0/Android.bp
@@ -0,0 +1,25 @@
+// This file is autogenerated by hidl-gen -Landroidbp.
+
+hidl_interface {
+ name: "android.hardware.audio@7.0",
+ root: "android.hardware",
+ srcs: [
+ "types.hal",
+ "IDevice.hal",
+ "IDevicesFactory.hal",
+ "IPrimaryDevice.hal",
+ "IStream.hal",
+ "IStreamIn.hal",
+ "IStreamOut.hal",
+ "IStreamOutCallback.hal",
+ "IStreamOutEventCallback.hal",
+ ],
+ interfaces: [
+ "android.hardware.audio.common@7.0",
+ "android.hardware.audio.effect@7.0",
+ "android.hidl.base@1.0",
+ "android.hidl.safe_union@1.0",
+ ],
+ gen_java: false,
+ gen_java_constants: true,
+}
diff --git a/audio/7.0/IDevice.hal b/audio/7.0/IDevice.hal
new file mode 100644
index 0000000..eecd92e
--- /dev/null
+++ b/audio/7.0/IDevice.hal
@@ -0,0 +1,345 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio@7.0;
+
+import android.hardware.audio.common@7.0;
+import IStreamIn;
+import IStreamOut;
+
+interface IDevice {
+ /**
+ * Returns whether the audio hardware interface has been initialized.
+ *
+ * @return retval OK on success, NOT_INITIALIZED on failure.
+ */
+ initCheck() generates (Result retval);
+
+ /**
+ * Sets the audio volume for all audio activities other than voice call. If
+ * NOT_SUPPORTED is returned, the software mixer will emulate this
+ * capability.
+ *
+ * @param volume 1.0f means unity, 0.0f is zero.
+ * @return retval operation completion status.
+ */
+ setMasterVolume(float volume) generates (Result retval);
+
+ /**
+ * Get the current master volume value for the HAL, if the HAL supports
+ * master volume control. For example, AudioFlinger will query this value
+ * from the primary audio HAL when the service starts and use the value for
+ * setting the initial master volume across all HALs. HALs which do not
+ * support this method must return NOT_SUPPORTED in 'retval'.
+ *
+ * @return retval operation completion status.
+ * @return volume 1.0f means unity, 0.0f is zero.
+ */
+ getMasterVolume() generates (Result retval, float volume);
+
+ /**
+ * Sets microphone muting state.
+ *
+ * @param mute whether microphone is muted.
+ * @return retval operation completion status.
+ */
+ setMicMute(bool mute) generates (Result retval);
+
+ /**
+ * Gets whether microphone is muted.
+ *
+ * @return retval operation completion status.
+ * @return mute whether microphone is muted.
+ */
+ getMicMute() generates (Result retval, bool mute);
+
+ /**
+ * Set the audio mute status for all audio activities. If the return value
+ * is NOT_SUPPORTED, the software mixer will emulate this capability.
+ *
+ * @param mute whether audio is muted.
+ * @return retval operation completion status.
+ */
+ setMasterMute(bool mute) generates (Result retval);
+
+ /**
+ * Get the current master mute status for the HAL, if the HAL supports
+ * master mute control. AudioFlinger will query this value from the primary
+ * audio HAL when the service starts and use the value for setting the
+ * initial master mute across all HALs. HAL must indicate that the feature
+ * is not supported by returning NOT_SUPPORTED status.
+ *
+ * @return retval operation completion status.
+ * @return mute whether audio is muted.
+ */
+ getMasterMute() generates (Result retval, bool mute);
+
+ /**
+ * Returns audio input buffer size according to parameters passed or
+ * INVALID_ARGUMENTS if one of the parameters is not supported.
+ *
+ * @param config audio configuration.
+ * @return retval operation completion status.
+ * @return bufferSize input buffer size in bytes.
+ */
+ getInputBufferSize(AudioConfig config)
+ generates (Result retval, uint64_t bufferSize);
+
+ /**
+ * This method creates and opens the audio hardware output stream.
+ * If the stream cannot be opened with the proposed audio config,
+ * the HAL must provide suggested values for the audio config.
+ *
+ * @param ioHandle handle assigned by AudioFlinger.
+ * @param device device type and (if needed) address.
+ * @param config stream configuration.
+ * @param flags additional flags.
+ * @param sourceMetadata Description of the audio that will be played.
+ * May be used by implementations to configure hardware effects.
+ * @return retval operation completion status.
+ * @return outStream created output stream.
+ * @return suggestedConfig in case of invalid parameters, suggested config.
+ */
+ openOutputStream(
+ AudioIoHandle ioHandle,
+ DeviceAddress device,
+ AudioConfig config,
+ bitfield<AudioOutputFlag> flags,
+ SourceMetadata sourceMetadata) generates (
+ Result retval,
+ IStreamOut outStream,
+ AudioConfig suggestedConfig);
+
+ /**
+ * This method creates and opens the audio hardware input stream.
+ * If the stream cannot be opened with the proposed audio config,
+ * the HAL must provide suggested values for the audio config.
+ *
+ * @param ioHandle handle assigned by AudioFlinger.
+ * @param device device type and (if needed) address.
+ * @param config stream configuration.
+ * @param flags additional flags.
+ * @param sinkMetadata Description of the audio that is suggested by the client.
+ * May be used by implementations to configure processing effects.
+ * @return retval operation completion status.
+ * @return inStream in case of success, created input stream.
+ * @return suggestedConfig in case of invalid parameters, suggested config.
+ */
+ openInputStream(
+ AudioIoHandle ioHandle,
+ DeviceAddress device,
+ AudioConfig config,
+ bitfield<AudioInputFlag> flags,
+ SinkMetadata sinkMetadata) generates (
+ Result retval,
+ IStreamIn inStream,
+ AudioConfig suggestedConfig);
+
+ /**
+ * Returns whether the HAL supports audio patches. A patch represents a
+ * connection between signal source(s) and signal sink(s). If the HAL doesn't
+ * support patches natively (in hardware), the audio system will need to
+ * establish them in software.
+ *
+ * @return supports true if audio patches are supported.
+ */
+ supportsAudioPatches() generates (bool supports);
+
+ /**
+ * Creates an audio patch between several source and sink ports. The handle
+ * is allocated by the HAL and must be unique for this audio HAL module.
+ *
+ * @param sources patch sources.
+ * @param sinks patch sinks.
+ * @return retval operation completion status.
+ * @return patch created patch handle.
+ */
+ createAudioPatch(vec<AudioPortConfig> sources, vec<AudioPortConfig> sinks)
+ generates (Result retval, AudioPatchHandle patch);
+
+ /**
+ * Updates an audio patch.
+ *
+ * Use of this function is preferred to releasing and re-creating a patch
+ * as the HAL module can figure out a way of switching the route without
+ * causing audio disruption.
+ *
+ * @param previousPatch handle of the previous patch to update.
+ * @param sources new patch sources.
+ * @param sinks new patch sinks.
+ * @return retval operation completion status.
+ * @return patch updated patch handle.
+ */
+ updateAudioPatch(
+ AudioPatchHandle previousPatch,
+ vec<AudioPortConfig> sources,
+ vec<AudioPortConfig> sinks) generates (
+ Result retval, AudioPatchHandle patch);
+
+ /**
+ * Release an audio patch.
+ *
+ * @param patch patch handle.
+ * @return retval operation completion status.
+ */
+ releaseAudioPatch(AudioPatchHandle patch) generates (Result retval);
+
+ /**
+ * Returns the list of supported attributes for a given audio port.
+ *
+ * As input, 'port' contains the information (type, role, address etc...)
+ * needed by the HAL to identify the port.
+ *
+ * As output, 'resultPort' contains possible attributes (sampling rates,
+ * formats, channel masks, gain controllers...) for this port.
+ *
+ * @param port port identifier.
+ * @return retval operation completion status.
+ * @return resultPort port descriptor with all parameters filled up.
+ */
+ getAudioPort(AudioPort port)
+ generates (Result retval, AudioPort resultPort);
+
+ /**
+ * Set audio port configuration.
+ *
+ * @param config audio port configuration.
+ * @return retval operation completion status.
+ */
+ setAudioPortConfig(AudioPortConfig config) generates (Result retval);
+
+ /**
+ * Gets the HW synchronization source of the device. Calling this method is
+ * equivalent to getting AUDIO_PARAMETER_HW_AV_SYNC on the legacy HAL.
+ * Optional method
+ *
+ * @return retval operation completion status: OK or NOT_SUPPORTED.
+ * @return hwAvSync HW synchronization source
+ */
+ getHwAvSync() generates (Result retval, AudioHwSync hwAvSync);
+
+ /**
+ * Sets whether the screen is on. Calling this method is equivalent to
+ * setting AUDIO_PARAMETER_KEY_SCREEN_STATE on the legacy HAL.
+ * Optional method
+ *
+ * @param turnedOn whether the screen is turned on.
+ * @return retval operation completion status.
+ */
+ setScreenState(bool turnedOn) generates (Result retval);
+
+ /**
+ * Generic method for retrieving vendor-specific parameter values.
+ * The framework does not interpret the parameters, they are passed
+ * in an opaque manner between a vendor application and HAL.
+ *
+ * Multiple parameters can be retrieved at the same time.
+ * The implementation should return as many requested parameters
+ * as possible, even if one or more of them is not supported.
+ *
+ * @param context provides more information about the request
+ * @param keys keys of the requested parameters
+ * @return retval operation completion status.
+ * OK must be returned if keys is empty.
+ * NOT_SUPPORTED must be returned if at least one key is unknown.
+ * @return parameters parameter key value pairs.
+ * Must contain the value of all requested keys if retval == OK
+ */
+ getParameters(vec<ParameterValue> context, vec<string> keys)
+ generates (Result retval, vec<ParameterValue> parameters);
+
+ /**
+ * Generic method for setting vendor-specific parameter values.
+ * The framework does not interpret the parameters, they are passed
+ * in an opaque manner between a vendor application and HAL.
+ *
+ * Multiple parameters can be set at the same time, though this is
+ * discouraged as it makes failure analysis harder.
+ *
+ * If possible, a failed setParameters should not impact the platform state.
+ *
+ * @param context provides more information about the request
+ * @param parameters parameter key value pairs.
+ * @return retval operation completion status.
+ * All parameters must be successfully set for OK to be returned
+ */
+ setParameters(vec<ParameterValue> context, vec<ParameterValue> parameters)
+ generates (Result retval);
+
+ /**
+ * Returns an array with available microphones in device.
+ *
+ * @return retval NOT_SUPPORTED if there are no microphones on this device
+ * INVALID_STATE if the call is not successful,
+ * OK otherwise.
+ *
+ * @return microphones array with microphones info
+ */
+ getMicrophones()
+ generates(Result retval, vec<MicrophoneInfo> microphones);
+
+ /**
+ * Notifies the device module about the connection state of an input/output
+ * device attached to it. Calling this method is equivalent to setting
+ * AUDIO_PARAMETER_DEVICE_[DIS]CONNECT on the legacy HAL.
+ *
+ * @param address audio device specification.
+ * @param connected whether the device is connected.
+ * @return retval operation completion status.
+ */
+ setConnectedState(DeviceAddress address, bool connected)
+ generates (Result retval);
+
+ /**
+ * Called by the framework to deinitialize the device and free up
+ * all currently allocated resources. It is recommended to close
+ * the device on the client side as soon as it becomes unused.
+ *
+ * Note that all streams must be closed by the client before
+ * attempting to close the device they belong to.
+ *
+ * @return retval OK in case of success.
+ * INVALID_STATE if the device was already closed
+ * or there are streams currently opened.
+ */
+ close() generates (Result retval);
+
+ /**
+ * Applies an audio effect to an audio device. The effect is inserted
+ * according to its insertion preference specified by INSERT_... EffectFlags
+ * in the EffectDescriptor.
+ *
+ * @param device identifies the sink or source device this effect must be applied to.
+ * "device" is the AudioPortHandle indicated for the device when the audio
+ * patch connecting that device was created.
+ * @param effectId effect ID (obtained from IEffectsFactory.createEffect) of
+ * the effect to add.
+ * @return retval operation completion status.
+ */
+ addDeviceEffect(AudioPortHandle device, uint64_t effectId) generates (Result retval);
+
+ /**
+ * Stops applying an audio effect to an audio device.
+ *
+ * @param device identifies the sink or source device this effect was applied to.
+ * "device" is the AudioPortHandle indicated for the device when the audio
+ * patch is created at the audio HAL.
+ * @param effectId effect ID (obtained from IEffectsFactory.createEffect) of
+ * the effect.
+ * @return retval operation completion status.
+ */
+ removeDeviceEffect(AudioPortHandle device, uint64_t effectId) generates (Result retval);
+};
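
For orientation, here is a minimal client-side sketch (not part of this change) of the two HIDL calling conventions IDevice uses: methods with a single result come back through Return<>, while methods with multiple results use a generated synchronous callback. This assumes the standard HIDL-generated C++ proxy API; the helper name and volume value are illustrative, and error handling is reduced to the bare minimum.

    // Illustrative only -- not part of this change.
    #include <android/hardware/audio/7.0/IDevice.h>

    using ::android::sp;
    using ::android::hardware::Return;
    using ::android::hardware::audio::V7_0::IDevice;
    using ::android::hardware::audio::V7_0::Result;

    // 'device' is assumed to come from IDevicesFactory::openDevice().
    void adjustMasterVolume(const sp<IDevice>& device) {
        // Single result: the Result value travels inside Return<>.
        Return<Result> set = device->setMasterVolume(0.5f);
        if (!set.isOk()) return;  // Transport (binder) error.
        if (static_cast<Result>(set) == Result::NOT_SUPPORTED) {
            // The software mixer will emulate master volume instead.
        }

        // Multiple results: delivered through a generated synchronous callback.
        auto get = device->getMasterVolume([](Result retval, float volume) {
            // 'volume' is only meaningful when retval == Result::OK.
            if (retval == Result::OK) { (void)volume; }
        });
        if (!get.isOk()) return;  // Transport (binder) error.
    }
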
diff --git a/audio/7.0/IDevicesFactory.hal b/audio/7.0/IDevicesFactory.hal
new file mode 100644
index 0000000..03549b4
--- /dev/null
+++ b/audio/7.0/IDevicesFactory.hal
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio@7.0;
+
+import android.hardware.audio.common@7.0;
+import IDevice;
+import IPrimaryDevice;
+
+/** This factory allows a HAL implementation to be split into multiple independent
+ * devices (called modules in the pre-Treble API).
+ * Note that this division is arbitrary and implementations are free
+ * to only have a Primary device.
+ * The framework will query the devices according to audio_policy_configuration.xml.
+ *
+ * Each device name is arbitrary, provided by the vendor's audio_policy_configuration.xml
+ * and only used to identify a device in this factory.
+ * The framework must not interpret the name, treating it as vendor-opaque data,
+ * with the following exception:
+ * - the "r_submix" device that must be present to support policyMixes (e.g. Android projected).
+ *   Note that this device is included by default in a build derived from AOSP.
+ *
+ * Note that on AOSP Oreo (including MR1) the "a2dp" module is not using this API
+ * but is loaded directly from the system partition using the legacy API
+ * due to limitations with the Bluetooth framework.
+ */
+interface IDevicesFactory {
+
+ /**
+ * Opens an audio device. To close the device, it is necessary to release
+ * references to the returned device object.
+ *
+ * @param device device name.
+ * @return retval operation completion status. Returns INVALID_ARGUMENTS
+ * if there is no corresponding hardware module found,
+ * NOT_INITIALIZED if an error occurred while opening the hardware
+ * module.
+ * @return result the interface for the created device.
+ */
+ openDevice(string device) generates (Result retval, IDevice result);
+
+ /**
+ * Opens the Primary audio device, which must be present.
+ * This function is not optional and must successfully return the primary device.
+ *
+ * This device must have the name "primary".
+ *
+ * The telephony stack uses this device to control the audio during a voice call.
+ *
+ * @return retval operation completion status. Must be SUCCESS.
+ * For debugging, return INVALID_ARGUMENTS if there is no corresponding
+ * hardware module found, NOT_INITIALIZED if an error occurred
+ * while opening the hardware module.
+ * @return result the interface for the created device.
+ */
+ openPrimaryDevice() generates (Result retval, IPrimaryDevice result);
+};
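
A hedged usage sketch (not part of this change): obtaining the registered 7.0 factory and opening the primary device from native code, assuming the standard HIDL-generated C++ proxy API. The helper name openPrimary is illustrative.

    // Illustrative only -- not part of this change.
    #include <android/hardware/audio/7.0/IDevicesFactory.h>
    #include <android/hardware/audio/7.0/IPrimaryDevice.h>

    using ::android::sp;
    using ::android::hardware::audio::V7_0::IDevicesFactory;
    using ::android::hardware::audio::V7_0::IPrimaryDevice;
    using ::android::hardware::audio::V7_0::Result;

    sp<IPrimaryDevice> openPrimary() {
        // Binds to the default registered instance of the 7.0 factory, if any.
        sp<IDevicesFactory> factory = IDevicesFactory::getService();
        if (factory == nullptr) return nullptr;

        sp<IPrimaryDevice> primary;
        auto ret = factory->openPrimaryDevice(
                [&](Result retval, const sp<IPrimaryDevice>& result) {
                    if (retval == Result::OK) primary = result;
                });
        if (!ret.isOk()) return nullptr;  // Transport (binder) error.
        // The device is closed by releasing the last reference to it
        // (after closing its streams and calling IDevice::close()).
        return primary;
    }
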
diff --git a/audio/7.0/IPrimaryDevice.hal b/audio/7.0/IPrimaryDevice.hal
new file mode 100644
index 0000000..1427ae8
--- /dev/null
+++ b/audio/7.0/IPrimaryDevice.hal
@@ -0,0 +1,195 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio@7.0;
+
+import android.hardware.audio.common@7.0;
+import IDevice;
+
+interface IPrimaryDevice extends IDevice {
+ /**
+ * Sets the audio volume of a voice call.
+ *
+ * @param volume 1.0f means unity, 0.0f is zero.
+ * @return retval operation completion status.
+ */
+ setVoiceVolume(float volume) generates (Result retval);
+
+ /**
+ * This method is used to notify the HAL about audio mode changes.
+ *
+ * @param mode new mode.
+ * @return retval operation completion status.
+ */
+ setMode(AudioMode mode) generates (Result retval);
+
+ /**
+ * Sets the name of the current BT SCO headset. Calling this method
+ * is equivalent to setting legacy "bt_headset_name" parameter.
+ * The BT SCO headset name must only be used for debugging purposes.
+ * Optional method
+ *
+ * @param name the name of the current BT SCO headset (can be empty).
+ * @return retval operation completion status.
+ */
+ setBtScoHeadsetDebugName(string name) generates (Result retval);
+
+ /**
+ * Gets whether BT SCO Noise Reduction and Echo Cancellation are enabled.
+ * Calling this method is equivalent to getting AUDIO_PARAMETER_KEY_BT_NREC
+ * on the legacy HAL.
+ *
+ * @return retval operation completion status.
+ * @return enabled whether BT SCO NR + EC are enabled.
+ */
+ getBtScoNrecEnabled() generates (Result retval, bool enabled);
+
+ /**
+ * Sets whether BT SCO Noise Reduction and Echo Cancellation are enabled.
+ * Calling this method is equivalent to setting AUDIO_PARAMETER_KEY_BT_NREC
+ * on the legacy HAL.
+ * Optional method
+ *
+ * @param enabled whether BT SCO NR + EC are enabled.
+ * @return retval operation completion status.
+ */
+ setBtScoNrecEnabled(bool enabled) generates (Result retval);
+
+ /**
+ * Gets whether BT SCO Wideband mode is enabled. Calling this method is
+ * equivalent to getting AUDIO_PARAMETER_KEY_BT_SCO_WB on the legacy HAL.
+ *
+ * @return retval operation completion status.
+ * @return enabled whether BT Wideband is enabled.
+ */
+ getBtScoWidebandEnabled() generates (Result retval, bool enabled);
+
+ /**
+ * Sets whether BT SCO Wideband mode is enabled. Calling this method is
+ * equivalent to setting AUDIO_PARAMETER_KEY_BT_SCO_WB on the legacy HAL.
+ * Optional method
+ *
+ * @param enabled whether BT Wideband is enabled.
+ * @return retval operation completion status.
+ */
+ setBtScoWidebandEnabled(bool enabled) generates (Result retval);
+
+ /**
+ * Gets whether BT HFP (Hands-Free Profile) is enabled. Calling this method
+ * is equivalent to getting "hfp_enable" parameter value on the legacy HAL.
+ *
+ * @return retval operation completion status.
+ * @return enabled whether BT HFP is enabled.
+ */
+ getBtHfpEnabled() generates (Result retval, bool enabled);
+
+ /**
+ * Sets whether BT HFP (Hands-Free Profile) is enabled. Calling this method
+ * is equivalent to setting "hfp_enable" parameter on the legacy HAL.
+ * Optional method
+ *
+ * @param enabled whether BT HFP is enabled.
+ * @return retval operation completion status.
+ */
+ setBtHfpEnabled(bool enabled) generates (Result retval);
+
+ /**
+ * Sets the sampling rate of BT HFP (Hands-Free Profile). Calling this
+ * method is equivalent to setting "hfp_set_sampling_rate" parameter
+ * on the legacy HAL.
+ * Optional method
+ *
+ * @param sampleRateHz sample rate in Hz.
+ * @return retval operation completion status.
+ */
+ setBtHfpSampleRate(uint32_t sampleRateHz) generates (Result retval);
+
+ /**
+ * Sets the current output volume for BT HFP (Hands-Free Profile).
+ * Calling this method is equivalent to setting "hfp_volume" parameter value
+ * on the legacy HAL (except that legacy HAL implementations expect
+ * an integer value in the range from 0 to 15.)
+ * Optional method
+ *
+ * @param volume 1.0f means unity, 0.0f is zero.
+ * @return retval operation completion status.
+ */
+ setBtHfpVolume(float volume) generates (Result retval);
+
+ enum TtyMode : int32_t {
+ OFF,
+ VCO,
+ HCO,
+ FULL
+ };
+
+ /**
+ * Gets current TTY mode selection. Calling this method is equivalent to
+ * getting AUDIO_PARAMETER_KEY_TTY_MODE on the legacy HAL.
+ *
+ * @return retval operation completion status.
+ * @return mode TTY mode.
+ */
+ getTtyMode() generates (Result retval, TtyMode mode);
+
+ /**
+ * Sets current TTY mode. Calling this method is equivalent to setting
+ * AUDIO_PARAMETER_KEY_TTY_MODE on the legacy HAL.
+ *
+ * @param mode TTY mode.
+ * @return retval operation completion status.
+ */
+ setTtyMode(TtyMode mode) generates (Result retval);
+
+ /**
+ * Gets whether Hearing Aid Compatibility - Telecoil (HAC-T) mode is
+ * enabled. Calling this method is equivalent to getting
+ * AUDIO_PARAMETER_KEY_HAC on the legacy HAL.
+ *
+ * @return retval operation completion status.
+ * @return enabled whether HAC mode is enabled.
+ */
+ getHacEnabled() generates (Result retval, bool enabled);
+
+ /**
+ * Sets whether Hearing Aid Compatibility - Telecoil (HAC-T) mode is
+ * enabled. Calling this method is equivalent to setting
+ * AUDIO_PARAMETER_KEY_HAC on the legacy HAL.
+ * Optional method
+ *
+ * @param enabled whether HAC mode is enabled.
+ * @return retval operation completion status.
+ */
+ setHacEnabled(bool enabled) generates (Result retval);
+
+ enum Rotation : int32_t {
+ DEG_0,
+ DEG_90,
+ DEG_180,
+ DEG_270
+ };
+
+ /**
+ * Updates the HAL on the current rotation of the device relative to its
+ * natural orientation. Calling this method is equivalent to setting the
+ * legacy parameter "rotation".
+ *
+ * @param rotation rotation in degrees relative to natural device
+ * orientation.
+ * @return retval operation completion status.
+ */
+ updateRotation(Rotation rotation) generates (Result retval);
+};
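
A small illustrative sketch (not part of this change) of calling a setter/getter pair that uses an interface-scoped enum; 'primary' is assumed to come from IDevicesFactory::openPrimaryDevice() and the helper name is made up.

    // Illustrative only -- not part of this change.
    #include <android/hardware/audio/7.0/IPrimaryDevice.h>

    using ::android::sp;
    using ::android::hardware::audio::V7_0::IPrimaryDevice;
    using ::android::hardware::audio::V7_0::Result;

    void enableFullTty(const sp<IPrimaryDevice>& primary) {
        // Enum types declared inside the interface are scoped to it in C++.
        auto set = primary->setTtyMode(IPrimaryDevice::TtyMode::FULL);
        if (!set.isOk()) return;  // Transport (binder) error.

        auto get = primary->getTtyMode(
                [](Result retval, IPrimaryDevice::TtyMode mode) {
                    if (retval == Result::OK && mode == IPrimaryDevice::TtyMode::FULL) {
                        // TTY FULL mode is now in effect.
                    }
                });
        if (!get.isOk()) return;
    }
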
diff --git a/audio/7.0/IStream.hal b/audio/7.0/IStream.hal
new file mode 100644
index 0000000..789cb1d
--- /dev/null
+++ b/audio/7.0/IStream.hal
@@ -0,0 +1,273 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio@7.0;
+
+import android.hardware.audio.common@7.0;
+import android.hardware.audio.effect@7.0::IEffect;
+
+interface IStream {
+ /**
+ * Return the frame size (number of bytes in one audio frame).
+ *
+ * @return frameSize frame size in bytes.
+ */
+ getFrameSize() generates (uint64_t frameSize);
+
+ /**
+ * Return the frame count of the buffer. Calling this method is equivalent
+ * to getting AUDIO_PARAMETER_STREAM_FRAME_COUNT on the legacy HAL.
+ *
+ * @return count frame count.
+ */
+ getFrameCount() generates (uint64_t count);
+
+ /**
+ * Return the size of input/output buffer in bytes for this stream.
+ * It must be a multiple of the frame size.
+ *
+ * @return buffer buffer size in bytes.
+ */
+ getBufferSize() generates (uint64_t bufferSize);
+
+ /**
+ * Return supported native sampling rates of the stream for a given format.
+ * A supported native sample rate is a sample rate that can be efficiently
+ * played by the hardware (typically without sample-rate conversions).
+ *
+ * This function is only called for dynamic profiles. If called for a
+ * non-dynamic profile it should return NOT_SUPPORTED or the same list
+ * as in audio_policy_configuration.xml.
+ *
+ * Calling this method is equivalent to getting
+ * AUDIO_PARAMETER_STREAM_SUP_SAMPLING_RATES on the legacy HAL.
+ *
+ * @param format audio format for which the sample rates are supported.
+ * @return retval operation completion status.
+ * Must be OK if the format is supported.
+ * @return sampleRates supported sample rates in Hz.
+ */
+ getSupportedSampleRates(AudioFormat format)
+ generates (Result retval, vec<uint32_t> sampleRates);
+
+ /**
+ * Return supported channel masks of the stream. Calling this method is
+ * equivalent to getting AUDIO_PARAMETER_STREAM_SUP_CHANNELS on the legacy
+ * HAL.
+ *
+ * @param format audio format for which the channel masks are supported.
+ * @return retval operation completion status.
+ * Must be OK if the format is supported.
+ * @return masks supported channel masks.
+ */
+ getSupportedChannelMasks(AudioFormat format)
+ generates (Result retval, vec<vec<AudioChannelMask>> masks);
+
+ /**
+ * Return supported audio formats of the stream. Calling this method is
+ * equivalent to getting AUDIO_PARAMETER_STREAM_SUP_FORMATS on the legacy
+ * HAL.
+ *
+ * @return retval operation completion status.
+ * @return formats supported audio formats.
+ * Must be non-empty if retval is OK.
+ */
+ getSupportedFormats() generates (Result retval, vec<AudioFormat> formats);
+
+ /**
+ * Retrieves basic stream configuration: sample rate, audio format,
+ * channel mask.
+ *
+ * @return config basic stream configuration.
+ */
+ getAudioProperties() generates (AudioBasicConfig config);
+
+ /**
+ * Sets stream parameters. Only sets parameters that are specified.
+ * See the description of AudioBasicConfig for the details.
+ *
+ * Optional method. If implemented, only called on a stopped stream.
+ *
+ * @param config basic stream configuration.
+ * @return retval operation completion status.
+ */
+ setAudioProperties(AudioBasicConfig config) generates (Result retval);
+
+ /**
+ * Applies audio effect to the stream.
+ *
+ * @param effectId effect ID (obtained from IEffectsFactory.createEffect) of
+ * the effect to apply.
+ * @return retval operation completion status.
+ */
+ addEffect(uint64_t effectId) generates (Result retval);
+
+ /**
+ * Stops application of the effect to the stream.
+ *
+ * @param effectId effect ID (obtained from IEffectsFactory.createEffect) of
+ * the effect to remove.
+ * @return retval operation completion status.
+ */
+ removeEffect(uint64_t effectId) generates (Result retval);
+
+ /**
+ * Put the audio hardware input/output into standby mode.
+ * Driver must exit from standby mode at the next I/O operation.
+ *
+ * @return retval operation completion status.
+ */
+ standby() generates (Result retval);
+
+ /**
+ * Return the set of devices which this stream is connected to.
+ * Optional method
+ *
+ * @return retval operation completion status: OK or NOT_SUPPORTED.
+ * @return devices set of devices which this stream is connected to.
+ */
+ getDevices() generates (Result retval, vec<DeviceAddress> devices);
+
+ /**
+ * Connects the stream to one or multiple devices.
+ *
+ * This method must only be used for HALs that do not support
+ * 'IDevice.createAudioPatch' method. Calling this method is
+ * equivalent to setting AUDIO_PARAMETER_STREAM_ROUTING preceded
+ * with a device address in the legacy HAL interface.
+ *
+ * @param devices devices to connect the stream to.
+ * @return retval operation completion status.
+ */
+ setDevices(vec<DeviceAddress> devices) generates (Result retval);
+
+ /**
+ * Sets the HW synchronization source. Calling this method is equivalent to
+ * setting AUDIO_PARAMETER_STREAM_HW_AV_SYNC on the legacy HAL.
+ * Optional method
+ *
+ * @param hwAvSync HW synchronization source
+ * @return retval operation completion status.
+ */
+ setHwAvSync(AudioHwSync hwAvSync) generates (Result retval);
+
+ /**
+ * Generic method for retrieving vendor-specific parameter values.
+ * The framework does not interpret the parameters, they are passed
+ * in an opaque manner between a vendor application and HAL.
+ *
+ * Multiple parameters can be retrieved at the same time.
+ * The implementation should return as many requested parameters
+ * as possible, even if one or more of them is not supported.
+ *
+ * @param context provides more information about the request
+ * @param keys keys of the requested parameters
+ * @return retval operation completion status.
+ * OK must be returned if keys is empty.
+ * NOT_SUPPORTED must be returned if at least one key is unknown.
+ * @return parameters parameter key value pairs.
+ * Must contain the value of all requested keys if retval == OK
+ */
+ getParameters(vec<ParameterValue> context, vec<string> keys)
+ generates (Result retval, vec<ParameterValue> parameters);
+
+ /**
+ * Generic method for setting vendor-specific parameter values.
+ * The framework does not interpret the parameters, they are passed
+ * in an opaque manner between a vendor application and HAL.
+ *
+ * Multiple parameters can be set at the same time, though this is
+ * discouraged as it makes failure analysis harder.
+ *
+ * If possible, a failed setParameters should not impact the platform state.
+ *
+ * @param context provides more information about the request
+ * @param parameters parameter key value pairs.
+ * @return retval operation completion status.
+ * All parameters must be successfully set for OK to be returned
+ */
+ setParameters(vec<ParameterValue> context, vec<ParameterValue> parameters)
+ generates (Result retval);
+
+ /**
+ * Called by the framework to start a stream operating in mmap mode.
+ * createMmapBuffer() must be called before calling start().
+ * Function only implemented by streams operating in mmap mode.
+ *
+ * @return retval OK in case of success.
+ * NOT_SUPPORTED on non mmap mode streams
+ * INVALID_STATE if called out of sequence
+ */
+ start() generates (Result retval);
+
+ /**
+ * Called by the framework to stop a stream operating in mmap mode.
+ * Function only implemented by streams operating in mmap mode.
+ *
+ * @return retval OK in case of success.
+ * NOT_SUPPORTED on non mmap mode streams
+ * INVALID_STATE if called out of sequence
+ */
+ stop() generates (Result retval);
+
+ /**
+ * Called by the framework to retrieve information on the mmap buffer used for audio
+ * samples transfer.
+ * Function only implemented by streams operating in mmap mode.
+ *
+ * @param minSizeFrames minimum buffer size requested. The actual buffer
+ * size returned in struct MmapBufferInfo can be larger.
+ * The size must be a positive value.
+ * @return retval OK in case of success.
+ * NOT_SUPPORTED on non mmap mode streams
+ * NOT_INITIALIZED in case of memory allocation error
+ * INVALID_ARGUMENTS if the requested buffer size is invalid
+ * INVALID_STATE if called out of sequence
+ * @return info a MmapBufferInfo struct containing information on the MMAP buffer created.
+ */
+ createMmapBuffer(int32_t minSizeFrames)
+ generates (Result retval, MmapBufferInfo info);
+
+ /**
+ * Called by the framework to read current read/write position in the mmap buffer
+ * with associated time stamp.
+ * Function only implemented by streams operating in mmap mode.
+ *
+ * @return retval OK in case of success.
+ * NOT_SUPPORTED on non mmap mode streams
+ * INVALID_STATE if called out of sequence
+ * @return position a MmapPosition struct containing current HW read/write position in frames
+ * with associated time stamp.
+ */
+ getMmapPosition()
+ generates (Result retval, MmapPosition position);
+
+ /**
+ * Called by the framework to deinitialize the stream and free up
+ * all currently allocated resources. It is recommended to close
+ * the stream on the client side as soon as it becomes unused.
+ *
+ * The client must ensure that this function is not called while
+ * audio data is being transferred through the stream's message queues.
+ *
+ * @return retval OK in case of success.
+ * NOT_SUPPORTED if called on IStream instead of input or
+ * output stream interface.
+ * INVALID_STATE if the stream was already closed.
+ */
+ close() generates (Result retval);
+};
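
An illustrative sketch (not part of this change) of querying the common stream attributes defined above; it assumes the HIDL-generated C++ types and works for both input and output stream proxies. The helper name is made up.

    // Illustrative only -- not part of this change.
    #include <cstdint>

    #include <android/hardware/audio/7.0/IStream.h>
    #include <android/hardware/audio/common/7.0/types.h>

    using ::android::sp;
    using ::android::hardware::hidl_vec;
    using ::android::hardware::audio::V7_0::IStream;
    using ::android::hardware::audio::V7_0::Result;
    using ::android::hardware::audio::common::V7_0::AudioFormat;

    void dumpStreamCapabilities(const sp<IStream>& stream) {
        auto frameSize = stream->getFrameSize();
        auto bufferSize = stream->getBufferSize();
        if (frameSize.isOk() && bufferSize.isOk()) {
            // Bytes per frame and total buffer bytes (a multiple of the frame size).
            uint64_t fs = frameSize;
            uint64_t bs = bufferSize;
            (void)fs; (void)bs;
        }

        auto ret = stream->getSupportedFormats(
                [](Result retval, const hidl_vec<AudioFormat>& formats) {
                    if (retval != Result::OK) return;
                    for (const auto& format : formats) {
                        (void)format;  // Each entry is a format this stream supports.
                    }
                });
        if (!ret.isOk()) return;  // Transport (binder) error.
    }
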
diff --git a/audio/7.0/IStreamIn.hal b/audio/7.0/IStreamIn.hal
new file mode 100644
index 0000000..0a3f24b
--- /dev/null
+++ b/audio/7.0/IStreamIn.hal
@@ -0,0 +1,201 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio@7.0;
+
+import android.hardware.audio.common@7.0;
+import IStream;
+
+interface IStreamIn extends IStream {
+ /**
+ * Returns the source descriptor of the input stream. Calling this method is
+ * equivalent to getting AUDIO_PARAMETER_STREAM_INPUT_SOURCE on the legacy
+ * HAL.
+ * Optional method
+ *
+ * @return retval operation completion status.
+ * @return source audio source.
+ */
+ getAudioSource() generates (Result retval, AudioSource source);
+
+ /**
+ * Set the input gain for the audio driver.
+ * Optional method
+ *
+ * @param gain 1.0f is unity, 0.0f is zero.
+ * @return retval operation completion status.
+ */
+ setGain(float gain) generates (Result retval);
+
+ /**
+ * Commands that can be executed on the driver reader thread.
+ */
+ enum ReadCommand : int32_t {
+ READ,
+ GET_CAPTURE_POSITION
+ };
+
+ /**
+ * Data structure passed to the driver for executing commands
+ * on the driver reader thread.
+ */
+ struct ReadParameters {
+ ReadCommand command; // discriminator
+ union Params {
+ uint64_t read; // READ command, amount of bytes to read, >= 0.
+ // No parameters for GET_CAPTURE_POSITION.
+ } params;
+ };
+
+ /**
+ * Data structure passed back to the client via status message queue
+ * of 'read' operation.
+ *
+ * Possible values of 'retval' field:
+ * - OK, read operation was successful;
+ * - INVALID_ARGUMENTS, stream was not configured properly;
+ * - INVALID_STATE, stream is in a state that doesn't allow reads.
+ */
+ struct ReadStatus {
+ Result retval;
+ ReadCommand replyTo; // discriminator
+ union Reply {
+ uint64_t read; // READ command, amount of bytes read, >= 0.
+ struct CapturePosition { // same as generated by getCapturePosition.
+ uint64_t frames;
+ uint64_t time;
+ } capturePosition;
+ } reply;
+ };
+
+ /**
+ * Called when the metadata of the stream's sink has been changed.
+ * @param sinkMetadata Description of the audio that is suggested by the clients.
+ */
+ updateSinkMetadata(SinkMetadata sinkMetadata);
+
+ /**
+ * Set up required transports for receiving audio buffers from the driver.
+ *
+ * The transport consists of three message queues:
+ * -- command queue is used to instruct the reader thread what operation
+ * to perform;
+ * -- data queue is used for passing audio data from the driver
+ * to the client;
+ * -- status queue is used for reporting operation status
+ * (e.g. amount of bytes actually read or error code).
+ *
+ * The driver operates on a dedicated thread. The client must ensure that
+ * the thread is given an appropriate priority and assigned to the correct
+ * scheduler and cgroup. For this purpose, the method returns the identifier
+ * of the driver thread.
+ *
+ * @param frameSize the size of a single frame, in bytes.
+ * @param framesCount the number of frames in a buffer.
+ * @return retval OK if both message queues were created successfully.
+ * INVALID_STATE if the method was already called.
+ * INVALID_ARGUMENTS if there was a problem setting up
+ * the queues.
+ * @return commandMQ a message queue used for passing commands.
+ * @return dataMQ a message queue used for passing audio data in the format
+ * specified at the stream opening.
+ * @return statusMQ a message queue used for passing status from the driver
+ * using ReadStatus structures.
+ * @return threadId identifier of the driver's dedicated thread; the caller
+ * may adjust the thread priority to match the priority
+ * of the thread that provides audio data.
+ */
+ prepareForReading(uint32_t frameSize, uint32_t framesCount)
+ generates (
+ Result retval,
+ fmq_sync<ReadParameters> commandMQ,
+ fmq_sync<uint8_t> dataMQ,
+ fmq_sync<ReadStatus> statusMQ,
+ int32_t threadId);
+
+ /**
+ * Return the amount of input frames lost in the audio driver since the last
+ * call of this function.
+ *
+ * The audio driver is expected to reset the value to 0 and restart counting
+ * upon returning the current value from this function call. Such loss
+ * typically occurs when the user space process is blocked longer than the
+ * capacity of audio driver buffers.
+ *
+ * @return framesLost the number of input audio frames lost.
+ */
+ getInputFramesLost() generates (uint32_t framesLost);
+
+ /**
+ * Return a recent count of the number of audio frames received and the
+ * clock time associated with that frame count.
+ *
+ * @return retval INVALID_STATE if the device is not ready/available,
+ * NOT_SUPPORTED if the command is not supported,
+ * OK otherwise.
+ * @return frames the total frame count received. This must be as early in
+ * the capture pipeline as possible. In general, frames
+ * must be non-negative and must not go "backwards".
+ * @return time is the clock monotonic time when frames was measured. In
+ * general, time must be a positive quantity and must not
+ * go "backwards".
+ */
+ getCapturePosition()
+ generates (Result retval, uint64_t frames, uint64_t time);
+
+ /**
+ * Returns an array with active microphones in the stream.
+ *
+ * @return retval INVALID_STATE if the call is not successful,
+ * OK otherwise.
+ *
+ * @return microphones array with microphones info
+ */
+ getActiveMicrophones()
+ generates(Result retval, vec<MicrophoneInfo> microphones);
+
+ /**
+ * Specifies the logical microphone (for processing).
+ *
+ * If the feature is not supported, an error should be returned.
+ * If multiple microphones are present, this should be treated as a preference
+ * for their combined direction.
+ *
+ * Optional method
+ *
+ * @param direction MicrophoneDirection constant.
+ * @return retval OK if the call is successful, an error code otherwise.
+ */
+ setMicrophoneDirection(MicrophoneDirection direction)
+ generates(Result retval);
+
+ /**
+ * Specifies the zoom factor for the selected microphone (for processing).
+ *
+ * If the feature is not supported, an error should be returned.
+ * If multiple microphones are present, this should be treated as a preference
+ * for their combined field dimension.
+ *
+ * Optional method
+ *
+ * @param zoom the desired field dimension of microphone capture. Range is from -1 (wide angle),
+ * through 0 (no zoom) to 1 (maximum zoom).
+ *
+ * @return retval OK if the call is successful, an error code otherwise.
+ */
+ setMicrophoneFieldDimension(float zoom) generates(Result retval);
+};
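
A hedged sketch (not part of this change) of the reader transport described in prepareForReading: wrapping the returned FMQ descriptors and posting a READ command. It assumes the standard libfmq C++ API; the EventFlag signaling and most of the error handling that the real framework client performs are omitted, and the helper name is illustrative.

    // Illustrative only -- not part of this change.
    #include <memory>

    #include <android/hardware/audio/7.0/IStreamIn.h>
    #include <fmq/MessageQueue.h>

    using ::android::sp;
    using ::android::hardware::kSynchronizedReadWrite;
    using ::android::hardware::MessageQueue;
    using ::android::hardware::audio::V7_0::IStreamIn;
    using ::android::hardware::audio::V7_0::Result;

    using CommandMQ = MessageQueue<IStreamIn::ReadParameters, kSynchronizedReadWrite>;
    using DataMQ = MessageQueue<uint8_t, kSynchronizedReadWrite>;
    using StatusMQ = MessageQueue<IStreamIn::ReadStatus, kSynchronizedReadWrite>;

    void startReading(const sp<IStreamIn>& stream, uint32_t frameSize, uint32_t frameCount) {
        std::unique_ptr<CommandMQ> commandMQ;
        std::unique_ptr<DataMQ> dataMQ;
        std::unique_ptr<StatusMQ> statusMQ;

        auto ret = stream->prepareForReading(
                frameSize, frameCount,
                [&](Result retval, const auto& commandDesc, const auto& dataDesc,
                    const auto& statusDesc, int32_t /*threadId*/) {
                    if (retval != Result::OK) return;
                    // Wrap the returned descriptors into client-side queue objects.
                    commandMQ = std::make_unique<CommandMQ>(commandDesc);
                    dataMQ = std::make_unique<DataMQ>(dataDesc);
                    statusMQ = std::make_unique<StatusMQ>(statusDesc);
                });
        if (!ret.isOk() || !commandMQ || !dataMQ || !statusMQ) return;

        // Ask the driver thread to capture as much as currently fits in the data queue.
        IStreamIn::ReadParameters cmd;
        cmd.command = IStreamIn::ReadCommand::READ;
        cmd.params.read = dataMQ->availableToWrite();
        commandMQ->write(&cmd);
        // The HAL's reader thread fills dataMQ and reports the outcome through a
        // ReadStatus entry on statusMQ; a production client also signals and waits
        // on an EventFlag tied to the data queue rather than polling.
    }
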
diff --git a/audio/7.0/IStreamOut.hal b/audio/7.0/IStreamOut.hal
new file mode 100644
index 0000000..38d750f
--- /dev/null
+++ b/audio/7.0/IStreamOut.hal
@@ -0,0 +1,380 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio@7.0;
+
+import android.hardware.audio.common@7.0;
+import IStream;
+import IStreamOutCallback;
+import IStreamOutEventCallback;
+
+interface IStreamOut extends IStream {
+ /**
+ * Return the audio hardware driver estimated latency in milliseconds.
+ *
+ * @return latencyMs latency in milliseconds.
+ */
+ getLatency() generates (uint32_t latencyMs);
+
+ /**
+ * This method is used in situations where audio mixing is done in the
+ * hardware. It serves as a direct interface with the hardware, allowing
+ * the volume to be set directly in hardware as opposed to via the
+ * framework. The stream might produce multiple PCM outputs or use
+ * hardware accelerated codecs, such as MP3 or AAC.
+ * Optional method
+ *
+ * @param left left channel attenuation, 1.0f is unity, 0.0f is zero.
+ * @param right right channel attenuation, 1.0f is unity, 0.0f is zero.
+ * @return retval operation completion status.
+ * If a volume is outside [0,1], return INVALID_ARGUMENTS
+ */
+ setVolume(float left, float right) generates (Result retval);
+
+ /**
+ * Commands that can be executed on the driver writer thread.
+ */
+ enum WriteCommand : int32_t {
+ WRITE,
+ GET_PRESENTATION_POSITION,
+ GET_LATENCY
+ };
+
+ /**
+ * Data structure passed back to the client via status message queue
+ * of 'write' operation.
+ *
+ * Possible values of 'retval' field:
+ * - OK, write operation was successful;
+ * - INVALID_ARGUMENTS, stream was not configured properly;
+ * - INVALID_STATE, stream is in a state that doesn't allow writes;
+ * - INVALID_OPERATION, retrieving presentation position isn't supported.
+ */
+ struct WriteStatus {
+ Result retval;
+ WriteCommand replyTo; // discriminator
+ union Reply {
+ uint64_t written; // WRITE command, amount of bytes written, >= 0.
+ struct PresentationPosition { // same as generated by
+ uint64_t frames; // getPresentationPosition.
+ TimeSpec timeStamp;
+ } presentationPosition;
+ uint32_t latencyMs; // Same as generated by getLatency.
+ } reply;
+ };
+
+ /**
+ * Called when the metadata of the stream's source has been changed.
+ * @param sourceMetadata Description of the audio that is played by the clients.
+ */
+ updateSourceMetadata(SourceMetadata sourceMetadata);
+
+ /**
+ * Set up required transports for passing audio buffers to the driver.
+ *
+ * The transport consists of three message queues:
+ * -- command queue is used to instruct the writer thread what operation
+ * to perform;
+ * -- data queue is used for passing audio data from the client
+ * to the driver;
+ * -- status queue is used for reporting operation status
+ * (e.g. amount of bytes actually written or error code).
+ *
+ * The driver operates on a dedicated thread. The client must ensure that
+ * the thread is given an appropriate priority and assigned to the correct
+ * scheduler and cgroup. For this purpose, the method returns the identifier
+ * of the driver thread.
+ *
+ * @param frameSize the size of a single frame, in bytes.
+ * @param framesCount the number of frames in a buffer.
+ * @return retval OK if both message queues were created successfully.
+ * INVALID_STATE if the method was already called.
+ * INVALID_ARGUMENTS if there was a problem setting up
+ * the queues.
+ * @return commandMQ a message queue used for passing commands.
+ * @return dataMQ a message queue used for passing audio data in the format
+ * specified at the stream opening.
+ * @return statusMQ a message queue used for passing status from the driver
+ * using WriteStatus structures.
+ * @return threadId identifier of the driver's dedicated thread; the caller
+ * may adjust the thread priority to match the priority
+ * of the thread that provides audio data.
+ */
+ prepareForWriting(uint32_t frameSize, uint32_t framesCount)
+ generates (
+ Result retval,
+ fmq_sync<WriteCommand> commandMQ,
+ fmq_sync<uint8_t> dataMQ,
+ fmq_sync<WriteStatus> statusMQ,
+ int32_t threadId);
+
+ /**
+ * Return the number of audio frames written by the audio DSP to the DAC since
+ * the output has exited standby.
+ * Optional method
+ *
+ * @return retval operation completion status.
+ * @return dspFrames number of audio frames written.
+ */
+ getRenderPosition() generates (Result retval, uint32_t dspFrames);
+
+ /**
+ * Get the local time at which the next write to the audio driver will be
+ * presented. The units are microseconds, where the epoch is decided by the
+ * local audio HAL.
+ * Optional method
+ *
+ * @return retval operation completion status.
+ * @return timestampUs time of the next write.
+ */
+ getNextWriteTimestamp() generates (Result retval, int64_t timestampUs);
+
+ /**
+ * Set the callback interface for notifying completion of non-blocking
+ * write and drain.
+ *
+ * Calling this function implies that all future 'write' and 'drain'
+ * must be non-blocking and use the callback to signal completion.
+ *
+ * 'clearCallback' method needs to be called in order to release the local
+ * callback proxy on the server side and thus dereference the callback
+ * implementation on the client side.
+ *
+ * @return retval operation completion status.
+ */
+ setCallback(IStreamOutCallback callback) generates (Result retval);
+
+ /**
+ * Clears the callback previously set via 'setCallback' method.
+ *
+ * Warning: failure to call this method results in the callback implementation
+ * on the client side being held until the HAL server terminates.
+ *
+ * If no callback was previously set, the method should be a no-op
+ * and return OK.
+ *
+ * @return retval operation completion status: OK or NOT_SUPPORTED.
+ */
+ clearCallback() generates (Result retval);
+
+ /**
+ * Set the callback interface for notifying about an output stream event.
+ *
+ * Calling this method with a null pointer will result in releasing
+ * the local callback proxy on the server side and thus dereference
+ * the callback implementation on the client side.
+ *
+ * @return retval operation completion status.
+ */
+ setEventCallback(IStreamOutEventCallback callback)
+ generates (Result retval);
+
+ /**
+ * Returns whether HAL supports pausing and resuming of streams.
+ *
+ * @return supportsPause true if pausing is supported.
+ * @return supportsResume true if resume is supported.
+ */
+ supportsPauseAndResume()
+ generates (bool supportsPause, bool supportsResume);
+
+ /**
+ * Notifies the audio driver to stop playback; however, the queued buffers
+ * are retained by the hardware. Useful for implementing pause/resume. Provide
+ * an empty implementation if not supported; however, it must be implemented
+ * for hardware with non-trivial latency. In the pause state, some audio hardware may
+ * still be using power. Client code may consider calling 'suspend' after a
+ * timeout to prevent that excess power usage.
+ *
+ * Implementation of this function is mandatory for offloaded playback.
+ *
+ * @return retval operation completion status.
+ */
+ pause() generates (Result retval);
+
+ /**
+ * Notifies the audio driver to resume playback following a pause.
+ * Returns INVALID_STATE if called without a matching pause.
+ *
+ * Implementation of this function is mandatory for offloaded playback.
+ *
+ * @return retval operation completion status.
+ */
+ resume() generates (Result retval);
+
+ /**
+ * Returns whether HAL supports draining of streams.
+ *
+ * @return supports true if draining is supported.
+ */
+ supportsDrain() generates (bool supports);
+
+ /**
+ * Requests notification when data buffered by the driver/hardware has been
+ * played. If 'setCallback' has previously been called to enable
+ * non-blocking mode, then 'drain' must not block, instead it must return
+ * quickly and completion of the drain is notified through the callback. If
+ * 'setCallback' has not been called, then 'drain' must block until
+ * completion.
+ *
+ * If 'type' is 'ALL', the drain completes when all previously written data
+ * has been played.
+ *
+ * If 'type' is 'EARLY_NOTIFY', the drain completes shortly before all data
+ * for the current track has played to allow time for the framework to
+ * perform a gapless track switch.
+ *
+ * Drain must return immediately on 'stop' and 'flush' calls.
+ *
+ * Implementation of this function is mandatory for offloaded playback.
+ *
+ * @param type type of drain.
+ * @return retval operation completion status.
+ */
+ drain(AudioDrain type) generates (Result retval);
+
+ /**
+ * Notifies the audio driver to flush the queued data. The stream must
+ * already be paused before calling 'flush'.
+ * Optional method
+ *
+ * Implementation of this function is mandatory for offloaded playback.
+ *
+ * @return retval operation completion status.
+ */
+ flush() generates (Result retval);
+
+ /**
+ * Return a recent count of the number of audio frames presented to an
+ * external observer. This excludes frames which have been written but are
+ * still in the pipeline. The count is not reset to zero when output enters
+ * standby. Also returns the value of CLOCK_MONOTONIC as of this
+ * presentation count. The returned count is expected to be 'recent', but
+ * does not need to be the most recent possible value. However, the
+ * associated time must correspond to whatever count is returned.
+ *
+ * Example: assume that N+M frames have been presented, where M is a 'small'
+ * number. Then it is permissible to return N instead of N+M, and the
+ * timestamp must correspond to N rather than N+M. The terms 'recent' and
+ * 'small' are not defined. They reflect the quality of the implementation.
+ *
+ * Optional method
+ *
+ * @return retval operation completion status.
+ * @return frames count of presented audio frames.
+ * @return timeStamp associated clock time.
+ */
+ getPresentationPosition()
+ generates (Result retval, uint64_t frames, TimeSpec timeStamp);
+
+ /**
+ * Selects a presentation for decoding from a next generation media stream
+ * (as defined per ETSI TS 103 190-2) and a program within the presentation.
+ * Optional method
+ *
+ * @param presentationId selected audio presentation.
+ * @param programId refinement for the presentation.
+ * @return retval operation completion status.
+ */
+ selectPresentation(int32_t presentationId, int32_t programId)
+ generates (Result retval);
+
+ /**
+ * Returns the Dual Mono mode presentation setting.
+ *
+ * Optional method
+ *
+ * @return retval operation completion status.
+ * @return mode current setting of Dual Mono mode.
+ */
+ getDualMonoMode() generates (Result retval, DualMonoMode mode);
+
+ /**
+ * Sets the Dual Mono mode presentation on the output device.
+ *
+ * The Dual Mono mode is generally applied to stereo audio streams
+ * where the left and right channels come from separate sources.
+ *
+ * Optional method
+ *
+ * @param mode selected Dual Mono mode.
+ * @return retval operation completion status.
+ */
+ setDualMonoMode(DualMonoMode mode) generates (Result retval);
+
+ /**
+ * Returns the Audio Description Mix level in dB.
+ *
+ * The level is applied to streams incorporating a secondary Audio
+ * Description stream. It specifies the relative level of mixing for
+ * the Audio Description with a reference to the Main Audio.
+ *
+ * Optional method
+ *
+ * The value of the relative level is in the range from negative infinity
+ * to +48.
+ *
+ * @return retval operation completion status.
+ * @return leveldB the current Audio Description Mix Level in dB.
+ */
+ getAudioDescriptionMixLevel() generates (Result retval, float leveldB);
+
+ /**
+ * Sets the Audio Description Mix level in dB.
+ *
+ * For streams incorporating a secondary Audio Description stream
+ * the relative level of mixing of the Audio Description to the Main Audio
+ * is controlled by this method.
+ *
+ * Optional method
+ *
+ * The value of the relative level must be in the range from negative
+ * infinity to +48.
+ *
+ * @param leveldB Audio Description Mix Level in dB
+ * @return retval operation completion status.
+ */
+ setAudioDescriptionMixLevel(float leveldB) generates (Result retval);
+
+ /**
+ * Retrieves current playback rate parameters.
+ *
+ * Optional method
+ *
+ * @return retval operation completion status.
+ * @return playbackRate current playback parameters
+ */
+ getPlaybackRateParameters()
+ generates (Result retval, PlaybackRate playbackRate);
+
+ /**
+ * Sets the playback rate parameters that control playback behavior.
+ * This is normally used when playing encoded content and decoding
+ * is performed in hardware. Otherwise, the framework can apply
+ * necessary transformations.
+ *
+ * Optional method
+ *
+ * If the HAL supports setting the playback rate, it is recommended
+ * to support speed and pitch values at least in the range
+ * from 0.5f to 2.0f, inclusive (see the definition of PlaybackRate struct).
+ *
+ * @param playbackRate playback parameters
+ * @return retval operation completion status.
+ */
+ setPlaybackRateParameters(PlaybackRate playbackRate)
+ generates (Result retval);
+};
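
An illustrative client sketch (not part of this change) combining direct volume control with the presentation-position query documented above; the helper name is made up and error handling is minimal.

    // Illustrative only -- not part of this change.
    #include <android/hardware/audio/7.0/IStreamOut.h>

    using ::android::sp;
    using ::android::hardware::audio::V7_0::IStreamOut;
    using ::android::hardware::audio::V7_0::Result;

    void logPlaybackProgress(const sp<IStreamOut>& streamOut) {
        // Direct hardware volume control for hardware-mixed streams; values
        // outside [0, 1] must be rejected with INVALID_ARGUMENTS.
        auto vol = streamOut->setVolume(1.0f /*left*/, 1.0f /*right*/);
        if (!vol.isOk()) return;  // Transport (binder) error.

        auto pos = streamOut->getPresentationPosition(
                [](Result retval, uint64_t frames, const auto& timeStamp) {
                    if (retval == Result::OK) {
                        // 'frames' counts frames presented to an external observer;
                        // 'timeStamp' is the CLOCK_MONOTONIC time of that count.
                        (void)frames;
                        (void)timeStamp;
                    }
                });
        if (!pos.isOk()) return;
    }
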
diff --git a/audio/7.0/IStreamOutCallback.hal b/audio/7.0/IStreamOutCallback.hal
new file mode 100644
index 0000000..7b9d47f
--- /dev/null
+++ b/audio/7.0/IStreamOutCallback.hal
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio@7.0;
+
+/**
+ * Asynchronous write callback interface.
+ */
+interface IStreamOutCallback {
+ /**
+ * Non-blocking write completed.
+ */
+ oneway onWriteReady();
+
+ /**
+ * Drain completed.
+ */
+ oneway onDrainReady();
+
+ /**
+ * Stream hit an error.
+ */
+ oneway onError();
+};
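
A minimal sketch (not part of this change) of a client-side implementation of this callback, registered through IStreamOut::setCallback() to enable non-blocking write/drain; the class name is illustrative.

    // Illustrative only -- not part of this change.
    #include <android/hardware/audio/7.0/IStreamOutCallback.h>

    using ::android::hardware::Return;
    using ::android::hardware::Void;
    using ::android::hardware::audio::V7_0::IStreamOutCallback;

    struct StreamOutCallback : public IStreamOutCallback {
        // All three notifications are oneway: they must return promptly and must
        // not call back into the HAL synchronously.
        Return<void> onWriteReady() override {
            // More room is available; wake the writer thread.
            return Void();
        }
        Return<void> onDrainReady() override {
            // A previously requested drain has completed.
            return Void();
        }
        Return<void> onError() override {
            // The stream hit an error; the client should tear the stream down.
            return Void();
        }
    };

    // Registration (remember to call IStreamOut::clearCallback() before releasing
    // the stream, so the server drops its proxy to this object):
    //     streamOut->setCallback(new StreamOutCallback());
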
diff --git a/audio/7.0/IStreamOutEventCallback.hal b/audio/7.0/IStreamOutEventCallback.hal
new file mode 100644
index 0000000..52e65d3
--- /dev/null
+++ b/audio/7.0/IStreamOutEventCallback.hal
@@ -0,0 +1,140 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio@7.0;
+
+/**
+ * Asynchronous stream out event callback interface. The interface provides
+ * a way for the HAL to notify the platform when there are changes, e.g. codec
+ * format change, from the lower layer.
+ */
+interface IStreamOutEventCallback {
+ /**
+ * Codec format changed.
+ *
+ * onCodecFormatChanged returns an AudioMetadata object in read-only ByteString format.
+ * It represents the most recent codec format decoded by a HW audio decoder.
+ *
+ * Codec format is an optional message from HW audio decoders. It serves to
+ * notify the application about the codec format and audio objects contained
+ * within the compressed audio stream for control, informational,
+ * and display purposes.
+ *
+ * audioMetadata ByteString is convertible to an AudioMetadata object through
+ * both a C++ and a C API present in Metadata.h [1], or through a Java API present
+ * in AudioMetadata.java [2].
+ *
+ * The ByteString format is a stable format used for parcelling (marshalling) across
+ * JNI, AIDL, and HIDL interfaces. The test for R compatibility for native marshalling
+ * is TEST(metadata_tests, compatibility_R) [3]. The test for R compatibility for JNI
+ * marshalling is android.media.cts.AudioMetadataTest#testCompatibilityR [4].
+ *
+ * R (audio HAL 7.0) defined keys are as follows [2]:
+ * "bitrate", int32
+ * "channel-mask", int32
+ * "mime", string
+ * "sample-rate", int32
+ * "bit-width", int32
+ * "has-atmos", int32
+ * "audio-encoding", int32
+ *
+ * Parceling Format:
+ * All values are native endian order. [1]
+ *
+ * using type_size_t = uint32_t;
+ * using index_size_t = uint32_t;
+ * using datum_size_t = uint32_t;
+ *
+ * Permitted type indexes are
+ * TYPE_NONE = 0, // Reserved
+ * TYPE_INT32 = 1,
+ * TYPE_INT64 = 2,
+ * TYPE_FLOAT = 3,
+ * TYPE_DOUBLE = 4,
+ * TYPE_STRING = 5,
+ * TYPE_DATA = 6, // A data table of <String, Datum>
+ *
+ * Datum = {
+ * (type_size_t) Type (the type index from type_as_value<T>.)
+ * (datum_size_t) Size (size of the Payload)
+ * (byte string) Payload<Type>
+ * }
+ *
+ * The data is specified in native endian order.
+ * Since the size of the Payload is always present, unknown types may be skipped.
+ *
+ * Payload<Fixed-size Primitive_Value>
+ * [ sizeof(Primitive_Value) in raw bytes ]
+ *
+ * Example of Payload<Int32> of 123:
+ * Payload<Int32>
+ * [ value of 123 ] = 0x7b 0x00 0x00 0x00 123
+ *
+ * Payload<String>
+ * [ (index_size_t) length, not including zero terminator.]
+ * [ (length) raw bytes ]
+ *
+ * Example of Payload<String> of std::string("hi"):
+ * [ (index_size_t) length ] = 0x02 0x00 0x00 0x00 2 strlen("hi")
+ * [ raw bytes "hi" ] = 0x68 0x69 "hi"
+ *
+ * Payload<Data>
+ * [ (index_size_t) entries ]
+ * [ raw bytes (entry 1) Key (Payload<String>)
+ * Value (Datum)
+ * ... (until #entries) ]
+ *
+ * Example of Payload<Data> of {{"hello", "world"},
+ * {"value", (int32_t)1000}};
+ * [ (index_size_t) #entries ] = 0x02 0x00 0x00 0x00 2 entries
+ * Key (Payload<String>)
+ * [ index_size_t length ] = 0x05 0x00 0x00 0x00 5 strlen("hello")
+ * [ raw bytes "hello" ] = 0x68 0x65 0x6c 0x6c 0x6f "hello"
+ * Value (Datum)
+ * [ (type_size_t) type ] = 0x05 0x00 0x00 0x00 5 (TYPE_STRING)
+ * [ (datum_size_t) size ] = 0x09 0x00 0x00 0x00 sizeof(index_size_t) +
+ * strlen("world")
+ * Payload<String>
+ * [ (index_size_t) length ] = 0x05 0x00 0x00 0x00 5 strlen("world")
+ * [ raw bytes "world" ] = 0x77 0x6f 0x72 0x6c 0x64 "world"
+ * Key (Payload<String>)
+ * [ index_size_t length ] = 0x05 0x00 0x00 0x00 5 strlen("value")
+ * [ raw bytes "value" ] = 0x76 0x61 0x6c 0x75 0x65 "value"
+ * Value (Datum)
+ * [ (type_size_t) type ] = 0x01 0x00 0x00 0x00 1 (TYPE_INT32)
+ * [ (datum_size_t) size ] = 0x04 0x00 0x00 0x00 4 sizeof(int32_t)
+ * Payload<Int32>
+ * [ raw bytes 1000 ] = 0xe8 0x03 0x00 0x00 1000
+ *
+ * The contents of audioMetadata is a Payload<Data>.
+ * An implementation-dependent detail is that the Keys are always
+ * stored sorted, so the generated byte string representation is unique.
+ *
+ * Vendor keys are allowed for informational and debugging purposes.
+ * Vendor keys should consist of the vendor company name followed
+ * by a dot; for example, "vendorCompany.someVolume" [2].
+ *
+ * [1] system/media/audio_utils/include/audio_utils/Metadata.h
+ * [2] frameworks/base/media/java/android/media/AudioMetadata.java
+ * [3] system/media/audio_utils/tests/metadata_tests.cpp
+ * [4] cts/tests/tests/media/src/android/media/cts/AudioMetadataTest.java
+ *
+ * @param audioMetadata is a buffer containing decoded format changes
+ * reported by the codec. The buffer contains data that can be transformed
+ * into audio metadata, which is a C++ object-based map.
+ */
+ oneway onCodecFormatChanged(vec<uint8_t> audioMetadata);
+};
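
For reference, the parceling layout documented above can be walked with a small amount of code. The following Java sketch is illustrative only: it assumes a well-formed, native-endian buffer and UTF-8 string payloads, and the class and method names (AudioMetadataSketch, readDatum, ...) are hypothetical. In practice the converters referenced above, Metadata.h [1] and AudioMetadata.java [2], should be used instead.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

public final class AudioMetadataSketch {
    // Type indexes from the table above.
    private static final int TYPE_INT32 = 1;
    private static final int TYPE_INT64 = 2;
    private static final int TYPE_FLOAT = 3;
    private static final int TYPE_DOUBLE = 4;
    private static final int TYPE_STRING = 5;
    private static final int TYPE_DATA = 6;

    /** Decodes the audioMetadata argument of onCodecFormatChanged into a map. */
    public static Map<String, Object> decode(byte[] audioMetadata) {
        ByteBuffer buf = ByteBuffer.wrap(audioMetadata).order(ByteOrder.nativeOrder());
        return readData(buf);
    }

    // Payload<Data>: [(index_size_t) #entries][Key (Payload<String>), Value (Datum), ...]
    private static Map<String, Object> readData(ByteBuffer buf) {
        int entries = buf.getInt();
        Map<String, Object> map = new HashMap<>();
        for (int i = 0; i < entries; i++) {
            String key = readString(buf);
            map.put(key, readDatum(buf));
        }
        return map;
    }

    // Payload<String>: [(index_size_t) length][length raw bytes], no zero terminator.
    private static String readString(ByteBuffer buf) {
        byte[] raw = new byte[buf.getInt()];
        buf.get(raw);
        return new String(raw, StandardCharsets.UTF_8);  // assumption: UTF-8 content
    }

    // Datum: [(type_size_t) type][(datum_size_t) size][Payload<Type>].
    private static Object readDatum(ByteBuffer buf) {
        int type = buf.getInt();
        int size = buf.getInt();
        switch (type) {
            case TYPE_INT32:  return buf.getInt();
            case TYPE_INT64:  return buf.getLong();
            case TYPE_FLOAT:  return buf.getFloat();
            case TYPE_DOUBLE: return buf.getDouble();
            case TYPE_STRING: return readString(buf);
            case TYPE_DATA:   return readData(buf);
            default:
                buf.position(buf.position() + size);  // size lets unknown types be skipped
                return null;
        }
    }
}
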
diff --git a/audio/7.0/config/Android.bp b/audio/7.0/config/Android.bp
new file mode 100644
index 0000000..015c424
--- /dev/null
+++ b/audio/7.0/config/Android.bp
@@ -0,0 +1,5 @@
+xsd_config {
+ name: "audio_policy_configuration_V7_0",
+ srcs: ["audio_policy_configuration.xsd"],
+ package_name: "audio.policy.configuration.V7_0",
+}
diff --git a/audio/7.0/config/api/current.txt b/audio/7.0/config/api/current.txt
new file mode 100644
index 0000000..fd9a8ef
--- /dev/null
+++ b/audio/7.0/config/api/current.txt
@@ -0,0 +1,534 @@
+// Signature format: 2.0
+package audio.policy.configuration.V7_0 {
+
+ public class AttachedDevices {
+ ctor public AttachedDevices();
+ method public java.util.List<java.lang.String> getItem();
+ }
+
+ public enum AudioChannelMask {
+ method public String getRawName();
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_1;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_10;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_11;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_12;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_13;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_14;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_15;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_16;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_17;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_18;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_19;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_20;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_21;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_22;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_23;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_24;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_3;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_4;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_5;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_6;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_7;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_8;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_9;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_2POINT0POINT2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_2POINT1POINT2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_3POINT0POINT2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_3POINT1POINT2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_5POINT1;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_6;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_FRONT_BACK;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_MONO;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_STEREO;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_VOICE_CALL_MONO;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_2POINT0POINT2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_2POINT1;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_2POINT1POINT2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_3POINT0POINT2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_3POINT1POINT2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1POINT2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1POINT4;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1_BACK;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1_SIDE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_6POINT1;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_7POINT1;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_7POINT1POINT2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_7POINT1POINT4;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_HAPTIC_AB;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_MONO;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_MONO_HAPTIC_A;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_MONO_HAPTIC_AB;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_PENTA;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_QUAD;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_QUAD_BACK;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_QUAD_SIDE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_STEREO;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_STEREO_HAPTIC_A;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_STEREO_HAPTIC_AB;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioChannelMask AUDIO_CHANNEL_OUT_SURROUND;
+ }
+
+ public enum AudioContentType {
+ method public String getRawName();
+ enum_constant public static final audio.policy.configuration.V7_0.AudioContentType AUDIO_CONTENT_TYPE_MOVIE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioContentType AUDIO_CONTENT_TYPE_MUSIC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioContentType AUDIO_CONTENT_TYPE_SONIFICATION;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioContentType AUDIO_CONTENT_TYPE_SPEECH;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioContentType AUDIO_CONTENT_TYPE_UNKNOWN;
+ }
+
+ public enum AudioDevice {
+ method public String getRawName();
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_AMBIENT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_AUX_DIGITAL;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_BACK_MIC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_BLUETOOTH_A2DP;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_BLUETOOTH_BLE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_BUILTIN_MIC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_BUS;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_COMMUNICATION;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_DEFAULT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_ECHO_REFERENCE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_FM_TUNER;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_HDMI;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_HDMI_ARC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_IP;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_LINE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_LOOPBACK;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_PROXY;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_REMOTE_SUBMIX;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_SPDIF;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_STUB;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_TELEPHONY_RX;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_TV_TUNER;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_USB_ACCESSORY;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_USB_DEVICE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_USB_HEADSET;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_VOICE_CALL;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_IN_WIRED_HEADSET;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_NONE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_AUX_DIGITAL;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_AUX_LINE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_SCO;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_BUS;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_DEFAULT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_EARPIECE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_ECHO_CANCELLER;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_FM;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_HDMI;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_HDMI_ARC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_HEARING_AID;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_IP;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_LINE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_PROXY;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_SPDIF;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_SPEAKER;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_SPEAKER_SAFE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_STUB;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_TELEPHONY_TX;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_USB_ACCESSORY;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_USB_DEVICE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_USB_HEADSET;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_WIRED_HEADPHONE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioDevice AUDIO_DEVICE_OUT_WIRED_HEADSET;
+ }
+
+ public enum AudioFormat {
+ method public String getRawName();
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ADIF;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ADTS;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ADTS_ELD;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ADTS_ERLC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ADTS_HE_V1;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ADTS_HE_V2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ADTS_LC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ADTS_LD;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ADTS_LTP;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ADTS_MAIN;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ADTS_SCALABLE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ADTS_SSR;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ADTS_XHE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ELD;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_ERLC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_HE_V1;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_HE_V2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_LATM;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_LATM_HE_V1;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_LATM_HE_V2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_LATM_LC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_LC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_LD;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_LTP;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_MAIN;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_SCALABLE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_SSR;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AAC_XHE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AC3;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AC4;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_ALAC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AMR_NB;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AMR_WB;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_AMR_WB_PLUS;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_APE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_APTX;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_APTX_ADAPTIVE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_APTX_HD;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_APTX_TWSP;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_CELT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_DEFAULT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_DOLBY_TRUEHD;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_DSD;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_DTS;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_DTS_HD;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_EVRC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_EVRCB;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_EVRCNW;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_EVRCWB;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_E_AC3;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_E_AC3_JOC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_FLAC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_HE_AAC_V1;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_HE_AAC_V2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_IEC61937;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_LDAC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_LHDC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_LHDC_LL;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_MAT_1_0;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_MAT_2_0;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_MAT_2_1;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_MP2;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_MP3;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_OPUS;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_PCM_16_BIT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_PCM_24_BIT_PACKED;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_PCM_32_BIT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_PCM_8_24_BIT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_PCM_8_BIT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_PCM_FLOAT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_QCELP;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_SBC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_VORBIS;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_WMA;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioFormat AUDIO_FORMAT_WMA_PRO;
+ }
+
+ public class AudioPolicyConfiguration {
+ ctor public AudioPolicyConfiguration();
+ method public audio.policy.configuration.V7_0.GlobalConfiguration getGlobalConfiguration();
+ method public java.util.List<audio.policy.configuration.V7_0.Modules> getModules();
+ method public audio.policy.configuration.V7_0.SurroundSound getSurroundSound();
+ method public audio.policy.configuration.V7_0.Version getVersion();
+ method public java.util.List<audio.policy.configuration.V7_0.Volumes> getVolumes();
+ method public void setGlobalConfiguration(audio.policy.configuration.V7_0.GlobalConfiguration);
+ method public void setSurroundSound(audio.policy.configuration.V7_0.SurroundSound);
+ method public void setVersion(audio.policy.configuration.V7_0.Version);
+ }
+
+ public enum AudioSource {
+ method public String getRawName();
+ enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_CAMCORDER;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_DEFAULT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_ECHO_REFERENCE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_FM_TUNER;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_HOTWORD;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_MIC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_REMOTE_SUBMIX;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_UNPROCESSED;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_VOICE_CALL;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_VOICE_COMMUNICATION;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_VOICE_DOWNLINK;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_VOICE_PERFORMANCE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_VOICE_RECOGNITION;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioSource AUDIO_SOURCE_VOICE_UPLINK;
+ }
+
+ public enum AudioStreamType {
+ method public String getRawName();
+ enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_ACCESSIBILITY;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_ALARM;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_ASSISTANT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_BLUETOOTH_SCO;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_DTMF;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_ENFORCED_AUDIBLE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_MUSIC;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_NOTIFICATION;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_PATCH;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_REROUTING;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_RING;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_SYSTEM;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_TTS;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioStreamType AUDIO_STREAM_VOICE_CALL;
+ }
+
+ public enum AudioUsage {
+ method public String getRawName();
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_ALARM;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_ANNOUNCEMENT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_ASSISTANCE_SONIFICATION;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_ASSISTANT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_CALL_ASSISTANT;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_EMERGENCY;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_GAME;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_MEDIA;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_NOTIFICATION;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_SAFETY;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_UNKNOWN;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_VEHICLE_STATUS;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_VIRTUAL_SOURCE;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_VOICE_COMMUNICATION;
+ enum_constant public static final audio.policy.configuration.V7_0.AudioUsage AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
+ }
+
+ public enum DeviceCategory {
+ method public String getRawName();
+ enum_constant public static final audio.policy.configuration.V7_0.DeviceCategory DEVICE_CATEGORY_EARPIECE;
+ enum_constant public static final audio.policy.configuration.V7_0.DeviceCategory DEVICE_CATEGORY_EXT_MEDIA;
+ enum_constant public static final audio.policy.configuration.V7_0.DeviceCategory DEVICE_CATEGORY_HEADSET;
+ enum_constant public static final audio.policy.configuration.V7_0.DeviceCategory DEVICE_CATEGORY_HEARING_AID;
+ enum_constant public static final audio.policy.configuration.V7_0.DeviceCategory DEVICE_CATEGORY_SPEAKER;
+ }
+
+ public class DevicePorts {
+ ctor public DevicePorts();
+ method public java.util.List<audio.policy.configuration.V7_0.DevicePorts.DevicePort> getDevicePort();
+ }
+
+ public static class DevicePorts.DevicePort {
+ ctor public DevicePorts.DevicePort();
+ method public String getAddress();
+ method public java.util.List<audio.policy.configuration.V7_0.AudioFormat> getEncodedFormats();
+ method public audio.policy.configuration.V7_0.Gains getGains();
+ method public java.util.List<audio.policy.configuration.V7_0.Profile> getProfile();
+ method public audio.policy.configuration.V7_0.Role getRole();
+ method public String getTagName();
+ method public String getType();
+ method public boolean get_default();
+ method public void setAddress(String);
+ method public void setEncodedFormats(java.util.List<audio.policy.configuration.V7_0.AudioFormat>);
+ method public void setGains(audio.policy.configuration.V7_0.Gains);
+ method public void setRole(audio.policy.configuration.V7_0.Role);
+ method public void setTagName(String);
+ method public void setType(String);
+ method public void set_default(boolean);
+ }
+
+ public enum EngineSuffix {
+ method public String getRawName();
+ enum_constant public static final audio.policy.configuration.V7_0.EngineSuffix _default;
+ enum_constant public static final audio.policy.configuration.V7_0.EngineSuffix configurable;
+ }
+
+ public enum GainMode {
+ method public String getRawName();
+ enum_constant public static final audio.policy.configuration.V7_0.GainMode AUDIO_GAIN_MODE_CHANNELS;
+ enum_constant public static final audio.policy.configuration.V7_0.GainMode AUDIO_GAIN_MODE_JOINT;
+ enum_constant public static final audio.policy.configuration.V7_0.GainMode AUDIO_GAIN_MODE_RAMP;
+ }
+
+ public class Gains {
+ ctor public Gains();
+ method public java.util.List<audio.policy.configuration.V7_0.Gains.Gain> getGain();
+ }
+
+ public static class Gains.Gain {
+ ctor public Gains.Gain();
+ method public audio.policy.configuration.V7_0.AudioChannelMask getChannel_mask();
+ method public int getDefaultValueMB();
+ method public int getMaxRampMs();
+ method public int getMaxValueMB();
+ method public int getMinRampMs();
+ method public int getMinValueMB();
+ method public audio.policy.configuration.V7_0.GainMode getMode();
+ method public String getName();
+ method public int getStepValueMB();
+ method public boolean getUseForVolume();
+ method public void setChannel_mask(audio.policy.configuration.V7_0.AudioChannelMask);
+ method public void setDefaultValueMB(int);
+ method public void setMaxRampMs(int);
+ method public void setMaxValueMB(int);
+ method public void setMinRampMs(int);
+ method public void setMinValueMB(int);
+ method public void setMode(audio.policy.configuration.V7_0.GainMode);
+ method public void setName(String);
+ method public void setStepValueMB(int);
+ method public void setUseForVolume(boolean);
+ }
+
+ public class GlobalConfiguration {
+ ctor public GlobalConfiguration();
+ method public boolean getCall_screen_mode_supported();
+ method public audio.policy.configuration.V7_0.EngineSuffix getEngine_library();
+ method public boolean getSpeaker_drc_enabled();
+ method public void setCall_screen_mode_supported(boolean);
+ method public void setEngine_library(audio.policy.configuration.V7_0.EngineSuffix);
+ method public void setSpeaker_drc_enabled(boolean);
+ }
+
+ public enum HalVersion {
+ method public String getRawName();
+ enum_constant public static final audio.policy.configuration.V7_0.HalVersion _2_0;
+ enum_constant public static final audio.policy.configuration.V7_0.HalVersion _3_0;
+ }
+
+ public class MixPorts {
+ ctor public MixPorts();
+ method public java.util.List<audio.policy.configuration.V7_0.MixPorts.MixPort> getMixPort();
+ }
+
+ public static class MixPorts.MixPort {
+ ctor public MixPorts.MixPort();
+ method public String getFlags();
+ method public audio.policy.configuration.V7_0.Gains getGains();
+ method public long getMaxActiveCount();
+ method public long getMaxOpenCount();
+ method public String getName();
+ method public java.util.List<audio.policy.configuration.V7_0.AudioUsage> getPreferredUsage();
+ method public java.util.List<audio.policy.configuration.V7_0.Profile> getProfile();
+ method public audio.policy.configuration.V7_0.Role getRole();
+ method public void setFlags(String);
+ method public void setGains(audio.policy.configuration.V7_0.Gains);
+ method public void setMaxActiveCount(long);
+ method public void setMaxOpenCount(long);
+ method public void setName(String);
+ method public void setPreferredUsage(java.util.List<audio.policy.configuration.V7_0.AudioUsage>);
+ method public void setRole(audio.policy.configuration.V7_0.Role);
+ }
+
+ public enum MixType {
+ method public String getRawName();
+ enum_constant public static final audio.policy.configuration.V7_0.MixType mix;
+ enum_constant public static final audio.policy.configuration.V7_0.MixType mux;
+ }
+
+ public class Modules {
+ ctor public Modules();
+ method public java.util.List<audio.policy.configuration.V7_0.Modules.Module> getModule();
+ }
+
+ public static class Modules.Module {
+ ctor public Modules.Module();
+ method public audio.policy.configuration.V7_0.AttachedDevices getAttachedDevices();
+ method public String getDefaultOutputDevice();
+ method public audio.policy.configuration.V7_0.DevicePorts getDevicePorts();
+ method public audio.policy.configuration.V7_0.HalVersion getHalVersion();
+ method public audio.policy.configuration.V7_0.MixPorts getMixPorts();
+ method public String getName();
+ method public audio.policy.configuration.V7_0.Routes getRoutes();
+ method public void setAttachedDevices(audio.policy.configuration.V7_0.AttachedDevices);
+ method public void setDefaultOutputDevice(String);
+ method public void setDevicePorts(audio.policy.configuration.V7_0.DevicePorts);
+ method public void setHalVersion(audio.policy.configuration.V7_0.HalVersion);
+ method public void setMixPorts(audio.policy.configuration.V7_0.MixPorts);
+ method public void setName(String);
+ method public void setRoutes(audio.policy.configuration.V7_0.Routes);
+ }
+
+ public class Profile {
+ ctor public Profile();
+ method public java.util.List<audio.policy.configuration.V7_0.AudioChannelMask> getChannelMasks();
+ method public String getFormat();
+ method public String getName();
+ method public java.util.List<java.math.BigInteger> getSamplingRates();
+ method public void setChannelMasks(java.util.List<audio.policy.configuration.V7_0.AudioChannelMask>);
+ method public void setFormat(String);
+ method public void setName(String);
+ method public void setSamplingRates(java.util.List<java.math.BigInteger>);
+ }
+
+ public class Reference {
+ ctor public Reference();
+ method public String getName();
+ method public java.util.List<java.lang.String> getPoint();
+ method public void setName(String);
+ }
+
+ public enum Role {
+ method public String getRawName();
+ enum_constant public static final audio.policy.configuration.V7_0.Role sink;
+ enum_constant public static final audio.policy.configuration.V7_0.Role source;
+ }
+
+ public class Routes {
+ ctor public Routes();
+ method public java.util.List<audio.policy.configuration.V7_0.Routes.Route> getRoute();
+ }
+
+ public static class Routes.Route {
+ ctor public Routes.Route();
+ method public String getSink();
+ method public String getSources();
+ method public audio.policy.configuration.V7_0.MixType getType();
+ method public void setSink(String);
+ method public void setSources(String);
+ method public void setType(audio.policy.configuration.V7_0.MixType);
+ }
+
+ public class SurroundFormats {
+ ctor public SurroundFormats();
+ method public java.util.List<audio.policy.configuration.V7_0.SurroundFormats.Format> getFormat();
+ }
+
+ public static class SurroundFormats.Format {
+ ctor public SurroundFormats.Format();
+ method public audio.policy.configuration.V7_0.AudioFormat getName();
+ method public java.util.List<audio.policy.configuration.V7_0.AudioFormat> getSubformats();
+ method public void setName(audio.policy.configuration.V7_0.AudioFormat);
+ method public void setSubformats(java.util.List<audio.policy.configuration.V7_0.AudioFormat>);
+ }
+
+ public class SurroundSound {
+ ctor public SurroundSound();
+ method public audio.policy.configuration.V7_0.SurroundFormats getFormats();
+ method public void setFormats(audio.policy.configuration.V7_0.SurroundFormats);
+ }
+
+ public enum Version {
+ method public String getRawName();
+ enum_constant public static final audio.policy.configuration.V7_0.Version _1_0;
+ }
+
+ public class Volume {
+ ctor public Volume();
+ method public audio.policy.configuration.V7_0.DeviceCategory getDeviceCategory();
+ method public java.util.List<java.lang.String> getPoint();
+ method public String getRef();
+ method public audio.policy.configuration.V7_0.AudioStreamType getStream();
+ method public void setDeviceCategory(audio.policy.configuration.V7_0.DeviceCategory);
+ method public void setRef(String);
+ method public void setStream(audio.policy.configuration.V7_0.AudioStreamType);
+ }
+
+ public class Volumes {
+ ctor public Volumes();
+ method public java.util.List<audio.policy.configuration.V7_0.Reference> getReference();
+ method public java.util.List<audio.policy.configuration.V7_0.Volume> getVolume();
+ }
+
+ public class XmlParser {
+ ctor public XmlParser();
+ method public static audio.policy.configuration.V7_0.AudioPolicyConfiguration read(java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException;
+ method public static String readText(org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
+ method public static void skip(org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
+ }
+
+}
+
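
The generated Java API listed above can be exercised directly. The following minimal sketch loads a configuration with XmlParser.read() and prints each module; it is illustrative only, the file name is hypothetical (on a device the framework locates the policy file itself), and error handling is reduced to propagating exceptions.

import audio.policy.configuration.V7_0.AudioPolicyConfiguration;
import audio.policy.configuration.V7_0.Modules;
import audio.policy.configuration.V7_0.XmlParser;

import java.io.FileInputStream;
import java.io.InputStream;

public final class PolicyConfigDump {
    public static void main(String[] args) throws Exception {
        // Hypothetical path; on a device the framework locates the policy file itself.
        try (InputStream in = new FileInputStream("audio_policy_configuration.xml")) {
            AudioPolicyConfiguration config = XmlParser.read(in);
            System.out.println("version=" + config.getVersion().getRawName());
            for (Modules modules : config.getModules()) {
                for (Modules.Module module : modules.getModule()) {
                    System.out.println("module " + module.getName()
                            + " halVersion=" + module.getHalVersion().getRawName());
                }
            }
        }
    }
}
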
diff --git a/audio/7.0/config/api/last_current.txt b/audio/7.0/config/api/last_current.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/audio/7.0/config/api/last_current.txt
diff --git a/audio/7.0/config/api/last_removed.txt b/audio/7.0/config/api/last_removed.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/audio/7.0/config/api/last_removed.txt
diff --git a/audio/7.0/config/api/removed.txt b/audio/7.0/config/api/removed.txt
new file mode 100644
index 0000000..d802177
--- /dev/null
+++ b/audio/7.0/config/api/removed.txt
@@ -0,0 +1 @@
+// Signature format: 2.0
diff --git a/audio/7.0/config/audio_policy_configuration.xsd b/audio/7.0/config/audio_policy_configuration.xsd
new file mode 100644
index 0000000..4555a88
--- /dev/null
+++ b/audio/7.0/config/audio_policy_configuration.xsd
@@ -0,0 +1,748 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<xs:schema version="2.0"
+ elementFormDefault="qualified"
+ attributeFormDefault="unqualified"
+ xmlns:xs="http://www.w3.org/2001/XMLSchema">
+ <!-- List the config versions supported by audio policy. -->
+ <xs:simpleType name="version">
+ <xs:restriction base="xs:decimal">
+ <xs:enumeration value="1.0"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="halVersion">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ Version of the interface the hal implements. Note that this
+ relates to legacy HAL API versions since HIDL APIs are versioned
+ using other mechanisms.
+ </xs:documentation>
+ </xs:annotation>
+ <xs:restriction base="xs:decimal">
+ <!-- List of HAL versions supported by the framework. -->
+ <xs:enumeration value="2.0"/>
+ <xs:enumeration value="3.0"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:element name="audioPolicyConfiguration">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element name="globalConfiguration" type="globalConfiguration"/>
+ <xs:element name="modules" type="modules" maxOccurs="unbounded"/>
+ <xs:element name="volumes" type="volumes" maxOccurs="unbounded"/>
+ <xs:element name="surroundSound" type="surroundSound" minOccurs="0" />
+ </xs:sequence>
+ <xs:attribute name="version" type="version"/>
+ </xs:complexType>
+ <xs:key name="moduleNameKey">
+ <xs:selector xpath="modules/module"/>
+ <xs:field xpath="@name"/>
+ </xs:key>
+ <xs:unique name="volumeTargetUniqueness">
+ <xs:selector xpath="volumes/volume"/>
+ <xs:field xpath="@stream"/>
+ <xs:field xpath="@deviceCategory"/>
+ </xs:unique>
+ <xs:key name="volumeCurveNameKey">
+ <xs:selector xpath="volumes/reference"/>
+ <xs:field xpath="@name"/>
+ </xs:key>
+ <xs:keyref name="volumeCurveRef" refer="volumeCurveNameKey">
+ <xs:selector xpath="volumes/volume"/>
+ <xs:field xpath="@ref"/>
+ </xs:keyref>
+ </xs:element>
+ <xs:complexType name="globalConfiguration">
+ <xs:attribute name="speaker_drc_enabled" type="xs:boolean" use="required"/>
+ <xs:attribute name="call_screen_mode_supported" type="xs:boolean" use="optional"/>
+ <xs:attribute name="engine_library" type="engineSuffix" use="optional"/>
+ </xs:complexType>
+ <xs:complexType name="modules">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ There should be one section per audio HW module present on the platform.
+ Each <module/> carries two mandatory attributes: “halVersion” and “name”.
+ The module "name" is the same as in the previous .conf file.
+ Each module must contain the following sections:
+ - <devicePorts/>: a list of device descriptors for all
+ input and output devices accessible via this module.
+ This contains both permanently attached devices and removable devices.
+ - <mixPorts/>: listing all output and input streams exposed by the audio HAL
+ - <routes/>: list of possible connections between input
+ and output devices or between streams and devices.
+ A <route/> is defined by a set of 3 attributes:
+ -"type": mux|mix means all sources are mutually exclusive (mux) or can be mixed (mix)
+ -"sink": the sink involved in this route
+ -"sources": all the sources that can be connected to the sink via this route
+ - <attachedDevices/>: permanently attached devices.
+ The attachedDevices section is a list of device names.
+ Their names correspond to device names defined in the "devicePorts" section.
+ - <defaultOutputDevice/> is the device to be used when no policy rule applies
+ </xs:documentation>
+ </xs:annotation>
+ <xs:sequence>
+ <xs:element name="module" maxOccurs="unbounded">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element name="attachedDevices" type="attachedDevices" minOccurs="0">
+ <xs:unique name="attachedDevicesUniqueness">
+ <xs:selector xpath="item"/>
+ <xs:field xpath="."/>
+ </xs:unique>
+ </xs:element>
+ <xs:element name="defaultOutputDevice" type="xs:token" minOccurs="0"/>
+ <xs:element name="mixPorts" type="mixPorts" minOccurs="0"/>
+ <xs:element name="devicePorts" type="devicePorts" minOccurs="0"/>
+ <xs:element name="routes" type="routes" minOccurs="0"/>
+ </xs:sequence>
+ <xs:attribute name="name" type="xs:string" use="required"/>
+ <xs:attribute name="halVersion" type="halVersion" use="required"/>
+ </xs:complexType>
+ <xs:unique name="mixPortNameUniqueness">
+ <xs:selector xpath="mixPorts/mixPort"/>
+ <xs:field xpath="@name"/>
+ </xs:unique>
+ <xs:key name="devicePortNameKey">
+ <xs:selector xpath="devicePorts/devicePort"/>
+ <xs:field xpath="@tagName"/>
+ </xs:key>
+ <xs:unique name="devicePortUniqueness">
+ <xs:selector xpath="devicePorts/devicePort"/>
+ <xs:field xpath="@type"/>
+ <xs:field xpath="@address"/>
+ </xs:unique>
+ <xs:keyref name="defaultOutputDeviceRef" refer="devicePortNameKey">
+ <xs:selector xpath="defaultOutputDevice"/>
+ <xs:field xpath="."/>
+ </xs:keyref>
+ <xs:keyref name="attachedDeviceRef" refer="devicePortNameKey">
+ <xs:selector xpath="attachedDevices/item"/>
+ <xs:field xpath="."/>
+ </xs:keyref>
+ <!-- The following 3 constraints try to make sure each sink port
+ is referenced in one and only one route. -->
+ <xs:key name="routeSinkKey">
+ <!-- predicate [@type='sink'] does not work in xsd 1.0 -->
+ <xs:selector xpath="devicePorts/devicePort|mixPorts/mixPort"/>
+ <xs:field xpath="@tagName|@name"/>
+ </xs:key>
+ <xs:keyref name="routeSinkRef" refer="routeSinkKey">
+ <xs:selector xpath="routes/route"/>
+ <xs:field xpath="@sink"/>
+ </xs:keyref>
+ <xs:unique name="routeUniqueness">
+ <xs:selector xpath="routes/route"/>
+ <xs:field xpath="@sink"/>
+ </xs:unique>
+ </xs:element>
+ </xs:sequence>
+ </xs:complexType>
+ <xs:complexType name="attachedDevices">
+ <xs:sequence>
+ <xs:element name="item" type="xs:token" minOccurs="0" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+ <xs:simpleType name="audioInOutFlags">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ "|" separated list of audio_output_flags_t or audio_input_flags_t.
+ </xs:documentation>
+ </xs:annotation>
+ <xs:restriction base="xs:string">
+ <xs:pattern value="|[_A-Z]+(\|[_A-Z]+)*"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="role">
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="sink"/>
+ <xs:enumeration value="source"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:complexType name="mixPorts">
+ <xs:sequence>
+ <xs:element name="mixPort" minOccurs="0" maxOccurs="unbounded">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element name="profile" type="profile" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element name="gains" type="gains" minOccurs="0"/>
+ </xs:sequence>
+ <xs:attribute name="name" type="xs:token" use="required"/>
+ <xs:attribute name="role" type="role" use="required"/>
+ <xs:attribute name="flags" type="audioInOutFlags"/>
+ <xs:attribute name="maxOpenCount" type="xs:unsignedInt"/>
+ <xs:attribute name="maxActiveCount" type="xs:unsignedInt"/>
+ <xs:attribute name="preferredUsage" type="audioUsageList">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ When choosing the mixPort of an audio track, the audioPolicy
+ first considers the mixPorts whose preferredUsage includes
+ the track's AudioUsage.
+ If none support the track format, the other mixPorts are considered.
+ E.g. a <mixPort preferredUsage="AUDIO_USAGE_MEDIA" /> will receive
+ the audio of all apps playing with a MEDIA usage.
+ It may receive ALARM audio if there is no audio-compatible
+ <mixPort preferredUsage="AUDIO_USAGE_ALARM" />.
+ </xs:documentation>
+ </xs:annotation>
+ </xs:attribute>
+ </xs:complexType>
+ <xs:unique name="mixPortProfileUniqueness">
+ <xs:selector xpath="profile"/>
+ <xs:field xpath="format"/>
+ <xs:field xpath="samplingRate"/>
+ <xs:field xpath="channelMasks"/>
+ </xs:unique>
+ <xs:unique name="mixPortGainUniqueness">
+ <xs:selector xpath="gains/gain"/>
+ <xs:field xpath="@name"/>
+ </xs:unique>
+ </xs:element>
+ </xs:sequence>
+ </xs:complexType>
+ <xs:simpleType name="audioDevice">
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="AUDIO_DEVICE_NONE"/>
+
+ <xs:enumeration value="AUDIO_DEVICE_OUT_EARPIECE"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_SPEAKER"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_WIRED_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_WIRED_HEADPHONE"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_SCO"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_AUX_DIGITAL"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_HDMI"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_USB_ACCESSORY"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_USB_DEVICE"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_REMOTE_SUBMIX"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_TELEPHONY_TX"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_LINE"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_HDMI_ARC"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_SPDIF"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_FM"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_AUX_LINE"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_SPEAKER_SAFE"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_IP"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_BUS"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_PROXY"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_USB_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_HEARING_AID"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_ECHO_CANCELLER"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_DEFAULT"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_STUB"/>
+
+ <xs:enumeration value="AUDIO_DEVICE_IN_COMMUNICATION"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_AMBIENT"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_BUILTIN_MIC"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_WIRED_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_AUX_DIGITAL"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_HDMI"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_VOICE_CALL"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_TELEPHONY_RX"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_BACK_MIC"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_REMOTE_SUBMIX"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_USB_ACCESSORY"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_USB_DEVICE"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_FM_TUNER"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_TV_TUNER"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_LINE"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_SPDIF"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_BLUETOOTH_A2DP"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_LOOPBACK"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_IP"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_BUS"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_PROXY"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_USB_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_BLUETOOTH_BLE"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_HDMI_ARC"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_ECHO_REFERENCE"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_DEFAULT"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_STUB"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="vendorExtension">
+ <!-- Vendor extension names must be prefixed by "VX_" to distinguish them from AOSP values.
+ Vendors are encouraged to namespace their module names to avoid conflicts.
+ Example for a hypothetical Google virtual reality device:
+ <devicePort tagName="VR" type="VX_GOOGLE_VR" role="sink">
+ -->
+ <xs:restriction base="xs:string">
+ <xs:pattern value="VX_[_a-zA-Z0-9]+"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="extendableAudioDevice">
+ <xs:union memberTypes="audioDevice vendorExtension"/>
+ </xs:simpleType>
+ <xs:simpleType name="audioFormat">
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="AUDIO_FORMAT_DEFAULT" />
+ <xs:enumeration value="AUDIO_FORMAT_PCM_16_BIT" />
+ <xs:enumeration value="AUDIO_FORMAT_PCM_8_BIT"/>
+ <xs:enumeration value="AUDIO_FORMAT_PCM_32_BIT"/>
+ <xs:enumeration value="AUDIO_FORMAT_PCM_8_24_BIT"/>
+ <xs:enumeration value="AUDIO_FORMAT_PCM_FLOAT"/>
+ <xs:enumeration value="AUDIO_FORMAT_PCM_24_BIT_PACKED"/>
+ <xs:enumeration value="AUDIO_FORMAT_MP3"/>
+ <xs:enumeration value="AUDIO_FORMAT_AMR_NB"/>
+ <xs:enumeration value="AUDIO_FORMAT_AMR_WB"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_MAIN"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_LC"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_SSR"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_LTP"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_HE_V1"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_SCALABLE"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ERLC"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_LD"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_HE_V2"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ELD"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_MAIN"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_LC"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_SSR"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_LTP"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_HE_V1"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_SCALABLE"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_ERLC"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_LD"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_HE_V2"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_ELD"/>
+ <xs:enumeration value="AUDIO_FORMAT_VORBIS"/>
+ <xs:enumeration value="AUDIO_FORMAT_HE_AAC_V1"/>
+ <xs:enumeration value="AUDIO_FORMAT_HE_AAC_V2"/>
+ <xs:enumeration value="AUDIO_FORMAT_OPUS"/>
+ <xs:enumeration value="AUDIO_FORMAT_AC3"/>
+ <xs:enumeration value="AUDIO_FORMAT_E_AC3"/>
+ <xs:enumeration value="AUDIO_FORMAT_DTS"/>
+ <xs:enumeration value="AUDIO_FORMAT_DTS_HD"/>
+ <xs:enumeration value="AUDIO_FORMAT_IEC61937"/>
+ <xs:enumeration value="AUDIO_FORMAT_DOLBY_TRUEHD"/>
+ <xs:enumeration value="AUDIO_FORMAT_EVRC"/>
+ <xs:enumeration value="AUDIO_FORMAT_EVRCB"/>
+ <xs:enumeration value="AUDIO_FORMAT_EVRCWB"/>
+ <xs:enumeration value="AUDIO_FORMAT_EVRCNW"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ADIF"/>
+ <xs:enumeration value="AUDIO_FORMAT_WMA"/>
+ <xs:enumeration value="AUDIO_FORMAT_WMA_PRO"/>
+ <xs:enumeration value="AUDIO_FORMAT_AMR_WB_PLUS"/>
+ <xs:enumeration value="AUDIO_FORMAT_MP2"/>
+ <xs:enumeration value="AUDIO_FORMAT_QCELP"/>
+ <xs:enumeration value="AUDIO_FORMAT_DSD"/>
+ <xs:enumeration value="AUDIO_FORMAT_FLAC"/>
+ <xs:enumeration value="AUDIO_FORMAT_ALAC"/>
+ <xs:enumeration value="AUDIO_FORMAT_APE"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ADTS"/>
+ <xs:enumeration value="AUDIO_FORMAT_SBC"/>
+ <xs:enumeration value="AUDIO_FORMAT_APTX"/>
+ <xs:enumeration value="AUDIO_FORMAT_APTX_HD"/>
+ <xs:enumeration value="AUDIO_FORMAT_AC4"/>
+ <xs:enumeration value="AUDIO_FORMAT_LDAC"/>
+ <xs:enumeration value="AUDIO_FORMAT_E_AC3_JOC"/>
+ <xs:enumeration value="AUDIO_FORMAT_MAT_1_0"/>
+ <xs:enumeration value="AUDIO_FORMAT_MAT_2_0"/>
+ <xs:enumeration value="AUDIO_FORMAT_MAT_2_1"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_XHE"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_XHE"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_LATM"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_LATM_LC"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_LATM_HE_V1"/>
+ <xs:enumeration value="AUDIO_FORMAT_AAC_LATM_HE_V2"/>
+ <xs:enumeration value="AUDIO_FORMAT_CELT"/>
+ <xs:enumeration value="AUDIO_FORMAT_APTX_ADAPTIVE"/>
+ <xs:enumeration value="AUDIO_FORMAT_LHDC"/>
+ <xs:enumeration value="AUDIO_FORMAT_LHDC_LL"/>
+ <xs:enumeration value="AUDIO_FORMAT_APTX_TWSP"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="extendableAudioFormat">
+ <xs:union memberTypes="audioFormat vendorExtension"/>
+ </xs:simpleType>
+ <xs:simpleType name="audioUsage">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ Audio usage specifies the intended use case for the sound being played.
+ Please consult frameworks/base/media/java/android/media/AudioAttributes.java
+ for the description of each value.
+ </xs:documentation>
+ </xs:annotation>
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="AUDIO_USAGE_UNKNOWN" />
+ <xs:enumeration value="AUDIO_USAGE_MEDIA" />
+ <xs:enumeration value="AUDIO_USAGE_VOICE_COMMUNICATION" />
+ <xs:enumeration value="AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING" />
+ <xs:enumeration value="AUDIO_USAGE_ALARM" />
+ <xs:enumeration value="AUDIO_USAGE_NOTIFICATION" />
+ <xs:enumeration value="AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE" />
+ <xs:enumeration value="AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY" />
+ <xs:enumeration value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE" />
+ <xs:enumeration value="AUDIO_USAGE_ASSISTANCE_SONIFICATION" />
+ <xs:enumeration value="AUDIO_USAGE_GAME" />
+ <xs:enumeration value="AUDIO_USAGE_VIRTUAL_SOURCE" />
+ <xs:enumeration value="AUDIO_USAGE_ASSISTANT" />
+ <xs:enumeration value="AUDIO_USAGE_CALL_ASSISTANT" />
+ <xs:enumeration value="AUDIO_USAGE_EMERGENCY" />
+ <xs:enumeration value="AUDIO_USAGE_SAFETY" />
+ <xs:enumeration value="AUDIO_USAGE_VEHICLE_STATUS" />
+ <xs:enumeration value="AUDIO_USAGE_ANNOUNCEMENT" />
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="audioUsageList">
+ <xs:list itemType="audioUsage"/>
+ </xs:simpleType>
+ <xs:simpleType name="audioContentType">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ Audio content type expresses the general category of the content.
+ Please consult frameworks/base/media/java/android/media/AudioAttributes.java
+ for the description of each value.
+ </xs:documentation>
+ </xs:annotation>
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="AUDIO_CONTENT_TYPE_UNKNOWN"/>
+ <xs:enumeration value="AUDIO_CONTENT_TYPE_SPEECH"/>
+ <xs:enumeration value="AUDIO_CONTENT_TYPE_MUSIC"/>
+ <xs:enumeration value="AUDIO_CONTENT_TYPE_MOVIE"/>
+ <xs:enumeration value="AUDIO_CONTENT_TYPE_SONIFICATION"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="samplingRates">
+ <xs:list itemType="xs:nonNegativeInteger" />
+ </xs:simpleType>
+ <xs:simpleType name="audioChannelMask">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ Audio channel mask specifies presence of particular channels.
+ There are two representations:
+ - positional (traditional discrete channel specification,
+ e.g. "left", "right");
+ - indexed (this is similar to "tracks" in audio mixing, channels
+ are represented using numbers).
+ </xs:documentation>
+ </xs:annotation>
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_MONO"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_STEREO"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_2POINT1"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_2POINT0POINT2"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_2POINT1POINT2"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_3POINT0POINT2"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_3POINT1POINT2"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_QUAD"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_QUAD_BACK"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_QUAD_SIDE"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_SURROUND"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_PENTA"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1_BACK"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1_SIDE"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1POINT2"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1POINT4"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_6POINT1"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_7POINT1"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_7POINT1POINT2"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_7POINT1POINT4"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_MONO_HAPTIC_A"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_STEREO_HAPTIC_A"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_HAPTIC_AB"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_MONO_HAPTIC_AB"/>
+ <xs:enumeration value="AUDIO_CHANNEL_OUT_STEREO_HAPTIC_AB"/>
+ <xs:enumeration value="AUDIO_CHANNEL_IN_MONO"/>
+ <xs:enumeration value="AUDIO_CHANNEL_IN_STEREO"/>
+ <xs:enumeration value="AUDIO_CHANNEL_IN_FRONT_BACK"/>
+ <xs:enumeration value="AUDIO_CHANNEL_IN_6"/>
+ <xs:enumeration value="AUDIO_CHANNEL_IN_2POINT0POINT2"/>
+ <xs:enumeration value="AUDIO_CHANNEL_IN_2POINT1POINT2"/>
+ <xs:enumeration value="AUDIO_CHANNEL_IN_3POINT0POINT2"/>
+ <xs:enumeration value="AUDIO_CHANNEL_IN_3POINT1POINT2"/>
+ <xs:enumeration value="AUDIO_CHANNEL_IN_5POINT1"/>
+ <xs:enumeration value="AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO"/>
+ <xs:enumeration value="AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO"/>
+ <xs:enumeration value="AUDIO_CHANNEL_IN_VOICE_CALL_MONO"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_1"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_2"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_3"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_4"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_5"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_6"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_7"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_8"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_9"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_10"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_11"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_12"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_13"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_14"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_15"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_16"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_17"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_18"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_19"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_20"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_21"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_22"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_23"/>
+ <xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_24"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="channelMasks">
+ <xs:list itemType="audioChannelMask" />
+ </xs:simpleType>
+ <xs:complexType name="profile">
+ <xs:attribute name="name" type="xs:token" use="optional"/>
+ <xs:attribute name="format" type="extendableAudioFormat" use="optional"/>
+ <xs:attribute name="samplingRates" type="samplingRates" use="optional"/>
+ <xs:attribute name="channelMasks" type="channelMasks" use="optional"/>
+ </xs:complexType>
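+ <!-- Illustrative only: a 'profile' element allowed by the type above. The name
+ and attribute values are hypothetical; note that the list-valued attributes
+ 'samplingRates' and 'channelMasks' use spaces as separators:
+ <profile name="primary" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="44100 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ -->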
+ <xs:simpleType name="gainMode">
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="AUDIO_GAIN_MODE_JOINT"/>
+ <xs:enumeration value="AUDIO_GAIN_MODE_CHANNELS"/>
+ <xs:enumeration value="AUDIO_GAIN_MODE_RAMP"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:complexType name="gains">
+ <xs:sequence>
+ <xs:element name="gain" minOccurs="0" maxOccurs="unbounded">
+ <xs:complexType>
+ <xs:attribute name="name" type="xs:token" use="required"/>
+ <xs:attribute name="mode" type="gainMode" use="required"/>
+ <xs:attribute name="channel_mask" type="audioChannelMask" use="optional"/>
+ <xs:attribute name="minValueMB" type="xs:int" use="optional"/>
+ <xs:attribute name="maxValueMB" type="xs:int" use="optional"/>
+ <xs:attribute name="defaultValueMB" type="xs:int" use="optional"/>
+ <xs:attribute name="stepValueMB" type="xs:int" use="optional"/>
+ <xs:attribute name="minRampMs" type="xs:int" use="optional"/>
+ <xs:attribute name="maxRampMs" type="xs:int" use="optional"/>
+ <xs:attribute name="useForVolume" type="xs:boolean" use="optional"/>
+ </xs:complexType>
+ </xs:element>
+ </xs:sequence>
+ </xs:complexType>
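+ <!-- Illustrative only: a 'gains' block matching the type above. The gain name
+ and the millibel values are hypothetical:
+ <gains>
+ <gain name="speaker_gain" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-8400" maxValueMB="4000" defaultValueMB="0" stepValueMB="100"/>
+ </gains>
+ -->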
+ <xs:complexType name="devicePorts">
+ <xs:sequence>
+ <xs:element name="devicePort" minOccurs="0" maxOccurs="unbounded">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element name="profile" type="profile" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element name="gains" type="gains" minOccurs="0"/>
+ </xs:sequence>
+ <xs:attribute name="tagName" type="xs:token" use="required"/>
+ <xs:attribute name="type" type="extendableAudioDevice" use="required"/>
+ <xs:attribute name="role" type="role" use="required"/>
+ <xs:attribute name="address" type="xs:string" use="optional" default=""/>
+ <!-- Note that XSD 1.0 can not check that a type only has one default. -->
+ <xs:attribute name="default" type="xs:boolean" use="optional">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ The default device will be used if multiple devices have the same type
+ and no explicit route request exists for a specific device of
+ that type.
+ </xs:documentation>
+ </xs:annotation>
+ </xs:attribute>
+ <xs:attribute name="encodedFormats" type="audioFormatsList" use="optional"
+ default="" />
+ </xs:complexType>
+ <xs:unique name="devicePortProfileUniqueness">
+ <xs:selector xpath="profile"/>
+ <xs:field xpath="format"/>
+ <xs:field xpath="samplingRate"/>
+ <xs:field xpath="channelMasks"/>
+ </xs:unique>
+ <xs:unique name="devicePortGainUniqueness">
+ <xs:selector xpath="gains/gain"/>
+ <xs:field xpath="@name"/>
+ </xs:unique>
+ </xs:element>
+ </xs:sequence>
+ </xs:complexType>
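+ <!-- Illustrative only: a 'devicePort' element allowed by the type above. The
+ tagName is hypothetical; the device type and the space-separated encodedFormats
+ come from the enumerations defined in this schema:
+ <devicePort tagName="HDMI Out" type="AUDIO_DEVICE_OUT_HDMI" role="sink"
+ encodedFormats="AUDIO_FORMAT_AC3 AUDIO_FORMAT_E_AC3">
+ <profile format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </devicePort>
+ -->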
+ <xs:simpleType name="mixType">
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="mix"/>
+ <xs:enumeration value="mux"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:complexType name="routes">
+ <xs:sequence>
+ <xs:element name="route" minOccurs="0" maxOccurs="unbounded">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ List all available sources for a given sink.
+ </xs:documentation>
+ </xs:annotation>
+ <xs:complexType>
+ <xs:attribute name="type" type="mixType" use="required"/>
+ <xs:attribute name="sink" type="xs:string" use="required"/>
+ <xs:attribute name="sources" type="xs:string" use="required"/>
+ </xs:complexType>
+ </xs:element>
+ </xs:sequence>
+ </xs:complexType>
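+ <!-- Illustrative only: a 'route' element allowed by the type above. The sink and
+ source names are hypothetical port tagNames; 'sources' is typed as a plain string
+ and is conventionally a comma-separated list of port names:
+ <route type="mix" sink="Wired Headphones" sources="primary output,deep_buffer"/>
+ -->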
+ <xs:complexType name="volumes">
+ <xs:sequence>
+ <xs:element name="volume" type="volume" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element name="reference" type="reference" minOccurs="0" maxOccurs="unbounded">
+ </xs:element>
+ </xs:sequence>
+ </xs:complexType>
+ <!-- TODO: Always require a ref for better xsd validations.
+ Currently a volume may have neither points nor a ref,
+ as this cannot be forbidden by XSD 1.0.-->
+ <xs:simpleType name="volumePoint">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ Comma-separated pair of numbers.
+ The first one is the framework level (between 0 and 100).
+ The second one is the volume to send to the HAL.
+ The framework will interpolate volumes for levels that are not specified.
+ There MUST be at least 2 points specified.
+ </xs:documentation>
+ </xs:annotation>
+ <xs:restriction base="xs:string">
+ <xs:pattern value="([0-9]{1,2}|100),-?[0-9]+"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="audioStreamType">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ Audio stream type describing the intended use case of a stream.
+ Please consult frameworks/base/media/java/android/media/AudioSystem.java
+ for the description of each value.
+ </xs:documentation>
+ </xs:annotation>
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="AUDIO_STREAM_VOICE_CALL"/>
+ <xs:enumeration value="AUDIO_STREAM_SYSTEM"/>
+ <xs:enumeration value="AUDIO_STREAM_RING"/>
+ <xs:enumeration value="AUDIO_STREAM_MUSIC"/>
+ <xs:enumeration value="AUDIO_STREAM_ALARM"/>
+ <xs:enumeration value="AUDIO_STREAM_NOTIFICATION"/>
+ <xs:enumeration value="AUDIO_STREAM_BLUETOOTH_SCO"/>
+ <xs:enumeration value="AUDIO_STREAM_ENFORCED_AUDIBLE"/>
+ <xs:enumeration value="AUDIO_STREAM_DTMF"/>
+ <xs:enumeration value="AUDIO_STREAM_TTS"/>
+ <xs:enumeration value="AUDIO_STREAM_ACCESSIBILITY"/>
+ <xs:enumeration value="AUDIO_STREAM_ASSISTANT"/>
+ <xs:enumeration value="AUDIO_STREAM_REROUTING"/>
+ <xs:enumeration value="AUDIO_STREAM_PATCH"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="audioSource">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ An audio source defines the intended use case for the sound being recorded.
+ Please consult frameworks/base/media/java/android/media/MediaRecorder.java
+ for the description of each value.
+ </xs:documentation>
+ </xs:annotation>
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="AUDIO_SOURCE_DEFAULT"/>
+ <xs:enumeration value="AUDIO_SOURCE_MIC"/>
+ <xs:enumeration value="AUDIO_SOURCE_VOICE_UPLINK"/>
+ <xs:enumeration value="AUDIO_SOURCE_VOICE_DOWNLINK"/>
+ <xs:enumeration value="AUDIO_SOURCE_VOICE_CALL"/>
+ <xs:enumeration value="AUDIO_SOURCE_CAMCORDER"/>
+ <xs:enumeration value="AUDIO_SOURCE_VOICE_RECOGNITION"/>
+ <xs:enumeration value="AUDIO_SOURCE_VOICE_COMMUNICATION"/>
+ <xs:enumeration value="AUDIO_SOURCE_REMOTE_SUBMIX"/>
+ <xs:enumeration value="AUDIO_SOURCE_UNPROCESSED"/>
+ <xs:enumeration value="AUDIO_SOURCE_VOICE_PERFORMANCE"/>
+ <xs:enumeration value="AUDIO_SOURCE_ECHO_REFERENCE"/>
+ <xs:enumeration value="AUDIO_SOURCE_FM_TUNER"/>
+ <xs:enumeration value="AUDIO_SOURCE_HOTWORD"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <!-- Enum values of device_category from Volume.h. -->
+ <xs:simpleType name="deviceCategory">
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="DEVICE_CATEGORY_HEADSET"/>
+ <xs:enumeration value="DEVICE_CATEGORY_SPEAKER"/>
+ <xs:enumeration value="DEVICE_CATEGORY_EARPIECE"/>
+ <xs:enumeration value="DEVICE_CATEGORY_EXT_MEDIA"/>
+ <xs:enumeration value="DEVICE_CATEGORY_HEARING_AID"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:complexType name="volume">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ Volume section defines a volume curve for a given use case and device category.
+ It contains a list of points of this curve expressing the attenuation in Millibels
+ for a given volume index from 0 to 100.
+ <volume stream="AUDIO_STREAM_MUSIC" deviceCategory="DEVICE_CATEGORY_SPEAKER">
+ <point>0,-9600</point>
+ <point>100,0</point>
+ </volume>
+
+ It may also reference a reference/@name to avoid duplicating curves.
+ <volume stream="AUDIO_STREAM_MUSIC" deviceCategory="DEVICE_CATEGORY_SPEAKER"
+ ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+ <reference name="DEFAULT_MEDIA_VOLUME_CURVE">
+ <point>0,-9600</point>
+ <point>100,0</point>
+ </reference>
+ </xs:documentation>
+ </xs:annotation>
+ <xs:sequence>
+ <xs:element name="point" type="volumePoint" minOccurs="0" maxOccurs="unbounded"/>
+ </xs:sequence>
+ <xs:attribute name="stream" type="audioStreamType"/>
+ <xs:attribute name="deviceCategory" type="deviceCategory"/>
+ <xs:attribute name="ref" type="xs:token" use="optional"/>
+ </xs:complexType>
+ <xs:complexType name="reference">
+ <xs:sequence>
+ <xs:element name="point" type="volumePoint" minOccurs="2" maxOccurs="unbounded"/>
+ </xs:sequence>
+ <xs:attribute name="name" type="xs:token" use="required"/>
+ </xs:complexType>
+ <xs:complexType name="surroundSound">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ Surround Sound section provides configuration related to handling of
+ multi-channel formats.
+ </xs:documentation>
+ </xs:annotation>
+ <xs:sequence>
+ <xs:element name="formats" type="surroundFormats"/>
+ </xs:sequence>
+ </xs:complexType>
+ <xs:simpleType name="audioFormatsList">
+ <xs:list itemType="audioFormat" />
+ </xs:simpleType>
+ <xs:complexType name="surroundFormats">
+ <xs:sequence>
+ <xs:element name="format" minOccurs="0" maxOccurs="unbounded">
+ <xs:complexType>
+ <xs:attribute name="name" type="audioFormat" use="required"/>
+ <xs:attribute name="subformats" type="audioFormatsList" />
+ </xs:complexType>
+ </xs:element>
+ </xs:sequence>
+ </xs:complexType>
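+ <!-- Illustrative only: a surround sound section built from the two types above.
+ The format names come from the audioFormat enumeration; 'subformats' is a
+ space-separated list:
+ <surroundSound>
+ <formats>
+ <format name="AUDIO_FORMAT_E_AC3" subformats="AUDIO_FORMAT_E_AC3_JOC"/>
+ <format name="AUDIO_FORMAT_DTS"/>
+ </formats>
+ </surroundSound>
+ -->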
+ <xs:simpleType name="engineSuffix">
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="default"/>
+ <xs:enumeration value="configurable"/>
+ </xs:restriction>
+ </xs:simpleType>
+</xs:schema>
diff --git a/audio/7.0/config/update_audio_policy_config.sh b/audio/7.0/config/update_audio_policy_config.sh
new file mode 100755
index 0000000..8714b5f
--- /dev/null
+++ b/audio/7.0/config/update_audio_policy_config.sh
@@ -0,0 +1,159 @@
+#!/bin/bash
+
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script is used to update audio policy configuration files
+# to comply with the updated audio_policy_configuration.xsd from V7.0.
+#
+# The main difference is the separator used in lists for attributes.
+# Since the XML Schema Definition standard only allows space to be
+# used as a separator (see https://www.w3.org/TR/xmlschema11-2/#list-datatypes)
+# the previous versions used a regular expression to validate lists
+# in attribute values. E.g. the channel masks were validated using
+# the following regexp: [_A-Z][_A-Z0-9]*(,[_A-Z][_A-Z0-9]*)*
+# This has an obvious drawback of missing typos in the config file.
+#
+# The V7.0 schema has shifted to defining most of the frequently changed
+# types in the XSD schema only. This allows for verifying all the values
+# in lists, but in order to comply with XML Schema requirements
+# list elements must be separated by space.
+#
+# Since the APM config files typically use include directives,
+# the script must be pointed to the main APM config file and will
+# take care of all the included files automatically.
+# If the included file is a shared version from 'frameworks/av',
+# instead of updating it the script checks if there is a newer
+# version with the corresponding name suffix (e.g.
+# 'a2dp_audio_policy_configuration_7_0.xml') and updates the include
+# path instead.
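+#
+# For example (the attribute values below are hypothetical), an entry that
+# previously used commas as list separators:
+#   <profile format="AUDIO_FORMAT_PCM_16_BIT"
+#            samplingRates="44100,48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+# is rewritten by this script to use spaces:
+#   <profile format="AUDIO_FORMAT_PCM_16_BIT"
+#            samplingRates="44100 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>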
+
+set -euo pipefail
+
+if (echo "$@" | grep -qe -h); then
+ echo "This script will update Audio Policy Manager config file"
+ echo "to the format required by V7.0 XSD schema from a previous"
+ echo "version."
+ echo
+ echo "USAGE: $0 [APM_XML_FILE] [OLD_VERSION]"
+ echo " APM_XML_FILE specifies the path to audio_policy_configuration.xml"
+ echo " relative to Android repository root"
+ echo " OLD_VERSION specifies the version of schema currently used"
+ echo
+ echo "Example: $0 device/generic/goldfish/audio/policy/audio_policy_configuration.xml 6.0"
+ exit
+fi
+readonly HAL_DIRECTORY=hardware/interfaces/audio
+readonly SHARED_CONFIGS_DIRECTORY=frameworks/av/services/audiopolicy/config
+readonly OLD_VERSION=${2:-$(ls ${ANDROID_BUILD_TOP}/${HAL_DIRECTORY} | grep -E '[0-9]+\.[0-9]+' |
+ sort -n | tail -n1)}
+readonly NEW_VERSION=7.0
+readonly NEW_VERSION_UNDERSCORE=7_0
+
+readonly SOURCE_CONFIG=${ANDROID_BUILD_TOP}/$1
+
+# First, validate the input using the schema of the current version
+
+echo Validating the source against the $OLD_VERSION schema
+xmllint --noout --xinclude \
+ --nofixup-base-uris --path "$ANDROID_BUILD_TOP/$SHARED_CONFIGS_DIRECTORY" \
+ --schema ${ANDROID_BUILD_TOP}/${HAL_DIRECTORY}/${OLD_VERSION}/config/audio_policy_configuration.xsd \
+ ${SOURCE_CONFIG}
+if [ $? -ne 0 ]; then
+ echo
+ echo "Config file fails validation for the specified version $OLD_VERSION--unsafe to update"
+ exit 1
+fi
+
+# Find all the source files recursively
+
+SOURCE_FILES=${SOURCE_CONFIG}
+SHARED_FILES=
+findIncludes() {
+ local FILES_TO_CHECK=
+ for F in $1; do
+ local FOUND_INCLUDES=$(grep -Po '<xi:include href="\K[^"]+(?="\/>)' ${F})
+ for I in ${FOUND_INCLUDES}; do
+ SOURCE_FULL_PATH=$(dirname ${F})/${I}
+ SHARED_FULL_PATH=${ANDROID_BUILD_TOP}/${SHARED_CONFIGS_DIRECTORY}/${I}
+ if [ -f "$SOURCE_FULL_PATH" ]; then
+ # Device-specific file.
+ SOURCE_FILES+=$'\n'${SOURCE_FULL_PATH}
+ FILES_TO_CHECK+=$'\n'${SOURCE_FULL_PATH}
+ elif [ -f "$SHARED_FULL_PATH" ]; then
+ # Shared file from the frameworks repo.
+ SHARED_FILES+=$'\n'${I}
+ FILES_TO_CHECK+=$'\n'${SHARED_FULL_PATH}
+ else
+ echo
+ echo "Include file not found: $I"
+ exit 1
+ fi
+ done
+ done
+ if [ "$FILES_TO_CHECK" ]; then
+ findIncludes "$FILES_TO_CHECK"
+ fi
+}
+findIncludes ${SOURCE_FILES}
+
+echo "Will update $1 and included device-specific files in place."
+echo "Will update paths to shared included files."
+echo "Press Ctrl-C to cancel, Enter to continue"
+read
+
+updateFile() {
+ FILE=$1
+ ATTR=$2
+ SEPARATOR=$3
+ SRC_LINES=$(grep -nPo "$ATTR=\"[^\"]+\"" ${FILE} || true)
+ for S in $SRC_LINES; do
+ # Prepare instruction for 'sed' for in-place editing of specified line
+ R=$(echo ${S} | sed -e 's/^[0-9]\+:/\//' | sed -e "s/$SEPARATOR/ /g")
+ S=$(echo ${S} | sed -e 's/:/s\//')${R}/
+ echo ${S} | sed -i -f - ${FILE}
+ done
+}
+for F in $SOURCE_FILES; do
+ updateFile ${F} "channelMasks" ","
+ updateFile ${F} "samplingRates" ","
+done;
+
+updateIncludes() {
+ FILE=$1
+ for I in $SHARED_FILES; do
+ NEW_VERSION_I=${I%.*}_${NEW_VERSION_UNDERSCORE}.${I##*.}
+ if [ -e "$ANDROID_BUILD_TOP/$SHARED_CONFIGS_DIRECTORY/$NEW_VERSION_I" ]; then
+ echo "s/$I/$NEW_VERSION_I/g" | sed -i -f - ${FILE}
+ fi
+ done
+}
+for F in $SOURCE_FILES; do
+ updateIncludes ${F}
+done
+
+# Validate the results against the new schema
+
+echo Validating the result against the $NEW_VERSION schema
+xmllint --noout --xinclude \
+ --nofixup-base-uris --path "$ANDROID_BUILD_TOP/$SHARED_CONFIGS_DIRECTORY" \
+ --schema ${ANDROID_BUILD_TOP}/${HAL_DIRECTORY}/${NEW_VERSION}/config/audio_policy_configuration.xsd \
+ ${SOURCE_CONFIG}
+if [ $? -ne 0 ]; then
+ echo
+ echo "Config file fails validation for the specified version $NEW_VERSION--please check the changes"
+ exit 1
+fi
+echo
+echo "Please check the diff and update path to APM shared files in the device makefile!"
diff --git a/audio/7.0/types.hal b/audio/7.0/types.hal
new file mode 100644
index 0000000..15ca492
--- /dev/null
+++ b/audio/7.0/types.hal
@@ -0,0 +1,412 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio@7.0;
+
+import android.hardware.audio.common@7.0;
+
+enum Result : int32_t {
+ OK,
+ NOT_INITIALIZED,
+ INVALID_ARGUMENTS,
+ INVALID_STATE,
+ /**
+ * Methods marked as "Optional method" must return this result value
+ * if the operation is not supported by HAL.
+ */
+ NOT_SUPPORTED
+};
+
+@export(name="audio_drain_type_t", value_prefix="AUDIO_DRAIN_")
+enum AudioDrain : int32_t {
+ /** drain() returns when all data has been played. */
+ ALL,
+ /**
+ * drain() returns a short time before all data from the current track has
+ * been played to give time for gapless track switch.
+ */
+ EARLY_NOTIFY
+};
+
+/**
+ * A substitute for POSIX timespec.
+ */
+struct TimeSpec {
+ uint64_t tvSec; // seconds
+ uint64_t tvNSec; // nanoseconds
+};
+
+struct ParameterValue {
+ string key;
+ string value;
+};
+
+enum MmapBufferFlag : uint32_t {
+ NONE = 0x0,
+ /**
+ * If the buffer can be securely shared with untrusted applications
+ * through the AAudio exclusive mode.
+ * Only set this flag if applications are restricted from accessing the
+ * memory surrounding the audio data buffer by a kernel mechanism.
+ * See Linux kernel's dma_buf.
+ */
+ APPLICATION_SHAREABLE = 0x1,
+};
+
+/**
+ * Mmap buffer descriptor returned by IStream.createMmapBuffer().
+ * Used by streams opened in mmap mode.
+ */
+struct MmapBufferInfo {
+ /** Mmap memory buffer */
+ memory sharedMemory;
+ /** Total buffer size in frames */
+ uint32_t bufferSizeFrames;
+ /** Transfer size granularity in frames */
+ uint32_t burstSizeFrames;
+ /** Attributes describing the buffer. */
+ bitfield<MmapBufferFlag> flags;
+};
+
+/**
+ * Mmap buffer read/write position returned by IStream.getMmapPosition().
+ * Used by streams opened in mmap mode.
+ */
+struct MmapPosition {
+ int64_t timeNanoseconds; // time stamp in ns, CLOCK_MONOTONIC
+ int32_t positionFrames; // increasing 32 bit frame count reset when IStream.stop() is called
+};
+
+/**
+ * The message queue flags used to synchronize reads and writes from
+ * message queues used by StreamIn and StreamOut.
+ */
+enum MessageQueueFlagBits : uint32_t {
+ NOT_EMPTY = 1 << 0,
+ NOT_FULL = 1 << 1
+};
+
+/*
+ * Microphone information
+ *
+ */
+
+/**
+ * A 3D point used to represent position or orientation of a microphone.
+ *
+ * Position: Coordinates of the microphone's capsule, in meters, from the
+ * bottom-left-back corner of the bounding box of the Android device in natural
+ * orientation (PORTRAIT for phones, LANDSCAPE for tablets, TVs, etc).
+ * The orientation must match the one reported by the API Display.getRotation().
+ *
+ * Orientation: Normalized vector to signal the main orientation of the
+ * microphone's capsule. Magnitude = sqrt(x^2 + y^2 + z^2) = 1
+ */
+struct AudioMicrophoneCoordinate {
+ float x;
+ float y;
+ float z;
+};
+
+/**
+ * Enum to identify the type of channel mapping for active microphones.
+ * Used channels further identify whether the microphone applies any significant
+ * processing (e.g. high-pass filtering, dynamic compression).
+ * Simple processing such as constant gain adjustment must be reported as DIRECT.
+ */
+enum AudioMicrophoneChannelMapping : uint32_t {
+ UNUSED = 0, /* Channel not used */
+ DIRECT = 1, /* Channel used and signal not processed */
+ PROCESSED = 2, /* Channel used and signal has some process */
+};
+
+/**
+ * Enum to identify locations of microphones with regard to the body of the
+ * Android device.
+ */
+enum AudioMicrophoneLocation : uint32_t {
+ UNKNOWN = 0,
+ MAINBODY = 1,
+ MAINBODY_MOVABLE = 2,
+ PERIPHERAL = 3,
+};
+
+/**
+ * Identifier to help group related microphones together
+ * e.g. microphone arrays should belong to the same group
+ */
+typedef int32_t AudioMicrophoneGroup;
+
+/**
+ * Enum with standard polar patterns of microphones
+ */
+enum AudioMicrophoneDirectionality : uint32_t {
+ UNKNOWN = 0,
+ OMNI = 1,
+ BI_DIRECTIONAL = 2,
+ CARDIOID = 3,
+ HYPER_CARDIOID = 4,
+ SUPER_CARDIOID = 5,
+};
+
+/**
+ * A (frequency, level) pair. Used to represent frequency response.
+ */
+struct AudioFrequencyResponsePoint {
+ /** In Hz */
+ float frequency;
+ /** In dB */
+ float level;
+};
+
+/**
+ * Structure used by the HAL to describe microphone's characteristics
+ * Used by StreamIn and Device
+ */
+struct MicrophoneInfo {
+ /** Unique alphanumeric id for microphone. Guaranteed to be the same
+ * even after rebooting.
+ */
+ string deviceId;
+ /**
+ * Device specific information
+ */
+ DeviceAddress deviceAddress;
+ /** Each element of the vector must describe the channel with the same
+ * index.
+ */
+ vec<AudioMicrophoneChannelMapping> channelMapping;
+ /** Location of the microphone in regard to the body of the device */
+ AudioMicrophoneLocation location;
+ /** Identifier to help group related microphones together
+ * e.g. microphone arrays should belong to the same group
+ */
+ AudioMicrophoneGroup group;
+ /** Index of this microphone within the group.
+ * (group, index) must be unique within the same device.
+ */
+ uint32_t indexInTheGroup;
+ /** Level in dBFS produced by a 1000 Hz tone at 94 dB SPL */
+ float sensitivity;
+ /** Level in dB of the max SPL supported at 1000 Hz */
+ float maxSpl;
+ /** Level in dB of the min SPL supported at 1000 Hz */
+ float minSpl;
+ /** Standard polar pattern of the microphone */
+ AudioMicrophoneDirectionality directionality;
+ /** Frequency response of the microphone, given as a vector of points
+ * ordered from low to high frequencies.
+ * Levels are in dB, relative to the level at 1000 Hz.
+ */
+ vec<AudioFrequencyResponsePoint> frequencyResponse;
+ /** Position of the microphone's capsule in meters, from the
+ * bottom-left-back corner of the bounding box of device.
+ */
+ AudioMicrophoneCoordinate position;
+ /** Normalized point to signal the main orientation of the microphone's
+ * capsule. sqrt(x^2 + y^2 + z^2) = 1
+ */
+ AudioMicrophoneCoordinate orientation;
+};
+
+/**
+ * Constants used by the HAL to determine how to select microphones and process those inputs in
+ * order to optimize for capture in the specified direction.
+ *
+ * MicrophoneDirection Constants are defined in MicrophoneDirection.java.
+ */
+@export(name="audio_microphone_direction_t", value_prefix="MIC_DIRECTION_")
+enum MicrophoneDirection : int32_t {
+ /**
+ * Don't do any directionality processing of the activated microphone(s).
+ */
+ UNSPECIFIED = 0,
+ /**
+ * Optimize capture for audio coming from the screen-side of the device.
+ */
+ FRONT = 1,
+ /**
+ * Optimize capture for audio coming from the side of the device opposite the screen.
+ */
+ BACK = 2,
+ /**
+ * Optimize capture for audio coming from an off-device microphone.
+ */
+ EXTERNAL = 3,
+};
+
+
+/* Dual Mono handling is used when a stereo audio stream
+ * contains separate audio content on the left and right channels.
+ * Such information about the content of the stream may be found, for example,
+ * in ITU T-REC-J.94-201610 A.6.2.3 Component descriptor.
+ */
+@export(name="audio_dual_mono_mode_t", value_prefix="AUDIO_DUAL_MONO_MODE_")
+enum DualMonoMode : int32_t {
+ // Need to be in sync with DUAL_MONO_MODE* constants in
+ // frameworks/base/media/java/android/media/AudioTrack.java
+ /**
+ * Disable any Dual Mono presentation effect.
+ *
+ */
+ OFF = 0,
+ /**
+ * This mode indicates that a stereo stream should be presented
+ * with the left and right audio channels blended together
+ * and delivered to both channels.
+ *
+ * Behavior for non-stereo streams is implementation defined.
+ * A suggested guideline is that the left-right stereo symmetric
+ * channels are pairwise blended, the other channels such as center
+ * are left alone.
+ */
+ LR = 1,
+ /**
+ * This mode indicates that a stereo stream should be presented
+ * with the left audio channel replicated into the right audio channel.
+ *
+ * Behavior for non-stereo streams is implementation defined.
+ * A suggested guideline is that all channels with left-right
+ * stereo symmetry will have the left channel position replicated
+ * into the right channel position. The center channels (with no
+ * left/right symmetry) or unbalanced channels are left alone.
+ */
+ LL = 2,
+ /**
+ * This mode indicates that a stereo stream should be presented
+ * with the right audio channel replicated into the left audio channel.
+ *
+ * Behavior for non-stereo streams is implementation defined.
+ * A suggested guideline is that all channels with left-right
+ * stereo symmetry will have the right channel position replicated
+ * into the left channel position. The center channels (with no
+ * left/right symmetry) or unbalanced channels are left alone.
+ */
+ RR = 3,
+};
+
+/**
+ * Algorithms used for timestretching (preserving pitch while playing audio
+ * content at different speed).
+ */
+@export(name="audio_timestretch_stretch_mode_t", value_prefix="AUDIO_TIMESTRETCH_STRETCH_")
+enum TimestretchMode : int32_t {
+ // Need to be in sync with AUDIO_STRETCH_MODE_* constants in
+ // frameworks/base/media/java/android/media/PlaybackParams.java
+ DEFAULT = 0,
+ /** Selects timestretch algorithm best suitable for voice (speech) content. */
+ VOICE = 1,
+};
+
+/**
+ * Behavior when the values for speed and / or pitch are out
+ * of applicable range.
+ */
+@export(name="audio_timestretch_fallback_mode_t", value_prefix="AUDIO_TIMESTRETCH_FALLBACK_")
+enum TimestretchFallbackMode : int32_t {
+ // Need to be in sync with AUDIO_FALLBACK_MODE_* constants in
+ // frameworks/base/media/java/android/media/PlaybackParams.java
+ /** Play silence for parameter values that are out of range. */
+ MUTE = 1,
+ /** Return an error while trying to set the parameters. */
+ FAIL = 2,
+};
+
+/**
+ * Parameters determining playback behavior. They are used to speed up or
+ * slow down playback and / or change the tonal frequency of the audio content
+ * (pitch).
+ */
+struct PlaybackRate {
+ /**
+ * Speed factor (multiplier). Normal speed has the value of 1.0f.
+ * Values less than 1.0f slow down playback, values greater than 1.0f
+ * speed it up.
+ */
+ float speed;
+ /**
+ * Pitch factor (multiplier). Setting the pitch value to 1.0f together
+ * with changing playback speed preserves the pitch; this is often
+ * called "timestretching." Setting the pitch value equal to the speed produces
+ * the same effect as playing the audio content at a different sampling rate.
+ */
+ float pitch;
+ /**
+ * Selects the algorithm used for timestretching (preserving pitch while
+ * playing audio at different speed).
+ */
+ TimestretchMode timestretchMode;
+ /**
+ * Selects the behavior when the specified values for speed and / or pitch
+ * are out of applicable range.
+ */
+ TimestretchFallbackMode fallbackMode;
+};
+
+/**
+ * The audio output flags serve two purposes:
+ *
+ * - when an output stream is created they indicate its attributes;
+ *
+ * - when present in an output profile descriptor listed for a particular audio
+ * hardware module, they indicate that an output stream can be opened that
+ * supports the attributes indicated by the flags.
+ */
+@export(name="audio_output_flags_t", value_prefix="AUDIO_OUTPUT_FLAG_")
+enum AudioOutputFlag : int32_t {
+ NONE = 0x0, // no attributes
+ DIRECT = 0x1, // this output directly connects a track
+ // to one output stream: no software mixer
+ PRIMARY = 0x2, // this output is the primary output of the device. It is
+ // unique and must be present. It is opened by default and
+ // receives routing, audio mode and volume controls related
+ // to voice calls.
+ FAST = 0x4, // output supports "fast tracks", defined elsewhere
+ DEEP_BUFFER = 0x8, // use deep audio buffers
+ COMPRESS_OFFLOAD = 0x10, // offload playback of compressed streams to
+ // hardware codec
+ NON_BLOCKING = 0x20, // use non-blocking write
+ HW_AV_SYNC = 0x40, // output uses a hardware A/V sync
+ TTS = 0x80, // output for streams transmitted through speaker at a
+ // sample rate high enough to accommodate lower-range
+ // ultrasonic p/b
+ RAW = 0x100, // minimize signal processing
+ SYNC = 0x200, // synchronize I/O streams
+ IEC958_NONAUDIO = 0x400, // Audio stream contains compressed audio in SPDIF
+ // data bursts, not PCM.
+ DIRECT_PCM = 0x2000, // Audio stream containing PCM data that needs
+ // to pass through compress path for DSP post proc.
+ MMAP_NOIRQ = 0x4000, // output operates in MMAP no IRQ mode.
+ VOIP_RX = 0x8000, // preferred output for VoIP calls.
+ /** preferred output for call music */
+ INCALL_MUSIC = 0x10000,
+};
+
+/**
+ * The audio input flags are analogous to audio output flags.
+ */
+@export(name="audio_input_flags_t", value_prefix="AUDIO_INPUT_FLAG_")
+enum AudioInputFlag : int32_t {
+ NONE = 0x0, // no attributes
+ FAST = 0x1, // prefer an input that supports "fast tracks"
+ HW_HOTWORD = 0x2, // prefer an input that captures from hw hotword source
+ RAW = 0x4, // minimize signal processing
+ SYNC = 0x8, // synchronize I/O streams
+ MMAP_NOIRQ = 0x10, // input operates in MMAP no IRQ mode.
+ VOIP_TX = 0x20, // preferred input for VoIP calls.
+ HW_AV_SYNC = 0x40, // input connected to an output that uses a hardware A/V sync
+ DIRECT = 0x80, // for acquiring encoded streams
+};
diff --git a/audio/common/7.0/Android.bp b/audio/common/7.0/Android.bp
new file mode 100644
index 0000000..e24871c
--- /dev/null
+++ b/audio/common/7.0/Android.bp
@@ -0,0 +1,27 @@
+// This file is autogenerated by hidl-gen -Landroidbp.
+
+hidl_interface {
+ name: "android.hardware.audio.common@7.0",
+ root: "android.hardware",
+ srcs: [
+ "types.hal",
+ ],
+ interfaces: [
+ "android.hidl.safe_union@1.0",
+ ],
+ gen_java: true,
+ gen_java_constants: true,
+}
+
+cc_library {
+ name: "android.hardware.audio.common@7.0-enums",
+ vendor_available: true,
+ generated_sources: ["audio_policy_configuration_V7_0"],
+ generated_headers: ["audio_policy_configuration_V7_0"],
+ header_libs: ["libxsdc-utils"],
+ shared_libs: [
+ "libbase",
+ "liblog",
+ "libxml2",
+ ],
+}
diff --git a/audio/common/7.0/types.hal b/audio/common/7.0/types.hal
new file mode 100644
index 0000000..94d0af7
--- /dev/null
+++ b/audio/common/7.0/types.hal
@@ -0,0 +1,419 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.common@7.0;
+
+import android.hidl.safe_union@1.0;
+
+/**
+ * Handle type for identifying audio resources. Handles are allocated by the framework.
+ */
+typedef int32_t AudioIoHandle;
+
+/**
+ * Each port has a unique ID or handle allocated by policy manager.
+ */
+typedef int32_t AudioPortHandle;
+
+/**
+ * Each patch is identified by a handle allocated by the HAL.
+ */
+typedef int32_t AudioPatchHandle;
+
+/**
+ * A HW synchronization source returned by the audio HAL.
+ */
+typedef uint32_t AudioHwSync;
+
+/**
+ * Commonly used structure for passing unique identifiers (UUID).
+ * For the definition of UUID, refer to ITU-T X.667 spec.
+ */
+struct Uuid {
+ uint32_t timeLow;
+ uint16_t timeMid;
+ uint16_t versionAndTimeHigh;
+ uint16_t variantAndClockSeqHigh;
+ uint8_t[6] node;
+};
+
+
+/*
+ *
+ * Audio streams
+ *
+ */
+
+/**
+ * Audio stream type describing the intended use case of a stream.
+ * See 'audioStreamType' in audio_policy_configuration.xsd for the
+ * list of allowed values.
+ */
+typedef string AudioStreamType;
+
+/**
+ * An audio source defines the intended use case for the sound being recorded.
+ * See 'audioSource' in audio_policy_configuration.xsd for the
+ * list of allowed values.
+ */
+typedef string AudioSource;
+
+/**
+ * An audio session identifier is used to designate the particular
+ * playback or recording session (e.g. playback performed by a certain
+ * application).
+ */
+typedef int32_t AudioSession;
+
+enum AudioSessionConsts : int32_t {
+ /**
+ * Session for effects attached to a particular sink or source audio device
+ * (e.g an effect only applied to a speaker)
+ */
+ DEVICE = -2,
+ /**
+ * Session for effects attached to a particular output stream
+ * (value must be less than 0)
+ */
+ OUTPUT_STAGE = -1,
+ /**
+ * Session for effects applied to output mix. These effects can
+ * be moved by audio policy manager to another output stream
+ * (value must be 0)
+ */
+ OUTPUT_MIX = 0,
+};
+
+/**
+ * Audio format indicates audio codec type.
+ * See 'audioFormat' in audio_policy_configuration.xsd for the
+ * list of allowed values.
+ */
+typedef string AudioFormat;
+
+/**
+ * Audio channel mask indicates presence of particular channels.
+ * See 'audioChannelMask' in audio_policy_configuration.xsd for the
+ * list of allowed values.
+ */
+typedef string AudioChannelMask;
+
+/**
+ * Basic configuration applicable to any stream of audio.
+ */
+struct AudioBasicConfig {
+ uint32_t sampleRateHz; // 0 means 'unspecified'
+ vec<AudioChannelMask> channelMask; // empty means 'unspecified'
+ AudioFormat format; // 'DEFAULT' means 'unspecified'
+};
+
+/**
+ * Major modes for a mobile device. The current mode setting affects audio
+ * routing.
+ */
+@export(name="audio_mode_t", value_prefix="AUDIO_MODE_")
+enum AudioMode : int32_t {
+ NORMAL = 0,
+ RINGTONE = 1,
+ /** Calls handled by the telephony stack (Eg: PSTN). */
+ IN_CALL = 2,
+ /** Calls handled by apps (Eg: Hangout). */
+ IN_COMMUNICATION = 3,
+ /** Call screening in progress. */
+ CALL_SCREEN = 4,
+};
+
+/**
+ * Specifies a device address in case when several devices of the same type
+ * can be connected (e.g. BT A2DP, USB).
+ */
+struct DeviceAddress {
+ /**
+ * Audio device specifies type (or category) of audio I/O device
+ * (e.g. speaker or headphones).
+ * See 'audioDevice' in audio_policy_configuration.xsd for the
+ * list of allowed values.
+ */
+ string deviceType;
+ safe_union Address {
+ /**
+ * The address may be left unspecified if 'device' specifies
+ * a physical device unambiguously.
+ */
+ Monostate unspecified;
+ /** IEEE 802 MAC address. Set for Bluetooth devices. */
+ uint8_t[6] mac;
+ /** IPv4 Address. Set for IPv4 devices. */
+ uint8_t[4] ipv4;
+ /** IPv6 Address. Set for IPv6 devices. */
+ uint16_t[8] ipv6;
+ /** ALSA card and device number. Set for USB devices. */
+ struct Alsa {
+ int32_t card;
+ int32_t device;
+ } alsa;
+ /** Arbitrary BUS device unique address. Not interpreted by the framework. */
+ string bus;
+ /** Arbitrary REMOTE_SUBMIX device unique address. Not interpreted by the HAL. */
+ string rSubmix;
+ } address;
+};
+
+/**
+ * Audio usage specifies the intended use case for the sound being played.
+ * See 'audioUsage' in audio_policy_configuration.xsd for the
+ * list of allowed values.
+ */
+typedef string AudioUsage;
+
+/**
+ * Audio content type expresses the general category of the content.
+ * See 'audioContentType' in audio_policy_configuration.xsd for the
+ * list of allowed values.
+ */
+typedef string AudioContentType;
+
+/** Encapsulation mode used for sending audio compressed data. */
+@export(name="audio_encapsulation_mode_t", value_prefix="AUDIO_ENCAPSULATION_MODE_")
+enum AudioEncapsulationMode : int32_t {
+ // Do not change these values without updating their counterparts
+ // in frameworks/base/media/java/android/media/AudioTrack.java
+ /**
+ * No encapsulation mode for metadata.
+ */
+ NONE = 0,
+ /**
+ * Elementary stream payload with metadata
+ */
+ ELEMENTARY_STREAM = 1,
+ /**
+ * Handle-based payload with metadata
+ */
+ HANDLE = 2,
+};
+
+/**
+ * Additional information about the stream passed to hardware decoders.
+ */
+struct AudioOffloadInfo {
+ AudioBasicConfig base;
+ AudioStreamType streamType;
+ uint32_t bitRatePerSecond;
+ int64_t durationMicroseconds; // -1 if unknown
+ bool hasVideo;
+ bool isStreaming;
+ uint32_t bitWidth;
+ uint32_t bufferSize;
+ AudioUsage usage;
+ AudioEncapsulationMode encapsulationMode;
+ int32_t contentId;
+ int32_t syncId;
+};
+
+/**
+ * Commonly used audio stream configuration parameters.
+ */
+struct AudioConfig {
+ AudioBasicConfig base;
+ AudioOffloadInfo offloadInfo;
+ uint64_t frameCount;
+};
+
+/** Metadata of a playback track for a StreamOut. */
+struct PlaybackTrackMetadata {
+ AudioUsage usage;
+ AudioContentType contentType;
+ /**
+ * Positive linear gain applied to the track samples: 0 means muted, 1 means
+ * no attenuation, 2 means double amplification, and so on.
+ * Must not be negative.
+ */
+ float gain;
+};
+
+/** Metadata of the source of a StreamOut. */
+struct SourceMetadata {
+ vec<PlaybackTrackMetadata> tracks;
+};
+
+/** Metadata of a record track for a StreamIn. */
+struct RecordTrackMetadata {
+ AudioSource source;
+ /**
+ * Positive linear gain applied to the track samples: 0 means muted, 1 means
+ * no attenuation, 2 means double amplification, and so on.
+ * Must not be negative.
+ */
+ float gain;
+ /**
+ * Indicates the destination of an input stream, can be left unspecified.
+ */
+ safe_union Destination {
+ Monostate unspecified;
+ DeviceAddress device;
+ } destination;
+};
+
+/** Metadata of the sink of a StreamIn. */
+struct SinkMetadata {
+ vec<RecordTrackMetadata> tracks;
+};
+
+/*
+ *
+ * Volume control
+ *
+ */
+
+/**
+ * Type of gain control exposed by an audio port.
+ */
+@export(name="", value_prefix="AUDIO_GAIN_MODE_")
+enum AudioGainMode : uint32_t {
+ JOINT = 0x1, // supports joint channel gain control
+ CHANNELS = 0x2, // supports separate channel gain control
+ RAMP = 0x4 // supports gain ramps
+};
+
+/**
+ * An audio_gain struct is a representation of a gain stage.
+ * A gain stage is always attached to an audio port.
+ */
+struct AudioGain {
+ bitfield<AudioGainMode> mode;
+ vec<AudioChannelMask> channelMask; // channels whose gain can be controlled
+ int32_t minValue; // minimum gain value in millibels
+ int32_t maxValue; // maximum gain value in millibels
+ int32_t defaultValue; // default gain value in millibels
+ uint32_t stepValue; // gain step in millibels
+ uint32_t minRampMs; // minimum ramp duration in ms
+ uint32_t maxRampMs; // maximum ramp duration in ms
+};
+
+/**
+ * The gain configuration structure is used to get or set the gain values of a
+ * given port.
+ */
+struct AudioGainConfig {
+ int32_t index; // index of the corresponding AudioGain in AudioPort.gains
+ AudioGainMode mode;
+ vec<AudioChannelMask> channelMask; // channels whose gain values are given in 'values'
+ /**
+ * Gain values in millibels for each channel ordered from LSb to MSb in
+ * channel mask. The number of values is 1 in joint mode or
+ * popcount(channel_mask).
+ */
+ int32_t[4 * 8] values;
+ uint32_t rampDurationMs; // ramp duration in ms
+};
+
+
+/*
+ *
+ * Routing control
+ *
+ */
+
+/*
+ * Types defined here are used to describe an audio source or sink at internal
+ * framework interfaces (audio policy, patch panel) or at the audio HAL.
+ * Sinks and sources are grouped under the concept of an "audio port", representing
+ * an audio endpoint at the edge of the system managed by the module exposing
+ * the interface.
+ */
+
+/**
+ * A helper aggregate structure providing parameters that depend on the
+ * port role.
+ */
+safe_union AudioPortExtendedInfo {
+ /** Set when no information is provided. */
+ Monostate unspecified;
+ /** Set when the audio port is an audio device. */
+ DeviceAddress device;
+ /** Set when the audio port is a mix. The handle is of a stream. */
+ struct AudioPortMixExt {
+ /** I/O handle of the input/output stream. */
+ AudioIoHandle ioHandle;
+ safe_union UseCase {
+ /** Specified when the port is in the SOURCE role. */
+ AudioStreamType stream;
+ /** Specified when the port is in the SINK role. */
+ AudioSource source;
+ } useCase;
+ } mix;
+ /** Set when the audio port is an audio session. */
+ AudioSession session;
+};
+
+/**
+ * Audio port configuration structure used to specify a particular configuration
+ * of an audio port.
+ */
+struct AudioPortConfig {
+ /**
+ * The 'id' field is set when it is needed to select the port and
+ * apply new configuration for it.
+ */
+ AudioPortHandle id;
+ /**
+ * Basic parameters: sampling rate, format, channel mask. Only some of the
+ * parameters (or none) may be set. See the documentation of the
+ * AudioBasicConfig struct.
+ */
+ AudioBasicConfig config;
+ /** Associated gain control. */
+ safe_union OptionalGain {
+ Monostate unspecified;
+ AudioGainConfig config;
+ } gain;
+ /** Parameters that depend on the actual port role. */
+ AudioPortExtendedInfo ext;
+};
+
+/**
+ * Audio port structure describes the capabilities of an audio port
+ * as well as its current configuration.
+ */
+struct AudioPort {
+ /**
+ * Unique identifier of the port within this HAL service. When calling
+ * functions like IDevice.getAudioPort from the client side, it is allowed
+ * to specify only the 'id' and leave the other fields unspecified.
+ */
+ AudioPortHandle id;
+ /**
+ * Human-readable name describing the function of the port.
+ * E.g. "telephony_tx" or "fm_tuner".
+ */
+ string name;
+ /** List of audio profiles supported by the port. */
+ struct AudioProfile {
+ AudioFormat format;
+ /** List of the sample rates supported by the profile. */
+ vec<uint32_t> sampleRates;
+ /** List of channel masks supported by the profile. */
+ vec<AudioChannelMask> channelMasks;
+ };
+ vec<AudioProfile> profiles;
+ /** List of gain controls attached to the port. */
+ vec<AudioGain> gains;
+ /**
+ * Current configuration of the audio port, may have all the fields left
+ * unspecified.
+ */
+ AudioPortConfig activeConfig;
+};
diff --git a/audio/common/all-versions/copyHAL.sh b/audio/common/all-versions/copyHAL.sh
index 0a32a51..23e057a 100755
--- a/audio/common/all-versions/copyHAL.sh
+++ b/audio/common/all-versions/copyHAL.sh
@@ -16,6 +16,7 @@
readonly HAL_DIRECTORY=hardware/interfaces/audio
readonly HAL_VTS_DIRECTORY=core/all-versions/vts/functional
readonly HAL_VTS_FILE=AudioPrimaryHidlHalTest.cpp
+readonly HAL_EFFECT_VTS_DIRECTORY=effect/all-versions/vts/functional
readonly HAL_SERVICE_DIRECTORY=common/all-versions/default/service/
readonly HAL_SERVICE_CPP=service.cpp
@@ -25,7 +26,7 @@
readonly VTS_DIRECTORY=test/vts-testcase/hal/audio
readonly VTS_LIST=test/vts/tools/build/tasks/list/vts_test_lib_hidl_package_list.mk
-readonly WATCHDOG=frameworks/base/services/core/java/com/android/server/Watchdog.cpp
+readonly WATCHDOG=frameworks/base/services/core/java/com/android/server/Watchdog.java
readonly DUMP_UTILS=frameworks/native/libs/dumputils/dump_utils.cpp
readonly GSI_CURRENT=build/make/target/product/gsi/current.txt
@@ -45,6 +46,9 @@
readonly BASE_VERSION_ESCAPE="${BASE_MAJOR_VERSION}\.${BASE_MINOR_VERSION}"
readonly BASE_VERSION_UNDERSCORE="${BASE_MAJOR_VERSION}_${BASE_MINOR_VERSION}"
readonly NEW_VERSION_UNDERSCORE="${NEW_MAJOR_VERSION}_${NEW_MINOR_VERSION}"
+
+readonly HAL_VTS_CONFIG_FILE_GLOB="*Audio*V${BASE_VERSION_UNDERSCORE}*Test.xml"
+
updateVersion() {
if [ $1 == "-e" ]; then
local -r REGEX="$2"; shift 2
@@ -71,6 +75,10 @@
updateVersion -e "audio.*$BASE_VERSION_REGEX" "$@"
}
+updateAudioVtsTargetVersion() {
+ updateVersion -e "Audio.*V$BASE_VERSION_REGEX" "$@"
+}
+
updateLicenceDates() {
# Update date on the 2 first lines
sed -i "1,2 s/20[0-9][0-9]/$(date +"%Y")/g" "$@"
@@ -101,9 +109,16 @@
cp -Tar $DIR/$BASE_VERSION $DIR/$NEW_VERSION
COPY+=" $DIR/$NEW_VERSION"
done
+ local COPY_FILES_TO=
+ for FILE_FROM in $(find . -type f -name "$HAL_VTS_CONFIG_FILE_GLOB"); do
+ local FILE_TO=${FILE_FROM/$BASE_VERSION_UNDERSCORE/$NEW_VERSION_UNDERSCORE}
+ cp "$FILE_FROM" "$FILE_TO"
+ COPY_FILES_TO+=" $FILE_TO"
+ done
echo "Replacing $BASE_VERSION by $NEW_VERSION in the copied files"
updateVersion $(find $COPY -type f)
+ updateVersion $COPY_FILES_TO
updateLicenceDates $(find $COPY -type f)
echo "Update implementation and VTS generic code"
@@ -156,18 +171,12 @@
echo "Now creating the framework adapter version"
runIfNeeded $FWK_DIRECTORY createFrameworkAdapter
-createVTSXML() {
- cp -Tar V$BASE_VERSION_UNDERSCORE V$NEW_VERSION_UNDERSCORE
- cp -Tar effect/{V$BASE_VERSION_UNDERSCORE,V$NEW_VERSION_UNDERSCORE}
- local -r FILES=$(find {.,effect}/V$NEW_VERSION_UNDERSCORE -type f)
- updateVersion $FILES
- updateLicenceDates $FILES
-}
-echo "Now update VTS XML"
-runIfNeeded $VTS_DIRECTORY createVTSXML
-
echo "Now register new VTS"
+PREV_MODIFIED="$MODIFIED"
runIfNeeded $(dirname $VTS_LIST) updateAudioVersion -v original_before=1 $(basename $VTS_LIST)
+if [[ "$PREV_MODIFIED" != "$MODIFIED" ]]; then
+ updateAudioVtsTargetVersion -v original_after=1 $(basename $VTS_LIST)
+fi
echo "Now update watchdog"
runIfNeeded $(dirname $WATCHDOG) updateAudioVersion -v original_before=1 $(basename $WATCHDOG)
diff --git a/audio/common/all-versions/default/Android.bp b/audio/common/all-versions/default/Android.bp
index 0eb4a71..a72c8dc 100644
--- a/audio/common/all-versions/default/Android.bp
+++ b/audio/common/all-versions/default/Android.bp
@@ -36,7 +36,7 @@
],
export_header_lib_headers: [
"android.hardware.audio.common.util@all-versions",
- ]
+ ],
}
cc_defaults {
@@ -57,7 +57,7 @@
"android.hardware.audio.common-util",
],
export_shared_lib_headers: [
- "android.hardware.audio.common-util"
+ "android.hardware.audio.common-util",
],
header_libs: [
@@ -76,7 +76,7 @@
"-DMAJOR_VERSION=2",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
}
cc_library_shared {
@@ -89,7 +89,7 @@
"-DMAJOR_VERSION=4",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
}
cc_library_shared {
@@ -102,7 +102,7 @@
"-DMAJOR_VERSION=5",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
}
cc_library_shared {
@@ -115,5 +115,19 @@
"-DMAJOR_VERSION=6",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
+}
+
+cc_library_shared {
+ enabled: false,
+ name: "android.hardware.audio.common@7.0-util",
+ defaults: ["android.hardware.audio.common-util_default"],
+ shared_libs: [
+ "android.hardware.audio.common@7.0",
+ ],
+ cflags: [
+ "-DMAJOR_VERSION=7",
+ "-DMINOR_VERSION=0",
+ "-include common/all-versions/VersionMacro.h",
+ ],
}
diff --git a/audio/core/all-versions/default/Android.bp b/audio/core/all-versions/default/Android.bp
index 8fdb70d..6be0628 100644
--- a/audio/core/all-versions/default/Android.bp
+++ b/audio/core/all-versions/default/Android.bp
@@ -123,3 +123,19 @@
name: "android.hardware.audio@6.0-impl",
defaults: ["android.hardware.audio@6.0-impl_default"],
}
+
+cc_library_shared {
+ enabled: false,
+ name: "android.hardware.audio@7.0-impl",
+ defaults: ["android.hardware.audio-impl_default"],
+ shared_libs: [
+ "android.hardware.audio@7.0",
+ "android.hardware.audio.common@7.0",
+ "android.hardware.audio.common@7.0-util",
+ ],
+ cflags: [
+ "-DMAJOR_VERSION=7",
+ "-DMINOR_VERSION=0",
+ "-include common/all-versions/VersionMacro.h",
+ ],
+}
diff --git a/audio/core/all-versions/vts/functional/7.0/AudioPrimaryHidlHalTest.cpp b/audio/core/all-versions/vts/functional/7.0/AudioPrimaryHidlHalTest.cpp
new file mode 100644
index 0000000..33efa6f
--- /dev/null
+++ b/audio/core/all-versions/vts/functional/7.0/AudioPrimaryHidlHalTest.cpp
@@ -0,0 +1,18 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// pull in all the <= 6.0 tests
+#include "6.0/AudioPrimaryHidlHalTest.cpp"
diff --git a/audio/core/all-versions/vts/functional/Android.bp b/audio/core/all-versions/vts/functional/Android.bp
index 2d5e8a5..6ac9b20 100644
--- a/audio/core/all-versions/vts/functional/Android.bp
+++ b/audio/core/all-versions/vts/functional/Android.bp
@@ -128,3 +128,27 @@
// TODO(b/146104851): Add auto-gen rules and remove it.
test_config: "VtsHalAudioV6_0TargetTest.xml",
}
+
+cc_test {
+ enabled: false,
+ name: "VtsHalAudioV7_0TargetTest",
+ defaults: ["VtsHalAudioTargetTest_defaults"],
+ srcs: [
+ "7.0/AudioPrimaryHidlHalTest.cpp",
+ ],
+ static_libs: [
+ "android.hardware.audio@7.0",
+ "android.hardware.audio.common@7.0",
+ ],
+ cflags: [
+ "-DMAJOR_VERSION=7",
+ "-DMINOR_VERSION=0",
+ "-include common/all-versions/VersionMacro.h",
+ ],
+ data: [
+ ":audio_policy_configuration_V7_0",
+ ],
+ // Use test_config for vts suite.
+ // TODO(b/146104851): Add auto-gen rules and remove it.
+ test_config: "VtsHalAudioV7_0TargetTest.xml",
+}
diff --git a/audio/core/all-versions/vts/functional/VtsHalAudioV7_0TargetTest.xml b/audio/core/all-versions/vts/functional/VtsHalAudioV7_0TargetTest.xml
new file mode 100644
index 0000000..6635f31
--- /dev/null
+++ b/audio/core/all-versions/vts/functional/VtsHalAudioV7_0TargetTest.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Runs VtsHalAudioV7_0TargetTest.">
+ <option name="test-suite-tag" value="apct" />
+ <option name="test-suite-tag" value="apct-native" />
+
+ <target_preparer class="com.android.tradefed.targetprep.RootTargetPreparer"/>
+ <target_preparer class="com.android.tradefed.targetprep.StopServicesSetup"/>
+
+ <target_preparer class="com.android.tradefed.targetprep.RunCommandTargetPreparer">
+ <option name="run-command" value="setprop vts.native_server.on 1"/>
+ <option name="teardown-command" value="setprop vts.native_server.on 0"/>
+ </target_preparer>
+
+ <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+ <option name="cleanup" value="true" />
+ <option name="push" value="VtsHalAudioV7_0TargetTest->/data/local/tmp/VtsHalAudioV7_0TargetTest" />
+ <option name="push" value="audio_policy_configuration_V7_0.xsd->/data/local/tmp/audio_policy_configuration_V7_0.xsd" />
+ </target_preparer>
+
+ <test class="com.android.tradefed.testtype.GTest" >
+ <option name="native-test-device-path" value="/data/local/tmp" />
+ <option name="module-name" value="VtsHalAudioV7_0TargetTest" />
+ </test>
+</configuration>
diff --git a/audio/effect/7.0/Android.bp b/audio/effect/7.0/Android.bp
new file mode 100644
index 0000000..c113782
--- /dev/null
+++ b/audio/effect/7.0/Android.bp
@@ -0,0 +1,30 @@
+// This file is autogenerated by hidl-gen -Landroidbp.
+
+hidl_interface {
+ name: "android.hardware.audio.effect@7.0",
+ root: "android.hardware",
+ srcs: [
+ "types.hal",
+ "IAcousticEchoCancelerEffect.hal",
+ "IAutomaticGainControlEffect.hal",
+ "IBassBoostEffect.hal",
+ "IDownmixEffect.hal",
+ "IEffect.hal",
+ "IEffectBufferProviderCallback.hal",
+ "IEffectsFactory.hal",
+ "IEnvironmentalReverbEffect.hal",
+ "IEqualizerEffect.hal",
+ "ILoudnessEnhancerEffect.hal",
+ "INoiseSuppressionEffect.hal",
+ "IPresetReverbEffect.hal",
+ "IVirtualizerEffect.hal",
+ "IVisualizerEffect.hal",
+ ],
+ interfaces: [
+ "android.hardware.audio.common@7.0",
+ "android.hidl.base@1.0",
+ "android.hidl.safe_union@1.0",
+ ],
+ gen_java: false,
+ gen_java_constants: true,
+}
diff --git a/audio/effect/7.0/IAcousticEchoCancelerEffect.hal b/audio/effect/7.0/IAcousticEchoCancelerEffect.hal
new file mode 100644
index 0000000..2bc2a7f
--- /dev/null
+++ b/audio/effect/7.0/IAcousticEchoCancelerEffect.hal
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+import android.hardware.audio.common@7.0;
+import IEffect;
+
+interface IAcousticEchoCancelerEffect extends IEffect {
+ /**
+ * Sets echo delay value in milliseconds.
+ */
+ setEchoDelay(uint32_t echoDelayMs) generates (Result retval);
+
+ /**
+ * Gets echo delay value in milliseconds.
+ */
+ getEchoDelay() generates (Result retval, uint32_t echoDelayMs);
+};
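
For orientation, a minimal client-side sketch of driving this interface through the generated C++ proxy (header path per the usual hidl-gen layout; the 25 ms value and helper name are illustrative). Single-value 'generates' clauses map to Return<Result>, multi-value ones to a synchronous completion callback:

#include <android/hardware/audio/effect/7.0/IAcousticEchoCancelerEffect.h>

using ::android::sp;
using ::android::hardware::Return;
using ::android::hardware::audio::effect::V7_0::IAcousticEchoCancelerEffect;
using ::android::hardware::audio::effect::V7_0::Result;

// Sets and reads back the echo delay on an already-created AEC effect instance.
bool configureEchoDelay(const sp<IAcousticEchoCancelerEffect>& aec) {
    Return<Result> setRet = aec->setEchoDelay(25 /* echoDelayMs, illustrative */);
    if (!setRet.isOk() || static_cast<Result>(setRet) != Result::OK) return false;

    Result status = Result::NOT_INITIALIZED;
    uint32_t delayMs = 0;
    // Multi-value 'generates' clauses are delivered via a synchronous callback.
    auto getRet = aec->getEchoDelay([&](Result r, uint32_t echoDelayMs) {
        status = r;
        delayMs = echoDelayMs;
    });
    return getRet.isOk() && status == Result::OK && delayMs == 25;
}
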
diff --git a/audio/effect/7.0/IAutomaticGainControlEffect.hal b/audio/effect/7.0/IAutomaticGainControlEffect.hal
new file mode 100644
index 0000000..8ffa659
--- /dev/null
+++ b/audio/effect/7.0/IAutomaticGainControlEffect.hal
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+import android.hardware.audio.common@7.0;
+import IEffect;
+
+interface IAutomaticGainControlEffect extends IEffect {
+ /**
+ * Sets target level in millibels.
+ */
+ setTargetLevel(int16_t targetLevelMb) generates (Result retval);
+
+ /**
+ * Gets target level.
+ */
+ getTargetLevel() generates (Result retval, int16_t targetLevelMb);
+
+ /**
+ * Sets gain in the compression range in millibels.
+ */
+ setCompGain(int16_t compGainMb) generates (Result retval);
+
+ /**
+ * Gets gain in the compression range.
+ */
+ getCompGain() generates (Result retval, int16_t compGainMb);
+
+ /**
+ * Enables or disables limiter.
+ */
+ setLimiterEnabled(bool enabled) generates (Result retval);
+
+ /**
+ * Returns whether limiter is enabled.
+ */
+ isLimiterEnabled() generates (Result retval, bool enabled);
+
+ struct AllProperties {
+ int16_t targetLevelMb;
+ int16_t compGainMb;
+ bool limiterEnabled;
+ };
+
+ /**
+ * Sets all properties at once.
+ */
+ setAllProperties(AllProperties properties) generates (Result retval);
+
+ /**
+ * Gets all properties at once.
+ */
+ getAllProperties() generates (Result retval, AllProperties properties);
+};
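
The AllProperties bundle becomes a nested struct on the generated interface class. A sketch of applying all three settings in one call; the gain values are placeholders:

#include <android/hardware/audio/effect/7.0/IAutomaticGainControlEffect.h>

using ::android::sp;
using ::android::hardware::audio::effect::V7_0::IAutomaticGainControlEffect;
using ::android::hardware::audio::effect::V7_0::Result;

// Applies target level, compression gain and limiter state in a single call.
bool applyAgcSettings(const sp<IAutomaticGainControlEffect>& agc) {
    IAutomaticGainControlEffect::AllProperties props;
    props.targetLevelMb = -3000;   // -30 dB target level (placeholder)
    props.compGainMb = 900;        // 9 dB compression gain (placeholder)
    props.limiterEnabled = true;
    auto ret = agc->setAllProperties(props);
    return ret.isOk() && static_cast<Result>(ret) == Result::OK;
}
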
diff --git a/audio/effect/7.0/IBassBoostEffect.hal b/audio/effect/7.0/IBassBoostEffect.hal
new file mode 100644
index 0000000..d8d049e
--- /dev/null
+++ b/audio/effect/7.0/IBassBoostEffect.hal
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+import android.hardware.audio.common@7.0;
+import IEffect;
+
+interface IBassBoostEffect extends IEffect {
+ /**
+ * Returns whether setting bass boost strength is supported.
+ */
+ isStrengthSupported() generates (Result retval, bool strengthSupported);
+
+ enum StrengthRange : uint16_t {
+ MIN = 0,
+ MAX = 1000
+ };
+
+ /**
+ * Sets bass boost strength.
+ *
+ * @param strength strength of the effect. The valid range for strength
+ * is [0, 1000], where 0 per mille designates the
+ * mildest effect and 1000 per mille designates the
+ * strongest.
+ * @return retval operation completion status.
+ */
+ setStrength(uint16_t strength) generates (Result retval);
+
+ /**
+ * Gets bass boost strength.
+ */
+ getStrength() generates (Result retval, uint16_t strength);
+};
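
A sketch of the intended usage pattern: probe isStrengthSupported first, clamp the request to the nested StrengthRange bounds, then apply it. The function name and requested value are illustrative:

#include <algorithm>
#include <android/hardware/audio/effect/7.0/IBassBoostEffect.h>

using ::android::sp;
using ::android::hardware::audio::effect::V7_0::IBassBoostEffect;
using ::android::hardware::audio::effect::V7_0::Result;

// Checks for strength support, clamps the request to [MIN, MAX] and applies it.
bool setBassBoost(const sp<IBassBoostEffect>& bb, int requested) {
    bool supported = false;
    auto supRet = bb->isStrengthSupported([&](Result r, bool s) {
        supported = (r == Result::OK) && s;
    });
    if (!supRet.isOk() || !supported) return false;

    const int kMax = static_cast<int>(IBassBoostEffect::StrengthRange::MAX);
    const uint16_t strength = static_cast<uint16_t>(std::clamp(requested, 0, kMax));
    auto ret = bb->setStrength(strength);
    return ret.isOk() && static_cast<Result>(ret) == Result::OK;
}
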
diff --git a/audio/effect/7.0/IDownmixEffect.hal b/audio/effect/7.0/IDownmixEffect.hal
new file mode 100644
index 0000000..2035430
--- /dev/null
+++ b/audio/effect/7.0/IDownmixEffect.hal
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+import android.hardware.audio.common@7.0;
+import IEffect;
+
+interface IDownmixEffect extends IEffect {
+ enum Type : int32_t {
+ STRIP, // throw away the extra channels
+ FOLD // mix the extra channels with FL/FR
+ };
+
+ /**
+ * Sets the current downmix preset.
+ */
+ setType(Type preset) generates (Result retval);
+
+ /**
+ * Gets the current downmix preset.
+ */
+ getType() generates (Result retval, Type preset);
+};
diff --git a/audio/effect/7.0/IEffect.hal b/audio/effect/7.0/IEffect.hal
new file mode 100644
index 0000000..aa94f6d
--- /dev/null
+++ b/audio/effect/7.0/IEffect.hal
@@ -0,0 +1,417 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+import android.hardware.audio.common@7.0;
+import IEffectBufferProviderCallback;
+
+interface IEffect {
+ /**
+ * Initialize effect engine--all configurations return to default.
+ *
+ * @return retval operation completion status.
+ */
+ @entry
+ init() generates (Result retval);
+
+ /**
+ * Apply new audio parameters configurations for input and output buffers.
+ * The provider callbacks may be empty, but in this case the buffer
+ * must be provided in the EffectConfig structure.
+ *
+ * @param config configuration descriptor.
+ * @param inputBufferProvider optional buffer provider reference.
+ * @param outputBufferProvider optional buffer provider reference.
+ * @return retval operation completion status.
+ */
+ setConfig(EffectConfig config,
+ IEffectBufferProviderCallback inputBufferProvider,
+ IEffectBufferProviderCallback outputBufferProvider)
+ generates (Result retval);
+
+ /**
+ * Reset the effect engine. Keep configuration but resets state and buffer
+ * content.
+ *
+ * @return retval operation completion status.
+ */
+ reset() generates (Result retval);
+
+ /**
+ * Enable processing.
+ *
+ * @return retval operation completion status.
+ */
+ enable() generates (Result retval);
+
+ /**
+ * Disable processing.
+ *
+ * @return retval operation completion status.
+ */
+ disable() generates (Result retval);
+
+ /**
+ * Set the rendering device the audio output path is connected to. The
+ * effect implementation must set EFFECT_FLAG_DEVICE_IND flag in its
+ * descriptor to receive this command when the device changes.
+ *
+ * Note: this method is only supported for effects inserted into
+ * the output chain.
+ *
+ * @param device output device specification.
+ * @return retval operation completion status.
+ */
+ setDevice(DeviceAddress device) generates (Result retval);
+
+ /**
+ * Set and get volume. Used by audio framework to delegate volume control to
+ * effect engine. The effect implementation must set EFFECT_FLAG_VOLUME_CTRL
+ * flag in its descriptor to receive this command. The effect engine must
+ * return the volume that should be applied before the effect is
+ * processed. The overall volume (the volume actually applied by the effect
+ * engine multiplied by the returned value) should match the value indicated
+ * in the command.
+ *
+ * @param volumes vector containing volume for each channel defined in
+ * EffectConfig for output buffer expressed in 8.24 fixed
+ * point format.
+ * @return result updated volume values.
+ * @return retval operation completion status.
+ */
+ setAndGetVolume(vec<uint32_t> volumes)
+ generates (Result retval, vec<uint32_t> result);
+
+ /**
+ * Notify the effect of the volume change. The effect implementation must
+ * set EFFECT_FLAG_VOLUME_IND flag in its descriptor to receive this
+ * command.
+ *
+ * @param volumes vector containing volume for each channel defined in
+ * EffectConfig for output buffer expressed in 8.24 fixed
+ * point format.
+ * @return retval operation completion status.
+ */
+ volumeChangeNotification(vec<uint32_t> volumes)
+ generates (Result retval);
+
+ /**
+ * Set the audio mode. The effect implementation must set
+ * EFFECT_FLAG_AUDIO_MODE_IND flag in its descriptor to receive this command
+ * when the audio mode changes.
+ *
+ * @param mode desired audio mode.
+ * @return retval operation completion status.
+ */
+ setAudioMode(AudioMode mode) generates (Result retval);
+
+ /**
+ * Apply new audio parameters configurations for input and output buffers of
+ * reverse stream. An example of reverse stream is the echo reference
+ * supplied to an Acoustic Echo Canceler.
+ *
+ * @param config configuration descriptor.
+ * @param inputBufferProvider optional buffer provider reference.
+ * @param outputBufferProvider optional buffer provider reference.
+ * @return retval operation completion status.
+ */
+ setConfigReverse(EffectConfig config,
+ IEffectBufferProviderCallback inputBufferProvider,
+ IEffectBufferProviderCallback outputBufferProvider)
+ generates (Result retval);
+
+ /**
+ * Set the capture device the audio input path is connected to. The effect
+ * implementation must set EFFECT_FLAG_DEVICE_IND flag in its descriptor to
+ * receive this command when the device changes.
+ *
+ * Note: this method is only supported for effects inserted into
+ * the input chain.
+ *
+ * @param device input device specification.
+ * @return retval operation completion status.
+ */
+ setInputDevice(DeviceAddress device) generates (Result retval);
+
+ /**
+ * Read audio parameters configurations for input and output buffers.
+ *
+ * @return retval operation completion status.
+ * @return config configuration descriptor.
+ */
+ getConfig() generates (Result retval, EffectConfig config);
+
+ /**
+ * Read audio parameters configurations for input and output buffers of
+ * reverse stream.
+ *
+ * @return retval operation completion status.
+ * @return config configuration descriptor.
+ */
+ getConfigReverse() generates (Result retval, EffectConfig config);
+
+ /**
+ * Queries for supported combinations of main and auxiliary channels
+ * (e.g. for a multi-microphone noise suppressor).
+ *
+ * @param maxConfigs maximum number of the combinations to return.
+ * @return retval absence of the feature support is indicated using
+ * NOT_SUPPORTED code. RESULT_TOO_BIG is returned if
+ * the number of supported combinations exceeds 'maxConfigs'.
+ * @return result list of configuration descriptors.
+ */
+ getSupportedAuxChannelsConfigs(uint32_t maxConfigs)
+ generates (Result retval, vec<EffectAuxChannelsConfig> result);
+
+ /**
+ * Retrieves the current configuration of main and auxiliary channels.
+ *
+ * @return retval absence of the feature support is indicated using
+ * NOT_SUPPORTED code.
+ * @return result configuration descriptor.
+ */
+ getAuxChannelsConfig()
+ generates (Result retval, EffectAuxChannelsConfig result);
+
+ /**
+ * Sets the current configuration of main and auxiliary channels.
+ *
+ * @return retval operation completion status; absence of the feature
+ * support is indicated using NOT_SUPPORTED code.
+ */
+ setAuxChannelsConfig(EffectAuxChannelsConfig config)
+ generates (Result retval);
+
+ /**
+ * Set the audio source the capture path is configured for (Camcorder, voice
+ * recognition...).
+ *
+ * Note: this method is only supported for effects inserted into
+ * the input chain.
+ *
+ * @param source source descriptor.
+ * @return retval operation completion status.
+ */
+ setAudioSource(AudioSource source) generates (Result retval);
+
+ /**
+ * This command indicates if the playback thread the effect is attached to
+ * is offloaded or not, and updates the I/O handle of the playback thread
+ * the effect is attached to.
+ *
+ * @param param effect offload descriptor.
+ * @return retval operation completion status.
+ */
+ offload(EffectOffloadParameter param) generates (Result retval);
+
+ /**
+ * Returns the effect descriptor.
+ *
+ * @return retval operation completion status.
+ * @return descriptor effect descriptor.
+ */
+ getDescriptor() generates (Result retval, EffectDescriptor descriptor);
+
+ /**
+ * Set up required transports for passing audio buffers to the effect.
+ *
+ * The transport consists of shared memory and a message queue for reporting
+ * effect processing operation status. The shared memory is set up
+ * separately using 'setProcessBuffers' method.
+ *
+ * Processing is requested by setting 'REQUEST_PROCESS' or
+ * 'REQUEST_PROCESS_REVERSE' EventFlags associated with the status message
+ * queue. The result of processing may be one of the following:
+ * OK if there were no errors during processing;
+ * INVALID_ARGUMENTS if audio buffers are invalid;
+ * INVALID_STATE if the engine has finished the disable phase;
+ * NOT_INITIALIZED if the audio buffers were not set;
+ * NOT_SUPPORTED if the requested processing type is not supported by
+ * the effect.
+ *
+ * @return retval OK if both message queues were created successfully.
+ * INVALID_STATE if the method was already called.
+ * INVALID_ARGUMENTS if there was a problem setting up
+ * the queue.
+ * @return statusMQ a message queue used for passing status from the effect.
+ */
+ prepareForProcessing() generates (Result retval, fmq_sync<Result> statusMQ);
+
+ /**
+ * Set up input and output buffers for processing audio data. The effect
+ * may modify both the input and the output buffer during the operation.
+ * Buffers may be set multiple times during effect lifetime.
+ *
+ * The input and the output buffer may be reused between different effects,
+ * and the input buffer may be used as an output buffer. Buffers are
+ * distinguished using 'AudioBuffer.id' field.
+ *
+ * @param inBuffer input audio buffer.
+ * @param outBuffer output audio buffer.
+ * @return retval OK if both buffers were mapped successfully.
+ * INVALID_ARGUMENTS if there was a problem with mapping
+ * any of the buffers.
+ */
+ setProcessBuffers(AudioBuffer inBuffer, AudioBuffer outBuffer)
+ generates (Result retval);
+
+ /**
+ * Execute a vendor specific command on the effect. The command code
+ * and data, as well as result data are not interpreted by Android
+ * Framework and are passed as-is between the application and the effect.
+ *
+ * The effect must use standard POSIX.1-2001 error codes for the operation
+ * completion status.
+ *
+ * Use this method only if the effect is provided by a third party, and
+ * there is no interface defined for it. This method only works for effects
+ * implemented in software.
+ *
+ * @param commandId the ID of the command.
+ * @param data command data.
+ * @param resultMaxSize maximum size in bytes of the result; can be 0.
+ * @return status command completion status.
+ * @return result result data.
+ */
+ command(uint32_t commandId, vec<uint8_t> data, uint32_t resultMaxSize)
+ generates (int32_t status, vec<uint8_t> result);
+
+ /**
+ * Set a vendor-specific parameter and apply it immediately. The parameter
+ * code and data are not interpreted by Android Framework and are passed
+ * as-is between the application and the effect.
+ *
+ * The effect must use INVALID_ARGUMENTS return code if the parameter ID is
+ * unknown or if provided parameter data is invalid. If the effect does not
+ * support setting vendor-specific parameters, it must return NOT_SUPPORTED.
+ *
+ * Use this method only if the effect is provided by a third party, and
+ * there is no interface defined for it. This method only works for effects
+ * implemented in software.
+ *
+ * @param parameter identifying data of the parameter.
+ * @param value the value of the parameter.
+ * @return retval operation completion status.
+ */
+ setParameter(vec<uint8_t> parameter, vec<uint8_t> value)
+ generates (Result retval);
+
+ /**
+ * Get a vendor-specific parameter value. The parameter code and returned
+ * data are not interpreted by Android Framework and are passed as-is
+ * between the application and the effect.
+ *
+ * The effect must use INVALID_ARGUMENTS return code if the parameter ID is
+ * unknown. If the effect does not support setting vendor-specific
+ * parameters, it must return NOT_SUPPORTED.
+ *
+ * Use this method only if the effect is provided by a third party, and
+ * there is no interface defined for it. This method only works for effects
+ * implemented in software.
+ *
+ * @param parameter identifying data of the parameter.
+ * @param valueMaxSize maximum size in bytes of the value.
+ * @return retval operation completion status.
+ * @return result the value of the parameter.
+ */
+ getParameter(vec<uint8_t> parameter, uint32_t valueMaxSize)
+ generates (Result retval, vec<uint8_t> value);
+
+ /**
+ * Get supported configs for a vendor-specific feature. The configs returned
+ * are not interpreted by Android Framework and are passed as-is between the
+ * application and the effect.
+ *
+ * The effect must use INVALID_ARGUMENTS return code if the feature ID is
+ * unknown. If the effect does not support getting vendor-specific feature
+ * configs, it must return NOT_SUPPORTED. If the feature is supported but
+ * the total number of supported configurations exceeds the maximum number
+ * indicated by the caller, the method must return RESULT_TOO_BIG.
+ *
+ * Use this method only if the effect is provided by a third party, and
+ * there is no interface defined for it. This method only works for effects
+ * implemented in software.
+ *
+ * @param featureId feature identifier.
+ * @param maxConfigs maximum number of configs to return.
+ * @param configSize size of each config in bytes.
+ * @return retval operation completion status.
+ * @return configsCount number of configs returned.
+ * @return configsData data for all the configs returned.
+ */
+ getSupportedConfigsForFeature(
+ uint32_t featureId,
+ uint32_t maxConfigs,
+ uint32_t configSize) generates (
+ Result retval,
+ uint32_t configsCount,
+ vec<uint8_t> configsData);
+
+ /**
+ * Get the current config for a vendor-specific feature. The config returned
+ * is not interpreted by Android Framework and is passed as-is between the
+ * application and the effect.
+ *
+ * The effect must use INVALID_ARGUMENTS return code if the feature ID is
+ * unknown. If the effect does not support getting vendor-specific
+ * feature configs, it must return NOT_SUPPORTED.
+ *
+ * Use this method only if the effect is provided by a third party, and
+ * there is no interface defined for it. This method only works for effects
+ * implemented in software.
+ *
+ * @param featureId feature identifier.
+ * @param configSize size of the config in bytes.
+ * @return retval operation completion status.
+ * @return configData config data.
+ */
+ getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize)
+ generates (Result retval, vec<uint8_t> configData);
+
+ /**
+ * Set the current config for a vendor-specific feature. The config data
+ * is not interpreted by Android Framework and is passed as-is between the
+ * application and the effect.
+ *
+ * The effect must use INVALID_ARGUMENTS return code if the feature ID is
+ * unknown. If the effect does not support getting vendor-specific
+ * feature configs, it must return NOT_SUPPORTED.
+ *
+ * Use this method only if the effect is provided by a third party, and
+ * there is no interface defined for it. This method only works for effects
+ * implemented in software.
+ *
+ * @param featureId feature identifier.
+ * @param configData config data.
+ * @return retval operation completion status.
+ */
+ setCurrentConfigForFeature(uint32_t featureId, vec<uint8_t> configData)
+ generates (Result retval);
+
+ /**
+ * Called by the framework to deinitialize the effect and free up
+ * all currently allocated resources. It is recommended to close
+ * the effect on the client side as soon as it becomes unused.
+ *
+ * The client must ensure that this function is not called while
+ * audio data is being transferred through the effect's message queues.
+ *
+ * @return retval OK in case of success.
+ * INVALID_STATE if the effect was already closed.
+ */
+ close() generates (Result retval);
+};
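
A bare-bones lifecycle sketch against the generated proxy: initialize, enable, delegate a stereo volume, then disable and close. The FMQ-based processing path (prepareForProcessing/setProcessBuffers) is deliberately omitted, and the 'ok' helper is hypothetical:

#include <android/hardware/audio/effect/7.0/IEffect.h>

using ::android::sp;
using ::android::hardware::hidl_vec;
using ::android::hardware::Return;
using ::android::hardware::audio::effect::V7_0::IEffect;
using ::android::hardware::audio::effect::V7_0::Result;

// Hypothetical helper: true when both the transport and the HAL report success.
static bool ok(Return<Result>&& ret) {
    return ret.isOk() && static_cast<Result>(ret) == Result::OK;
}

// Minimal lifecycle: init, enable, delegate a stereo volume, disable, close.
bool runEffectLifecycle(const sp<IEffect>& effect) {
    if (!ok(effect->init()) || !ok(effect->enable())) return false;

    hidl_vec<uint32_t> volumes = {1u << 24, 1u << 24};  // unity gain in 8.24 fixed point
    auto volRet = effect->setAndGetVolume(volumes,
            [](Result r, const hidl_vec<uint32_t>& toApply) {
                // 'toApply' is the volume the caller must apply before the effect runs.
                (void)r; (void)toApply;
            });
    if (!volRet.isOk()) return false;

    return ok(effect->disable()) && ok(effect->close());
}
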
diff --git a/audio/effect/7.0/IEffectBufferProviderCallback.hal b/audio/effect/7.0/IEffectBufferProviderCallback.hal
new file mode 100644
index 0000000..d18f7df
--- /dev/null
+++ b/audio/effect/7.0/IEffectBufferProviderCallback.hal
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+/**
+ * This callback interface contains functions that can be used by the effect
+ * engine 'process' function to exchange input and output audio buffers.
+ */
+interface IEffectBufferProviderCallback {
+ /**
+ * Called to retrieve a buffer from which data should be read by the
+ * 'process' function.
+ *
+ * @return buffer audio buffer for processing
+ */
+ getBuffer() generates (AudioBuffer buffer);
+
+ /**
+ * Called to provide a buffer with the data written by the 'process' function.
+ *
+ * @param buffer audio buffer for processing
+ */
+ putBuffer(AudioBuffer buffer);
+};
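
Since the buffer providers are HIDL interfaces themselves, a client supplies its own implementation. An illustrative skeleton, assuming the generated V7_0 headers; the AudioBuffer contents would be populated with shared memory elsewhere:

#include <android/hardware/audio/effect/7.0/IEffectBufferProviderCallback.h>

using ::android::hardware::Return;
using ::android::hardware::Void;
using ::android::hardware::audio::effect::V7_0::AudioBuffer;
using ::android::hardware::audio::effect::V7_0::IEffectBufferProviderCallback;

// Illustrative provider: always hands out one preallocated buffer and remembers
// the buffer handed back by the effect's 'process' function.
struct SingleBufferProvider : public IEffectBufferProviderCallback {
    AudioBuffer source;        // filled with shared memory and a frame count elsewhere
    AudioBuffer lastReturned;

    Return<void> getBuffer(getBuffer_cb _hidl_cb) override {
        _hidl_cb(source);
        return Void();
    }

    Return<void> putBuffer(const AudioBuffer& buffer) override {
        lastReturned = buffer;
        return Void();
    }
};
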
diff --git a/audio/effect/7.0/IEffectsFactory.hal b/audio/effect/7.0/IEffectsFactory.hal
new file mode 100644
index 0000000..337251c
--- /dev/null
+++ b/audio/effect/7.0/IEffectsFactory.hal
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+import android.hardware.audio.common@7.0;
+import IEffect;
+
+interface IEffectsFactory {
+ /**
+ * Returns descriptors of different effects in all loaded libraries.
+ *
+ * @return retval operation completion status.
+ * @return result list of effect descriptors.
+ */
+ getAllDescriptors() generates(Result retval, vec<EffectDescriptor> result);
+
+ /**
+ * Returns a descriptor of a particular effect.
+ *
+ * @return retval operation completion status.
+ * @return result effect descriptor.
+ */
+ getDescriptor(Uuid uid) generates(Result retval, EffectDescriptor result);
+
+ /**
+ * Creates an effect engine of the specified type. To release the effect
+ * engine, it is necessary to release references to the returned effect
+ * object.
+ *
+ * @param uid effect uuid.
+ * @param session audio session to which this effect instance will be
+ * attached. All effects created with the same session ID
+ * are connected in series and process the same signal
+ * stream.
+ * @param ioHandle identifies the output or input stream this effect is
+ * directed to in audio HAL.
+ * @param device identifies the sink or source device this effect is directed to in the
+ * audio HAL. Must be specified if session is AudioSessionConsts.DEVICE.
+ * "device" is the AudioPortHandle used for the device when the audio
+ * patch is created at the audio HAL.
+ * @return retval operation completion status.
+ * @return result the interface for the created effect.
+ * @return effectId the unique ID of the effect to be used with
+ * IStream::addEffect and IStream::removeEffect methods.
+ */
+ createEffect(Uuid uid, AudioSession session, AudioIoHandle ioHandle, AudioPortHandle device)
+ generates (Result retval, IEffect result, uint64_t effectId);
+};
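
A sketch of the enumeration-and-creation flow through the factory. It assumes the session/ioHandle/device parameters remain plain integer handles as in earlier HAL versions; the literal values are placeholders, and a typed interface could then be obtained via e.g. IAcousticEchoCancelerEffect::castFrom():

#include <android/hardware/audio/effect/7.0/IEffectsFactory.h>

using ::android::sp;
using ::android::hardware::hidl_vec;
using ::android::hardware::audio::effect::V7_0::EffectDescriptor;
using ::android::hardware::audio::effect::V7_0::IEffect;
using ::android::hardware::audio::effect::V7_0::IEffectsFactory;
using ::android::hardware::audio::effect::V7_0::Result;

// Enumerates the available engines and instantiates the first one.
sp<IEffect> createFirstEffect() {
    sp<IEffectsFactory> factory = IEffectsFactory::getService();
    if (factory == nullptr) return nullptr;

    hidl_vec<EffectDescriptor> descriptors;
    auto listRet = factory->getAllDescriptors(
            [&](Result r, const hidl_vec<EffectDescriptor>& result) {
                if (r == Result::OK) descriptors = result;
            });
    if (!listRet.isOk() || descriptors.size() == 0) return nullptr;

    sp<IEffect> effect;
    // Placeholder handles: 0 = output mix session, 1 = some output stream, 0 = no device.
    auto createRet = factory->createEffect(
            descriptors[0].uuid, 0 /* session */, 1 /* ioHandle */, 0 /* device */,
            [&](Result r, const sp<IEffect>& e, uint64_t /* effectId */) {
                if (r == Result::OK) effect = e;
            });
    return createRet.isOk() ? effect : nullptr;
}
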
diff --git a/audio/effect/7.0/IEnvironmentalReverbEffect.hal b/audio/effect/7.0/IEnvironmentalReverbEffect.hal
new file mode 100644
index 0000000..e02cfbc
--- /dev/null
+++ b/audio/effect/7.0/IEnvironmentalReverbEffect.hal
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+import android.hardware.audio.common@7.0;
+import IEffect;
+
+interface IEnvironmentalReverbEffect extends IEffect {
+ /**
+ * Sets whether the effect should be bypassed.
+ */
+ setBypass(bool bypass) generates (Result retval);
+
+ /**
+ * Gets whether the effect should be bypassed.
+ */
+ getBypass() generates (Result retval, bool bypass);
+
+ enum ParamRange : int16_t {
+ ROOM_LEVEL_MIN = -6000,
+ ROOM_LEVEL_MAX = 0,
+ ROOM_HF_LEVEL_MIN = -4000,
+ ROOM_HF_LEVEL_MAX = 0,
+ DECAY_TIME_MIN = 100,
+ DECAY_TIME_MAX = 20000,
+ DECAY_HF_RATIO_MIN = 100,
+ DECAY_HF_RATIO_MAX = 1000,
+ REFLECTIONS_LEVEL_MIN = -6000,
+ REFLECTIONS_LEVEL_MAX = 0,
+ REFLECTIONS_DELAY_MIN = 0,
+ REFLECTIONS_DELAY_MAX = 65,
+ REVERB_LEVEL_MIN = -6000,
+ REVERB_LEVEL_MAX = 0,
+ REVERB_DELAY_MIN = 0,
+ REVERB_DELAY_MAX = 65,
+ DIFFUSION_MIN = 0,
+ DIFFUSION_MAX = 1000,
+ DENSITY_MIN = 0,
+ DENSITY_MAX = 1000
+ };
+
+ /**
+ * Sets the room level.
+ */
+ setRoomLevel(int16_t roomLevel) generates (Result retval);
+
+ /**
+ * Gets the room level.
+ */
+ getRoomLevel() generates (Result retval, int16_t roomLevel);
+
+ /**
+ * Sets the room high frequencies level.
+ */
+ setRoomHfLevel(int16_t roomHfLevel) generates (Result retval);
+
+ /**
+ * Gets the room high frequencies level.
+ */
+ getRoomHfLevel() generates (Result retval, int16_t roomHfLevel);
+
+ /**
+ * Sets the room decay time.
+ */
+ setDecayTime(uint32_t decayTime) generates (Result retval);
+
+ /**
+ * Gets the room decay time.
+ */
+ getDecayTime() generates (Result retval, uint32_t decayTime);
+
+ /**
+ * Sets the ratio of high frequencies decay.
+ */
+ setDecayHfRatio(int16_t decayHfRatio) generates (Result retval);
+
+ /**
+ * Gets the ratio of high frequencies decay.
+ */
+ getDecayHfRatio() generates (Result retval, int16_t decayHfRatio);
+
+ /**
+ * Sets the level of reflections in the room.
+ */
+ setReflectionsLevel(int16_t reflectionsLevel) generates (Result retval);
+
+ /**
+ * Gets the level of reflections in the room.
+ */
+ getReflectionsLevel() generates (Result retval, int16_t reflectionsLevel);
+
+ /**
+ * Sets the reflections delay in the room.
+ */
+ setReflectionsDelay(uint32_t reflectionsDelay) generates (Result retval);
+
+ /**
+ * Gets the reflections delay in the room.
+ */
+ getReflectionsDelay() generates (Result retval, uint32_t reflectionsDelay);
+
+ /**
+ * Sets the reverb level of the room.
+ */
+ setReverbLevel(int16_t reverbLevel) generates (Result retval);
+
+ /**
+ * Gets the reverb level of the room.
+ */
+ getReverbLevel() generates (Result retval, int16_t reverbLevel);
+
+ /**
+ * Sets the reverb delay of the room.
+ */
+ setReverbDelay(uint32_t reverbDelay) generates (Result retval);
+
+ /**
+ * Gets the reverb delay of the room.
+ */
+ getReverbDelay() generates (Result retval, uint32_t reverbDelay);
+
+ /**
+ * Sets room diffusion.
+ */
+ setDiffusion(int16_t diffusion) generates (Result retval);
+
+ /**
+ * Gets room diffusion.
+ */
+ getDiffusion() generates (Result retval, int16_t diffusion);
+
+ /**
+ * Sets room wall density.
+ */
+ setDensity(int16_t density) generates (Result retval);
+
+ /**
+ * Gets room wall density.
+ */
+ getDensity() generates (Result retval, int16_t density);
+
+ struct AllProperties {
+ int16_t roomLevel; // in millibels, range -6000 to 0
+ int16_t roomHfLevel; // in millibels, range -4000 to 0
+ uint32_t decayTime; // in milliseconds, range 100 to 20000
+ int16_t decayHfRatio; // in permilles, range 100 to 1000
+ int16_t reflectionsLevel; // in millibels, range -6000 to 0
+ uint32_t reflectionsDelay; // in milliseconds, range 0 to 65
+ int16_t reverbLevel; // in millibels, range -6000 to 0
+ uint32_t reverbDelay; // in milliseconds, range 0 to 65
+ int16_t diffusion; // in permilles, range 0 to 1000
+ int16_t density; // in permilles, range 0 to 1000
+ };
+
+ /**
+ * Sets all properties at once.
+ */
+ setAllProperties(AllProperties properties) generates (Result retval);
+
+ /**
+ * Gets all properties at once.
+ */
+ getAllProperties() generates (Result retval, AllProperties properties);
+};
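
The nested ParamRange enum doubles as the documented parameter bounds; a sketch of clamping a decay-time request before applying it (helper name illustrative):

#include <algorithm>
#include <android/hardware/audio/effect/7.0/IEnvironmentalReverbEffect.h>

using ::android::sp;
using ::android::hardware::audio::effect::V7_0::IEnvironmentalReverbEffect;
using ::android::hardware::audio::effect::V7_0::Result;

// Clamps a requested decay time (ms) into the documented [100, 20000] range
// before handing it to the engine.
bool setClampedDecayTime(const sp<IEnvironmentalReverbEffect>& reverb, uint32_t requestedMs) {
    using Range = IEnvironmentalReverbEffect::ParamRange;
    const uint32_t minMs = static_cast<uint32_t>(Range::DECAY_TIME_MIN);
    const uint32_t maxMs = static_cast<uint32_t>(Range::DECAY_TIME_MAX);
    const uint32_t decayMs = std::clamp(requestedMs, minMs, maxMs);

    auto ret = reverb->setDecayTime(decayMs);
    return ret.isOk() && static_cast<Result>(ret) == Result::OK;
}
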
diff --git a/audio/effect/7.0/IEqualizerEffect.hal b/audio/effect/7.0/IEqualizerEffect.hal
new file mode 100644
index 0000000..e7d7ae1
--- /dev/null
+++ b/audio/effect/7.0/IEqualizerEffect.hal
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+import android.hardware.audio.common@7.0;
+import IEffect;
+
+interface IEqualizerEffect extends IEffect {
+ /**
+ * Gets the number of frequency bands that the equalizer supports.
+ */
+ getNumBands() generates (Result retval, uint16_t numBands);
+
+ /**
+ * Returns the minimum and maximum band levels supported.
+ */
+ getLevelRange()
+ generates (Result retval, int16_t minLevel, int16_t maxLevel);
+
+ /**
+ * Sets the gain for the given equalizer band.
+ */
+ setBandLevel(uint16_t band, int16_t level) generates (Result retval);
+
+ /**
+ * Gets the gain for the given equalizer band.
+ */
+ getBandLevel(uint16_t band) generates (Result retval, int16_t level);
+
+ /**
+ * Gets the center frequency of the given band, in milliHertz.
+ */
+ getBandCenterFrequency(uint16_t band)
+ generates (Result retval, uint32_t centerFreqmHz);
+
+ /**
+ * Gets the frequency range of the given frequency band, in milliHertz.
+ */
+ getBandFrequencyRange(uint16_t band)
+ generates (Result retval, uint32_t minFreqmHz, uint32_t maxFreqmHz);
+
+ /**
+ * Gets the band that has the most effect on the given frequency
+ * in milliHertz.
+ */
+ getBandForFrequency(uint32_t freqmHz)
+ generates (Result retval, uint16_t band);
+
+ /**
+ * Gets the names of all presets the equalizer supports.
+ */
+ getPresetNames() generates (Result retval, vec<string> names);
+
+ /**
+ * Sets the current preset using the index of the preset in the names
+ * vector returned via 'getPresetNames'.
+ */
+ setCurrentPreset(uint16_t preset) generates (Result retval);
+
+ /**
+ * Gets the current preset.
+ */
+ getCurrentPreset() generates (Result retval, uint16_t preset);
+
+ struct AllProperties {
+ uint16_t curPreset;
+ vec<int16_t> bandLevels;
+ };
+
+ /**
+ * Sets all properties at once.
+ */
+ setAllProperties(AllProperties properties) generates (Result retval);
+
+ /**
+ * Gets all properties at once.
+ */
+ getAllProperties() generates (Result retval, AllProperties properties);
+};
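
A sketch that ties several of these calls together: read the band count and level range, then flatten the equalizer by setting every band to the midpoint of that range. The helper name is illustrative:

#include <android/hardware/audio/effect/7.0/IEqualizerEffect.h>

using ::android::sp;
using ::android::hardware::audio::effect::V7_0::IEqualizerEffect;
using ::android::hardware::audio::effect::V7_0::Result;

// Queries the band count and supported level range, then sets every band to
// the midpoint of that range.
bool flattenEqualizer(const sp<IEqualizerEffect>& eq) {
    uint16_t numBands = 0;
    if (!eq->getNumBands([&](Result r, uint16_t n) {
            if (r == Result::OK) numBands = n;
        }).isOk()) return false;

    int16_t minLevel = 0, maxLevel = 0;
    if (!eq->getLevelRange([&](Result r, int16_t minL, int16_t maxL) {
            if (r == Result::OK) { minLevel = minL; maxLevel = maxL; }
        }).isOk()) return false;

    const int16_t mid = static_cast<int16_t>((minLevel + maxLevel) / 2);
    for (uint16_t band = 0; band < numBands; ++band) {
        auto ret = eq->setBandLevel(band, mid);
        if (!ret.isOk() || static_cast<Result>(ret) != Result::OK) return false;
    }
    return true;
}
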
diff --git a/audio/effect/7.0/ILoudnessEnhancerEffect.hal b/audio/effect/7.0/ILoudnessEnhancerEffect.hal
new file mode 100644
index 0000000..0304f20
--- /dev/null
+++ b/audio/effect/7.0/ILoudnessEnhancerEffect.hal
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+import android.hardware.audio.common@7.0;
+import IEffect;
+
+interface ILoudnessEnhancerEffect extends IEffect {
+ /**
+ * Sets target gain expressed in millibels.
+ */
+ setTargetGain(int32_t targetGainMb) generates (Result retval);
+
+ /**
+ * Gets target gain expressed in millibels.
+ */
+ getTargetGain() generates (Result retval, int32_t targetGainMb);
+};
diff --git a/audio/effect/7.0/INoiseSuppressionEffect.hal b/audio/effect/7.0/INoiseSuppressionEffect.hal
new file mode 100644
index 0000000..2c6210c
--- /dev/null
+++ b/audio/effect/7.0/INoiseSuppressionEffect.hal
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+import android.hardware.audio.common@7.0;
+import IEffect;
+
+interface INoiseSuppressionEffect extends IEffect {
+ enum Level : int32_t {
+ LOW,
+ MEDIUM,
+ HIGH
+ };
+
+ /**
+ * Sets suppression level.
+ */
+ setSuppressionLevel(Level level) generates (Result retval);
+
+ /**
+ * Gets suppression level.
+ */
+ getSuppressionLevel() generates (Result retval, Level level);
+
+ enum Type : int32_t {
+ SINGLE_CHANNEL,
+ MULTI_CHANNEL
+ };
+
+ /**
+ * Set suppression type.
+ */
+ setSuppressionType(Type type) generates (Result retval);
+
+ /**
+ * Get suppression type.
+ */
+ getSuppressionType() generates (Result retval, Type type);
+
+ struct AllProperties {
+ Level level;
+ Type type;
+ };
+
+ /**
+ * Sets all properties at once.
+ */
+ setAllProperties(AllProperties properties) generates (Result retval);
+
+ /**
+ * Gets all properties at once.
+ */
+ getAllProperties() generates (Result retval, AllProperties properties);
+};
diff --git a/audio/effect/7.0/IPresetReverbEffect.hal b/audio/effect/7.0/IPresetReverbEffect.hal
new file mode 100644
index 0000000..da61d24
--- /dev/null
+++ b/audio/effect/7.0/IPresetReverbEffect.hal
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+import android.hardware.audio.common@7.0;
+import IEffect;
+
+interface IPresetReverbEffect extends IEffect {
+ enum Preset : int32_t {
+ NONE, // no reverb or reflections
+ SMALLROOM, // a small room less than five meters in length
+ MEDIUMROOM, // a medium room with a length of ten meters or less
+ LARGEROOM, // a large-sized room suitable for live performances
+ MEDIUMHALL, // a medium-sized hall
+ LARGEHALL, // a large-sized hall suitable for a full orchestra
+ PLATE, // synthesis of the traditional plate reverb
+ LAST = PLATE
+ };
+
+ /**
+ * Sets the current preset.
+ */
+ setPreset(Preset preset) generates (Result retval);
+
+ /**
+ * Gets the current preset.
+ */
+ getPreset() generates (Result retval, Preset preset);
+};
diff --git a/audio/effect/7.0/IVirtualizerEffect.hal b/audio/effect/7.0/IVirtualizerEffect.hal
new file mode 100644
index 0000000..141b4e6
--- /dev/null
+++ b/audio/effect/7.0/IVirtualizerEffect.hal
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+import android.hardware.audio.common@7.0;
+import IEffect;
+
+interface IVirtualizerEffect extends IEffect {
+ /**
+ * Returns whether setting virtualization strength is supported.
+ */
+ isStrengthSupported() generates (bool strengthSupported);
+
+ enum StrengthRange : uint16_t {
+ MIN = 0,
+ MAX = 1000
+ };
+
+ /**
+ * Sets virtualization strength.
+ *
+ * @param strength strength of the effect. The valid range for strength
+ * is [0, 1000], where 0 per mille designates the
+ * mildest effect and 1000 per mille designates the
+ * strongest.
+ * @return retval operation completion status.
+ */
+ setStrength(uint16_t strength) generates (Result retval);
+
+ /**
+ * Gets virtualization strength.
+ */
+ getStrength() generates (Result retval, uint16_t strength);
+
+ struct SpeakerAngle {
+ /** Speaker channel mask */
+ vec<AudioChannelMask> mask;
+ // all angles are expressed in degrees and
+ // are relative to the listener.
+ int16_t azimuth; // 0 is the direction the listener faces
+ // 180 is behind the listener
+ // -90 is to their left
+ int16_t elevation; // 0 is the horizontal plane
+ // +90 is above the listener, -90 is below
+ };
+ /**
+ * Retrieves virtual speaker angles for the given channel mask on the
+ * specified device.
+ */
+ getVirtualSpeakerAngles(vec<AudioChannelMask> mask, DeviceAddress device)
+ generates (Result retval, vec<SpeakerAngle> speakerAngles);
+
+ /**
+ * Forces the virtualizer effect for the given output device.
+ */
+ forceVirtualizationMode(DeviceAddress device) generates (Result retval);
+
+ /**
+ * Returns the audio device reflecting the current virtualization mode.
+ * The device type can be empty when not virtualizing.
+ */
+ getVirtualizationMode() generates (Result retval, DeviceAddress device);
+};
diff --git a/audio/effect/7.0/IVisualizerEffect.hal b/audio/effect/7.0/IVisualizerEffect.hal
new file mode 100644
index 0000000..b4e8659
--- /dev/null
+++ b/audio/effect/7.0/IVisualizerEffect.hal
@@ -0,0 +1,110 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+import android.hardware.audio.common@7.0;
+import IEffect;
+
+interface IVisualizerEffect extends IEffect {
+ enum CaptureSizeRange : int32_t {
+ MAX = 1024, // maximum capture size in samples
+ MIN = 128 // minimum capture size in samples
+ };
+
+ /**
+ * Sets the number of PCM samples in the capture.
+ */
+ setCaptureSize(uint16_t captureSize) generates (Result retval);
+
+ /**
+ * Gets the number of PCM samples in the capture.
+ */
+ getCaptureSize() generates (Result retval, uint16_t captureSize);
+
+ enum ScalingMode : int32_t {
+ // Keep in sync with SCALING_MODE_... in
+ // frameworks/base/media/java/android/media/audiofx/Visualizer.java
+ NORMALIZED = 0,
+ AS_PLAYED = 1
+ };
+
+ /**
+ * Specifies the way the captured data is scaled.
+ */
+ setScalingMode(ScalingMode scalingMode) generates (Result retval);
+
+ /**
+ * Retrieves the way the captured data is scaled.
+ */
+ getScalingMode() generates (Result retval, ScalingMode scalingMode);
+
+ /**
+ * Informs the visualizer about the downstream latency.
+ */
+ setLatency(uint32_t latencyMs) generates (Result retval);
+
+ /**
+ * Gets the downstream latency.
+ */
+ getLatency() generates (Result retval, uint32_t latencyMs);
+
+ enum MeasurementMode : int32_t {
+ // Keep in sync with MEASUREMENT_MODE_... in
+ // frameworks/base/media/java/android/media/audiofx/Visualizer.java
+ NONE = 0x0,
+ PEAK_RMS = 0x1
+ };
+
+ /**
+ * Specifies which measurements are to be made.
+ */
+ setMeasurementMode(MeasurementMode measurementMode)
+ generates (Result retval);
+
+ /**
+ * Retrieves which measurements are to be made.
+ */
+ getMeasurementMode() generates (
+ Result retval, MeasurementMode measurementMode);
+
+ /**
+ * Retrieves the latest PCM snapshot captured by the visualizer engine. The
+ * number of samples to capture is specified by 'setCaptureSize' parameter.
+ *
+ * @return retval operation completion status.
+ * @return samples samples in 8 bit unsigned format (0 = 0x80)
+ */
+ capture() generates (Result retval, vec<uint8_t> samples);
+
+ struct Measurement {
+ MeasurementMode mode; // discriminator
+ union Values {
+ struct PeakAndRms {
+ int32_t peakMb; // millibels
+ int32_t rmsMb; // millibels
+ } peakAndRms;
+ } value;
+ };
+ /**
+ * Retrieves the latest measurements. The measurements to be made
+ * are specified by 'setMeasurementMode' parameter.
+ *
+ * @return retval operation completion status.
+ * @return result measurement.
+ */
+ measure() generates (Result retval, Measurement result);
+};
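
A sketch of a polling client: request the minimum capture size, enable PEAK_RMS measurements, then read back one waveform snapshot and one measurement. The union access mirrors the Measurement layout above; names and checks are illustrative:

#include <android/hardware/audio/effect/7.0/IVisualizerEffect.h>

using ::android::sp;
using ::android::hardware::hidl_vec;
using ::android::hardware::audio::effect::V7_0::IVisualizerEffect;
using ::android::hardware::audio::effect::V7_0::Result;

// Configures a minimal capture, enables peak/RMS measurements, and pulls one
// waveform snapshot plus one measurement from the engine.
bool pollVisualizer(const sp<IVisualizerEffect>& viz) {
    const uint16_t captureSize =
            static_cast<uint16_t>(IVisualizerEffect::CaptureSizeRange::MIN);
    auto sizeRet = viz->setCaptureSize(captureSize);
    if (!sizeRet.isOk() || static_cast<Result>(sizeRet) != Result::OK) return false;

    auto modeRet = viz->setMeasurementMode(IVisualizerEffect::MeasurementMode::PEAK_RMS);
    if (!modeRet.isOk()) return false;

    hidl_vec<uint8_t> waveform;
    if (!viz->capture([&](Result r, const hidl_vec<uint8_t>& samples) {
            if (r == Result::OK) waveform = samples;
        }).isOk()) return false;

    int32_t peakMb = 0;
    if (!viz->measure([&](Result r, const IVisualizerEffect::Measurement& m) {
            if (r == Result::OK) peakMb = m.value.peakAndRms.peakMb;
        }).isOk()) return false;

    (void)peakMb;  // would feed a UI level meter in a real client
    return waveform.size() == captureSize;
}
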
diff --git a/audio/effect/7.0/types.hal b/audio/effect/7.0/types.hal
new file mode 100644
index 0000000..fe4ee51
--- /dev/null
+++ b/audio/effect/7.0/types.hal
@@ -0,0 +1,301 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.audio.effect@7.0;
+
+import android.hardware.audio.common@7.0;
+
+enum Result : int32_t {
+ OK,
+ NOT_INITIALIZED,
+ INVALID_ARGUMENTS,
+ INVALID_STATE,
+ NOT_SUPPORTED,
+ RESULT_TOO_BIG
+};
+
+/**
+ * Effect engine capabilities/requirements flags.
+ *
+ * Definitions for flags field of effect descriptor.
+ *
+ * +----------------+--------+--------------------------------------------------
+ * | description | bits | values
+ * +----------------+--------+--------------------------------------------------
+ * | connection | 0..2 | 0 insert: after track process
+ * | mode | | 1 auxiliary: connect to track auxiliary
+ * | | | output and use send level
+ * | | | 2 replace: replaces track process function;
+ * | | | must implement SRC, volume and mono to stereo.
+ * | | | 3 pre processing: applied below audio HAL on input
+ * | | | 4 post processing: applied below audio HAL on output
+ * | | | 5 - 7 reserved
+ * +----------------+--------+--------------------------------------------------
+ * | insertion | 3..5 | 0 none
+ * | preference | | 1 first of the chain
+ * | | | 2 last of the chain
+ * | | | 3 exclusive (only effect in the insert chain)
+ * | | | 4..7 reserved
+ * +----------------+--------+--------------------------------------------------
+ * | Volume | 6..8 | 0 none
+ * | management | | 1 implements volume control
+ * | | | 2 requires volume indication
+ * | | | 3 monitors requested volume
+ * | | | 4 reserved
+ * +----------------+--------+--------------------------------------------------
+ * | Device | 9..11 | 0 none
+ * | indication | | 1 requires device updates
+ * | | | 2, 4 reserved
+ * +----------------+--------+--------------------------------------------------
+ * | Sample input | 12..13 | 1 direct: process() function or
+ * | mode | | EFFECT_CMD_SET_CONFIG command must specify
+ * | | | a buffer descriptor
+ * | | | 2 provider: process() function uses the
+ * | | | bufferProvider indicated by the
+ * | | | EFFECT_CMD_SET_CONFIG command to request input.
+ * | | | buffers.
+ * | | | 3 both: both input modes are supported
+ * +----------------+--------+--------------------------------------------------
+ * | Sample output | 14..15 | 1 direct: process() function or
+ * | mode | | EFFECT_CMD_SET_CONFIG command must specify
+ * | | | a buffer descriptor
+ * | | | 2 provider: process() function uses the
+ * | | | bufferProvider indicated by the
+ * | | | EFFECT_CMD_SET_CONFIG command to request output
+ * | | | buffers.
+ * | | | 3 both: both output modes are supported
+ * +----------------+--------+--------------------------------------------------
+ * | Hardware | 16..17 | 0 No hardware acceleration
+ * | acceleration | | 1 non tunneled hw acceleration: the process()
+ * | | | function reads the samples, send them to HW
+ * | | | accelerated effect processor, reads back
+ * | | | the processed samples and returns them
+ * | | | to the output buffer.
+ * | | | 2 tunneled hw acceleration: the process()
+ * | | | function is transparent. The effect interface
+ * | | | is only used to control the effect engine.
+ * | | | This mode is relevant for global effects
+ * | | | actually applied by the audio hardware on
+ * | | | the output stream.
+ * +----------------+--------+--------------------------------------------------
+ * | Audio Mode | 18..19 | 0 none
+ * | indication | | 1 requires audio mode updates
+ * | | | 2..3 reserved
+ * +----------------+--------+--------------------------------------------------
+ * | Audio source | 20..21 | 0 none
+ * | indication | | 1 requires audio source updates
+ * | | | 2..3 reserved
+ * +----------------+--------+--------------------------------------------------
+ * | Effect offload | 22 | 0 The effect cannot be offloaded to an audio DSP
+ * | supported | | 1 The effect can be offloaded to an audio DSP
+ * +----------------+--------+--------------------------------------------------
+ * | Process | 23 | 0 The effect implements a process function.
+ * | function | | 1 The effect does not implement a process
+ * | not | | function: enabling the effect has no impact
+ * | implemented | | on latency or CPU load.
+ * | | | Effect implementations setting this flag do not
+ * | | | have to implement a process function.
+ * +----------------+--------+--------------------------------------------------
+ */
+@export(name="", value_prefix="EFFECT_FLAG_")
+enum EffectFlags : int32_t {
+ // Insert mode
+ TYPE_SHIFT = 0,
+ TYPE_SIZE = 3,
+ TYPE_MASK = ((1 << TYPE_SIZE) -1) << TYPE_SHIFT,
+ TYPE_INSERT = 0 << TYPE_SHIFT,
+ TYPE_AUXILIARY = 1 << TYPE_SHIFT,
+ TYPE_REPLACE = 2 << TYPE_SHIFT,
+ TYPE_PRE_PROC = 3 << TYPE_SHIFT,
+ TYPE_POST_PROC = 4 << TYPE_SHIFT,
+
+ // Insert preference
+ INSERT_SHIFT = TYPE_SHIFT + TYPE_SIZE,
+ INSERT_SIZE = 3,
+ INSERT_MASK = ((1 << INSERT_SIZE) -1) << INSERT_SHIFT,
+ INSERT_ANY = 0 << INSERT_SHIFT,
+ INSERT_FIRST = 1 << INSERT_SHIFT,
+ INSERT_LAST = 2 << INSERT_SHIFT,
+ INSERT_EXCLUSIVE = 3 << INSERT_SHIFT,
+
+ // Volume control
+ VOLUME_SHIFT = INSERT_SHIFT + INSERT_SIZE,
+ VOLUME_SIZE = 3,
+ VOLUME_MASK = ((1 << VOLUME_SIZE) -1) << VOLUME_SHIFT,
+ VOLUME_CTRL = 1 << VOLUME_SHIFT,
+ VOLUME_IND = 2 << VOLUME_SHIFT,
+ VOLUME_MONITOR = 3 << VOLUME_SHIFT,
+ VOLUME_NONE = 0 << VOLUME_SHIFT,
+
+ // Device indication
+ DEVICE_SHIFT = VOLUME_SHIFT + VOLUME_SIZE,
+ DEVICE_SIZE = 3,
+ DEVICE_MASK = ((1 << DEVICE_SIZE) -1) << DEVICE_SHIFT,
+ DEVICE_IND = 1 << DEVICE_SHIFT,
+ DEVICE_NONE = 0 << DEVICE_SHIFT,
+
+ // Sample input modes
+ INPUT_SHIFT = DEVICE_SHIFT + DEVICE_SIZE,
+ INPUT_SIZE = 2,
+ INPUT_MASK = ((1 << INPUT_SIZE) -1) << INPUT_SHIFT,
+ INPUT_DIRECT = 1 << INPUT_SHIFT,
+ INPUT_PROVIDER = 2 << INPUT_SHIFT,
+ INPUT_BOTH = 3 << INPUT_SHIFT,
+
+ // Sample output modes
+ OUTPUT_SHIFT = INPUT_SHIFT + INPUT_SIZE,
+ OUTPUT_SIZE = 2,
+ OUTPUT_MASK = ((1 << OUTPUT_SIZE) -1) << OUTPUT_SHIFT,
+ OUTPUT_DIRECT = 1 << OUTPUT_SHIFT,
+ OUTPUT_PROVIDER = 2 << OUTPUT_SHIFT,
+ OUTPUT_BOTH = 3 << OUTPUT_SHIFT,
+
+ // Hardware acceleration mode
+ HW_ACC_SHIFT = OUTPUT_SHIFT + OUTPUT_SIZE,
+ HW_ACC_SIZE = 2,
+ HW_ACC_MASK = ((1 << HW_ACC_SIZE) -1) << HW_ACC_SHIFT,
+ HW_ACC_SIMPLE = 1 << HW_ACC_SHIFT,
+ HW_ACC_TUNNEL = 2 << HW_ACC_SHIFT,
+
+ // Audio mode indication
+ AUDIO_MODE_SHIFT = HW_ACC_SHIFT + HW_ACC_SIZE,
+ AUDIO_MODE_SIZE = 2,
+ AUDIO_MODE_MASK = ((1 << AUDIO_MODE_SIZE) -1) << AUDIO_MODE_SHIFT,
+ AUDIO_MODE_IND = 1 << AUDIO_MODE_SHIFT,
+ AUDIO_MODE_NONE = 0 << AUDIO_MODE_SHIFT,
+
+ // Audio source indication
+ AUDIO_SOURCE_SHIFT = AUDIO_MODE_SHIFT + AUDIO_MODE_SIZE,
+ AUDIO_SOURCE_SIZE = 2,
+ AUDIO_SOURCE_MASK = ((1 << AUDIO_SOURCE_SIZE) -1) << AUDIO_SOURCE_SHIFT,
+ AUDIO_SOURCE_IND = 1 << AUDIO_SOURCE_SHIFT,
+ AUDIO_SOURCE_NONE = 0 << AUDIO_SOURCE_SHIFT,
+
+ // Effect offload indication
+ OFFLOAD_SHIFT = AUDIO_SOURCE_SHIFT + AUDIO_SOURCE_SIZE,
+ OFFLOAD_SIZE = 1,
+ OFFLOAD_MASK = ((1 << OFFLOAD_SIZE) -1) << OFFLOAD_SHIFT,
+ OFFLOAD_SUPPORTED = 1 << OFFLOAD_SHIFT,
+
+ // Effect has no process indication
+ NO_PROCESS_SHIFT = OFFLOAD_SHIFT + OFFLOAD_SIZE,
+ NO_PROCESS_SIZE = 1,
+ NO_PROCESS_MASK = ((1 << NO_PROCESS_SIZE) -1) << NO_PROCESS_SHIFT,
+ NO_PROCESS = 1 << NO_PROCESS_SHIFT
+};
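The fields above are plain shift-and-mask encodings. As an illustrative, self-contained C++ sketch only (the constants below are copied from the table, and the example flags word is hypothetical), decoding the Type and Volume fields looks like this:

    #include <cstdint>
    #include <cstdio>

    // Field positions copied from the EffectFlags layout above.
    constexpr uint32_t kTypeShift = 0, kTypeSize = 3;      // bits 0..2
    constexpr uint32_t kVolumeShift = 6, kVolumeSize = 3;   // bits 6..8

    constexpr uint32_t field(uint32_t flags, uint32_t shift, uint32_t size) {
        return (flags >> shift) & ((1u << size) - 1u);
    }

    int main() {
        // Hypothetical descriptor flags: TYPE_INSERT (0) combined with VOLUME_CTRL (1 << 6).
        const uint32_t flags = (0u << kTypeShift) | (1u << kVolumeShift);
        std::printf("type=%u volume=%u\n",
                    static_cast<unsigned>(field(flags, kTypeShift, kTypeSize)),
                    static_cast<unsigned>(field(flags, kVolumeShift, kVolumeSize)));
        return 0;
    }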
+
+/**
+ * The effect descriptor contains necessary information to facilitate the
+ * enumeration of the effect engines present in a library.
+ */
+struct EffectDescriptor {
+ Uuid type; // UUID of the OpenSL ES interface implemented
+ // by this effect
+ Uuid uuid; // UUID for this particular implementation
+ bitfield<EffectFlags> flags; // effect engine capabilities/requirements flags
+ uint16_t cpuLoad; // CPU load indication expressed in 0.1 MIPS units
+ // as estimated on an ARM9E core (ARMv5TE) with 0 WS
+ uint16_t memoryUsage; // data memory usage expressed in KB and includes
+ // only dynamically allocated memory
+ uint8_t[64] name; // human readable effect name
+ uint8_t[64] implementor; // human readable effect implementor name
+};
+
+/**
+ * A buffer is a chunk of audio data for processing. Multi-channel audio is
+ * always interleaved. The channel order is from LSB to MSB with regard to the
+ * channel mask definition in audio.h, audio_channel_mask_t, e.g.:
+ * Stereo: L, R; 5.1: FL, FR, FC, LFE, BL, BR.
+ *
+ * The buffer size is expressed in frame count, a frame being composed of
+ * samples for all channels at a given time. Frame size for unspecified format
+ * (AUDIO_FORMAT_OTHER) is 8 bit by definition.
+ */
+struct AudioBuffer {
+ uint64_t id;
+ uint32_t frameCount;
+ memory data;
+};
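As a quick worked example of the frame arithmetic described above (channel count, sample width, and duration are hypothetical, not taken from the HAL):

    #include <cstddef>

    int main() {
        const size_t channelCount = 2;    // stereo: L, R
        const size_t bytesPerSample = 2;  // e.g. 16-bit PCM
        const size_t frameCount = 480;    // 10 ms at 48 kHz
        const size_t frameSize = channelCount * bytesPerSample;  // 4 bytes per frame
        const size_t bufferBytes = frameCount * frameSize;       // 1920 bytes total
        return bufferBytes == 1920 ? 0 : 1;
    }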
+
+@export(name="effect_buffer_access_e", value_prefix="EFFECT_BUFFER_")
+enum EffectBufferAccess : int32_t {
+ ACCESS_WRITE,
+ ACCESS_READ,
+ ACCESS_ACCUMULATE
+};
+
+/**
+ * Determines what fields of EffectBufferConfig need to be considered.
+ */
+@export(name="", value_prefix="EFFECT_CONFIG_")
+enum EffectConfigParameters : int32_t {
+ BUFFER = 0x0001, // buffer field
+ SMP_RATE = 0x0002, // samplingRate
+ CHANNELS = 0x0004, // channels
+ FORMAT = 0x0008, // format
+ ACC_MODE = 0x0010, // accessMode
+ // Note that the 2.0 ALL constant has been moved to a helper function
+};
+
+/**
+ * The buffer config structure specifies the input or output audio format
+ * to be used by the effect engine.
+ */
+struct EffectBufferConfig {
+ AudioBuffer buffer;
+ uint32_t samplingRateHz;
+ AudioChannelMask channels;
+ AudioFormat format;
+ EffectBufferAccess accessMode;
+ bitfield<EffectConfigParameters> mask;
+};
+
+struct EffectConfig {
+ EffectBufferConfig inputCfg;
+ EffectBufferConfig outputCfg;
+};
+
+@export(name="effect_feature_e", value_prefix="EFFECT_FEATURE_")
+enum EffectFeature : int32_t {
+ AUX_CHANNELS, // supports auxiliary channels
+ // (e.g. dual mic noise suppressor)
+ CNT
+};
+
+struct EffectAuxChannelsConfig {
+ vec<AudioChannelMask> mainChannels; // channel mask for main channels
+ vec<AudioChannelMask> auxChannels; // channel mask for auxiliary channels
+};
+
+struct EffectOffloadParameter {
+ bool isOffload; // true if the playback thread the effect
+ // is attached to is offloaded
+ AudioIoHandle ioHandle; // io handle of the playback thread
+ // the effect is attached to
+};
+
+/**
+ * The message queue flags used to synchronize reads and writes from
+ * the status message queue used by effects.
+ */
+enum MessageQueueFlagBits : uint32_t {
+ DONE_PROCESSING = 1 << 0,
+ REQUEST_PROCESS = 1 << 1,
+ REQUEST_PROCESS_REVERSE = 1 << 2,
+ REQUEST_QUIT = 1 << 3,
+ REQUEST_PROCESS_ALL =
+ REQUEST_PROCESS | REQUEST_PROCESS_REVERSE | REQUEST_QUIT
+};
diff --git a/audio/effect/7.0/xml/Android.bp b/audio/effect/7.0/xml/Android.bp
new file mode 100644
index 0000000..dc12e63
--- /dev/null
+++ b/audio/effect/7.0/xml/Android.bp
@@ -0,0 +1,5 @@
+xsd_config {
+ name: "audio_effects_conf_V7_0",
+ srcs: ["audio_effects_conf.xsd"],
+ package_name: "audio.effects.V7_0",
+}
diff --git a/audio/effect/7.0/xml/api/current.txt b/audio/effect/7.0/xml/api/current.txt
new file mode 100644
index 0000000..34cb541
--- /dev/null
+++ b/audio/effect/7.0/xml/api/current.txt
@@ -0,0 +1,208 @@
+// Signature format: 2.0
+package audio.effects.V7_0 {
+
+ public class AudioEffectsConf {
+ ctor public AudioEffectsConf();
+ method public audio.effects.V7_0.AudioEffectsConf.DeviceEffects getDeviceEffects();
+ method public audio.effects.V7_0.EffectsType getEffects();
+ method public audio.effects.V7_0.LibrariesType getLibraries();
+ method public audio.effects.V7_0.AudioEffectsConf.Postprocess getPostprocess();
+ method public audio.effects.V7_0.AudioEffectsConf.Preprocess getPreprocess();
+ method public audio.effects.V7_0.VersionType getVersion();
+ method public void setDeviceEffects(audio.effects.V7_0.AudioEffectsConf.DeviceEffects);
+ method public void setEffects(audio.effects.V7_0.EffectsType);
+ method public void setLibraries(audio.effects.V7_0.LibrariesType);
+ method public void setPostprocess(audio.effects.V7_0.AudioEffectsConf.Postprocess);
+ method public void setPreprocess(audio.effects.V7_0.AudioEffectsConf.Preprocess);
+ method public void setVersion(audio.effects.V7_0.VersionType);
+ }
+
+ public static class AudioEffectsConf.DeviceEffects {
+ ctor public AudioEffectsConf.DeviceEffects();
+ method public java.util.List<audio.effects.V7_0.DeviceProcessType> getDevicePort();
+ }
+
+ public static class AudioEffectsConf.Postprocess {
+ ctor public AudioEffectsConf.Postprocess();
+ method public java.util.List<audio.effects.V7_0.StreamPostprocessType> getStream();
+ }
+
+ public static class AudioEffectsConf.Preprocess {
+ ctor public AudioEffectsConf.Preprocess();
+ method public java.util.List<audio.effects.V7_0.StreamPreprocessType> getStream();
+ }
+
+ public class DeviceProcessType extends audio.effects.V7_0.StreamProcessingType {
+ ctor public DeviceProcessType();
+ method public String getAddress();
+ method public audio.effects.V7_0.DeviceType getType();
+ method public void setAddress(String);
+ method public void setType(audio.effects.V7_0.DeviceType);
+ }
+
+ public enum DeviceType {
+ method public String getRawName();
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_AUX_DIGITAL;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_BACK_MIC;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_BLUETOOTH_A2DP;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_BLUETOOTH_BLE;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_BUILTIN_MIC;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_BUS;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_COMMUNICATION;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_ECHO_REFERENCE;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_FM_TUNER;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_HDMI;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_HDMI_ARC;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_IP;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_LINE;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_LOOPBACK;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_PROXY;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_REMOTE_SUBMIX;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_SPDIF;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_TELEPHONY_RX;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_TV_TUNER;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_USB_ACCESSORY;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_USB_DEVICE;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_USB_HEADSET;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_VOICE_CALL;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_IN_WIRED_HEADSET;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_AUX_DIGITAL;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_AUX_LINE;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_BLUETOOTH_SCO;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_BUS;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_EARPIECE;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_ECHO_CANCELLER;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_FM;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_HDMI;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_HDMI_ARC;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_HEARING_AID;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_IP;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_LINE;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_PROXY;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_SPDIF;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_SPEAKER;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_SPEAKER_SAFE;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_TELEPHONY_TX;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_USB_ACCESSORY;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_USB_DEVICE;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_USB_HEADSET;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_WIRED_HEADPHONE;
+ enum_constant public static final audio.effects.V7_0.DeviceType AUDIO_DEVICE_OUT_WIRED_HEADSET;
+ }
+
+ public class EffectImplType {
+ ctor public EffectImplType();
+ method public String getLibrary();
+ method public String getUuid();
+ method public void setLibrary(String);
+ method public void setUuid(String);
+ }
+
+ public class EffectProxyType extends audio.effects.V7_0.EffectType {
+ ctor public EffectProxyType();
+ method public audio.effects.V7_0.EffectImplType getLibhw();
+ method public audio.effects.V7_0.EffectImplType getLibsw();
+ method public void setLibhw(audio.effects.V7_0.EffectImplType);
+ method public void setLibsw(audio.effects.V7_0.EffectImplType);
+ }
+
+ public class EffectType extends audio.effects.V7_0.EffectImplType {
+ ctor public EffectType();
+ method public String getName();
+ method public void setName(String);
+ }
+
+ public class EffectsType {
+ ctor public EffectsType();
+ method public java.util.List<audio.effects.V7_0.EffectProxyType> getEffectProxy_optional();
+ method public java.util.List<audio.effects.V7_0.EffectType> getEffect_optional();
+ }
+
+ public class LibrariesType {
+ ctor public LibrariesType();
+ method public java.util.List<audio.effects.V7_0.LibrariesType.Library> getLibrary();
+ }
+
+ public static class LibrariesType.Library {
+ ctor public LibrariesType.Library();
+ method public String getName();
+ method public String getPath();
+ method public void setName(String);
+ method public void setPath(String);
+ }
+
+ public enum StreamInputType {
+ method public String getRawName();
+ enum_constant public static final audio.effects.V7_0.StreamInputType camcorder;
+ enum_constant public static final audio.effects.V7_0.StreamInputType echo_reference;
+ enum_constant public static final audio.effects.V7_0.StreamInputType fm_tuner;
+ enum_constant public static final audio.effects.V7_0.StreamInputType mic;
+ enum_constant public static final audio.effects.V7_0.StreamInputType unprocessed;
+ enum_constant public static final audio.effects.V7_0.StreamInputType voice_call;
+ enum_constant public static final audio.effects.V7_0.StreamInputType voice_communication;
+ enum_constant public static final audio.effects.V7_0.StreamInputType voice_downlink;
+ enum_constant public static final audio.effects.V7_0.StreamInputType voice_performance;
+ enum_constant public static final audio.effects.V7_0.StreamInputType voice_recognition;
+ enum_constant public static final audio.effects.V7_0.StreamInputType voice_uplink;
+ }
+
+ public enum StreamOutputType {
+ method public String getRawName();
+ enum_constant public static final audio.effects.V7_0.StreamOutputType alarm;
+ enum_constant public static final audio.effects.V7_0.StreamOutputType assistant;
+ enum_constant public static final audio.effects.V7_0.StreamOutputType bluetooth_sco;
+ enum_constant public static final audio.effects.V7_0.StreamOutputType dtmf;
+ enum_constant public static final audio.effects.V7_0.StreamOutputType enforced_audible;
+ enum_constant public static final audio.effects.V7_0.StreamOutputType music;
+ enum_constant public static final audio.effects.V7_0.StreamOutputType notification;
+ enum_constant public static final audio.effects.V7_0.StreamOutputType ring;
+ enum_constant public static final audio.effects.V7_0.StreamOutputType system;
+ enum_constant public static final audio.effects.V7_0.StreamOutputType tts;
+ enum_constant public static final audio.effects.V7_0.StreamOutputType voice_call;
+ }
+
+ public class StreamPostprocessType extends audio.effects.V7_0.StreamProcessingType {
+ ctor public StreamPostprocessType();
+ method public audio.effects.V7_0.StreamOutputType getType();
+ method public void setType(audio.effects.V7_0.StreamOutputType);
+ }
+
+ public class StreamPreprocessType extends audio.effects.V7_0.StreamProcessingType {
+ ctor public StreamPreprocessType();
+ method public audio.effects.V7_0.StreamInputType getType();
+ method public void setType(audio.effects.V7_0.StreamInputType);
+ }
+
+ public class StreamProcessingType {
+ ctor public StreamProcessingType();
+ method public java.util.List<audio.effects.V7_0.StreamProcessingType.Apply> getApply();
+ }
+
+ public static class StreamProcessingType.Apply {
+ ctor public StreamProcessingType.Apply();
+ method public String getEffect();
+ method public void setEffect(String);
+ }
+
+ public enum VersionType {
+ method public String getRawName();
+ enum_constant public static final audio.effects.V7_0.VersionType _2_0;
+ }
+
+ public class XmlParser {
+ ctor public XmlParser();
+ method public static audio.effects.V7_0.AudioEffectsConf read(java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException;
+ method public static String readText(org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
+ method public static void skip(org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
+ }
+
+}
+
diff --git a/audio/effect/7.0/xml/api/last_current.txt b/audio/effect/7.0/xml/api/last_current.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/audio/effect/7.0/xml/api/last_current.txt
diff --git a/audio/effect/7.0/xml/api/last_removed.txt b/audio/effect/7.0/xml/api/last_removed.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/audio/effect/7.0/xml/api/last_removed.txt
diff --git a/audio/effect/7.0/xml/api/removed.txt b/audio/effect/7.0/xml/api/removed.txt
new file mode 100644
index 0000000..d802177
--- /dev/null
+++ b/audio/effect/7.0/xml/api/removed.txt
@@ -0,0 +1 @@
+// Signature format: 2.0
diff --git a/audio/effect/7.0/xml/audio_effects_conf.xsd b/audio/effect/7.0/xml/audio_effects_conf.xsd
new file mode 100644
index 0000000..94f9f76
--- /dev/null
+++ b/audio/effect/7.0/xml/audio_effects_conf.xsd
@@ -0,0 +1,323 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+ targetNamespace="http://schemas.android.com/audio/audio_effects_conf/v2_0"
+ xmlns:aec="http://schemas.android.com/audio/audio_effects_conf/v2_0"
+ elementFormDefault="qualified">
+ <!-- Simple types -->
+ <xs:simpleType name="versionType">
+ <xs:restriction base="xs:decimal">
+ <xs:enumeration value="2.0"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="uuidType">
+ <xs:restriction base="xs:string">
+ <xs:pattern value="[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{12}"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="streamInputType">
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="mic"/>
+ <xs:enumeration value="voice_uplink"/>
+ <xs:enumeration value="voice_downlink"/>
+ <xs:enumeration value="voice_call"/>
+ <xs:enumeration value="camcorder"/>
+ <xs:enumeration value="voice_recognition"/>
+ <xs:enumeration value="voice_communication"/>
+ <xs:enumeration value="unprocessed"/>
+ <xs:enumeration value="voice_performance"/>
+ <xs:enumeration value="echo_reference"/>
+ <xs:enumeration value="fm_tuner"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="streamOutputType">
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="voice_call"/>
+ <xs:enumeration value="system"/>
+ <xs:enumeration value="ring"/>
+ <xs:enumeration value="music"/>
+ <xs:enumeration value="alarm"/>
+ <xs:enumeration value="notification"/>
+ <xs:enumeration value="bluetooth_sco"/>
+ <xs:enumeration value="enforced_audible"/>
+ <xs:enumeration value="dtmf"/>
+ <xs:enumeration value="tts"/>
+ <xs:enumeration value="assistant"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="relativePathType">
+ <xs:restriction base="xs:string">
+ <xs:pattern value="[^/].*"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name="deviceType">
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="AUDIO_DEVICE_OUT_EARPIECE"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_SPEAKER"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_WIRED_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_WIRED_HEADPHONE"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_SCO"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_AUX_DIGITAL"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_HDMI"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_USB_ACCESSORY"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_USB_DEVICE"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_REMOTE_SUBMIX"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_TELEPHONY_TX"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_LINE"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_HDMI_ARC"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_SPDIF"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_FM"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_AUX_LINE"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_SPEAKER_SAFE"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_IP"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_BUS"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_PROXY"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_USB_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_HEARING_AID"/>
+ <xs:enumeration value="AUDIO_DEVICE_OUT_ECHO_CANCELLER"/>
+ <!-- Due to the XML format, IN types cannot be separated from OUT types -->
+ <xs:enumeration value="AUDIO_DEVICE_IN_COMMUNICATION"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_BUILTIN_MIC"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_WIRED_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_AUX_DIGITAL"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_HDMI"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_VOICE_CALL"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_TELEPHONY_RX"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_BACK_MIC"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_REMOTE_SUBMIX"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_USB_ACCESSORY"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_USB_DEVICE"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_FM_TUNER"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_TV_TUNER"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_LINE"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_SPDIF"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_BLUETOOTH_A2DP"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_LOOPBACK"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_IP"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_BUS"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_PROXY"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_USB_HEADSET"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_BLUETOOTH_BLE"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_HDMI_ARC"/>
+ <xs:enumeration value="AUDIO_DEVICE_IN_ECHO_REFERENCE"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <!-- Complex types -->
+ <xs:complexType name="librariesType">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ List of effect libraries to load. Each library element must have "name" and
+ "path" attributes. The latter gives the path of the library .so file
+ relative to the standard effect folders: /(vendor|odm|system)/lib(64)?/soundfx/
+ Example for a library in "/vendor/lib/soundfx/lib.so":
+ <library name="name" path="lib.so"/>
+ </xs:documentation>
+ </xs:annotation>
+ <xs:sequence>
+ <xs:element name="library" minOccurs="0" maxOccurs="unbounded">
+ <xs:complexType>
+ <xs:attribute name="name" type="xs:string" use="required"/>
+ <xs:attribute name="path" type="aec:relativePathType" use="required"/>
+ </xs:complexType>
+ </xs:element>
+ </xs:sequence>
+ </xs:complexType>
+ <xs:complexType name="effectImplType">
+ <xs:attribute name="library" type="xs:string" use="required"/>
+ <xs:attribute name="uuid" type="aec:uuidType" use="required"/>
+ </xs:complexType>
+ <xs:complexType name="effectType">
+ <xs:complexContent>
+ <xs:extension base="aec:effectImplType">
+ <xs:attribute name="name" type="xs:string" use="required"/>
+ </xs:extension>
+ </xs:complexContent>
+ </xs:complexType>
+ <xs:complexType name="effectProxyType">
+ <xs:complexContent>
+ <xs:extension base="aec:effectType">
+ <xs:sequence>
+ <xs:element name="libsw" type="aec:effectImplType"/>
+ <xs:element name="libhw" type="aec:effectImplType"/>
+ </xs:sequence>
+ </xs:extension>
+ </xs:complexContent>
+ </xs:complexType>
+ <xs:complexType name="effectsType">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ List of effects to load. Each effect element must contain "name",
+ "library", and "uuid" attrs. The value of the "library" attr must
+ correspond to the name of a "library" element. The name of the effect
+ element is indicative; only the value of the "uuid" element designates
+ the effect for the audio framework. The uuid is the implementation
+ specific UUID as specified by the effect vendor. This is not the generic
+ effect type UUID.
+ For effect proxy implementations, SW and HW implementations of the effect
+ can be specified.
+ Example:
+ <effect name="name" library="lib" uuid="uuuu"/>
+ <effectProxy name="proxied" library="proxy" uuid="xxxx">
+ <libsw library="sw_bundle" uuid="yyyy"/>
+ <libhw library="offload_bundle" uuid="zzzz"/>
+ </effectProxy>
+ </xs:documentation>
+ </xs:annotation>
+ <xs:choice maxOccurs="unbounded">
+ <xs:element name="effect" type="aec:effectType" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element name="effectProxy" type="aec:effectProxyType" minOccurs="0" maxOccurs="unbounded"/>
+ </xs:choice>
+ </xs:complexType>
+ <xs:complexType name="streamProcessingType">
+ <xs:sequence>
+ <xs:element name="apply" minOccurs="0" maxOccurs="unbounded">
+ <xs:complexType>
+ <xs:attribute name="effect" type="xs:string" use="required"/>
+ </xs:complexType>
+ </xs:element>
+ </xs:sequence>
+ </xs:complexType>
+ <xs:complexType name="streamPreprocessType">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ Audio preprocessing configuration. The processing configuration consists
+ of a list of elements each describing processing settings for a given
+ input stream. Valid input stream types are listed in "streamInputType".
+ Each stream element contains a list of "apply" elements. The value of the
+ "effect" attr must correspond to the name of an "effect" element.
+ Example:
+ <stream type="voice_communication">
+ <apply effect="effect1"/>
+ <apply effect="effect2"/>
+ </stream>
+ </xs:documentation>
+ </xs:annotation>
+ <xs:complexContent>
+ <xs:extension base="aec:streamProcessingType">
+ <xs:attribute name="type" type="aec:streamInputType" use="required"/>
+ </xs:extension>
+ </xs:complexContent>
+ </xs:complexType>
+ <xs:complexType name="streamPostprocessType">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ Audio postprocessing configuration. The processing configuration consists
+ of a list of elements each describing processing settings for a given
+ output stream. Valid output stream types are listed in "streamOutputType".
+ Each stream element contains a list of "apply" elements. The value of the
+ "effect" attr must correspond to the name of an "effect" element.
+ Example:
+ <stream type="music">
+ <apply effect="effect1"/>
+ </stream>
+ </xs:documentation>
+ </xs:annotation>
+ <xs:complexContent>
+ <xs:extension base="aec:streamProcessingType">
+ <xs:attribute name="type" type="aec:streamOutputType" use="required"/>
+ </xs:extension>
+ </xs:complexContent>
+ </xs:complexType>
+ <xs:complexType name="deviceProcessType">
+ <xs:annotation>
+ <xs:documentation xml:lang="en">
+ Audio Device Effects configuration. The processing configuration consists
+ of a list of effects to be automatically added on a device Port when involved in an audio
+ patch.
+ Valid device types are listed in "deviceType" and shall be kept aligned.
+ Each stream element contains a list of "apply" elements. The value of the
+ "effect" attr must correspond to the name of an "effect" element.
+ Note that if the device is involved in a hardware patch, the effect must be hardware
+ accelerated.
+ Example:
+ <devicePort address="BUS00_USAGE_MAIN" type="AUDIO_DEVICE_OUT_BUS">
+ <apply effect="equalizer"/>
+ <apply effect="effect2"/>
+ </devicePort>
+ </xs:documentation>
+ </xs:annotation>
+ <xs:complexContent>
+ <xs:extension base="aec:streamProcessingType">
+ <xs:attribute name="address" type="xs:string" use="required"/>
+ <xs:attribute name="type" type="aec:deviceType" use="required"/>
+ </xs:extension>
+ </xs:complexContent>
+ </xs:complexType>
+ <!-- Root element -->
+ <xs:element name="audio_effects_conf">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element name="libraries" type="aec:librariesType"/>
+ <xs:element name="effects" type="aec:effectsType"/>
+ <xs:element name="postprocess" minOccurs="0" maxOccurs="1">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element name="stream" type="aec:streamPostprocessType" minOccurs="0" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+ <xs:element name="preprocess" minOccurs="0" maxOccurs="1">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element name="stream" type="aec:streamPreprocessType" minOccurs="0" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+ <xs:element name="deviceEffects" minOccurs="0" maxOccurs="1">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element name="devicePort" type="aec:deviceProcessType" minOccurs="0" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+ </xs:sequence>
+ <xs:attribute name="version" type="aec:versionType" use="required"/>
+ </xs:complexType>
+ <!-- Keys and references -->
+ <xs:key name="libraryName">
+ <xs:selector xpath="aec:libraries/aec:library"/>
+ <xs:field xpath="@name"/>
+ </xs:key>
+ <xs:keyref name="libraryNameRef1" refer="aec:libraryName">
+ <xs:selector xpath="aec:effects/aec:effect"/>
+ <xs:field xpath="@library"/>
+ </xs:keyref>
+ <xs:keyref name="libraryNameRef2" refer="aec:libraryName">
+ <xs:selector xpath="aec:effects/aec:effect/aec:libsw"/>
+ <xs:field xpath="@library"/>
+ </xs:keyref>
+ <xs:keyref name="libraryNameRef3" refer="aec:libraryName">
+ <xs:selector xpath="aec:effects/aec:effect/aec:libhw"/>
+ <xs:field xpath="@library"/>
+ </xs:keyref>
+ <xs:key name="effectName">
+ <xs:selector xpath="aec:effects/aec:effect|aec:effects/aec:effectProxy"/>
+ <xs:field xpath="@name"/>
+ </xs:key>
+ <xs:keyref name="effectNamePreRef" refer="aec:effectName">
+ <xs:selector xpath="aec:preprocess/aec:stream/aec:apply"/>
+ <xs:field xpath="@effect"/>
+ </xs:keyref>
+ <xs:keyref name="effectNamePostRef" refer="aec:effectName">
+ <xs:selector xpath="aec:postprocess/aec:stream/aec:apply"/>
+ <xs:field xpath="@effect"/>
+ </xs:keyref>
+ </xs:element>
+</xs:schema>
diff --git a/audio/effect/all-versions/default/Android.bp b/audio/effect/all-versions/default/Android.bp
index d9bb78b..1c3dc74 100644
--- a/audio/effect/all-versions/default/Android.bp
+++ b/audio/effect/all-versions/default/Android.bp
@@ -56,7 +56,7 @@
"-DMAJOR_VERSION=2",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
}
cc_library_shared {
@@ -71,7 +71,7 @@
"-DMAJOR_VERSION=4",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
}
cc_library_shared {
@@ -86,7 +86,7 @@
"-DMAJOR_VERSION=5",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
}
cc_library_shared {
@@ -101,5 +101,21 @@
"-DMAJOR_VERSION=6",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
+}
+
+cc_library_shared {
+ enabled: false,
+ name: "android.hardware.audio.effect@7.0-impl",
+ defaults: ["android.hardware.audio.effect-impl_default"],
+ shared_libs: [
+ "android.hardware.audio.common@7.0",
+ "android.hardware.audio.common@7.0-util",
+ "android.hardware.audio.effect@7.0",
+ ],
+ cflags: [
+ "-DMAJOR_VERSION=7",
+ "-DMINOR_VERSION=0",
+ "-include common/all-versions/VersionMacro.h",
+ ],
}
diff --git a/audio/effect/all-versions/vts/functional/Android.bp b/audio/effect/all-versions/vts/functional/Android.bp
index 309aa9d..7cdb18f 100644
--- a/audio/effect/all-versions/vts/functional/Android.bp
+++ b/audio/effect/all-versions/vts/functional/Android.bp
@@ -19,7 +19,7 @@
defaults: ["VtsHalTargetTestDefaults"],
srcs: [
"VtsHalAudioEffectTargetTest.cpp",
- "ValidateAudioEffectsConfiguration.cpp"
+ "ValidateAudioEffectsConfiguration.cpp",
],
static_libs: [
"android.hardware.audio.common.test.utility",
@@ -31,7 +31,10 @@
header_libs: [
"android.hardware.audio.common.util@all-versions",
],
- test_suites: ["general-tests", "vts"],
+ test_suites: [
+ "general-tests",
+ "vts",
+ ],
}
cc_test {
@@ -51,7 +54,7 @@
"-DMAJOR_VERSION=2",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
}
cc_test {
@@ -71,7 +74,7 @@
"-DMAJOR_VERSION=4",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
}
cc_test {
@@ -91,7 +94,7 @@
"-DMAJOR_VERSION=5",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
}
cc_test {
@@ -111,5 +114,26 @@
"-DMAJOR_VERSION=6",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
+}
+
+cc_test {
+ enabled: false,
+ name: "VtsHalAudioEffectV7_0TargetTest",
+ defaults: ["VtsHalAudioEffectTargetTest_default"],
+ // Use test_config for vts suite.
+ // TODO(b/146104851): Add auto-gen rules and remove it.
+ test_config: "VtsHalAudioEffectV7_0TargetTest.xml",
+ static_libs: [
+ "android.hardware.audio.common@7.0",
+ "android.hardware.audio.effect@7.0",
+ ],
+ data: [
+ ":audio_effects_conf_V7_0",
+ ],
+ cflags: [
+ "-DMAJOR_VERSION=7",
+ "-DMINOR_VERSION=0",
+ "-include common/all-versions/VersionMacro.h",
+ ],
}
diff --git a/audio/effect/all-versions/vts/functional/VtsHalAudioEffectV7_0TargetTest.xml b/audio/effect/all-versions/vts/functional/VtsHalAudioEffectV7_0TargetTest.xml
new file mode 100644
index 0000000..e609756
--- /dev/null
+++ b/audio/effect/all-versions/vts/functional/VtsHalAudioEffectV7_0TargetTest.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2019 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Runs VtsHalAudioEffectV7_0TargetTest.">
+ <option name="test-suite-tag" value="apct" />
+ <option name="test-suite-tag" value="apct-native" />
+
+ <target_preparer class="com.android.tradefed.targetprep.RootTargetPreparer"/>
+ <target_preparer class="com.android.tradefed.targetprep.StopServicesSetup"/>
+
+ <target_preparer class="com.android.tradefed.targetprep.RunCommandTargetPreparer">
+ <option name="run-command" value="setprop vts.native_server.on 1"/>
+ <option name="teardown-command" value="setprop vts.native_server.on 0"/>
+ </target_preparer>
+
+ <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+ <option name="cleanup" value="true" />
+ <option name="push" value="VtsHalAudioEffectV7_0TargetTest->/data/local/tmp/VtsHalAudioEffectV7_0TargetTest" />
+ <option name="push" value="audio_effects_conf_V7_0.xsd->/data/local/tmp/audio_effects_conf_V7_0.xsd" />
+ </target_preparer>
+
+ <test class="com.android.tradefed.testtype.GTest" >
+ <option name="native-test-device-path" value="/data/local/tmp" />
+ <option name="module-name" value="VtsHalAudioEffectV7_0TargetTest" />
+ </test>
+</configuration>
diff --git a/automotive/can/1.0/default/libnetdevice/can.cpp b/automotive/can/1.0/default/libnetdevice/can.cpp
index 5a1105c..083f4f0 100644
--- a/automotive/can/1.0/default/libnetdevice/can.cpp
+++ b/automotive/can/1.0/default/libnetdevice/can.cpp
@@ -34,7 +34,7 @@
static constexpr can_err_mask_t kErrMask = CAN_ERR_MASK;
base::unique_fd socket(const std::string& ifname) {
- struct sockaddr_can addr = {};
+ sockaddr_can addr = {};
addr.can_family = AF_CAN;
addr.can_ifindex = nametoindex(ifname);
if (addr.can_ifindex == 0) {
@@ -58,7 +58,7 @@
return {};
}
- if (0 != bind(sock.get(), reinterpret_cast<struct sockaddr*>(&addr), sizeof(addr))) {
+ if (0 != bind(sock.get(), reinterpret_cast<sockaddr*>(&addr), sizeof(addr))) {
LOG(ERROR) << "Can't bind to CAN interface " << ifname;
return {};
}
@@ -67,26 +67,25 @@
}
bool setBitrate(std::string ifname, uint32_t bitrate) {
- struct can_bittiming bt = {};
+ can_bittiming bt = {};
bt.bitrate = bitrate;
- nl::MessageFactory<struct ifinfomsg> req(RTM_NEWLINK, NLM_F_REQUEST | NLM_F_ACK);
+ nl::MessageFactory<ifinfomsg> req(RTM_NEWLINK, NLM_F_REQUEST | NLM_F_ACK);
- const auto ifidx = nametoindex(ifname);
- if (ifidx == 0) {
+ req->ifi_index = nametoindex(ifname);
+ if (req->ifi_index == 0) {
LOG(ERROR) << "Can't find interface " << ifname;
return false;
}
- req.data().ifi_index = ifidx;
{
- auto linkinfo = req.nest(IFLA_LINKINFO);
- req.addattr(IFLA_INFO_KIND, "can");
+ auto linkinfo = req.addNested(IFLA_LINKINFO);
+ req.add(IFLA_INFO_KIND, "can");
{
- auto infodata = req.nest(IFLA_INFO_DATA);
+ auto infodata = req.addNested(IFLA_INFO_DATA);
/* For CAN FD, it would require to add IFLA_CAN_DATA_BITTIMING
* and IFLA_CAN_CTRLMODE as well. */
- req.addattr(IFLA_CAN_BITTIMING, bt);
+ req.add(IFLA_CAN_BITTIMING, bt);
}
}
diff --git a/automotive/can/1.0/default/libnetdevice/include/libnetdevice/libnetdevice.h b/automotive/can/1.0/default/libnetdevice/include/libnetdevice/libnetdevice.h
index 9a26ff1..70cb688 100644
--- a/automotive/can/1.0/default/libnetdevice/include/libnetdevice/libnetdevice.h
+++ b/automotive/can/1.0/default/libnetdevice/include/libnetdevice/libnetdevice.h
@@ -20,6 +20,7 @@
#include <array>
#include <optional>
+#include <set>
#include <string>
namespace android::netdevice {
@@ -53,14 +54,34 @@
std::optional<bool> isUp(std::string ifname);
/**
- * Checks, if the network interface exists and is up.
- *
- * This is a convenience function to call both exists() and isUp().
- *
- * \param ifname Interface to check
- * \return true if the interface is up, false otherwise
+ * Interface condition to wait for.
*/
-bool existsAndIsUp(const std::string& ifname);
+enum class WaitCondition {
+ /**
+ * Interface is present (but not necessarily up).
+ */
+ PRESENT,
+
+ /**
+ * Interface is up.
+ */
+ PRESENT_AND_UP,
+
+ /**
+ * Interface is down or not present at all (disconnected).
+ */
+ DOWN_OR_GONE,
+};
+
+/**
+ * Listens for interface changes until the anticipated condition takes place.
+ *
+ * \param ifnames List of interfaces to watch for.
+ * \param cnd Awaited condition.
+ * \param allOf true if all interfaces need to satisfy the condition, false if a single satisfying
+ * interface should stop the wait.
+ */
+void waitFor(std::set<std::string> ifnames, WaitCondition cnd, bool allOf = true);
/**
* Brings network interface up.
diff --git a/automotive/can/1.0/default/libnetdevice/libnetdevice.cpp b/automotive/can/1.0/default/libnetdevice/libnetdevice.cpp
index e2ba2cb..4c5b309 100644
--- a/automotive/can/1.0/default/libnetdevice/libnetdevice.cpp
+++ b/automotive/can/1.0/default/libnetdevice/libnetdevice.cpp
@@ -27,6 +27,8 @@
#include <linux/rtnetlink.h>
#include <net/if.h>
+#include <sstream>
+
namespace android::netdevice {
void useSocketDomain(int domain) {
@@ -37,16 +39,6 @@
return nametoindex(ifname) != 0;
}
-std::optional<bool> isUp(std::string ifname) {
- auto ifr = ifreqs::fromName(ifname);
- if (!ifreqs::send(SIOCGIFFLAGS, ifr)) return std::nullopt;
- return ifr.ifr_flags & IFF_UP;
-}
-
-bool existsAndIsUp(const std::string& ifname) {
- return exists(ifname) && isUp(ifname).value_or(false);
-}
-
bool up(std::string ifname) {
auto ifr = ifreqs::fromName(ifname);
if (!ifreqs::send(SIOCGIFFLAGS, ifr)) return false;
@@ -62,13 +54,13 @@
}
bool add(std::string dev, std::string type) {
- nl::MessageFactory<struct ifinfomsg> req(RTM_NEWLINK,
- NLM_F_REQUEST | NLM_F_CREATE | NLM_F_EXCL | NLM_F_ACK);
- req.addattr(IFLA_IFNAME, dev);
+ nl::MessageFactory<ifinfomsg> req(RTM_NEWLINK,
+ NLM_F_REQUEST | NLM_F_CREATE | NLM_F_EXCL | NLM_F_ACK);
+ req.add(IFLA_IFNAME, dev);
{
- auto linkinfo = req.nest(IFLA_LINKINFO);
- req.addattr(IFLA_INFO_KIND, type);
+ auto linkinfo = req.addNested(IFLA_LINKINFO);
+ req.add(IFLA_INFO_KIND, type);
}
nl::Socket sock(NETLINK_ROUTE);
@@ -76,8 +68,8 @@
}
bool del(std::string dev) {
- nl::MessageFactory<struct ifinfomsg> req(RTM_DELLINK, NLM_F_REQUEST | NLM_F_ACK);
- req.addattr(IFLA_IFNAME, dev);
+ nl::MessageFactory<ifinfomsg> req(RTM_DELLINK, NLM_F_REQUEST | NLM_F_ACK);
+ req.add(IFLA_IFNAME, dev);
nl::Socket sock(NETLINK_ROUTE);
return sock.send(req) && sock.receiveAck(req);
@@ -102,6 +94,97 @@
return ifreqs::send(SIOCSIFHWADDR, ifr);
}
+std::optional<bool> isUp(std::string ifname) {
+ auto ifr = ifreqs::fromName(ifname);
+ if (!ifreqs::send(SIOCGIFFLAGS, ifr)) return std::nullopt;
+ return ifr.ifr_flags & IFF_UP;
+}
+
+struct WaitState {
+ bool present;
+ bool up;
+
+ bool satisfied(WaitCondition cnd) const {
+ switch (cnd) {
+ case WaitCondition::PRESENT:
+ if (present) return true;
+ break;
+ case WaitCondition::PRESENT_AND_UP:
+ if (present && up) return true;
+ break;
+ case WaitCondition::DOWN_OR_GONE:
+ if (!present || !up) return true;
+ break;
+ }
+ return false;
+ }
+};
+
+static std::string toString(WaitCondition cnd) {
+ switch (cnd) {
+ case WaitCondition::PRESENT:
+ return "become present";
+ case WaitCondition::PRESENT_AND_UP:
+ return "come up";
+ case WaitCondition::DOWN_OR_GONE:
+ return "go down";
+ }
+}
+
+static std::string toString(const std::set<std::string>& ifnames) {
+ std::stringstream ss;
+ std::copy(ifnames.begin(), ifnames.end(), std::ostream_iterator<std::string>(ss, ","));
+ auto str = ss.str();
+ str.pop_back();
+ return str;
+}
+
+void waitFor(std::set<std::string> ifnames, WaitCondition cnd, bool allOf) {
+ nl::Socket sock(NETLINK_ROUTE, 0, RTMGRP_LINK);
+
+ using StatesMap = std::map<std::string, WaitState>;
+ StatesMap states = {};
+ for (const auto ifname : ifnames) {
+ const auto present = exists(ifname);
+ const auto up = present && isUp(ifname).value_or(false);
+ states[ifname] = {present, up};
+ }
+
+ const auto mapConditionChecker = [cnd](const StatesMap::iterator::value_type& it) {
+ return it.second.satisfied(cnd);
+ };
+ const auto isFullySatisfied = [&states, allOf, mapConditionChecker]() {
+ if (allOf) {
+ return std::all_of(states.begin(), states.end(), mapConditionChecker);
+ } else {
+ return std::any_of(states.begin(), states.end(), mapConditionChecker);
+ }
+ };
+
+ if (isFullySatisfied()) return;
+
+ LOG(DEBUG) << "Waiting for " << (allOf ? "" : "any of ") << toString(ifnames) << " to "
+ << toString(cnd);
+ for (const auto rawMsg : sock) {
+ const auto msg = nl::Message<ifinfomsg>::parse(rawMsg, {RTM_NEWLINK, RTM_DELLINK});
+ if (!msg.has_value()) continue;
+
+ const auto ifname = msg->attributes.get<std::string>(IFLA_IFNAME);
+ if (ifnames.count(ifname) == 0) continue;
+
+ const bool present = (msg->header.nlmsg_type != RTM_DELLINK);
+ const bool up = present && (msg->data.ifi_flags & IFF_UP) != 0;
+ states[ifname] = {present, up};
+
+ if (isFullySatisfied()) {
+ LOG(DEBUG) << "Finished waiting for " << (allOf ? "" : "some of ") << toString(ifnames)
+ << " to " << toString(cnd);
+ return;
+ }
+ }
+ LOG(FATAL) << "Can't read Netlink socket";
+}
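A caller-side usage sketch (the interface name is hypothetical and the snippet assumes linking against libnetdevice):

    #include <libnetdevice/libnetdevice.h>

    int main() {
        using namespace android::netdevice;
        // Block until the interface appears and comes up...
        waitFor({"can0"}, WaitCondition::PRESENT_AND_UP);
        // ...and later until it goes down or disappears.
        waitFor({"can0"}, WaitCondition::DOWN_OR_GONE, /* allOf= */ true);
        return 0;
    }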
+
} // namespace android::netdevice
bool operator==(const android::netdevice::hwaddr_t lhs, const unsigned char rhs[ETH_ALEN]) {
diff --git a/automotive/can/1.0/default/libnetdevice/vlan.cpp b/automotive/can/1.0/default/libnetdevice/vlan.cpp
index 33dc029..ee02f7b 100644
--- a/automotive/can/1.0/default/libnetdevice/vlan.cpp
+++ b/automotive/can/1.0/default/libnetdevice/vlan.cpp
@@ -33,18 +33,18 @@
return false;
}
- nl::MessageFactory<struct ifinfomsg> req(RTM_NEWLINK,
- NLM_F_REQUEST | NLM_F_CREATE | NLM_F_EXCL | NLM_F_ACK);
- req.addattr(IFLA_IFNAME, vlan);
- req.addattr<uint32_t>(IFLA_LINK, ethidx);
+ nl::MessageFactory<ifinfomsg> req(RTM_NEWLINK,
+ NLM_F_REQUEST | NLM_F_CREATE | NLM_F_EXCL | NLM_F_ACK);
+ req.add(IFLA_IFNAME, vlan);
+ req.add<uint32_t>(IFLA_LINK, ethidx);
{
- auto linkinfo = req.nest(IFLA_LINKINFO);
- req.addattr(IFLA_INFO_KIND, "vlan");
+ auto linkinfo = req.addNested(IFLA_LINKINFO);
+ req.add(IFLA_INFO_KIND, "vlan");
{
- auto linkinfo = req.nest(IFLA_INFO_DATA);
- req.addattr(IFLA_VLAN_ID, id);
+ auto linkinfo = req.addNested(IFLA_INFO_DATA);
+ req.add(IFLA_VLAN_ID, id);
}
}
diff --git a/automotive/can/1.0/default/libnl++/Attributes.cpp b/automotive/can/1.0/default/libnl++/Attributes.cpp
index c101647..620f57b 100644
--- a/automotive/can/1.0/default/libnl++/Attributes.cpp
+++ b/automotive/can/1.0/default/libnl++/Attributes.cpp
@@ -49,7 +49,11 @@
template <>
std::string Attributes::parse(Buffer<nlattr> buf) {
const auto rawString = buf.data<char>().getRaw();
- return std::string(rawString.ptr(), rawString.len());
+ std::string str(rawString.ptr(), rawString.len());
+
+ str.erase(std::find(str.begin(), str.end(), '\0'), str.end());
+
+ return str;
}
template <typename T>
diff --git a/automotive/can/1.0/default/libnl++/MessageFactory.cpp b/automotive/can/1.0/default/libnl++/MessageFactory.cpp
index 0c6a331..6f35897 100644
--- a/automotive/can/1.0/default/libnl++/MessageFactory.cpp
+++ b/automotive/can/1.0/default/libnl++/MessageFactory.cpp
@@ -17,41 +17,36 @@
#include <libnl++/MessageFactory.h>
#include <android-base/logging.h>
+#include <libnl++/bits.h>
-// for RTA_ macros missing from NLA_ definitions
-#include <linux/rtnetlink.h>
+namespace android::nl {
-namespace android::nl::impl {
-
-static struct nlattr* nlmsg_tail(struct nlmsghdr* n) {
- return reinterpret_cast<struct nlattr*>( //
- reinterpret_cast<uintptr_t>(n) + NLMSG_ALIGN(n->nlmsg_len));
+static nlattr* tail(nlmsghdr* msg) {
+ return reinterpret_cast<nlattr*>(uintptr_t(msg) + impl::align(msg->nlmsg_len));
}
-struct nlattr* addattr_l(struct nlmsghdr* n, size_t maxLen, nlattrtype_t type, const void* data,
- size_t dataLen) {
- size_t newLen = NLMSG_ALIGN(n->nlmsg_len) + RTA_SPACE(dataLen);
+nlattr* MessageFactoryBase::add(nlmsghdr* msg, size_t maxLen, nlattrtype_t type, const void* data,
+ size_t dataLen) {
+ const auto totalAttrLen = impl::space<nlattr>(dataLen);
+ const auto newLen = impl::align(msg->nlmsg_len) + totalAttrLen;
if (newLen > maxLen) {
- LOG(ERROR) << "addattr_l failed - exceeded maxLen: " << newLen << " > " << maxLen;
+ LOG(ERROR) << "Can't add attribute of size " << dataLen //
+ << " - exceeded maxLen: " << newLen << " > " << maxLen;
return nullptr;
}
- auto attr = nlmsg_tail(n);
- attr->nla_len = RTA_SPACE(dataLen);
+ auto attr = tail(msg);
+ attr->nla_len = totalAttrLen;
attr->nla_type = type;
- if (dataLen > 0) memcpy(RTA_DATA(attr), data, dataLen);
+ if (dataLen > 0) memcpy(impl::data<nlattr, void>(attr), data, dataLen);
- n->nlmsg_len = newLen;
+ msg->nlmsg_len = newLen;
return attr;
}
-struct nlattr* addattr_nest(struct nlmsghdr* n, size_t maxLen, nlattrtype_t type) {
- return addattr_l(n, maxLen, type, nullptr, 0);
+void MessageFactoryBase::closeNested(nlmsghdr* msg, nlattr* nested) {
+ if (nested == nullptr) return;
+ nested->nla_len = uintptr_t(tail(msg)) - uintptr_t(nested);
}
-void addattr_nest_end(struct nlmsghdr* n, struct nlattr* nest) {
- size_t nestLen = reinterpret_cast<uintptr_t>(nlmsg_tail(n)) - reinterpret_cast<uintptr_t>(nest);
- nest->nla_len = nestLen;
-}
-
-} // namespace android::nl::impl
+} // namespace android::nl
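Putting the renamed helpers together, a caller-side sketch mirroring the libnetdevice usage above (the Socket header path and the "dummy" link kind are assumptions, and the snippet presumes linking against libnl++):

    #include <libnl++/MessageFactory.h>
    #include <libnl++/Socket.h>  // path assumed by analogy with MessageFactory.h

    #include <linux/rtnetlink.h>

    #include <string>

    bool createLink(const std::string& name) {
        android::nl::MessageFactory<ifinfomsg> req(
                RTM_NEWLINK, NLM_F_REQUEST | NLM_F_CREATE | NLM_F_EXCL | NLM_F_ACK);
        req.add(IFLA_IFNAME, name);
        {
            // Scoped as in the libnetdevice code above: attributes added inside
            // this block nest under IFLA_LINKINFO.
            auto linkinfo = req.addNested(IFLA_LINKINFO);
            req.add(IFLA_INFO_KIND, "dummy");
        }
        android::nl::Socket sock(NETLINK_ROUTE);
        return sock.send(req) && sock.receiveAck(req);
    }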
diff --git a/automotive/can/1.0/default/libnl++/Socket.cpp b/automotive/can/1.0/default/libnl++/Socket.cpp
index 1a34df8..08683ca 100644
--- a/automotive/can/1.0/default/libnl++/Socket.cpp
+++ b/automotive/can/1.0/default/libnl++/Socket.cpp
@@ -49,8 +49,8 @@
bool Socket::send(const Buffer<nlmsghdr>& msg, const sockaddr_nl& sa) {
if constexpr (kSuperVerbose) {
- LOG(VERBOSE) << (mFailed ? "(not) " : "") << "sending Netlink message (" //
- << msg->nlmsg_pid << " -> " << sa.nl_pid << "): " << toString(msg, mProtocol);
+ LOG(VERBOSE) << (mFailed ? "(not) " : "") << "sending to " << sa.nl_pid << ": "
+ << toString(msg, mProtocol);
}
if (mFailed) return false;
@@ -68,6 +68,16 @@
return true;
}
+bool Socket::increaseReceiveBuffer(size_t maxSize) {
+ if (maxSize == 0) {
+ LOG(ERROR) << "Maximum receive size should not be zero";
+ return false;
+ }
+
+ if (mReceiveBuffer.size() < maxSize) mReceiveBuffer.resize(maxSize);
+ return true;
+}
+
std::optional<Buffer<nlmsghdr>> Socket::receive(size_t maxSize) {
return receiveFrom(maxSize).first;
}
@@ -75,11 +85,7 @@
std::pair<std::optional<Buffer<nlmsghdr>>, sockaddr_nl> Socket::receiveFrom(size_t maxSize) {
if (mFailed) return {std::nullopt, {}};
- if (maxSize == 0) {
- LOG(ERROR) << "Maximum receive size should not be zero";
- return {std::nullopt, {}};
- }
- if (mReceiveBuffer.size() < maxSize) mReceiveBuffer.resize(maxSize);
+ if (!increaseReceiveBuffer(maxSize)) return {std::nullopt, {}};
sockaddr_nl sa = {};
socklen_t saLen = sizeof(sa);
@@ -97,8 +103,7 @@
Buffer<nlmsghdr> msg(reinterpret_cast<nlmsghdr*>(mReceiveBuffer.data()), bytesReceived);
if constexpr (kSuperVerbose) {
- LOG(VERBOSE) << "received (" << sa.nl_pid << " -> " << msg->nlmsg_pid << "):" //
- << toString(msg, mProtocol);
+ LOG(VERBOSE) << "received from " << sa.nl_pid << ": " << toString(msg, mProtocol);
}
return {msg, sa};
}
@@ -121,19 +126,16 @@
std::optional<Buffer<nlmsghdr>> Socket::receive(const std::set<nlmsgtype_t>& msgtypes,
size_t maxSize) {
- while (!mFailed) {
- const auto msgBuf = receive(maxSize);
- if (!msgBuf.has_value()) return std::nullopt;
+ if (mFailed || !increaseReceiveBuffer(maxSize)) return std::nullopt;
- for (const auto rawMsg : *msgBuf) {
- if (msgtypes.count(rawMsg->nlmsg_type) == 0) {
- LOG(WARNING) << "Received (and ignored) unexpected Netlink message of type "
- << rawMsg->nlmsg_type;
- continue;
- }
-
- return rawMsg;
+ for (const auto rawMsg : *this) {
+ if (msgtypes.count(rawMsg->nlmsg_type) == 0) {
+ LOG(WARNING) << "Received (and ignored) unexpected Netlink message of type "
+ << rawMsg->nlmsg_type;
+ continue;
}
+
+ return rawMsg;
}
return std::nullopt;
@@ -151,4 +153,47 @@
return sa.nl_pid;
}
+Socket::receive_iterator::receive_iterator(Socket& socket, bool end)
+ : mSocket(socket), mIsEnd(end) {
+ if (!end) receive();
+}
+
+Socket::receive_iterator Socket::receive_iterator::operator++() {
+ CHECK(!mIsEnd) << "Trying to increment end iterator";
+ ++mCurrent;
+ if (mCurrent.isEnd()) receive();
+ return *this;
+}
+
+bool Socket::receive_iterator::operator==(const receive_iterator& other) const {
+ if (mIsEnd != other.mIsEnd) return false;
+ if (mIsEnd && other.mIsEnd) return true;
+ return mCurrent == other.mCurrent;
+}
+
+const Buffer<nlmsghdr>& Socket::receive_iterator::operator*() const {
+ CHECK(!mIsEnd) << "Trying to dereference end iterator";
+ return *mCurrent;
+}
+
+void Socket::receive_iterator::receive() {
+ CHECK(!mIsEnd) << "Trying to receive on end iterator";
+ CHECK(mCurrent.isEnd()) << "Trying to receive without draining previous read";
+
+ const auto buf = mSocket.receive();
+ if (buf.has_value()) {
+ mCurrent = buf->begin();
+ } else {
+ mIsEnd = true;
+ }
+}
+
+Socket::receive_iterator Socket::begin() {
+ return {*this, false};
+}
+
+Socket::receive_iterator Socket::end() {
+ return {*this, true};
+}
+
} // namespace android::nl
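The new begin()/end() pair makes the socket itself iterable. A short sketch of the semantics (header path assumed; the loop blocks in receive() and ends only when a read fails):

    #include <libnl++/Socket.h>  // path assumed by analogy with the other libnl++ headers

    #include <linux/rtnetlink.h>

    void drainLinkMessages() {
        android::nl::Socket sock(NETLINK_ROUTE, 0, RTMGRP_LINK);
        for (const auto rawMsg : sock) {
            // Each element is a Buffer<nlmsghdr>; inspect or dispatch it here.
            (void)rawMsg->nlmsg_type;
        }
    }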
diff --git a/automotive/can/1.0/default/libnl++/common.cpp b/automotive/can/1.0/default/libnl++/common.cpp
index 74eee24..23c2d94 100644
--- a/automotive/can/1.0/default/libnl++/common.cpp
+++ b/automotive/can/1.0/default/libnl++/common.cpp
@@ -32,9 +32,7 @@
return 0;
}
-std::string sanitize(std::string str) {
- str.erase(std::find(str.begin(), str.end(), '\0'), str.end());
-
+std::string printableOnly(std::string str) {
const auto isInvalid = [](char c) { return !isprint(c); };
std::replace_if(str.begin(), str.end(), isInvalid, '?');
diff --git a/automotive/can/1.0/default/libnl++/common.h b/automotive/can/1.0/default/libnl++/common.h
index 1d9dbab..38263c5 100644
--- a/automotive/can/1.0/default/libnl++/common.h
+++ b/automotive/can/1.0/default/libnl++/common.h
@@ -37,11 +37,14 @@
unsigned int nametoindex(const std::string& ifname);
/**
- * Sanitize a string of unknown contents.
+ * Filter a string against non-printable characters.
*
- * Trims the string to the first '\0' character and replaces all non-printable characters with '?'.
+ * Replaces all non-printable characters with '?'.
+ *
+ * \param str String to filter.
+ * \return Filtered string.
*/
-std::string sanitize(std::string str);
+std::string printableOnly(std::string str);
/**
* Calculates a (optionally running) CRC16 checksum.
diff --git a/automotive/can/1.0/default/libnl++/include/libnl++/Buffer.h b/automotive/can/1.0/default/libnl++/include/libnl++/Buffer.h
index a6f8f7a..bf83fbc 100644
--- a/automotive/can/1.0/default/libnl++/include/libnl++/Buffer.h
+++ b/automotive/can/1.0/default/libnl++/include/libnl++/Buffer.h
@@ -17,6 +17,7 @@
#pragma once
#include <android-base/logging.h>
+#include <libnl++/bits.h>
#include <linux/netlink.h>
@@ -25,7 +26,7 @@
namespace android::nl {
/**
- * Buffer wrapper containing netlink structure (e.g. struct nlmsghdr, struct nlattr).
+ * Buffer wrapper containing netlink structure (e.g. nlmsghdr, nlattr).
*
* This is a C++-style, memory safe(r) and generic implementation of linux/netlink.h macros.
*
@@ -43,15 +44,6 @@
*/
template <typename T>
class Buffer {
- // The following definitions are C++ equivalents of NLMSG_* macros from linux/netlink.h
-
- static constexpr size_t alignto = NLMSG_ALIGNTO;
- static_assert(NLMSG_ALIGNTO == NLA_ALIGNTO);
-
- static constexpr size_t align(size_t ptr) { return (ptr + alignto - 1) & ~(alignto - 1); }
-
- static constexpr size_t hdrlen = align(sizeof(T));
-
public:
/**
* Constructs empty buffer of size 0.
@@ -96,35 +88,35 @@
template <typename D>
const Buffer<D> data(size_t offset = 0) const {
- // Equivalent to NLMSG_DATA(hdr) + NLMSG_ALIGN(offset)
- const D* dptr = reinterpret_cast<const D*>(uintptr_t(mData) + hdrlen + align(offset));
- return {dptr, dataEnd()};
+ return {impl::data<const T, const D>(mData, offset), dataEnd()};
}
class iterator {
public:
iterator() : mCurrent(nullptr, size_t(0)) {
- CHECK(!mCurrent.ok()) << "end() iterator should indicate it's beyond end";
+ CHECK(isEnd()) << "end() iterator should indicate it's beyond end";
}
iterator(const Buffer<T>& buf) : mCurrent(buf) {}
iterator operator++() {
// mBufferEnd stays the same
mCurrent.mData = reinterpret_cast<const T*>( //
- uintptr_t(mCurrent.mData) + align(mCurrent.declaredLength()));
+ uintptr_t(mCurrent.mData) + impl::align(mCurrent.declaredLength()));
return *this;
}
bool operator==(const iterator& other) const {
// all iterators beyond end are the same
- if (!mCurrent.ok() && !other.mCurrent.ok()) return true;
+ if (isEnd() && other.isEnd()) return true;
return uintptr_t(other.mCurrent.mData) == uintptr_t(mCurrent.mData);
}
const Buffer<T>& operator*() const { return mCurrent; }
+ bool isEnd() const { return !mCurrent.ok(); }
+
protected:
Buffer<T> mCurrent;
};
diff --git a/automotive/can/1.0/default/libnl++/include/libnl++/MessageFactory.h b/automotive/can/1.0/default/libnl++/include/libnl++/MessageFactory.h
index 5272577..c3d72c5 100644
--- a/automotive/can/1.0/default/libnl++/include/libnl++/MessageFactory.h
+++ b/automotive/can/1.0/default/libnl++/include/libnl++/MessageFactory.h
@@ -17,6 +17,7 @@
#pragma once
#include <android-base/macros.h>
+#include <libnl++/Buffer.h>
#include <libnl++/types.h>
#include <linux/netlink.h>
@@ -25,131 +26,140 @@
namespace android::nl {
-/** Implementation details, do not use outside MessageFactory template. */
-namespace impl {
-
-struct nlattr* addattr_l(struct nlmsghdr* n, size_t maxLen, nlattrtype_t type, const void* data,
- size_t dataLen);
-struct nlattr* addattr_nest(struct nlmsghdr* n, size_t maxLen, nlattrtype_t type);
-void addattr_nest_end(struct nlmsghdr* n, struct nlattr* nest);
-
-} // namespace impl
+class MessageFactoryBase {
+ protected:
+ static nlattr* add(nlmsghdr* msg, size_t maxLen, nlattrtype_t type, const void* data,
+ size_t dataLen);
+ static void closeNested(nlmsghdr* msg, nlattr* nested);
+};
/**
* Wrapper around NETLINK_ROUTE messages, to build them in C++ style.
*
- * \param T specific message header (such as struct ifinfomsg)
- * \param BUFSIZE how much space to reserve for payload (not counting the header size)
+ * \param T Message payload type (such as ifinfomsg).
+ * \param BUFSIZE how much space to reserve for attributes.
*/
template <class T, unsigned int BUFSIZE = 128>
-struct MessageFactory {
- struct RequestData {
- struct nlmsghdr nlmsg;
+class MessageFactory : private MessageFactoryBase {
+ struct alignas(NLMSG_ALIGNTO) Message {
+ nlmsghdr header;
T data;
- char buf[BUFSIZE];
+ uint8_t attributesBuffer[BUFSIZE];
};
- static constexpr size_t totalLength = sizeof(RequestData);
-
+ public:
/**
* Create empty message.
*
- * \param type Message type (such as RTM_NEWLINK)
- * \param flags Message flags (such as NLM_F_REQUEST)
+ * \param type Message type (such as RTM_NEWLINK).
+ * \param flags Message flags (such as NLM_F_REQUEST).
*/
- MessageFactory(nlmsgtype_t type, uint16_t flags) {
- mRequest.nlmsg.nlmsg_len = NLMSG_LENGTH(sizeof(mRequest.data));
- mRequest.nlmsg.nlmsg_type = type;
- mRequest.nlmsg.nlmsg_flags = flags;
+ MessageFactory(nlmsgtype_t type, uint16_t flags)
+ : header(mMessage.header), data(mMessage.data) {
+ mMessage.header.nlmsg_len = offsetof(Message, attributesBuffer);
+ mMessage.header.nlmsg_type = type;
+ mMessage.header.nlmsg_flags = flags;
}
- /** \return pointer to raw netlink message header. */
- struct nlmsghdr* header() {
- return &mRequest.nlmsg;
- }
- /** Reference to message-specific header. */
- T& data() { return mRequest.data; }
-
/**
- * Adds an attribute of a simple type.
+ * Netlink message header.
*
- * If this method fails (i.e. due to insufficient space), the message will be marked
- * as bad (\see isGood).
+ * This is a generic Netlink header containing information such as message flags.
+ */
+ nlmsghdr& header;
+
+ /**
+ * Netlink message data.
+ *
+ * This is a payload specific to a given message type.
+ */
+ T& data;
+
+ T* operator->() { return &mMessage.data; }
+
+ /**
+ * Build netlink message.
+ *
+ * In fact, this operation is almost a no-op, since the factory builds the message in a single
+ * buffer, using native data structures.
+ *
+ * A likely failure case is when the BUFSIZE template parameter is too small to accommodate
+ * added attributes. In such a case, please increase this parameter.
+ *
+ * \return Netlink message or std::nullopt in case of failure.
+ */
+ std::optional<Buffer<nlmsghdr>> build() const {
+ if (!mIsGood) return std::nullopt;
+ return {{&mMessage.header, mMessage.header.nlmsg_len}};
+ }
+
+ /**
+ * Adds an attribute of a trivially copyable type.
+ *
+ * Template specializations may extend this function for other types, such as std::string.
+ *
+ * If this method fails (e.g. due to insufficient space), a warning will be printed to the log
+ * and the message will be marked as bad, causing a later \see build call to fail.
*
* \param type attribute type (such as IFLA_IFNAME)
* \param attr attribute data
*/
template <class A>
- void addattr(nlattrtype_t type, const A& attr) {
- if (!mIsGood) return;
- auto ap = impl::addattr_l(&mRequest.nlmsg, sizeof(mRequest), type, &attr, sizeof(attr));
- if (ap == nullptr) mIsGood = false;
+ void add(nlattrtype_t type, const A& attr) {
+ add(type, &attr, sizeof(attr));
}
template <>
- void addattr(nlattrtype_t type, const std::string& s) {
- if (!mIsGood) return;
- auto ap = impl::addattr_l(&mRequest.nlmsg, sizeof(mRequest), type, s.c_str(), s.size() + 1);
- if (ap == nullptr) mIsGood = false;
+ void add(nlattrtype_t type, const std::string& s) {
+ add(type, s.c_str(), s.size() + 1);
}
- /** Guard class to frame nested attributes. See nest(int). */
- struct Nest {
- Nest(MessageFactory& req, nlattrtype_t type) : mReq(req), mAttr(req.nestStart(type)) {}
- ~Nest() { mReq.nestEnd(mAttr); }
+ /** Guard class to frame nested attributes. \see addNested(nlattrtype_t). */
+ class [[nodiscard]] NestedGuard {
+ public:
+ NestedGuard(MessageFactory & req, nlattrtype_t type) : mReq(req), mAttr(req.add(type)) {}
+ ~NestedGuard() { closeNested(&mReq.mMessage.header, mAttr); }
private:
MessageFactory& mReq;
- struct nlattr* mAttr;
+ nlattr* mAttr;
- DISALLOW_COPY_AND_ASSIGN(Nest);
+ DISALLOW_COPY_AND_ASSIGN(NestedGuard);
};
/**
* Add nested attribute.
*
* The returned object is a guard for auto-nesting children inside the argument attribute.
- * When the Nest object goes out of scope, the nesting attribute is closed.
+ * When the guard object goes out of scope, the nesting attribute is closed.
*
* Example usage nesting IFLA_CAN_BITTIMING inside IFLA_INFO_DATA, which is nested
* inside IFLA_LINKINFO:
- * MessageFactory<struct ifinfomsg> req(RTM_NEWLINK, NLM_F_REQUEST);
+ * MessageFactory<ifinfomsg> req(RTM_NEWLINK, NLM_F_REQUEST);
* {
- * auto linkinfo = req.nest(IFLA_LINKINFO);
- * req.addattr(IFLA_INFO_KIND, "can");
+ * auto linkinfo = req.addNested(IFLA_LINKINFO);
+ * req.add(IFLA_INFO_KIND, "can");
* {
- * auto infodata = req.nest(IFLA_INFO_DATA);
- * req.addattr(IFLA_CAN_BITTIMING, bitTimingStruct);
+ * auto infodata = req.addNested(IFLA_INFO_DATA);
+ * req.add(IFLA_CAN_BITTIMING, bitTimingStruct);
* }
* }
* // use req
*
* \param type attribute type (such as IFLA_LINKINFO)
*/
- Nest nest(int type) { return Nest(*this, type); }
-
- /**
- * Indicates, whether the message is in a good state.
- *
- * The bad state is usually a result of payload buffer being too small.
- * You can modify BUFSIZE template parameter to fix this.
- */
- bool isGood() const { return mIsGood; }
+ NestedGuard addNested(nlattrtype_t type) { return {*this, type}; }
private:
+ Message mMessage = {};
bool mIsGood = true;
- RequestData mRequest = {};
- struct nlattr* nestStart(nlattrtype_t type) {
+ nlattr* add(nlattrtype_t type, const void* data = nullptr, size_t len = 0) {
if (!mIsGood) return nullptr;
- auto attr = impl::addattr_nest(&mRequest.nlmsg, sizeof(mRequest), type);
+ auto attr = MessageFactoryBase::add(&mMessage.header, sizeof(mMessage), type, data, len);
if (attr == nullptr) mIsGood = false;
return attr;
}
-
- void nestEnd(struct nlattr* nest) {
- if (mIsGood && nest != nullptr) impl::addattr_nest_end(&mRequest.nlmsg, nest);
- }
};
} // namespace android::nl
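For reference, a minimal usage sketch of the reworked MessageFactory API described above. It assumes the libnl++ headers are on the include path; the function name and flag choices are illustrative and not part of this change.
```
// Usage sketch: build an RTM_NEWLINK request with a nested IFLA_LINKINFO
// attribute using the renamed add()/addNested()/build() entry points.
#include <libnl++/MessageFactory.h>

#include <linux/rtnetlink.h>
#include <net/if.h>

bool buildCanLinkRequest(int ifIndex) {
    android::nl::MessageFactory<ifinfomsg> req(RTM_NEWLINK, NLM_F_REQUEST | NLM_F_ACK);

    // Payload fields are now reachable through the public `data` member (or operator->)
    // instead of the former data() accessor.
    req->ifi_index = ifIndex;
    req->ifi_flags = IFF_UP;
    req->ifi_change = IFF_UP;

    {
        // addNested() returns a guard; the nested attribute is closed when the
        // guard goes out of scope (this replaces the former nest()/addattr()).
        auto linkinfo = req.addNested(IFLA_LINKINFO);
        req.add(IFLA_INFO_KIND, "can");
    }

    // build() replaces the isGood() check: it returns std::nullopt if BUFSIZE
    // was too small for the attributes added above.
    const auto msg = req.build();
    return msg.has_value();
}
```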
diff --git a/automotive/can/1.0/default/libnl++/include/libnl++/Socket.h b/automotive/can/1.0/default/libnl++/include/libnl++/Socket.h
index 16b63f5..c69523d 100644
--- a/automotive/can/1.0/default/libnl++/include/libnl++/Socket.h
+++ b/automotive/can/1.0/default/libnl++/include/libnl++/Socket.h
@@ -76,13 +76,12 @@
*/
template <typename T, unsigned BUFSIZE>
bool send(MessageFactory<T, BUFSIZE>& req, const sockaddr_nl& sa) {
- if (!req.isGood()) return false;
+ req.header.nlmsg_seq = mSeq + 1;
- const auto nlmsg = req.header();
- nlmsg->nlmsg_seq = mSeq + 1;
+ const auto msg = req.build();
+ if (!msg.has_value()) return false;
- // With MessageFactory<>, we trust nlmsg_len to be correct.
- return send({nlmsg, nlmsg->nlmsg_len}, sa);
+ return send(*msg, sa);
}
/**
@@ -156,7 +155,7 @@
*/
template <typename T, unsigned BUFSIZE>
bool receiveAck(MessageFactory<T, BUFSIZE>& req) {
- return receiveAck(req.header()->nlmsg_seq);
+ return receiveAck(req.header.nlmsg_seq);
}
/**
@@ -174,6 +173,42 @@
*/
std::optional<unsigned> getPid();
+ /**
+ * Live iterator continuously receiving messages from a Netlink socket.
+ *
+ * Iteration ends when the socket fails to receive a buffer.
+ *
+ * Example:
+ * ```
+ * nl::Socket sock(NETLINK_ROUTE, 0, RTMGRP_LINK);
+ * for (const auto rawMsg : sock) {
+ * const auto msg = nl::Message<ifinfomsg>::parse(rawMsg, {RTM_NEWLINK, RTM_DELLINK});
+ * if (!msg.has_value()) continue;
+ *
+ * LOG(INFO) << msg->attributes.get<std::string>(IFLA_IFNAME)
+ * << " is " << ((msg->data.ifi_flags & IFF_UP) ? "up" : "down");
+ * }
+ * LOG(FATAL) << "Failed to read from Netlink socket";
+ * ```
+ */
+ class receive_iterator {
+ public:
+ receive_iterator(Socket& socket, bool end);
+
+ receive_iterator operator++();
+ bool operator==(const receive_iterator& other) const;
+ const Buffer<nlmsghdr>& operator*() const;
+
+ private:
+ Socket& mSocket;
+ bool mIsEnd;
+ Buffer<nlmsghdr>::iterator mCurrent;
+
+ void receive();
+ };
+ receive_iterator begin();
+ receive_iterator end();
+
private:
const int mProtocol;
base::unique_fd mFd;
@@ -182,6 +217,7 @@
bool mFailed = false;
uint32_t mSeq = 0;
+ bool increaseReceiveBuffer(size_t maxSize);
std::optional<Buffer<nlmsghdr>> receive(const std::set<nlmsgtype_t>& msgtypes, size_t maxSize);
DISALLOW_COPY_AND_ASSIGN(Socket);
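A companion sketch of the updated request/ack flow on the Socket side: send() now builds the message itself, so the former isGood() check disappears, and receiveAck() reads the sequence number from the public header reference. The caller is assumed to have constructed a NETLINK_ROUTE socket as in the iterator example above; the rtgenmsg payload and function name are illustrative only.
```
// Request/ack sketch: a message whose attribute buffer overflowed simply makes
// send() return false; no separate isGood() check is needed anymore.
#include <libnl++/MessageFactory.h>
#include <libnl++/Socket.h>

#include <linux/netlink.h>
#include <linux/rtnetlink.h>
#include <sys/socket.h>

bool dumpLinks(android::nl::Socket& sock) {
    android::nl::MessageFactory<rtgenmsg> req(RTM_GETLINK,
                                              NLM_F_REQUEST | NLM_F_DUMP | NLM_F_ACK);
    req->rtgen_family = AF_UNSPEC;

    sockaddr_nl kernel = {};
    kernel.nl_family = AF_NETLINK;

    if (!sock.send(req, kernel)) return false;  // send() calls req.build() internally
    return sock.receiveAck(req);                // reads req.header.nlmsg_seq
}
```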
diff --git a/automotive/can/1.0/default/libnl++/include/libnl++/bits.h b/automotive/can/1.0/default/libnl++/include/libnl++/bits.h
new file mode 100644
index 0000000..4c8f1aa
--- /dev/null
+++ b/automotive/can/1.0/default/libnl++/include/libnl++/bits.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <linux/netlink.h>
+
+namespace android::nl::impl {
+
+// The following definitions are C++ equivalents of NLMSG_* macros from linux/netlink.h.
+
+/**
+ * Equivalent to NLMSG_ALIGNTO.
+ */
+constexpr size_t alignto = NLMSG_ALIGNTO;
+static_assert(NLMSG_ALIGNTO == NLA_ALIGNTO);
+
+/**
+ * Equivalent to NLMSG_ALIGN(len).
+ */
+constexpr size_t align(size_t len) {
+ return (len + alignto - 1) & ~(alignto - 1);
+}
+
+/**
+ * Equivalent to NLMSG_SPACE(len).
+ */
+template <typename H>
+constexpr size_t space(size_t len) {
+ return align(align(sizeof(H)) + len);
+}
+
+/**
+ * Equivalent to NLMSG_DATA(hdr) + NLMSG_ALIGN(offset).
+ */
+template <typename H, typename D>
+constexpr D* data(H* header, size_t offset = 0) {
+ return reinterpret_cast<D*>(uintptr_t(header) + space<H>(offset));
+}
+
+} // namespace android::nl::impl
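A few compile-time checks illustrate the arithmetic above (NLMSG_ALIGNTO is 4 on Linux); this is only a worked example, not part of the header.
```
// Worked example of the helpers above; all of these hold with NLMSG_ALIGNTO == 4.
#include <libnl++/bits.h>

#include <linux/netlink.h>

namespace impl = android::nl::impl;

static_assert(impl::align(0) == 0);
static_assert(impl::align(1) == 4);    // rounded up to the next alignment boundary
static_assert(impl::align(17) == 20);

// space<H>(len) is the total aligned size of a header plus len bytes of payload
// (the NLMSG_SPACE equivalent).
static_assert(impl::space<nlmsghdr>(0) == impl::align(sizeof(nlmsghdr)));
static_assert(impl::space<nlmsghdr>(1) == impl::align(sizeof(nlmsghdr)) + 4);
```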
diff --git a/automotive/can/1.0/default/libnl++/include/libnl++/printer.h b/automotive/can/1.0/default/libnl++/include/libnl++/printer.h
index 53a06d8..3570918 100644
--- a/automotive/can/1.0/default/libnl++/include/libnl++/printer.h
+++ b/automotive/can/1.0/default/libnl++/include/libnl++/printer.h
@@ -32,6 +32,6 @@
* \param printPayload True will stringify message data, false will only stringify the header(s).
* \return Stringified message.
*/
-std::string toString(const Buffer<nlmsghdr> hdr, int protocol, bool printPayload = false);
+std::string toString(const Buffer<nlmsghdr> hdr, int protocol, bool printPayload = true);
} // namespace android::nl
diff --git a/automotive/can/1.0/default/libnl++/printer.cpp b/automotive/can/1.0/default/libnl++/printer.cpp
index 9735db1..e6cada2 100644
--- a/automotive/can/1.0/default/libnl++/printer.cpp
+++ b/automotive/can/1.0/default/libnl++/printer.cpp
@@ -28,10 +28,10 @@
namespace android::nl {
-static void flagsToStream(std::stringstream& ss, __u16 nlmsg_flags) {
+static void flagsToStream(std::stringstream& ss, __u16 nlmsg_flags, protocols::MessageGenre genre) {
bool first = true;
auto printFlag = [&ss, &first, &nlmsg_flags](__u16 flag, const std::string& name) {
- if (!(nlmsg_flags & flag)) return;
+ if ((nlmsg_flags & flag) != flag) return;
nlmsg_flags &= ~flag;
if (first) {
@@ -42,6 +42,7 @@
ss << name;
};
+
printFlag(NLM_F_REQUEST, "REQUEST");
printFlag(NLM_F_MULTI, "MULTI");
printFlag(NLM_F_ACK, "ACK");
@@ -49,11 +50,29 @@
printFlag(NLM_F_DUMP_INTR, "DUMP_INTR");
printFlag(NLM_F_DUMP_FILTERED, "DUMP_FILTERED");
- // TODO(twasilczyk): print flags depending on request type
- printFlag(NLM_F_ROOT, "ROOT-REPLACE");
- printFlag(NLM_F_MATCH, "MATCH-EXCL");
- printFlag(NLM_F_ATOMIC, "ATOMIC-CREATE");
- printFlag(NLM_F_APPEND, "APPEND");
+ switch (genre) {
+ case protocols::MessageGenre::UNKNOWN:
+ break;
+ case protocols::MessageGenre::GET:
+ printFlag(NLM_F_DUMP, "DUMP"); // ROOT | MATCH
+ printFlag(NLM_F_ROOT, "ROOT");
+ printFlag(NLM_F_MATCH, "MATCH");
+ printFlag(NLM_F_ATOMIC, "ATOMIC");
+ break;
+ case protocols::MessageGenre::NEW:
+ printFlag(NLM_F_REPLACE, "REPLACE");
+ printFlag(NLM_F_EXCL, "EXCL");
+ printFlag(NLM_F_CREATE, "CREATE");
+ printFlag(NLM_F_APPEND, "APPEND");
+ break;
+ case protocols::MessageGenre::DELETE:
+ printFlag(NLM_F_NONREC, "NONREC");
+ break;
+ case protocols::MessageGenre::ACK:
+ printFlag(NLM_F_CAPPED, "CAPPED");
+ printFlag(NLM_F_ACK_TLVS, "ACK_TLVS");
+ break;
+ }
if (nlmsg_flags != 0) {
if (!first) ss << '|';
@@ -100,7 +119,7 @@
}
case DataType::String: {
const auto str = attr.data<char>().getRaw();
- ss << '"' << sanitize({str.ptr(), str.len()}) << '"';
+ ss << '"' << printableOnly({str.ptr(), str.len()}) << '"';
break;
}
case DataType::Uint:
@@ -128,27 +147,26 @@
}
protocols::NetlinkProtocol& protocolDescr = *protocolMaybe;
- auto msgDescMaybe = protocolDescr.getMessageDescriptor(hdr->nlmsg_type);
- const auto msgTypeName = msgDescMaybe.has_value()
- ? msgDescMaybe->get().getMessageName(hdr->nlmsg_type)
- : std::to_string(hdr->nlmsg_type);
+ const auto msgDescMaybe = protocolDescr.getMessageDescriptor(hdr->nlmsg_type);
+ const auto msgDetails =
+ protocols::MessageDescriptor::getMessageDetails(msgDescMaybe, hdr->nlmsg_type);
ss << "nlmsg{" << protocolDescr.getName() << " ";
ss << "hdr={";
- ss << "type=" << msgTypeName;
+ ss << "type=" << msgDetails.name;
if (hdr->nlmsg_flags != 0) {
ss << ", flags=";
- flagsToStream(ss, hdr->nlmsg_flags);
+ flagsToStream(ss, hdr->nlmsg_flags, msgDetails.genre);
}
if (hdr->nlmsg_seq != 0) ss << ", seq=" << hdr->nlmsg_seq;
if (hdr->nlmsg_pid != 0) ss << ", pid=" << hdr->nlmsg_pid;
ss << ", len=" << hdr->nlmsg_len;
-
ss << ", crc=" << std::hex << std::setw(4) << crc16(hdr.data<uint8_t>()) << std::dec;
- ss << "} ";
+ ss << '}';
if (!printPayload) return ss.str();
+ ss << ' ';
if (!msgDescMaybe.has_value()) {
toStream(ss, hdr.data<uint8_t>());
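The tightened bit test, (nlmsg_flags & flag) != flag, matters for composite flags: NLM_F_DUMP is defined as NLM_F_ROOT | NLM_F_MATCH, so the previous single-bit test would have reported DUMP whenever just one of those bits was present. A standalone illustration (the helper name here is illustrative only):
```
// Why the flag test was tightened: a composite flag must only be reported when
// *all* of its bits are present.
#include <linux/netlink.h>

#include <cassert>

bool hasFlag(__u16 flags, __u16 flag) {
    return (flags & flag) == flag;  // the check now used by flagsToStream()
}

int main() {
    assert(hasFlag(NLM_F_REQUEST | NLM_F_DUMP, NLM_F_DUMP));   // both ROOT and MATCH set
    assert(!hasFlag(NLM_F_REQUEST | NLM_F_ROOT, NLM_F_DUMP));  // only ROOT set
    // The old `!(flags & flag)` test would have treated the second case as DUMP.
    return 0;
}
```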
diff --git a/automotive/can/1.0/default/libnl++/protocols/MessageDefinition.cpp b/automotive/can/1.0/default/libnl++/protocols/MessageDefinition.cpp
index dc56643..c93d865 100644
--- a/automotive/can/1.0/default/libnl++/protocols/MessageDefinition.cpp
+++ b/automotive/can/1.0/default/libnl++/protocols/MessageDefinition.cpp
@@ -32,11 +32,12 @@
return find(nla_type)->second;
}
-MessageDescriptor::MessageDescriptor(const std::string& name, const MessageTypeMap&& messageTypes,
+MessageDescriptor::MessageDescriptor(const std::string& name,
+ const MessageDetailsMap&& messageDetails,
const AttributeMap&& attrTypes, size_t contentsSize)
: mName(name),
mContentsSize(contentsSize),
- mMessageTypes(messageTypes),
+ mMessageDetails(messageDetails),
mAttributeMap(attrTypes) {}
MessageDescriptor::~MessageDescriptor() {}
@@ -45,18 +46,25 @@
return mContentsSize;
}
-const MessageDescriptor::MessageTypeMap& MessageDescriptor::getMessageTypeMap() const {
- return mMessageTypes;
+const MessageDescriptor::MessageDetailsMap& MessageDescriptor::getMessageDetailsMap() const {
+ return mMessageDetails;
}
const AttributeMap& MessageDescriptor::getAttributeMap() const {
return mAttributeMap;
}
-const std::string MessageDescriptor::getMessageName(nlmsgtype_t msgtype) const {
- const auto it = mMessageTypes.find(msgtype);
- if (it == mMessageTypes.end()) return "?";
+MessageDescriptor::MessageDetails MessageDescriptor::getMessageDetails(nlmsgtype_t msgtype) const {
+ const auto it = mMessageDetails.find(msgtype);
+ if (it == mMessageDetails.end()) return {std::to_string(msgtype), MessageGenre::UNKNOWN};
return it->second;
}
+MessageDescriptor::MessageDetails MessageDescriptor::getMessageDetails(
+ const std::optional<std::reference_wrapper<const MessageDescriptor>>& msgDescMaybe,
+ nlmsgtype_t msgtype) {
+ if (msgDescMaybe.has_value()) return msgDescMaybe->get().getMessageDetails(msgtype);
+ return {std::to_string(msgtype), protocols::MessageGenre::UNKNOWN};
+}
+
} // namespace android::nl::protocols
diff --git a/automotive/can/1.0/default/libnl++/protocols/MessageDefinition.h b/automotive/can/1.0/default/libnl++/protocols/MessageDefinition.h
index ef73d09..bd0e60f 100644
--- a/automotive/can/1.0/default/libnl++/protocols/MessageDefinition.h
+++ b/automotive/can/1.0/default/libnl++/protocols/MessageDefinition.h
@@ -68,30 +68,53 @@
};
/**
+ * The general kind of a message type.
+ *
+ * For example, RTM_NEWLINK is of the NEW kind. For details, please see the "Flags values"
+ * section in linux/netlink.h.
+ */
+enum class MessageGenre {
+ UNKNOWN,
+ GET,
+ NEW,
+ DELETE,
+ ACK,
+};
+
+/**
* Message family descriptor.
*
* Describes the structure of all message types with the same header and attributes.
*/
class MessageDescriptor {
- protected:
- typedef std::map<nlmsgtype_t, std::string> MessageTypeMap;
-
- MessageDescriptor(const std::string& name, const MessageTypeMap&& messageTypes,
- const AttributeMap&& attrTypes, size_t contentsSize);
+ public:
+ struct MessageDetails {
+ std::string name;
+ MessageGenre genre;
+ };
+ typedef std::map<nlmsgtype_t, MessageDetails> MessageDetailsMap;
public:
virtual ~MessageDescriptor();
size_t getContentsSize() const;
- const MessageTypeMap& getMessageTypeMap() const;
+ const MessageDetailsMap& getMessageDetailsMap() const;
const AttributeMap& getAttributeMap() const;
- const std::string getMessageName(nlmsgtype_t msgtype) const;
+ MessageDetails getMessageDetails(nlmsgtype_t msgtype) const;
virtual void dataToStream(std::stringstream& ss, const Buffer<nlmsghdr> hdr) const = 0;
+ static MessageDetails getMessageDetails(
+ const std::optional<std::reference_wrapper<const MessageDescriptor>>& msgDescMaybe,
+ nlmsgtype_t msgtype);
+
+ protected:
+ MessageDescriptor(const std::string& name, const MessageDetailsMap&& messageDetails,
+ const AttributeMap&& attrTypes, size_t contentsSize);
+
private:
const std::string mName;
const size_t mContentsSize;
- const MessageTypeMap mMessageTypes;
+ const MessageDetailsMap mMessageDetails;
const AttributeMap mAttributeMap;
};
@@ -103,11 +126,11 @@
template <typename T>
class MessageDefinition : public MessageDescriptor {
public:
- MessageDefinition(
+ MessageDefinition( //
const std::string& name,
- const std::initializer_list<MessageDescriptor::MessageTypeMap::value_type> messageTypes,
+ const std::initializer_list<MessageDescriptor::MessageDetailsMap::value_type> msgDet,
const std::initializer_list<AttributeMap::value_type> attrTypes = {})
- : MessageDescriptor(name, messageTypes, attrTypes, sizeof(T)) {}
+ : MessageDescriptor(name, msgDet, attrTypes, sizeof(T)) {}
void dataToStream(std::stringstream& ss, const Buffer<nlmsghdr> hdr) const override {
const auto& [ok, msg] = hdr.data<T>().getFirst();
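To illustrate the new MessageDetailsMap shape, a hypothetical message family definition following the same pattern as the Link definition updated below; the Addr class, its ifaddrmsg payload and the include path are illustrative only and not part of this change.
```
// Illustrative only: a message family definition using the new {name, genre} details.
#include "MessageDefinition.h"  // include path per the surrounding protocol sources

#include <linux/rtnetlink.h>

#include <sstream>

namespace android::nl::protocols::route {

class Addr : public MessageDefinition<ifaddrmsg> {
  public:
    Addr()
        : MessageDefinition<ifaddrmsg>("addr", {
              {RTM_NEWADDR, {"NEWADDR", MessageGenre::NEW}},
              {RTM_DELADDR, {"DELADDR", MessageGenre::DELETE}},
              {RTM_GETADDR, {"GETADDR", MessageGenre::GET}},
          }) {}

    void toStream(std::stringstream& ss, const ifaddrmsg& data) const override {
        ss << "ifaddrmsg{family=" << unsigned(data.ifa_family)
           << ", index=" << data.ifa_index << "}";
    }
};

}  // namespace android::nl::protocols::route
```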
diff --git a/automotive/can/1.0/default/libnl++/protocols/NetlinkProtocol.cpp b/automotive/can/1.0/default/libnl++/protocols/NetlinkProtocol.cpp
index 4b795d9..cd2e8c6 100644
--- a/automotive/can/1.0/default/libnl++/protocols/NetlinkProtocol.cpp
+++ b/automotive/can/1.0/default/libnl++/protocols/NetlinkProtocol.cpp
@@ -18,7 +18,7 @@
namespace android::nl::protocols {
-NetlinkProtocol::NetlinkProtocol(int protocol, const std::string name,
+NetlinkProtocol::NetlinkProtocol(int protocol, const std::string& name,
const MessageDescriptorList&& messageDescrs)
: mProtocol(protocol), mName(name), mMessageDescrs(toMap(messageDescrs, protocol)) {}
@@ -42,7 +42,7 @@
const NetlinkProtocol::MessageDescriptorList& descrs, int protocol) {
MessageDescriptorMap map;
for (const auto& descr : descrs) {
- for (const auto& [mtype, mname] : descr->getMessageTypeMap()) {
+ for (const auto& [mtype, mdet] : descr->getMessageDetailsMap()) {
map.emplace(mtype, descr);
}
}
@@ -53,7 +53,7 @@
};
for (const auto& descr : baseDescriptors) {
- for (const auto& [mtype, mname] : descr->getMessageTypeMap()) {
+ for (const auto& [mtype, mdet] : descr->getMessageDetailsMap()) {
map.emplace(mtype, descr);
}
}
diff --git a/automotive/can/1.0/default/libnl++/protocols/NetlinkProtocol.h b/automotive/can/1.0/default/libnl++/protocols/NetlinkProtocol.h
index 81a0a65..c969547 100644
--- a/automotive/can/1.0/default/libnl++/protocols/NetlinkProtocol.h
+++ b/automotive/can/1.0/default/libnl++/protocols/NetlinkProtocol.h
@@ -46,7 +46,7 @@
protected:
typedef std::vector<std::shared_ptr<const MessageDescriptor>> MessageDescriptorList;
- NetlinkProtocol(int protocol, const std::string name,
+ NetlinkProtocol(int protocol, const std::string& name,
const MessageDescriptorList&& messageDescrs);
private:
diff --git a/automotive/can/1.0/default/libnl++/protocols/common/Empty.cpp b/automotive/can/1.0/default/libnl++/protocols/common/Empty.cpp
index dbf10d4..8a672d3 100644
--- a/automotive/can/1.0/default/libnl++/protocols/common/Empty.cpp
+++ b/automotive/can/1.0/default/libnl++/protocols/common/Empty.cpp
@@ -20,9 +20,9 @@
// clang-format off
Empty::Empty() : MessageDefinition<char>("nlmsg", {
- {NLMSG_NOOP, "NOOP"},
- {NLMSG_DONE, "DONE"},
- {NLMSG_OVERRUN, "OVERRUN"},
+ {NLMSG_NOOP, {"NOOP", MessageGenre::UNKNOWN}},
+ {NLMSG_DONE, {"DONE", MessageGenre::UNKNOWN}},
+ {NLMSG_OVERRUN, {"OVERRUN", MessageGenre::UNKNOWN}},
}) {}
// clang-format on
diff --git a/automotive/can/1.0/default/libnl++/protocols/common/Error.cpp b/automotive/can/1.0/default/libnl++/protocols/common/Error.cpp
index 1d6bd1c..44708a3 100644
--- a/automotive/can/1.0/default/libnl++/protocols/common/Error.cpp
+++ b/automotive/can/1.0/default/libnl++/protocols/common/Error.cpp
@@ -22,15 +22,21 @@
namespace android::nl::protocols::base {
+using DataType = AttributeDefinition::DataType;
+
// clang-format off
Error::Error(int protocol) : MessageDefinition<nlmsgerr>("nlmsg", {
- {NLMSG_ERROR, "ERROR"},
+ {NLMSG_ERROR, {"ERROR", MessageGenre::ACK}},
+}, {
+ {NLMSGERR_ATTR_MSG, {"MSG", DataType::String}},
+ {NLMSGERR_ATTR_OFFS, {"OFFS", DataType::Uint}},
+ {NLMSGERR_ATTR_COOKIE, {"COOKIE", DataType::Raw}},
}), mProtocol(protocol) {}
// clang-format on
void Error::toStream(std::stringstream& ss, const nlmsgerr& data) const {
ss << "nlmsgerr{error=" << data.error
- << ", msg=" << toString({&data.msg, sizeof(data.msg)}, mProtocol) << "}";
+ << ", msg=" << toString({&data.msg, sizeof(data.msg)}, mProtocol, false) << "}";
}
} // namespace android::nl::protocols::base
diff --git a/automotive/can/1.0/default/libnl++/protocols/generic/GenericMessageBase.cpp b/automotive/can/1.0/default/libnl++/protocols/generic/GenericMessageBase.cpp
index 5203368..134638e 100644
--- a/automotive/can/1.0/default/libnl++/protocols/generic/GenericMessageBase.cpp
+++ b/automotive/can/1.0/default/libnl++/protocols/generic/GenericMessageBase.cpp
@@ -19,13 +19,14 @@
namespace android::nl::protocols::generic {
GenericMessageBase::GenericMessageBase(
- nlmsgtype_t msgtype, std::string msgname,
+ nlmsgtype_t msgtype, const std::string&& msgname,
const std::initializer_list<GenericCommandNameMap::value_type> commandNames,
const std::initializer_list<AttributeMap::value_type> attrTypes)
- : MessageDefinition<struct genlmsghdr>(msgname, {{msgtype, msgname}}, attrTypes),
+ : MessageDefinition<genlmsghdr>(msgname, {{msgtype, {msgname, MessageGenre::UNKNOWN}}},
+ attrTypes),
mCommandNames(commandNames) {}
-void GenericMessageBase::toStream(std::stringstream& ss, const struct genlmsghdr& data) const {
+void GenericMessageBase::toStream(std::stringstream& ss, const genlmsghdr& data) const {
ss << "genlmsghdr{";
if (mCommandNames.count(data.cmd) == 0) {
ss << "cmd=" << unsigned(data.cmd);
diff --git a/automotive/can/1.0/default/libnl++/protocols/generic/GenericMessageBase.h b/automotive/can/1.0/default/libnl++/protocols/generic/GenericMessageBase.h
index f3b0b31..443f10c 100644
--- a/automotive/can/1.0/default/libnl++/protocols/generic/GenericMessageBase.h
+++ b/automotive/can/1.0/default/libnl++/protocols/generic/GenericMessageBase.h
@@ -22,16 +22,16 @@
namespace android::nl::protocols::generic {
-class GenericMessageBase : public MessageDefinition<struct genlmsghdr> {
+class GenericMessageBase : public MessageDefinition<genlmsghdr> {
public:
typedef std::map<uint8_t, std::string> GenericCommandNameMap;
GenericMessageBase(
- nlmsgtype_t msgtype, std::string msgname,
+ nlmsgtype_t msgtype, const std::string&& msgname,
const std::initializer_list<GenericCommandNameMap::value_type> commandNames = {},
const std::initializer_list<AttributeMap::value_type> attrTypes = {});
- void toStream(std::stringstream& ss, const struct genlmsghdr& data) const override;
+ void toStream(std::stringstream& ss, const genlmsghdr& data) const override;
private:
const GenericCommandNameMap mCommandNames;
diff --git a/automotive/can/1.0/default/libnl++/protocols/route/Link.cpp b/automotive/can/1.0/default/libnl++/protocols/route/Link.cpp
index 26d3e3e..7db487a 100644
--- a/automotive/can/1.0/default/libnl++/protocols/route/Link.cpp
+++ b/automotive/can/1.0/default/libnl++/protocols/route/Link.cpp
@@ -25,10 +25,10 @@
using DataType = AttributeDefinition::DataType;
// clang-format off
-Link::Link() : MessageDefinition<struct ifinfomsg>("link", {
- {RTM_NEWLINK, "NEWLINK"},
- {RTM_DELLINK, "DELLINK"},
- {RTM_GETLINK, "GETLINK"},
+Link::Link() : MessageDefinition<ifinfomsg>("link", {
+ {RTM_NEWLINK, {"NEWLINK", MessageGenre::NEW}},
+ {RTM_DELLINK, {"DELLINK", MessageGenre::DELETE}},
+ {RTM_GETLINK, {"GETLINK", MessageGenre::GET}},
}, {
{IFLA_ADDRESS, {"ADDRESS"}},
{IFLA_BROADCAST, {"BROADCAST"}},
@@ -107,7 +107,7 @@
}) {}
// clang-format off
-void Link::toStream(std::stringstream& ss, const struct ifinfomsg& data) const {
+void Link::toStream(std::stringstream& ss, const ifinfomsg& data) const {
ss << "ifinfomsg{"
<< "family=" << unsigned(data.ifi_family) << ", type=" << data.ifi_type
<< ", index=" << data.ifi_index << ", flags=" << data.ifi_flags
diff --git a/automotive/can/1.0/default/libnl++/protocols/route/Link.h b/automotive/can/1.0/default/libnl++/protocols/route/Link.h
index 4ea3eba..ecfefc9 100644
--- a/automotive/can/1.0/default/libnl++/protocols/route/Link.h
+++ b/automotive/can/1.0/default/libnl++/protocols/route/Link.h
@@ -22,10 +22,10 @@
namespace android::nl::protocols::route {
-class Link : public MessageDefinition<struct ifinfomsg> {
+class Link : public MessageDefinition<ifinfomsg> {
public:
Link();
- void toStream(std::stringstream& ss, const struct ifinfomsg& data) const override;
+ void toStream(std::stringstream& ss, const ifinfomsg& data) const override;
};
} // namespace android::nl::protocols::route
diff --git a/automotive/evs/1.0/default/EvsCamera.cpp b/automotive/evs/1.0/default/EvsCamera.cpp
index e0782ec..0daea5a 100644
--- a/automotive/evs/1.0/default/EvsCamera.cpp
+++ b/automotive/evs/1.0/default/EvsCamera.cpp
@@ -49,7 +49,7 @@
mDescription.cameraId = id;
- // Set up dummy data for testing
+ // Set up mock data for testing
if (mDescription.cameraId == kCameraName_Backup) {
mWidth = 640; // full NTSC/VGA
mHeight = 480; // full NTSC/VGA
diff --git a/automotive/evs/1.0/vts/functional/VtsHalEvsV1_0TargetTest.cpp b/automotive/evs/1.0/vts/functional/VtsHalEvsV1_0TargetTest.cpp
index 7fe7a33..ad607d8 100644
--- a/automotive/evs/1.0/vts/functional/VtsHalEvsV1_0TargetTest.cpp
+++ b/automotive/evs/1.0/vts/functional/VtsHalEvsV1_0TargetTest.cpp
@@ -369,7 +369,7 @@
TEST_P(EvsHidlTest, CameraStreamBuffering) {
ALOGI("Starting CameraStreamBuffering test");
- // Arbitrary constant (should be > 1 and less than crazy)
+ // Arbitrary constant (should be > 1 and not too big)
static const unsigned int kBuffersToHold = 6;
// Get the camera list
@@ -381,7 +381,7 @@
sp<IEvsCamera> pCam = pEnumerator->openCamera(cam.cameraId);
ASSERT_NE(pCam, nullptr);
- // Ask for a crazy number of buffers in flight to ensure it errors correctly
+ // Ask for a very large number of buffers in flight to ensure it errors correctly
Return<EvsResult> badResult = pCam->setMaxFramesInFlight(0xFFFFFFFF);
EXPECT_EQ(EvsResult::BUFFER_NOT_AVAILABLE, badResult);
diff --git a/automotive/evs/1.1/default/ConfigManager.h b/automotive/evs/1.1/default/ConfigManager.h
index 870af1c..b0b2670 100644
--- a/automotive/evs/1.1/default/ConfigManager.h
+++ b/automotive/evs/1.1/default/ConfigManager.h
@@ -76,7 +76,7 @@
}
/*
- * List of supported controls that the master client can program.
+ * List of supported controls that the primary client can program.
* Parameters are stored with their valid ranges
*/
unordered_map<CameraParam,
diff --git a/automotive/evs/1.1/default/EvsEnumerator.cpp b/automotive/evs/1.1/default/EvsEnumerator.cpp
index 117ee7a..d066471 100644
--- a/automotive/evs/1.1/default/EvsEnumerator.cpp
+++ b/automotive/evs/1.1/default/EvsEnumerator.cpp
@@ -70,7 +70,7 @@
// Add ultrasonics array desc.
sUltrasonicsArrayRecordList.emplace_back(
- EvsUltrasonicsArray::GetDummyArrayDesc("front_array"));
+ EvsUltrasonicsArray::GetMockArrayDesc("front_array"));
}
diff --git a/automotive/evs/1.1/default/EvsUltrasonicsArray.cpp b/automotive/evs/1.1/default/EvsUltrasonicsArray.cpp
index bc69aa4..ebd47c6 100644
--- a/automotive/evs/1.1/default/EvsUltrasonicsArray.cpp
+++ b/automotive/evs/1.1/default/EvsUltrasonicsArray.cpp
@@ -45,7 +45,7 @@
namespace {
-void fillDummyArrayDesc(UltrasonicsArrayDesc& arrayDesc) {
+void fillMockArrayDesc(UltrasonicsArrayDesc& arrayDesc) {
arrayDesc.maxReadingsPerSensorCount = kMaxReadingsPerSensor;
arrayDesc.maxReceiversCount = kMaxReceiversCount;
@@ -99,8 +99,8 @@
}
}
-// Fills dataFrameDesc with dummy data.
-bool fillDummyDataFrame(UltrasonicsDataFrameDesc& dataFrameDesc, sp<IMemory> pIMemory) {
+// Fills dataFrameDesc with mock data.
+bool fillMockDataFrame(UltrasonicsDataFrameDesc& dataFrameDesc, sp<IMemory> pIMemory) {
dataFrameDesc.timestampNs = elapsedRealtimeNano();
const std::vector<uint8_t> transmittersIdList = {0};
@@ -137,9 +137,9 @@
: mFramesAllowed(0), mFramesInUse(0), mStreamState(STOPPED) {
LOG(DEBUG) << "EvsUltrasonicsArray instantiated";
- // Set up dummy data for description.
+ // Set up mock data for description.
mArrayDesc.ultrasonicsArrayId = deviceName;
- fillDummyArrayDesc(mArrayDesc);
+ fillMockArrayDesc(mArrayDesc);
// Assign allocator.
mShmemAllocator = IAllocator::getService("ashmem");
@@ -182,10 +182,10 @@
mStreamState = DEAD;
}
-UltrasonicsArrayDesc EvsUltrasonicsArray::GetDummyArrayDesc(const char* deviceName) {
+UltrasonicsArrayDesc EvsUltrasonicsArray::GetMockArrayDesc(const char* deviceName) {
UltrasonicsArrayDesc ultrasonicsArrayDesc;
ultrasonicsArrayDesc.ultrasonicsArrayId = deviceName;
- fillDummyArrayDesc(ultrasonicsArrayDesc);
+ fillMockArrayDesc(ultrasonicsArrayDesc);
return ultrasonicsArrayDesc;
}
@@ -497,17 +497,17 @@
if (timeForFrame) {
// Assemble the buffer description we'll transmit below
- UltrasonicsDataFrameDesc dummyDataFrameDesc;
- dummyDataFrameDesc.dataFrameId = idx;
- dummyDataFrameDesc.waveformsData = mDataFrames[idx].sharedMemory.hidlMemory;
+ UltrasonicsDataFrameDesc mockDataFrameDesc;
+ mockDataFrameDesc.dataFrameId = idx;
+ mockDataFrameDesc.waveformsData = mDataFrames[idx].sharedMemory.hidlMemory;
- // Fill dummy waveform data.
- fillDummyDataFrame(dummyDataFrameDesc, mDataFrames[idx].sharedMemory.pIMemory);
+ // Fill mock waveform data.
+ fillMockDataFrame(mockDataFrameDesc, mDataFrames[idx].sharedMemory.pIMemory);
// Issue the (asynchronous) callback to the client -- can't be holding the lock
- auto result = mStream->deliverDataFrame(dummyDataFrameDesc);
+ auto result = mStream->deliverDataFrame(mockDataFrameDesc);
if (result.isOk()) {
- LOG(DEBUG) << "Delivered data frame id: " << dummyDataFrameDesc.dataFrameId;
+ LOG(DEBUG) << "Delivered data frame id: " << mockDataFrameDesc.dataFrameId;
} else {
// This can happen if the client dies and is likely unrecoverable.
// To avoid consuming resources generating failing calls, we stop sending
diff --git a/automotive/evs/1.1/default/EvsUltrasonicsArray.h b/automotive/evs/1.1/default/EvsUltrasonicsArray.h
index 7a41012..88aa600 100644
--- a/automotive/evs/1.1/default/EvsUltrasonicsArray.h
+++ b/automotive/evs/1.1/default/EvsUltrasonicsArray.h
@@ -58,7 +58,7 @@
static sp<EvsUltrasonicsArray> Create(const char* deviceName);
// Returns an ultrasonics array descriptor filled with sample data.
- static UltrasonicsArrayDesc GetDummyArrayDesc(const char* id);
+ static UltrasonicsArrayDesc GetMockArrayDesc(const char* id);
DISALLOW_COPY_AND_ASSIGN(EvsUltrasonicsArray);
virtual ~EvsUltrasonicsArray() override;
diff --git a/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp b/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
index 4398fb1..638ecd5 100644
--- a/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
+++ b/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
@@ -500,7 +500,7 @@
TEST_P(EvsHidlTest, CameraStreamBuffering) {
LOG(INFO) << "Starting CameraStreamBuffering test";
- // Arbitrary constant (should be > 1 and less than crazy)
+ // Arbitrary constant (should be > 1 and not too big)
static const unsigned int kBuffersToHold = 6;
// Get the camera list
@@ -527,7 +527,7 @@
// Store a camera handle for a clean-up
activeCameras.push_back(pCam);
- // Ask for a crazy number of buffers in flight to ensure it errors correctly
+ // Ask for a very large number of buffers in flight to ensure it errors correctly
Return<EvsResult> badResult = pCam->setMaxFramesInFlight(0xFFFFFFFF);
EXPECT_EQ(EvsResult::BUFFER_NOT_AVAILABLE, badResult);
@@ -930,12 +930,12 @@
/*
- * CameraMasterRelease
- * Verify that non-master client gets notified when the master client either
+ * CameraPrimaryClientRelease
+ * Verify that non-primary client gets notified when the primary client either
* terminates or releases a role.
*/
-TEST_P(EvsHidlTest, CameraMasterRelease) {
- LOG(INFO) << "Starting CameraMasterRelease test";
+TEST_P(EvsHidlTest, CameraPrimaryClientRelease) {
+ LOG(INFO) << "Starting CameraPrimaryClientRelease test";
if (mIsHwModule) {
// This test is not for HW module implementation.
@@ -961,57 +961,57 @@
}
// Create two camera clients.
- sp<IEvsCamera_1_1> pCamMaster =
+ sp<IEvsCamera_1_1> pCamPrimary =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
- ASSERT_NE(pCamMaster, nullptr);
+ ASSERT_NE(pCamPrimary, nullptr);
// Store a camera handle for a clean-up
- activeCameras.push_back(pCamMaster);
+ activeCameras.push_back(pCamPrimary);
- sp<IEvsCamera_1_1> pCamNonMaster =
+ sp<IEvsCamera_1_1> pCamSecondary =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
- ASSERT_NE(pCamNonMaster, nullptr);
+ ASSERT_NE(pCamSecondary, nullptr);
// Store a camera handle for a clean-up
- activeCameras.push_back(pCamNonMaster);
+ activeCameras.push_back(pCamSecondary);
// Set up per-client frame receiver objects which will fire up its own thread
- sp<FrameHandler> frameHandlerMaster =
- new FrameHandler(pCamMaster, cam,
+ sp<FrameHandler> frameHandlerPrimary =
+ new FrameHandler(pCamPrimary, cam,
nullptr,
FrameHandler::eAutoReturn);
- ASSERT_NE(frameHandlerMaster, nullptr);
- sp<FrameHandler> frameHandlerNonMaster =
- new FrameHandler(pCamNonMaster, cam,
+ ASSERT_NE(frameHandlerPrimary, nullptr);
+ sp<FrameHandler> frameHandlerSecondary =
+ new FrameHandler(pCamSecondary, cam,
nullptr,
FrameHandler::eAutoReturn);
- ASSERT_NE(frameHandlerNonMaster, nullptr);
+ ASSERT_NE(frameHandlerSecondary, nullptr);
- // Set one client as the master
- EvsResult result = pCamMaster->setMaster();
+ // Set one client as the primary client
+ EvsResult result = pCamPrimary->setMaster();
ASSERT_TRUE(result == EvsResult::OK);
- // Try to set another client as the master.
- result = pCamNonMaster->setMaster();
+ // Try to set another client as the primary client.
+ result = pCamSecondary->setMaster();
ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);
- // Start the camera's video stream via a master client.
- bool startResult = frameHandlerMaster->startStream();
+ // Start the camera's video stream via a primary client.
+ bool startResult = frameHandlerPrimary->startStream();
ASSERT_TRUE(startResult);
// Ensure the stream starts
- frameHandlerMaster->waitForFrameCount(1);
+ frameHandlerPrimary->waitForFrameCount(1);
// Start the camera's video stream via another client
- startResult = frameHandlerNonMaster->startStream();
+ startResult = frameHandlerSecondary->startStream();
ASSERT_TRUE(startResult);
// Ensure the stream starts
- frameHandlerNonMaster->waitForFrameCount(1);
+ frameHandlerSecondary->waitForFrameCount(1);
- // Non-master client expects to receive a master role relesed
+ // Non-primary client expects to receive a primary client role released
// notification.
EvsEventDesc aTargetEvent = {};
EvsEventDesc aNotification = {};
@@ -1020,14 +1020,14 @@
std::mutex eventLock;
std::condition_variable eventCond;
std::thread listener = std::thread(
- [&aNotification, &frameHandlerNonMaster, &listening, &eventCond]() {
+ [&aNotification, &frameHandlerSecondary, &listening, &eventCond]() {
// Notify that a listening thread is running.
listening = true;
eventCond.notify_all();
EvsEventDesc aTargetEvent;
aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
- if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification, true)) {
+ if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification, true)) {
LOG(WARNING) << "A timer is expired before a target event is fired.";
}
@@ -1043,8 +1043,8 @@
}
lock.unlock();
- // Release a master role.
- pCamMaster->unsetMaster();
+ // Release a primary client role.
+ pCamPrimary->unsetMaster();
// Join a listening thread.
if (listener.joinable()) {
@@ -1055,24 +1055,24 @@
ASSERT_EQ(EvsEventType::MASTER_RELEASED,
static_cast<EvsEventType>(aNotification.aType));
- // Non-master becomes a master.
- result = pCamNonMaster->setMaster();
+ // Non-primary becomes a primary client.
+ result = pCamSecondary->setMaster();
ASSERT_TRUE(result == EvsResult::OK);
- // Previous master client fails to become a master.
- result = pCamMaster->setMaster();
+ // Previous primary client fails to become a primary client.
+ result = pCamPrimary->setMaster();
ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);
listening = false;
listener = std::thread(
- [&aNotification, &frameHandlerMaster, &listening, &eventCond]() {
+ [&aNotification, &frameHandlerPrimary, &listening, &eventCond]() {
// Notify that a listening thread is running.
listening = true;
eventCond.notify_all();
EvsEventDesc aTargetEvent;
aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
- if (!frameHandlerMaster->waitForEvent(aTargetEvent, aNotification, true)) {
+ if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification, true)) {
LOG(WARNING) << "A timer is expired before a target event is fired.";
}
@@ -1087,8 +1087,8 @@
}
lock.unlock();
- // Closing current master client.
- frameHandlerNonMaster->shutdown();
+ // Closing current primary client.
+ frameHandlerSecondary->shutdown();
// Join a listening thread.
if (listener.joinable()) {
@@ -1100,11 +1100,11 @@
static_cast<EvsEventType>(aNotification.aType));
// Closing streams.
- frameHandlerMaster->shutdown();
+ frameHandlerPrimary->shutdown();
// Explicitly release the camera
- pEnumerator->closeCamera(pCamMaster);
- pEnumerator->closeCamera(pCamNonMaster);
+ pEnumerator->closeCamera(pCamPrimary);
+ pEnumerator->closeCamera(pCamSecondary);
activeCameras.clear();
}
}
@@ -1112,7 +1112,7 @@
/*
* MultiCameraParameter:
- * Verify that master and non-master clients behave as expected when they try to adjust
+ * Verify that primary and non-primary clients behave as expected when they try to adjust
* camera parameters.
*/
TEST_P(EvsHidlTest, MultiCameraParameter) {
@@ -1142,88 +1142,88 @@
}
// Create two camera clients.
- sp<IEvsCamera_1_1> pCamMaster =
+ sp<IEvsCamera_1_1> pCamPrimary =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
- ASSERT_NE(pCamMaster, nullptr);
+ ASSERT_NE(pCamPrimary, nullptr);
// Store a camera handle for a clean-up
- activeCameras.push_back(pCamMaster);
+ activeCameras.push_back(pCamPrimary);
- sp<IEvsCamera_1_1> pCamNonMaster =
+ sp<IEvsCamera_1_1> pCamSecondary =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
- ASSERT_NE(pCamNonMaster, nullptr);
+ ASSERT_NE(pCamSecondary, nullptr);
// Store a camera handle for a clean-up
- activeCameras.push_back(pCamNonMaster);
+ activeCameras.push_back(pCamSecondary);
// Get the parameter list
- std::vector<CameraParam> camMasterCmds, camNonMasterCmds;
- pCamMaster->getParameterList([&camMasterCmds](hidl_vec<CameraParam> cmdList) {
- camMasterCmds.reserve(cmdList.size());
+ std::vector<CameraParam> camPrimaryCmds, camSecondaryCmds;
+ pCamPrimary->getParameterList([&camPrimaryCmds](hidl_vec<CameraParam> cmdList) {
+ camPrimaryCmds.reserve(cmdList.size());
for (auto &&cmd : cmdList) {
- camMasterCmds.push_back(cmd);
+ camPrimaryCmds.push_back(cmd);
}
}
);
- pCamNonMaster->getParameterList([&camNonMasterCmds](hidl_vec<CameraParam> cmdList) {
- camNonMasterCmds.reserve(cmdList.size());
+ pCamSecondary->getParameterList([&camSecondaryCmds](hidl_vec<CameraParam> cmdList) {
+ camSecondaryCmds.reserve(cmdList.size());
for (auto &&cmd : cmdList) {
- camNonMasterCmds.push_back(cmd);
+ camSecondaryCmds.push_back(cmd);
}
}
);
- if (camMasterCmds.size() < 1 ||
- camNonMasterCmds.size() < 1) {
+ if (camPrimaryCmds.size() < 1 ||
+ camSecondaryCmds.size() < 1) {
// Skip a camera device if it does not support any parameter.
continue;
}
// Set up per-client frame receiver objects which will fire up its own thread
- sp<FrameHandler> frameHandlerMaster =
- new FrameHandler(pCamMaster, cam,
+ sp<FrameHandler> frameHandlerPrimary =
+ new FrameHandler(pCamPrimary, cam,
nullptr,
FrameHandler::eAutoReturn);
- ASSERT_NE(frameHandlerMaster, nullptr);
- sp<FrameHandler> frameHandlerNonMaster =
- new FrameHandler(pCamNonMaster, cam,
+ ASSERT_NE(frameHandlerPrimary, nullptr);
+ sp<FrameHandler> frameHandlerSecondary =
+ new FrameHandler(pCamSecondary, cam,
nullptr,
FrameHandler::eAutoReturn);
- ASSERT_NE(frameHandlerNonMaster, nullptr);
+ ASSERT_NE(frameHandlerSecondary, nullptr);
- // Set one client as the master
- EvsResult result = pCamMaster->setMaster();
+ // Set one client as the primary client.
+ EvsResult result = pCamPrimary->setMaster();
ASSERT_EQ(EvsResult::OK, result);
- // Try to set another client as the master.
- result = pCamNonMaster->setMaster();
+ // Try to set another client as the primary client.
+ result = pCamSecondary->setMaster();
ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
- // Start the camera's video stream via a master client.
- bool startResult = frameHandlerMaster->startStream();
+ // Start the camera's video stream via a primary client.
+ bool startResult = frameHandlerPrimary->startStream();
ASSERT_TRUE(startResult);
// Ensure the stream starts
- frameHandlerMaster->waitForFrameCount(1);
+ frameHandlerPrimary->waitForFrameCount(1);
// Start the camera's video stream via another client
- startResult = frameHandlerNonMaster->startStream();
+ startResult = frameHandlerSecondary->startStream();
ASSERT_TRUE(startResult);
// Ensure the stream starts
- frameHandlerNonMaster->waitForFrameCount(1);
+ frameHandlerSecondary->waitForFrameCount(1);
int32_t val0 = 0;
std::vector<int32_t> values;
EvsEventDesc aNotification0 = {};
EvsEventDesc aNotification1 = {};
- for (auto &cmd : camMasterCmds) {
+ for (auto &cmd : camPrimaryCmds) {
// Get a valid parameter value range
int32_t minVal, maxVal, step;
- pCamMaster->getIntParameterRange(
+ pCamPrimary->getIntParameterRange(
cmd,
[&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
minVal = val0;
@@ -1236,7 +1236,7 @@
if (cmd == CameraParam::ABSOLUTE_FOCUS) {
// Try to turn off auto-focus
values.clear();
- pCamMaster->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+ pCamPrimary->setIntParameter(CameraParam::AUTO_FOCUS, 0,
[&result, &values](auto status, auto effectiveValues) {
result = status;
if (status == EvsResult::OK) {
@@ -1261,7 +1261,7 @@
std::condition_variable eventCond;
std::thread listener0 = std::thread(
[cmd, val0,
- &aNotification0, &frameHandlerMaster, &listening0, &listening1, &eventCond]() {
+ &aNotification0, &frameHandlerPrimary, &listening0, &listening1, &eventCond]() {
listening0 = true;
if (listening1) {
eventCond.notify_all();
@@ -1271,14 +1271,14 @@
aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
aTargetEvent.payload[1] = val0;
- if (!frameHandlerMaster->waitForEvent(aTargetEvent, aNotification0)) {
+ if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
LOG(WARNING) << "A timer is expired before a target event is fired.";
}
}
);
std::thread listener1 = std::thread(
[cmd, val0,
- &aNotification1, &frameHandlerNonMaster, &listening0, &listening1, &eventCond]() {
+ &aNotification1, &frameHandlerSecondary, &listening0, &listening1, &eventCond]() {
listening1 = true;
if (listening0) {
eventCond.notify_all();
@@ -1288,7 +1288,7 @@
aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
aTargetEvent.payload[1] = val0;
- if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification1)) {
+ if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
LOG(WARNING) << "A timer is expired before a target event is fired.";
}
}
@@ -1305,7 +1305,7 @@
// Try to program a parameter
values.clear();
- pCamMaster->setIntParameter(cmd, val0,
+ pCamPrimary->setIntParameter(cmd, val0,
[&result, &values](auto status, auto effectiveValues) {
result = status;
if (status == EvsResult::OK) {
@@ -1345,9 +1345,9 @@
}
// Clients expects to receive a parameter change notification
- // whenever a master client adjusts it.
+ // whenever a primary client adjusts it.
values.clear();
- pCamMaster->getIntParameter(cmd,
+ pCamPrimary->getIntParameter(cmd,
[&result, &values](auto status, auto readValues) {
result = status;
if (status == EvsResult::OK) {
@@ -1362,9 +1362,9 @@
}
}
- // Try to adjust a parameter via non-master client
+ // Try to adjust a parameter via non-primary client
values.clear();
- pCamNonMaster->setIntParameter(camNonMasterCmds[0], val0,
+ pCamSecondary->setIntParameter(camSecondaryCmds[0], val0,
[&result, &values](auto status, auto effectiveValues) {
result = status;
if (status == EvsResult::OK) {
@@ -1375,21 +1375,21 @@
});
ASSERT_EQ(EvsResult::INVALID_ARG, result);
- // Non-master client attemps to be a master
- result = pCamNonMaster->setMaster();
+ // Non-primary client attempts to be a primary client
+ result = pCamSecondary->setMaster();
ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
- // Master client retires from a master role
+ // Primary client retires from the primary client role
bool listening = false;
std::condition_variable eventCond;
std::thread listener = std::thread(
- [&aNotification0, &frameHandlerNonMaster, &listening, &eventCond]() {
+ [&aNotification0, &frameHandlerSecondary, &listening, &eventCond]() {
listening = true;
eventCond.notify_all();
EvsEventDesc aTargetEvent;
aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
- if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification0, true)) {
+ if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification0, true)) {
LOG(WARNING) << "A timer is expired before a target event is fired.";
}
}
@@ -1403,7 +1403,7 @@
}
lock.unlock();
- result = pCamMaster->unsetMaster();
+ result = pCamPrimary->unsetMaster();
ASSERT_EQ(EvsResult::OK, result);
if (listener.joinable()) {
@@ -1414,7 +1414,7 @@
// Try to adjust a parameter after being retired
values.clear();
- pCamMaster->setIntParameter(camMasterCmds[0], val0,
+ pCamPrimary->setIntParameter(camPrimaryCmds[0], val0,
[&result, &values](auto status, auto effectiveValues) {
result = status;
if (status == EvsResult::OK) {
@@ -1425,15 +1425,15 @@
});
ASSERT_EQ(EvsResult::INVALID_ARG, result);
- // Non-master client becomes a master
- result = pCamNonMaster->setMaster();
+ // Non-primary client becomes a primary client
+ result = pCamSecondary->setMaster();
ASSERT_EQ(EvsResult::OK, result);
- // Try to adjust a parameter via new master client
- for (auto &cmd : camNonMasterCmds) {
+ // Try to adjust a parameter via new primary client
+ for (auto &cmd : camSecondaryCmds) {
// Get a valid parameter value range
int32_t minVal, maxVal, step;
- pCamNonMaster->getIntParameterRange(
+ pCamSecondary->getIntParameterRange(
cmd,
[&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
minVal = val0;
@@ -1447,7 +1447,7 @@
if (cmd == CameraParam::ABSOLUTE_FOCUS) {
// Try to turn off auto-focus
values.clear();
- pCamNonMaster->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+ pCamSecondary->setIntParameter(CameraParam::AUTO_FOCUS, 0,
[&result, &values](auto status, auto effectiveValues) {
result = status;
if (status == EvsResult::OK) {
@@ -1471,7 +1471,7 @@
bool listening1 = false;
std::condition_variable eventCond;
std::thread listener0 = std::thread(
- [&cmd, &val0, &aNotification0, &frameHandlerMaster, &listening0, &listening1, &eventCond]() {
+ [&]() {
listening0 = true;
if (listening1) {
eventCond.notify_all();
@@ -1481,13 +1481,13 @@
aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
aTargetEvent.payload[1] = val0;
- if (!frameHandlerMaster->waitForEvent(aTargetEvent, aNotification0)) {
+ if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
LOG(WARNING) << "A timer is expired before a target event is fired.";
}
}
);
std::thread listener1 = std::thread(
- [&cmd, &val0, &aNotification1, &frameHandlerNonMaster, &listening0, &listening1, &eventCond]() {
+ [&]() {
listening1 = true;
if (listening0) {
eventCond.notify_all();
@@ -1497,7 +1497,7 @@
aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
aTargetEvent.payload[1] = val0;
- if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification1)) {
+ if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
LOG(WARNING) << "A timer is expired before a target event is fired.";
}
}
@@ -1514,7 +1514,7 @@
// Try to program a parameter
values.clear();
- pCamNonMaster->setIntParameter(cmd, val0,
+ pCamSecondary->setIntParameter(cmd, val0,
[&result, &values](auto status, auto effectiveValues) {
result = status;
if (status == EvsResult::OK) {
@@ -1526,9 +1526,9 @@
ASSERT_EQ(EvsResult::OK, result);
// Clients expects to receive a parameter change notification
- // whenever a master client adjusts it.
+ // whenever a primary client adjusts it.
values.clear();
- pCamNonMaster->getIntParameter(cmd,
+ pCamSecondary->getIntParameter(cmd,
[&result, &values](auto status, auto readValues) {
result = status;
if (status == EvsResult::OK) {
@@ -1567,17 +1567,17 @@
}
}
- // New master retires from a master role
- result = pCamNonMaster->unsetMaster();
+ // New primary client retires from the role
+ result = pCamSecondary->unsetMaster();
ASSERT_EQ(EvsResult::OK, result);
// Shutdown
- frameHandlerMaster->shutdown();
- frameHandlerNonMaster->shutdown();
+ frameHandlerPrimary->shutdown();
+ frameHandlerSecondary->shutdown();
// Explicitly release the camera
- pEnumerator->closeCamera(pCamMaster);
- pEnumerator->closeCamera(pCamNonMaster);
+ pEnumerator->closeCamera(pCamPrimary);
+ pEnumerator->closeCamera(pCamSecondary);
activeCameras.clear();
}
}
@@ -1586,7 +1586,7 @@
/*
* HighPriorityCameraClient:
* a primary client role from other EVS clients without the display. The display-owning
- * a master role from other EVS clients without the display.
+ * a primary client role from other EVS clients without the display.
*/
TEST_P(EvsHidlTest, HighPriorityCameraClient) {
LOG(INFO) << "Starting HighPriorityCameraClient test";
@@ -1668,7 +1668,7 @@
frameHandler0->waitForFrameCount(1);
frameHandler1->waitForFrameCount(1);
- // Client 1 becomes a master and programs a parameter.
+ // Client 1 becomes a primary client and programs a parameter.
EvsResult result = EvsResult::OK;
// Get a valid parameter value range
int32_t minVal, maxVal, step;
@@ -1681,7 +1681,7 @@
}
);
- // Client1 becomes a master
+ // Client1 becomes a primary client
result = pCam1->setMaster();
ASSERT_EQ(EvsResult::OK, result);
@@ -1820,7 +1820,7 @@
}
lock.unlock();
- // Client 0 steals a master role
+ // Client 0 steals a primary client role
ASSERT_EQ(EvsResult::OK, pCam0->forceMaster(pDisplay));
// Join a listener
@@ -2241,7 +2241,7 @@
TEST_P(EvsHidlTest, CameraStreamExternalBuffering) {
LOG(INFO) << "Starting CameraStreamExternalBuffering test";
- // Arbitrary constant (should be > 1 and less than crazy)
+ // Arbitrary constant (should be > 1 and not too big)
static const unsigned int kBuffersToHold = 6;
// Get the camera list
diff --git a/automotive/vehicle/2.0/default/impl/vhal_v2_0/DefaultConfig.h b/automotive/vehicle/2.0/default/impl/vhal_v2_0/DefaultConfig.h
index 16c33b9..16e1bf7 100644
--- a/automotive/vehicle/2.0/default/impl/vhal_v2_0/DefaultConfig.h
+++ b/automotive/vehicle/2.0/default/impl/vhal_v2_0/DefaultConfig.h
@@ -266,6 +266,20 @@
.initialValue = {.stringValue = "Toy Vehicle"}},
{.config =
{
+ .prop = toInt(VehicleProperty::INFO_MODEL),
+ .access = VehiclePropertyAccess::READ,
+ .changeMode = VehiclePropertyChangeMode::STATIC,
+ },
+ .initialValue = {.stringValue = "Speedy Model"}},
+ {.config =
+ {
+ .prop = toInt(VehicleProperty::INFO_MODEL_YEAR),
+ .access = VehiclePropertyAccess::READ,
+ .changeMode = VehiclePropertyChangeMode::STATIC,
+ },
+ .initialValue = {.int32Values = {2020}}},
+ {.config =
+ {
.prop = toInt(VehicleProperty::INFO_EXTERIOR_DIMENSIONS),
.access = VehiclePropertyAccess::READ,
.changeMode = VehiclePropertyChangeMode::STATIC,
diff --git a/compatibility_matrices/compatibility_matrix.current.xml b/compatibility_matrices/compatibility_matrix.current.xml
index 9463924..44fbc64 100644
--- a/compatibility_matrices/compatibility_matrix.current.xml
+++ b/compatibility_matrices/compatibility_matrix.current.xml
@@ -537,7 +537,7 @@
</hal>
<hal format="hidl" optional="true">
<name>android.hardware.wifi.hostapd</name>
- <version>1.0-2</version>
+ <version>1.0-3</version>
<interface>
<name>IHostapd</name>
<instance>default</instance>
@@ -545,7 +545,7 @@
</hal>
<hal format="hidl" optional="true">
<name>android.hardware.wifi.supplicant</name>
- <version>1.0-3</version>
+ <version>1.0-4</version>
<interface>
<name>ISupplicant</name>
<instance>default</instance>
diff --git a/sensors/common/default/2.X/multihal/HalProxy.cpp b/sensors/common/default/2.X/multihal/HalProxy.cpp
index 75ffc17..fe3fc84 100644
--- a/sensors/common/default/2.X/multihal/HalProxy.cpp
+++ b/sensors/common/default/2.X/multihal/HalProxy.cpp
@@ -693,6 +693,10 @@
int64_t timeoutStart /* = -1 */) {
if (!mThreadsRun.load()) return;
std::lock_guard<std::recursive_mutex> lockGuard(mWakelockMutex);
+ if (delta > mWakelockRefCount) {
+ ALOGE("Decrementing wakelock ref count by %zu when count is %zu",
+ delta, mWakelockRefCount);
+ }
if (timeoutStart == -1) timeoutStart = mWakelockTimeoutResetTime;
if (mWakelockRefCount == 0 || timeoutStart < mWakelockTimeoutResetTime) return;
mWakelockRefCount -= std::min(mWakelockRefCount, delta);
diff --git a/sensors/common/default/2.X/multihal/ScopedWakelock.cpp b/sensors/common/default/2.X/multihal/ScopedWakelock.cpp
index bf2ad35..2a2baa2 100644
--- a/sensors/common/default/2.X/multihal/ScopedWakelock.cpp
+++ b/sensors/common/default/2.X/multihal/ScopedWakelock.cpp
@@ -28,6 +28,21 @@
.count();
}
+ScopedWakelock::ScopedWakelock(ScopedWakelock&& other) {
+ *this = std::move(other);
+}
+
+ScopedWakelock& ScopedWakelock::operator=(ScopedWakelock&& other) {
+ mRefCounter = other.mRefCounter;
+ mCreatedAtTimeNs = other.mCreatedAtTimeNs;
+ mLocked = other.mLocked;
+
+ other.mRefCounter = nullptr;
+ other.mCreatedAtTimeNs = 0;
+ other.mLocked = false;
+ return *this;
+}
+
ScopedWakelock::ScopedWakelock(IScopedWakelockRefCounter* refCounter, bool locked)
: mRefCounter(refCounter), mLocked(locked) {
if (mLocked) {
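The hand-written move operations matter because the defaulted ones copied mRefCounter into the destination without clearing it in the source, so both destructors would later release the wakelock reference. A condensed sketch of the same RAII pitfall, using an illustrative type rather than the real ScopedWakelock:

    #include <cstddef>
    #include <utility>

    struct Counter {
        size_t count = 0;
    };

    // Illustrative RAII handle: acquires one reference on construction and
    // releases it on destruction, like a locked ScopedWakelock.
    class Handle {
      public:
        explicit Handle(Counter* counter) : mCounter(counter) { ++mCounter->count; }
        Handle(Handle&& other) : mCounter(other.mCounter) {
            // Resetting the source is the step a defaulted move would skip;
            // without it the moved-from destructor releases a second time.
            other.mCounter = nullptr;
        }
        ~Handle() {
            if (mCounter != nullptr) --mCounter->count;
        }

      private:
        Counter* mCounter;
    };

    int main() {
        Counter counter;
        {
            Handle a(&counter);
            Handle b(std::move(a));  // only b releases; a was reset to null
        }
        return counter.count == 0 ? 0 : 1;
    }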
diff --git a/sensors/common/default/2.X/multihal/include/V2_0/ScopedWakelock.h b/sensors/common/default/2.X/multihal/include/V2_0/ScopedWakelock.h
index 1cc5cd5..b3f398c 100644
--- a/sensors/common/default/2.X/multihal/include/V2_0/ScopedWakelock.h
+++ b/sensors/common/default/2.X/multihal/include/V2_0/ScopedWakelock.h
@@ -81,14 +81,15 @@
*/
class ScopedWakelock {
public:
- ScopedWakelock(ScopedWakelock&&) = default;
- ScopedWakelock& operator=(ScopedWakelock&&) = default;
+ ScopedWakelock(ScopedWakelock&& other);
+ ScopedWakelock& operator=(ScopedWakelock&& other);
virtual ~ScopedWakelock();
bool isLocked() const { return mLocked; }
private:
friend class HalProxyCallbackBase;
+ friend class ScopedWakelockTest;
IScopedWakelockRefCounter* mRefCounter;
int64_t mCreatedAtTimeNs;
bool mLocked;
diff --git a/sensors/common/default/2.X/multihal/tests/Android.bp b/sensors/common/default/2.X/multihal/tests/Android.bp
index a15faed..1b60f4b 100644
--- a/sensors/common/default/2.X/multihal/tests/Android.bp
+++ b/sensors/common/default/2.X/multihal/tests/Android.bp
@@ -90,7 +90,10 @@
cc_test {
name: "android.hardware.sensors@2.X-halproxy-unit-tests",
- srcs: ["HalProxy_test.cpp"],
+ srcs: [
+ "HalProxy_test.cpp",
+ "ScopedWakelock_test.cpp",
+ ],
vendor: true,
header_libs: [
"android.hardware.sensors@2.X-shared-utils",
diff --git a/sensors/common/default/2.X/multihal/tests/ScopedWakelock_test.cpp b/sensors/common/default/2.X/multihal/tests/ScopedWakelock_test.cpp
new file mode 100644
index 0000000..133d9e8
--- /dev/null
+++ b/sensors/common/default/2.X/multihal/tests/ScopedWakelock_test.cpp
@@ -0,0 +1,110 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <gtest/gtest.h>
+
+#include "V2_0/ScopedWakelock.h"
+
+namespace android {
+namespace hardware {
+namespace sensors {
+namespace V2_0 {
+namespace implementation {
+
+class RefCounter : public IScopedWakelockRefCounter {
+ public:
+ size_t incCount = 0;
+ size_t decCount = 0;
+
+ bool incrementRefCountAndMaybeAcquireWakelock(size_t /* delta */,
+ int64_t* /* timeoutStart */) override {
+ incCount++;
+ return true;
+ }
+
+ void decrementRefCountAndMaybeReleaseWakelock(size_t /* delta */,
+ int64_t /* timeoutStart */) override {
+ decCount++;
+ }
+};
+
+class ScopedWakelockTest : public testing::Test {
+ public:
+ ScopedWakelock createScopedWakelock(bool locked) {
+ return ScopedWakelock(&mRefCounter, locked);
+ }
+
+ RefCounter mRefCounter;
+};
+
+TEST_F(ScopedWakelockTest, UnlockedAfterMoved) {
+ ScopedWakelock wakelock = createScopedWakelock(false /* locked */);
+
+ ScopedWakelock movedWakelock(std::move(wakelock));
+
+ EXPECT_FALSE(wakelock.isLocked());
+ EXPECT_FALSE(movedWakelock.isLocked());
+}
+
+TEST_F(ScopedWakelockTest, LockedAfterMoved) {
+ ScopedWakelock wakelock = createScopedWakelock(true /* locked */);
+
+ ScopedWakelock movedWakelock(std::move(wakelock));
+
+ EXPECT_FALSE(wakelock.isLocked());
+ EXPECT_TRUE(movedWakelock.isLocked());
+}
+
+TEST_F(ScopedWakelockTest, Locked) {
+ ScopedWakelock wakelock = createScopedWakelock(true /* locked */);
+
+ EXPECT_TRUE(wakelock.isLocked());
+}
+
+TEST_F(ScopedWakelockTest, Unlocked) {
+ ScopedWakelock wakelock = createScopedWakelock(false /* locked */);
+
+ EXPECT_FALSE(wakelock.isLocked());
+}
+
+TEST_F(ScopedWakelockTest, ScopedLocked) {
+ { createScopedWakelock(true /* locked */); }
+
+ EXPECT_EQ(mRefCounter.incCount, 1);
+ EXPECT_EQ(mRefCounter.decCount, 1);
+}
+
+TEST_F(ScopedWakelockTest, ScopedUnlockIsNoop) {
+ { createScopedWakelock(false /* locked */); }
+
+ EXPECT_EQ(mRefCounter.incCount, 0);
+ EXPECT_EQ(mRefCounter.decCount, 0);
+}
+
+TEST_F(ScopedWakelockTest, ScopedLockedMove) {
+ {
+ ScopedWakelock wakelock = createScopedWakelock(true /* locked */);
+ ScopedWakelock movedWakelock(std::move(wakelock));
+ }
+
+ EXPECT_EQ(mRefCounter.incCount, 1);
+ EXPECT_EQ(mRefCounter.decCount, 1);
+}
+
+} // namespace implementation
+} // namespace V2_0
+} // namespace sensors
+} // namespace hardware
+} // namespace android
\ No newline at end of file
diff --git a/tv/tuner/1.1/Android.bp b/tv/tuner/1.1/Android.bp
index 032232d..92769f0 100644
--- a/tv/tuner/1.1/Android.bp
+++ b/tv/tuner/1.1/Android.bp
@@ -6,6 +6,7 @@
srcs: [
"IDemux.hal",
"IFilter.hal",
+ "IFrontend.hal",
"IFilterCallback.hal",
"ITuner.hal",
"types.hal",
diff --git a/tv/tuner/1.1/IFilterCallback.hal b/tv/tuner/1.1/IFilterCallback.hal
index a80273b..3e5f047 100644
--- a/tv/tuner/1.1/IFilterCallback.hal
+++ b/tv/tuner/1.1/IFilterCallback.hal
@@ -17,13 +17,15 @@
package android.hardware.tv.tuner@1.1;
import @1.0::IFilterCallback;
-import @1.1::DemuxFilterEvent;
+import @1.0::DemuxFilterEvent;
+import @1.1::DemuxFilterEventExt;
interface IFilterCallback extends @1.0::IFilterCallback {
/**
* Notify the client that a new filter event happened.
*
- * @param filterEvent a v1_1 filter event.
+ * @param filterEvent a v1_0 filter event.
+ * @param filterEventExt a v1_1 extended filter event.
*/
- oneway onFilterEvent_1_1(DemuxFilterEvent filterEvent);
+ oneway onFilterEvent_1_1(DemuxFilterEvent filterEvent, DemuxFilterEventExt filterEventExt);
};
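With this change the v1_0 event and the v1_1 extension are delivered in a single callback instead of one combined v1_1 event type. A hedged sketch of a client-side implementation (the class name and bodies are illustrative; only the overridden signatures come from the interfaces):

    #include <android/hardware/tv/tuner/1.1/IFilterCallback.h>

    using ::android::hardware::Return;
    using ::android::hardware::Void;
    using ::android::hardware::tv::tuner::V1_0::DemuxFilterEvent;
    using ::android::hardware::tv::tuner::V1_0::DemuxFilterStatus;
    using ::android::hardware::tv::tuner::V1_1::DemuxFilterEventExt;

    // Illustrative client callback: the base event and its 1.1 extension arrive
    // in the same call and are consumed together.
    class FilterCallbackSketch : public ::android::hardware::tv::tuner::V1_1::IFilterCallback {
      public:
        Return<void> onFilterEvent_1_1(const DemuxFilterEvent& filterEvent,
                                       const DemuxFilterEventExt& filterEventExt) override {
            (void)filterEvent;
            (void)filterEventExt;  // process the paired event vectors here
            return Void();
        }

        Return<void> onFilterEvent(const DemuxFilterEvent& filterEvent) override {
            (void)filterEvent;  // v1_0-only path: no extension is available
            return Void();
        }

        Return<void> onFilterStatus(DemuxFilterStatus status) override {
            (void)status;
            return Void();
        }
    };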
diff --git a/tv/tuner/1.1/IFrontend.hal b/tv/tuner/1.1/IFrontend.hal
new file mode 100644
index 0000000..b570549
--- /dev/null
+++ b/tv/tuner/1.1/IFrontend.hal
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.tv.tuner@1.1;
+
+import @1.0::FrontendScanType;
+import @1.0::FrontendSettings;
+import @1.0::IFrontend;
+import @1.0::Result;
+
+/**
+ * A Tuner Frontend is used to tune to a frequency and lock signal.
+ *
+ * IFrontend provides a bit stream to the Tuner Demux interface.
+ */
+interface IFrontend extends @1.0::IFrontend {
+ /**
+ * Tunes the frontend using the given settings.
+ *
+ * This locks the frontend to a frequency by providing signal
+ * delivery information. If a previous tuning is not completed, this call MUST
+ * stop the previous tuning and start a new one.
+ * Tune is an async call, with LOCKED or NO_SIGNAL events sent via callback.
+ *
+ * @param settings Signal delivery information the frontend uses to
+ * search and lock the signal.
+ * @param settingsExt Extended settings used by the 1.1 Frontend to improve signal
+ * search and locking.
+ *
+ * @return result Result status of the operation.
+ * SUCCESS if successful,
+ * INVALID_STATE if tuning can't be applied at current stage,
+ * UNKNOWN_ERROR if tuning failed for other reasons.
+ */
+ tune_1_1(FrontendSettings settings, FrontendSettingsExt settingsExt) generates (Result result);
+
+ /**
+ * Scans the frontend using the given settings.
+ *
+ * This uses the frontend to start a scan based on signal delivery information.
+ * If a previous scan is not completed, this call MUST stop the previous scan
+ * and start a new one.
+ * Scan is an async call, with FrontendScanMessage sent via callback.
+ *
+ * @param settings Signal delivery information which the frontend uses to
+ * scan the signal.
+ * @param type the type which the frontend uses to scan the signal.
+ * @param settingsExt Extended settings used by the 1.1 Frontend to improve signal
+ * search and locking.
+ * @return result Result status of the operation.
+ * SUCCESS if successful,
+ * INVALID_STATE if tuning can't be applied at current stage,
+ * UNKNOWN_ERROR if tuning failed for other reasons.
+ */
+ scan_1_1(FrontendSettings settings, FrontendScanType type, FrontendSettingsExt settingsExt)
+ generates (Result result);
+};
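Because these methods extend @1.0::IFrontend, a client that holds a 1.0 handle upgrades it with castFrom before calling them, and can fall back to the 1.0 tune() when the service does not implement 1.1. A rough sketch under that assumption (the helper name and structure are illustrative):

    #include <android/hardware/tv/tuner/1.0/IFrontend.h>
    #include <android/hardware/tv/tuner/1.1/IFrontend.h>

    using ::android::sp;
    using ::android::hardware::tv::tuner::V1_0::FrontendSettings;
    using ::android::hardware::tv::tuner::V1_0::Result;
    namespace V1_1 = ::android::hardware::tv::tuner::V1_1;

    // Illustrative helper: prefer tune_1_1 when the HAL exposes the 1.1 interface.
    Result tuneWithExtIfAvailable(const sp<::android::hardware::tv::tuner::V1_0::IFrontend>& fe,
                                  const FrontendSettings& settings,
                                  const V1_1::FrontendSettingsExt& settingsExt) {
        sp<V1_1::IFrontend> fe_1_1;
        fe_1_1 = V1_1::IFrontend::castFrom(fe);
        if (fe_1_1 != nullptr) {
            return fe_1_1->tune_1_1(settings, settingsExt);
        }
        return fe->tune(settings);  // 1.0 fallback: extended settings are ignored
    }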
diff --git a/tv/tuner/1.1/default/Filter.cpp b/tv/tuner/1.1/default/Filter.cpp
index 4d08afe..fae83a2 100644
--- a/tv/tuner/1.1/default/Filter.cpp
+++ b/tv/tuner/1.1/default/Filter.cpp
@@ -220,7 +220,7 @@
// For the first filter output, the implementation needs to send the filter event
// callback without waiting for DATA_CONSUMED to initialize the process.
while (mFilterThreadRunning) {
- if (mFilterEvent.events.size() == 0 && mFilterEvent_1_1.events.size() == 0) {
+ if (mFilterEvent.events.size() == 0 && mFilterEventExt.events.size() == 0) {
if (DEBUG_FILTER) {
ALOGD("[Filter] wait for filter data output.");
}
@@ -228,18 +228,17 @@
continue;
}
// After successfully write, send a callback and wait for the read to be done
- if (mFilterEvent_1_1.events.size() > 0) {
- if (mCallback_1_1 == nullptr) {
- ALOGE("[Filter] IFilterCallback_1_1 has not been configured yet. Can't send event");
- mFilterThreadRunning = false;
- break;
- }
- mCallback_1_1->onFilterEvent_1_1(mFilterEvent_1_1);
- mFilterEvent_1_1.events.resize(0);
- } else {
+ if (mCallback_1_1 != nullptr) {
+ mCallback_1_1->onFilterEvent_1_1(mFilterEvent, mFilterEventExt);
+ mFilterEventExt.events.resize(0);
+ } else if (mCallback != nullptr) {
mCallback->onFilterEvent(mFilterEvent);
- mFilterEvent.events.resize(0);
+ } else {
+ ALOGD("[Filter] filter callback is not configured yet.");
+ mFilterThreadRunning = false;
+ return;
}
+ mFilterEvent.events.resize(0);
freeAvHandle();
mFilterStatus = DemuxFilterStatus::DATA_READY;
@@ -279,8 +278,8 @@
mCallback->onFilterEvent(mFilterEvent);
mFilterEvent.events.resize(0);
} else if (mCallback_1_1 != nullptr) {
- mCallback_1_1->onFilterEvent_1_1(mFilterEvent_1_1);
- mFilterEvent_1_1.events.resize(0);
+ mCallback_1_1->onFilterEvent_1_1(mFilterEvent, mFilterEventExt);
+ mFilterEventExt.events.resize(0);
}
break;
}
@@ -612,22 +611,18 @@
recordEvent = {
.byteNumber = mRecordFilterOutput.size(),
};
- V1_1::DemuxFilterTsRecordEvent recordEvent_1_1;
- recordEvent_1_1 = {
- .tsRecordEvent_1_0 = recordEvent,
+ V1_1::DemuxFilterRecordEventExt recordEventExt;
+ recordEventExt = {
.pts = (mPts == 0) ? time(NULL) * 900000 : mPts,
};
int size;
- if (mCallback_1_1 != nullptr) {
- size = mFilterEvent_1_1.events.size();
- mFilterEvent_1_1.events.resize(size + 1);
- mFilterEvent_1_1.events[size].tsRecord(recordEvent_1_1);
- } else if (mCallback != nullptr) {
- size = mFilterEvent.events.size();
- mFilterEvent.events.resize(size + 1);
- mFilterEvent.events[size].tsRecord(recordEvent);
- }
+ size = mFilterEventExt.events.size();
+ mFilterEventExt.events.resize(size + 1);
+ mFilterEventExt.events[size].tsRecord(recordEventExt);
+ size = mFilterEvent.events.size();
+ mFilterEvent.events.resize(size + 1);
+ mFilterEvent.events[size].tsRecord(recordEvent);
mRecordFilterOutput.clear();
return Result::SUCCESS;
diff --git a/tv/tuner/1.1/default/Filter.h b/tv/tuner/1.1/default/Filter.h
index 8e6fe38..fa52f96 100644
--- a/tv/tuner/1.1/default/Filter.h
+++ b/tv/tuner/1.1/default/Filter.h
@@ -128,7 +128,7 @@
bool mIsUsingFMQ = false;
EventFlag* mFilterEventFlag;
DemuxFilterEvent mFilterEvent;
- V1_1::DemuxFilterEvent mFilterEvent_1_1;
+ V1_1::DemuxFilterEventExt mFilterEventExt;
// Thread handlers
pthread_t mFilterThread;
diff --git a/tv/tuner/1.1/default/Frontend.cpp b/tv/tuner/1.1/default/Frontend.cpp
index 6f5885f..1e9435b 100644
--- a/tv/tuner/1.1/default/Frontend.cpp
+++ b/tv/tuner/1.1/default/Frontend.cpp
@@ -71,6 +71,12 @@
return Result::SUCCESS;
}
+Return<Result> Frontend::tune_1_1(const FrontendSettings& settings,
+ const V1_1::FrontendSettingsExt& /*settingsExt*/) {
+ ALOGV("%s", __FUNCTION__);
+ return tune(settings);
+}
+
Return<Result> Frontend::stopTune() {
ALOGV("%s", __FUNCTION__);
@@ -115,6 +121,12 @@
return Result::SUCCESS;
}
+Return<Result> Frontend::scan_1_1(const FrontendSettings& settings, FrontendScanType type,
+ const V1_1::FrontendSettingsExt& /*settingsExt*/) {
+ ALOGV("%s", __FUNCTION__);
+ return scan(settings, type);
+}
+
Return<Result> Frontend::stopScan() {
ALOGV("%s", __FUNCTION__);
diff --git a/tv/tuner/1.1/default/Frontend.h b/tv/tuner/1.1/default/Frontend.h
index 89b4a6b..d44f4ae 100644
--- a/tv/tuner/1.1/default/Frontend.h
+++ b/tv/tuner/1.1/default/Frontend.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_HARDWARE_TV_TUNER_V1_1_FRONTEND_H_
#define ANDROID_HARDWARE_TV_TUNER_V1_1_FRONTEND_H_
+#include <android/hardware/tv/tuner/1.1/IFrontend.h>
#include <fstream>
#include <iostream>
#include "Tuner.h"
@@ -32,7 +33,7 @@
class Tuner;
-class Frontend : public IFrontend {
+class Frontend : public V1_1::IFrontend {
public:
Frontend(FrontendType type, FrontendId id, sp<Tuner> tuner);
@@ -42,10 +43,16 @@
virtual Return<Result> tune(const FrontendSettings& settings) override;
+ virtual Return<Result> tune_1_1(const FrontendSettings& settings,
+ const V1_1::FrontendSettingsExt& settingsExt) override;
+
virtual Return<Result> stopTune() override;
virtual Return<Result> scan(const FrontendSettings& settings, FrontendScanType type) override;
+ virtual Return<Result> scan_1_1(const FrontendSettings& settings, FrontendScanType type,
+ const V1_1::FrontendSettingsExt& settingsExt) override;
+
virtual Return<Result> stopScan() override;
virtual Return<void> getStatus(const hidl_vec<FrontendStatusType>& statusTypes,
diff --git a/tv/tuner/1.1/types.hal b/tv/tuner/1.1/types.hal
index 5c02b85..9e2f453 100644
--- a/tv/tuner/1.1/types.hal
+++ b/tv/tuner/1.1/types.hal
@@ -17,15 +17,12 @@
package android.hardware.tv.tuner@1.1;
import @1.0::Constant;
-import @1.0::DemuxFilterDownloadEvent;
-import @1.0::DemuxFilterIpPayloadEvent;
-import @1.0::DemuxFilterMediaEvent;
import @1.0::DemuxFilterMmtpRecordEvent;
-import @1.0::DemuxFilterPesEvent;
-import @1.0::DemuxFilterSectionEvent;
-import @1.0::DemuxFilterTemiEvent;
import @1.0::DemuxFilterTsRecordEvent;
+import @1.0::FrontendDvbcSpectralInversion;
+import @1.0::FrontendDvbtTransmissionMode;
import android.hidl.safe_union@1.0;
+import android.hidl.safe_union@1.0::Monostate;
@export
enum Constant : @1.0::Constant {
@@ -33,6 +30,10 @@
* An invalid mpuSequenceNumber in DemuxFilterMmtpRecordEvent.
*/
INVALID_MMTP_RECORD_EVENT_MPT_SEQUENCE_NUM = 0xFFFFFFFF,
+ /**
+ * An invalid frequency that can be used as the default value of the frontend setting.
+ */
+ INVALID_FRONTEND_SETTING_FREQUENCY = 0xFFFFFFFF,
};
@export
@@ -52,64 +53,36 @@
};
/**
- * Filter Event for TS Record data.
+ * Extended Demux Filter Record Event.
*/
-struct DemuxFilterTsRecordEvent {
- /**
- * V1_0 Filter Event for TS Record data.
- */
- @1.0::DemuxFilterTsRecordEvent tsRecordEvent_1_0;
-
+struct DemuxFilterRecordEventExt {
/**
* The Presentation Time Stamp(PTS) for the audio or video frame. It is based on 90KHz
* and has the same format as the PTS in ISO/IEC 13818-1. It is used only for the SC and
* the SC_HEVC.
*/
uint64_t pts;
-};
-
-/**
- * Filter Event for MMTP Record data.
- */
-struct DemuxFilterMmtpRecordEvent {
- /**
- * V1_0 Filter Event for MMTP Record data.
- */
- @1.0::DemuxFilterMmtpRecordEvent mmtpRecordEvent_1_0;
/**
* MPU sequence number of the filtered data. This is only used for MMTP.
*/
uint32_t mpuSequenceNumber;
-
- /**
- * The Presentation Time Stamp(PTS) for the audio or video frame. It is based on 90KHz
- * and has the same format as the PTS in ISO/IEC 13818-1. It is used only for the SC and
- * the SC_HEVC.
- */
- uint64_t pts;
};
/**
- * Filter Event.
+ * Extended Demux Filter Event.
*/
-struct DemuxFilterEvent {
+struct DemuxFilterEventExt {
safe_union Event {
- DemuxFilterSectionEvent section;
+ /**
+ * No extended record filter event. This is used by a tsRecord or mmtpRecord filter event
+ * that does not contain the DemuxFilterRecordEventExt information.
+ */
+ Monostate noinit;
- DemuxFilterMediaEvent media;
+ DemuxFilterRecordEventExt tsRecord;
- DemuxFilterPesEvent pes;
-
- @1.1::DemuxFilterTsRecordEvent tsRecord;
-
- @1.1::DemuxFilterMmtpRecordEvent mmtpRecord;
-
- DemuxFilterDownloadEvent download;
-
- DemuxFilterIpPayloadEvent ipPayload;
-
- DemuxFilterTemiEvent temi;
+ DemuxFilterRecordEventExt mmtpRecord;
};
/**
@@ -117,3 +90,91 @@
*/
vec<Event> events;
};
+
+typedef FrontendDvbcSpectralInversion FrontendSpectralInversion;
+
+/**
+ * Scan type for a DVBS Frontend.
+ */
+@export
+enum FrontendDvbsScanType : uint32_t {
+ UNDEFINED = 0,
+ DIRECT,
+ DISEQC,
+ UNICABLE,
+ JESS,
+};
+
+/**
+ * Rotation status for a DVBT Frontend.
+ */
+@export
+enum FrontendDvbtRotation : uint32_t {
+ UNDEFINED,
+ NOT_ROTATED,
+ ROTATED,
+};
+
+/**
+ * AFT flag for an Analog Frontend.
+ */
+@export
+enum FrontendAnalogAftFlag : uint32_t {
+ UNDEFINED,
+ AFT_TRUE,
+ AFT_FALSE,
+};
+
+/**
+ * Extended Transmission Mode for DVBT.
+ */
+@export
+enum FrontendDvbtTransmissionMode : @1.0::FrontendDvbtTransmissionMode {
+ MODE_8K_E = 1 << 7,
+
+ MODE_16K_E = 1 << 8,
+
+ MODE_32K_E = 1 << 9,
+};
+
+/**
+ * Extended Signal Settings for a DVBS Frontend.
+ */
+struct FrontendDvbsSettingsExt {
+ FrontendDvbsScanType scanType;
+};
+
+/**
+ * Extended Signal Settings for a DVBT Frontend.
+ */
+struct FrontendDvbtSettingsExt {
+ FrontendDvbtRotation rotation;
+
+ FrontendDvbtTransmissionMode transmissionMode;
+};
+
+/**
+ * Extended Signal Settings for an Analog Frontend.
+ */
+struct FrontendAnalogSettingsExt {
+ FrontendAnalogAftFlag aftFlag;
+};
+
+/**
+ * Extended Signal Settings for Frontend.
+ */
+struct FrontendSettingsExt {
+ uint32_t endFrequency;
+
+ FrontendSpectralInversion inversion;
+
+ safe_union SettingsExt {
+ Monostate noinit;
+
+ FrontendAnalogSettingsExt analog;
+
+ FrontendDvbsSettingsExt dvbs;
+
+ FrontendDvbtSettingsExt dvbt;
+ } settingExt;
+};
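A caller populates FrontendSettingsExt by setting the common fields and then selecting exactly one member of the settingExt safe_union, which defaults to noinit. A small sketch with placeholder values, mirroring the generated C++ setters used by the VTS configuration further below:

    #include <android/hardware/tv/tuner/1.1/types.h>

    namespace V1_1 = ::android::hardware::tv::tuner::V1_1;

    // Illustrative only: DVB-T extended settings with placeholder values.
    V1_1::FrontendSettingsExt makeDvbtSettingsExt() {
        V1_1::FrontendSettingsExt ext = {};  // inversion left at its zero/default value
        ext.endFrequency = 800000;           // scan upper bound, same unit as the 1.0 frequency
        ext.settingExt.dvbt({
                .transmissionMode = V1_1::FrontendDvbtTransmissionMode::MODE_8K_E,
        });
        return ext;
    }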
diff --git a/tv/tuner/1.1/vts/functional/FilterTests.cpp b/tv/tuner/1.1/vts/functional/FilterTests.cpp
index 9cbec86..24e1fa0 100644
--- a/tv/tuner/1.1/vts/functional/FilterTests.cpp
+++ b/tv/tuner/1.1/vts/functional/FilterTests.cpp
@@ -20,11 +20,12 @@
bool result = false;
ALOGW("[vts] reading from filter FMQ or buffer %d", mFilterId);
// todo separate filter handlers
- for (int i = 0; i < mFilterEvent.events.size(); i++) {
- switch (mFilterEventType) {
- case FilterEventType::RECORD:
- ALOGW("[vts] Record filter event, pts=%" PRIu64 ".",
- mFilterEvent.events[0].tsRecord().pts);
+ for (int i = 0; i < mFilterEventExt.events.size(); i++) {
+ auto eventExt = mFilterEventExt.events[i];
+ switch (eventExt.getDiscriminator()) {
+ case DemuxFilterEventExt::Event::hidl_discriminator::tsRecord:
+ ALOGW("[vts] Extended TS record filter event, pts=%" PRIu64 ".",
+ eventExt.tsRecord().pts);
break;
default:
break;
diff --git a/tv/tuner/1.1/vts/functional/FilterTests.h b/tv/tuner/1.1/vts/functional/FilterTests.h
index 8156f14..721e419 100644
--- a/tv/tuner/1.1/vts/functional/FilterTests.h
+++ b/tv/tuner/1.1/vts/functional/FilterTests.h
@@ -44,6 +44,7 @@
using android::hardware::MQDescriptorSync;
using android::hardware::Return;
using android::hardware::Void;
+using android::hardware::tv::tuner::V1_0::DemuxFilterEvent;
using android::hardware::tv::tuner::V1_0::DemuxFilterMainType;
using android::hardware::tv::tuner::V1_0::DemuxFilterSettings;
using android::hardware::tv::tuner::V1_0::DemuxFilterStatus;
@@ -52,7 +53,7 @@
using android::hardware::tv::tuner::V1_0::IDemux;
using android::hardware::tv::tuner::V1_0::IFilter;
using android::hardware::tv::tuner::V1_0::Result;
-using android::hardware::tv::tuner::V1_1::DemuxFilterEvent;
+using android::hardware::tv::tuner::V1_1::DemuxFilterEventExt;
using android::hardware::tv::tuner::V1_1::IFilterCallback;
using android::hardware::tv::tuner::V1_1::ITuner;
@@ -78,11 +79,13 @@
class FilterCallback : public IFilterCallback {
public:
- virtual Return<void> onFilterEvent_1_1(const DemuxFilterEvent& filterEvent) override {
+ virtual Return<void> onFilterEvent_1_1(const DemuxFilterEvent& filterEvent,
+ const DemuxFilterEventExt& filterEventExt) override {
android::Mutex::Autolock autoLock(mMsgLock);
// Temporarily we treat the first filter data that comes back on the matching pid as a success;
// once all of the MQs are cleared, it means we got all the expected output
mFilterEvent = filterEvent;
+ mFilterEventExt = filterEventExt;
readFilterEventData();
mPidFilterOutputCount++;
mMsgCondition.signal();
@@ -109,6 +112,7 @@
sp<IFilter> mFilter;
FilterEventType mFilterEventType;
DemuxFilterEvent mFilterEvent;
+ DemuxFilterEventExt mFilterEventExt;
android::Mutex mMsgLock;
android::Mutex mFilterOutputLock;
diff --git a/tv/tuner/1.1/vts/functional/FrontendTests.cpp b/tv/tuner/1.1/vts/functional/FrontendTests.cpp
index da46adb..5e2b288 100644
--- a/tv/tuner/1.1/vts/functional/FrontendTests.cpp
+++ b/tv/tuner/1.1/vts/functional/FrontendTests.cpp
@@ -32,13 +32,31 @@
}
}
-Return<void> FrontendCallback::onScanMessage(FrontendScanMessageType /*type*/,
- const FrontendScanMessage& /*message*/) {
+Return<void> FrontendCallback::onScanMessage(FrontendScanMessageType type,
+ const FrontendScanMessage& message) {
+ android::Mutex::Autolock autoLock(mMsgLock);
+ while (!mScanMsgProcessed) {
+ mMsgCondition.wait(mMsgLock);
+ }
+ ALOGD("[vts] frontend scan message. Type: %d", type);
+ mScanMessageReceived = true;
+ mScanMsgProcessed = false;
+ mScanMessageType = type;
+ mScanMessage = message;
+ mMsgCondition.signal();
return Void();
}
-void FrontendCallback::tuneTestOnLock(sp<IFrontend>& frontend, FrontendSettings settings) {
- Result result = frontend->tune(settings);
+void FrontendCallback::tuneTestOnLock(sp<IFrontend>& frontend, FrontendSettings settings,
+ FrontendSettingsExt settingsExt) {
+ sp<android::hardware::tv::tuner::V1_1::IFrontend> frontend_1_1;
+ frontend_1_1 = android::hardware::tv::tuner::V1_1::IFrontend::castFrom(frontend);
+ if (frontend_1_1 == nullptr) {
+ EXPECT_TRUE(false) << "Couldn't get 1.1 IFrontend from the Hal implementation.";
+ return;
+ }
+
+ Result result = frontend_1_1->tune_1_1(settings, settingsExt);
EXPECT_TRUE(result == Result::SUCCESS);
android::Mutex::Autolock autoLock(mMsgLock);
@@ -52,6 +70,130 @@
mLockMsgReceived = false;
}
+void FrontendCallback::scanTest(sp<IFrontend>& frontend, FrontendConfig config,
+ FrontendScanType type) {
+ sp<android::hardware::tv::tuner::V1_1::IFrontend> frontend_1_1;
+ frontend_1_1 = android::hardware::tv::tuner::V1_1::IFrontend::castFrom(frontend);
+ if (frontend_1_1 == nullptr) {
+ EXPECT_TRUE(false) << "Couldn't get 1.1 IFrontend from the Hal implementation.";
+ return;
+ }
+
+ uint32_t targetFrequency = getTargetFrequency(config.settings, config.type);
+ if (type == FrontendScanType::SCAN_BLIND) {
+ // Reset the frequency in the scan configuration to test blind scan. The settings passed in
+ // represent the real input config on the transponder connected to the DUT. We want the
+ // blind scan test to start from a lower frequency than this to exercise the blind scan
+ // implementation.
+ resetBlindScanStartingFrequency(config, targetFrequency - 100);
+ }
+
+ Result result = frontend_1_1->scan_1_1(config.settings, type, config.settingsExt);
+ EXPECT_TRUE(result == Result::SUCCESS);
+
+ bool scanMsgLockedReceived = false;
+ bool targetFrequencyReceived = false;
+
+ android::Mutex::Autolock autoLock(mMsgLock);
+wait:
+ while (!mScanMessageReceived) {
+ if (-ETIMEDOUT == mMsgCondition.waitRelative(mMsgLock, WAIT_TIMEOUT)) {
+ EXPECT_TRUE(false) << "Scan message not received within timeout";
+ mScanMessageReceived = false;
+ mScanMsgProcessed = true;
+ return;
+ }
+ }
+
+ if (mScanMessageType != FrontendScanMessageType::END) {
+ if (mScanMessageType == FrontendScanMessageType::LOCKED) {
+ scanMsgLockedReceived = true;
+ Result result = frontend_1_1->scan_1_1(config.settings, type, config.settingsExt);
+ EXPECT_TRUE(result == Result::SUCCESS);
+ }
+
+ if (mScanMessageType == FrontendScanMessageType::FREQUENCY) {
+ targetFrequencyReceived = mScanMessage.frequencies().size() > 0 &&
+ mScanMessage.frequencies()[0] == targetFrequency;
+ }
+
+ if (mScanMessageType == FrontendScanMessageType::PROGRESS_PERCENT) {
+ ALOGD("[vts] Scan in progress...[%d%%]", mScanMessage.progressPercent());
+ }
+
+ mScanMessageReceived = false;
+ mScanMsgProcessed = true;
+ mMsgCondition.signal();
+ goto wait;
+ }
+
+ EXPECT_TRUE(scanMsgLockedReceived) << "Scan message LOCKED not received before END";
+ EXPECT_TRUE(targetFrequencyReceived) << "frequency not received before LOCKED on blindScan";
+ mScanMessageReceived = false;
+ mScanMsgProcessed = true;
+}
+
+uint32_t FrontendCallback::getTargetFrequency(FrontendSettings settings, FrontendType type) {
+ switch (type) {
+ case FrontendType::ANALOG:
+ return settings.analog().frequency;
+ case FrontendType::ATSC:
+ return settings.atsc().frequency;
+ case FrontendType::ATSC3:
+ return settings.atsc3().frequency;
+ case FrontendType::DVBC:
+ return settings.dvbc().frequency;
+ case FrontendType::DVBS:
+ return settings.dvbs().frequency;
+ case FrontendType::DVBT:
+ return settings.dvbt().frequency;
+ case FrontendType::ISDBS:
+ return settings.isdbs().frequency;
+ case FrontendType::ISDBS3:
+ return settings.isdbs3().frequency;
+ case FrontendType::ISDBT:
+ return settings.isdbt().frequency;
+ default:
+ return 0;
+ }
+}
+
+void FrontendCallback::resetBlindScanStartingFrequency(FrontendConfig& config,
+ uint32_t resetingFreq) {
+ switch (config.type) {
+ case FrontendType::ANALOG:
+ config.settings.analog().frequency = resetingFreq;
+ break;
+ case FrontendType::ATSC:
+ config.settings.atsc().frequency = resetingFreq;
+ break;
+ case FrontendType::ATSC3:
+ config.settings.atsc3().frequency = resetingFreq;
+ break;
+ case FrontendType::DVBC:
+ config.settings.dvbc().frequency = resetingFreq;
+ break;
+ case FrontendType::DVBS:
+ config.settings.dvbs().frequency = resetingFreq;
+ break;
+ case FrontendType::DVBT:
+ config.settings.dvbt().frequency = resetingFreq;
+ break;
+ case FrontendType::ISDBS:
+ config.settings.isdbs().frequency = resetingFreq;
+ break;
+ case FrontendType::ISDBS3:
+ config.settings.isdbs3().frequency = resetingFreq;
+ break;
+ case FrontendType::ISDBT:
+ config.settings.isdbt().frequency = resetingFreq;
+ break;
+ default:
+ // do nothing
+ return;
+ }
+}
+
AssertionResult FrontendTests::getFrontendIds() {
Result status;
mService->getFrontendIds([&](Result result, const hidl_vec<FrontendId>& frontendIds) {
@@ -86,6 +228,138 @@
return AssertionResult(callbackStatus.isOk());
}
+AssertionResult FrontendTests::scanFrontend(FrontendConfig config, FrontendScanType type) {
+ EXPECT_TRUE(mFrontendCallback)
+ << "test with openFrontendById/setFrontendCallback/getFrontendInfo first.";
+
+ EXPECT_TRUE(mFrontendInfo.type == config.type)
+ << "FrontendConfig does not match the frontend info of the given id.";
+
+ mFrontendCallback->scanTest(mFrontend, config, type);
+ return AssertionResult(true);
+}
+
+AssertionResult FrontendTests::stopScanFrontend() {
+ EXPECT_TRUE(mFrontend) << "Test with openFrontendById first.";
+ Result status;
+ status = mFrontend->stopScan();
+ return AssertionResult(status == Result::SUCCESS);
+}
+
+void FrontendTests::verifyFrontendStatus(vector<FrontendStatusType> statusTypes,
+ vector<FrontendStatus> expectStatuses) {
+ ASSERT_TRUE(mFrontend) << "Frontend is not opened yet.";
+ Result status;
+ vector<FrontendStatus> realStatuses;
+
+ mFrontend->getStatus(statusTypes, [&](Result result, const hidl_vec<FrontendStatus>& statuses) {
+ status = result;
+ realStatuses = statuses;
+ });
+
+ ASSERT_TRUE(realStatuses.size() == statusTypes.size());
+ for (int i = 0; i < statusTypes.size(); i++) {
+ FrontendStatusType type = statusTypes[i];
+ switch (type) {
+ case FrontendStatusType::DEMOD_LOCK: {
+ ASSERT_TRUE(realStatuses[i].isDemodLocked() == expectStatuses[i].isDemodLocked());
+ break;
+ }
+ case FrontendStatusType::SNR: {
+ ASSERT_TRUE(realStatuses[i].snr() == expectStatuses[i].snr());
+ break;
+ }
+ case FrontendStatusType::BER: {
+ ASSERT_TRUE(realStatuses[i].ber() == expectStatuses[i].ber());
+ break;
+ }
+ case FrontendStatusType::PER: {
+ ASSERT_TRUE(realStatuses[i].per() == expectStatuses[i].per());
+ break;
+ }
+ case FrontendStatusType::PRE_BER: {
+ ASSERT_TRUE(realStatuses[i].preBer() == expectStatuses[i].preBer());
+ break;
+ }
+ case FrontendStatusType::SIGNAL_QUALITY: {
+ ASSERT_TRUE(realStatuses[i].signalQuality() == expectStatuses[i].signalQuality());
+ break;
+ }
+ case FrontendStatusType::SIGNAL_STRENGTH: {
+ ASSERT_TRUE(realStatuses[i].signalStrength() == expectStatuses[i].signalStrength());
+ break;
+ }
+ case FrontendStatusType::SYMBOL_RATE: {
+ ASSERT_TRUE(realStatuses[i].symbolRate() == expectStatuses[i].symbolRate());
+ break;
+ }
+ case FrontendStatusType::FEC: {
+ ASSERT_TRUE(realStatuses[i].innerFec() == expectStatuses[i].innerFec());
+ break;
+ }
+ case FrontendStatusType::MODULATION: {
+ // TODO: check modulation status
+ break;
+ }
+ case FrontendStatusType::SPECTRAL: {
+ ASSERT_TRUE(realStatuses[i].inversion() == expectStatuses[i].inversion());
+ break;
+ }
+ case FrontendStatusType::LNB_VOLTAGE: {
+ ASSERT_TRUE(realStatuses[i].lnbVoltage() == expectStatuses[i].lnbVoltage());
+ break;
+ }
+ case FrontendStatusType::PLP_ID: {
+ ASSERT_TRUE(realStatuses[i].plpId() == expectStatuses[i].plpId());
+ break;
+ }
+ case FrontendStatusType::EWBS: {
+ ASSERT_TRUE(realStatuses[i].isEWBS() == expectStatuses[i].isEWBS());
+ break;
+ }
+ case FrontendStatusType::AGC: {
+ ASSERT_TRUE(realStatuses[i].agc() == expectStatuses[i].agc());
+ break;
+ }
+ case FrontendStatusType::LNA: {
+ ASSERT_TRUE(realStatuses[i].isLnaOn() == expectStatuses[i].isLnaOn());
+ break;
+ }
+ case FrontendStatusType::LAYER_ERROR: {
+ vector<bool> realLayberError = realStatuses[i].isLayerError();
+ vector<bool> expectLayerError = expectStatuses[i].isLayerError();
+ ASSERT_TRUE(realLayberError.size() == expectLayerError.size());
+ for (int i = 0; i < realLayberError.size(); i++) {
+ ASSERT_TRUE(realLayberError[i] == expectLayerError[i]);
+ }
+ break;
+ }
+ case FrontendStatusType::MER: {
+ ASSERT_TRUE(realStatuses[i].mer() == expectStatuses[i].mer());
+ break;
+ }
+ case FrontendStatusType::FREQ_OFFSET: {
+ ASSERT_TRUE(realStatuses[i].freqOffset() == expectStatuses[i].freqOffset());
+ break;
+ }
+ case FrontendStatusType::HIERARCHY: {
+ ASSERT_TRUE(realStatuses[i].hierarchy() == expectStatuses[i].hierarchy());
+ break;
+ }
+ case FrontendStatusType::RF_LOCK: {
+ ASSERT_TRUE(realStatuses[i].isRfLocked() == expectStatuses[i].isRfLocked());
+ break;
+ }
+ case FrontendStatusType::ATSC3_PLP_INFO:
+ // TODO: verify plpinfo
+ break;
+ default:
+ continue;
+ }
+ }
+ ASSERT_TRUE(status == Result::SUCCESS);
+}
+
AssertionResult FrontendTests::tuneFrontend(FrontendConfig config, bool testWithDemux) {
EXPECT_TRUE(mFrontendCallback)
<< "test with openFrontendById/setFrontendCallback/getFrontendInfo first.";
@@ -106,7 +380,7 @@
return failure();
}
}
- mFrontendCallback->tuneTestOnLock(mFrontend, config.settings);
+ mFrontendCallback->tuneTestOnLock(mFrontend, config.settings, config.settingsExt);
return AssertionResult(true);
}
@@ -143,3 +417,26 @@
}
feId = INVALID_ID;
}
+
+void FrontendTests::tuneTest(FrontendConfig frontendConf) {
+ uint32_t feId;
+ getFrontendIdByType(frontendConf.type, feId);
+ ASSERT_TRUE(feId != INVALID_ID);
+ ASSERT_TRUE(openFrontendById(feId));
+ ASSERT_TRUE(setFrontendCallback());
+ ASSERT_TRUE(tuneFrontend(frontendConf, false /*testWithDemux*/));
+ verifyFrontendStatus(frontendConf.tuneStatusTypes, frontendConf.expectTuneStatuses);
+ ASSERT_TRUE(stopTuneFrontend(false /*testWithDemux*/));
+ ASSERT_TRUE(closeFrontend());
+}
+
+void FrontendTests::scanTest(FrontendConfig frontendConf, FrontendScanType scanType) {
+ uint32_t feId;
+ getFrontendIdByType(frontendConf.type, feId);
+ ASSERT_TRUE(feId != INVALID_ID);
+ ASSERT_TRUE(openFrontendById(feId));
+ ASSERT_TRUE(setFrontendCallback());
+ ASSERT_TRUE(scanFrontend(frontendConf, scanType));
+ ASSERT_TRUE(stopScanFrontend());
+ ASSERT_TRUE(closeFrontend());
+}
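scanTest above rendezvouses with onScanMessage through the mScanMessageReceived / mScanMsgProcessed flags guarded by one mutex and condition: the callback blocks until the previous message has been consumed, and the test thread acknowledges each message before waiting for the next. A condensed standard-library sketch of that handshake (names are illustrative, not the test members):

    #include <condition_variable>
    #include <mutex>

    // Illustrative handshake: the callback publishes one message at a time and
    // the test thread acknowledges it before the next one is accepted.
    struct ScanMessageChannel {
        std::mutex mtx;
        std::condition_variable cond;
        bool received = false;   // a message is waiting to be consumed
        bool processed = true;   // the previous message has been consumed

        void publish() {  // called from the HAL callback
            std::unique_lock<std::mutex> lock(mtx);
            cond.wait(lock, [this] { return processed; });
            received = true;
            processed = false;
            cond.notify_all();
        }

        void consume() {  // called from the test thread
            std::unique_lock<std::mutex> lock(mtx);
            cond.wait(lock, [this] { return received; });
            // ... inspect the message here ...
            received = false;
            processed = true;
            cond.notify_all();
        }
    };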
diff --git a/tv/tuner/1.1/vts/functional/FrontendTests.h b/tv/tuner/1.1/vts/functional/FrontendTests.h
index 492f2f0..8986d81 100644
--- a/tv/tuner/1.1/vts/functional/FrontendTests.h
+++ b/tv/tuner/1.1/vts/functional/FrontendTests.h
@@ -15,9 +15,9 @@
*/
#include <android-base/logging.h>
-#include <android/hardware/tv/tuner/1.0/IFrontend.h>
#include <android/hardware/tv/tuner/1.0/IFrontendCallback.h>
#include <android/hardware/tv/tuner/1.0/types.h>
+#include <android/hardware/tv/tuner/1.1/IFrontend.h>
#include <android/hardware/tv/tuner/1.1/ITuner.h>
#include <binder/MemoryDealer.h>
#include <gtest/gtest.h>
@@ -52,6 +52,7 @@
using android::hardware::tv::tuner::V1_0::FrontendInfo;
using android::hardware::tv::tuner::V1_0::FrontendScanMessage;
using android::hardware::tv::tuner::V1_0::FrontendScanMessageType;
+using android::hardware::tv::tuner::V1_0::FrontendScanType;
using android::hardware::tv::tuner::V1_0::IFrontend;
using android::hardware::tv::tuner::V1_0::IFrontendCallback;
using android::hardware::tv::tuner::V1_0::Result;
@@ -70,11 +71,21 @@
virtual Return<void> onScanMessage(FrontendScanMessageType type,
const FrontendScanMessage& message) override;
- void tuneTestOnLock(sp<IFrontend>& frontend, FrontendSettings settings);
+ void tuneTestOnLock(sp<IFrontend>& frontend, FrontendSettings settings,
+ FrontendSettingsExt settingsExt);
+ void scanTest(sp<IFrontend>& frontend, FrontendConfig config, FrontendScanType type);
+
+ // Helper methods
+ uint32_t getTargetFrequency(FrontendSettings settings, FrontendType type);
+ void resetBlindScanStartingFrequency(FrontendConfig& config, uint32_t resetingFreq);
private:
bool mEventReceived = false;
+ bool mScanMessageReceived = false;
bool mLockMsgReceived = false;
+ bool mScanMsgProcessed = true;
+ FrontendScanMessageType mScanMessageType;
+ FrontendScanMessage mScanMessage;
hidl_vec<uint8_t> mEventMessage;
android::Mutex mMsgLock;
android::Condition mMsgCondition;
@@ -95,11 +106,17 @@
AssertionResult getFrontendInfo(uint32_t frontendId);
AssertionResult openFrontendById(uint32_t frontendId);
AssertionResult setFrontendCallback();
+ AssertionResult scanFrontend(FrontendConfig config, FrontendScanType type);
+ AssertionResult stopScanFrontend();
AssertionResult tuneFrontend(FrontendConfig config, bool testWithDemux);
+ void verifyFrontendStatus(vector<FrontendStatusType> statusTypes,
+ vector<FrontendStatus> expectStatuses);
AssertionResult stopTuneFrontend(bool testWithDemux);
AssertionResult closeFrontend();
void getFrontendIdByType(FrontendType feType, uint32_t& feId);
+ void tuneTest(FrontendConfig frontendConf);
+ void scanTest(FrontendConfig frontend, FrontendScanType type);
void setDvrTests(DvrTests dvrTests) { mDvrTests = dvrTests; }
void setDemux(sp<IDemux> demux) { mDvrTests.setDemux(demux); }
diff --git a/tv/tuner/1.1/vts/functional/VtsHalTvTunerV1_1TargetTest.cpp b/tv/tuner/1.1/vts/functional/VtsHalTvTunerV1_1TargetTest.cpp
index c9873b2..c3df078 100644
--- a/tv/tuner/1.1/vts/functional/VtsHalTvTunerV1_1TargetTest.cpp
+++ b/tv/tuner/1.1/vts/functional/VtsHalTvTunerV1_1TargetTest.cpp
@@ -132,6 +132,21 @@
recordSingleFilterTest(filterArray[TS_RECORD0], frontendArray[DVBT], dvrArray[DVR_RECORD0]);
}
+TEST_P(TunerFrontendHidlTest, TuneFrontendWithFrontendSettingsExt) {
+ description("Tune one Frontend with specific setting and check Lock event");
+ mFrontendTests.tuneTest(frontendArray[DVBT]);
+}
+
+TEST_P(TunerFrontendHidlTest, BlindScanFrontendWithEndFrequency) {
+ description("Run an blind frontend scan with specific setting and check lock scanMessage");
+ mFrontendTests.scanTest(frontendScanArray[SCAN_DVBT], FrontendScanType::SCAN_BLIND);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ PerInstance, TunerFrontendHidlTest,
+ testing::ValuesIn(android::hardware::getAllHalInstanceNames(ITuner::descriptor)),
+ android::hardware::PrintInstanceNameToString);
+
INSTANTIATE_TEST_SUITE_P(
PerInstance, TunerFilterHidlTest,
testing::ValuesIn(android::hardware::getAllHalInstanceNames(ITuner::descriptor)),
@@ -146,8 +161,4 @@
PerInstance, TunerRecordHidlTest,
testing::ValuesIn(android::hardware::getAllHalInstanceNames(ITuner::descriptor)),
android::hardware::PrintInstanceNameToString);
-
-GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TunerFilterHidlTest);
-GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TunerDemuxHidlTest);
-GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TunerRecordHidlTest);
} // namespace
diff --git a/tv/tuner/1.1/vts/functional/VtsHalTvTunerV1_1TargetTest.h b/tv/tuner/1.1/vts/functional/VtsHalTvTunerV1_1TargetTest.h
index 1b28853..505e35a 100644
--- a/tv/tuner/1.1/vts/functional/VtsHalTvTunerV1_1TargetTest.h
+++ b/tv/tuner/1.1/vts/functional/VtsHalTvTunerV1_1TargetTest.h
@@ -21,6 +21,7 @@
void initConfiguration() {
initFrontendConfig();
+ initFrontendScanConfig();
initFilterConfig();
initDvrConfig();
}
@@ -50,6 +51,8 @@
FilterTests mFilterTests;
};
+GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TunerFilterHidlTest);
+
class TunerDemuxHidlTest : public testing::TestWithParam<std::string> {
public:
virtual void SetUp() override {
@@ -73,6 +76,8 @@
FilterTests mFilterTests;
};
+GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TunerDemuxHidlTest);
+
class TunerRecordHidlTest : public testing::TestWithParam<std::string> {
public:
virtual void SetUp() override {
@@ -100,4 +105,27 @@
FilterTests mFilterTests;
DvrTests mDvrTests;
};
+
+GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TunerRecordHidlTest);
+
+class TunerFrontendHidlTest : public testing::TestWithParam<std::string> {
+ public:
+ virtual void SetUp() override {
+ mService = ITuner::getService(GetParam());
+ ASSERT_NE(mService, nullptr);
+ initConfiguration();
+
+ mFrontendTests.setService(mService);
+ }
+
+ protected:
+ static void description(const std::string& description) {
+ RecordProperty("description", description);
+ }
+
+ sp<ITuner> mService;
+ FrontendTests mFrontendTests;
+};
+
+GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TunerFrontendHidlTest);
} // namespace
\ No newline at end of file
diff --git a/tv/tuner/1.1/vts/functional/VtsHalTvTunerV1_1TestConfigurations.h b/tv/tuner/1.1/vts/functional/VtsHalTvTunerV1_1TestConfigurations.h
index 39872d2..34418d1 100644
--- a/tv/tuner/1.1/vts/functional/VtsHalTvTunerV1_1TestConfigurations.h
+++ b/tv/tuner/1.1/vts/functional/VtsHalTvTunerV1_1TestConfigurations.h
@@ -46,6 +46,7 @@
using android::hardware::tv::tuner::V1_0::FrontendType;
using android::hardware::tv::tuner::V1_0::PlaybackSettings;
using android::hardware::tv::tuner::V1_0::RecordSettings;
+using android::hardware::tv::tuner::V1_1::FrontendSettingsExt;
using namespace std;
@@ -73,6 +74,11 @@
} Frontend;
typedef enum {
+ SCAN_DVBT,
+ SCAN_MAX,
+} FrontendScan;
+
+typedef enum {
DVR_RECORD0,
DVR_PLAYBACK0,
DVR_MAX,
@@ -90,6 +96,7 @@
bool isSoftwareFe;
FrontendType type;
FrontendSettings settings;
+ FrontendSettingsExt settingsExt;
vector<FrontendStatusType> tuneStatusTypes;
vector<FrontendStatus> expectTuneStatuses;
};
@@ -102,6 +109,7 @@
};
static FrontendConfig frontendArray[FILTER_MAX];
+static FrontendConfig frontendScanArray[SCAN_MAX];
static FilterConfig filterArray[FILTER_MAX];
static DvrConfig dvrArray[DVR_MAX];
@@ -129,10 +137,36 @@
frontendArray[DVBT].tuneStatusTypes = types;
frontendArray[DVBT].expectTuneStatuses = statuses;
frontendArray[DVBT].isSoftwareFe = true;
+ frontendArray[DVBT].settingsExt.settingExt.dvbt({
+ .transmissionMode =
+ android::hardware::tv::tuner::V1_1::FrontendDvbtTransmissionMode::MODE_8K_E,
+ });
frontendArray[DVBS].type = FrontendType::DVBS;
frontendArray[DVBS].isSoftwareFe = true;
};
+/** Configuration array for the frontend scan test */
+inline void initFrontendScanConfig() {
+ frontendScanArray[SCAN_DVBT].type = FrontendType::DVBT;
+ frontendScanArray[SCAN_DVBT].settings.dvbt({
+ .frequency = 578000,
+ .transmissionMode = FrontendDvbtTransmissionMode::MODE_8K,
+ .bandwidth = FrontendDvbtBandwidth::BANDWIDTH_8MHZ,
+ .constellation = FrontendDvbtConstellation::AUTO,
+ .hierarchy = FrontendDvbtHierarchy::AUTO,
+ .hpCoderate = FrontendDvbtCoderate::AUTO,
+ .lpCoderate = FrontendDvbtCoderate::AUTO,
+ .guardInterval = FrontendDvbtGuardInterval::AUTO,
+ .isHighPriority = true,
+ .standard = FrontendDvbtStandard::T,
+ });
+ frontendScanArray[SCAN_DVBT].settingsExt.endFrequency = 800000;
+ frontendScanArray[SCAN_DVBT].settingsExt.settingExt.dvbt({
+ .transmissionMode =
+ android::hardware::tv::tuner::V1_1::FrontendDvbtTransmissionMode::MODE_8K_E,
+ });
+};
+
/** Configuration array for the filter test */
inline void initFilterConfig() {
// TS VIDEO filter setting for default implementation testing
diff --git a/vibrator/bench/Android.bp b/vibrator/bench/Android.bp
new file mode 100644
index 0000000..c7f824d
--- /dev/null
+++ b/vibrator/bench/Android.bp
@@ -0,0 +1,34 @@
+//
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+cc_benchmark {
+ name: "VibratorHalIntegrationBenchmark",
+ defaults: ["hidl_defaults"],
+ srcs: [
+ "benchmark.cpp",
+ ],
+ shared_libs: [
+ "android.hardware.vibrator-cpp",
+ "android.hardware.vibrator@1.0",
+ "android.hardware.vibrator@1.1",
+ "android.hardware.vibrator@1.2",
+ "android.hardware.vibrator@1.3",
+ "libbinder",
+ "libhardware",
+ "libhidlbase",
+ "libutils",
+ ],
+ test_suites: ["device-tests"],
+}
diff --git a/vibrator/bench/benchmark.cpp b/vibrator/bench/benchmark.cpp
new file mode 100644
index 0000000..e19dc6f
--- /dev/null
+++ b/vibrator/bench/benchmark.cpp
@@ -0,0 +1,577 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "benchmark/benchmark.h"
+
+#include <android/hardware/vibrator/1.3/IVibrator.h>
+#include <android/hardware/vibrator/BnVibratorCallback.h>
+#include <android/hardware/vibrator/IVibrator.h>
+#include <binder/IServiceManager.h>
+
+using ::android::enum_range;
+using ::android::sp;
+using ::android::hardware::hidl_enum_range;
+using ::android::hardware::Return;
+using ::android::hardware::details::hidl_enum_values;
+using ::benchmark::Counter;
+using ::benchmark::Fixture;
+using ::benchmark::kMicrosecond;
+using ::benchmark::State;
+using ::benchmark::internal::Benchmark;
+using ::std::chrono::duration;
+using ::std::chrono::duration_cast;
+using ::std::chrono::high_resolution_clock;
+
+namespace Aidl = ::android::hardware::vibrator;
+namespace V1_0 = ::android::hardware::vibrator::V1_0;
+namespace V1_1 = ::android::hardware::vibrator::V1_1;
+namespace V1_2 = ::android::hardware::vibrator::V1_2;
+namespace V1_3 = ::android::hardware::vibrator::V1_3;
+
+template <typename I>
+class BaseBench : public Fixture {
+ public:
+ void TearDown(State& /*state*/) override {
+ if (!mVibrator) {
+ return;
+ }
+ mVibrator->off();
+ }
+
+ static void DefaultConfig(Benchmark* b) { b->Unit(kMicrosecond); }
+
+ static void DefaultArgs(Benchmark* /*b*/) { /* none */
+ }
+
+ protected:
+ auto getOtherArg(const State& state, std::size_t index) const { return state.range(index + 0); }
+
+ protected:
+ sp<I> mVibrator;
+};
+
+template <typename I>
+class VibratorBench : public BaseBench<I> {
+ public:
+ void SetUp(State& /*state*/) override { this->mVibrator = I::getService(); }
+};
+
+enum class EmptyEnum : uint32_t;
+template <>
+inline constexpr std::array<EmptyEnum, 0> hidl_enum_values<EmptyEnum> = {};
+
+template <typename T, typename U>
+std::set<T> difference(const hidl_enum_range<T>& t, const hidl_enum_range<U>& u) {
+ class Compare {
+ public:
+ bool operator()(const T& a, const U& b) { return a < static_cast<T>(b); }
+ bool operator()(const U& a, const T& b) { return static_cast<T>(a) < b; }
+ };
+ std::set<T> ret;
+
+ std::set_difference(t.begin(), t.end(), u.begin(), u.end(),
+ std::insert_iterator<decltype(ret)>(ret, ret.begin()), Compare());
+
+ return ret;
+}
+
+template <typename I, typename E1, typename E2 = EmptyEnum>
+class VibratorEffectsBench : public VibratorBench<I> {
+ public:
+ using Effect = E1;
+ using EffectStrength = V1_0::EffectStrength;
+ using Status = V1_0::Status;
+
+ public:
+ static void DefaultArgs(Benchmark* b) {
+ b->ArgNames({"Effect", "Strength"});
+ for (const auto& effect : difference(hidl_enum_range<E1>(), hidl_enum_range<E2>())) {
+ for (const auto& strength : hidl_enum_range<EffectStrength>()) {
+ b->Args({static_cast<long>(effect), static_cast<long>(strength)});
+ }
+ }
+ }
+
+ void performBench(State* state, Return<void> (I::*performApi)(Effect, EffectStrength,
+ typename I::perform_cb)) {
+ auto effect = getEffect(*state);
+ auto strength = getStrength(*state);
+ bool supported = true;
+
+ (*this->mVibrator.*performApi)(effect, strength, [&](Status status, uint32_t /*lengthMs*/) {
+ if (status == Status::UNSUPPORTED_OPERATION) {
+ supported = false;
+ }
+ });
+
+ if (!supported) {
+ return;
+ }
+
+ for (auto _ : *state) {
+ state->ResumeTiming();
+ (*this->mVibrator.*performApi)(effect, strength,
+ [](Status /*status*/, uint32_t /*lengthMs*/) {});
+ state->PauseTiming();
+ this->mVibrator->off();
+ }
+ }
+
+ protected:
+ auto getEffect(const State& state) const {
+ return static_cast<Effect>(this->getOtherArg(state, 0));
+ }
+
+ auto getStrength(const State& state) const {
+ return static_cast<EffectStrength>(this->getOtherArg(state, 1));
+ }
+};
+
+#define BENCHMARK_WRAPPER(fixt, test, code) \
+ BENCHMARK_DEFINE_F(fixt, test) \
+ /* NOLINTNEXTLINE */ \
+ (State & state) { \
+ if (!mVibrator) { \
+ return; \
+ } \
+ \
+ code \
+ } \
+ BENCHMARK_REGISTER_F(fixt, test)->Apply(fixt::DefaultConfig)->Apply(fixt::DefaultArgs)
+
+using VibratorBench_V1_0 = VibratorBench<V1_0::IVibrator>;
+
+BENCHMARK_WRAPPER(VibratorBench_V1_0, on, {
+ uint32_t ms = UINT32_MAX;
+
+ for (auto _ : state) {
+ state.ResumeTiming();
+ mVibrator->on(ms);
+ state.PauseTiming();
+ mVibrator->off();
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorBench_V1_0, off, {
+ uint32_t ms = UINT32_MAX;
+
+ for (auto _ : state) {
+ state.PauseTiming();
+ mVibrator->on(ms);
+ state.ResumeTiming();
+ mVibrator->off();
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorBench_V1_0, supportsAmplitudeControl, {
+ for (auto _ : state) {
+ mVibrator->supportsAmplitudeControl();
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorBench_V1_0, setAmplitude, {
+ uint8_t amplitude = UINT8_MAX;
+
+ if (!mVibrator->supportsAmplitudeControl()) {
+ return;
+ }
+
+ mVibrator->on(UINT32_MAX);
+
+ for (auto _ : state) {
+ mVibrator->setAmplitude(amplitude);
+ }
+
+ mVibrator->off();
+});
+
+using VibratorEffectsBench_V1_0 = VibratorEffectsBench<V1_0::IVibrator, V1_0::Effect>;
+
+BENCHMARK_WRAPPER(VibratorEffectsBench_V1_0, perform,
+ { performBench(&state, &V1_0::IVibrator::perform); });
+
+using VibratorEffectsBench_V1_1 =
+ VibratorEffectsBench<V1_1::IVibrator, V1_1::Effect_1_1, V1_0::Effect>;
+
+BENCHMARK_WRAPPER(VibratorEffectsBench_V1_1, perform_1_1,
+ { performBench(&state, &V1_1::IVibrator::perform_1_1); });
+
+using VibratorEffectsBench_V1_2 =
+ VibratorEffectsBench<V1_2::IVibrator, V1_2::Effect, V1_1::Effect_1_1>;
+
+BENCHMARK_WRAPPER(VibratorEffectsBench_V1_2, perform_1_2,
+ { performBench(&state, &V1_2::IVibrator::perform_1_2); });
+
+using VibratorBench_V1_3 = VibratorBench<V1_3::IVibrator>;
+
+BENCHMARK_WRAPPER(VibratorBench_V1_3, supportsExternalControl, {
+ for (auto _ : state) {
+ mVibrator->supportsExternalControl();
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorBench_V1_3, setExternalControl, {
+ bool enable = true;
+
+ if (!mVibrator->supportsExternalControl()) {
+ return;
+ }
+
+ for (auto _ : state) {
+ state.ResumeTiming();
+ mVibrator->setExternalControl(enable);
+ state.PauseTiming();
+ mVibrator->setExternalControl(false);
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorBench_V1_3, supportsExternalAmplitudeControl, {
+ if (!mVibrator->supportsExternalControl()) {
+ return;
+ }
+
+ mVibrator->setExternalControl(true);
+
+ for (auto _ : state) {
+ mVibrator->supportsAmplitudeControl();
+ }
+
+ mVibrator->setExternalControl(false);
+});
+
+BENCHMARK_WRAPPER(VibratorBench_V1_3, setExternalAmplitude, {
+ uint8_t amplitude = UINT8_MAX;
+
+ if (!mVibrator->supportsExternalControl()) {
+ return;
+ }
+
+ mVibrator->setExternalControl(true);
+
+ if (!mVibrator->supportsAmplitudeControl()) {
+ return;
+ }
+
+ for (auto _ : state) {
+ mVibrator->setAmplitude(amplitude);
+ }
+
+ mVibrator->setExternalControl(false);
+});
+
+using VibratorEffectsBench_V1_3 = VibratorEffectsBench<V1_3::IVibrator, V1_3::Effect, V1_2::Effect>;
+
+BENCHMARK_WRAPPER(VibratorEffectsBench_V1_3, perform_1_3,
+ { performBench(&state, &V1_3::IVibrator::perform_1_3); });
+
+class VibratorBench_Aidl : public BaseBench<Aidl::IVibrator> {
+ public:
+ void SetUp(State& /*state*/) override {
+ this->mVibrator = android::waitForVintfService<Aidl::IVibrator>();
+ }
+};
+
+class HalCallback : public Aidl::BnVibratorCallback {
+ public:
+ HalCallback() = default;
+ ~HalCallback() = default;
+
+ android::binder::Status onComplete() override { return android::binder::Status::ok(); }
+};
+
+BENCHMARK_WRAPPER(VibratorBench_Aidl, on, {
+ int32_t capabilities = 0;
+ mVibrator->getCapabilities(&capabilities);
+
+ int32_t ms = INT32_MAX;
+ auto cb = (capabilities & Aidl::IVibrator::CAP_ON_CALLBACK) ? new HalCallback() : nullptr;
+
+ for (auto _ : state) {
+ state.ResumeTiming();
+ mVibrator->on(ms, cb);
+ state.PauseTiming();
+ mVibrator->off();
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorBench_Aidl, off, {
+ for (auto _ : state) {
+ state.PauseTiming();
+ mVibrator->on(INT32_MAX, nullptr);
+ state.ResumeTiming();
+ mVibrator->off();
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorBench_Aidl, getCapabilities, {
+ int32_t capabilities = 0;
+
+ for (auto _ : state) {
+ mVibrator->getCapabilities(&capabilities);
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorBench_Aidl, setAmplitude, {
+ int32_t capabilities = 0;
+ mVibrator->getCapabilities(&capabilities);
+ if ((capabilities & Aidl::IVibrator::CAP_AMPLITUDE_CONTROL) == 0) {
+ return;
+ }
+
+ float amplitude = 1.0f;
+ mVibrator->on(INT32_MAX, nullptr);
+
+ for (auto _ : state) {
+ mVibrator->setAmplitude(amplitude);
+ }
+
+ mVibrator->off();
+});
+
+BENCHMARK_WRAPPER(VibratorBench_Aidl, setExternalControl, {
+ int32_t capabilities = 0;
+ mVibrator->getCapabilities(&capabilities);
+ if ((capabilities & Aidl::IVibrator::CAP_EXTERNAL_CONTROL) == 0) {
+ return;
+ }
+
+ for (auto _ : state) {
+ state.ResumeTiming();
+ mVibrator->setExternalControl(true);
+ state.PauseTiming();
+ mVibrator->setExternalControl(false);
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorBench_Aidl, setExternalAmplitude, {
+ int32_t capabilities = 0;
+ mVibrator->getCapabilities(&capabilities);
+ if ((capabilities & Aidl::IVibrator::CAP_EXTERNAL_CONTROL) == 0 ||
+ (capabilities & Aidl::IVibrator::CAP_EXTERNAL_AMPLITUDE_CONTROL) == 0) {
+ return;
+ }
+
+ float amplitude = 1.0f;
+ mVibrator->setExternalControl(true);
+
+ for (auto _ : state) {
+ mVibrator->setAmplitude(amplitude);
+ }
+
+ mVibrator->setExternalControl(false);
+});
+
+BENCHMARK_WRAPPER(VibratorBench_Aidl, getSupportedEffects, {
+ std::vector<Aidl::Effect> supportedEffects;
+
+ for (auto _ : state) {
+ mVibrator->getSupportedEffects(&supportedEffects);
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorBench_Aidl, getSupportedAlwaysOnEffects, {
+ std::vector<Aidl::Effect> supportedEffects;
+
+ for (auto _ : state) {
+ mVibrator->getSupportedAlwaysOnEffects(&supportedEffects);
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorBench_Aidl, getSupportedPrimitives, {
+ std::vector<Aidl::CompositePrimitive> supportedPrimitives;
+
+ for (auto _ : state) {
+ mVibrator->getSupportedPrimitives(&supportedPrimitives);
+ }
+});
+
+class VibratorEffectsBench_Aidl : public VibratorBench_Aidl {
+ public:
+ static void DefaultArgs(Benchmark* b) {
+ b->ArgNames({"Effect", "Strength"});
+ for (const auto& effect : enum_range<Aidl::Effect>()) {
+ for (const auto& strength : enum_range<Aidl::EffectStrength>()) {
+ b->Args({static_cast<long>(effect), static_cast<long>(strength)});
+ }
+ }
+ }
+
+ protected:
+ auto getEffect(const State& state) const {
+ return static_cast<Aidl::Effect>(this->getOtherArg(state, 0));
+ }
+
+ auto getStrength(const State& state) const {
+ return static_cast<Aidl::EffectStrength>(this->getOtherArg(state, 1));
+ }
+};
+
+BENCHMARK_WRAPPER(VibratorEffectsBench_Aidl, alwaysOnEnable, {
+ int32_t capabilities = 0;
+ mVibrator->getCapabilities(&capabilities);
+ if ((capabilities & Aidl::IVibrator::CAP_ALWAYS_ON_CONTROL) == 0) {
+ return;
+ }
+
+ int32_t id = 1;
+ auto effect = getEffect(state);
+ auto strength = getStrength(state);
+
+ std::vector<Aidl::Effect> supported;
+ mVibrator->getSupportedAlwaysOnEffects(&supported);
+ if (std::find(supported.begin(), supported.end(), effect) == supported.end()) {
+ return;
+ }
+
+ for (auto _ : state) {
+ state.ResumeTiming();
+ mVibrator->alwaysOnEnable(id, effect, strength);
+ state.PauseTiming();
+ mVibrator->alwaysOnDisable(id);
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorEffectsBench_Aidl, alwaysOnDisable, {
+ int32_t capabilities = 0;
+ mVibrator->getCapabilities(&capabilities);
+ if ((capabilities & Aidl::IVibrator::CAP_ALWAYS_ON_CONTROL) == 0) {
+ return;
+ }
+
+ int32_t id = 1;
+ auto effect = getEffect(state);
+ auto strength = getStrength(state);
+
+ std::vector<Aidl::Effect> supported;
+ mVibrator->getSupportedAlwaysOnEffects(&supported);
+ if (std::find(supported.begin(), supported.end(), effect) == supported.end()) {
+ return;
+ }
+
+ for (auto _ : state) {
+ state.PauseTiming();
+ mVibrator->alwaysOnEnable(id, effect, strength);
+ state.ResumeTiming();
+ mVibrator->alwaysOnDisable(id);
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorEffectsBench_Aidl, perform, {
+ int32_t capabilities = 0;
+ mVibrator->getCapabilities(&capabilities);
+
+ auto effect = getEffect(state);
+ auto strength = getStrength(state);
+ auto cb = (capabilities & Aidl::IVibrator::CAP_PERFORM_CALLBACK) ? new HalCallback() : nullptr;
+ int32_t lengthMs = 0;
+
+ std::vector<Aidl::Effect> supported;
+ mVibrator->getSupportedEffects(&supported);
+ if (std::find(supported.begin(), supported.end(), effect) == supported.end()) {
+ return;
+ }
+
+ for (auto _ : state) {
+ state.ResumeTiming();
+ mVibrator->perform(effect, strength, cb, &lengthMs);
+ state.PauseTiming();
+ mVibrator->off();
+ }
+});
+
+class VibratorPrimitivesBench_Aidl : public VibratorBench_Aidl {
+ public:
+ static void DefaultArgs(Benchmark* b) {
+ b->ArgNames({"Primitive"});
+ for (const auto& primitive : enum_range<Aidl::CompositePrimitive>()) {
+ b->Args({static_cast<long>(primitive)});
+ }
+ }
+
+ protected:
+ auto getPrimitive(const State& state) const {
+ return static_cast<Aidl::CompositePrimitive>(this->getOtherArg(state, 0));
+ }
+};
+
+BENCHMARK_WRAPPER(VibratorBench_Aidl, getCompositionDelayMax, {
+ int32_t ms = 0;
+
+ for (auto _ : state) {
+ mVibrator->getCompositionDelayMax(&ms);
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorBench_Aidl, getCompositionSizeMax, {
+ int32_t size = 0;
+
+ for (auto _ : state) {
+ mVibrator->getCompositionSizeMax(&size);
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorPrimitivesBench_Aidl, getPrimitiveDuration, {
+ int32_t capabilities = 0;
+ mVibrator->getCapabilities(&capabilities);
+ if ((capabilities & Aidl::IVibrator::CAP_COMPOSE_EFFECTS) == 0) {
+ return;
+ }
+
+ auto primitive = getPrimitive(state);
+ int32_t ms = 0;
+
+ std::vector<Aidl::CompositePrimitive> supported;
+ mVibrator->getSupportedPrimitives(&supported);
+ if (std::find(supported.begin(), supported.end(), primitive) == supported.end()) {
+ return;
+ }
+
+ for (auto _ : state) {
+ mVibrator->getPrimitiveDuration(primitive, &ms);
+ }
+});
+
+BENCHMARK_WRAPPER(VibratorPrimitivesBench_Aidl, compose, {
+ int32_t capabilities = 0;
+ mVibrator->getCapabilities(&capabilities);
+ if ((capabilities & Aidl::IVibrator::CAP_COMPOSE_EFFECTS) == 0) {
+ return;
+ }
+
+ Aidl::CompositeEffect effect;
+ effect.primitive = getPrimitive(state);
+ effect.scale = 1.0f;
+ effect.delayMs = 0;
+
+ std::vector<Aidl::CompositePrimitive> supported;
+ mVibrator->getSupportedPrimitives(&supported);
+ if (std::find(supported.begin(), supported.end(), effect.primitive) == supported.end()) {
+ return;
+ }
+
+ auto cb = new HalCallback();
+ std::vector<Aidl::CompositeEffect> effects;
+ effects.push_back(effect);
+
+ for (auto _ : state) {
+ state.ResumeTiming();
+ mVibrator->compose(effects, cb);
+ state.PauseTiming();
+ mVibrator->off();
+ }
+});
+
+BENCHMARK_MAIN();
diff --git a/wifi/1.0/vts/functional/wifi_hidl_test_utils.cpp b/wifi/1.0/vts/functional/wifi_hidl_test_utils.cpp
index 5b11dd3..092822f 100644
--- a/wifi/1.0/vts/functional/wifi_hidl_test_utils.cpp
+++ b/wifi/1.0/vts/functional/wifi_hidl_test_utils.cpp
@@ -112,7 +112,7 @@
const auto& status_and_chip_ids = HIDL_INVOKE(wifi, getChipIds);
const auto& chip_ids = status_and_chip_ids.second;
if (status_and_chip_ids.first.code != WifiStatusCode::SUCCESS ||
- chip_ids.size() != 1) {
+ chip_ids.size() < 1) {
return nullptr;
}
const auto& status_and_chip = HIDL_INVOKE(wifi, getChip, chip_ids[0]);
diff --git a/wifi/1.5/default/Android.mk b/wifi/1.5/default/Android.mk
index 236dae2..dc9e89b 100644
--- a/wifi/1.5/default/Android.mk
+++ b/wifi/1.5/default/Android.mk
@@ -51,6 +51,7 @@
wifi_feature_flags.cpp \
wifi_iface_util.cpp \
wifi_legacy_hal.cpp \
+ wifi_legacy_hal_factory.cpp \
wifi_legacy_hal_stubs.cpp \
wifi_mode_controller.cpp \
wifi_nan_iface.cpp \
@@ -67,12 +68,14 @@
libutils \
libwifi-hal \
libwifi-system-iface \
+ libxml2 \
android.hardware.wifi@1.0 \
android.hardware.wifi@1.1 \
android.hardware.wifi@1.2 \
android.hardware.wifi@1.3 \
android.hardware.wifi@1.4 \
android.hardware.wifi@1.5
+LOCAL_C_INCLUDES += $(TOP)/external/libxml2/include
LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)
include $(BUILD_STATIC_LIBRARY)
@@ -96,6 +99,7 @@
libutils \
libwifi-hal \
libwifi-system-iface \
+ libxml2 \
android.hardware.wifi@1.0 \
android.hardware.wifi@1.1 \
android.hardware.wifi@1.2 \
@@ -129,6 +133,7 @@
libutils \
libwifi-hal \
libwifi-system-iface \
+ libxml2 \
android.hardware.wifi@1.0 \
android.hardware.wifi@1.1 \
android.hardware.wifi@1.2 \
diff --git a/wifi/1.5/default/service.cpp b/wifi/1.5/default/service.cpp
index f53d528..8539a37 100644
--- a/wifi/1.5/default/service.cpp
+++ b/wifi/1.5/default/service.cpp
@@ -23,6 +23,7 @@
#include "wifi.h"
#include "wifi_feature_flags.h"
#include "wifi_legacy_hal.h"
+#include "wifi_legacy_hal_factory.h"
#include "wifi_mode_controller.h"
using android::hardware::configureRpcThreadpool;
@@ -32,6 +33,8 @@
WifiFeatureFlags;
using android::hardware::wifi::V1_5::implementation::iface_util::WifiIfaceUtil;
using android::hardware::wifi::V1_5::implementation::legacy_hal::WifiLegacyHal;
+using android::hardware::wifi::V1_5::implementation::legacy_hal::
+ WifiLegacyHalFactory;
using android::hardware::wifi::V1_5::implementation::mode_controller::
WifiModeController;
@@ -50,10 +53,13 @@
const auto iface_tool =
std::make_shared<android::wifi_system::InterfaceTool>();
+ const auto legacy_hal_factory =
+ std::make_shared<WifiLegacyHalFactory>(iface_tool);
+
// Setup hwbinder service
android::sp<android::hardware::wifi::V1_5::IWifi> service =
new android::hardware::wifi::V1_5::implementation::Wifi(
- iface_tool, std::make_shared<WifiLegacyHal>(iface_tool),
+ iface_tool, legacy_hal_factory,
std::make_shared<WifiModeController>(),
std::make_shared<WifiIfaceUtil>(iface_tool),
std::make_shared<WifiFeatureFlags>());
diff --git a/wifi/1.5/default/tests/mock_wifi_feature_flags.h b/wifi/1.5/default/tests/mock_wifi_feature_flags.h
index 92fbb05..c3877ed 100644
--- a/wifi/1.5/default/tests/mock_wifi_feature_flags.h
+++ b/wifi/1.5/default/tests/mock_wifi_feature_flags.h
@@ -33,7 +33,8 @@
public:
MockWifiFeatureFlags();
- MOCK_METHOD0(getChipModes, std::vector<V1_0::IWifiChip::ChipMode>());
+ MOCK_METHOD1(getChipModes,
+ std::vector<V1_0::IWifiChip::ChipMode>(bool is_primary));
MOCK_METHOD0(isApMacRandomizationDisabled, bool());
};
diff --git a/wifi/1.5/default/tests/mock_wifi_legacy_hal.cpp b/wifi/1.5/default/tests/mock_wifi_legacy_hal.cpp
index 501bd7f..d13c556 100644
--- a/wifi/1.5/default/tests/mock_wifi_legacy_hal.cpp
+++ b/wifi/1.5/default/tests/mock_wifi_legacy_hal.cpp
@@ -29,8 +29,9 @@
namespace legacy_hal {
MockWifiLegacyHal::MockWifiLegacyHal(
- const std::weak_ptr<wifi_system::InterfaceTool> iface_tool)
- : WifiLegacyHal(iface_tool) {}
+ const std::weak_ptr<wifi_system::InterfaceTool> iface_tool,
+ const wifi_hal_fn& fn, bool is_primary)
+ : WifiLegacyHal(iface_tool, fn, is_primary) {}
} // namespace legacy_hal
} // namespace implementation
} // namespace V1_5
diff --git a/wifi/1.5/default/tests/mock_wifi_legacy_hal.h b/wifi/1.5/default/tests/mock_wifi_legacy_hal.h
index f938347..9ab2fd5 100644
--- a/wifi/1.5/default/tests/mock_wifi_legacy_hal.h
+++ b/wifi/1.5/default/tests/mock_wifi_legacy_hal.h
@@ -31,7 +31,8 @@
class MockWifiLegacyHal : public WifiLegacyHal {
public:
MockWifiLegacyHal(
- const std::weak_ptr<wifi_system::InterfaceTool> iface_tool);
+ const std::weak_ptr<wifi_system::InterfaceTool> iface_tool,
+ const wifi_hal_fn& fn, bool is_primary);
MOCK_METHOD0(initialize, wifi_error());
MOCK_METHOD0(start, wifi_error());
MOCK_METHOD2(stop, wifi_error(std::unique_lock<std::recursive_mutex>*,
diff --git a/wifi/1.5/default/tests/wifi_chip_unit_tests.cpp b/wifi/1.5/default/tests/wifi_chip_unit_tests.cpp
index 1d55e16..d99bfbd 100644
--- a/wifi/1.5/default/tests/wifi_chip_unit_tests.cpp
+++ b/wifi/1.5/default/tests/wifi_chip_unit_tests.cpp
@@ -59,7 +59,7 @@
{feature_flags::chip_mode_ids::kV1Ap, combinationsAp}
};
// clang-format on
- EXPECT_CALL(*feature_flags_, getChipModes())
+ EXPECT_CALL(*feature_flags_, getChipModes(true))
.WillRepeatedly(testing::Return(modes));
}
@@ -76,7 +76,7 @@
{feature_flags::chip_mode_ids::kV1Ap, combinationsAp}
};
// clang-format on
- EXPECT_CALL(*feature_flags_, getChipModes())
+ EXPECT_CALL(*feature_flags_, getChipModes(true))
.WillRepeatedly(testing::Return(modes));
}
@@ -89,7 +89,7 @@
{feature_flags::chip_mode_ids::kV1Sta, combinationsSta}
};
// clang-format on
- EXPECT_CALL(*feature_flags_, getChipModes())
+ EXPECT_CALL(*feature_flags_, getChipModes(true))
.WillRepeatedly(testing::Return(modes));
}
@@ -103,7 +103,7 @@
{feature_flags::chip_mode_ids::kV3, combinations}
};
// clang-format on
- EXPECT_CALL(*feature_flags_, getChipModes())
+ EXPECT_CALL(*feature_flags_, getChipModes(true))
.WillRepeatedly(testing::Return(modes));
}
@@ -116,7 +116,7 @@
{feature_flags::chip_mode_ids::kV3, combinations}
};
// clang-format on
- EXPECT_CALL(*feature_flags_, getChipModes())
+ EXPECT_CALL(*feature_flags_, getChipModes(true))
.WillRepeatedly(testing::Return(modes));
}
@@ -129,7 +129,7 @@
{feature_flags::chip_mode_ids::kV3, combinations}
};
// clang-format on
- EXPECT_CALL(*feature_flags_, getChipModes())
+ EXPECT_CALL(*feature_flags_, getChipModes(true))
.WillRepeatedly(testing::Return(modes));
}
@@ -267,10 +267,12 @@
sp<WifiChip> chip_;
ChipId chip_id_ = kFakeChipId;
+ legacy_hal::wifi_hal_fn fake_func_table_;
std::shared_ptr<NiceMock<wifi_system::MockInterfaceTool>> iface_tool_{
new NiceMock<wifi_system::MockInterfaceTool>};
std::shared_ptr<NiceMock<legacy_hal::MockWifiLegacyHal>> legacy_hal_{
- new NiceMock<legacy_hal::MockWifiLegacyHal>(iface_tool_)};
+ new NiceMock<legacy_hal::MockWifiLegacyHal>(iface_tool_,
+ fake_func_table_, true)};
std::shared_ptr<NiceMock<mode_controller::MockWifiModeController>>
mode_controller_{new NiceMock<mode_controller::MockWifiModeController>};
std::shared_ptr<NiceMock<iface_util::MockWifiIfaceUtil>> iface_util_{
@@ -281,8 +283,8 @@
public:
void SetUp() override {
chip_ =
- new WifiChip(chip_id_, legacy_hal_, mode_controller_, iface_util_,
- feature_flags_, subsystemRestartHandler);
+ new WifiChip(chip_id_, true, legacy_hal_, mode_controller_,
+ iface_util_, feature_flags_, subsystemRestartHandler);
EXPECT_CALL(*mode_controller_, changeFirmwareMode(testing::_))
.WillRepeatedly(testing::Return(true));
diff --git a/wifi/1.5/default/tests/wifi_nan_iface_unit_tests.cpp b/wifi/1.5/default/tests/wifi_nan_iface_unit_tests.cpp
index 3e7026f..411190b 100644
--- a/wifi/1.5/default/tests/wifi_nan_iface_unit_tests.cpp
+++ b/wifi/1.5/default/tests/wifi_nan_iface_unit_tests.cpp
@@ -112,10 +112,12 @@
class WifiNanIfaceTest : public Test {
protected:
+ legacy_hal::wifi_hal_fn fake_func_table_;
std::shared_ptr<NiceMock<wifi_system::MockInterfaceTool>> iface_tool_{
new NiceMock<wifi_system::MockInterfaceTool>};
std::shared_ptr<NiceMock<legacy_hal::MockWifiLegacyHal>> legacy_hal_{
- new NiceMock<legacy_hal::MockWifiLegacyHal>(iface_tool_)};
+ new NiceMock<legacy_hal::MockWifiLegacyHal>(iface_tool_,
+ fake_func_table_, true)};
std::shared_ptr<NiceMock<iface_util::MockWifiIfaceUtil>> iface_util_{
new NiceMock<iface_util::MockWifiIfaceUtil>(iface_tool_)};
};
diff --git a/wifi/1.5/default/wifi.cpp b/wifi/1.5/default/wifi.cpp
index c4e2333..17db51d 100644
--- a/wifi/1.5/default/wifi.cpp
+++ b/wifi/1.5/default/wifi.cpp
@@ -21,8 +21,8 @@
#include "wifi_status_util.h"
namespace {
-// Chip ID to use for the only supported chip.
-static constexpr android::hardware::wifi::V1_0::ChipId kChipId = 0;
+// Starting chip ID; it is assigned to the primary chip.
+static constexpr android::hardware::wifi::V1_0::ChipId kPrimaryChipId = 0;
} // namespace
namespace android {
@@ -35,12 +35,12 @@
Wifi::Wifi(
const std::shared_ptr<wifi_system::InterfaceTool> iface_tool,
- const std::shared_ptr<legacy_hal::WifiLegacyHal> legacy_hal,
+ const std::shared_ptr<legacy_hal::WifiLegacyHalFactory> legacy_hal_factory,
const std::shared_ptr<mode_controller::WifiModeController> mode_controller,
const std::shared_ptr<iface_util::WifiIfaceUtil> iface_util,
const std::shared_ptr<feature_flags::WifiFeatureFlags> feature_flags)
: iface_tool_(iface_tool),
- legacy_hal_(legacy_hal),
+ legacy_hal_factory_(legacy_hal_factory),
mode_controller_(mode_controller),
iface_util_(iface_util),
feature_flags_(feature_flags),
@@ -84,10 +84,16 @@
Return<void> Wifi::debug(const hidl_handle& handle,
const hidl_vec<hidl_string>&) {
LOG(INFO) << "-----------Debug is called----------------";
- if (!chip_.get()) {
+ if (chips_.size() == 0) {
return Void();
}
- return chip_->debug(handle, {});
+
+ for (sp<WifiChip> chip : chips_) {
+ if (!chip.get()) continue;
+
+ chip->debug(handle, {});
+ }
+ return Void();
}
WifiStatus Wifi::registerEventCallbackInternal(
@@ -120,10 +126,13 @@
};
// Create the chip instance once the HAL is started.
- // Need to consider the case of multiple chips TODO(156998862)
- chip_ =
- new WifiChip(kChipId, legacy_hal_, mode_controller_, iface_util_,
- feature_flags_, on_subsystem_restart_callback);
+ android::hardware::wifi::V1_0::ChipId chipId = kPrimaryChipId;
+ for (auto& hal : legacy_hals_) {
+ chips_.push_back(new WifiChip(
+ chipId, chipId == kPrimaryChipId, hal, mode_controller_,
+ iface_util_, feature_flags_, on_subsystem_restart_callback));
+ chipId++;
+ }
run_state_ = RunState::STARTED;
for (const auto& callback : event_cb_handler_.getCallbacks()) {
if (!callback->onStart().isOk()) {
@@ -154,10 +163,13 @@
}
// Clear the chip object and its child objects since the HAL is now
// stopped.
- if (chip_.get()) {
- chip_->invalidate();
- chip_.clear();
+ for (auto& chip : chips_) {
+ if (chip.get()) {
+ chip->invalidate();
+ chip.clear();
+ }
}
+ chips_.clear();
WifiStatus wifi_status = stopLegacyHalAndDeinitializeModeController(lock);
if (wifi_status.code == WifiStatusCode::SUCCESS) {
for (const auto& callback : event_cb_handler_.getCallbacks()) {
@@ -181,21 +193,23 @@
std::pair<WifiStatus, std::vector<ChipId>> Wifi::getChipIdsInternal() {
std::vector<ChipId> chip_ids;
- if (chip_.get()) {
- chip_ids.emplace_back(kChipId);
+
+ for (auto& chip : chips_) {
+ ChipId chip_id = getChipIdFromWifiChip(chip);
+ if (chip_id != UINT32_MAX) chip_ids.emplace_back(chip_id);
}
return {createWifiStatus(WifiStatusCode::SUCCESS), std::move(chip_ids)};
}
std::pair<WifiStatus, sp<V1_4::IWifiChip>> Wifi::getChipInternal(
ChipId chip_id) {
- if (!chip_.get()) {
- return {createWifiStatus(WifiStatusCode::ERROR_NOT_STARTED), nullptr};
+ for (auto& chip : chips_) {
+ ChipId cand_id = getChipIdFromWifiChip(chip);
+ if ((cand_id != UINT32_MAX) && (cand_id == chip_id))
+ return {createWifiStatus(WifiStatusCode::SUCCESS), chip};
}
- if (chip_id != kChipId) {
- return {createWifiStatus(WifiStatusCode::ERROR_INVALID_ARGS), nullptr};
- }
- return {createWifiStatus(WifiStatusCode::SUCCESS), chip_};
+
+ return {createWifiStatus(WifiStatusCode::ERROR_INVALID_ARGS), nullptr};
}
WifiStatus Wifi::initializeModeControllerAndLegacyHal() {
@@ -203,23 +217,46 @@
LOG(ERROR) << "Failed to initialize firmware mode controller";
return createWifiStatus(WifiStatusCode::ERROR_UNKNOWN);
}
- legacy_hal::wifi_error legacy_status = legacy_hal_->initialize();
- if (legacy_status != legacy_hal::WIFI_SUCCESS) {
- LOG(ERROR) << "Failed to initialize legacy HAL: "
- << legacyErrorToString(legacy_status);
- return createWifiStatusFromLegacyError(legacy_status);
+
+ legacy_hals_ = legacy_hal_factory_->getHals();
+ if (legacy_hals_.empty())
+ return createWifiStatus(WifiStatusCode::ERROR_UNKNOWN);
+ int index = 0; // for failure log
+ for (auto& hal : legacy_hals_) {
+ legacy_hal::wifi_error legacy_status = hal->initialize();
+ if (legacy_status != legacy_hal::WIFI_SUCCESS) {
+            // Currently WifiLegacyHal::initialize does not allocate extra
+            // memory, it only initializes the function table. If this
+            // changes, WifiLegacyHal::deinitialize needs to be implemented
+            // and called here for the HALs that were already initialized.
+ LOG(ERROR) << "Failed to initialize legacy HAL index: " << index
+ << " error: " << legacyErrorToString(legacy_status);
+ return createWifiStatusFromLegacyError(legacy_status);
+ }
+ index++;
}
return createWifiStatus(WifiStatusCode::SUCCESS);
}
WifiStatus Wifi::stopLegacyHalAndDeinitializeModeController(
/* NONNULL */ std::unique_lock<std::recursive_mutex>* lock) {
+ legacy_hal::wifi_error legacy_status = legacy_hal::WIFI_SUCCESS;
+ int index = 0;
+
run_state_ = RunState::STOPPING;
- legacy_hal::wifi_error legacy_status =
- legacy_hal_->stop(lock, [&]() { run_state_ = RunState::STOPPED; });
+ for (auto& hal : legacy_hals_) {
+ legacy_hal::wifi_error tmp = hal->stop(lock, [&]() {});
+ if (tmp != legacy_hal::WIFI_SUCCESS) {
+            LOG(ERROR) << "Failed to stop legacy HAL index: " << index
+                       << " error: " << legacyErrorToString(tmp);
+ legacy_status = tmp;
+ }
+ index++;
+ }
+ run_state_ = RunState::STOPPED;
+
if (legacy_status != legacy_hal::WIFI_SUCCESS) {
- LOG(ERROR) << "Failed to stop legacy HAL: "
- << legacyErrorToString(legacy_status);
+ LOG(ERROR) << "One or more legacy HALs failed to stop";
return createWifiStatusFromLegacyError(legacy_status);
}
if (!mode_controller_->deinitialize()) {
@@ -228,6 +265,19 @@
}
return createWifiStatus(WifiStatusCode::SUCCESS);
}
+
+ChipId Wifi::getChipIdFromWifiChip(sp<WifiChip>& chip) {
+ ChipId chip_id = UINT32_MAX;
+ if (chip.get()) {
+ chip->getId([&](WifiStatus status, uint32_t id) {
+ if (status.code == WifiStatusCode::SUCCESS) {
+ chip_id = id;
+ }
+ });
+ }
+
+ return chip_id;
+}
} // namespace implementation
} // namespace V1_5
} // namespace wifi
diff --git a/wifi/1.5/default/wifi.h b/wifi/1.5/default/wifi.h
index 8de0ef4..9f5a1b0 100644
--- a/wifi/1.5/default/wifi.h
+++ b/wifi/1.5/default/wifi.h
@@ -27,6 +27,7 @@
#include "wifi_chip.h"
#include "wifi_feature_flags.h"
#include "wifi_legacy_hal.h"
+#include "wifi_legacy_hal_factory.h"
#include "wifi_mode_controller.h"
namespace android {
@@ -41,7 +42,8 @@
class Wifi : public V1_5::IWifi {
public:
Wifi(const std::shared_ptr<wifi_system::InterfaceTool> iface_tool,
- const std::shared_ptr<legacy_hal::WifiLegacyHal> legacy_hal,
+ const std::shared_ptr<legacy_hal::WifiLegacyHalFactory>
+ legacy_hal_factory,
const std::shared_ptr<mode_controller::WifiModeController>
mode_controller,
const std::shared_ptr<iface_util::WifiIfaceUtil> iface_util,
@@ -75,16 +77,18 @@
WifiStatus initializeModeControllerAndLegacyHal();
WifiStatus stopLegacyHalAndDeinitializeModeController(
std::unique_lock<std::recursive_mutex>* lock);
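+    // Queries |chip| for its chip id; returns UINT32_MAX if the chip is null
+    // or the query fails.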
+ ChipId getChipIdFromWifiChip(sp<WifiChip>& chip);
// Instance is created in this root level |IWifi| HIDL interface object
// and shared with all the child HIDL interface objects.
std::shared_ptr<wifi_system::InterfaceTool> iface_tool_;
- std::shared_ptr<legacy_hal::WifiLegacyHal> legacy_hal_;
+ std::shared_ptr<legacy_hal::WifiLegacyHalFactory> legacy_hal_factory_;
std::shared_ptr<mode_controller::WifiModeController> mode_controller_;
+ std::vector<std::shared_ptr<legacy_hal::WifiLegacyHal>> legacy_hals_;
std::shared_ptr<iface_util::WifiIfaceUtil> iface_util_;
std::shared_ptr<feature_flags::WifiFeatureFlags> feature_flags_;
RunState run_state_;
- sp<WifiChip> chip_;
+ std::vector<sp<WifiChip>> chips_;
hidl_callback_util::HidlCallbackHandler<IWifiEventCallback>
event_cb_handler_;
diff --git a/wifi/1.5/default/wifi_chip.cpp b/wifi/1.5/default/wifi_chip.cpp
index 069fd65..ecf170a 100644
--- a/wifi/1.5/default/wifi_chip.cpp
+++ b/wifi/1.5/default/wifi_chip.cpp
@@ -281,9 +281,6 @@
continue;
}
std::string cur_file_name(dp->d_name);
- // string.size() does not include the null terminator. The cpio FreeBSD
- // file header expects the null character to be included in the length.
- const size_t file_name_len = cur_file_name.size() + 1;
struct stat st;
const std::string cur_file_path = kTombstoneFolderPath + cur_file_name;
if (stat(cur_file_path.c_str(), &st) == -1) {
@@ -297,8 +294,15 @@
n_error++;
continue;
}
+ std::string file_name_with_last_modified_time =
+ cur_file_name + "-" + std::to_string(st.st_mtime);
+ // string.size() does not include the null terminator. The cpio FreeBSD
+ // file header expects the null character to be included in the length.
+ const size_t file_name_len =
+ file_name_with_last_modified_time.size() + 1;
unique_fd file_auto_closer(fd_read);
- if (!cpioWriteHeader(out_fd, st, cur_file_name.c_str(),
+ if (!cpioWriteHeader(out_fd, st,
+ file_name_with_last_modified_time.c_str(),
file_name_len)) {
return ++n_error;
}
@@ -332,7 +336,8 @@
using hidl_return_util::validateAndCallWithLock;
WifiChip::WifiChip(
- ChipId chip_id, const std::weak_ptr<legacy_hal::WifiLegacyHal> legacy_hal,
+ ChipId chip_id, bool is_primary,
+ const std::weak_ptr<legacy_hal::WifiLegacyHal> legacy_hal,
const std::weak_ptr<mode_controller::WifiModeController> mode_controller,
const std::weak_ptr<iface_util::WifiIfaceUtil> iface_util,
const std::weak_ptr<feature_flags::WifiFeatureFlags> feature_flags,
@@ -343,7 +348,7 @@
iface_util_(iface_util),
is_valid_(true),
current_mode_id_(feature_flags::chip_mode_ids::kInvalid),
- modes_(feature_flags.lock()->getChipModes()),
+ modes_(feature_flags.lock()->getChipModes(is_primary)),
debug_ring_buffer_cb_registered_(false),
subsystemCallbackHandler_(handler) {
setActiveWlanIfaceNameProperty(kNoActiveWlanIfaceNamePropertyValue);
@@ -681,9 +686,10 @@
void WifiChip::invalidateAndRemoveDependencies(
const std::string& removed_iface_name) {
- for (const auto& nan_iface : nan_ifaces_) {
+ for (auto it = nan_ifaces_.begin(); it != nan_ifaces_.end();) {
+ auto nan_iface = *it;
if (nan_iface->getName() == removed_iface_name) {
- invalidateAndClear(nan_ifaces_, nan_iface);
+ nan_iface->invalidate();
for (const auto& callback : event_cb_handler_.getCallbacks()) {
if (!callback
->onIfaceRemoved(IfaceType::NAN, removed_iface_name)
@@ -691,11 +697,19 @@
LOG(ERROR) << "Failed to invoke onIfaceRemoved callback";
}
}
+ it = nan_ifaces_.erase(it);
+ } else {
+ ++it;
}
}
- for (const auto& rtt : rtt_controllers_) {
+
+ for (auto it = rtt_controllers_.begin(); it != rtt_controllers_.end();) {
+ auto rtt = *it;
if (rtt->getIfaceName() == removed_iface_name) {
- invalidateAndClear(rtt_controllers_, rtt);
+ rtt->invalidate();
+ it = rtt_controllers_.erase(it);
+ } else {
+ ++it;
}
}
}
@@ -1590,15 +1604,16 @@
// This could happen if the chip call is made before any STA/AP
// iface is created. Default to wlan0 for such cases.
LOG(WARNING) << "No active wlan interfaces in use! Using default";
- return getWlanIfaceName(0);
+ return getWlanIfaceNameWithType(IfaceType::STA, 0);
}
// Return the first wlan (wlan0, wlan1 etc.) starting from |start_idx|
// not already in use.
// Note: This doesn't check the actual presence of these interfaces.
-std::string WifiChip::allocateApOrStaIfaceName(uint32_t start_idx) {
+std::string WifiChip::allocateApOrStaIfaceName(IfaceType type,
+ uint32_t start_idx) {
for (unsigned idx = start_idx; idx < kMaxWlanIfaces; idx++) {
- const auto ifname = getWlanIfaceName(idx);
+ const auto ifname = getWlanIfaceNameWithType(type, idx);
if (findUsingName(ap_ifaces_, ifname)) continue;
if (findUsingName(sta_ifaces_, ifname)) continue;
return ifname;
@@ -1616,7 +1631,8 @@
if (!ifname.empty()) {
return ifname;
}
- return allocateApOrStaIfaceName((isStaApConcurrencyAllowedInCurrentMode() &&
+ return allocateApOrStaIfaceName(IfaceType::AP,
+ (isStaApConcurrencyAllowedInCurrentMode() &&
!isDualApAllowedInCurrentMode())
? 1
: 0);
@@ -1625,7 +1641,7 @@
// STA iface names start with idx 0.
// Primary STA iface will always be 0.
std::string WifiChip::allocateStaIfaceName() {
- return allocateApOrStaIfaceName(0);
+ return allocateApOrStaIfaceName(IfaceType::STA, 0);
}
bool WifiChip::writeRingbufferFilesInternal() {
@@ -1661,6 +1677,17 @@
return true;
}
+std::string WifiChip::getWlanIfaceNameWithType(IfaceType type, unsigned idx) {
+ std::string ifname;
+
+    // Let the legacy HAL override the interface name, if it provides one.
+ legacy_hal::wifi_error err =
+ legacy_hal_.lock()->getSupportedIfaceName((uint32_t)type, ifname);
+ if (err == legacy_hal::WIFI_SUCCESS) return ifname;
+
+ return getWlanIfaceName(idx);
+}
+
} // namespace implementation
} // namespace V1_5
} // namespace wifi
diff --git a/wifi/1.5/default/wifi_chip.h b/wifi/1.5/default/wifi_chip.h
index 36c191c..5f1d9e8 100644
--- a/wifi/1.5/default/wifi_chip.h
+++ b/wifi/1.5/default/wifi_chip.h
@@ -50,7 +50,7 @@
*/
class WifiChip : public V1_4::IWifiChip {
public:
- WifiChip(ChipId chip_id,
+ WifiChip(ChipId chip_id, bool is_primary,
const std::weak_ptr<legacy_hal::WifiLegacyHal> legacy_hal,
const std::weak_ptr<mode_controller::WifiModeController>
mode_controller,
@@ -256,10 +256,11 @@
bool isStaApConcurrencyAllowedInCurrentMode();
bool isDualApAllowedInCurrentMode();
std::string getFirstActiveWlanIfaceName();
- std::string allocateApOrStaIfaceName(uint32_t start_idx);
+ std::string allocateApOrStaIfaceName(IfaceType type, uint32_t start_idx);
std::string allocateApIfaceName();
std::string allocateStaIfaceName();
bool writeRingbufferFilesInternal();
+ std::string getWlanIfaceNameWithType(IfaceType type, unsigned idx);
ChipId chip_id_;
std::weak_ptr<legacy_hal::WifiLegacyHal> legacy_hal_;
diff --git a/wifi/1.5/default/wifi_feature_flags.cpp b/wifi/1.5/default/wifi_feature_flags.cpp
index 151d473..9f91bd7 100644
--- a/wifi/1.5/default/wifi_feature_flags.cpp
+++ b/wifi/1.5/default/wifi_feature_flags.cpp
@@ -139,6 +139,13 @@
ChipIfaceCombination::make_vec({WIFI_HAL_INTERFACE_COMBINATIONS_AP})},
#endif
};
+
+static const std::vector<IWifiChip::ChipMode> kChipModesSecondary{
+#ifdef WIFI_HAL_INTERFACE_COMBINATIONS_SECONDARY_CHIP
+ {chip_mode_ids::kV3, ChipIfaceCombination::make_vec(
+ {WIFI_HAL_INTERFACE_COMBINATIONS_SECONDARY_CHIP})},
+#endif
+};
#undef STA
#undef AP
#undef P2P
@@ -154,8 +161,9 @@
WifiFeatureFlags::WifiFeatureFlags() {}
-std::vector<IWifiChip::ChipMode> WifiFeatureFlags::getChipModes() {
- return kChipModes;
+std::vector<IWifiChip::ChipMode> WifiFeatureFlags::getChipModes(
+ bool is_primary) {
+ return (is_primary) ? kChipModes : kChipModesSecondary;
}
} // namespace feature_flags
diff --git a/wifi/1.5/default/wifi_feature_flags.h b/wifi/1.5/default/wifi_feature_flags.h
index 73d18ec..cb68b8c 100644
--- a/wifi/1.5/default/wifi_feature_flags.h
+++ b/wifi/1.5/default/wifi_feature_flags.h
@@ -42,7 +42,8 @@
WifiFeatureFlags();
virtual ~WifiFeatureFlags() = default;
- virtual std::vector<V1_0::IWifiChip::ChipMode> getChipModes();
+ virtual std::vector<V1_0::IWifiChip::ChipMode> getChipModes(
+ bool is_primary);
};
} // namespace feature_flags
diff --git a/wifi/1.5/default/wifi_legacy_hal.cpp b/wifi/1.5/default/wifi_legacy_hal.cpp
index 4070568..7d14e9d 100644
--- a/wifi/1.5/default/wifi_legacy_hal.cpp
+++ b/wifi/1.5/default/wifi_legacy_hal.cpp
@@ -36,7 +36,8 @@
static constexpr uint32_t kLinkLayerStatsDataMpduSizeThreshold = 128;
static constexpr uint32_t kMaxWakeReasonStatsArraySize = 32;
static constexpr uint32_t kMaxRingBuffers = 10;
-static constexpr uint32_t kMaxStopCompleteWaitMs = 100;
+// A long timeout (1000 ms) is needed for chips that unload their driver.
+static constexpr uint32_t kMaxStopCompleteWaitMs = 1000;
static constexpr char kDriverPropName[] = "wlan.driver.status";
// Helper function to create a non-const char* for legacy Hal API's.
@@ -54,6 +55,7 @@
namespace V1_5 {
namespace implementation {
namespace legacy_hal {
+
// Legacy HAL functions accept "C" style function pointers, so use global
// functions to pass to the legacy HAL function and store the corresponding
// std::function methods to be invoked.
@@ -344,26 +346,20 @@
// End of the free-standing "C" style callbacks.
WifiLegacyHal::WifiLegacyHal(
- const std::weak_ptr<wifi_system::InterfaceTool> iface_tool)
- : global_handle_(nullptr),
+ const std::weak_ptr<wifi_system::InterfaceTool> iface_tool,
+ const wifi_hal_fn& fn, bool is_primary)
+ : global_func_table_(fn),
+ global_handle_(nullptr),
awaiting_event_loop_termination_(false),
is_started_(false),
- iface_tool_(iface_tool) {}
+ iface_tool_(iface_tool),
+ is_primary_(is_primary) {}
wifi_error WifiLegacyHal::initialize() {
LOG(DEBUG) << "Initialize legacy HAL";
- // TODO: Add back the HAL Tool if we need to. All we need from the HAL tool
- // for now is this function call which we can directly call.
- if (!initHalFuncTableWithStubs(&global_func_table_)) {
- LOG(ERROR)
- << "Failed to initialize legacy hal function table with stubs";
- return WIFI_ERROR_UNKNOWN;
- }
- wifi_error status = init_wifi_vendor_hal_func_table(&global_func_table_);
- if (status != WIFI_SUCCESS) {
- LOG(ERROR) << "Failed to initialize legacy hal function table";
- }
- return status;
+    // This is now a no-op: the HAL function table is supplied to the
+    // constructor, so there is nothing left to initialize here.
+ return WIFI_SUCCESS;
}
wifi_error WifiLegacyHal::start() {
@@ -376,17 +372,21 @@
}
LOG(DEBUG) << "Waiting for the driver ready";
wifi_error status = global_func_table_.wifi_wait_for_driver_ready();
- if (status == WIFI_ERROR_TIMED_OUT) {
- LOG(ERROR) << "Timed out awaiting driver ready";
+ if (status == WIFI_ERROR_TIMED_OUT || status == WIFI_ERROR_UNKNOWN) {
+ LOG(ERROR) << "Failed or timed out awaiting driver ready";
return status;
}
- property_set(kDriverPropName, "ok");
+
+ if (is_primary_) {
+ property_set(kDriverPropName, "ok");
+
+ if (!iface_tool_.lock()->SetWifiUpState(true)) {
+ LOG(ERROR) << "Failed to set WiFi interface up";
+ return WIFI_ERROR_UNKNOWN;
+ }
+ }
LOG(DEBUG) << "Starting legacy HAL";
- if (!iface_tool_.lock()->SetWifiUpState(true)) {
- LOG(ERROR) << "Failed to set WiFi interface up";
- return WIFI_ERROR_UNKNOWN;
- }
status = global_func_table_.wifi_initialize(&global_handle_);
if (status != WIFI_SUCCESS || !global_handle_) {
LOG(ERROR) << "Failed to retrieve global handle";
@@ -419,7 +419,7 @@
// Invalidate all the internal pointers now that the HAL is
// stopped.
invalidate();
- iface_tool_.lock()->SetWifiUpState(false);
+ if (is_primary_) iface_tool_.lock()->SetWifiUpState(false);
on_stop_complete_user_callback();
is_started_ = false;
};
@@ -488,12 +488,21 @@
std::pair<wifi_error, uint32_t> WifiLegacyHal::getSupportedFeatureSet(
const std::string& iface_name) {
- feature_set set;
+ feature_set set = 0, chip_set = 0;
+ wifi_error status = WIFI_SUCCESS;
+
static_assert(sizeof(set) == sizeof(uint64_t),
"Some feature_flags can not be represented in output");
- wifi_error status = global_func_table_.wifi_get_supported_feature_set(
- getIfaceHandle(iface_name), &set);
- return {status, static_cast<uint32_t>(set)};
+ wifi_interface_handle iface_handle = getIfaceHandle(iface_name);
+
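+    // Query chip-wide features first; per-iface features are OR'ed in below.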
+ global_func_table_.wifi_get_chip_feature_set(
+ global_handle_, &chip_set); /* ignore error, chip_set will stay 0 */
+
+ if (iface_handle) {
+ status = global_func_table_.wifi_get_supported_feature_set(iface_handle,
+ &set);
+ }
+ return {status, static_cast<uint32_t>(set | chip_set)};
}
std::pair<wifi_error, PacketFilterCapabilities>
@@ -845,10 +854,15 @@
std::pair<wifi_error, uint32_t> WifiLegacyHal::getLoggerSupportedFeatureSet(
const std::string& iface_name) {
- uint32_t supported_feature_flags;
- wifi_error status =
- global_func_table_.wifi_get_logger_supported_feature_set(
- getIfaceHandle(iface_name), &supported_feature_flags);
+ uint32_t supported_feature_flags = 0;
+ wifi_error status = WIFI_SUCCESS;
+
+ wifi_interface_handle iface_handle = getIfaceHandle(iface_name);
+
+ if (iface_handle) {
+ status = global_func_table_.wifi_get_logger_supported_feature_set(
+ iface_handle, &supported_feature_flags);
+ }
return {status, supported_feature_flags};
}
@@ -1485,6 +1499,17 @@
return status;
}
+wifi_error WifiLegacyHal::getSupportedIfaceName(uint32_t iface_type,
+ std::string& ifname) {
+ std::array<char, IFNAMSIZ> buffer;
+
+ wifi_error res = global_func_table_.wifi_get_supported_iface_name(
+ global_handle_, (uint32_t)iface_type, buffer.data(), buffer.size());
+ if (res == WIFI_SUCCESS) ifname = buffer.data();
+
+ return res;
+}
+
void WifiLegacyHal::invalidate() {
global_handle_ = nullptr;
iface_name_to_handle_.clear();
diff --git a/wifi/1.5/default/wifi_legacy_hal.h b/wifi/1.5/default/wifi_legacy_hal.h
index ae520a8..2984a00 100644
--- a/wifi/1.5/default/wifi_legacy_hal.h
+++ b/wifi/1.5/default/wifi_legacy_hal.h
@@ -173,7 +173,8 @@
*/
class WifiLegacyHal {
public:
- WifiLegacyHal(const std::weak_ptr<wifi_system::InterfaceTool> iface_tool);
+ WifiLegacyHal(const std::weak_ptr<wifi_system::InterfaceTool> iface_tool,
+ const wifi_hal_fn& fn, bool is_primary);
virtual ~WifiLegacyHal() = default;
// Initialize the legacy HAL function table.
@@ -379,6 +380,7 @@
virtual wifi_error createVirtualInterface(const std::string& ifname,
wifi_interface_type iftype);
virtual wifi_error deleteVirtualInterface(const std::string& ifname);
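+    // Queries the vendor HAL for its preferred name for an interface of
+    // |iface_type|. On success |ifname| is filled in; otherwise callers fall
+    // back to the default wlan<idx> naming.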
+ wifi_error getSupportedIfaceName(uint32_t iface_type, std::string& ifname);
private:
// Retrieve interface handles for all the available interfaces.
@@ -408,6 +410,11 @@
// Flag to indicate if the legacy HAL has been started.
bool is_started_;
std::weak_ptr<wifi_system::InterfaceTool> iface_tool_;
+    // Flag to indicate if this HAL is for the primary chip. It is used to
+    // avoid hard-coded behavior tied to older HALs, such as bringing the
+    // wlan0 interface up/down when the HAL is started/stopped. It may be
+    // removed once vendor HALs are updated.
+ bool is_primary_;
};
} // namespace legacy_hal
diff --git a/wifi/1.5/default/wifi_legacy_hal_factory.cpp b/wifi/1.5/default/wifi_legacy_hal_factory.cpp
new file mode 100644
index 0000000..fbaa284
--- /dev/null
+++ b/wifi/1.5/default/wifi_legacy_hal_factory.cpp
@@ -0,0 +1,263 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <dirent.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+
+#include <android-base/logging.h>
+#include <dlfcn.h>
+#include <libxml/parser.h>
+#include <libxml/tree.h>
+#include <libxml/xmlmemory.h>
+
+#include "wifi_legacy_hal_factory.h"
+#include "wifi_legacy_hal_stubs.h"
+
+namespace {
+static constexpr char kVendorHalsDescPath[] = "/vendor/etc/wifi/vendor_hals";
+static constexpr char kVendorHalsDescExt[] = ".xml";
+static constexpr uint32_t kVendorHalsDescVersion = 1;
+
+bool isDirectory(struct dirent* entryPtr) {
+ bool isDir = false;
+ if (entryPtr->d_type != DT_UNKNOWN && entryPtr->d_type != DT_LNK) {
+ isDir = (entryPtr->d_type == DT_DIR);
+ } else {
+ struct stat entryStat;
+ stat(entryPtr->d_name, &entryStat);
+ isDir = S_ISDIR(entryStat.st_mode);
+ }
+ return isDir;
+}
+
+bool isFileExtension(const char* name, const char* ext) {
+ if (name == NULL) return false;
+ if (ext == NULL) return false;
+
+ size_t extLen = strlen(ext);
+ size_t nameLen = strlen(name);
+
+ if (extLen > nameLen) return false;
+
+ if (strncmp(name + nameLen - extLen, ext, extLen) != 0) return false;
+
+ return true;
+}
+}; // namespace
+
+namespace android {
+namespace hardware {
+namespace wifi {
+namespace V1_5 {
+namespace implementation {
+namespace legacy_hal {
+
+WifiLegacyHalFactory::WifiLegacyHalFactory(
+ const std::weak_ptr<wifi_system::InterfaceTool> iface_tool)
+ : iface_tool_(iface_tool) {}
+
+std::vector<std::shared_ptr<WifiLegacyHal>> WifiLegacyHalFactory::getHals() {
+ if (legacy_hals_.empty()) {
+ if (!initVendorHalDescriptorFromLinked())
+ initVendorHalsDescriptorList();
+ for (auto& desc : descs_) {
+ std::shared_ptr<WifiLegacyHal> hal =
+ std::make_shared<WifiLegacyHal>(iface_tool_, desc.fn,
+ desc.primary);
+ legacy_hals_.push_back(hal);
+ }
+ }
+
+ return legacy_hals_;
+}
+
+bool WifiLegacyHalFactory::initVendorHalDescriptorFromLinked() {
+ wifi_hal_lib_desc desc;
+
+ if (!initLinkedHalFunctionTable(&desc.fn)) return false;
+
+ desc.primary = true;
+ desc.handle = NULL;
+ descs_.push_back(desc);
+ return true;
+}
+
+bool WifiLegacyHalFactory::initLinkedHalFunctionTable(wifi_hal_fn* hal_fn) {
+ init_wifi_vendor_hal_func_table_t initfn;
+
+ initfn = (init_wifi_vendor_hal_func_table_t)dlsym(
+ RTLD_DEFAULT, "init_wifi_vendor_hal_func_table");
+ if (!initfn) {
+ LOG(INFO) << "no vendor HAL library linked, will try dynamic load";
+ return false;
+ }
+
+ if (!initHalFuncTableWithStubs(hal_fn)) {
+ LOG(ERROR) << "Can not initialize the basic function pointer table";
+ return false;
+ }
+
+ if (initfn(hal_fn) != WIFI_SUCCESS) {
+ LOG(ERROR) << "Can not initialize the vendor function pointer table";
+ return false;
+ }
+
+ return true;
+}
+
+/*
+ * Overall structure of the HAL descriptor XML schema
+ *
+ * <?xml version="1.0" encoding="UTF-8"?>
+ * <WifiVendorHal version="1">
+ * <path>/vendor/lib64/libwifi-hal-qcom.so</path>
+ * <primary>1</primary>
+ * </WifiVendorHal>
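+ *
+ * A hypothetical descriptor for an additional, non-primary chip placed in
+ * /vendor/etc/wifi/vendor_hals/ would use the same schema, with <primary>
+ * set to 0 and the path of that chip's HAL library (the library name below
+ * is illustrative):
+ *
+ * <?xml version="1.0" encoding="UTF-8"?>
+ * <WifiVendorHal version="1">
+ *     <path>/vendor/lib64/libwifi-hal-secondary.so</path>
+ *     <primary>0</primary>
+ * </WifiVendorHal>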
+ */
+void WifiLegacyHalFactory::initVendorHalsDescriptorList() {
+ xmlDocPtr xml;
+ xmlNodePtr node, cnode;
+ char* version;
+ std::string path;
+ xmlChar* value;
+ wifi_hal_lib_desc desc;
+
+    LOG(INFO) << "processing vendor HAL descriptions in "
+ << kVendorHalsDescPath;
+ DIR* dirPtr = ::opendir(kVendorHalsDescPath);
+ if (dirPtr == NULL) {
+ LOG(ERROR) << "failed to open " << kVendorHalsDescPath;
+ return;
+ }
+ for (struct dirent* entryPtr = ::readdir(dirPtr); entryPtr != NULL;
+ entryPtr = ::readdir(dirPtr)) {
+ if (isDirectory(entryPtr)) continue;
+
+ if (!isFileExtension(entryPtr->d_name, kVendorHalsDescExt))
+ continue; // only process .xml files
+
+ LOG(INFO) << "processing config file: " << entryPtr->d_name;
+
+ std::string fullPath(kVendorHalsDescPath);
+ fullPath.append("/");
+ fullPath.append(entryPtr->d_name);
+ xml = xmlReadFile(fullPath.c_str(), "UTF-8", XML_PARSE_RECOVER);
+ if (!xml) {
+ LOG(ERROR) << "failed to parse: " << entryPtr->d_name
+ << " skipping...";
+ continue;
+ }
+ node = xmlDocGetRootElement(xml);
+ if (!node) {
+ LOG(ERROR) << "empty config file: " << entryPtr->d_name
+ << " skipping...";
+ goto skip;
+ }
+ if (xmlStrcmp(node->name, BAD_CAST "WifiVendorHal")) {
+ LOG(ERROR) << "bad config, root element not WifiVendorHal: "
+ << entryPtr->d_name << " skipping...";
+ goto skip;
+ }
+ version = (char*)xmlGetProp(node, BAD_CAST "version");
+ if (!version || strtoul(version, NULL, 0) != kVendorHalsDescVersion) {
+            LOG(ERROR) << "conf file: " << entryPtr->d_name
+                       << " must have version: " << kVendorHalsDescVersion
+                       << ", skipping...";
+ goto skip;
+ }
+ cnode = node->children;
+ path.clear();
+ desc.primary = false;
+ while (cnode) {
+ if (!xmlStrcmp(cnode->name, BAD_CAST "path")) {
+ value = xmlNodeListGetString(xml, cnode->children, 1);
+ if (value) path = (char*)value;
+ xmlFree(value);
+ } else if (!xmlStrcmp(cnode->name, BAD_CAST "primary")) {
+ value = xmlNodeListGetString(xml, cnode->children, 1);
+ desc.primary = !xmlStrcmp(value, BAD_CAST "1");
+ xmlFree(value);
+ }
+ cnode = cnode->next;
+ }
+ if (path.empty()) {
+ LOG(ERROR) << "hal library path not provided in: "
+ << entryPtr->d_name << ", skipping...";
+ goto skip;
+ }
+ if (loadVendorHalLib(path, desc)) {
+ if (desc.primary)
+ descs_.insert(descs_.begin(), desc);
+ else
+ descs_.push_back(desc);
+ }
+ skip:
+ xmlFreeDoc(xml);
+ }
+ ::closedir(dirPtr);
+}
+
+bool WifiLegacyHalFactory::loadVendorHalLib(const std::string& path,
+ wifi_hal_lib_desc& desc) {
+ void* h = dlopen(path.c_str(), RTLD_NOW | RTLD_LOCAL);
+ init_wifi_vendor_hal_func_table_t initfn;
+ wifi_error res;
+
+ if (!h) {
+ LOG(ERROR) << "failed to open vendor hal library: " << path;
+ return false;
+ }
+ initfn = (init_wifi_vendor_hal_func_table_t)dlsym(
+ h, "init_wifi_vendor_hal_func_table");
+ if (!initfn) {
+ LOG(ERROR) << "init_wifi_vendor_hal_func_table not found in: " << path;
+ goto out_err;
+ }
+
+ if (!initHalFuncTableWithStubs(&desc.fn)) {
+ LOG(ERROR) << "Can not initialize the basic function pointer table";
+ goto out_err;
+ }
+ res = initfn(&desc.fn);
+ if (res != WIFI_SUCCESS) {
+ LOG(ERROR) << "failed to initialize the vendor func table in: " << path
+ << " error: " << res;
+ goto out_err;
+ }
+
+ res = desc.fn.wifi_early_initialize();
+ // vendor HALs which do not implement early_initialize will return
+ // WIFI_ERROR_NOT_SUPPORTED, treat this as success.
+ if (res != WIFI_SUCCESS && res != WIFI_ERROR_NOT_SUPPORTED) {
+ LOG(ERROR) << "early initialization failed in: " << path
+ << " error: " << res;
+ goto out_err;
+ }
+
+ desc.handle = h;
+ return true;
+out_err:
+ dlclose(h);
+ return false;
+}
+
+} // namespace legacy_hal
+} // namespace implementation
+} // namespace V1_5
+} // namespace wifi
+} // namespace hardware
+} // namespace android
diff --git a/wifi/1.5/default/wifi_legacy_hal_factory.h b/wifi/1.5/default/wifi_legacy_hal_factory.h
new file mode 100644
index 0000000..e3440fa
--- /dev/null
+++ b/wifi/1.5/default/wifi_legacy_hal_factory.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WIFI_LEGACY_HAL_FACTORY_H_
+#define WIFI_LEGACY_HAL_FACTORY_H_
+
+#include <wifi_system/interface_tool.h>
+
+#include "wifi_legacy_hal.h"
+
+namespace android {
+namespace hardware {
+namespace wifi {
+namespace V1_5 {
+namespace implementation {
+// This is in a separate namespace to prevent typename conflicts between
+// the legacy HAL types and the HIDL interface types.
+namespace legacy_hal {
+/**
+ * Class that creates WifiLegacyHal objects for vendor HALs in the system.
+ */
+class WifiLegacyHalFactory {
+ public:
+ WifiLegacyHalFactory(
+ const std::weak_ptr<wifi_system::InterfaceTool> iface_tool);
+ virtual ~WifiLegacyHalFactory() = default;
+
+ std::vector<std::shared_ptr<WifiLegacyHal>> getHals();
+
+ private:
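+    // Describes one vendor HAL implementation: its function table, whether it
+    // drives the primary chip, and the dlopen() handle (null when the HAL is
+    // statically linked into this process).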
+ typedef struct {
+ wifi_hal_fn fn;
+ bool primary;
+ void* handle;
+ } wifi_hal_lib_desc;
+
+ bool initVendorHalDescriptorFromLinked();
+ void initVendorHalsDescriptorList();
+ bool initLinkedHalFunctionTable(wifi_hal_fn* hal_fn);
+ bool loadVendorHalLib(const std::string& path, wifi_hal_lib_desc& desc);
+
+ std::weak_ptr<wifi_system::InterfaceTool> iface_tool_;
+ std::vector<wifi_hal_lib_desc> descs_;
+ std::vector<std::shared_ptr<WifiLegacyHal>> legacy_hals_;
+};
+
+} // namespace legacy_hal
+} // namespace implementation
+} // namespace V1_5
+} // namespace wifi
+} // namespace hardware
+} // namespace android
+
+#endif // WIFI_LEGACY_HAL_FACTORY_H_
diff --git a/wifi/1.5/default/wifi_legacy_hal_stubs.cpp b/wifi/1.5/default/wifi_legacy_hal_stubs.cpp
index 73b5856..a1122e9 100644
--- a/wifi/1.5/default/wifi_legacy_hal_stubs.cpp
+++ b/wifi/1.5/default/wifi_legacy_hal_stubs.cpp
@@ -144,6 +144,10 @@
populateStubFor(&hal_fn->wifi_map_dscp_access_category);
populateStubFor(&hal_fn->wifi_reset_dscp_mapping);
populateStubFor(&hal_fn->wifi_set_subsystem_restart_handler);
+ populateStubFor(&hal_fn->wifi_get_supported_iface_name);
+ populateStubFor(&hal_fn->wifi_early_initialize);
+ populateStubFor(&hal_fn->wifi_get_chip_feature_set);
+
return true;
}
} // namespace legacy_hal
diff --git a/wifi/hostapd/1.1/vts/functional/Android.bp b/wifi/hostapd/1.1/vts/functional/Android.bp
index 291eceb..61a8dfd 100644
--- a/wifi/hostapd/1.1/vts/functional/Android.bp
+++ b/wifi/hostapd/1.1/vts/functional/Android.bp
@@ -25,11 +25,15 @@
"VtsHalWifiHostapdV1_0TargetTestUtil",
"android.hardware.wifi.hostapd@1.0",
"android.hardware.wifi.hostapd@1.1",
+ "android.hardware.wifi.hostapd@1.2",
+ "android.hardware.wifi.hostapd@1.3",
"android.hardware.wifi@1.0",
"libgmock",
"libwifi-system",
"libwifi-system-iface",
],
- test_suites: ["general-tests", "vts"],
+ test_suites: [
+ "general-tests",
+ "vts",
+ ],
}
-
diff --git a/wifi/hostapd/1.1/vts/functional/hostapd_hidl_test.cpp b/wifi/hostapd/1.1/vts/functional/hostapd_hidl_test.cpp
index 345cf31..0162a99 100644
--- a/wifi/hostapd/1.1/vts/functional/hostapd_hidl_test.cpp
+++ b/wifi/hostapd/1.1/vts/functional/hostapd_hidl_test.cpp
@@ -23,6 +23,7 @@
#include <android/hardware/wifi/1.0/IWifi.h>
#include <android/hardware/wifi/hostapd/1.1/IHostapd.h>
+#include <android/hardware/wifi/hostapd/1.3/IHostapd.h>
#include "hostapd_hidl_call_util.h"
#include "hostapd_hidl_test_utils.h"
@@ -43,6 +44,7 @@
constexpr char kNwPassphrase[] = "test12345";
constexpr int kIfaceChannel = 6;
constexpr int kIfaceInvalidChannel = 567;
+
} // namespace
class HostapdHidlTest
@@ -170,10 +172,17 @@
}
};
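+// Returns true when the hostapd service under test also implements the 1.3
+// interface.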
+bool is_1_3(const sp<IHostapd>& hostapd) {
+ sp<::android::hardware::wifi::hostapd::V1_3::IHostapd> hostapd_1_3 =
+ ::android::hardware::wifi::hostapd::V1_3::IHostapd::castFrom(hostapd);
+ return hostapd_1_3.get() != nullptr;
+}
+
/*
* RegisterCallback
*/
TEST_P(HostapdHidlTest, registerCallback) {
+    if (is_1_3(hostapd_)) GTEST_SKIP() << "Skip: service is HIDL 1.3 or newer";
hostapd_->registerCallback(
new IfaceCallback(), [](const HostapdStatus& status) {
EXPECT_EQ(HostapdStatusCode::SUCCESS, status.code);
diff --git a/wifi/hostapd/1.3/Android.bp b/wifi/hostapd/1.3/Android.bp
new file mode 100644
index 0000000..aef3267
--- /dev/null
+++ b/wifi/hostapd/1.3/Android.bp
@@ -0,0 +1,22 @@
+// This file is autogenerated by hidl-gen -Landroidbp.
+
+hidl_interface {
+ name: "android.hardware.wifi.hostapd@1.3",
+ root: "android.hardware",
+ srcs: [
+ "types.hal",
+ "IHostapd.hal",
+ "IHostapdCallback.hal",
+ ],
+ interfaces: [
+ "android.hardware.wifi.hostapd@1.0",
+ "android.hardware.wifi.hostapd@1.1",
+ "android.hardware.wifi.hostapd@1.2",
+ "android.hardware.wifi.supplicant@1.0",
+ "android.hidl.base@1.0",
+ ],
+ gen_java: true,
+ apex_available: [
+ "com.android.wifi",
+ ],
+}
diff --git a/wifi/hostapd/1.3/IHostapd.hal b/wifi/hostapd/1.3/IHostapd.hal
new file mode 100644
index 0000000..7ac20e4
--- /dev/null
+++ b/wifi/hostapd/1.3/IHostapd.hal
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.wifi.hostapd@1.3;
+
+import @1.2::IHostapd;
+import @1.2::HostapdStatus;
+
+import IHostapdCallback;
+/**
+ * Top-level object for managing SoftAPs.
+ */
+interface IHostapd extends @1.2::IHostapd {
+ /**
+ * Register for callbacks from the hostapd service.
+ *
+ * These callbacks are invoked for global events that are not specific
+ * to any interface or network. Registration of multiple callback
+ * objects is supported. These objects must be deleted when the corresponding
+ * client process is dead.
+ *
+ * @param callback An instance of the |IHostapdCallback| HIDL interface
+ * object.
+ * @return status Status of the operation.
+ * Possible status codes:
+ * |HostapdStatusCode.SUCCESS|,
+ * |HostapdStatusCode.FAILURE_UNKNOWN|
+ */
+ registerCallback_1_3(IHostapdCallback callback)
+ generates (HostapdStatus status);
+};
diff --git a/wifi/hostapd/1.3/IHostapdCallback.hal b/wifi/hostapd/1.3/IHostapdCallback.hal
new file mode 100644
index 0000000..5202178
--- /dev/null
+++ b/wifi/hostapd/1.3/IHostapdCallback.hal
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.wifi.hostapd@1.3;
+
+import @1.1::IHostapdCallback;
+import Generation;
+
+/**
+ * Top-level callback object for managing SoftAPs.
+ */
+interface IHostapdCallback extends @1.1::IHostapdCallback {
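+    /**
+     * Invoked to report the wifi generation of an AP interface.
+     *
+     * @param ifaceName Name of the AP interface.
+     * @param generation Wifi generation of the AP interface.
+     */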
+ oneway onInterfaceInfoChanged(string ifaceName, Generation generation);
+};
diff --git a/wifi/hostapd/1.3/types.hal b/wifi/hostapd/1.3/types.hal
new file mode 100644
index 0000000..ce092c0
--- /dev/null
+++ b/wifi/hostapd/1.3/types.hal
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.wifi.hostapd@1.3;
+
+/**
+ * The wifi generation that the AP resides on.
+ * It depends on the hw mode and the HT/VHT capabilities in hostapd.
+ *
+ * WIFI_STANDARD_LEGACY = (hw_mode is HOSTAPD_MODE_IEEE80211B) or
+ * (hw_mode is HOSTAPD_MODE_IEEE80211G and HT is 0).
+ * WIFI_STANDARD_11N = [hw_mode is HOSTAPD_MODE_IEEE80211G and (HT is 1 or HT40 is 1)] or
+ * [hw_mode is HOSTAPD_MODE_IEEE80211A and VHT is 0].
+ * WIFI_STANDARD_11AC = hw_mode is HOSTAPD_MODE_IEEE80211A and VHT is 1.
+ * WIFI_STANDARD_11AX = hw_mode is HOSTAPD_MODE_IEEE80211AX.
+ */
+enum Generation : uint32_t {
+ WIFI_STANDARD_UNKNOWN = -1,
+ WIFI_STANDARD_LEGACY = 0,
+ WIFI_STANDARD_11N = 1,
+ WIFI_STANDARD_11AC = 2,
+ WIFI_STANDARD_11AX = 3,
+};
+
diff --git a/wifi/supplicant/1.0/vts/functional/supplicant_hidl_test.cpp b/wifi/supplicant/1.0/vts/functional/supplicant_hidl_test.cpp
index 4f25465..47f0394 100644
--- a/wifi/supplicant/1.0/vts/functional/supplicant_hidl_test.cpp
+++ b/wifi/supplicant/1.0/vts/functional/supplicant_hidl_test.cpp
@@ -40,16 +40,22 @@
virtual void SetUp() override {
wifi_instance_name_ = std::get<0>(GetParam());
supplicant_instance_name_ = std::get<1>(GetParam());
+ isP2pOn_ =
+ testing::deviceSupportsFeature("android.hardware.wifi.direct");
+ // Stop Framework
+ std::system("/system/bin/stop");
stopSupplicant(wifi_instance_name_);
startSupplicantAndWaitForHidlService(wifi_instance_name_,
supplicant_instance_name_);
- isP2pOn_ =
- testing::deviceSupportsFeature("android.hardware.wifi.direct");
supplicant_ = getSupplicant(supplicant_instance_name_, isP2pOn_);
ASSERT_NE(supplicant_.get(), nullptr);
}
- virtual void TearDown() override { stopSupplicant(wifi_instance_name_); }
+ virtual void TearDown() override {
+ stopSupplicant(wifi_instance_name_);
+ // Start Framework
+ std::system("/system/bin/start");
+ }
protected:
// ISupplicant object used for all tests in this fixture.
diff --git a/wifi/supplicant/1.0/vts/functional/supplicant_p2p_iface_hidl_test.cpp b/wifi/supplicant/1.0/vts/functional/supplicant_p2p_iface_hidl_test.cpp
index 8d6f38d..c333c4f 100644
--- a/wifi/supplicant/1.0/vts/functional/supplicant_p2p_iface_hidl_test.cpp
+++ b/wifi/supplicant/1.0/vts/functional/supplicant_p2p_iface_hidl_test.cpp
@@ -77,11 +77,13 @@
virtual void SetUp() override {
wifi_instance_name_ = std::get<0>(GetParam());
supplicant_instance_name_ = std::get<1>(GetParam());
+ isP2pOn_ =
+ testing::deviceSupportsFeature("android.hardware.wifi.direct");
+ // Stop Framework
+ std::system("/system/bin/stop");
stopSupplicant(wifi_instance_name_);
startSupplicantAndWaitForHidlService(wifi_instance_name_,
supplicant_instance_name_);
- isP2pOn_ =
- testing::deviceSupportsFeature("android.hardware.wifi.direct");
supplicant_ = getSupplicant(supplicant_instance_name_, isP2pOn_);
EXPECT_TRUE(turnOnExcessiveLogging(supplicant_));
p2p_iface_ = getSupplicantP2pIface(supplicant_);
@@ -91,7 +93,11 @@
memcpy(peer_mac_addr_.data(), kTestPeerMacAddr, peer_mac_addr_.size());
}
- virtual void TearDown() override { stopSupplicant(wifi_instance_name_); }
+ virtual void TearDown() override {
+ stopSupplicant(wifi_instance_name_);
+ // Start Framework
+ std::system("/system/bin/start");
+ }
protected:
bool isP2pOn_ = false;
diff --git a/wifi/supplicant/1.0/vts/functional/supplicant_sta_iface_hidl_test.cpp b/wifi/supplicant/1.0/vts/functional/supplicant_sta_iface_hidl_test.cpp
index 089b3cd..ff28754 100644
--- a/wifi/supplicant/1.0/vts/functional/supplicant_sta_iface_hidl_test.cpp
+++ b/wifi/supplicant/1.0/vts/functional/supplicant_sta_iface_hidl_test.cpp
@@ -72,11 +72,13 @@
virtual void SetUp() override {
wifi_instance_name_ = std::get<0>(GetParam());
supplicant_instance_name_ = std::get<1>(GetParam());
+ isP2pOn_ =
+ testing::deviceSupportsFeature("android.hardware.wifi.direct");
+ // Stop Framework
+ std::system("/system/bin/stop");
stopSupplicant(wifi_instance_name_);
startSupplicantAndWaitForHidlService(wifi_instance_name_,
supplicant_instance_name_);
- isP2pOn_ =
- testing::deviceSupportsFeature("android.hardware.wifi.direct");
supplicant_ = getSupplicant(supplicant_instance_name_, isP2pOn_);
EXPECT_TRUE(turnOnExcessiveLogging(supplicant_));
sta_iface_ = getSupplicantStaIface(supplicant_);
@@ -85,7 +87,11 @@
memcpy(mac_addr_.data(), kTestMacAddr, mac_addr_.size());
}
- virtual void TearDown() override { stopSupplicant(wifi_instance_name_); }
+ virtual void TearDown() override {
+ stopSupplicant(wifi_instance_name_);
+ // Start Framework
+ std::system("/system/bin/start");
+ }
protected:
bool isP2pOn_ = false;
diff --git a/wifi/supplicant/1.0/vts/functional/supplicant_sta_network_hidl_test.cpp b/wifi/supplicant/1.0/vts/functional/supplicant_sta_network_hidl_test.cpp
index 5467e02..295ebfb 100644
--- a/wifi/supplicant/1.0/vts/functional/supplicant_sta_network_hidl_test.cpp
+++ b/wifi/supplicant/1.0/vts/functional/supplicant_sta_network_hidl_test.cpp
@@ -85,11 +85,13 @@
virtual void SetUp() override {
wifi_instance_name_ = std::get<0>(GetParam());
supplicant_instance_name_ = std::get<1>(GetParam());
+ isP2pOn_ =
+ testing::deviceSupportsFeature("android.hardware.wifi.direct");
+ // Stop Framework
+ std::system("/system/bin/stop");
stopSupplicant(wifi_instance_name_);
startSupplicantAndWaitForHidlService(wifi_instance_name_,
supplicant_instance_name_);
- isP2pOn_ =
- testing::deviceSupportsFeature("android.hardware.wifi.direct");
supplicant_ = getSupplicant(supplicant_instance_name_, isP2pOn_);
EXPECT_TRUE(turnOnExcessiveLogging(supplicant_));
sta_network_ = createSupplicantStaNetwork(supplicant_);
@@ -103,7 +105,11 @@
ssid_.assign(kTestSsidStr, kTestSsidStr + strlen(kTestSsidStr));
}
- virtual void TearDown() override { stopSupplicant(wifi_instance_name_); }
+ virtual void TearDown() override {
+ stopSupplicant(wifi_instance_name_);
+ // Start Framework
+ std::system("/system/bin/start");
+ }
protected:
void removeNetwork() {
diff --git a/wifi/supplicant/1.1/vts/functional/supplicant_hidl_test_utils_1_1.h b/wifi/supplicant/1.1/vts/functional/supplicant_hidl_test_utils_1_1.h
index 3629882..2104794 100644
--- a/wifi/supplicant/1.1/vts/functional/supplicant_hidl_test_utils_1_1.h
+++ b/wifi/supplicant/1.1/vts/functional/supplicant_hidl_test_utils_1_1.h
@@ -44,6 +44,8 @@
supplicant_v1_1_instance_name_ = std::get<1>(GetParam());
isP2pOn_ =
testing::deviceSupportsFeature("android.hardware.wifi.direct");
+ // Stop Framework
+ std::system("/system/bin/stop");
stopSupplicant(wifi_v1_0_instance_name_);
startSupplicantAndWaitForHidlService(wifi_v1_0_instance_name_,
supplicant_v1_1_instance_name_);
@@ -54,6 +56,8 @@
virtual void TearDown() override {
stopSupplicant(wifi_v1_0_instance_name_);
+ // Start Framework
+ std::system("/system/bin/start");
}
protected:
diff --git a/wifi/supplicant/1.2/vts/functional/supplicant_hidl_test_utils_1_2.h b/wifi/supplicant/1.2/vts/functional/supplicant_hidl_test_utils_1_2.h
index 5ecfdd4..2a432d0 100644
--- a/wifi/supplicant/1.2/vts/functional/supplicant_hidl_test_utils_1_2.h
+++ b/wifi/supplicant/1.2/vts/functional/supplicant_hidl_test_utils_1_2.h
@@ -50,6 +50,8 @@
supplicant_v1_2_instance_name_ = std::get<1>(GetParam());
isP2pOn_ =
testing::deviceSupportsFeature("android.hardware.wifi.direct");
+ // Stop Framework
+ std::system("/system/bin/stop");
stopSupplicant(wifi_v1_0_instance_name_);
startSupplicantAndWaitForHidlService(wifi_v1_0_instance_name_,
supplicant_v1_2_instance_name_);
@@ -61,6 +63,8 @@
virtual void TearDown() override {
stopSupplicant(wifi_v1_0_instance_name_);
+ // Start Framework
+ std::system("/system/bin/start");
}
protected:
diff --git a/wifi/supplicant/1.2/vts/functional/supplicant_p2p_iface_hidl_test.cpp b/wifi/supplicant/1.2/vts/functional/supplicant_p2p_iface_hidl_test.cpp
index 1eb8eea..cab160b 100644
--- a/wifi/supplicant/1.2/vts/functional/supplicant_p2p_iface_hidl_test.cpp
+++ b/wifi/supplicant/1.2/vts/functional/supplicant_p2p_iface_hidl_test.cpp
@@ -43,7 +43,7 @@
virtual void SetUp() override {
SupplicantHidlTestBase::SetUp();
EXPECT_TRUE(turnOnExcessiveLogging(supplicant_));
- if (!::testing::deviceSupportsFeature("android.hardware.wifi.direct")) {
+ if (!isP2pOn_) {
GTEST_SKIP() << "Wi-Fi Direct is not supported, skip this test.";
}
p2p_iface_ = getSupplicantP2pIface_1_2(supplicant_);
diff --git a/wifi/supplicant/1.3/vts/functional/supplicant_sta_iface_hidl_test.cpp b/wifi/supplicant/1.3/vts/functional/supplicant_sta_iface_hidl_test.cpp
index 12bd122..011a955 100644
--- a/wifi/supplicant/1.3/vts/functional/supplicant_sta_iface_hidl_test.cpp
+++ b/wifi/supplicant/1.3/vts/functional/supplicant_sta_iface_hidl_test.cpp
@@ -63,6 +63,8 @@
supplicant_v1_3_instance_name_ = std::get<1>(GetParam());
isP2pOn_ =
testing::deviceSupportsFeature("android.hardware.wifi.direct");
+ // Stop Framework
+ std::system("/system/bin/stop");
stopSupplicant(wifi_v1_0_instance_name_);
startSupplicantAndWaitForHidlService(wifi_v1_0_instance_name_,
@@ -76,6 +78,8 @@
virtual void TearDown() override {
stopSupplicant(wifi_v1_0_instance_name_);
+ // Start Framework
+ std::system("/system/bin/start");
}
int64_t pmkCacheExpirationTimeInSec;
diff --git a/wifi/supplicant/1.3/vts/functional/supplicant_sta_network_hidl_test.cpp b/wifi/supplicant/1.3/vts/functional/supplicant_sta_network_hidl_test.cpp
index 25091a5..5f60746 100644
--- a/wifi/supplicant/1.3/vts/functional/supplicant_sta_network_hidl_test.cpp
+++ b/wifi/supplicant/1.3/vts/functional/supplicant_sta_network_hidl_test.cpp
@@ -51,6 +51,8 @@
supplicant_v1_3_instance_name_ = std::get<1>(GetParam());
isP2pOn_ =
testing::deviceSupportsFeature("android.hardware.wifi.direct");
+ // Stop Framework
+ std::system("/system/bin/stop");
stopSupplicant(wifi_v1_0_instance_name_);
startSupplicantAndWaitForHidlService(wifi_v1_0_instance_name_,
@@ -66,6 +68,8 @@
virtual void TearDown() override {
stopSupplicant(wifi_v1_0_instance_name_);
+ // Start Framework
+ std::system("/system/bin/start");
}
protected:
diff --git a/wifi/supplicant/1.4/Android.bp b/wifi/supplicant/1.4/Android.bp
new file mode 100644
index 0000000..ff2a8bc
--- /dev/null
+++ b/wifi/supplicant/1.4/Android.bp
@@ -0,0 +1,21 @@
+// This file is autogenerated by hidl-gen -Landroidbp.
+
+hidl_interface {
+ name: "android.hardware.wifi.supplicant@1.4",
+ root: "android.hardware",
+ srcs: [
+ "ISupplicant.hal",
+ ],
+ interfaces: [
+ "android.hardware.wifi.supplicant@1.0",
+ "android.hardware.wifi.supplicant@1.1",
+ "android.hardware.wifi.supplicant@1.2",
+ "android.hardware.wifi.supplicant@1.3",
+ "android.hardware.wifi@1.0",
+ "android.hidl.base@1.0",
+ ],
+ gen_java: true,
+ apex_available: [
+ "com.android.wifi",
+ ],
+}
diff --git a/wifi/supplicant/1.4/ISupplicant.hal b/wifi/supplicant/1.4/ISupplicant.hal
new file mode 100644
index 0000000..9cb88ba
--- /dev/null
+++ b/wifi/supplicant/1.4/ISupplicant.hal
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.wifi.supplicant@1.4;
+
+import @1.3::ISupplicant;
+
+/**
+ * Interface exposed by the supplicant HIDL service registered
+ * with the hardware service manager.
+ * This is the root-level object for any supplicant interactions.
+ * To use 1.4 features, you must cast specific interfaces returned from the
+ * 1.1 HAL. For example, V1_1::ISupplicant::addInterface() returns a
+ * V1_0::ISupplicantIface, which can be cast to V1_4::ISupplicantStaIface.
+ */
+interface ISupplicant extends @1.3::ISupplicant {};
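The comment above describes the standard HIDL pattern for reaching 1.4 functionality: obtain an interface handle through an older ISupplicant entry point, then upcast it with the generated castFrom() helper. For reference, a minimal client-side C++ sketch of that cast follows. It is illustrative only and not part of this change: it assumes the hidl-gen generated proxy headers, an already obtained V1_0::ISupplicantIface handle, and a hypothetical helper name castToStaIface_1_4().

    // Hypothetical helper (not in this change): upcast a V1_0 iface handle
    // to the 1.4 STA iface using the hidl-gen generated castFrom().
    #include <android/hardware/wifi/supplicant/1.0/ISupplicantIface.h>
    #include <android/hardware/wifi/supplicant/1.4/ISupplicantStaIface.h>

    using ::android::sp;
    using V1_0_Iface = ::android::hardware::wifi::supplicant::V1_0::ISupplicantIface;
    using V1_4_StaIface = ::android::hardware::wifi::supplicant::V1_4::ISupplicantStaIface;

    sp<V1_4_StaIface> castToStaIface_1_4(const sp<V1_0_Iface>& iface) {
        // castFrom() checks the remote object's actual interface chain and
        // returns nullptr if the service only implements an older version,
        // so callers must null-check before invoking any 1.4 method.
        sp<V1_4_StaIface> sta_iface_1_4 = V1_4_StaIface::castFrom(iface);
        return sta_iface_1_4;
    }

Returning nullptr instead of aborting mirrors how the supplicant VTS helpers probe for newer interface versions, letting callers fall back to the older HAL when 1.4 is not implemented on the device.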