Merge "Use audio_bytes_per_frame now supporting compressed formats" into main
diff --git a/Android.bp b/Android.bp
index 302e250..7a2bb9b 100644
--- a/Android.bp
+++ b/Android.bp
@@ -52,8 +52,8 @@
"aidl/android/media/VolumeShaperOperationFlag.aidl",
"aidl/android/media/VolumeShaperState.aidl",
],
- imports: [
- "android.media.audio.common.types-V2",
+ defaults: [
+ "latest_android_media_audio_common_types_import_interface",
],
backend: {
cpp: {
@@ -113,8 +113,8 @@
srcs: [
"aidl/android/media/audio/IHalAdapterVendorExtension.aidl",
],
- imports: [
- "android.hardware.audio.core-V1",
+ defaults: [
+ "latest_android_hardware_audio_core_import_interface",
],
backend: {
// The C++ backend is disabled transitively due to use of FMQ by the audio core HAL.
diff --git a/media/codec2/fuzzer/Android.bp b/media/codec2/fuzzer/Android.bp
index dd68e7e..b387b2c 100644
--- a/media/codec2/fuzzer/Android.bp
+++ b/media/codec2/fuzzer/Android.bp
@@ -41,8 +41,17 @@
fuzz_config: {
cc: [
- "wonsik@google.com",
+ "android-fwk-video@google.com",
],
+ componentid: 1344,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libcodec2",
+ vector: "remote",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 8082dd7..cd3a80f 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -2326,9 +2326,12 @@
void CCodec::onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems) {
if (!workItems.empty()) {
Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
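+ // Post kWhatWorkDone only when the queue transitions from empty to non-empty;
+ // if items were already queued, a work-done message is already in flight.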
+ bool shouldPost = queue->empty();
queue->splice(queue->end(), workItems);
+ if (shouldPost) {
+ (new AMessage(kWhatWorkDone, this))->post();
+ }
}
- (new AMessage(kWhatWorkDone, this))->post();
}
void CCodec::onInputBufferDone(uint64_t frameIndex, size_t arrayIndex) {
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index 30f451a..7882951 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -254,6 +254,9 @@
name: "aaudio-aidl",
unstable: true,
local_include_dir: "binding/aidl",
+ defaults: [
+ "latest_android_media_audio_common_types_import_interface",
+ ],
srcs: [
"binding/aidl/aaudio/Endpoint.aidl",
"binding/aidl/aaudio/RingBuffer.aidl",
@@ -264,7 +267,6 @@
"binding/aidl/aaudio/IAAudioService.aidl",
],
imports: [
- "android.media.audio.common.types-V2",
"audioclient-types-aidl",
"shared-file-region-aidl",
"framework-permission-aidl",
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 01e3d53..2c9e173 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -255,8 +255,10 @@
"aidl/android/media/IEffect.aidl",
"aidl/android/media/IEffectClient.aidl",
],
+ defaults: [
+ "latest_android_media_audio_common_types_import_interface",
+ ],
imports: [
- "android.media.audio.common.types-V2",
"shared-file-region-aidl",
],
backend: {
@@ -309,8 +311,10 @@
"aidl/android/media/TrackSecondaryOutputInfo.aidl",
"aidl/android/media/SurroundSoundConfig.aidl",
],
+ defaults: [
+ "latest_android_media_audio_common_types_import_interface",
+ ],
imports: [
- "android.media.audio.common.types-V2",
"framework-permission-aidl",
],
backend: {
@@ -351,12 +355,11 @@
"aidl/android/media/AudioVolumeGroup.aidl",
"aidl/android/media/DeviceRole.aidl",
"aidl/android/media/SoundTriggerSession.aidl",
- "aidl/android/media/SpatializationLevel.aidl",
- "aidl/android/media/SpatializationMode.aidl",
- "aidl/android/media/SpatializerHeadTrackingMode.aidl",
+ ],
+ defaults: [
+ "latest_android_media_audio_common_types_import_interface",
],
imports: [
- "android.media.audio.common.types-V2",
"audioclient-types-aidl",
],
backend: {
@@ -402,8 +405,10 @@
"aidl/android/media/ISoundDoseCallback.aidl",
"aidl/android/media/SoundDoseRecord.aidl",
],
+ defaults: [
+ "latest_android_media_audio_common_types_import_interface",
+ ],
imports: [
- "android.media.audio.common.types-V2",
"audioclient-types-aidl",
"av-types-aidl",
"effect-aidl",
@@ -439,8 +444,10 @@
"aidl/android/media/IAudioPolicyService.aidl",
"aidl/android/media/IAudioPolicyServiceClient.aidl",
],
+ defaults: [
+ "latest_android_media_audio_common_types_import_interface",
+ ],
imports: [
- "android.media.audio.common.types-V2",
"audioclient-types-aidl",
"audiopolicy-types-aidl",
"capture_state_listener-aidl",
@@ -474,6 +481,9 @@
"aidl/android/media/ISpatializer.aidl",
"aidl/android/media/ISpatializerHeadTrackingCallback.aidl",
],
+ defaults: [
+ "latest_android_media_audio_common_types_import_interface",
+ ],
imports: [
"audiopolicy-types-aidl",
],
diff --git a/media/libaudioclient/aidl/android/media/INativeSpatializerCallback.aidl b/media/libaudioclient/aidl/android/media/INativeSpatializerCallback.aidl
index 88b8108..8b30b29 100644
--- a/media/libaudioclient/aidl/android/media/INativeSpatializerCallback.aidl
+++ b/media/libaudioclient/aidl/android/media/INativeSpatializerCallback.aidl
@@ -16,8 +16,7 @@
package android.media;
-import android.media.SpatializationLevel;
-import android.media.SpatializerHeadTrackingMode;
+import android.media.audio.common.Spatialization;
/**
* The INativeSpatializerCallback interface is a callback associated to the
@@ -30,7 +29,7 @@
/** Called when the spatialization level applied by the spatializer changes
* (e.g. when the spatializer is enabled or disabled)
*/
- void onLevelChanged(SpatializationLevel level);
+ void onLevelChanged(Spatialization.Level level);
/** Called when the output stream the Spatializer is attached to changes.
* Indicates the IO Handle of the new output.
diff --git a/media/libaudioclient/aidl/android/media/ISpatializer.aidl b/media/libaudioclient/aidl/android/media/ISpatializer.aidl
index 250c450..37dd776 100644
--- a/media/libaudioclient/aidl/android/media/ISpatializer.aidl
+++ b/media/libaudioclient/aidl/android/media/ISpatializer.aidl
@@ -16,11 +16,9 @@
package android.media;
+import android.media.audio.common.HeadTracking;
+import android.media.audio.common.Spatialization;
import android.media.ISpatializerHeadTrackingCallback;
-import android.media.SpatializationLevel;
-import android.media.SpatializationMode;
-import android.media.SpatializerHeadTrackingMode;
-
/**
* The ISpatializer interface is used to control the native audio service implementation
@@ -34,21 +32,25 @@
/** Releases a ISpatializer interface previously acquired. */
void release();
- /** Reports the list of supported spatialization levels (see SpatializationLevel.aidl).
+ /**
+ * Reports the list of supported spatialization levels.
* The list should never be empty if an ISpatializer interface was successfully
* retrieved with IAudioPolicyService.getSpatializer().
*/
- SpatializationLevel[] getSupportedLevels();
+ Spatialization.Level[] getSupportedLevels();
- /** Selects the desired spatialization level (see SpatializationLevel.aidl). Selecting a level
- * different from SpatializationLevel.NONE with create the specialized multichannel output
+ /**
+ * Selects the desired spatialization level. Selecting a level
+ * different from Spatialization.Level.NONE will create the specialized multichannel output
* mixer, create and enable the spatializer effect and let the audio policy attach eligible
* AudioTrack to this output stream.
*/
- void setLevel(SpatializationLevel level);
+ void setLevel(Spatialization.Level level);
- /** Gets the selected spatialization level (see SpatializationLevel.aidl) */
- SpatializationLevel getLevel();
+ /**
+ * Gets the selected spatialization level.
+ */
+ Spatialization.Level getLevel();
/** Reports if the spatializer engine supports head tracking or not.
* This is a pre condition independent of the fact that a head tracking sensor is
@@ -56,26 +58,33 @@
*/
boolean isHeadTrackingSupported();
- /** Reports the list of supported head tracking modes (see SpatializerHeadTrackingMode.aidl).
+ /**
+ * Reports the list of supported head tracking modes.
* The list always contains SpatializerHeadTrackingMode.DISABLED and can include other modes
* if the spatializer effect implementation supports head tracking.
* The result does not depend on currently connected sensors but reflects the capabilities
* when sensors are available.
*/
- SpatializerHeadTrackingMode[] getSupportedHeadTrackingModes();
+ HeadTracking.Mode[] getSupportedHeadTrackingModes();
- /** Selects the desired head tracking mode (see SpatializerHeadTrackingMode.aidl) */
- void setDesiredHeadTrackingMode(SpatializerHeadTrackingMode mode);
+ /**
+ * Selects the desired head tracking mode.
+ */
+ void setDesiredHeadTrackingMode(HeadTracking.Mode mode);
- /** Gets the actual head tracking mode. Can be different from the desired mode if conditions to
+ /**
+ * Gets the actual head tracking mode. Can be different from the desired mode if conditions to
* enable the desired mode are not met (e.g if the head tracking device was removed)
*/
- SpatializerHeadTrackingMode getActualHeadTrackingMode();
+ HeadTracking.Mode getActualHeadTrackingMode();
- /** Reset the head tracking algorithm to consider current head pose as neutral */
+ /**
+ * Reset the head tracking algorithm to consider current head pose as neutral
+ */
void recenterHeadTracker();
- /** Set the screen to stage transform to use by the head tracking algorithm
+ /**
+ * Set the screen to stage transform to be used by the head tracking algorithm
* The screen to stage transform is conveyed as a vector of 6 elements,
* where the first three are a translation vector and
* the last three are a rotation vector.
@@ -123,11 +132,12 @@
*/
void setFoldState(boolean folded);
- /** Reports the list of supported spatialization modess (see SpatializationMode.aidl).
+ /**
+ * Reports the list of supported spatialization modes.
* The list should never be empty if an ISpatializer interface was successfully
* retrieved with IAudioPolicyService.getSpatializer().
*/
- SpatializationMode[] getSupportedModes();
+ Spatialization.Mode[] getSupportedModes();
/**
* Registers a callback to monitor head tracking functions.
diff --git a/media/libaudioclient/aidl/android/media/ISpatializerHeadTrackingCallback.aidl b/media/libaudioclient/aidl/android/media/ISpatializerHeadTrackingCallback.aidl
index 23d5e13..615b971 100644
--- a/media/libaudioclient/aidl/android/media/ISpatializerHeadTrackingCallback.aidl
+++ b/media/libaudioclient/aidl/android/media/ISpatializerHeadTrackingCallback.aidl
@@ -16,8 +16,7 @@
package android.media;
-import android.media.SpatializationLevel;
-import android.media.SpatializerHeadTrackingMode;
+import android.media.audio.common.HeadTracking;
/**
* The ISpatializerHeadTrackingCallback interface is a callback associated to the
@@ -28,7 +27,7 @@
oneway interface ISpatializerHeadTrackingCallback {
/** Called when the head tracking mode has changed
*/
- void onHeadTrackingModeChanged(SpatializerHeadTrackingMode mode);
+ void onHeadTrackingModeChanged(HeadTracking.Mode mode);
/** Called when the head to stage pose hase been updated
* The head to stage pose is conveyed as a vector of 6 elements,
diff --git a/media/libaudioclient/aidl/android/media/SpatializationLevel.aidl b/media/libaudioclient/aidl/android/media/SpatializationLevel.aidl
deleted file mode 100644
index 961c5a1..0000000
--- a/media/libaudioclient/aidl/android/media/SpatializationLevel.aidl
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * The spatialization level supported by the spatializer stage effect implementation.
- * Used by methods of the ISpatializer interface.
- * {@hide}
- */
-@Backing(type="byte")
-enum SpatializationLevel {
- /** Spatialization is disabled. */
- NONE = 0,
- /** The spatializer accepts audio with positional multichannel masks (e.g 5.1). */
- SPATIALIZER_MULTICHANNEL = 1,
- /** The spatializer accepts audio made of a channel bed of positional multichannels (e.g 5.1)
- * and audio objects positioned independently via meta data.
- */
- SPATIALIZER_MCHAN_BED_PLUS_OBJECTS = 2,
-}
diff --git a/media/libaudioclient/aidl/android/media/SpatializationMode.aidl b/media/libaudioclient/aidl/android/media/SpatializationMode.aidl
deleted file mode 100644
index eaaff37..0000000
--- a/media/libaudioclient/aidl/android/media/SpatializationMode.aidl
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * The spatialization mode supported by the spatializer stage effect implementation.
- * Used by methods of the ISpatializer interface.
- * {@hide}
- */
-@Backing(type="byte")
-enum SpatializationMode {
- /** The spatializer supports binaural mode (over headphones type devices). */
- SPATIALIZER_BINAURAL = 0,
- /** The spatializer supports transaural mode (over speaker type devices). */
- SPATIALIZER_TRANSAURAL = 1,
-}
diff --git a/media/libaudioclient/aidl/android/media/SpatializerHeadTrackingMode.aidl b/media/libaudioclient/aidl/android/media/SpatializerHeadTrackingMode.aidl
deleted file mode 100644
index 58e0f61..0000000
--- a/media/libaudioclient/aidl/android/media/SpatializerHeadTrackingMode.aidl
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-
-/**
- * The head tracking mode supported by the spatializer effect implementation.
- * Used by methods of the ISpatializer interface.
- * {@hide}
- */
-@Backing(type="byte")
-enum SpatializerHeadTrackingMode {
- /** Head tracking is active in a mode not listed below (forward compatibility) */
- OTHER = 0,
- /** Head tracking is disabled */
- DISABLED = 1,
- /** Head tracking is performed relative to the real work environment */
- RELATIVE_WORLD = 2,
- /** Head tracking is performed relative to the device's screen */
- RELATIVE_SCREEN = 3,
-}
diff --git a/media/libaudiofoundation/AudioPort.cpp b/media/libaudiofoundation/AudioPort.cpp
index 6e05abc..ae0457f 100644
--- a/media/libaudiofoundation/AudioPort.cpp
+++ b/media/libaudiofoundation/AudioPort.cpp
@@ -57,14 +57,21 @@
void AudioPort::importAudioPort(const audio_port_v7 &port) {
for (size_t i = 0; i < port.num_audio_profiles; ++i) {
+ if (port.audio_profiles[i].format == AUDIO_FORMAT_DEFAULT) {
+ // The dynamic format from AudioPort should not be AUDIO_FORMAT_DEFAULT.
+ continue;
+ }
sp<AudioProfile> profile = new AudioProfile(port.audio_profiles[i].format,
ChannelMaskSet(port.audio_profiles[i].channel_masks,
port.audio_profiles[i].channel_masks +
- port.audio_profiles->num_channel_masks),
+ port.audio_profiles[i].num_channel_masks),
SampleRateSet(port.audio_profiles[i].sample_rates,
port.audio_profiles[i].sample_rates +
port.audio_profiles[i].num_sample_rates),
port.audio_profiles[i].encapsulation_type);
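+ // Mark the imported profile as dynamic: its format, channel masks and sample rates
+ // are reported by the HAL at runtime and may change on re-query.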
+ profile->setDynamicFormat(true);
+ profile->setDynamicChannels(true);
+ profile->setDynamicRate(true);
if (!mProfiles.contains(profile)) {
addAudioProfile(profile);
}
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
index ff0c32b..49e6827 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
@@ -51,6 +51,7 @@
status_t AidlConversionSpatializer::getParameter(EffectParamWriter& param) {
DefaultExtension defaultExt;
// read parameters into DefaultExtension vector<uint8_t>
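+ // Size the byte vector to the parameter payload before reading into it.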
+ defaultExt.bytes.resize(param.getParameterSize());
if (OK != param.readFromParameter(defaultExt.bytes.data(), param.getParameterSize())) {
ALOGE("%s invalid param %s", __func__, param.toString().c_str());
param.setStatus(BAD_VALUE);
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index df64676..1a37622 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -17,7 +17,7 @@
#ifndef LVM_FLOAT
typedef float LVM_FLOAT;
#endif
-#define LOG_TAG "Bundle"
+#define LOG_TAG "EffectBundle"
#define ARRAY_SIZE(array) (sizeof(array) / sizeof(array)[0])
//#define LOG_NDEBUG 0
@@ -1191,11 +1191,13 @@
// 0 if the configuration is supported
//----------------------------------------------------------------------------
int VirtualizerIsDeviceSupported(audio_devices_t deviceType) {
+ ALOGV("%s: deviceType:%#x", __func__, deviceType);
switch (deviceType) {
case AUDIO_DEVICE_OUT_WIRED_HEADSET:
case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
case AUDIO_DEVICE_OUT_USB_HEADSET:
+ case AUDIO_DEVICE_OUT_BLE_HEADSET:
// case AUDIO_DEVICE_OUT_USB_DEVICE: // For USB testing of the virtualizer only.
return 0;
default:
@@ -3372,10 +3374,10 @@
if (pContext->EffectType == LVM_BASS_BOOST) {
if ((device == AUDIO_DEVICE_OUT_SPEAKER) ||
(device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT) ||
- (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)) {
- ALOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_BASS_BOOST %d",
- *(int32_t*)pCmdData);
- ALOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_BAS_BOOST");
+ device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER ||
+ device == AUDIO_DEVICE_OUT_BLE_SPEAKER) {
+ ALOGV("%s: EFFECT_CMD_SET_DEVICE device %#x is invalid for LVM_BASS_BOOST",
+ __func__, device);
// If a device doesn't support bassboost the effect must be temporarily disabled
// the effect must still report its original state as this can only be changed
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 870ebdf..c110f05 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -2991,12 +2991,6 @@
ALOGE("retrying start: failed to reset codec");
break;
}
- sp<AMessage> response;
- err = PostAndAwaitResponse(mConfigureMsg, &response);
- if (err != OK) {
- ALOGE("retrying start: failed to configure codec");
- break;
- }
if (callback != nullptr) {
err = setCallback(callback);
if (err != OK) {
@@ -3005,6 +2999,12 @@
}
ALOGD("succeed to set callback for reclaim");
}
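+ // Configure after restoring the callback: async mode requires the callback
+ // to be set before configure().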
+ sp<AMessage> response;
+ err = PostAndAwaitResponse(mConfigureMsg, &response);
+ if (err != OK) {
+ ALOGE("retrying start: failed to configure codec");
+ break;
+ }
}
// Keep callback message after the first iteration if necessary.
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 291b892..827052d 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -225,6 +225,13 @@
return err;
};
+ // We need to set sidebandStream to nullptr before pushing blank buffers
+ err = native_window_set_sideband_stream(nativeWindow, nullptr);
+ if (err != NO_ERROR) {
+ ALOGE("error setting sidebandStream to nullptr: %s (%d)", strerror(-err), -err);
+ return err;
+ }
+
// We need to reconnect to the ANativeWindow as a CPU client to ensure that
// no frames get dropped by SurfaceFlinger assuming that these are video
// frames.
diff --git a/media/libstagefright/httplive/fuzzer/Android.bp b/media/libstagefright/httplive/fuzzer/Android.bp
index dd49714..cb2f4ee 100644
--- a/media/libstagefright/httplive/fuzzer/Android.bp
+++ b/media/libstagefright/httplive/fuzzer/Android.bp
@@ -48,6 +48,7 @@
"libstagefright_httplive_headers",
],
shared_libs: [
+ "libbase",
"libcrypto",
"libstagefright_foundation",
"libhidlbase",
diff --git a/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
index 7823922..ee76a67 100644
--- a/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
+++ b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
@@ -261,7 +261,7 @@
Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
[](const std::string &, const std::string &flag, const std::string &) -> std::string {
if (flag == "render_metrics_enabled") {
- return "false";
+ return "true";
} else if (flag == "render_metrics_are_skipped_frames_dropped") {
return "false";
} else if (flag == "render_metrics_max_expected_content_frame_duration_us") {
@@ -304,7 +304,7 @@
// default - if we are accidentally configuring to the default then we're not necessarily
// testing the parsing.
Configuration d;
- EXPECT_EQ(c.enabled, false);
+ EXPECT_EQ(c.enabled, true);
EXPECT_NE(c.enabled, d.enabled);
EXPECT_EQ(c.areSkippedFramesDropped, false);
EXPECT_NE(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
@@ -357,6 +357,7 @@
TEST_F(VideoRenderQualityTrackerTest, countsReleasedFrames) {
Configuration c;
+ c.enabled = true;
Helper h(16.66, c);
h.drop(10);
h.render({16.66, 16.66, 16.66});
@@ -368,6 +369,7 @@
TEST_F(VideoRenderQualityTrackerTest, countsSkippedFrames) {
Configuration c;
+ c.enabled = true;
Helper h(16.66, c);
h.drop(10); // dropped frames are not counted
h.skip(10); // frames skipped before rendering a frame are not counted
@@ -382,6 +384,7 @@
TEST_F(VideoRenderQualityTrackerTest, whenSkippedFramesAreDropped_countsDroppedFrames) {
Configuration c;
+ c.enabled = true;
c.areSkippedFramesDropped = true;
Helper h(16.66, c);
h.skip(10); // skipped frames at the beginning of playback are not counted
@@ -398,6 +401,7 @@
TEST_F(VideoRenderQualityTrackerTest, whenNotSkippedFramesAreDropped_countsDroppedFrames) {
Configuration c;
+ c.enabled = true;
c.areSkippedFramesDropped = false;
Helper h(16.66, c);
h.skip(10); // skipped frames at the beginning of playback are not counted
@@ -414,6 +418,7 @@
TEST_F(VideoRenderQualityTrackerTest, countsRenderedFrames) {
Configuration c;
+ c.enabled = true;
Helper h(16.66, c);
h.drop(10); // dropped frames are not counted
h.render({16.66, 16.66, 16.66});
@@ -425,6 +430,7 @@
TEST_F(VideoRenderQualityTrackerTest, detectsFrameRate) {
Configuration c;
+ c.enabled = true;
c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
Helper h(16.66, c);
h.render({16.6, 16.7, 16.6, 16.7});
@@ -434,6 +440,7 @@
TEST_F(VideoRenderQualityTrackerTest, handlesSeeking) {
Configuration c;
+ c.enabled = true;
c.maxExpectedContentFrameDurationUs = 30;
VideoRenderQualityTracker v(c);
v.onFrameReleased(0, 0);
@@ -472,6 +479,7 @@
TEST_F(VideoRenderQualityTrackerTest, withSkipping_handlesSeeking) {
Configuration c;
+ c.enabled = true;
c.maxExpectedContentFrameDurationUs = 30;
VideoRenderQualityTracker v(c);
v.onFrameReleased(0, 0);
@@ -508,6 +516,7 @@
TEST_F(VideoRenderQualityTrackerTest, whenLowTolerance_doesntDetectFrameRate) {
Configuration c;
+ c.enabled = true;
c.frameRateDetectionToleranceUs = 0;
Helper h(16.66, c);
h.render({16.6, 16.7, 16.6, 16.7});
@@ -517,6 +526,7 @@
TEST_F(VideoRenderQualityTrackerTest, whenFrameRateDestabilizes_detectsFrameRate) {
Configuration c;
+ c.enabled = true;
c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
Helper h(16.66, c);
h.render({16.6, 16.7, 16.6, 16.7});
@@ -527,6 +537,7 @@
TEST_F(VideoRenderQualityTrackerTest, detects32Pulldown) {
Configuration c;
+ c.enabled = true;
c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
Helper h(41.66, c);
h.render({49.9, 33.2, 50.0, 33.4, 50.1, 33.2});
@@ -536,6 +547,7 @@
TEST_F(VideoRenderQualityTrackerTest, whenBad32Pulldown_doesntDetect32Pulldown) {
Configuration c;
+ c.enabled = true;
c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
Helper h(41.66, c);
h.render({50.0, 33.33, 33.33, 50.00, 33.33, 50.00});
@@ -545,6 +557,7 @@
TEST_F(VideoRenderQualityTrackerTest, whenFrameRateChanges_detectsMostRecentFrameRate) {
Configuration c;
+ c.enabled = true;
c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
Helper h(16.66, c);
h.render({16.6, 16.7, 16.6, 16.7});
@@ -558,6 +571,7 @@
TEST_F(VideoRenderQualityTrackerTest, whenFrameRateIsUnstable_doesntDetectFrameRate) {
Configuration c;
+ c.enabled = true;
c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
Helper h(16.66, c);
h.render({16.66, 30.0, 16.66, 30.0, 16.66});
@@ -567,6 +581,7 @@
TEST_F(VideoRenderQualityTrackerTest, capturesFreezeRate) {
Configuration c;
+ c.enabled = true;
Helper h(20, c);
h.render(3);
EXPECT_EQ(h.getMetrics().freezeRate, 0);
@@ -579,6 +594,7 @@
TEST_F(VideoRenderQualityTrackerTest, capturesFreezeDurationHistogram) {
Configuration c;
+ c.enabled = true;
// +17 because freeze durations include the render time of the previous frame
c.freezeDurationMsHistogramBuckets = {2 * 17 + 17, 3 * 17 + 17, 6 * 17 + 17};
Helper h(17, c);
@@ -612,6 +628,7 @@
TEST_F(VideoRenderQualityTrackerTest, capturesFreezeDistanceHistogram) {
Configuration c;
+ c.enabled = true;
c.freezeDistanceMsHistogramBuckets = {1 * 17, 5 * 17, 6 * 17};
Helper h(17, c);
h.render(1);
@@ -643,6 +660,7 @@
TEST_F(VideoRenderQualityTrackerTest, when60hz_hasNoJudder) {
Configuration c;
+ c.enabled = true;
Helper h(16.66, c); // ~24Hz
h.render({16.66, 16.66, 16.66, 16.66, 16.66, 16.66, 16.66});
EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
@@ -651,6 +669,7 @@
TEST_F(VideoRenderQualityTrackerTest, whenSmallVariance60hz_hasNoJudder) {
Configuration c;
+ c.enabled = true;
Helper h(16.66, c); // ~24Hz
h.render({14, 18, 14, 18, 14, 18, 14, 18});
EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
@@ -659,6 +678,7 @@
TEST_F(VideoRenderQualityTrackerTest, whenBadSmallVariance60Hz_hasJudder) {
Configuration c;
+ c.enabled = true;
Helper h(16.66, c); // ~24Hz
h.render({14, 18, 14, /* no 18 between 14s */ 14, 18, 14, 18});
EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
@@ -666,6 +686,7 @@
TEST_F(VideoRenderQualityTrackerTest, when30Hz_hasNoJudder) {
Configuration c;
+ c.enabled = true;
Helper h(33.33, c);
h.render({33.33, 33.33, 33.33, 33.33, 33.33, 33.33});
EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
@@ -674,6 +695,7 @@
TEST_F(VideoRenderQualityTrackerTest, whenSmallVariance30Hz_hasNoJudder) {
Configuration c;
+ c.enabled = true;
Helper h(33.33, c);
h.render({29.0, 35.0, 29.0, 35.0, 29.0, 35.0});
EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
@@ -682,6 +704,7 @@
TEST_F(VideoRenderQualityTrackerTest, whenBadSmallVariance30Hz_hasJudder) {
Configuration c;
+ c.enabled = true;
Helper h(33.33, c);
h.render({29.0, 35.0, 29.0, /* no 35 between 29s */ 29.0, 35.0, 29.0, 35.0});
EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
@@ -689,6 +712,7 @@
TEST_F(VideoRenderQualityTrackerTest, whenBad30HzTo60Hz_hasJudder) {
Configuration c;
+ c.enabled = true;
Helper h(33.33, c);
h.render({33.33, 33.33, 50.0, /* frame stayed 1 vsync too long */ 16.66, 33.33, 33.33});
EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 2); // note: 2 counts of judder
@@ -696,6 +720,7 @@
TEST_F(VideoRenderQualityTrackerTest, when24HzTo60Hz_hasNoJudder) {
Configuration c;
+ c.enabled = true;
Helper h(41.66, c);
h.render({50.0, 33.33, 50.0, 33.33, 50.0, 33.33});
EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
@@ -704,6 +729,7 @@
TEST_F(VideoRenderQualityTrackerTest, when25HzTo60Hz_hasJudder) {
Configuration c;
+ c.enabled = true;
Helper h(40, c);
h.render({33.33, 33.33, 50.0});
h.render({33.33, 33.33, 50.0});
@@ -716,6 +742,7 @@
TEST_F(VideoRenderQualityTrackerTest, when50HzTo60Hz_hasJudder) {
Configuration c;
+ c.enabled = true;
Helper h(20, c);
h.render({16.66, 16.66, 16.66, 33.33});
h.render({16.66, 16.66, 16.66, 33.33});
@@ -728,6 +755,7 @@
TEST_F(VideoRenderQualityTrackerTest, when30HzTo50Hz_hasJudder) {
Configuration c;
+ c.enabled = true;
Helper h(33.33, c);
h.render({40.0, 40.0, 40.0, 60.0});
h.render({40.0, 40.0, 40.0, 60.0});
@@ -739,6 +767,7 @@
TEST_F(VideoRenderQualityTrackerTest, whenSmallVariancePulldown24HzTo60Hz_hasNoJudder) {
Configuration c;
+ c.enabled = true;
Helper h(41.66, c);
h.render({52.0, 31.33, 52.0, 31.33, 52.0, 31.33});
EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
@@ -746,6 +775,7 @@
TEST_F(VideoRenderQualityTrackerTest, whenBad24HzTo60Hz_hasJudder) {
Configuration c;
+ c.enabled = true;
Helper h(41.66, c);
h.render({50.0, 33.33, 50.0, 33.33, /* no 50 between 33s */ 33.33, 50.0, 33.33});
EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
@@ -753,6 +783,7 @@
TEST_F(VideoRenderQualityTrackerTest, capturesJudderScoreHistogram) {
Configuration c;
+ c.enabled = true;
c.judderErrorToleranceUs = 2000;
c.judderScoreHistogramBuckets = {1, 5, 8};
Helper h(16, c);
@@ -767,6 +798,7 @@
TEST_F(VideoRenderQualityTrackerTest, ranksJudderScoresInOrder) {
// Each rendering is ranked from best to worst from a user experience
Configuration c;
+ c.enabled = true;
c.judderErrorToleranceUs = 2000;
c.judderScoreHistogramBuckets = {0, 1000};
int64_t previousScore = 0;
@@ -846,6 +878,7 @@
TEST_F(VideoRenderQualityTrackerTest, capturesFreezeEvents) {
Configuration c;
+ c.enabled = true;
c.freezeEventMax = 5;
c.freezeEventDetailsMax = 4;
c.freezeEventDistanceToleranceMs = 1000;
@@ -938,6 +971,7 @@
TEST_F(VideoRenderQualityTrackerTest, capturesJudderEvents) {
Configuration c;
+ c.enabled = true;
c.judderEventMax = 4;
c.judderEventDetailsMax = 3;
c.judderEventDistanceToleranceMs = 100;
@@ -988,6 +1022,7 @@
TEST_F(VideoRenderQualityTrackerTest, capturesOverallFreezeScore) {
Configuration c;
+ c.enabled = true;
// # drops * 20ms + 20ms because current frame is frozen + 1 for bucket threshold
c.freezeDurationMsHistogramBuckets = {1 * 20 + 21, 5 * 20 + 21, 10 * 20 + 21};
c.freezeDurationMsHistogramToScore = {10, 100, 1000};
@@ -1012,6 +1047,7 @@
TEST_F(VideoRenderQualityTrackerTest, capturesOverallJudderScore) {
Configuration c;
+ c.enabled = true;
c.judderScoreHistogramBuckets = {0, 6, 10};
c.judderScoreHistogramToScore = {10, 100, 1000};
Helper h(20, c);
diff --git a/media/module/libmediatranscoding/transcoder/tests/fuzzer/Android.bp b/media/module/libmediatranscoding/transcoder/tests/fuzzer/Android.bp
index 69b2827..b6eca2a 100644
--- a/media/module/libmediatranscoding/transcoder/tests/fuzzer/Android.bp
+++ b/media/module/libmediatranscoding/transcoder/tests/fuzzer/Android.bp
@@ -48,8 +48,16 @@
],
fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-media-editing@google.com",
],
- componentid: 155276,
+ componentid: 761430,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libmediatranscoder",
+ vector: "local_no_privileges_required",
+ service_privilege: "privileged",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/module/mpeg2ts/ATSParser.cpp b/media/module/mpeg2ts/ATSParser.cpp
index 6aeea3b..86187bd 100644
--- a/media/module/mpeg2ts/ATSParser.cpp
+++ b/media/module/mpeg2ts/ATSParser.cpp
@@ -23,8 +23,8 @@
#include "ESQueue.h"
#include <android/hardware/cas/native/1.0/IDescrambler.h>
-#include <android/hidl/allocator/1.0/IAllocator.h>
#include <android/hidl/memory/1.0/IMemory.h>
+#include <cutils/ashmem.h>
#include <cutils/native_handle.h>
#include <hidlmemory/mapping.h>
#include <media/cas/DescramblerAPI.h>
@@ -46,12 +46,12 @@
#include <inttypes.h>
namespace android {
+using hardware::hidl_handle;
using hardware::hidl_string;
using hardware::hidl_vec;
using hardware::hidl_memory;
using namespace hardware::cas::V1_0;
using namespace hardware::cas::native::V1_0;
-typedef hidl::allocator::V1_0::IAllocator TAllocator;
typedef hidl::memory::V1_0::IMemory TMemory;
// I want the expression "y" evaluated even if verbose logging is off.
@@ -210,7 +210,6 @@
bool mSampleEncrypted;
sp<AMessage> mSampleAesKeyItem;
sp<TMemory> mHidlMemory;
- sp<TAllocator> mHidlAllocator;
hardware::cas::native::V1_0::SharedBuffer mDescramblerSrcBuffer;
sp<ABuffer> mDescrambledBuffer;
List<SubSampleInfo> mSubSamples;
@@ -1006,34 +1005,29 @@
sp<ABuffer> newBuffer, newScrambledBuffer;
sp<TMemory> newMem;
if (mScrambled) {
- if (mHidlAllocator == nullptr) {
- mHidlAllocator = TAllocator::getService("ashmem");
- if (mHidlAllocator == nullptr) {
- ALOGE("[stream %d] can't get hidl allocator", mElementaryPID);
- return false;
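+ // Allocate the shared descrambler buffer directly via ashmem and wrap it in a
+ // hidl_memory, removing the dependency on the hidl allocator service.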
+ int fd = ashmem_create_region("mediaATS", neededSize);
+ if (fd < 0) {
+ ALOGE("[stream %d] create_ashmem_region failed for size %zu. FD returned: %d",
+ mElementaryPID, neededSize, fd);
+ return false;
+ }
+
+ native_handle_t* handle = native_handle_create(1 /*numFds*/, 0/*numInts*/);
+ if (handle == nullptr) {
+ ALOGE("[stream %d] failed to create a native_handle_t", mElementaryPID);
+ if (close(fd)) {
+ ALOGE("[stream %d] failed to close ashmem fd. errno: %s", mElementaryPID,
+ strerror(errno));
}
- }
- hidl_memory hidlMemToken;
- bool success;
- auto transStatus = mHidlAllocator->allocate(
- neededSize,
- [&success, &hidlMemToken](
- bool s,
- hidl_memory const& m) {
- success = s;
- hidlMemToken = m;
- });
-
- if (!transStatus.isOk()) {
- ALOGE("[stream %d] hidl allocator failed at the transport: %s",
- mElementaryPID, transStatus.description().c_str());
return false;
}
- if (!success) {
- ALOGE("[stream %d] hidl allocator failed", mElementaryPID);
- return false;
- }
+
+ handle->data[0] = fd;
+ hidl_handle memHandle;
+ memHandle.setTo(handle, true /*shouldOwn*/);
+ hidl_memory hidlMemToken("ashmem", memHandle, neededSize);
+
newMem = mapMemory(hidlMemToken);
if (newMem == nullptr || newMem->getPointer() == nullptr) {
ALOGE("[stream %d] hidl failed to map memory", mElementaryPID);
diff --git a/media/module/mpeg2ts/Android.bp b/media/module/mpeg2ts/Android.bp
index bf762c6..c710ffb 100644
--- a/media/module/mpeg2ts/Android.bp
+++ b/media/module/mpeg2ts/Android.bp
@@ -44,7 +44,6 @@
"libhidlmemory",
"android.hardware.cas.native@1.0",
"android.hidl.memory@1.0",
- "android.hidl.allocator@1.0",
],
header_libs: [
diff --git a/media/module/mpeg2ts/test/Android.bp b/media/module/mpeg2ts/test/Android.bp
index 34a8d3e..cccefac 100644
--- a/media/module/mpeg2ts/test/Android.bp
+++ b/media/module/mpeg2ts/test/Android.bp
@@ -37,9 +37,8 @@
shared_libs: [
"android.hardware.cas@1.0",
"android.hardware.cas.native@1.0",
- "android.hidl.token@1.0-utils",
- "android.hidl.allocator@1.0",
"libcrypto",
+ "libcutils",
"libhidlbase",
"libhidlmemory",
"liblog",
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 0cd6243..0c878c9 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -142,9 +142,10 @@
name: "libaudioflinger",
defaults: [
- "latest_android_media_audio_common_types_cpp_shared",
- "latest_android_hardware_audio_core_sounddose_ndk_shared",
"audioflinger_flags_defaults",
+ "latest_android_hardware_audio_core_sounddose_ndk_export_shared_lib_header",
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
+ "latest_android_media_audio_common_types_cpp_shared",
],
srcs: [
@@ -217,7 +218,6 @@
export_shared_lib_headers: [
"libpermission",
- "android.hardware.audio.core.sounddose-V1-ndk",
],
cflags: [
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 0b73fba..048bec3 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -10760,14 +10760,24 @@
AudioHwDevice *hwDev, AudioStreamOut *output, bool systemReady)
: MmapThread(afThreadCallback, id, hwDev, output->stream, systemReady, true /* isOut */),
mStreamType(AUDIO_STREAM_MUSIC),
- mStreamVolume(1.0),
- mStreamMute(false),
mOutput(output)
{
snprintf(mThreadName, kThreadNameLength, "AudioMmapOut_%X", id);
mChannelCount = audio_channel_count_from_out_mask(mChannelMask);
mMasterVolume = afThreadCallback->masterVolume_l();
mMasterMute = afThreadCallback->masterMute_l();
+
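+ // Initialize per-stream volume and mute state for all audio policy stream types.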
+ for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
+ const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
+ mStreamTypes[stream].volume = 0.0f;
+ mStreamTypes[stream].mute = mAfThreadCallback->streamMute_l(stream);
+ }
+ // Audio patch and call assistant volume are always max
+ mStreamTypes[AUDIO_STREAM_PATCH].volume = 1.0f;
+ mStreamTypes[AUDIO_STREAM_PATCH].mute = false;
+ mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].volume = 1.0f;
+ mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].mute = false;
+
if (mAudioHwDev) {
if (mAudioHwDev->canSetMasterVolume()) {
mMasterVolume = 1.0;
@@ -10824,8 +10834,8 @@
void MmapPlaybackThread::setStreamVolume(audio_stream_type_t stream, float value)
{
audio_utils::lock_guard _l(mutex());
+ mStreamTypes[stream].volume = value;
if (stream == mStreamType) {
- mStreamVolume = value;
broadcast_l();
}
}
@@ -10833,17 +10843,14 @@
float MmapPlaybackThread::streamVolume(audio_stream_type_t stream) const
{
audio_utils::lock_guard _l(mutex());
- if (stream == mStreamType) {
- return mStreamVolume;
- }
- return 0.0f;
+ return mStreamTypes[stream].volume;
}
void MmapPlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted)
{
audio_utils::lock_guard _l(mutex());
+ mStreamTypes[stream].mute = muted;
if (stream == mStreamType) {
- mStreamMute= muted;
broadcast_l();
}
}
@@ -10883,14 +10890,13 @@
{
float volume;
- if (mMasterMute || mStreamMute) {
+ if (mMasterMute || streamMuted_l()) {
volume = 0;
} else {
- volume = mMasterVolume * mStreamVolume;
+ volume = mMasterVolume * streamVolume_l();
}
if (volume != mHalVolFloat) {
-
// Convert volumes from float to 8.24
uint32_t vol = (uint32_t)(volume * (1 << 24));
@@ -10924,8 +10930,8 @@
track->setMetadataHasChanged();
track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
/*muteState=*/{mMasterMute,
- mStreamVolume == 0.f,
- mStreamMute,
+ streamVolume_l() == 0.f,
+ streamMuted_l(),
// TODO(b/241533526): adjust logic to include mute from AppOps
false /*muteFromPlaybackRestricted*/,
false /*muteFromClientVolume*/,
@@ -11038,7 +11044,7 @@
MmapThread::dumpInternals_l(fd, args);
dprintf(fd, " Stream type: %d Stream volume: %f HAL volume: %f Stream mute %d\n",
- mStreamType, mStreamVolume, mHalVolFloat, mStreamMute);
+ mStreamType, streamVolume_l(), mHalVolFloat, streamMuted_l());
dprintf(fd, " Master volume: %f Master mute %d\n", mMasterVolume, mMasterMute);
}
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 3105ad7..0ca79f9 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -2233,12 +2233,17 @@
protected:
void dumpInternals_l(int fd, const Vector<String16>& args) final;
+ float streamVolume_l() const {
+ return mStreamTypes[mStreamType].volume;
+ }
+ bool streamMuted_l() const {
+ return mStreamTypes[mStreamType].mute;
+ }
+ stream_type_t mStreamTypes[AUDIO_STREAM_CNT];
audio_stream_type_t mStreamType;
float mMasterVolume;
- float mStreamVolume;
bool mMasterMute;
- bool mStreamMute;
AudioStreamOut* mOutput;
mediautils::atomic_sp<audio_utils::MelProcessor> mMelProcessor;
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index d7aa5c9..104e01a 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -51,6 +51,7 @@
namespace android {
using binder::Status;
+using media::audio::common::Spatialization;
static const char kDeadlockedString[] = "AudioPolicyService may be deadlocked\n";
static const char kCmdDeadlockedString[] = "AudioPolicyService command thread may be deadlocked\n";
@@ -552,7 +553,7 @@
if (mSpatializer != nullptr) {
// Note: mSpatializer != nullptr => mAudioPolicyManager != nullptr
- if (mSpatializer->getLevel() != media::SpatializationLevel::NONE) {
+ if (mSpatializer->getLevel() != Spatialization::Level::NONE) {
audio_io_handle_t currentOutput = mSpatializer->getOutput();
audio_io_handle_t newOutput;
const audio_attributes_t attr = attributes_initializer(AUDIO_USAGE_MEDIA);
@@ -577,8 +578,8 @@
if (status != NO_ERROR) {
mAudioPolicyManager->releaseSpatializerOutput(newOutput);
}
- } else if (mSpatializer->getLevel() == media::SpatializationLevel::NONE
- && mSpatializer->getOutput() != AUDIO_IO_HANDLE_NONE) {
+ } else if (mSpatializer->getLevel() == Spatialization::Level::NONE &&
+ mSpatializer->getOutput() != AUDIO_IO_HANDLE_NONE) {
audio_io_handle_t output = mSpatializer->detachOutput();
if (output != AUDIO_IO_HANDLE_NONE) {
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 1245b1e..7859c2c 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -43,16 +43,16 @@
namespace android {
-using aidl_utils::statusTFromBinderStatus;
using aidl_utils::binderStatusFromStatusT;
+using aidl_utils::statusTFromBinderStatus;
using android::content::AttributionSourceState;
using binder::Status;
using media::HeadTrackingMode;
using media::Pose3f;
-using media::SpatializationLevel;
-using media::SpatializationMode;
-using media::SpatializerHeadTrackingMode;
using media::SensorPoseProvider;
+using media::audio::common::HeadTracking;
+using media::audio::common::Spatialization;
+using ::android::internal::ToString;
using namespace std::chrono_literals;
@@ -302,7 +302,7 @@
}
mSupportsHeadTracking = supportsHeadTracking[0];
- std::vector<media::SpatializationLevel> spatializationLevels;
+ std::vector<Spatialization::Level> spatializationLevels;
status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_LEVELS,
&spatializationLevels);
if (status != NO_ERROR) {
@@ -316,7 +316,7 @@
ALOGW("%s: ignoring spatializationLevel:%d", __func__, (int)spatializationLevel);
continue;
}
- if (spatializationLevel == media::SpatializationLevel::NONE) {
+ if (spatializationLevel == Spatialization::Level::NONE) {
noneLevelFound = true;
} else {
activeLevelFound = true;
@@ -330,7 +330,7 @@
return BAD_VALUE;
}
- std::vector<media::SpatializationMode> spatializationModes;
+ std::vector<Spatialization::Mode> spatializationModes;
status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES,
&spatializationModes);
if (status != NO_ERROR) {
@@ -373,9 +373,9 @@
// Currently we expose only RELATIVE_WORLD.
// This is a limitation of the head tracking library based on a UX choice.
- mHeadTrackingModes.push_back(SpatializerHeadTrackingMode::DISABLED);
+ mHeadTrackingModes.push_back(HeadTracking::Mode::DISABLED);
if (mSupportsHeadTracking) {
- mHeadTrackingModes.push_back(SpatializerHeadTrackingMode::RELATIVE_WORLD);
+ mHeadTrackingModes.push_back(HeadTracking::Mode::RELATIVE_WORLD);
}
mediametrics::LogItem(mMetricsId)
.set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE)
@@ -440,7 +440,7 @@
void Spatializer::binderDied(__unused const wp<IBinder> &who) {
{
std::lock_guard lock(mLock);
- mLevel = SpatializationLevel::NONE;
+ mLevel = Spatialization::Level::NONE;
mSpatializerCallback.clear();
}
ALOGV("%s", __func__);
@@ -448,20 +448,20 @@
}
// ISpatializer
-Status Spatializer::getSupportedLevels(std::vector<SpatializationLevel> *levels) {
+Status Spatializer::getSupportedLevels(std::vector<Spatialization::Level> *levels) {
ALOGV("%s", __func__);
if (levels == nullptr) {
return binderStatusFromStatusT(BAD_VALUE);
}
- // SpatializationLevel::NONE is already required from the effect or we don't load it.
+ // Spatialization::Level::NONE is always reported: the effect must support it or we don't load it.
levels->insert(levels->end(), mLevels.begin(), mLevels.end());
return Status::ok();
}
-Status Spatializer::setLevel(SpatializationLevel level) {
- ALOGV("%s level %s", __func__, media::toString(level).c_str());
- mLocalLog.log("%s with %s", __func__, media::toString(level).c_str());
- if (level != SpatializationLevel::NONE
+Status Spatializer::setLevel(Spatialization::Level level) {
+ ALOGV("%s level %s", __func__, ToString(level).c_str());
+ mLocalLog.log("%s with %s", __func__, ToString(level).c_str());
+ if (level != Spatialization::Level::NONE
&& std::find(mLevels.begin(), mLevels.end(), level) == mLevels.end()) {
return binderStatusFromStatusT(BAD_VALUE);
}
@@ -488,7 +488,7 @@
return Status::ok();
}
-Status Spatializer::getLevel(SpatializationLevel *level) {
+Status Spatializer::getLevel(Spatialization::Level *level) {
if (level == nullptr) {
return binderStatusFromStatusT(BAD_VALUE);
}
@@ -509,7 +509,7 @@
}
Status Spatializer::getSupportedHeadTrackingModes(
- std::vector<SpatializerHeadTrackingMode>* modes) {
+ std::vector<HeadTracking::Mode>* modes) {
std::lock_guard lock(mLock);
ALOGV("%s", __func__);
if (modes == nullptr) {
@@ -519,24 +519,24 @@
return Status::ok();
}
-Status Spatializer::setDesiredHeadTrackingMode(SpatializerHeadTrackingMode mode) {
- ALOGV("%s mode %s", __func__, media::toString(mode).c_str());
+Status Spatializer::setDesiredHeadTrackingMode(HeadTracking::Mode mode) {
+ ALOGV("%s mode %s", __func__, ToString(mode).c_str());
if (!mSupportsHeadTracking) {
return binderStatusFromStatusT(INVALID_OPERATION);
}
- mLocalLog.log("%s with %s", __func__, media::toString(mode).c_str());
+ mLocalLog.log("%s with %s", __func__, ToString(mode).c_str());
std::lock_guard lock(mLock);
switch (mode) {
- case SpatializerHeadTrackingMode::OTHER:
+ case HeadTracking::Mode::OTHER:
return binderStatusFromStatusT(BAD_VALUE);
- case SpatializerHeadTrackingMode::DISABLED:
+ case HeadTracking::Mode::DISABLED:
mDesiredHeadTrackingMode = HeadTrackingMode::STATIC;
break;
- case SpatializerHeadTrackingMode::RELATIVE_WORLD:
+ case HeadTracking::Mode::RELATIVE_WORLD:
mDesiredHeadTrackingMode = HeadTrackingMode::WORLD_RELATIVE;
break;
- case SpatializerHeadTrackingMode::RELATIVE_SCREEN:
+ case HeadTracking::Mode::RELATIVE_SCREEN:
mDesiredHeadTrackingMode = HeadTrackingMode::SCREEN_RELATIVE;
break;
}
@@ -547,7 +547,7 @@
return Status::ok();
}
-Status Spatializer::getActualHeadTrackingMode(SpatializerHeadTrackingMode *mode) {
+Status Spatializer::getActualHeadTrackingMode(HeadTracking::Mode *mode) {
if (mode == nullptr) {
return binderStatusFromStatusT(BAD_VALUE);
}
@@ -600,8 +600,8 @@
binder->unlinkToDeath(this);
mSpatializerCallback.clear();
- levelChanged = mLevel != SpatializationLevel::NONE;
- mLevel = SpatializationLevel::NONE;
+ levelChanged = mLevel != Spatialization::Level::NONE;
+ mLevel = Spatialization::Level::NONE;
}
if (levelChanged) {
@@ -690,7 +690,7 @@
return Status::ok();
}
-Status Spatializer::getSupportedModes(std::vector<SpatializationMode> *modes) {
+Status Spatializer::getSupportedModes(std::vector<Spatialization::Mode> *modes) {
ALOGV("%s", __func__);
if (modes == nullptr) {
return binderStatusFromStatusT(BAD_VALUE);
@@ -771,7 +771,7 @@
const std::vector<float> headToStage(6, 0.0);
setEffectParameter_l(SPATIALIZER_PARAM_HEAD_TO_STAGE, headToStage);
setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
- std::vector<SpatializerHeadTrackingMode>{SpatializerHeadTrackingMode::DISABLED});
+ std::vector<HeadTracking::Mode>{HeadTracking::Mode::DISABLED});
}
void Spatializer::onHeadToStagePoseMsg(const std::vector<float>& headToStage) {
@@ -804,21 +804,21 @@
void Spatializer::onActualModeChangeMsg(HeadTrackingMode mode) {
ALOGV("%s(%d)", __func__, (int) mode);
sp<media::ISpatializerHeadTrackingCallback> callback;
- SpatializerHeadTrackingMode spatializerMode;
+ HeadTracking::Mode spatializerMode;
{
std::lock_guard lock(mLock);
if (!mSupportsHeadTracking) {
- spatializerMode = SpatializerHeadTrackingMode::DISABLED;
+ spatializerMode = HeadTracking::Mode::DISABLED;
} else {
switch (mode) {
case HeadTrackingMode::STATIC:
- spatializerMode = SpatializerHeadTrackingMode::DISABLED;
+ spatializerMode = HeadTracking::Mode::DISABLED;
break;
case HeadTrackingMode::WORLD_RELATIVE:
- spatializerMode = SpatializerHeadTrackingMode::RELATIVE_WORLD;
+ spatializerMode = HeadTracking::Mode::RELATIVE_WORLD;
break;
case HeadTrackingMode::SCREEN_RELATIVE:
- spatializerMode = SpatializerHeadTrackingMode::RELATIVE_SCREEN;
+ spatializerMode = HeadTracking::Mode::RELATIVE_SCREEN;
break;
default:
LOG_ALWAYS_FATAL("Unknown mode: %d", mode);
@@ -826,11 +826,11 @@
}
mActualHeadTrackingMode = spatializerMode;
if (mEngine != nullptr) {
- if (spatializerMode == SpatializerHeadTrackingMode::DISABLED) {
+ if (spatializerMode == HeadTracking::Mode::DISABLED) {
resetEngineHeadPose_l();
} else {
setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
- std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+ std::vector<HeadTracking::Mode>{spatializerMode});
}
}
callback = mHeadTrackingCallback;
@@ -974,12 +974,12 @@
if (mPoseController != nullptr) {
// TODO(b/253297301, b/255433067) reenable low latency condition check
// for Head Tracking after Bluetooth HAL supports it correctly.
- if (mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
+ if (mNumActiveTracks > 0 && mLevel != Spatialization::Level::NONE
&& mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
&& mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
if (mEngine != nullptr) {
setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
- std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
+ std::vector<HeadTracking::Mode>{mActualHeadTrackingMode});
}
mPoseController->setHeadSensor(mHeadSensor);
mPoseController->setScreenSensor(mScreenSensor);
@@ -996,20 +996,20 @@
if (mOutput != AUDIO_IO_HANDLE_NONE && supportsSetLatencyMode) {
const status_t status =
AudioSystem::setRequestedLatencyMode(mOutput, requestedLatencyMode);
- ALOGD("%s: setRequestedLatencyMode for output thread(%d) to %s returned %d",
- __func__, mOutput, toString(requestedLatencyMode).c_str(), status);
+ ALOGD("%s: setRequestedLatencyMode for output thread(%d) to %s returned %d", __func__,
+ mOutput, toString(requestedLatencyMode).c_str(), status);
}
}
void Spatializer::checkEngineState_l() {
if (mEngine != nullptr) {
- if (mLevel != SpatializationLevel::NONE && mNumActiveTracks > 0) {
+ if (mLevel != Spatialization::Level::NONE && mNumActiveTracks > 0) {
mEngine->setEnabled(true);
setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
- std::vector<SpatializationLevel>{mLevel});
+ std::vector<Spatialization::Level>{mLevel});
} else {
setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
- std::vector<SpatializationLevel>{SpatializationLevel::NONE});
+ std::vector<Spatialization::Level>{Spatialization::Level::NONE});
mEngine->setEnabled(false);
}
}
@@ -1067,21 +1067,21 @@
// 1. Capabilities (mLevels, mHeadTrackingModes, mSpatializationModes, mChannelMasks, etc)
ss.append(prefixSpace).append("Supported levels: [");
for (auto& level : mLevels) {
- base::StringAppendF(&ss, " %s", media::toString(level).c_str());
+ base::StringAppendF(&ss, " %s", ToString(level).c_str());
}
- base::StringAppendF(&ss, "], mLevel: %s", media::toString(mLevel).c_str());
+ base::StringAppendF(&ss, "], mLevel: %s", ToString(mLevel).c_str());
base::StringAppendF(&ss, "\n%smHeadTrackingModes: [", prefixSpace.c_str());
for (auto& mode : mHeadTrackingModes) {
- base::StringAppendF(&ss, " %s", media::toString(mode).c_str());
+ base::StringAppendF(&ss, " %s", ToString(mode).c_str());
}
base::StringAppendF(&ss, "], Desired: %s, Actual %s\n",
media::toString(mDesiredHeadTrackingMode).c_str(),
- media::toString(mActualHeadTrackingMode).c_str());
+ ToString(mActualHeadTrackingMode).c_str());
base::StringAppendF(&ss, "%smSpatializationModes: [", prefixSpace.c_str());
for (auto& mode : mSpatializationModes) {
- base::StringAppendF(&ss, " %s", media::toString(mode).c_str());
+ base::StringAppendF(&ss, " %s", ToString(mode).c_str());
}
ss += "]\n";
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 0d4d3f6..4ef07ce 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -20,10 +20,9 @@
#include <android-base/stringprintf.h>
#include <android/media/BnEffect.h>
#include <android/media/BnSpatializer.h>
-#include <android/media/SpatializationLevel.h>
-#include <android/media/SpatializationMode.h>
-#include <android/media/SpatializerHeadTrackingMode.h>
#include <android/media/audio/common/AudioLatencyMode.h>
+#include <android/media/audio/common/HeadTracking.h>
+#include <android/media/audio/common/Spatialization.h>
#include <audio_utils/SimpleLog.h>
#include <math.h>
#include <media/AudioEffect.h>
@@ -106,16 +105,17 @@
/** ISpatializer, see ISpatializer.aidl */
binder::Status release() override;
- binder::Status getSupportedLevels(std::vector<media::SpatializationLevel>* levels) override;
- binder::Status setLevel(media::SpatializationLevel level) override;
- binder::Status getLevel(media::SpatializationLevel *level) override;
+ binder::Status getSupportedLevels(
+ std::vector<media::audio::common::Spatialization::Level>* levels) override;
+ binder::Status setLevel(media::audio::common::Spatialization::Level level) override;
+ binder::Status getLevel(media::audio::common::Spatialization::Level *level) override;
binder::Status isHeadTrackingSupported(bool *supports);
binder::Status getSupportedHeadTrackingModes(
- std::vector<media::SpatializerHeadTrackingMode>* modes) override;
+ std::vector<media::audio::common::HeadTracking::Mode>* modes) override;
binder::Status setDesiredHeadTrackingMode(
- media::SpatializerHeadTrackingMode mode) override;
+ media::audio::common::HeadTracking::Mode mode) override;
binder::Status getActualHeadTrackingMode(
- media::SpatializerHeadTrackingMode* mode) override;
+ media::audio::common::HeadTracking::Mode* mode) override;
binder::Status recenterHeadTracker() override;
binder::Status setGlobalTransform(const std::vector<float>& screenToStage) override;
binder::Status setHeadSensor(int sensorHandle) override;
@@ -123,7 +123,8 @@
binder::Status setDisplayOrientation(float physicalToLogicalAngle) override;
binder::Status setHingeAngle(float hingeAngle) override;
binder::Status setFoldState(bool folded) override;
- binder::Status getSupportedModes(std::vector<media::SpatializationMode>* modes) override;
+ binder::Status getSupportedModes(
+ std::vector<media::audio::common::Spatialization::Mode>* modes) override;
binder::Status registerHeadTrackingCallback(
const sp<media::ISpatializerHeadTrackingCallback>& callback) override;
binder::Status setParameter(int key, const std::vector<unsigned char>& value) override;
@@ -145,7 +146,10 @@
status_t loadEngineConfiguration(sp<EffectHalInterface> effect);
/** Level getter for use by local classes. */
- media::SpatializationLevel getLevel() const { std::lock_guard lock(mLock); return mLevel; }
+ media::audio::common::Spatialization::Level getLevel() const {
+ std::lock_guard lock(mLock);
+ return mLevel;
+ }
/** Called by audio policy service when the special output mixer dedicated to spatialization
* is opened and the spatializer engine must be created.
@@ -360,7 +364,8 @@
sp<media::ISpatializerHeadTrackingCallback> mHeadTrackingCallback GUARDED_BY(mLock);
/** Requested spatialization level */
- media::SpatializationLevel mLevel GUARDED_BY(mLock) = media::SpatializationLevel::NONE;
+ media::audio::common::Spatialization::Level mLevel GUARDED_BY(mLock) =
+ media::audio::common::Spatialization::Level::NONE;
/** Control logic for head-tracking, etc. */
std::shared_ptr<SpatializerPoseController> mPoseController GUARDED_BY(mLock);
@@ -370,8 +375,8 @@
= media::HeadTrackingMode::STATIC;
/** Last-reported actual head-tracking mode. */
- media::SpatializerHeadTrackingMode mActualHeadTrackingMode GUARDED_BY(mLock)
- = media::SpatializerHeadTrackingMode::DISABLED;
+ media::audio::common::HeadTracking::Mode mActualHeadTrackingMode GUARDED_BY(mLock)
+ = media::audio::common::HeadTracking::Mode::DISABLED;
/** Selected Head pose sensor */
int32_t mHeadSensor GUARDED_BY(mLock) = SpatializerPoseController::INVALID_SENSOR;
@@ -388,9 +393,9 @@
/** Last hinge angle */
float mHingeAngle GUARDED_BY(mLock) = 0.f; // foldable: 0.f is closed, M_PI flat open.
- std::vector<media::SpatializationLevel> mLevels;
- std::vector<media::SpatializerHeadTrackingMode> mHeadTrackingModes;
- std::vector<media::SpatializationMode> mSpatializationModes;
+ std::vector<media::audio::common::Spatialization::Level> mLevels;
+ std::vector<media::audio::common::HeadTracking::Mode> mHeadTrackingModes;
+ std::vector<media::audio::common::Spatialization::Mode> mSpatializationModes;
std::vector<audio_channel_mask_t> mChannelMasks;
bool mSupportsHeadTracking;