Merge "plugin: update controlParams at takeFilters()" into main
diff --git a/Android.bp b/Android.bp
index 72b8721..afb1341 100644
--- a/Android.bp
+++ b/Android.bp
@@ -133,3 +133,19 @@
frozen: true,
}
+
+latest_av_audio_types_aidl = "av-audio-types-aidl-V1"
+
+cc_defaults {
+ name: "latest_av_audio_types_aidl_ndk_shared",
+ shared_libs: [
+ latest_av_audio_types_aidl + "-ndk",
+ ],
+}
+
+cc_defaults {
+ name: "latest_av_audio_types_aidl_ndk_static",
+ static_libs: [
+ latest_av_audio_types_aidl + "-ndk",
+ ],
+}
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index 1cf63b0..e9b757b 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -4,9 +4,30 @@
hidden_api_txt_checksorted_hook = ${REPO_ROOT}/tools/platform-compat/hiddenapi/checksorted_sha.sh ${PREUPLOAD_COMMIT} ${REPO_ROOT}
[Builtin Hooks]
+bpfmt = true
clang_format = true
[Builtin Hooks Options]
+# Enable sorting and limit checks to the following subfolders.
+bpfmt = -s
+ media/audio/
+ media/audioserver/
+ media/libaaudio/
+ media/libaudioclient/
+ media/libaudiofoundation/
+ media/libaudiohal/
+ media/libaudioprocessing/
+ media/libaudiousecasevalidation/
+ media/libeffects/
+ media/libmediametrics/
+ media/libnbaio/
+ media/libnblog/
+ services/audioflinger/
+ services/audioparameterparser/
+ services/audiopolicy/
+ services/medialog/
+ services/oboeservice/
+
# Only turn on clang-format check for the following subfolders.
clang_format = --commit ${PREUPLOAD_COMMIT} --style file --extensions c,h,cc,cpp
media/libaudioclient/tests/
diff --git a/camera/Android.bp b/camera/Android.bp
index 22f1633..4c5b160 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -46,6 +46,7 @@
aconfig_declarations {
name: "camera_platform_flags",
package: "com.android.internal.camera.flags",
+ container: "system",
srcs: ["camera_platform.aconfig"],
}
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index 5d2a263..46a4cf2 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -1,4 +1,5 @@
package: "com.android.internal.camera.flags"
+container: "system"
flag {
namespace: "camera_platform"
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 2c68cef..9f9860b 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -40,6 +40,21 @@
__BEGIN_DECLS
+/*
+ * Note: The following enum values were incorrect and have been updated:
+ * enum old value updated value
+ * ACAMERA_CONTROL_SETTINGS_OVERRIDE ACAMERA_CONTROL_START + 49 ACAMERA_CONTROL_START + 52;
+ * ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES ACAMERA_CONTROL_START + 50 ACAMERA_CONTROL_START + 53;
+ * ACAMERA_CONTROL_AUTOFRAMING ACAMERA_CONTROL_START + 52 ACAMERA_CONTROL_START + 55;
+ * ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE ACAMERA_CONTROL_START + 53 ACAMERA_CONTROL_START + 56;
+ * ACAMERA_CONTROL_AUTOFRAMING_STATE ACAMERA_CONTROL_START + 54 ACAMERA_CONTROL_START + 57;
+ * ACAMERA_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE ACAMERA_CONTROL_START + 55 ACAMERA_CONTROL_START + 58;
+ * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE ACAMERA_CONTROL_START + 56 ACAMERA_CONTROL_START + 59;
+ *
+ * ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES ACAMERA_SCALER_START + 25 ACAMERA_SCALER_START + 26;
+ * ACAMERA_SCALER_RAW_CROP_REGION ACAMERA_SCALER_START + 26 ACAMERA_SCALER_START + 27;
+ */
+
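As an aside, a minimal illustrative check (not part of the patch, assuming a C++ translation unit that includes this header): since these tags are plain enum constants, the corrected offsets can be verified at compile time.

    // Illustrative compile-time checks against the corrected numbering.
    static_assert(ACAMERA_CONTROL_SETTINGS_OVERRIDE == ACAMERA_CONTROL_START + 52,
                  "settings-override tag should use the corrected offset");
    static_assert(ACAMERA_SCALER_RAW_CROP_REGION == ACAMERA_SCALER_START + 27,
                  "RAW crop region tag should use the corrected offset");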
typedef enum acamera_metadata_section {
ACAMERA_COLOR_CORRECTION,
@@ -2147,7 +2162,7 @@
* </ul>
*/
ACAMERA_CONTROL_SETTINGS_OVERRIDE = // int32 (acamera_metadata_enum_android_control_settings_override_t)
- ACAMERA_CONTROL_START + 49,
+ ACAMERA_CONTROL_START + 52,
/**
* <p>List of available settings overrides supported by the camera device that can
* be used to speed up certain controls.</p>
@@ -2173,7 +2188,7 @@
* @see ACAMERA_CONTROL_SETTINGS_OVERRIDE
*/
ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES = // int32[n]
- ACAMERA_CONTROL_START + 50,
+ ACAMERA_CONTROL_START + 53,
/**
* <p>Automatic crop, pan and zoom to keep objects in the center of the frame.</p>
*
@@ -2200,7 +2215,7 @@
* @see ACAMERA_SCALER_CROP_REGION
*/
ACAMERA_CONTROL_AUTOFRAMING = // byte (acamera_metadata_enum_android_control_autoframing_t)
- ACAMERA_CONTROL_START + 52,
+ ACAMERA_CONTROL_START + 55,
/**
* <p>Whether the camera device supports ACAMERA_CONTROL_AUTOFRAMING.</p>
*
@@ -2216,7 +2231,7 @@
* <p>Will be <code>false</code> if auto-framing is not available.</p>
*/
ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE = // byte (acamera_metadata_enum_android_control_autoframing_available_t)
- ACAMERA_CONTROL_START + 53,
+ ACAMERA_CONTROL_START + 56,
/**
* <p>Current state of auto-framing.</p>
*
@@ -2243,7 +2258,7 @@
* @see ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE
*/
ACAMERA_CONTROL_AUTOFRAMING_STATE = // byte (acamera_metadata_enum_android_control_autoframing_state_t)
- ACAMERA_CONTROL_START + 54,
+ ACAMERA_CONTROL_START + 57,
/**
* <p>The operating luminance range of low light boost measured in lux (lx).</p>
*
@@ -2256,7 +2271,7 @@
*
*/
ACAMERA_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE = // float[2]
- ACAMERA_CONTROL_START + 55,
+ ACAMERA_CONTROL_START + 58,
/**
* <p>Current state of the low light boost AE mode.</p>
*
@@ -2276,7 +2291,7 @@
* 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY.</p>
*/
ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE = // byte (acamera_metadata_enum_android_control_low_light_boost_state_t)
- ACAMERA_CONTROL_START + 56,
+ ACAMERA_CONTROL_START + 59,
ACAMERA_CONTROL_END,
/**
@@ -4669,7 +4684,7 @@
* application should leave stream use cases within the session as DEFAULT.</p>
*/
ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES = // int64[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
- ACAMERA_SCALER_START + 25,
+ ACAMERA_SCALER_START + 26,
/**
* <p>The region of the sensor that corresponds to the RAW read out for this
* capture when the stream use case of a RAW stream is set to CROPPED_RAW.</p>
@@ -4722,7 +4737,7 @@
* @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
*/
ACAMERA_SCALER_RAW_CROP_REGION = // int32[4]
- ACAMERA_SCALER_START + 26,
+ ACAMERA_SCALER_START + 27,
ACAMERA_SCALER_END,
/**
diff --git a/drm/libmediadrmrkp/Android.bp b/drm/libmediadrmrkp/Android.bp
index f13eb62..b1a01e4 100644
--- a/drm/libmediadrmrkp/Android.bp
+++ b/drm/libmediadrmrkp/Android.bp
@@ -5,7 +5,7 @@
"src/**/*.cpp",
],
export_include_dirs: [
- "include"
+ "include",
],
shared_libs: [
"libbinder_ndk",
@@ -17,7 +17,7 @@
"android.hardware.drm-V1-ndk",
"android.hardware.security.rkp-V3-ndk",
"libbase",
- "libcppbor_external",
+ "libcppbor",
],
defaults: [
"keymint_use_latest_hal_aidl_ndk_shared",
@@ -42,7 +42,7 @@
"android.hardware.drm-V1-ndk",
"android.hardware.security.rkp-V3-ndk",
"libbase",
- "libcppbor_external",
+ "libcppbor",
"libmediadrmrkp",
],
vendor: true,
@@ -50,4 +50,4 @@
"-Wall",
"-Werror",
],
-}
\ No newline at end of file
+}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/Android.bp b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
index 0b0d46a..9a06bd2 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
@@ -107,6 +107,17 @@
installable: false, // installed in APEX
}
+cc_binary {
+ name: "android.hardware.drm-service-lazy.clearkey.apex",
+ stem: "android.hardware.drm-service-lazy.clearkey",
+ defaults: [
+ "aidl_clearkey_service_defaults",
+ "aidl_clearkey_service_defaults-use-static-deps",
+ ],
+ srcs: ["ServiceLazy.cpp"],
+ installable: false, // installed in APEX
+}
+
phony {
name: "android.hardware.drm@latest-service.clearkey",
required: [
@@ -183,17 +194,63 @@
"android.hardware.drm-service.clearkey.apex.rc",
"android.hardware.drm-service.clearkey.xml"
],
+ overrides: [
+ "android.hardware.drm-service.clearkey",
+ ],
}
prebuilt_etc {
name: "android.hardware.drm-service.clearkey.apex.rc",
- src: "android.hardware.drm-service.clearkey.apex.rc",
+ src: ":gen-android.hardware.drm-service.clearkey.apex.rc",
installable: false,
}
+genrule {
+ name: "gen-android.hardware.drm-service.clearkey.apex.rc",
+ srcs: ["android.hardware.drm-service.clearkey.rc"],
+ out: ["android.hardware.drm-service.clearkey.apex.rc"],
+ cmd: "sed -E 's%/vendor/bin/%/apex/com.android.hardware.drm.clearkey/bin/%' $(in) > $(out)",
+}
+
prebuilt_etc {
name: "android.hardware.drm-service.clearkey.xml",
src: "android.hardware.drm-service.clearkey.xml",
sub_dir: "vintf",
installable: false,
}
+
+apex {
+ name: "com.android.hardware.drm.clearkey.lazy",
+ manifest: "manifest.json",
+ file_contexts: "file_contexts",
+ key: "com.android.hardware.key",
+ certificate: ":com.android.hardware.certificate",
+ vendor: true,
+ updatable: false,
+
+ binaries: [
+ "android.hardware.drm-service-lazy.clearkey.apex",
+ ],
+ prebuilts: [
+ "android.hardware.drm-service-lazy.clearkey.apex.rc",
+ "android.hardware.drm-service.clearkey.xml"
+ ],
+ overrides: [
+ "android.hardware.drm-service.clearkey",
+ "android.hardware.drm-service-lazy.clearkey",
+ "com.android.hardware.drm.clearkey",
+ ],
+}
+
+prebuilt_etc {
+ name: "android.hardware.drm-service-lazy.clearkey.apex.rc",
+ src: ":gen-android.hardware.drm-service-lazy.clearkey.apex.rc",
+ installable: false,
+}
+
+genrule {
+ name: "gen-android.hardware.drm-service-lazy.clearkey.apex.rc",
+ srcs: ["android.hardware.drm-service-lazy.clearkey.rc"],
+ out: ["android.hardware.drm-service-lazy.clearkey.apex.rc"],
+ cmd: "sed -E 's%/vendor/bin/%/apex/com.android.hardware.drm.clearkey/bin/%' $(in) > $(out)",
+}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.apex.rc b/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.apex.rc
deleted file mode 100644
index f4645b3..0000000
--- a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.apex.rc
+++ /dev/null
@@ -1,7 +0,0 @@
-service vendor.drm-clearkey-service /apex/com.android.hardware.drm.clearkey/bin/hw/android.hardware.drm-service.clearkey
- class hal
- user media
- group mediadrm drmrpc
- ioprio rt 4
- task_profiles ProcessCapacityHigh
- interface aidl android.hardware.drm.IDrmFactory/clearkey
diff --git a/media/OWNERS b/media/OWNERS
index 976fb9e..b926075 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -14,5 +14,8 @@
taklee@google.com
wonsik@google.com
+# For TEST_MAPPING tv-presubmit and tv-postsubmit configurations:
+per-file TEST_MAPPING = blindahl@google.com
+
# go/android-fwk-media-solutions for info on areas of ownership.
include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index cd5d354..1a637ac 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -44,5 +44,16 @@
],
"file_patterns": ["(?i)drm|crypto"]
}
+ ],
+ // Postsubmit tests for TV devices
+ "tv-postsubmit": [
+ {
+ "name": "CtsMediaDecoderTestCases",
+ "options": [
+ {
+ "include-filter": "android.media.decoder.cts.DecoderRenderTest"
+ }
+ ]
+ }
]
}
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index b77485e..9f64a28 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -14,6 +14,7 @@
flag {
name: "dynamic_color_aspects"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flag for dynamic color aspect support"
bug: "297914560"
@@ -21,6 +22,7 @@
flag {
name: "hlg_editing"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flag for HLG editing support"
bug: "316397061"
@@ -28,6 +30,7 @@
flag {
name: "in_process_sw_audio_codec"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flag for in-process software audio codec API"
bug: "297922713"
@@ -56,6 +59,7 @@
flag {
name: "null_output_surface"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flag for null output Surface API"
bug: "297920102"
@@ -70,6 +74,7 @@
flag {
name: "region_of_interest"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flag for region of interest API"
bug: "299191092"
@@ -83,6 +88,36 @@
}
flag {
+ name: "set_callback_stall"
+ namespace: "codec_fwk"
+ description: "Bugfix flag for setCallback stall"
+ bug: "326010604"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
+ name: "set_state_early"
+ namespace: "codec_fwk"
+ description: "Bugfix flag for setting state early to avoid a race condition"
+ bug: "298613712"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
+ name: "stop_hal_before_surface"
+ namespace: "codec_fwk"
+ description: "Bugfix flag for setting state early to avoid a race condition"
+ bug: "339247977"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
name: "teamfood"
namespace: "codec_fwk"
description: "Feature flag to track teamfood population"
diff --git a/media/aconfig/mediacodec_flags.aconfig b/media/aconfig/mediacodec_flags.aconfig
index 1dd8081..3cc9a1a 100644
--- a/media/aconfig/mediacodec_flags.aconfig
+++ b/media/aconfig/mediacodec_flags.aconfig
@@ -15,6 +15,7 @@
flag {
name: "codec_importance"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flags for media codec importance"
bug: "297929011"
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index b1d4ad4..2f659a2 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -8,18 +8,21 @@
aconfig_declarations {
name: "com.android.media.audioserver-aconfig",
package: "com.android.media.audioserver",
+ container: "system",
srcs: ["audioserver.aconfig"],
}
aconfig_declarations {
name: "com.android.media.audio-aconfig",
package: "com.android.media.audio",
+ container: "system",
srcs: ["audio.aconfig"],
}
aconfig_declarations {
name: "com.android.media.aaudio-aconfig",
package: "com.android.media.aaudio",
+ container: "system",
srcs: ["aaudio.aconfig"],
}
@@ -63,6 +66,7 @@
aconfig_declarations {
name: "android.media.audio-aconfig",
package: "android.media.audio",
+ container: "system",
srcs: ["audio_framework.aconfig"],
visibility: ["//visibility:private"],
}
@@ -70,6 +74,7 @@
aconfig_declarations {
name: "android.media.audiopolicy-aconfig",
package: "android.media.audiopolicy",
+ container: "system",
srcs: ["audiopolicy_framework.aconfig"],
visibility: ["//visibility:private"],
}
@@ -77,6 +82,7 @@
aconfig_declarations {
name: "android.media.midi-aconfig",
package: "android.media.midi",
+ container: "system",
srcs: ["midi_flags.aconfig"],
visibility: ["//visibility:private"],
}
diff --git a/media/audio/aconfig/aaudio.aconfig b/media/audio/aconfig/aaudio.aconfig
index 7196525..c160109 100644
--- a/media/audio/aconfig/aaudio.aconfig
+++ b/media/audio/aconfig/aaudio.aconfig
@@ -3,6 +3,7 @@
# Please add flags in alphabetical order.
package: "com.android.media.aaudio"
+container: "system"
flag {
name: "sample_rate_conversion"
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index 73cb8ca..8ca4f9e 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -3,6 +3,7 @@
# Please add flags in alphabetical order.
package: "com.android.media.audio"
+container: "system"
flag {
name: "alarm_min_volume_zero"
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index 294e67d..b7ec093 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -4,6 +4,7 @@
# Please add flags in alphabetical order.
package: "android.media.audio"
+container: "system"
flag {
name: "auto_public_volume_api_hardening"
@@ -44,3 +45,12 @@
bug: "298463873"
}
+flag {
+ name: "sco_managed_by_audio"
+ namespace: "media_audio"
+ description: "\
+Enable new implementation of headset profile device connection and\
+SCO audio activation."
+ bug: "265057196"
+}
+
diff --git a/media/audio/aconfig/audiopolicy_framework.aconfig b/media/audio/aconfig/audiopolicy_framework.aconfig
index 833730a..80e64ad 100644
--- a/media/audio/aconfig/audiopolicy_framework.aconfig
+++ b/media/audio/aconfig/audiopolicy_framework.aconfig
@@ -4,6 +4,7 @@
# Please add flags in alphabetical order.
package: "android.media.audiopolicy"
+container: "system"
flag {
name: "audio_policy_update_mixing_rules_api"
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
index 21ea1a2..5c6504f 100644
--- a/media/audio/aconfig/audioserver.aconfig
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -3,6 +3,7 @@
# Please add flags in alphabetical order.
package: "com.android.media.audioserver"
+container: "system"
flag {
name: "direct_track_reprioritization"
diff --git a/media/audio/aconfig/midi_flags.aconfig b/media/audio/aconfig/midi_flags.aconfig
index ff9238a..efb643f 100644
--- a/media/audio/aconfig/midi_flags.aconfig
+++ b/media/audio/aconfig/midi_flags.aconfig
@@ -4,6 +4,7 @@
# Please add flags in alphabetical order.
package: "android.media.midi"
+container: "system"
flag {
name: "virtual_ump"
diff --git a/media/audioaidlconversion/Android.bp b/media/audioaidlconversion/Android.bp
index 07c59c7..2e1eb8c 100644
--- a/media/audioaidlconversion/Android.bp
+++ b/media/audioaidlconversion/Android.bp
@@ -58,10 +58,10 @@
cc_defaults {
name: "audio_aidl_conversion_common_default_cpp",
shared_libs: [
+ "framework-permission-aidl-cpp",
"libbinder",
"libshmemcompat",
"shared-file-region-aidl-cpp",
- "framework-permission-aidl-cpp",
],
export_shared_lib_headers: [
"shared-file-region-aidl-cpp",
@@ -94,8 +94,8 @@
],
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
},
target: {
@@ -148,8 +148,8 @@
"latest_android_media_audio_common_types_ndk_shared",
],
shared_libs: [
- "libbinder_ndk",
"libbase",
+ "libbinder_ndk",
],
static_libs: [
"libaudioaidlcommon",
@@ -182,8 +182,8 @@
],
shared_libs: [
"libaudio_aidl_conversion_common_ndk",
- "libbinder_ndk",
"libbase",
+ "libbinder_ndk",
],
cflags: [
"-DBACKEND_NDK",
@@ -213,8 +213,8 @@
],
shared_libs: [
"libaudio_aidl_conversion_common_ndk",
- "libbinder_ndk",
"libbase",
+ "libbinder_ndk",
],
cflags: [
"-DBACKEND_NDK",
@@ -238,8 +238,8 @@
"latest_android_media_audio_common_types_ndk_shared",
],
shared_libs: [
- "libbinder_ndk",
"libbase",
+ "libbinder_ndk",
],
cflags: [
"-DBACKEND_CPP_NDK",
diff --git a/media/audioaidlconversion/tests/Android.bp b/media/audioaidlconversion/tests/Android.bp
index 88b2cc9..bca4dd0 100644
--- a/media/audioaidlconversion/tests/Android.bp
+++ b/media/audioaidlconversion/tests/Android.bp
@@ -16,8 +16,8 @@
],
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
},
}
@@ -26,8 +26,8 @@
name: "audio_aidl_ndk_conversion_tests",
defaults: [
- "latest_android_media_audio_common_types_ndk_static",
"latest_android_hardware_audio_common_ndk_static",
+ "latest_android_media_audio_common_types_ndk_static",
"libaudio_aidl_conversion_tests_defaults",
],
srcs: ["audio_aidl_ndk_conversion_tests.cpp"],
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index 2030dc7..e3a9645 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -26,11 +26,11 @@
],
defaults: [
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
+ "latest_android_media_audio_common_types_cpp_shared",
"libaaudioservice_dependencies",
"libaudioflinger_dependencies",
"libaudiopolicyservice_dependencies",
- "latest_android_media_audio_common_types_cpp_shared",
- "latest_android_hardware_audio_core_sounddose_ndk_shared",
],
static_libs: [
@@ -71,7 +71,6 @@
"frameworks/av/services/medialog",
"frameworks/av/services/oboeservice", // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
-
],
init_rc: ["audioserver.rc"],
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index c7a1bfd..55847f4 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -169,6 +169,11 @@
"%s: AudioSystem already has an AudioFlinger instance!", __func__);
const auto aps = sp<AudioPolicyService>::make();
ALOGD("%s: AudioPolicy created", __func__);
+ ALOGW_IF(AudioSystem::setLocalAudioPolicyService(aps) != OK,
+ "%s: AudioSystem already has an AudioPolicyService instance!", __func__);
+
+ // Start initialization of internally managed audio objects such as Device Effects.
+ aps->onAudioSystemReady();
// Add AudioFlinger and AudioPolicy to ServiceManager.
sp<IServiceManager> sm = defaultServiceManager();
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index 7c9d3e8..722b13a 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -29,6 +29,12 @@
#include "C2SoftAomEnc.h"
+/* Quantization param values defined by the spec */
+#define AOM_QP_MIN 0
+#define AOM_QP_MAX 63
+#define AOM_QP_DEFAULT_MIN AOM_QP_MIN
+#define AOM_QP_DEFAULT_MAX AOM_QP_MAX
+
namespace android {
constexpr char COMPONENT_NAME[] = "c2.android.av1.encoder";
@@ -50,11 +56,13 @@
0u, (uint64_t)C2MemoryUsage::CPU_READ))
.build());
+ // Odd dimension support in encoders requires Android V and above
+ size_t stepSize = isAtLeastV() ? 1 : 2;
addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
.withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
.withFields({
- C2F(mSize, width).inRange(2, 2048, 2),
- C2F(mSize, height).inRange(2, 2048, 2),
+ C2F(mSize, width).inRange(2, 2048, stepSize),
+ C2F(mSize, height).inRange(2, 2048, stepSize),
})
.withSetter(SizeSetter)
.build());
@@ -173,6 +181,19 @@
.inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
.withSetter(CodedColorAspectsSetter, mColorAspects)
.build());
+
+ addParameter(
+ DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+ .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+ 0 /* flexCount */, 0u /* stream */))
+ .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+ {C2Config::I_FRAME, C2Config::P_FRAME}),
+ C2F(mPictureQuantization, m.values[0].min).inRange(
+ AOM_QP_DEFAULT_MIN, AOM_QP_DEFAULT_MAX),
+ C2F(mPictureQuantization, m.values[0].max).inRange(
+ AOM_QP_DEFAULT_MIN, AOM_QP_DEFAULT_MAX)})
+ .withSetter(PictureQuantizationSetter)
+ .build());
}
C2R C2SoftAomEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me) {
@@ -305,6 +326,54 @@
return C2R::Ok();
}
+C2R C2SoftAomEnc::IntfImpl::PictureQuantizationSetter(
+ bool mayBlock, C2P<C2StreamPictureQuantizationTuning::output>& me) {
+ (void)mayBlock;
+ int32_t iMin = AOM_QP_DEFAULT_MIN, pMin = AOM_QP_DEFAULT_MIN;
+ int32_t iMax = AOM_QP_DEFAULT_MAX, pMax = AOM_QP_DEFAULT_MAX;
+ for (size_t i = 0; i < me.v.flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+ // layerMin is clamped to [AOM_QP_MIN, layerMax] to avoid error
+ // cases where layer.min > layer.max
+ int32_t layerMax = std::clamp(layer.max, AOM_QP_MIN, AOM_QP_MAX);
+ int32_t layerMin = std::clamp(layer.min, AOM_QP_MIN, layerMax);
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ iMax = layerMax;
+ iMin = layerMin;
+ ALOGV("iMin %d iMax %d", iMin, iMax);
+ } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+ pMax = layerMax;
+ pMin = layerMin;
+ ALOGV("pMin %d pMax %d", pMin, pMax);
+ }
+ }
+ ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d",
+ iMin, iMax, pMin, pMax);
+
+ // aom library takes same range for I/P picture type
+ int32_t maxFrameQP = std::min(iMax, pMax);
+ int32_t minFrameQP = std::max(iMin, pMin);
+ if (minFrameQP > maxFrameQP) {
+ minFrameQP = maxFrameQP;
+ }
+ // put them back into the structure
+ for (size_t i = 0; i < me.v.flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ me.set().m.values[i].max = maxFrameQP;
+ me.set().m.values[i].min = minFrameQP;
+ }
+ else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+ me.set().m.values[i].max = maxFrameQP;
+ me.set().m.values[i].min = minFrameQP;
+ }
+ }
+ ALOGV("PictureQuantizationSetter(exit): minFrameQP = %d maxFrameQP = %d",
+ minFrameQP, maxFrameQP);
+ return C2R::Ok();
+}
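A worked example of this setter (derived from the code above): if a client requests an I-frame QP range of [10, 40] and a P-frame range of [20, 50], both within [AOM_QP_MIN, AOM_QP_MAX] = [0, 63], the common frame range becomes [max(10, 20), min(40, 50)] = [20, 40], and both entries are rewritten to that range, since libaom applies a single QP range to all frame types.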
+
uint32_t C2SoftAomEnc::IntfImpl::getLevel_l() const {
return mProfileLevel->level - LEVEL_AV1_2;
}
@@ -556,6 +625,7 @@
mQuality = mIntf->getQuality_l();
mComplexity = mIntf->getComplexity_l();
mAV1EncLevel = mIntf->getLevel_l();
+ mQpBounds = mIntf->getPictureQuantization_l();
}
@@ -573,6 +643,18 @@
break;
}
+ if (mQpBounds->flexCount() > 0) {
+ // read min max qp for sequence
+ for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ mMaxQuantizer = layer.max;
+ mMinQuantizer = layer.min;
+ break;
+ }
+ }
+ }
+
mCodecInterface = aom_codec_av1_cx();
if (!mCodecInterface) goto CleanUp;
@@ -605,7 +687,7 @@
mCodecConfiguration->g_timebase.den = 1000000;
// rc_target_bitrate is in kbps, mBitrate in bps
mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
- mCodecConfiguration->rc_end_usage = mBitrateControlMode == AOM_Q ? AOM_Q : AOM_CBR;
+ mCodecConfiguration->rc_end_usage = mBitrateControlMode;
// Disable frame drop - not allowed in MediaCodec now.
mCodecConfiguration->rc_dropframe_thresh = 0;
// Disable lagged encoding.
diff --git a/media/codec2/components/aom/C2SoftAomEnc.h b/media/codec2/components/aom/C2SoftAomEnc.h
index 7e5ea63..067b04f 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.h
+++ b/media/codec2/components/aom/C2SoftAomEnc.h
@@ -109,6 +109,7 @@
std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> mQpBounds;
aom_codec_err_t setupCodecParameters();
};
@@ -126,6 +127,8 @@
const C2P<C2StreamPictureSizeInfo::input>& size,
const C2P<C2StreamFrameRateInfo::output>& frameRate,
const C2P<C2StreamBitrateInfo::output>& bitrate);
+ static C2R PictureQuantizationSetter(bool mayBlock,
+ C2P<C2StreamPictureQuantizationTuning::output> &me);
// unsafe getters
std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
@@ -150,6 +153,9 @@
std::shared_ptr<C2StreamPixelFormatInfo::input> getPixelFormat_l() const {
return mPixelFormat;
}
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+ return mPictureQuantization;
+ }
uint32_t getSyncFramePeriod() const;
static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me);
static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
@@ -171,6 +177,7 @@
std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
std::shared_ptr<C2StreamPixelFormatInfo::input> mPixelFormat;
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
};
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
index 76680a3..4ec26d6 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -243,10 +243,17 @@
.build());
addParameter(
+ DefineParam(mLowLatencyMode, C2_PARAMKEY_LOW_LATENCY_MODE)
+ .withDefault(new C2GlobalLowLatencyModeTuning(0))
+ .withFields({C2F(mLowLatencyMode, value).oneOf({0,1})})
+ .withSetter(Setter<decltype(*mLowLatencyMode)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
.withDefault(new C2PortActualDelayTuning::output(kOutputDelay))
.withFields({C2F(mActualOutputDelay, value).inRange(0, kOutputDelay)})
- .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
+ .withSetter(ActualOutputDelaySetter, mLowLatencyMode)
.build());
}
@@ -365,6 +372,10 @@
return mPixelFormat;
}
+ std::shared_ptr<C2PortActualDelayTuning::output> getActualOutputDelay_l() const {
+ return mActualOutputDelay;
+ }
+
static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
(void)mayBlock;
if (me.v.mastering.red.x > 1) {
@@ -406,6 +417,13 @@
return C2R::Ok();
}
+ static C2R ActualOutputDelaySetter(bool mayBlock, C2P<C2PortActualDelayTuning::output>& me,
+ const C2P<C2GlobalLowLatencyModeTuning>& lowLatencyMode) {
+ (void)mayBlock;
+ me.set().value = lowLatencyMode.v.value ? 1 : kOutputDelay;
+ return C2R::Ok();
+ }
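In effect (as a reading of the change, not additional code in the patch): when the client enables C2_PARAMKEY_LOW_LATENCY_MODE, ActualOutputDelaySetter collapses the actual output delay to 1, and the initialization path below passes that value to dav1d as lib_settings.max_frame_delay instead of the fixed kOutputDelay, which should trade frame-threading throughput for lower latency.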
+
private:
std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
@@ -419,6 +437,7 @@
std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
+ std::shared_ptr<C2GlobalLowLatencyModeTuning> mLowLatencyMode;
};
C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
@@ -516,6 +535,7 @@
{
IntfImpl::Lock lock = mIntf->lock();
mPixelFormatInfo = mIntf->getPixelFormat_l();
+ mActualOutputDelayInfo = mIntf->getActualOutputDelay_l();
}
const char* version = dav1d_version();
@@ -529,7 +549,7 @@
android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
if (numThreads > 0) lib_settings.n_threads = numThreads;
- lib_settings.max_frame_delay = kOutputDelay;
+ lib_settings.max_frame_delay = mActualOutputDelayInfo->value;
int res = 0;
if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.h b/media/codec2/components/dav1d/C2SoftDav1dDec.h
index 5d2a725..6008325 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.h
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.h
@@ -62,6 +62,7 @@
// configurations used by component in process
// (TODO: keep this in intf but make them internal only)
std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+ std::shared_ptr<C2PortActualDelayTuning::output> mActualOutputDelayInfo;
uint32_t mHalPixelFormat;
uint32_t mWidth;
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 591d56d..7b63e75 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -21,6 +21,7 @@
#include <audio_utils/primitives.h>
#include <media/stagefright/foundation/MediaDefs.h>
+#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <SimpleC2Interface.h>
@@ -81,10 +82,6 @@
FLAC_COMPRESSION_LEVEL_MIN, FLAC_COMPRESSION_LEVEL_MAX)})
.withSetter(Setter<decltype(*mComplexity)>::NonStrictValueWithNoDeps)
.build());
- addParameter(
- DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
- .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 4608))
- .build());
addParameter(
DefineParam(mPcmEncodingInfo, C2_PARAMKEY_PCM_ENCODING)
@@ -96,6 +93,26 @@
})
.withSetter((Setter<decltype(*mPcmEncodingInfo)>::StrictValueWithNoDeps))
.build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMaxBlockSize))
+ .withFields({
+ C2F(mInputMaxBufSize, value).any(),
+ })
+ .withSetter(MaxInputSizeSetter, mChannelCount, mPcmEncodingInfo)
+ .build());
+ }
+
+ static C2R MaxInputSizeSetter(bool mayBlock,
+ C2P<C2StreamMaxBufferSizeInfo::input> &me,
+ const C2P<C2StreamChannelCountInfo::input> &channelCount,
+ const C2P<C2StreamPcmEncodingInfo::input> &pcmEncoding) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ int bytesPerSample = pcmEncoding.v.value == C2Config::PCM_FLOAT ? 4 : 2;
+ me.set().value = kMaxBlockSize * bytesPerSample * channelCount.v.value;
+ return res;
}
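For example, under the setter above, a stereo stream with PCM_FLOAT encoding yields a suggested maximum input buffer size of kMaxBlockSize * 4 * 2 = 4608 * 4 * 2 = 36864 bytes, while 16-bit stereo yields 4608 * 2 * 2 = 18432 bytes.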
uint32_t getSampleRate() const { return mSampleRate->value; }
@@ -446,6 +463,9 @@
mBlockSize = FLAC__stream_encoder_get_blocksize(mFlacStreamEncoder);
+ // Ensure the encoder's configured block size does not exceed kMaxBlockSize
+ CHECK(mBlockSize <= kMaxBlockSize);
+
ALOGV("encoder successfully configured");
return OK;
}
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
index a971ab5..1f3be3c 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.h
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -63,7 +63,8 @@
std::shared_ptr<IntfImpl> mIntf;
const unsigned int kInBlockSize = 1152;
- const unsigned int kMaxNumChannels = 2;
+ static constexpr unsigned int kMaxNumChannels = 2;
+ static constexpr unsigned int kMaxBlockSize = 4608;
FLAC__StreamEncoder* mFlacStreamEncoder;
FLAC__int32* mInputBufferPcm32;
std::shared_ptr<C2LinearBlock> mOutputBlock;
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 2137964..fd9488b 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -469,11 +469,12 @@
mInitialized = false;
}
+ bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
+
if (!mInitialized) {
uint8_t *vol_data[1]{};
int32_t vol_size = 0;
- bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
if (codecConfig || volHeader) {
vol_data[0] = bitstream;
vol_size = inSize;
@@ -512,10 +513,11 @@
return;
}
}
- if (codecConfig) {
- fillEmptyWork(work);
- return;
- }
+ }
+
+ if (codecConfig) {
+ fillEmptyWork(work);
+ return;
}
size_t inPos = 0;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index cdc3be0..40bb26e 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -29,7 +29,6 @@
#include <opus_multistream.h>
}
-#define DEFAULT_FRAME_DURATION_MS 20
namespace android {
namespace {
@@ -38,7 +37,6 @@
} // namespace
-static const int kMaxNumChannelsSupported = 2;
class C2SoftOpusEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
public:
@@ -248,10 +246,11 @@
mAnchorTimeStamp = 0;
mProcessedSamples = 0;
mFilledLen = 0;
- mFrameDurationMs = DEFAULT_FRAME_DURATION_MS;
+ mFrameDurationMs = kDefaultFrameDurationMs;
if (!mInputBufferPcm16) {
+ size_t frameSize = (mFrameDurationMs * kMaxSampleRateSupported) / 1000;
mInputBufferPcm16 =
- (int16_t*)malloc(kFrameSize * kMaxNumChannels * sizeof(int16_t));
+ (int16_t*)malloc(frameSize * kMaxNumChannelsSupported * sizeof(int16_t));
}
if (!mInputBufferPcm16) return C2_NO_MEMORY;
@@ -368,7 +367,9 @@
}
C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
- err = pool->fetchLinearBlock(kMaxPayload, usage, &mOutputBlock);
+ int outCapacity =
+ kMaxPayload * ((inSize + mNumPcmBytesPerInputFrame) / mNumPcmBytesPerInputFrame);
+ err = pool->fetchLinearBlock(outCapacity, usage, &mOutputBlock);
if (err != C2_OK) {
ALOGE("fetchLinearBlock for Output failed with status %d", err);
work->result = C2_NO_MEMORY;
@@ -497,11 +498,11 @@
uint8_t* outPtr = wView.data() + mBytesEncoded;
int encodedBytes =
opus_multistream_encode(mEncoder, mInputBufferPcm16,
- mNumSamplesPerFrame, outPtr, kMaxPayload - mBytesEncoded);
+ mNumSamplesPerFrame, outPtr, outCapacity - mBytesEncoded);
ALOGV("encoded %i Opus bytes from %zu PCM bytes", encodedBytes,
processSize);
- if (encodedBytes < 0 || encodedBytes > (kMaxPayload - mBytesEncoded)) {
+ if (encodedBytes < 0 || encodedBytes > (outCapacity - mBytesEncoded)) {
ALOGE("opus_encode failed, encodedBytes : %d", encodedBytes);
mSignalledError = true;
work->result = C2_CORRUPTED;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.h b/media/codec2/components/opus/C2SoftOpusEnc.h
index 733a6bc..2c9f5e5 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.h
+++ b/media/codec2/components/opus/C2SoftOpusEnc.h
@@ -45,12 +45,13 @@
uint32_t drainMode,
const std::shared_ptr<C2BlockPool> &pool) override;
private:
- /* OPUS_FRAMESIZE_20_MS */
- const int kFrameSize = 960;
- const int kMaxSampleRate = 48000;
- const int kMinSampleRate = 8000;
- const int kMaxPayload = (4000 * kMaxSampleRate) / kMinSampleRate;
- const int kMaxNumChannels = 8;
+ static const int kMaxNumChannelsSupported = 2;
+ static const int kMaxSampleRateSupported = 48000;
+ static const int kDefaultFrameDurationMs = 20;
+ // For a frame duration of 20 ms, the recommended payload size is 1276 bytes as per
+ // https://www.opus-codec.org/docs/html_api/group__opusencoder.html.
+ // For 40 ms, 60 ms, etc., the payload size scales proportionally: 1276 x 2, 1276 x 3, ...
+ static const int kMaxPayload = 1500; // from tests/test_opus_encode.c
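As a rough worked example (assuming stereo 16-bit input at the 48 kHz maximum rate, so one 20 ms input frame is about 960 * 2 * 2 = 3840 PCM bytes): for an input of 11520 bytes, the new outCapacity in C2SoftOpusEnc.cpp becomes kMaxPayload * ((11520 + 3840) / 3840) = 1500 * 4 = 6000 bytes, instead of the previous fixed kMaxPayload allocation.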
std::shared_ptr<IntfImpl> mIntf;
std::shared_ptr<C2LinearBlock> mOutputBlock;
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index dab7b89..318f093 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -446,6 +446,7 @@
{
IntfImpl::Lock lock = mIntf->lock();
mPixelFormatInfo = mIntf->getPixelFormat_l();
+ mColorAspects = mIntf->getDefaultColorAspects_l();
}
mWidth = 320;
@@ -591,6 +592,41 @@
return;
}
+ // handle dynamic config parameters
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects =
+ mIntf->getDefaultColorAspects_l();
+ lock.unlock();
+
+ if (mColorAspects->range != defaultColorAspects->range ||
+ mColorAspects->primaries != defaultColorAspects->primaries ||
+ mColorAspects->matrix != defaultColorAspects->matrix ||
+ mColorAspects->transfer != defaultColorAspects->transfer) {
+
+ mColorAspects->range = defaultColorAspects->range;
+ mColorAspects->primaries = defaultColorAspects->primaries;
+ mColorAspects->matrix = defaultColorAspects->matrix;
+ mColorAspects->transfer = defaultColorAspects->transfer;
+
+ C2StreamColorAspectsTuning::output colorAspect(0u, defaultColorAspects->range,
+ defaultColorAspects->primaries, defaultColorAspects->transfer,
+ defaultColorAspects->matrix);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config({&colorAspect}, C2_MAY_BLOCK, &failures);
+ if (err == C2_OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(colorAspect));
+ } else {
+ ALOGE("Config update colorAspect failed");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ }
+
size_t inOffset = 0u;
size_t inSize = 0u;
C2ReadView rView = mDummyReadView;
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index e9d6dc9..93cc213 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -66,6 +66,7 @@
// configurations used by component in process
// (TODO: keep this in intf but make them internal only)
std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+ std::shared_ptr<C2StreamColorAspectsTuning::output> mColorAspects;
std::shared_ptr<IntfImpl> mIntf;
vpx_codec_ctx_t *mCodecCtx;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 2a33048..1c5772f 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -22,6 +22,7 @@
#include <media/hardware/VideoAPI.h>
#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
#include <C2Debug.h>
#include "C2SoftVpxEnc.h"
@@ -63,12 +64,14 @@
0u, (uint64_t)C2MemoryUsage::CPU_READ))
.build());
+ // Odd dimension support in encoders requires Android V and above
+ size_t stepSize = isAtLeastV() ? 1 : 2;
addParameter(
DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
.withDefault(new C2StreamPictureSizeInfo::input(0u, 64, 64))
.withFields({
- C2F(mSize, width).inRange(2, 2048, 2),
- C2F(mSize, height).inRange(2, 2048, 2),
+ C2F(mSize, width).inRange(2, 2048, stepSize),
+ C2F(mSize, height).inRange(2, 2048, stepSize),
})
.withSetter(SizeSetter)
.build());
@@ -351,12 +354,9 @@
return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
}
-C2R C2SoftVpxEnc::IntfImpl::PictureQuantizationSetter(bool mayBlock,
- C2P<C2StreamPictureQuantizationTuning::output>
- &me) {
+C2R C2SoftVpxEnc::IntfImpl::PictureQuantizationSetter(
+ bool mayBlock, C2P<C2StreamPictureQuantizationTuning::output>& me) {
(void)mayBlock;
- // these are the ones we're going to set, so want them to default
- // to the DEFAULT values for the codec
int32_t iMin = VPX_QP_DEFAULT_MIN, pMin = VPX_QP_DEFAULT_MIN;
int32_t iMax = VPX_QP_DEFAULT_MAX, pMax = VPX_QP_DEFAULT_MAX;
for (size_t i = 0; i < me.v.flexCount(); ++i) {
@@ -379,8 +379,8 @@
iMin, iMax, pMin, pMax);
// vpx library takes same range for I/P picture type
- int32_t maxFrameQP = std::min({iMax, pMax});
- int32_t minFrameQP = std::max({iMin, pMin});
+ int32_t maxFrameQP = std::min(iMax, pMax);
+ int32_t minFrameQP = std::max(iMin, pMin);
if (minFrameQP > maxFrameQP) {
minFrameQP = maxFrameQP;
}
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 785cdf2..e6782a9 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -164,6 +164,9 @@
kParamIndexLargeFrame,
kParamIndexAccessUnitInfos, // struct
+ /* Region of Interest Encoding parameters */
+ kParamIndexQpOffsetMapBuffer, // info-buffer, used to signal qp-offset map for a frame
+
// deprecated
kParamIndexDelayRequest = kParamIndexDelay | C2Param::CoreIndex::IS_REQUEST_FLAG,
@@ -201,6 +204,8 @@
kParamIndexPictureQuantization,
kParamIndexHdrDynamicMetadata,
kParamIndexHdrFormat,
+ kParamIndexQpOffsetRect,
+ kParamIndexQpOffsetRects,
/* ------------------------------------ video components ------------------------------------ */
@@ -1394,6 +1399,47 @@
constexpr char C2_PARAMKEY_VUI_ROTATION[] = "coded.vui.rotation";
/**
+ * Region of Interest of an image/video frame communicated as an array of C2QpOffsetRectStruct
+ *
+ * Fields width, height, left and top of C2QpOffsetRectStruct form a bounding box contouring RoI.
+ * Field qpOffset of C2QpOffsetRectStruct indicates the qp bias to be used for quantizing the
+ * coding units of the bounding box.
+ *
+ * If an RoI rect is not valid, that is, its bounding box width or height is < 0, components
+ * may silently ignore the configuration. If an RoI rect extends outside the frame boundaries,
+ * the rect shall be clamped to the frame boundaries.
+ *
+ * The scope of this key is throughout the encoding session until it is reconfigured with a
+ * different value.
+ *
+ * The number of elements in the C2StreamQpOffsetRects array is not limited by the C2
+ * specification; however, components may mandate a limit. Implementations may drop the
+ * rectangles that are beyond the supported limits, so it is preferable to place the rects in
+ * descending order of importance. Consequently, if bounding boxes overlap, the qp offset of the
+ * most preferred (earlier) rectangle is used to quantize the block.
+ */
+struct C2QpOffsetRectStruct : C2Rect {
+ C2QpOffsetRectStruct() = default;
+ C2QpOffsetRectStruct(const C2Rect &rect, int32_t offset) : C2Rect(rect), qpOffset(offset) {}
+
+ bool operator==(const C2QpOffsetRectStruct &) = delete;
+ bool operator!=(const C2QpOffsetRectStruct &) = delete;
+
+ int32_t qpOffset;
+
+ DEFINE_AND_DESCRIBE_C2STRUCT(QpOffsetRect)
+ C2FIELD(width, "width")
+ C2FIELD(height, "height")
+ C2FIELD(left, "left")
+ C2FIELD(top, "top")
+ C2FIELD(qpOffset, "qp-offset")
+};
+
+typedef C2StreamParam<C2Info, C2SimpleArrayStruct<C2QpOffsetRectStruct>, kParamIndexQpOffsetRects>
+ C2StreamQpOffsetRects;
+constexpr char C2_PARAMKEY_QP_OFFSET_RECTS[] = "coding.qp-offset-rects";
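A minimal usage sketch of the new parameter (not part of the patch; the output-port flavor and the variable names here are assumptions, following the flex-param AllocShared pattern used elsewhere in this change):

    // Two RoI rectangles: the first (more important) biases QP down by 4,
    // the second allows coarser quantization with an offset of +6.
    std::shared_ptr<C2StreamQpOffsetRects::output> roi =
            C2StreamQpOffsetRects::output::AllocShared(2 /* flexCount */, 0u /* stream */);
    roi->m.values[0] = C2QpOffsetRectStruct(C2Rect(256, 256).at(64, 64), -4);
    roi->m.values[1] = C2QpOffsetRectStruct(C2Rect(128, 128).at(512, 0), 6);
    // The param would then be handed to the component through its usual config() call.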
+
+/**
* Pixel (sample) aspect ratio.
*/
typedef C2StreamParam<C2Info, C2PictureSizeStruct, kParamIndexPixelAspectRatio>
diff --git a/media/codec2/hal/aidl/Android.bp b/media/codec2/hal/aidl/Android.bp
index 48b6e21..e16e2b1 100644
--- a/media/codec2/hal/aidl/Android.bp
+++ b/media/codec2/hal/aidl/Android.bp
@@ -8,6 +8,7 @@
name: "libcodec2_aidl_client",
defaults: [
+ "aconfig_lib_cc_static_link.defaults",
"libcodec2_hal_selection",
],
@@ -65,6 +66,7 @@
],
defaults: [
+ "aconfig_lib_cc_static_link.defaults",
"libcodec2_hal_selection",
],
diff --git a/media/codec2/hal/aidl/ComponentInterface.cpp b/media/codec2/hal/aidl/ComponentInterface.cpp
index 8ae9fa8..8c7a986 100644
--- a/media/codec2/hal/aidl/ComponentInterface.cpp
+++ b/media/codec2/hal/aidl/ComponentInterface.cpp
@@ -79,6 +79,26 @@
}
c2_status_t err2 = C2_OK;
if (paramsToLargeFrameIntf.size() > 0) {
+ C2ComponentKindSetting kind;
+ C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+ c2_status_t err = mIntf->query_vb(
+ {&kind, &maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+ if ((err == C2_OK) && (kind.value == C2Component::KIND_ENCODER)) {
+ for (int i = 0 ; i < paramsToLargeFrameIntf.size(); i++) {
+ if (paramsToLargeFrameIntf[i]->index() ==
+ C2LargeFrame::output::PARAM_TYPE) {
+ C2LargeFrame::output *lfp = C2LargeFrame::output::From(
+ paramsToLargeFrameIntf[i]);
+ // This assumes a worst-case compression ratio of 1:1.
+ // In no case should the encoder produce more output than
+ // the input provided to it in a single call.
+ if (lfp && (lfp->maxSize < maxInputSize.value)) {
+ lfp->maxSize = maxInputSize.value;
+ }
+ break;
+ }
+ }
+ }
err2 = mMultiAccessUnitIntf->config(
paramsToLargeFrameIntf, mayBlock, failures);
}
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 01b0678..dbbabfe 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -173,7 +173,7 @@
}
GraphicsTracker::GraphicsTracker(int maxDequeueCount)
- : mBufferCache(new BufferCache()), mMaxDequeue{maxDequeueCount},
+ : mBufferCache(new BufferCache()), mNumDequeueing{0}, mMaxDequeue{maxDequeueCount},
mMaxDequeueCommitted{maxDequeueCount},
mDequeueable{maxDequeueCount},
mTotalDequeued{0}, mTotalCancelled{0}, mTotalDropped{0}, mTotalReleased{0},
@@ -235,6 +235,7 @@
const sp<IGraphicBufferProducer>& igbp, uint32_t generation) {
// TODO: wait until operations to previous IGBP is completed.
std::shared_ptr<BufferCache> prevCache;
+ int prevDequeueRequested = 0;
int prevDequeueCommitted;
std::unique_lock<std::mutex> cl(mConfigLock);
@@ -243,6 +244,9 @@
mInConfig = true;
prevCache = mBufferCache;
prevDequeueCommitted = mMaxDequeueCommitted;
+ if (mMaxDequeueRequested.has_value()) {
+ prevDequeueRequested = mMaxDequeueRequested.value();
+ }
}
// NOTE: Switching to the same surface is blocked from MediaCodec.
// Switching to the same surface might not work if tried, since disconnect()
@@ -263,6 +267,11 @@
mInConfig = false;
return C2_BAD_VALUE;
}
+ ALOGD("new surface in configuration: maxDequeueRequested(%d), maxDequeueCommitted(%d)",
+ prevDequeueRequested, prevDequeueCommitted);
+ if (prevDequeueRequested > 0 && prevDequeueRequested > prevDequeueCommitted) {
+ prevDequeueCommitted = prevDequeueRequested;
+ }
if (igbp) {
ret = igbp->setMaxDequeuedBufferCount(prevDequeueCommitted);
if (ret != ::android::OK) {
@@ -280,6 +289,34 @@
std::unique_lock<std::mutex> l(mLock);
mInConfig = false;
mBufferCache = newCache;
+ // {@code dequeued} is the number of currently dequeued buffers.
+ // {@code prevDequeueCommitted} is max dequeued buffer at any moment
+ // from the new surface.
+ // {@code newDequeueable} is hence the current # of dequeueable buffers
+ // if no change occurs.
+ int dequeued = mDequeued.size() + mNumDequeueing;
+ int newDequeueable = prevDequeueCommitted - dequeued;
+ if (newDequeueable < 0) {
+ // This should not happen.
+ // If it does, we respect the value and try to continue.
+ ALOGE("calculated new dequeueable is negative: %d max(%d),dequeued(%d)",
+ newDequeueable, prevDequeueCommitted, dequeued);
+ }
+
+ if (mMaxDequeueRequested.has_value() && mMaxDequeueRequested == prevDequeueCommitted) {
+ mMaxDequeueRequested.reset();
+ }
+ mMaxDequeue = mMaxDequeueCommitted = prevDequeueCommitted;
+
+ int delta = newDequeueable - mDequeueable;
+ if (delta > 0) {
+ writeIncDequeueableLocked(delta);
+ } else if (delta < 0) {
+ drainDequeueableLocked(-delta);
+ }
+ ALOGV("new surfcace dequeueable %d(delta %d), maxDequeue %d",
+ newDequeueable, delta, mMaxDequeue);
+ mDequeueable = newDequeueable;
}
return C2_OK;
}
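A small numeric example of the accounting above: if the new surface is committed with a maximum of 10 dequeued buffers while 4 buffers are currently dequeued or dequeueing, newDequeueable is 10 - 4 = 6; if mDequeueable was previously 2, the positive delta of 4 is pushed through writeIncDequeueableLocked(), presumably so that waiters on the waitable object observe the extra capacity, and mDequeueable is then set to 6.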
@@ -529,6 +566,7 @@
ALOGE("writing end for the waitable object seems to be closed");
return C2_BAD_STATE;
}
+ mNumDequeueing++;
mDequeueable--;
*cache = mBufferCache;
return C2_OK;
@@ -543,6 +581,7 @@
bool cached, int slot, const sp<Fence> &fence,
std::shared_ptr<BufferItem> *pBuffer, bool *updateDequeue) {
std::unique_lock<std::mutex> l(mLock);
+ mNumDequeueing--;
if (res == C2_OK) {
if (cached) {
auto it = cache->mBuffers.find(slot);
@@ -563,6 +602,8 @@
auto mapRet = mDequeued.emplace(bid, *pBuffer);
CHECK(mapRet.second);
} else {
+ ALOGD("allocate error(%d): Dequeued(%zu), Dequeuable(%d)",
+ (int)res, mDequeued.size(), mDequeueable + 1);
if (adjustDequeueConfLocked(updateDequeue)) {
return;
}
@@ -629,7 +670,11 @@
::android::status_t status = igbp->dequeueBuffer(
&slotId, &fence, width, height, format, usage, &outBufferAge, &outTimestamps);
if (status < ::android::OK) {
- ALOGE("dequeueBuffer() error %d", (int)status);
+ if (status == ::android::TIMED_OUT || status == ::android::WOULD_BLOCK) {
+ ALOGW("BQ might not be ready for dequeueBuffer()");
+ return C2_BLOCKING;
+ }
+ ALOGE("BQ in inconsistent status. dequeueBuffer() error %d", (int)status);
return C2_CORRUPTED;
}
cache->waitOnSlot(slotId);
@@ -649,7 +694,8 @@
ALOGE("allocate by dequeueBuffer() successful, but requestBuffer() failed %d",
status);
igbp->cancelBuffer(slotId, fence);
- return C2_CORRUPTED;
+ // This might be due to life-cycle end and/or surface switching.
+ return C2_BLOCKING;
}
*buffer = std::make_shared<BufferItem>(generation, slotId, realloced, fence);
if (!*buffer) {
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index b3ae514..1d2794e 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -1868,6 +1868,10 @@
return nullptr;
}
+bool Codec2Client::IsAidlSelected() {
+ return c2_aidl::utils::IsSelected();
+}
+
// Codec2Client::Interface
Codec2Client::Interface::Interface(const sp<HidlBase>& base)
: Configurable{
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
index dd6c869..762030b 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
@@ -234,6 +234,7 @@
// Maps bufferId to buffer
std::map<uint64_t, std::shared_ptr<BufferItem>> mDequeued;
std::set<uint64_t> mDeallocating;
+ int mNumDequeueing;
// These member variables are read and modified accessed as follows.
// 1. mConfigLock being held
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index 3b7f7a6..5c75a47 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -270,6 +270,9 @@
static std::shared_ptr<InputSurface> CreateInputSurface(
char const* serviceName = nullptr);
+ // Whether AIDL is selected.
+ static bool IsAidlSelected();
+
// base and/or configurable cannot be null.
Codec2Client(
sp<HidlBase> const& base,
diff --git a/media/codec2/hal/common/Android.bp b/media/codec2/hal/common/Android.bp
index 7d7b285..4c9da33 100644
--- a/media/codec2/hal/common/Android.bp
+++ b/media/codec2/hal/common/Android.bp
@@ -31,6 +31,10 @@
],
static_libs: ["aconfig_mediacodec_flags_c_lib"],
+
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
}
cc_library_static {
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
index 8086ef2..b1fa82f 100644
--- a/media/codec2/hal/common/MultiAccessUnitHelper.cpp
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -27,6 +27,7 @@
#include <C2Debug.h>
#include <C2PlatformSupport.h>
+static inline constexpr uint32_t MAX_SUPPORTED_SIZE = ( 10 * 512000 * 8 * 2u);
namespace android {
static C2R MultiAccessUnitParamsSetter(
@@ -39,8 +40,6 @@
res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.thresholdSize)));
} else if (me.v.maxSize < me.v.thresholdSize) {
me.set().maxSize = me.v.thresholdSize;
- } else if (me.v.thresholdSize == 0 && me.v.maxSize > 0) {
- me.set().thresholdSize = me.v.maxSize;
}
std::vector<std::unique_ptr<C2SettingResult>> failures;
res.retrieveFailures(&failures);
@@ -61,9 +60,9 @@
.withDefault(new C2LargeFrame::output(0u, 0, 0))
.withFields({
C2F(mLargeFrameParams, maxSize).inRange(
- 0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u)),
+ 0, c2_min(UINT_MAX, MAX_SUPPORTED_SIZE)),
C2F(mLargeFrameParams, thresholdSize).inRange(
- 0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u))
+ 0, c2_min(UINT_MAX, MAX_SUPPORTED_SIZE))
})
.withSetter(MultiAccessUnitParamsSetter)
.build());
@@ -115,6 +114,18 @@
return false;
}
+bool MultiAccessUnitInterface::getMaxInputSize(
+ C2StreamMaxBufferSizeInfo::input* const maxInputSize) const {
+ if (maxInputSize == nullptr || mC2ComponentIntf == nullptr) {
+ return false;
+ }
+ c2_status_t err = mC2ComponentIntf->query_vb({maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+ if (err != OK) {
+ return false;
+ }
+ return true;
+}
+
//C2MultiAccessUnitBuffer
class C2MultiAccessUnitBuffer : public C2Buffer {
public:
@@ -128,6 +139,7 @@
MultiAccessUnitHelper::MultiAccessUnitHelper(
const std::shared_ptr<MultiAccessUnitInterface>& intf,
std::shared_ptr<C2BlockPool>& linearPool):
+ mMultiAccessOnOffAllowed(true),
mInit(false),
mInterface(intf),
mLinearPool(linearPool) {
@@ -152,6 +164,63 @@
return result;
}
+bool MultiAccessUnitHelper::tryReconfigure(const std::unique_ptr<C2Param> ¶m) {
+ C2LargeFrame::output *lfp = C2LargeFrame::output::From(param.get());
+ if (lfp == nullptr) {
+ return false;
+ }
+ bool isDecoder = (mInterface->kind() == C2Component::KIND_DECODER) ? true : false;
+ if (!isDecoder) {
+ C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+ if (!mInterface->getMaxInputSize(&maxInputSize)) {
+ LOG(ERROR) << "Error in reconfigure: "
+ << "Encoder failed to respond with a valid max input size";
+ return false;
+ }
+ // This assumes a worst-case compression ratio of 1:1.
+ // In no case should the encoder produce more output than
+ // the input provided to it in a single call.
+ if (lfp->maxSize < maxInputSize.value) {
+ lfp->maxSize = maxInputSize.value;
+ }
+ }
+ lfp->maxSize =
+ (lfp->maxSize > MAX_SUPPORTED_SIZE) ? MAX_SUPPORTED_SIZE :
+ (lfp->maxSize < 0) ? 0 : lfp->maxSize;
+ lfp->thresholdSize =
+ (lfp->thresholdSize > MAX_SUPPORTED_SIZE) ? MAX_SUPPORTED_SIZE :
+ (lfp->thresholdSize < 0) ? 0 : lfp->thresholdSize;
+ C2LargeFrame::output currentConfig = mInterface->getLargeFrameParam();
+ if ((currentConfig.maxSize == lfp->maxSize)
+ && (currentConfig.thresholdSize == lfp->thresholdSize)) {
+ // no need to update
+ return false;
+ }
+ if (isDecoder) {
+ bool isOnOffTransition =
+ (currentConfig.maxSize == 0 && lfp->maxSize != 0)
+ || (currentConfig.maxSize != 0 && lfp->maxSize == 0);
+ if (isOnOffTransition && !mMultiAccessOnOffAllowed) {
+ LOG(ERROR) << "Setting new configs not allowed"
+ << " MaxSize: " << lfp->maxSize
+ << " ThresholdSize: " << lfp->thresholdSize;
+ return false;
+ }
+ }
+ std::vector<C2Param*> config{lfp};
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ if (C2_OK != mInterface->config(config, C2_MAY_BLOCK, &failures)) {
+ LOG(ERROR) << "Dynamic config not applied for"
+ << " MaxSize: " << lfp->maxSize
+ << " ThresholdSize: " << lfp->thresholdSize;
+ return false;
+ }
+ LOG(DEBUG) << "Updated from param maxSize "
+ << lfp->maxSize
+ << " ThresholdSize " << lfp->thresholdSize;
+ return true;
+}
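To illustrate the clamping and override rules in tryReconfigure(): an encoder reconfiguration whose maxSize is smaller than the component's reported max input size is raised to that value (worst-case 1:1 compression), both maxSize and thresholdSize are clamped to MAX_SUPPORTED_SIZE (10 * 512000 * 8 * 2 = 81,920,000 bytes), and for decoders a reconfiguration that would toggle large-frame mode on or off (maxSize crossing zero) is rejected once mMultiAccessOnOffAllowed has been cleared by queued input.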
+
std::shared_ptr<MultiAccessUnitInterface> MultiAccessUnitHelper::getInterface() {
return mInterface;
}
@@ -163,6 +232,7 @@
void MultiAccessUnitHelper::reset() {
std::lock_guard<std::mutex> l(mLock);
mFrameHolder.clear();
+ mMultiAccessOnOffAllowed = true;
}
c2_status_t MultiAccessUnitHelper::error(
@@ -181,6 +251,7 @@
}
}
mFrameHolder.clear();
+ mMultiAccessOnOffAllowed = true;
return C2_OK;
}
@@ -232,16 +303,23 @@
uint64_t newFrameIdx = mFrameIndex++;
// TODO: Do not split buffers if the component inherently supports multiple frames.
// If that's the case, only replace the frame index.
- auto cloneInputWork = [&newFrameIdx](std::unique_ptr<C2Work>& inWork, uint32_t flags) {
+ auto cloneInputWork = [&frameInfo, &newFrameIdx, this]
+ (std::unique_ptr<C2Work>& inWork, uint32_t flags) -> std::unique_ptr<C2Work> {
std::unique_ptr<C2Work> newWork(new C2Work);
newWork->input.flags = (C2FrameData::flags_t)flags;
newWork->input.ordinal = inWork->input.ordinal;
newWork->input.ordinal.frameIndex = newFrameIdx;
if (!inWork->input.configUpdate.empty()) {
for (std::unique_ptr<C2Param>& param : inWork->input.configUpdate) {
- newWork->input.configUpdate.push_back(
- std::move(C2Param::Copy(*(param.get()))));
+ if (param->index() == C2LargeFrame::output::PARAM_TYPE) {
+ if (tryReconfigure(param)) {
+ frameInfo.mConfigUpdate.push_back(std::move(param));
+ }
+ } else {
+ newWork->input.configUpdate.push_back(std::move(param));
+ }
}
+ inWork->input.configUpdate.clear();
}
newWork->input.infoBuffers = (inWork->input.infoBuffers);
if (!inWork->worklets.empty() && inWork->worklets.front() != nullptr) {
@@ -331,6 +409,7 @@
frameInfo.mLargeFrameTuning = multiAccessParams;
std::lock_guard<std::mutex> l(mLock);
mFrameHolder.push_back(std::move(frameInfo));
+ mMultiAccessOnOffAllowed = false;
}
}
return C2_OK;
@@ -360,6 +439,7 @@
std::list<MultiAccessUnitInfo>::iterator frame =
mFrameHolder.begin();
while (!foundFrame && frame != mFrameHolder.end()) {
+ c2_status_t res = C2_OK;
auto it = frame->mComponentFrameIds.find(thisFrameIndex);
if (it != frame->mComponentFrameIds.end()) {
foundFrame = true;
@@ -369,8 +449,7 @@
if (work->result != C2_OK
|| work->worklets.empty()
|| !work->worklets.front()
- || (frame->mLargeFrameTuning.thresholdSize == 0
- || frame->mLargeFrameTuning.maxSize == 0)) {
+ || frame->mLargeFrameTuning.maxSize == 0) {
if (removeEntry) {
frame->mComponentFrameIds.erase(it);
removeEntry = false;
@@ -388,10 +467,27 @@
addOutWork(frame->mLargeWork);
frame->reset();
if (workResult != C2_OK) {
- frame->mAccessUnitInfos.clear();
+ frame->mComponentFrameIds.clear();
+ removeEntry = false;
}
- } else if (C2_OK != processWorklets(*frame, work, addOutWork)) {
- LOG(DEBUG) << "Error while processing work";
+ } else if (C2_OK != (res = processWorklets(*frame, work, addOutWork))) {
+ // Upon an error while processing worklets, return the work with its
+ // result set to that error, so the framework is notified and can take
+ // the action needed to handle it.
+ LOG(DEBUG) << "Error while processing worklets";
+ if (frame->mLargeWork == nullptr) {
+ frame->mLargeWork.reset(new C2Work);
+ frame->mLargeWork->input.ordinal = frame->inOrdinal;
+ frame->mLargeWork->input.ordinal.frameIndex =
+ frame->inOrdinal.frameIndex;
+ }
+ frame->mLargeWork->result = res;
+ finalizeWork(*frame);
+ addOutWork(frame->mLargeWork);
+ frame->reset();
+ frame->mComponentFrameIds.clear();
+ removeEntry = false;
}
if (removeEntry) {
LOG(DEBUG) << "Removing entry: " << thisFrameIndex
@@ -528,9 +624,6 @@
LOG(DEBUG) << "maxOutSize " << frame.mLargeFrameTuning.maxSize
<< " threshold " << frame.mLargeFrameTuning.thresholdSize;
- if ((*worklet)->output.buffers.size() > 0) {
- allocateWork(frame, true, true);
- }
LOG(DEBUG) << "This worklet has " << (*worklet)->output.buffers.size() << " buffers"
<< " ts: " << (*worklet)->output.ordinal.timestamp.peekull();
int64_t workletTimestamp = (*worklet)->output.ordinal.timestamp.peekull();
@@ -552,43 +645,39 @@
inputSize -= (inputSize % frameSize);
}
while (inputOffset < inputSize) {
- if (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize) {
+ if ((frame.mWview != nullptr)
+ && (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize)) {
frame.mLargeWork->result = C2_OK;
finalizeWork(frame, flagsForCopy);
addWork(frame.mLargeWork);
frame.reset();
- allocateWork(frame, true, true);
}
if (mInterface->kind() == C2Component::KIND_ENCODER) {
if (inputSize > frame.mLargeFrameTuning.maxSize) {
- LOG(ERROR) << "Enc: Output buffer too small for AU, configured with "
- << frame.mLargeFrameTuning.maxSize
- << " block size: " << blocks.front().size()
- << "alloc size " << frame.mWview->size();
- if (frame.mLargeWork
- && frame.mWview && frame.mWview->offset() > 0) {
+ LOG(WARNING) << "WARNING Encoder:"
+ << " Output buffer too small for configuration"
+ << " configured max size " << frame.mLargeFrameTuning.maxSize
+ << " access unit size " << inputSize;
+ if (frame.mLargeWork && (frame.mWview && frame.mWview->offset() > 0)) {
+ frame.mLargeWork->result = C2_OK;
finalizeWork(frame, flagsForCopy);
addWork(frame.mLargeWork);
frame.reset();
- allocateWork(frame, true, false);
}
- frame.mLargeWork->result = C2_NO_MEMORY;
- finalizeWork(frame, 0, true);
- addWork(frame.mLargeWork);
- frame.reset();
- return C2_NO_MEMORY;
- } else if (inputSize > frame.mWview->size()) {
+ frame.mLargeFrameTuning.maxSize = inputSize;
+ } else if ((frame.mWview != nullptr)
+ && (inputSize > frame.mWview->size())) {
LOG(DEBUG) << "Enc: Large frame hitting bufer limit, current size "
<< frame.mWview->offset();
- if (frame.mLargeWork
- && frame.mWview && frame.mWview->offset() > 0) {
+ if (frame.mWview->offset() > 0) {
+ frame.mLargeWork->result = C2_OK;
finalizeWork(frame, flagsForCopy);
addWork(frame.mLargeWork);
frame.reset();
- allocateWork(frame, true, true);
}
}
}
+ allocateWork(frame, true, true);
C2ReadView rView = blocks.front().map().get();
if (rView.error()) {
LOG(ERROR) << "Buffer read view error";
@@ -683,26 +772,39 @@
frame.mWview->setOffset(0);
std::shared_ptr<C2Buffer> c2Buffer = C2Buffer::CreateLinearBuffer(
frame.mBlock->share(0, size, ::C2Fence()));
- if (frame.mAccessUnitInfos.size() > 0) {
- if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
- frame.mAccessUnitInfos.back().flags |=
- C2FrameData::FLAG_END_OF_STREAM;
- }
- std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
- C2AccessUnitInfos::output::AllocShared(
- frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
- frame.mInfos.push_back(largeFrame);
- frame.mAccessUnitInfos.clear();
- }
- for (auto &info : frame.mInfos) {
- c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
- }
frame.mLargeWork->worklets.front()->output.buffers.push_back(std::move(c2Buffer));
- frame.mInfos.clear();
- frame.mBlock.reset();
- frame.mWview.reset();
+ }
+ if (frame.mLargeWork->worklets.front()->output.buffers.size() > 0) {
+ std::shared_ptr<C2Buffer>& c2Buffer =
+ frame.mLargeWork->worklets.front()->output.buffers.front();
+ if (c2Buffer != nullptr) {
+ if (frame.mAccessUnitInfos.size() > 0) {
+ if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
+ frame.mAccessUnitInfos.back().flags |= C2FrameData::FLAG_END_OF_STREAM;
+ }
+ std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
+ C2AccessUnitInfos::output::AllocShared(
+ frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
+ frame.mInfos.push_back(largeFrame);
+ frame.mAccessUnitInfos.clear();
+ }
+ for (auto &info : frame.mInfos) {
+ c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
+ }
+ }
+ }
+ if (frame.mConfigUpdate.size() > 0) {
+ outFrameData.configUpdate.insert(
+ outFrameData.configUpdate.end(),
+ make_move_iterator(frame.mConfigUpdate.begin()),
+ make_move_iterator(frame.mConfigUpdate.end()));
}
}
+ frame.mConfigUpdate.clear();
+ frame.mInfos.clear();
+ frame.mBlock.reset();
+ frame.mWview.reset();
+
LOG(DEBUG) << "Multi access-unitflag setting as " << finalFlags;
return C2_OK;
}
@@ -735,6 +837,7 @@
mBlock.reset();
mWview.reset();
mInfos.clear();
+ mConfigUpdate.clear();
mAccessUnitInfos.clear();
mLargeWork.reset();
}
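
Editor's note: tryReconfigure() above clamps the requested large-frame sizes into a supported range before applying them via config(). A minimal standalone sketch of that clamping, assuming an illustrative kMaxSupportedSize constant (the helper's actual MAX_SUPPORTED_SIZE is defined elsewhere) and a hypothetical clampLargeFrameSize() helper:

    #include <algorithm>
    #include <cstdint>

    // Illustrative bound only; the helper's real MAX_SUPPORTED_SIZE may differ.
    constexpr int64_t kMaxSupportedSize = 3 * 1024 * 1024;

    // Mirrors the nested ternaries used for maxSize and thresholdSize in
    // tryReconfigure(): anything negative becomes 0, anything above the
    // bound is capped at the bound.
    static int64_t clampLargeFrameSize(int64_t requested) {
        return std::clamp<int64_t>(requested, 0, kMaxSupportedSize);
    }

If both clamped values already match the interface's current configuration, the reconfiguration is skipped, as the function above shows.
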
diff --git a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
index bb4464c..070a1f5 100644
--- a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
+++ b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
@@ -46,6 +46,7 @@
protected:
bool getDecoderSampleRateAndChannelCount(
uint32_t * const sampleRate_, uint32_t * const channelCount_) const;
+ bool getMaxInputSize(C2StreamMaxBufferSizeInfo::input* const maxInputSize) const;
const std::shared_ptr<C2ComponentInterface> mC2ComponentIntf;
std::shared_ptr<C2LargeFrame::output> mLargeFrameParams;
C2ComponentKindSetting mKind;
@@ -140,6 +141,11 @@
std::vector<std::shared_ptr<const C2Info>> mInfos;
/*
+ * Vector for holding config updates from the wrapper
+ */
+ std::vector<std::unique_ptr<C2Param>> mConfigUpdate;
+
+ /*
* C2AccessUnitInfos for the current buffer
*/
std::vector<C2AccessUnitInfosStruct> mAccessUnitInfos;
@@ -170,6 +176,11 @@
};
/*
+ * Reconfigure helper
+ */
+ bool tryReconfigure(const std::unique_ptr<C2Param> &p);
+
+ /*
* Creates a linear block to be used with work
*/
c2_status_t createLinearBlock(MultiAccessUnitInfo &frame);
@@ -195,6 +206,14 @@
uint32_t size,
int64_t timestamp);
+ // Flag controlling whether dynamic on/off reconfiguration of this helper
+ // is allowed. Once the helper is enabled and buffers are in transit,
+ // the module cannot be turned off by setting the max output size to 0,
+ // because the skip-cut buffer expects the metadata to always accompany
+ // a valid buffer. This flag tracks that state of the module.
+ bool mMultiAccessOnOffAllowed;
+
bool mInit;
// Interface of this module
diff --git a/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
index 41a8904..08f1ae2 100644
--- a/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
@@ -78,6 +78,26 @@
}
c2_status_t err2 = C2_OK;
if (paramsToLargeFrameIntf.size() > 0) {
+ C2ComponentKindSetting kind;
+ C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+ c2_status_t err = mIntf->query_vb(
+ {&kind, &maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+ if ((err == C2_OK) && (kind.value == C2Component::KIND_ENCODER)) {
+ for (int i = 0 ; i < paramsToLargeFrameIntf.size(); i++) {
+ if (paramsToLargeFrameIntf[i]->index() ==
+ C2LargeFrame::output::PARAM_TYPE) {
+ C2LargeFrame::output *lfp = C2LargeFrame::output::From(
+ paramsToLargeFrameIntf[i]);
+ // This is assuming a worst case compression ratio of 1:1
+ // In no case the encoder should give an output more than
+ // what is being provided to the encoder in a single call.
+ if (lfp && (lfp->maxSize < maxInputSize.value)) {
+ lfp->maxSize = maxInputSize.value;
+ }
+ break;
+ }
+ }
+ }
err2 = mMultiAccessUnitIntf->config(
paramsToLargeFrameIntf, mayBlock, failures);
}
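
Editor's note: the branch above applies the same worst-case 1:1 compression assumption used in tryReconfigure(): an encoder is not expected to emit more output for an access unit than the input it was handed in a single call, so the large-frame maxSize is raised to at least the component's reported max input size. A hedged sketch of just that floor (applyWorstCaseFloor is an illustrative name, not part of the HAL):

    #include <cstdint>

    // Raise the requested large-frame max size to the encoder's max input
    // size, per the worst-case 1:1 compression assumption.
    static uint32_t applyWorstCaseFloor(uint32_t requestedMaxSize, uint32_t maxInputSize) {
        return (requestedMaxSize < maxInputSize) ? maxInputSize : requestedMaxSize;
    }
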
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index ab47b7c..36907e1 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -263,9 +263,6 @@
ALOGV("mComponent->reset() timeConsumed=%" PRId64 " us", timeConsumed);
ASSERT_EQ(err, C2_OK);
- err = mComponent->start();
- ASSERT_EQ(err, C2_OK);
-
// Query supported params by the component
std::vector<std::shared_ptr<C2ParamDescriptor>> params;
startTime = getNowUs();
@@ -298,6 +295,9 @@
timeConsumed);
}
+ err = mComponent->start();
+ ASSERT_EQ(err, C2_OK);
+
std::list<std::unique_ptr<C2Work>> workList;
startTime = getNowUs();
err = mComponent->queue(&workList);
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
index d1f0fb5..4c2ef9c 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
@@ -83,6 +83,7 @@
}
}
+// @VsrTest = 3.2-001.003
TEST_P(Codec2MasterHalTest, MustUseAidlBeyond202404) {
static int sVendorApiLevel = android::base::GetIntProperty("ro.vendor.api_level", 0);
if (sVendorApiLevel < 202404) {
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 4de2347..7076bac 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -19,7 +19,9 @@
export_include_dirs: ["include"],
srcs: [
+ "C2AidlNode.cpp",
"C2OMXNode.cpp",
+ "C2NodeImpl.cpp",
"CCodec.cpp",
"CCodecBufferChannel.cpp",
"CCodecBuffers.cpp",
@@ -54,8 +56,11 @@
"android.hardware.drm@1.0",
"android.hardware.media.c2@1.0",
"android.hardware.media.omx@1.0",
+ "android.hardware.graphics.common-V5-ndk",
+ "graphicbuffersource-aidl-ndk",
"libbase",
"libbinder",
+ "libbinder_ndk",
"libcodec2",
"libcodec2_client",
"libcodec2_vndk",
@@ -67,9 +72,11 @@
"liblog",
"libmedia_codeclist",
"libmedia_omx",
+ "libnativewindow",
"libsfplugin_ccodec_utils",
"libstagefright_bufferqueue_helper",
"libstagefright_codecbase",
+ "libstagefright_graphicbuffersource_aidl",
"libstagefright_foundation",
"libstagefright_omx",
"libstagefright_surface_utils",
@@ -84,6 +91,10 @@
"libcodec2_client",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
+
sanitize: {
cfi: true,
misc_undefined: [
diff --git a/media/codec2/sfplugin/C2AidlNode.cpp b/media/codec2/sfplugin/C2AidlNode.cpp
new file mode 100644
index 0000000..93c9d8b
--- /dev/null
+++ b/media/codec2/sfplugin/C2AidlNode.cpp
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2AidlNode"
+#include <log/log.h>
+#include <private/android/AHardwareBufferHelpers.h>
+
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/Conversion.h>
+
+#include "C2NodeImpl.h"
+#include "C2AidlNode.h"
+
+namespace android {
+
+using ::aidl::android::media::IAidlBufferSource;
+using ::aidl::android::media::IAidlNode;
+
+// Conversion
+using ::android::media::aidl_conversion::toAidlStatus;
+
+C2AidlNode::C2AidlNode(const std::shared_ptr<Codec2Client::Component> &comp)
+ : mImpl(new C2NodeImpl(comp, true)) {}
+
+// aidl ndk interfaces
+::ndk::ScopedAStatus C2AidlNode::freeNode() {
+ return toAidlStatus(mImpl->freeNode());
+}
+
+::ndk::ScopedAStatus C2AidlNode::getConsumerUsage(int64_t* _aidl_return) {
+ uint64_t usage;
+ mImpl->getConsumerUsageBits(&usage);
+ *_aidl_return = usage;
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::getInputBufferParams(IAidlNode::InputBufferParams* _aidl_return) {
+ mImpl->getInputBufferParams(_aidl_return);
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::setConsumerUsage(int64_t usage) {
+ mImpl->setConsumerUsageBits(static_cast<uint64_t>(usage));
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::setAdjustTimestampGapUs(int32_t gapUs) {
+ mImpl->setAdjustTimestampGapUs(gapUs);
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::setInputSurface(
+ const std::shared_ptr<IAidlBufferSource>& bufferSource) {
+ return toAidlStatus(mImpl->setAidlInputSurface(bufferSource));
+}
+
+::ndk::ScopedAStatus C2AidlNode::submitBuffer(
+ int32_t buffer, const ::aidl::android::hardware::HardwareBuffer& hBuffer,
+ int32_t flags, int64_t timestamp, const ::ndk::ScopedFileDescriptor& fence) {
+ sp<GraphicBuffer> gBuf;
+ AHardwareBuffer *ahwb = hBuffer.get();
+ if (ahwb) {
+ gBuf = AHardwareBuffer_to_GraphicBuffer(ahwb);
+ }
+ return toAidlStatus(mImpl->submitBuffer(
+ buffer, gBuf, flags, timestamp, ::dup(fence.get())));
+}
+
+::ndk::ScopedAStatus C2AidlNode::onDataSpaceChanged(
+ int32_t dataSpace,
+ int32_t aspects,
+ int32_t pixelFormat) {
+ // NOTE: legacy code passed aspects, but they were never used.
+ (void)aspects;
+
+ return toAidlStatus(mImpl->onDataspaceChanged(
+ static_cast<uint32_t>(dataSpace),
+ static_cast<uint32_t>(pixelFormat)));
+}
+
+// cpp interface
+
+std::shared_ptr<IAidlBufferSource> C2AidlNode::getSource() {
+ return mImpl->getAidlSource();
+}
+
+void C2AidlNode::setFrameSize(uint32_t width, uint32_t height) {
+ return mImpl->setFrameSize(width, height);
+}
+
+void C2AidlNode::onInputBufferDone(c2_cntr64_t index) {
+ return mImpl->onInputBufferDone(index);
+}
+
+android_dataspace C2AidlNode::getDataspace() {
+ return mImpl->getDataspace();
+}
+
+uint32_t C2AidlNode::getPixelFormat() {
+ return mImpl->getPixelFormat();
+}
+
+void C2AidlNode::setPriority(int priority) {
+ return mImpl->setPriority(priority);
+}
+
+} // namespace android
diff --git a/media/codec2/sfplugin/C2AidlNode.h b/media/codec2/sfplugin/C2AidlNode.h
new file mode 100644
index 0000000..365a41d
--- /dev/null
+++ b/media/codec2/sfplugin/C2AidlNode.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/media/BnAidlNode.h>
+#include <codec2/hidl/client.h>
+
+namespace android {
+
+struct C2NodeImpl;
+
+/**
+ * Thin Codec2 AIDL encoder HAL wrapper for InputSurface
+ */
+class C2AidlNode : public ::aidl::android::media::BnAidlNode {
+public:
+ explicit C2AidlNode(const std::shared_ptr<Codec2Client::Component> &comp);
+ ~C2AidlNode() override = default;
+
+ // IAidlNode
+ ::ndk::ScopedAStatus freeNode() override;
+
+ ::ndk::ScopedAStatus getConsumerUsage(int64_t *_aidl_return) override;
+
+ ::ndk::ScopedAStatus getInputBufferParams(
+ ::aidl::android::media::IAidlNode::InputBufferParams *_aidl_return) override;
+
+ ::ndk::ScopedAStatus setConsumerUsage(int64_t usage) override;
+
+ ::ndk::ScopedAStatus setAdjustTimestampGapUs(int32_t gapUs) override;
+
+ ::ndk::ScopedAStatus setInputSurface(
+ const std::shared_ptr<::aidl::android::media::IAidlBufferSource>&
+ bufferSource) override;
+
+ ::ndk::ScopedAStatus submitBuffer(
+ int32_t buffer,
+ const ::aidl::android::hardware::HardwareBuffer& hBuffer,
+ int32_t flags,
+ int64_t timestampUs,
+ const ::ndk::ScopedFileDescriptor& fence) override;
+
+ ::ndk::ScopedAStatus onDataSpaceChanged(
+ int dataSpace, int aspects, int pixelFormat) override;
+
+ /**
+ * Returns underlying IAidlBufferSource object.
+ */
+ std::shared_ptr<::aidl::android::media::IAidlBufferSource> getSource();
+
+ /**
+ * Configure the frame size.
+ */
+ void setFrameSize(uint32_t width, uint32_t height);
+
+ /**
+ * Clean up work item reference.
+ *
+ * \param index input work index
+ */
+ void onInputBufferDone(c2_cntr64_t index);
+
+ /**
+ * Returns dataspace information from GraphicBufferSource.
+ */
+ android_dataspace getDataspace();
+
+ /**
+ * Returns pixel format information from GraphicBufferSource.
+ */
+ uint32_t getPixelFormat();
+
+ /**
+ * Sets priority of the queue thread.
+ */
+ void setPriority(int priority);
+
+private:
+ std::shared_ptr<C2NodeImpl> mImpl;
+};
+
+} // namespace android
+
diff --git a/media/codec2/sfplugin/C2NodeImpl.cpp b/media/codec2/sfplugin/C2NodeImpl.cpp
new file mode 100644
index 0000000..6f53e0f
--- /dev/null
+++ b/media/codec2/sfplugin/C2NodeImpl.cpp
@@ -0,0 +1,451 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2NodeImpl"
+#include <log/log.h>
+
+#include <C2AllocatorGralloc.h>
+#include <C2BlockInternal.h>
+#include <C2Component.h>
+#include <C2Config.h>
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+
+#include <android/fdsan.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <ui/Fence.h>
+#include <ui/GraphicBuffer.h>
+#include <utils/Errors.h>
+#include <utils/Thread.h>
+
+#include "utils/Codec2Mapper.h"
+#include "C2NodeImpl.h"
+#include "Codec2Buffer.h"
+
+namespace android {
+
+using ::aidl::android::media::IAidlBufferSource;
+using ::aidl::android::media::IAidlNode;
+
+using ::android::media::BUFFERFLAG_EOS;
+
+namespace {
+
+class Buffer2D : public C2Buffer {
+public:
+ explicit Buffer2D(C2ConstGraphicBlock block) : C2Buffer({ block }) {}
+};
+
+} // namespace
+
+class C2NodeImpl::QueueThread : public Thread {
+public:
+ QueueThread() : Thread(false) {}
+ ~QueueThread() override = default;
+ void queue(
+ const std::shared_ptr<Codec2Client::Component> &comp,
+ int fenceFd,
+ std::unique_ptr<C2Work> &&work,
+ android::base::unique_fd &&fd0,
+ android::base::unique_fd &&fd1) {
+ Mutexed<Jobs>::Locked jobs(mJobs);
+ auto it = jobs->queues.try_emplace(comp, comp).first;
+ it->second.workList.emplace_back(
+ std::move(work), fenceFd, std::move(fd0), std::move(fd1));
+ jobs->cond.broadcast();
+ }
+
+ void setDataspace(android_dataspace dataspace) {
+ Mutexed<Jobs>::Locked jobs(mJobs);
+ ColorUtils::convertDataSpaceToV0(dataspace);
+ jobs->configUpdate.emplace_back(new C2StreamDataSpaceInfo::input(0u, dataspace));
+ int32_t standard;
+ int32_t transfer;
+ int32_t range;
+ ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
+ std::unique_ptr<C2StreamColorAspectsInfo::input> colorAspects =
+ std::make_unique<C2StreamColorAspectsInfo::input>(0u);
+ if (C2Mapper::map(standard, &colorAspects->primaries, &colorAspects->matrix)
+ && C2Mapper::map(transfer, &colorAspects->transfer)
+ && C2Mapper::map(range, &colorAspects->range)) {
+ jobs->configUpdate.push_back(std::move(colorAspects));
+ }
+ }
+
+ void setPriority(int priority) {
+ androidSetThreadPriority(getTid(), priority);
+ }
+
+protected:
+ bool threadLoop() override {
+ constexpr nsecs_t kIntervalNs = nsecs_t(10) * 1000 * 1000; // 10ms
+ constexpr nsecs_t kWaitNs = kIntervalNs * 2;
+ for (int i = 0; i < 2; ++i) {
+ Mutexed<Jobs>::Locked jobs(mJobs);
+ nsecs_t nowNs = systemTime();
+ bool queued = false;
+ for (auto it = jobs->queues.begin(); it != jobs->queues.end(); ) {
+ Queue &queue = it->second;
+ if (queue.workList.empty()
+ || (queue.lastQueuedTimestampNs != 0 &&
+ nowNs - queue.lastQueuedTimestampNs < kIntervalNs)) {
+ ++it;
+ continue;
+ }
+ std::shared_ptr<Codec2Client::Component> comp = queue.component.lock();
+ if (!comp) {
+ it = jobs->queues.erase(it);
+ continue;
+ }
+ std::list<std::unique_ptr<C2Work>> items;
+ std::vector<int> fenceFds;
+ std::vector<android::base::unique_fd> uniqueFds;
+ while (!queue.workList.empty()) {
+ items.push_back(std::move(queue.workList.front().work));
+ fenceFds.push_back(queue.workList.front().fenceFd);
+ uniqueFds.push_back(std::move(queue.workList.front().fd0));
+ uniqueFds.push_back(std::move(queue.workList.front().fd1));
+ queue.workList.pop_front();
+ }
+ for (const std::unique_ptr<C2Param> &param : jobs->configUpdate) {
+ items.front()->input.configUpdate.emplace_back(C2Param::Copy(*param));
+ }
+
+ jobs.unlock();
+ for (int fenceFd : fenceFds) {
+ sp<Fence> fence(new Fence(fenceFd));
+ fence->waitForever(LOG_TAG);
+ }
+ queue.lastQueuedTimestampNs = nowNs;
+ comp->queue(&items);
+ for (android::base::unique_fd &ufd : uniqueFds) {
+ (void)ufd.release();
+ }
+ jobs.lock();
+
+ it = jobs->queues.upper_bound(comp);
+ queued = true;
+ }
+ if (queued) {
+ jobs->configUpdate.clear();
+ return true;
+ }
+ if (i == 0) {
+ jobs.waitForConditionRelative(jobs->cond, kWaitNs);
+ }
+ }
+ return true;
+ }
+
+private:
+ struct WorkFence {
+ WorkFence(std::unique_ptr<C2Work> &&w, int fd) : work(std::move(w)), fenceFd(fd) {}
+
+ WorkFence(
+ std::unique_ptr<C2Work> &&w,
+ int fd,
+ android::base::unique_fd &&uniqueFd0,
+ android::base::unique_fd &&uniqueFd1)
+ : work(std::move(w)),
+ fenceFd(fd),
+ fd0(std::move(uniqueFd0)),
+ fd1(std::move(uniqueFd1)) {}
+
+ std::unique_ptr<C2Work> work;
+ int fenceFd;
+ android::base::unique_fd fd0;
+ android::base::unique_fd fd1;
+ };
+ struct Queue {
+ Queue(const std::shared_ptr<Codec2Client::Component> &comp)
+ : component(comp), lastQueuedTimestampNs(0) {}
+ Queue(const Queue &) = delete;
+ Queue &operator =(const Queue &) = delete;
+
+ std::weak_ptr<Codec2Client::Component> component;
+ std::list<WorkFence> workList;
+ nsecs_t lastQueuedTimestampNs;
+ };
+ struct Jobs {
+ std::map<std::weak_ptr<Codec2Client::Component>,
+ Queue,
+ std::owner_less<std::weak_ptr<Codec2Client::Component>>> queues;
+ std::vector<std::unique_ptr<C2Param>> configUpdate;
+ Condition cond;
+ };
+ Mutexed<Jobs> mJobs;
+};
+
+C2NodeImpl::C2NodeImpl(const std::shared_ptr<Codec2Client::Component> &comp, bool aidl)
+ : mComp(comp), mFrameIndex(0), mWidth(0), mHeight(0), mUsage(0),
+ mAdjustTimestampGapUs(0), mFirstInputFrame(true),
+ mQueueThread(new QueueThread), mAidlHal(aidl) {
+ android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS);
+ mQueueThread->run("C2NodeImpl", PRIORITY_AUDIO);
+
+ android_dataspace ds = HAL_DATASPACE_UNKNOWN;
+ mDataspace.lock().set(ds);
+ uint32_t pf = PIXEL_FORMAT_UNKNOWN;
+ mPixelFormat.lock().set(pf);
+}
+
+C2NodeImpl::~C2NodeImpl() {
+}
+
+status_t C2NodeImpl::freeNode() {
+ mComp.reset();
+ android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ONCE);
+ return mQueueThread->requestExitAndWait();
+}
+
+void C2NodeImpl::onFirstInputFrame() {
+ mFirstInputFrame = true;
+}
+
+void C2NodeImpl::getConsumerUsageBits(uint64_t *usage) {
+ *usage = mUsage;
+}
+
+void C2NodeImpl::getInputBufferParams(IAidlNode::InputBufferParams *params) {
+ params->bufferCountActual = 16;
+
+ // WORKAROUND: having more slots improves performance while consuming
+ // more memory. This is a temporary workaround to reduce memory for
+ // the larger-than-4K scenario.
+ if (mWidth * mHeight > 4096 * 2340) {
+ std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
+ C2PortActualDelayTuning::input inputDelay(0);
+ C2ActualPipelineDelayTuning pipelineDelay(0);
+ c2_status_t c2err = C2_NOT_FOUND;
+ if (comp) {
+ c2err = comp->query(
+ {&inputDelay, &pipelineDelay}, {}, C2_DONT_BLOCK, nullptr);
+ }
+ if (c2err == C2_OK || c2err == C2_BAD_INDEX) {
+ params->bufferCountActual = 4;
+ params->bufferCountActual += (inputDelay ? inputDelay.value : 0u);
+ params->bufferCountActual += (pipelineDelay ? pipelineDelay.value : 0u);
+ }
+ }
+
+ params->frameWidth = mWidth;
+ params->frameHeight = mHeight;
+}
+
+void C2NodeImpl::setConsumerUsageBits(uint64_t usage) {
+ mUsage = usage;
+}
+
+void C2NodeImpl::setAdjustTimestampGapUs(int32_t gapUs) {
+ mAdjustTimestampGapUs = gapUs;
+}
+
+status_t C2NodeImpl::setInputSurface(const sp<IOMXBufferSource> &bufferSource) {
+ c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
+ C2PlatformAllocatorStore::GRALLOC,
+ &mAllocator);
+ if (err != OK) {
+ return UNKNOWN_ERROR;
+ }
+ CHECK(!mAidlHal);
+ mBufferSource = bufferSource;
+ return OK;
+}
+
+status_t C2NodeImpl::setAidlInputSurface(
+ const std::shared_ptr<IAidlBufferSource> &aidlBufferSource) {
+ c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
+ C2PlatformAllocatorStore::GRALLOC,
+ &mAllocator);
+ if (err != OK) {
+ return UNKNOWN_ERROR;
+ }
+ CHECK(mAidlHal);
+ mAidlBufferSource = aidlBufferSource;
+ return OK;
+}
+
+status_t C2NodeImpl::submitBuffer(
+ uint32_t buffer, const sp<GraphicBuffer> &graphicBuffer,
+ uint32_t flags, int64_t timestamp, int fenceFd) {
+ std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
+ if (!comp) {
+ return NO_INIT;
+ }
+
+ uint32_t c2Flags = (flags & BUFFERFLAG_EOS)
+ ? C2FrameData::FLAG_END_OF_STREAM : 0;
+ std::shared_ptr<C2GraphicBlock> block;
+
+ android::base::unique_fd fd0, fd1;
+ C2Handle *handle = nullptr;
+ if (graphicBuffer) {
+ std::shared_ptr<C2GraphicAllocation> alloc;
+ handle = WrapNativeCodec2GrallocHandle(
+ graphicBuffer->handle,
+ graphicBuffer->width,
+ graphicBuffer->height,
+ graphicBuffer->format,
+ graphicBuffer->usage,
+ graphicBuffer->stride);
+ if (handle != nullptr) {
+ // unique_fd takes ownership of the fds; we'll get a warning if these
+ // fds get closed by somebody else. Ownership is released before
+ // we return, so that the fds get closed as usual when this function
+ // goes out of scope (when both items and block are gone).
+ native_handle_t *nativeHandle = reinterpret_cast<native_handle_t*>(handle);
+ fd0.reset(nativeHandle->numFds > 0 ? nativeHandle->data[0] : -1);
+ fd1.reset(nativeHandle->numFds > 1 ? nativeHandle->data[1] : -1);
+ }
+ c2_status_t err = mAllocator->priorGraphicAllocation(handle, &alloc);
+ if (err != OK) {
+ (void)fd0.release();
+ (void)fd1.release();
+ native_handle_close(handle);
+ native_handle_delete(handle);
+ return UNKNOWN_ERROR;
+ }
+ block = _C2BlockFactory::CreateGraphicBlock(alloc);
+ } else if (!(flags & BUFFERFLAG_EOS)) {
+ return BAD_VALUE;
+ }
+
+ std::unique_ptr<C2Work> work(new C2Work);
+ work->input.flags = (C2FrameData::flags_t)c2Flags;
+ work->input.ordinal.timestamp = timestamp;
+
+ // WORKAROUND: adjust timestamp based on gapUs
+ {
+ work->input.ordinal.customOrdinal = timestamp; // save input timestamp
+ if (mFirstInputFrame) {
+ // grab timestamps on first frame
+ mPrevInputTimestamp = timestamp;
+ mPrevCodecTimestamp = timestamp;
+ mFirstInputFrame = false;
+ } else if (mAdjustTimestampGapUs > 0) {
+ work->input.ordinal.timestamp =
+ mPrevCodecTimestamp
+ + c2_min((timestamp - mPrevInputTimestamp).peek(), mAdjustTimestampGapUs);
+ } else if (mAdjustTimestampGapUs < 0) {
+ work->input.ordinal.timestamp = mPrevCodecTimestamp - mAdjustTimestampGapUs;
+ }
+ mPrevInputTimestamp = work->input.ordinal.customOrdinal;
+ mPrevCodecTimestamp = work->input.ordinal.timestamp;
+ ALOGV("adjusting %lld to %lld (gap=%lld)",
+ work->input.ordinal.customOrdinal.peekll(),
+ work->input.ordinal.timestamp.peekll(),
+ (long long)mAdjustTimestampGapUs);
+ }
+
+ work->input.ordinal.frameIndex = mFrameIndex++;
+ work->input.buffers.clear();
+ if (block) {
+ std::shared_ptr<C2Buffer> c2Buffer(
+ new Buffer2D(block->share(
+ C2Rect(block->width(), block->height()), ::C2Fence())));
+ work->input.buffers.push_back(c2Buffer);
+ std::shared_ptr<C2StreamHdrStaticInfo::input> staticInfo;
+ std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> dynamicInfo;
+ GetHdrMetadataFromGralloc4Handle(
+ block->handle(),
+ &staticInfo,
+ &dynamicInfo);
+ if (staticInfo && *staticInfo) {
+ c2Buffer->setInfo(staticInfo);
+ }
+ if (dynamicInfo && *dynamicInfo) {
+ c2Buffer->setInfo(dynamicInfo);
+ }
+ }
+ work->worklets.clear();
+ work->worklets.emplace_back(new C2Worklet);
+ mBufferIdsInUse.lock()->emplace(work->input.ordinal.frameIndex.peeku(), buffer);
+ mQueueThread->queue(comp, fenceFd, std::move(work), std::move(fd0), std::move(fd1));
+
+ return OK;
+}
+
+status_t C2NodeImpl::onDataspaceChanged(uint32_t dataSpace, uint32_t pixelFormat) {
+ ALOGD("dataspace changed to %#x pixel format: %#x", dataSpace, pixelFormat);
+ android_dataspace d = (android_dataspace)dataSpace;
+ mQueueThread->setDataspace(d);
+
+ mDataspace.lock().set(d);
+ mPixelFormat.lock().set(pixelFormat);
+ return OK;
+}
+
+sp<IOMXBufferSource> C2NodeImpl::getSource() {
+ CHECK(!mAidlHal);
+ return mBufferSource;
+}
+
+std::shared_ptr<IAidlBufferSource> C2NodeImpl::getAidlSource() {
+ CHECK(mAidlHal);
+ return mAidlBufferSource;
+}
+
+void C2NodeImpl::setFrameSize(uint32_t width, uint32_t height) {
+ mWidth = width;
+ mHeight = height;
+}
+
+void C2NodeImpl::onInputBufferDone(c2_cntr64_t index) {
+ if (mAidlHal) {
+ if (!mAidlBufferSource) {
+ ALOGD("Buffer source not set (index=%llu)", index.peekull());
+ return;
+ }
+ } else {
+ if (!mBufferSource) {
+ ALOGD("Buffer source not set (index=%llu)", index.peekull());
+ return;
+ }
+ }
+
+ int32_t bufferId = 0;
+ {
+ decltype(mBufferIdsInUse)::Locked bufferIds(mBufferIdsInUse);
+ auto it = bufferIds->find(index.peeku());
+ if (it == bufferIds->end()) {
+ ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
+ return;
+ }
+ bufferId = it->second;
+ (void)bufferIds->erase(it);
+ }
+ if (mAidlHal) {
+ ::ndk::ScopedFileDescriptor nullFence;
+ (void)mAidlBufferSource->onInputBufferEmptied(bufferId, nullFence);
+ } else {
+ (void)mBufferSource->onInputBufferEmptied(bufferId, -1);
+ }
+}
+
+android_dataspace C2NodeImpl::getDataspace() {
+ return *mDataspace.lock();
+}
+
+uint32_t C2NodeImpl::getPixelFormat() {
+ return *mPixelFormat.lock();
+}
+
+void C2NodeImpl::setPriority(int priority) {
+ mQueueThread->setPriority(priority);
+}
+
+} // namespace android
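
Editor's note: the timestamp-gap workaround in C2NodeImpl::submitBuffer() can be summarized separately from the surrounding work plumbing. A self-contained sketch under the same semantics (gapUs > 0 caps the gap between consecutive codec timestamps, gapUs < 0 forces a fixed gap of -gapUs, gapUs == 0 leaves timestamps untouched); adjustTimestamp and its parameters are illustrative names only:

    #include <algorithm>
    #include <cstdint>

    static int64_t adjustTimestamp(
            int64_t inputTs, int64_t prevInputTs, int64_t prevCodecTs,
            int32_t gapUs, bool firstFrame) {
        if (firstFrame || gapUs == 0) {
            // First frame, or adjustment disabled: pass the timestamp through.
            return inputTs;
        }
        if (gapUs > 0) {
            // Cap the gap between consecutive codec timestamps at gapUs.
            return prevCodecTs + std::min<int64_t>(inputTs - prevInputTs, gapUs);
        }
        // gapUs < 0: enforce a fixed gap of -gapUs between codec timestamps.
        return prevCodecTs - gapUs;
    }

The caller then records the raw input timestamp and the adjusted value as the "previous" pair for the next frame, exactly as the member variables in the code above do.
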
diff --git a/media/codec2/sfplugin/C2NodeImpl.h b/media/codec2/sfplugin/C2NodeImpl.h
new file mode 100644
index 0000000..e060fd8
--- /dev/null
+++ b/media/codec2/sfplugin/C2NodeImpl.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+
+#include <android/IOMXBufferSource.h>
+#include <aidl/android/media/IAidlBufferSource.h>
+#include <aidl/android/media/IAidlNode.h>
+#include <codec2/hidl/client.h>
+#include <media/stagefright/foundation/Mutexed.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+
+namespace android {
+
+/**
+ * IOmxNode implementation around a Codec2 component, only to be used in
+ * IGraphicBufferSource::configure. Only a subset of the IOmxNode API is
+ * implemented; as a result, this node cannot be expected to work in any usage
+ * other than IGraphicBufferSource (or IAidlGraphicBufferSource when the AIDL
+ * HAL is used).
+ */
+struct C2NodeImpl {
+ explicit C2NodeImpl(const std::shared_ptr<Codec2Client::Component> &comp, bool aidl);
+ ~C2NodeImpl();
+
+ // IOMXNode and/or IAidlNode
+ status_t freeNode();
+
+ void onFirstInputFrame();
+ void getConsumerUsageBits(uint64_t *usage /* nonnull */);
+ void getInputBufferParams(
+ ::aidl::android::media::IAidlNode::InputBufferParams *params /* nonnull */);
+ void setConsumerUsageBits(uint64_t usage);
+ void setAdjustTimestampGapUs(int32_t gapUs);
+
+ status_t setInputSurface(
+ const sp<IOMXBufferSource> &bufferSource);
+ status_t setAidlInputSurface(
+ const std::shared_ptr<::aidl::android::media::IAidlBufferSource> &aidlBufferSource);
+
+ status_t submitBuffer(
+ uint32_t buffer, const sp<GraphicBuffer> &graphicBuffer,
+ uint32_t flags, int64_t timestamp, int fenceFd);
+ status_t onDataspaceChanged(uint32_t dataSpace, uint32_t pixelFormat);
+
+ /**
+ * Returns underlying IOMXBufferSource object.
+ */
+ sp<IOMXBufferSource> getSource();
+
+ /**
+ * Returns underlying IAidlBufferSource object.
+ */
+ std::shared_ptr<::aidl::android::media::IAidlBufferSource> getAidlSource();
+
+ /**
+ * Configure the frame size.
+ */
+ void setFrameSize(uint32_t width, uint32_t height);
+
+ /**
+ * Clean up work item reference.
+ *
+ * \param index input work index
+ */
+ void onInputBufferDone(c2_cntr64_t index);
+
+ /**
+ * Returns dataspace information from GraphicBufferSource.
+ */
+ android_dataspace getDataspace();
+
+ /**
+ * Returns pixel format information from GraphicBufferSource.
+ */
+ uint32_t getPixelFormat();
+
+ /**
+ * Sets priority of the queue thread.
+ */
+ void setPriority(int priority);
+
+private:
+ std::weak_ptr<Codec2Client::Component> mComp;
+
+ sp<IOMXBufferSource> mBufferSource;
+ std::shared_ptr<::aidl::android::media::IAidlBufferSource> mAidlBufferSource;
+
+ std::shared_ptr<C2Allocator> mAllocator;
+ std::atomic_uint64_t mFrameIndex;
+ uint32_t mWidth;
+ uint32_t mHeight;
+ uint64_t mUsage;
+ Mutexed<android_dataspace> mDataspace;
+ Mutexed<uint32_t> mPixelFormat;
+
+ // WORKAROUND: timestamp adjustment
+
+ // if >0: this is the max timestamp gap, if <0: this is -1 times the fixed timestamp gap
+ // if 0: no timestamp adjustment is made
+ // note that C2OMXNode can be recycled between encoding sessions.
+ int32_t mAdjustTimestampGapUs;
+ bool mFirstInputFrame; // true for first input
+ c2_cntr64_t mPrevInputTimestamp; // input timestamp for previous frame
+ c2_cntr64_t mPrevCodecTimestamp; // adjusted (codec) timestamp for previous frame
+
+ Mutexed<std::map<uint64_t, uint32_t>> mBufferIdsInUse;
+
+ class QueueThread;
+ sp<QueueThread> mQueueThread;
+
+ bool mAidlHal;
+};
+
+} // namespace android
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index bba022b..ce02c88 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright 2018, The Android Open Source Project
+ * Copyright 2024, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -19,30 +19,17 @@
#endif
//#define LOG_NDEBUG 0
-#define LOG_TAG "C2OMXNode"
+#define LOG_TAG "C2OMXNODE"
#include <log/log.h>
-#include <C2AllocatorGralloc.h>
-#include <C2BlockInternal.h>
-#include <C2Component.h>
-#include <C2Config.h>
-#include <C2PlatformSupport.h>
-
#include <OMX_Component.h>
#include <OMX_Index.h>
#include <OMX_IndexExt.h>
-#include <android/fdsan.h>
-#include <media/stagefright/foundation/ColorUtils.h>
-#include <media/stagefright/omx/OMXUtils.h>
#include <media/stagefright/MediaErrors.h>
-#include <ui/Fence.h>
-#include <ui/GraphicBuffer.h>
-#include <utils/Thread.h>
-#include "utils/Codec2Mapper.h"
#include "C2OMXNode.h"
-#include "Codec2Buffer.h"
+#include "C2NodeImpl.h"
namespace android {
@@ -50,175 +37,25 @@
constexpr OMX_U32 kPortIndexInput = 0;
-class Buffer2D : public C2Buffer {
-public:
- explicit Buffer2D(C2ConstGraphicBlock block) : C2Buffer({ block }) {}
-};
+} // anonymous namespace
-} // namespace
+using ::android::media::BUFFERFLAG_ENDOFFRAME;
+using ::android::media::BUFFERFLAG_EOS;
-class C2OMXNode::QueueThread : public Thread {
-public:
- QueueThread() : Thread(false) {}
- ~QueueThread() override = default;
- void queue(
- const std::shared_ptr<Codec2Client::Component> &comp,
- int fenceFd,
- std::unique_ptr<C2Work> &&work,
- android::base::unique_fd &&fd0,
- android::base::unique_fd &&fd1) {
- Mutexed<Jobs>::Locked jobs(mJobs);
- auto it = jobs->queues.try_emplace(comp, comp).first;
- it->second.workList.emplace_back(
- std::move(work), fenceFd, std::move(fd0), std::move(fd1));
- jobs->cond.broadcast();
- }
-
- void setDataspace(android_dataspace dataspace) {
- Mutexed<Jobs>::Locked jobs(mJobs);
- ColorUtils::convertDataSpaceToV0(dataspace);
- jobs->configUpdate.emplace_back(new C2StreamDataSpaceInfo::input(0u, dataspace));
- int32_t standard;
- int32_t transfer;
- int32_t range;
- ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
- std::unique_ptr<C2StreamColorAspectsInfo::input> colorAspects =
- std::make_unique<C2StreamColorAspectsInfo::input>(0u);
- if (C2Mapper::map(standard, &colorAspects->primaries, &colorAspects->matrix)
- && C2Mapper::map(transfer, &colorAspects->transfer)
- && C2Mapper::map(range, &colorAspects->range)) {
- jobs->configUpdate.push_back(std::move(colorAspects));
- }
- }
-
- void setPriority(int priority) {
- androidSetThreadPriority(getTid(), priority);
- }
-
-protected:
- bool threadLoop() override {
- constexpr nsecs_t kIntervalNs = nsecs_t(10) * 1000 * 1000; // 10ms
- constexpr nsecs_t kWaitNs = kIntervalNs * 2;
- for (int i = 0; i < 2; ++i) {
- Mutexed<Jobs>::Locked jobs(mJobs);
- nsecs_t nowNs = systemTime();
- bool queued = false;
- for (auto it = jobs->queues.begin(); it != jobs->queues.end(); ) {
- Queue &queue = it->second;
- if (queue.workList.empty()
- || (queue.lastQueuedTimestampNs != 0 &&
- nowNs - queue.lastQueuedTimestampNs < kIntervalNs)) {
- ++it;
- continue;
- }
- std::shared_ptr<Codec2Client::Component> comp = queue.component.lock();
- if (!comp) {
- it = jobs->queues.erase(it);
- continue;
- }
- std::list<std::unique_ptr<C2Work>> items;
- std::vector<int> fenceFds;
- std::vector<android::base::unique_fd> uniqueFds;
- while (!queue.workList.empty()) {
- items.push_back(std::move(queue.workList.front().work));
- fenceFds.push_back(queue.workList.front().fenceFd);
- uniqueFds.push_back(std::move(queue.workList.front().fd0));
- uniqueFds.push_back(std::move(queue.workList.front().fd1));
- queue.workList.pop_front();
- }
- for (const std::unique_ptr<C2Param> &param : jobs->configUpdate) {
- items.front()->input.configUpdate.emplace_back(C2Param::Copy(*param));
- }
-
- jobs.unlock();
- for (int fenceFd : fenceFds) {
- sp<Fence> fence(new Fence(fenceFd));
- fence->waitForever(LOG_TAG);
- }
- queue.lastQueuedTimestampNs = nowNs;
- comp->queue(&items);
- for (android::base::unique_fd &ufd : uniqueFds) {
- (void)ufd.release();
- }
- jobs.lock();
-
- it = jobs->queues.upper_bound(comp);
- queued = true;
- }
- if (queued) {
- jobs->configUpdate.clear();
- return true;
- }
- if (i == 0) {
- jobs.waitForConditionRelative(jobs->cond, kWaitNs);
- }
- }
- return true;
- }
-
-private:
- struct WorkFence {
- WorkFence(std::unique_ptr<C2Work> &&w, int fd) : work(std::move(w)), fenceFd(fd) {}
-
- WorkFence(
- std::unique_ptr<C2Work> &&w,
- int fd,
- android::base::unique_fd &&uniqueFd0,
- android::base::unique_fd &&uniqueFd1)
- : work(std::move(w)),
- fenceFd(fd),
- fd0(std::move(uniqueFd0)),
- fd1(std::move(uniqueFd1)) {}
-
- std::unique_ptr<C2Work> work;
- int fenceFd;
- android::base::unique_fd fd0;
- android::base::unique_fd fd1;
- };
- struct Queue {
- Queue(const std::shared_ptr<Codec2Client::Component> &comp)
- : component(comp), lastQueuedTimestampNs(0) {}
- Queue(const Queue &) = delete;
- Queue &operator =(const Queue &) = delete;
-
- std::weak_ptr<Codec2Client::Component> component;
- std::list<WorkFence> workList;
- nsecs_t lastQueuedTimestampNs;
- };
- struct Jobs {
- std::map<std::weak_ptr<Codec2Client::Component>,
- Queue,
- std::owner_less<std::weak_ptr<Codec2Client::Component>>> queues;
- std::vector<std::unique_ptr<C2Param>> configUpdate;
- Condition cond;
- };
- Mutexed<Jobs> mJobs;
-};
+using ::aidl::android::media::IAidlNode;
C2OMXNode::C2OMXNode(const std::shared_ptr<Codec2Client::Component> &comp)
- : mComp(comp), mFrameIndex(0), mWidth(0), mHeight(0), mUsage(0),
- mAdjustTimestampGapUs(0), mFirstInputFrame(true),
- mQueueThread(new QueueThread) {
- android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS);
- mQueueThread->run("C2OMXNode", PRIORITY_AUDIO);
-
- android_dataspace ds = HAL_DATASPACE_UNKNOWN;
- mDataspace.lock().set(ds);
- uint32_t pf = PIXEL_FORMAT_UNKNOWN;
- mPixelFormat.lock().set(pf);
-}
+ : mImpl(new C2NodeImpl(comp, false)) {}
status_t C2OMXNode::freeNode() {
- mComp.reset();
- android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ONCE);
- return mQueueThread->requestExitAndWait();
+ return mImpl->freeNode();
}
status_t C2OMXNode::sendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param) {
if (cmd == OMX_CommandStateSet && param == OMX_StateLoaded) {
// Reset first input frame so if C2OMXNode is recycled, the timestamp does not become
// negative. This is a workaround for HW codecs that do not handle timestamp rollover.
- mFirstInputFrame = true;
+ mImpl->onFirstInputFrame();
}
return ERROR_UNSUPPORTED;
}
@@ -228,13 +65,19 @@
switch ((uint32_t)index) {
case OMX_IndexParamConsumerUsageBits: {
OMX_U32 *usage = (OMX_U32 *)params;
- *usage = mUsage;
+ uint64_t val;
+ mImpl->getConsumerUsageBits(&val);
+ *usage = static_cast<uint32_t>(val & 0xFFFFFFFF);
+ ALOGW("retrieving usage bits in 32 bits %llu -> %u",
+ (unsigned long long)val, (unsigned int)*usage);
err = OK;
break;
}
case OMX_IndexParamConsumerUsageBits64: {
OMX_U64 *usage = (OMX_U64 *)params;
- *usage = mUsage;
+ uint64_t val;
+ mImpl->getConsumerUsageBits(&val);
+ *usage = val;
err = OK;
break;
}
@@ -246,31 +89,12 @@
if (pDef->nPortIndex != kPortIndexInput) {
break;
}
-
- pDef->nBufferCountActual = 16;
-
- // WORKAROUND: having more slots improve performance while consuming
- // more memory. This is a temporary workaround to reduce memory for
- // larger-than-4K scenario.
- if (mWidth * mHeight > 4096 * 2340) {
- std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
- C2PortActualDelayTuning::input inputDelay(0);
- C2ActualPipelineDelayTuning pipelineDelay(0);
- c2_status_t c2err = C2_NOT_FOUND;
- if (comp) {
- c2err = comp->query(
- {&inputDelay, &pipelineDelay}, {}, C2_DONT_BLOCK, nullptr);
- }
- if (c2err == C2_OK || c2err == C2_BAD_INDEX) {
- pDef->nBufferCountActual = 4;
- pDef->nBufferCountActual += (inputDelay ? inputDelay.value : 0u);
- pDef->nBufferCountActual += (pipelineDelay ? pipelineDelay.value : 0u);
- }
- }
-
+ IAidlNode::InputBufferParams bufferParams;
+ mImpl->getInputBufferParams(&bufferParams);
+ pDef->nBufferCountActual = bufferParams.bufferCountActual;
pDef->eDomain = OMX_PortDomainVideo;
- pDef->format.video.nFrameWidth = mWidth;
- pDef->format.video.nFrameHeight = mHeight;
+ pDef->format.video.nFrameWidth = bufferParams.frameWidth;
+ pDef->format.video.nFrameHeight = bufferParams.frameHeight;
pDef->format.video.eColorFormat = OMX_COLOR_FormatAndroidOpaque;
err = OK;
break;
@@ -286,28 +110,34 @@
return BAD_VALUE;
}
switch ((uint32_t)index) {
- case OMX_IndexParamMaxFrameDurationForBitrateControl:
+ case OMX_IndexParamMaxFrameDurationForBitrateControl: {
// handle max/fixed frame duration control
if (size != sizeof(OMX_PARAM_U32TYPE)) {
return BAD_VALUE;
}
// The incoming number is an int32_t contained in OMX_U32.
- mAdjustTimestampGapUs = (int32_t)((OMX_PARAM_U32TYPE*)params)->nU32;
+ int32_t gapUs = (int32_t)((OMX_PARAM_U32TYPE*)params)->nU32;
+ mImpl->setAdjustTimestampGapUs(gapUs);
return OK;
-
- case OMX_IndexParamConsumerUsageBits:
+ }
+ case OMX_IndexParamConsumerUsageBits: {
if (size != sizeof(OMX_U32)) {
return BAD_VALUE;
}
- mUsage = *((OMX_U32 *)params);
+ uint32_t usage = *((OMX_U32 *)params);
+ mImpl->setConsumerUsageBits(static_cast<uint64_t>(usage));
return OK;
-
- case OMX_IndexParamConsumerUsageBits64:
+ }
+ case OMX_IndexParamConsumerUsageBits64: {
if (size != sizeof(OMX_U64)) {
return BAD_VALUE;
}
- mUsage = *((OMX_U64 *)params);
+ uint64_t usagell = *((OMX_U64 *)params);
+ mImpl->setConsumerUsageBits(usagell);
return OK;
+ }
+ default:
+ break;
}
return ERROR_UNSUPPORTED;
}
@@ -359,14 +189,7 @@
}
status_t C2OMXNode::setInputSurface(const sp<IOMXBufferSource> &bufferSource) {
- c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
- C2PlatformAllocatorStore::GRALLOC,
- &mAllocator);
- if (err != OK) {
- return UNKNOWN_ERROR;
- }
- mBufferSource = bufferSource;
- return OK;
+ return mImpl->setInputSurface(bufferSource);
}
status_t C2OMXNode::allocateSecureBuffer(
@@ -402,105 +225,39 @@
return ERROR_UNSUPPORTED;
}
+namespace {
+ uint32_t toNodeFlags(OMX_U32 flags) {
+ uint32_t retFlags = 0;
+ if (flags & OMX_BUFFERFLAG_ENDOFFRAME) {
+ retFlags |= BUFFERFLAG_ENDOFFRAME;
+ }
+ if (flags & OMX_BUFFERFLAG_EOS) {
+ retFlags |= BUFFERFLAG_EOS;
+ }
+ return retFlags;
+ }
+ int64_t toNodeTimestamp(OMX_TICKS ticks) {
+ int64_t timestamp = 0;
+#ifndef OMX_SKIP64BIT
+ timestamp = ticks;
+#else
+ timestamp = ((ticks.nHighPart << 32) | ticks.nLowPart);
+#endif
+ return timestamp;
+ }
+} // anonymous namespace
+
status_t C2OMXNode::emptyBuffer(
buffer_id buffer, const OMXBuffer &omxBuf,
OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
- std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
- if (!comp) {
- return NO_INIT;
- }
-
- uint32_t c2Flags = (flags & OMX_BUFFERFLAG_EOS)
- ? C2FrameData::FLAG_END_OF_STREAM : 0;
- std::shared_ptr<C2GraphicBlock> block;
-
- android::base::unique_fd fd0, fd1;
- C2Handle *handle = nullptr;
if (omxBuf.mBufferType == OMXBuffer::kBufferTypeANWBuffer
&& omxBuf.mGraphicBuffer != nullptr) {
- std::shared_ptr<C2GraphicAllocation> alloc;
- handle = WrapNativeCodec2GrallocHandle(
- omxBuf.mGraphicBuffer->handle,
- omxBuf.mGraphicBuffer->width,
- omxBuf.mGraphicBuffer->height,
- omxBuf.mGraphicBuffer->format,
- omxBuf.mGraphicBuffer->usage,
- omxBuf.mGraphicBuffer->stride);
- if (handle != nullptr) {
- // unique_fd takes ownership of the fds, we'll get warning if these
- // fds get closed by somebody else. Onwership will be released before
- // we return, so that the fds get closed as usually when this function
- // goes out of scope (when both items and block are gone).
- native_handle_t *nativeHandle = reinterpret_cast<native_handle_t*>(handle);
- fd0.reset(nativeHandle->numFds > 0 ? nativeHandle->data[0] : -1);
- fd1.reset(nativeHandle->numFds > 1 ? nativeHandle->data[1] : -1);
- }
- c2_status_t err = mAllocator->priorGraphicAllocation(handle, &alloc);
- if (err != OK) {
- (void)fd0.release();
- (void)fd1.release();
- native_handle_close(handle);
- native_handle_delete(handle);
- return UNKNOWN_ERROR;
- }
- block = _C2BlockFactory::CreateGraphicBlock(alloc);
- } else if (!(flags & OMX_BUFFERFLAG_EOS)) {
- return BAD_VALUE;
+ return mImpl->submitBuffer(buffer, omxBuf.mGraphicBuffer, toNodeFlags(flags),
+ toNodeTimestamp(timestamp), fenceFd);
}
-
- std::unique_ptr<C2Work> work(new C2Work);
- work->input.flags = (C2FrameData::flags_t)c2Flags;
- work->input.ordinal.timestamp = timestamp;
-
- // WORKAROUND: adjust timestamp based on gapUs
- {
- work->input.ordinal.customOrdinal = timestamp; // save input timestamp
- if (mFirstInputFrame) {
- // grab timestamps on first frame
- mPrevInputTimestamp = timestamp;
- mPrevCodecTimestamp = timestamp;
- mFirstInputFrame = false;
- } else if (mAdjustTimestampGapUs > 0) {
- work->input.ordinal.timestamp =
- mPrevCodecTimestamp
- + c2_min((timestamp - mPrevInputTimestamp).peek(), mAdjustTimestampGapUs);
- } else if (mAdjustTimestampGapUs < 0) {
- work->input.ordinal.timestamp = mPrevCodecTimestamp - mAdjustTimestampGapUs;
- }
- mPrevInputTimestamp = work->input.ordinal.customOrdinal;
- mPrevCodecTimestamp = work->input.ordinal.timestamp;
- ALOGV("adjusting %lld to %lld (gap=%lld)",
- work->input.ordinal.customOrdinal.peekll(),
- work->input.ordinal.timestamp.peekll(),
- (long long)mAdjustTimestampGapUs);
- }
-
- work->input.ordinal.frameIndex = mFrameIndex++;
- work->input.buffers.clear();
- if (block) {
- std::shared_ptr<C2Buffer> c2Buffer(
- new Buffer2D(block->share(
- C2Rect(block->width(), block->height()), ::C2Fence())));
- work->input.buffers.push_back(c2Buffer);
- std::shared_ptr<C2StreamHdrStaticInfo::input> staticInfo;
- std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> dynamicInfo;
- GetHdrMetadataFromGralloc4Handle(
- block->handle(),
- &staticInfo,
- &dynamicInfo);
- if (staticInfo && *staticInfo) {
- c2Buffer->setInfo(staticInfo);
- }
- if (dynamicInfo && *dynamicInfo) {
- c2Buffer->setInfo(dynamicInfo);
- }
- }
- work->worklets.clear();
- work->worklets.emplace_back(new C2Worklet);
- mBufferIdsInUse.lock()->emplace(work->input.ordinal.frameIndex.peeku(), buffer);
- mQueueThread->queue(comp, fenceFd, std::move(work), std::move(fd0), std::move(fd1));
-
- return OK;
+ sp<GraphicBuffer> gBuf;
+ return mImpl->submitBuffer(buffer, gBuf, toNodeFlags(flags),
+ toNodeTimestamp(timestamp), fenceFd);
}
status_t C2OMXNode::getExtensionIndex(
@@ -517,56 +274,33 @@
if (msg.u.event_data.event != OMX_EventDataSpaceChanged) {
return ERROR_UNSUPPORTED;
}
- android_dataspace dataSpace = (android_dataspace)msg.u.event_data.data1;
- uint32_t pixelFormat = msg.u.event_data.data3;
-
- ALOGD("dataspace changed to %#x pixel format: %#x", dataSpace, pixelFormat);
- mQueueThread->setDataspace(dataSpace);
-
- mDataspace.lock().set(dataSpace);
- mPixelFormat.lock().set(pixelFormat);
- return OK;
+ return mImpl->onDataspaceChanged(
+ msg.u.event_data.data1,
+ msg.u.event_data.data3);
}
sp<IOMXBufferSource> C2OMXNode::getSource() {
- return mBufferSource;
+ return mImpl->getSource();
}
void C2OMXNode::setFrameSize(uint32_t width, uint32_t height) {
- mWidth = width;
- mHeight = height;
+ return mImpl->setFrameSize(width, height);
}
void C2OMXNode::onInputBufferDone(c2_cntr64_t index) {
- if (!mBufferSource) {
- ALOGD("Buffer source not set (index=%llu)", index.peekull());
- return;
- }
-
- int32_t bufferId = 0;
- {
- decltype(mBufferIdsInUse)::Locked bufferIds(mBufferIdsInUse);
- auto it = bufferIds->find(index.peeku());
- if (it == bufferIds->end()) {
- ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
- return;
- }
- bufferId = it->second;
- (void)bufferIds->erase(it);
- }
- (void)mBufferSource->onInputBufferEmptied(bufferId, -1);
+ return mImpl->onInputBufferDone(index);
}
android_dataspace C2OMXNode::getDataspace() {
- return *mDataspace.lock();
+ return mImpl->getDataspace();
}
uint32_t C2OMXNode::getPixelFormat() {
- return *mPixelFormat.lock();
+ return mImpl->getPixelFormat();
}
void C2OMXNode::setPriority(int priority) {
- mQueueThread->setPriority(priority);
+ return mImpl->setPriority(priority);
}
} // namespace android
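
Editor's note: one subtlety in the getParameter() path above is that the legacy 32-bit OMX_IndexParamConsumerUsageBits query can only report the low 32 bits of the node's 64-bit consumer usage, hence the warning log and the separate 64-bit index handling. A hedged sketch of that truncation (the helper name is illustrative, not part of the node API):

    #include <cstdint>

    // Return only the low 32 bits of a 64-bit gralloc usage mask, as the
    // 32-bit OMX query does; callers that need the full mask should use the
    // 64-bit index instead.
    static uint32_t truncateUsageTo32Bits(uint64_t usage64) {
        return static_cast<uint32_t>(usage64 & 0xFFFFFFFFu);
    }
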
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index c8ce336..d077202 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -1,5 +1,5 @@
/*
- * Copyright 2018, The Android Open Source Project
+ * Copyright 2024, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,16 +17,15 @@
#ifndef C2_OMX_NODE_H_
#define C2_OMX_NODE_H_
-#include <atomic>
-
#include <android/IOMXBufferSource.h>
#include <codec2/hidl/client.h>
-#include <media/stagefright/foundation/Mutexed.h>
#include <media/IOMX.h>
#include <media/OMXBuffer.h>
namespace android {
+struct C2NodeImpl;
+
/**
* IOmxNode implementation around codec 2.0 component, only to be used in
* IGraphicBufferSource::configure. Only subset of IOmxNode API is implemented
@@ -109,30 +108,7 @@
void setPriority(int priority);
private:
- std::weak_ptr<Codec2Client::Component> mComp;
- sp<IOMXBufferSource> mBufferSource;
- std::shared_ptr<C2Allocator> mAllocator;
- std::atomic_uint64_t mFrameIndex;
- uint32_t mWidth;
- uint32_t mHeight;
- uint64_t mUsage;
- Mutexed<android_dataspace> mDataspace;
- Mutexed<uint32_t> mPixelFormat;
-
- // WORKAROUND: timestamp adjustment
-
- // if >0: this is the max timestamp gap, if <0: this is -1 times the fixed timestamp gap
- // if 0: no timestamp adjustment is made
- // note that C2OMXNode can be recycled between encoding sessions.
- int32_t mAdjustTimestampGapUs;
- bool mFirstInputFrame; // true for first input
- c2_cntr64_t mPrevInputTimestamp; // input timestamp for previous frame
- c2_cntr64_t mPrevCodecTimestamp; // adjusted (codec) timestamp for previous frame
-
- Mutexed<std::map<uint64_t, buffer_id>> mBufferIdsInUse;
-
- class QueueThread;
- sp<QueueThread> mQueueThread;
+ std::shared_ptr<C2NodeImpl> mImpl;
};
} // namespace android
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 9c264af..a897fa0 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -21,11 +21,16 @@
#include <sstream>
#include <thread>
+#include <android_media_codec.h>
+
#include <C2Config.h>
#include <C2Debug.h>
#include <C2ParamInternal.h>
#include <C2PlatformSupport.h>
+#include <aidl/android/hardware/graphics/common/Dataspace.h>
+#include <aidl/android/media/IAidlGraphicBufferSource.h>
+#include <aidl/android/media/IAidlBufferSource.h>
#include <android/IOMXBufferSource.h>
#include <android/hardware/media/c2/1.0/IInputSurface.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -40,6 +45,11 @@
#include <media/openmax/OMX_Core.h>
#include <media/openmax/OMX_IndexExt.h>
#include <media/stagefright/foundation/avc_utils.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/Conversion.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h>
#include <media/stagefright/omx/1.0/WGraphicBufferSource.h>
#include <media/stagefright/omx/OmxGraphicBufferSource.h>
#include <media/stagefright/CCodec.h>
@@ -50,6 +60,7 @@
#include <media/stagefright/RenderedFrameInfo.h>
#include <utils/NativeHandle.h>
+#include "C2AidlNode.h"
#include "C2OMXNode.h"
#include "CCodecBufferChannel.h"
#include "CCodecConfig.h"
@@ -64,8 +75,14 @@
using ::android::hardware::graphics::bufferqueue::V1_0::utils::H2BGraphicBufferProducer;
using android::base::StringPrintf;
using ::android::hardware::media::c2::V1_0::IInputSurface;
+using ::aidl::android::media::IAidlBufferSource;
+using ::aidl::android::media::IAidlNode;
+using ::android::media::AidlGraphicBufferSource;
+using ::android::media::WAidlGraphicBufferSource;
+using ::android::media::aidl_conversion::fromAidlStatus;
typedef hardware::media::omx::V1_0::IGraphicBufferSource HGraphicBufferSource;
+typedef aidl::android::media::IAidlGraphicBufferSource AGraphicBufferSource;
typedef CCodecConfig Config;
namespace {
@@ -189,11 +206,11 @@
std::shared_ptr<Codec2Client::InputSurfaceConnection> mConnection;
};
-class GraphicBufferSourceWrapper : public InputSurfaceWrapper {
+class HGraphicBufferSourceWrapper : public InputSurfaceWrapper {
public:
typedef hardware::media::omx::V1_0::Status OmxStatus;
- GraphicBufferSourceWrapper(
+ HGraphicBufferSourceWrapper(
const sp<HGraphicBufferSource> &source,
uint32_t width,
uint32_t height,
@@ -202,7 +219,7 @@
mDataSpace = HAL_DATASPACE_BT709;
mConfig.mUsage = usage;
}
- ~GraphicBufferSourceWrapper() override = default;
+ ~HGraphicBufferSourceWrapper() override = default;
status_t connect(const std::shared_ptr<Codec2Client::Component> &comp) override {
mNode = new C2OMXNode(comp);
@@ -444,6 +461,224 @@
Config mConfig;
};
+class AGraphicBufferSourceWrapper : public InputSurfaceWrapper {
+public:
+ AGraphicBufferSourceWrapper(
+ const std::shared_ptr<AGraphicBufferSource> &source,
+ uint32_t width,
+ uint32_t height,
+ uint64_t usage)
+ : mSource(source), mWidth(width), mHeight(height) {
+ mDataSpace = HAL_DATASPACE_BT709;
+ mConfig.mUsage = usage;
+ }
+ ~AGraphicBufferSourceWrapper() override = default;
+
+ status_t connect(const std::shared_ptr<Codec2Client::Component> &comp) override {
+ mNode = ::ndk::SharedRefBase::make<C2AidlNode>(comp);
+ mNode->setFrameSize(mWidth, mHeight);
+ // Usage is queried during configure(), so set it beforehand.
+ uint64_t usage = mConfig.mUsage;
+ (void)mNode->setConsumerUsage((int64_t)usage);
+
+ return fromAidlStatus(mSource->configure(
+ mNode, static_cast<::aidl::android::hardware::graphics::common::Dataspace>(
+ mDataSpace)));
+ }
+
+ void disconnect() override {
+ if (mNode == nullptr) {
+ return;
+ }
+ std::shared_ptr<IAidlBufferSource> source = mNode->getSource();
+ if (source == nullptr) {
+ ALOGD("GBSWrapper::disconnect: node is not configured with an AIDL buffer source.");
+ return;
+ }
+ (void)source->onStop();
+ (void)source->onRelease();
+ mNode.reset();
+ }
+
+ status_t start() override {
+ std::shared_ptr<IAidlBufferSource> source = mNode->getSource();
+ if (source == nullptr) {
+ return NO_INIT;
+ }
+
+ size_t numSlots = 16;
+
+ IAidlNode::InputBufferParams param;
+ status_t err = fromAidlStatus(mNode->getInputBufferParams(&param));
+ if (err == OK) {
+ numSlots = param.bufferCountActual;
+ }
+
+ for (size_t i = 0; i < numSlots; ++i) {
+ (void)source->onInputBufferAdded(i);
+ }
+
+ (void)source->onStart();
+ return OK;
+ }
+
+ status_t signalEndOfInputStream() override {
+ return fromAidlStatus(mSource->signalEndOfInputStream());
+ }
+
+ status_t configure(Config &config) {
+ std::stringstream status;
+ status_t err = OK;
+
+ // handle each configuration granularly, in case we need to handle part of the configuration
+ // elsewhere
+
+ // TRICKY: we do not unset frame delay repeating
+ if (config.mMinFps > 0 && config.mMinFps != mConfig.mMinFps) {
+ int64_t us = 1e6 / config.mMinFps + 0.5;
+ status_t res = fromAidlStatus(mSource->setRepeatPreviousFrameDelayUs(us));
+ status << " minFps=" << config.mMinFps << " => repeatDelayUs=" << us;
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mMinFps = config.mMinFps;
+ }
+
+ // pts gap
+ if (config.mMinAdjustedFps > 0 || config.mFixedAdjustedFps > 0) {
+ if (mNode != nullptr) {
+ float gap = (config.mMinAdjustedFps > 0)
+ ? c2_min(INT32_MAX + 0., 1e6 / config.mMinAdjustedFps + 0.5)
+ : c2_max(0. - INT32_MAX, -1e6 / config.mFixedAdjustedFps - 0.5);
+ // float -> uint32_t is undefined if the value is negative.
+ // First convert to int32_t to ensure the expected behavior.
+ int32_t gapUs = int32_t(gap);
+ (void)mNode->setAdjustTimestampGapUs(gapUs);
+ }
+ }
+
+ // max fps
+ // TRICKY: we do not unset max fps to 0 unless using fixed fps
+ if ((config.mMaxFps > 0 || (config.mFixedAdjustedFps > 0 && config.mMaxFps == -1))
+ && config.mMaxFps != mConfig.mMaxFps) {
+ status_t res = fromAidlStatus(mSource->setMaxFps(config.mMaxFps));
+ status << " maxFps=" << config.mMaxFps;
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mMaxFps = config.mMaxFps;
+ }
+
+ if (config.mTimeOffsetUs != mConfig.mTimeOffsetUs) {
+ status_t res = fromAidlStatus(mSource->setTimeOffsetUs(config.mTimeOffsetUs));
+ status << " timeOffset " << config.mTimeOffsetUs << "us";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mTimeOffsetUs = config.mTimeOffsetUs;
+ }
+
+ if (config.mCaptureFps != mConfig.mCaptureFps || config.mCodedFps != mConfig.mCodedFps) {
+ status_t res =
+ fromAidlStatus(mSource->setTimeLapseConfig(config.mCodedFps, config.mCaptureFps));
+ status << " timeLapse " << config.mCaptureFps << "fps as " << config.mCodedFps << "fps";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mCaptureFps = config.mCaptureFps;
+ mConfig.mCodedFps = config.mCodedFps;
+ }
+
+ if (config.mStartAtUs != mConfig.mStartAtUs
+ || (config.mStopped != mConfig.mStopped && !config.mStopped)) {
+ status_t res = fromAidlStatus(mSource->setStartTimeUs(config.mStartAtUs));
+ status << " start at " << config.mStartAtUs << "us";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mStartAtUs = config.mStartAtUs;
+ mConfig.mStopped = config.mStopped;
+ }
+
+ // suspend-resume
+ if (config.mSuspended != mConfig.mSuspended) {
+ status_t res = fromAidlStatus(mSource->setSuspend(
+ config.mSuspended, config.mSuspendAtUs));
+ status << " " << (config.mSuspended ? "suspend" : "resume")
+ << " at " << config.mSuspendAtUs << "us";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mSuspended = config.mSuspended;
+ mConfig.mSuspendAtUs = config.mSuspendAtUs;
+ }
+
+ if (config.mStopped != mConfig.mStopped && config.mStopped) {
+ status_t res = fromAidlStatus(mSource->setStopTimeUs(config.mStopAtUs));
+ status << " stop at " << config.mStopAtUs << "us";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ } else {
+ status << " delayUs";
+ res = fromAidlStatus(mSource->getStopTimeOffsetUs(&config.mInputDelayUs));
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ } else {
+ status << "=" << config.mInputDelayUs << "us";
+ }
+ mConfig.mInputDelayUs = config.mInputDelayUs;
+ }
+ mConfig.mStopAtUs = config.mStopAtUs;
+ mConfig.mStopped = config.mStopped;
+ }
+
+ // color aspects (android._color-aspects)
+
+ // consumer usage is queried earlier.
+
+ // priority
+ if (mConfig.mPriority != config.mPriority) {
+ if (config.mPriority != INT_MAX) {
+ mNode->setPriority(config.mPriority);
+ }
+ mConfig.mPriority = config.mPriority;
+ }
+
+ if (status.str().empty()) {
+ ALOGD("ISConfig not changed");
+ } else {
+ ALOGD("ISConfig%s", status.str().c_str());
+ }
+ return err;
+ }
+
+ void onInputBufferDone(c2_cntr64_t index) override {
+ mNode->onInputBufferDone(index);
+ }
+
+ android_dataspace getDataspace() override {
+ return mNode->getDataspace();
+ }
+
+ uint32_t getPixelFormat() override {
+ return mNode->getPixelFormat();
+ }
+
+private:
+ std::shared_ptr<AGraphicBufferSource> mSource;
+ std::shared_ptr<C2AidlNode> mNode;
+ uint32_t mWidth;
+ uint32_t mHeight;
+ Config mConfig;
+};
+
class Codec2ClientInterfaceWrapper : public C2ComponentStore {
std::shared_ptr<Codec2Client> mClient;
@@ -1178,6 +1413,23 @@
}
}
+ /*
+ * configure a placeholder (whole-frame, zero-offset) region of interest if the ROI feature is enabled
+ */
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ if ((config->mDomain & Config::IS_ENCODER) && (config->mDomain & Config::IS_VIDEO)) {
+ int32_t enableRoi;
+ if (msg->findInt32("feature-region-of-interest", &enableRoi) && enableRoi != 0) {
+ if (!msg->contains(PARAMETER_KEY_QP_OFFSET_MAP) &&
+ !msg->contains(PARAMETER_KEY_QP_OFFSET_RECTS)) {
+ msg->setString(PARAMETER_KEY_QP_OFFSET_RECTS,
+ AStringPrintf("%d,%d-%d,%d=%d;", 0, 0, height, width, 0));
+ }
+ }
+ }
+ }
+
std::vector<std::unique_ptr<C2Param>> configUpdate;
// NOTE: We used to ignore "video-bitrate" at configure; replicate
// the behavior here.
@@ -1458,7 +1710,8 @@
int64_t blockUsage =
usage.value | C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE;
std::shared_ptr<C2GraphicBlock> block = FetchGraphicBlock(
- width, height, componentColorFormat, blockUsage, {comp->getName()});
+ align(width, 2), align(height, 2), componentColorFormat, blockUsage,
+ {comp->getName()});
sp<GraphicBlockBuffer> buffer;
if (block) {
buffer = GraphicBlockBuffer::Allocate(
@@ -1627,28 +1880,46 @@
}
sp<PersistentSurface> persistentSurface = CreateCompatibleInputSurface();
- sp<hidl::base::V1_0::IBase> hidlTarget = persistentSurface->getHidlTarget();
- sp<IInputSurface> hidlInputSurface = IInputSurface::castFrom(hidlTarget);
- sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
-
- if (hidlInputSurface) {
- std::shared_ptr<Codec2Client::InputSurface> inputSurface =
- std::make_shared<Codec2Client::InputSurface>(hidlInputSurface);
- err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
- inputSurface));
- bufferProducer = inputSurface->getGraphicBufferProducer();
- } else if (gbs) {
- int32_t width = 0;
- (void)outputFormat->findInt32("width", &width);
- int32_t height = 0;
- (void)outputFormat->findInt32("height", &height);
- err = setupInputSurface(std::make_shared<GraphicBufferSourceWrapper>(
- gbs, width, height, usage));
- bufferProducer = persistentSurface->getBufferProducer();
+ if (persistentSurface->isTargetAidl()) {
+ ::ndk::SpAIBinder aidlTarget = persistentSurface->getAidlTarget();
+ std::shared_ptr<AGraphicBufferSource> gbs = AGraphicBufferSource::fromBinder(aidlTarget);
+ if (gbs) {
+ int32_t width = 0;
+ (void)outputFormat->findInt32("width", &width);
+ int32_t height = 0;
+ (void)outputFormat->findInt32("height", &height);
+ err = setupInputSurface(std::make_shared<AGraphicBufferSourceWrapper>(
+ gbs, width, height, usage));
+ bufferProducer = persistentSurface->getBufferProducer();
+ } else {
+ ALOGE("Corrupted input surface(aidl)");
+ mCallback->onInputSurfaceCreationFailed(UNKNOWN_ERROR);
+ return;
+ }
} else {
- ALOGE("Corrupted input surface");
- mCallback->onInputSurfaceCreationFailed(UNKNOWN_ERROR);
- return;
+ sp<hidl::base::V1_0::IBase> hidlTarget = persistentSurface->getHidlTarget();
+ sp<IInputSurface> hidlInputSurface = IInputSurface::castFrom(hidlTarget);
+ sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
+
+ if (hidlInputSurface) {
+ std::shared_ptr<Codec2Client::InputSurface> inputSurface =
+ std::make_shared<Codec2Client::InputSurface>(hidlInputSurface);
+ err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
+ inputSurface));
+ bufferProducer = inputSurface->getGraphicBufferProducer();
+ } else if (gbs) {
+ int32_t width = 0;
+ (void)outputFormat->findInt32("width", &width);
+ int32_t height = 0;
+ (void)outputFormat->findInt32("height", &height);
+ err = setupInputSurface(std::make_shared<HGraphicBufferSourceWrapper>(
+ gbs, width, height, usage));
+ bufferProducer = persistentSurface->getBufferProducer();
+ } else {
+ ALOGE("Corrupted input surface");
+ mCallback->onInputSurfaceCreationFailed(UNKNOWN_ERROR);
+ return;
+ }
}
if (err != OK) {
@@ -1743,33 +2014,56 @@
outputFormat = config->mOutputFormat;
usage = config->mISConfig ? config->mISConfig->mUsage : 0;
}
- sp<hidl::base::V1_0::IBase> hidlTarget = surface->getHidlTarget();
- sp<IInputSurface> inputSurface = IInputSurface::castFrom(hidlTarget);
- sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
- if (inputSurface) {
- status_t err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
- std::make_shared<Codec2Client::InputSurface>(inputSurface)));
- if (err != OK) {
- ALOGE("Failed to set up input surface: %d", err);
- mCallback->onInputSurfaceDeclined(err);
- return;
- }
- } else if (gbs) {
- int32_t width = 0;
- (void)outputFormat->findInt32("width", &width);
- int32_t height = 0;
- (void)outputFormat->findInt32("height", &height);
- status_t err = setupInputSurface(std::make_shared<GraphicBufferSourceWrapper>(
- gbs, width, height, usage));
- if (err != OK) {
- ALOGE("Failed to set up input surface: %d", err);
- mCallback->onInputSurfaceDeclined(err);
+ if (surface->isTargetAidl()) {
+ ::ndk::SpAIBinder aidlTarget = surface->getAidlTarget();
+ std::shared_ptr<AGraphicBufferSource> gbs = AGraphicBufferSource::fromBinder(aidlTarget);
+ if (gbs) {
+ int32_t width = 0;
+ (void)outputFormat->findInt32("width", &width);
+ int32_t height = 0;
+ (void)outputFormat->findInt32("height", &height);
+
+ status_t err = setupInputSurface(std::make_shared<AGraphicBufferSourceWrapper>(
+ gbs, width, height, usage));
+ if (err != OK) {
+ ALOGE("Failed to set up input surface(aidl): %d", err);
+ mCallback->onInputSurfaceDeclined(err);
+ return;
+ }
+ } else {
+ ALOGE("Failed to set input surface(aidl): Corrupted surface.");
+ mCallback->onInputSurfaceDeclined(UNKNOWN_ERROR);
return;
}
} else {
- ALOGE("Failed to set input surface: Corrupted surface.");
- mCallback->onInputSurfaceDeclined(UNKNOWN_ERROR);
- return;
+ sp<hidl::base::V1_0::IBase> hidlTarget = surface->getHidlTarget();
+ sp<IInputSurface> inputSurface = IInputSurface::castFrom(hidlTarget);
+ sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
+ if (inputSurface) {
+ status_t err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
+ std::make_shared<Codec2Client::InputSurface>(inputSurface)));
+ if (err != OK) {
+ ALOGE("Failed to set up input surface: %d", err);
+ mCallback->onInputSurfaceDeclined(err);
+ return;
+ }
+ } else if (gbs) {
+ int32_t width = 0;
+ (void)outputFormat->findInt32("width", &width);
+ int32_t height = 0;
+ (void)outputFormat->findInt32("height", &height);
+ status_t err = setupInputSurface(std::make_shared<HGraphicBufferSourceWrapper>(
+ gbs, width, height, usage));
+ if (err != OK) {
+ ALOGE("Failed to set up input surface: %d", err);
+ mCallback->onInputSurfaceDeclined(err);
+ return;
+ }
+ } else {
+ ALOGE("Failed to set input surface: Corrupted surface.");
+ mCallback->onInputSurfaceDeclined(UNKNOWN_ERROR);
+ return;
+ }
}
// Formats can change after setupInputSurface
sp<AMessage> inputFormat;
@@ -1933,8 +2227,23 @@
// So we reverse their order for stopUseOutputSurface() to notify C2Fence waiters
// prior to comp->stop().
// See also b/300350761.
- mChannel->stopUseOutputSurface(pushBlankBuffer);
- status_t err = comp->stop();
+ //
+ // The workaround is no longer needed with the fetchGraphicBlock & C2Fence changes,
+ // so we revert to the logical sequence of operations when AIDL HALs are
+ // selected.
+ // When HIDL HALs are selected, we retain the workaround (the reversed order)
+ // by default in order to keep legacy behavior.
+ bool stopHalBeforeSurface =
+ Codec2Client::IsAidlSelected() ||
+ property_get_bool("debug.codec2.stop_hal_before_surface", false);
+ status_t err = C2_OK;
+ if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) {
+ err = comp->stop();
+ mChannel->stopUseOutputSurface(pushBlankBuffer);
+ } else {
+ mChannel->stopUseOutputSurface(pushBlankBuffer);
+ err = comp->stop();
+ }
if (err != C2_OK) {
// TODO: convert err into status_t
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
@@ -2029,8 +2338,22 @@
// So we reverse their order for stopUseOutputSurface() to notify C2Fence waiters
// prior to comp->release().
// See also b/300350761.
- mChannel->stopUseOutputSurface(pushBlankBuffer);
- comp->release();
+ //
+ // The workaround is no longer needed with the fetchGraphicBlock & C2Fence changes,
+ // so we revert to the logical sequence of operations when AIDL HALs are
+ // selected.
+ // When HIDL HALs are selected, we retain the workaround (the reversed order)
+ // by default in order to keep legacy behavior.
+ bool stopHalBeforeSurface =
+ Codec2Client::IsAidlSelected() ||
+ property_get_bool("debug.codec2.stop_hal_before_surface", false);
+ if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) {
+ comp->release();
+ mChannel->stopUseOutputSurface(pushBlankBuffer);
+ } else {
+ mChannel->stopUseOutputSurface(pushBlankBuffer);
+ comp->release();
+ }
{
Mutexed<State>::Locked state(mState);
@@ -2259,6 +2582,40 @@
}
}
+ /**
+ * Handle ROI QP map configuration. Recover the QP map configuration from the AMessage as an
+ * ABuffer and pass it to CCodecBufferChannel as a C2InfoBuffer.
+ */
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ sp<ABuffer> qpOffsetMap;
+ if ((config->mDomain & (Config::IS_VIDEO | Config::IS_IMAGE))
+ && (config->mDomain & Config::IS_ENCODER)
+ && params->findBuffer(PARAMETER_KEY_QP_OFFSET_MAP, &qpOffsetMap)) {
+ std::shared_ptr<C2BlockPool> pool;
+ // TODO(b/331443865) Use pooled block pool to improve efficiency
+ c2_status_t status = GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, nullptr, &pool);
+
+ if (status == C2_OK) {
+ size_t mapSize = qpOffsetMap->size();
+ std::shared_ptr<C2LinearBlock> block;
+ status = pool->fetchLinearBlock(mapSize,
+ C2MemoryUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block);
+ if (status == C2_OK && !block->map().get().error()) {
+ C2WriteView wView = block->map().get();
+ uint8_t* outData = wView.data();
+ memcpy(outData, qpOffsetMap->data(), mapSize);
+ C2InfoBuffer info = C2InfoBuffer::CreateLinearBuffer(
+ kParamIndexQpOffsetMapBuffer,
+ block->share(0, mapSize, C2Fence()));
+ mChannel->setInfoBuffer(std::make_shared<C2InfoBuffer>(info));
+ }
+ }
+ params->removeEntryByName(PARAMETER_KEY_QP_OFFSET_MAP);
+ }
+ }
+
+
std::vector<std::unique_ptr<C2Param>> configUpdate;
(void)config->getConfigUpdateFromSdkParams(
comp, params, Config::IS_PARAM, C2_MAY_BLOCK, &configUpdate);
@@ -2661,15 +3018,32 @@
Codec2Client::CreateInputSurface();
if (!inputSurface) {
if (property_get_int32("debug.stagefright.c2inputsurface", 0) == -1) {
- sp<IGraphicBufferProducer> gbp;
- sp<OmxGraphicBufferSource> gbs = new OmxGraphicBufferSource();
- status_t err = gbs->initCheck();
- if (err != OK) {
- ALOGE("Failed to create persistent input surface: error %d", err);
- return nullptr;
+ if (Codec2Client::IsAidlSelected()) {
+ sp<IGraphicBufferProducer> gbp;
+ sp<AidlGraphicBufferSource> gbs = new AidlGraphicBufferSource();
+ status_t err = gbs->initCheck();
+ if (err != OK) {
+ ALOGE("Failed to create persistent input surface: error %d", err);
+ return nullptr;
+ }
+ ALOGD("aidl based PersistentSurface created");
+ std::shared_ptr<WAidlGraphicBufferSource> wrapper =
+ ::ndk::SharedRefBase::make<WAidlGraphicBufferSource>(gbs);
+
+ return new PersistentSurface(
+ gbs->getIGraphicBufferProducer(), wrapper->asBinder());
+ } else {
+ sp<IGraphicBufferProducer> gbp;
+ sp<OmxGraphicBufferSource> gbs = new OmxGraphicBufferSource();
+ status_t err = gbs->initCheck();
+ if (err != OK) {
+ ALOGE("Failed to create persistent input surface: error %d", err);
+ return nullptr;
+ }
+ ALOGD("hidl based PersistentSurface created");
+ return new PersistentSurface(
+ gbs->getIGraphicBufferProducer(), new TWGraphicBufferSource(gbs));
}
- return new PersistentSurface(
- gbs->getIGraphicBufferProducer(), new TWGraphicBufferSource(gbs));
} else {
return nullptr;
}
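
The ROI block added to configure() earlier in this file's diff only injects a whole-frame, zero-offset rect when the caller asked for the feature but supplied neither a QP-offset map nor rects. A hedged sketch of what a caller-side format could look like under that flag; the key strings are the ones referenced in the diff, while the exact plumbing from the app layer down to this AMessage is assumed:

    // Hedged sketch: building an encoder configure() message that opts into ROI.
    // "feature-region-of-interest" and PARAMETER_KEY_QP_OFFSET_RECTS are the keys
    // the new CCodec/CCodecConfig code checks; support still depends on the
    // region_of_interest flags gating the hunks above (assumption).
    #include <media/stagefright/foundation/AMessage.h>
    #include <media/stagefright/foundation/AString.h>
    #include <media/stagefright/MediaCodecConstants.h>

    using namespace android;

    static sp<AMessage> makeRoiEncoderFormat(int32_t width, int32_t height) {
        sp<AMessage> format = new AMessage;
        format->setInt32("width", width);
        format->setInt32("height", height);
        format->setInt32("feature-region-of-interest", 1);
        // Optional explicit rects, as semicolon-separated "top,left-bottom,right=qpOffset"
        // runs; if omitted, CCodec adds a zero-offset whole-frame rect itself.
        format->setString(PARAMETER_KEY_QP_OFFSET_RECTS,
                          AStringPrintf("%d,%d-%d,%d=%d;", 0, 0, height, width, -4));
        return format;
    }
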
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index f58dc65..c7ab82f 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -28,6 +28,8 @@
#include <thread>
#include <chrono>
+#include <android_media_codec.h>
+
#include <C2AllocatorGralloc.h>
#include <C2PlatformSupport.h>
#include <C2BlockInternal.h>
@@ -370,7 +372,17 @@
}
} else {
work->input.flags = (C2FrameData::flags_t)flags;
+
// TODO: fill info's
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ if (mInfoBuffers.size()) {
+ for (auto infoBuffer : mInfoBuffers) {
+ work->input.infoBuffers.emplace_back(*infoBuffer);
+ }
+ mInfoBuffers.clear();
+ }
+ }
work->input.configUpdate = std::move(mParamsToBeSet);
if (tunnelFirstFrame) {
@@ -2058,6 +2070,7 @@
void CCodecBufferChannel::stop() {
mSync.stop();
mFirstValidFrameIndex = mFrameIndex.load(std::memory_order_relaxed);
+ mInfoBuffers.clear();
}
void CCodecBufferChannel::stopUseOutputSurface(bool pushBlankBuffer) {
@@ -2099,6 +2112,7 @@
}
void CCodecBufferChannel::release() {
+ mInfoBuffers.clear();
mComponent.reset();
mInputAllocator.reset();
mOutputSurface.lock()->surface.clear();
@@ -2164,6 +2178,7 @@
output->buffers->flushStash();
}
}
+ mInfoBuffers.clear();
}
void CCodecBufferChannel::onWorkDone(
@@ -2768,6 +2783,19 @@
}
}
+void CCodecBufferChannel::setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer) {
+ if (mInputSurface == nullptr) {
+ mInfoBuffers.push_back(buffer);
+ } else {
+ std::list<std::unique_ptr<C2Work>> items;
+ std::unique_ptr<C2Work> work(new C2Work);
+ work->input.infoBuffers.emplace_back(*buffer);
+ work->worklets.emplace_back(new C2Worklet);
+ items.push_back(std::move(work));
+ c2_status_t err = mComponent->queue(&items);
+ }
+}
+
status_t toStatusT(c2_status_t c2s, c2_operation_t c2op) {
// C2_OK is always translated to OK.
if (c2s == C2_OK) {
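
setInfoBuffer() above has two paths: with an input surface the info buffer is queued immediately as its own C2Work, otherwise it is stashed in mInfoBuffers and attached to the next input work by the flag-gated block added to queueInput earlier in this file (and dropped on stop/flush/release). A minimal hedged sketch of the stash-then-attach step, with the channel state simplified away:

    // Hedged sketch: copy pending C2InfoBuffers into the next input C2Work,
    // then clear the stash. Types are from the Codec2 framework headers.
    #include <C2Work.h>
    #include <memory>
    #include <vector>

    static void attachPendingInfoBuffers(
            std::vector<std::shared_ptr<C2InfoBuffer>> &pending, C2Work &work) {
        for (const auto &infoBuffer : pending) {
            work.input.infoBuffers.emplace_back(*infoBuffer);  // copied into the work
        }
        pending.clear();  // consumed once attached
    }
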
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index b470655..94a5998 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -227,6 +227,14 @@
void resetBuffersPixelFormat(bool isEncoder);
+ /**
+ * Queue a C2 info buffer that will be sent to the codec with the subsequent
+ * queueInputBuffer() call.
+ *
+ * @param buffer C2 info buffer
+ */
+ void setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer);
+
private:
uint32_t getInputBuffersPixelFormat();
@@ -400,6 +408,8 @@
std::atomic_bool mSendEncryptedInfoBuffer;
std::atomic_bool mTunneled;
+
+ std::vector<std::shared_ptr<C2InfoBuffer>> mInfoBuffers;
};
// Conversion of a c2_status_t value to a status_t value may depend on the
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index d313f33..3eec0f3 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -24,6 +24,7 @@
#include <C2PlatformSupport.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <media/stagefright/CodecBase.h>
#include <media/stagefright/MediaCodecConstants.h>
@@ -57,7 +58,7 @@
std::shared_ptr<C2GraphicBlock> block;
c2_status_t err = pool->fetchGraphicBlock(
- width, height, pixelFormat, fullUsage, &block);
+ align(width, 2), align(height, 2), pixelFormat, fullUsage, &block);
if (err != C2_OK) {
ALOGD("fetch graphic block failed: %d", err);
return nullptr;
@@ -1549,19 +1550,23 @@
sp<Codec2Buffer> LinearOutputBuffers::wrap(const std::shared_ptr<C2Buffer> &buffer) {
if (buffer == nullptr) {
- ALOGV("[%s] using a dummy buffer", mName);
+ ALOGD("[%s] received null buffer", mName);
return new LocalLinearBuffer(mFormat, new ABuffer(0));
}
if (buffer->data().type() != C2BufferData::LINEAR) {
- ALOGV("[%s] non-linear buffer %d", mName, buffer->data().type());
+ ALOGW("[%s] non-linear buffer %d", mName, buffer->data().type());
// We expect linear output buffers from the component.
return nullptr;
}
if (buffer->data().linearBlocks().size() != 1u) {
- ALOGV("[%s] no linear buffers", mName);
+ ALOGW("[%s] no linear buffers", mName);
// We expect one and only one linear block from the component.
return nullptr;
}
+ if (buffer->data().linearBlocks().front().size() == 0) {
+ ALOGD("[%s] received 0-sized buffer", mName);
+ return new LocalLinearBuffer(mFormat, new ABuffer(0));
+ }
sp<Codec2Buffer> clientBuffer = ConstLinearBlockBuffer::Allocate(mFormat, buffer);
if (clientBuffer == nullptr) {
ALOGD("[%s] ConstLinearBlockBuffer::Allocate failed", mName);
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index c22deca..db59227 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -19,6 +19,8 @@
#include <initializer_list>
+#include <android_media_codec.h>
+
#include <cutils/properties.h>
#include <log/log.h>
#include <utils/NativeHandle.h>
@@ -591,6 +593,13 @@
}
return C2Value();
}));
+
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ add(ConfigMapper(C2_PARAMKEY_QP_OFFSET_RECTS, C2_PARAMKEY_QP_OFFSET_RECTS, "")
+ .limitTo(D::VIDEO & (D::CONFIG | D::PARAM) & D::ENCODER & D::INPUT));
+ }
+
deprecated(ConfigMapper(PARAMETER_KEY_REQUEST_SYNC_FRAME,
"coding.request-sync", "value")
.limitTo(D::PARAM & D::ENCODER)
@@ -1121,6 +1130,11 @@
mParamUpdater->clear();
mParamUpdater->supportWholeParam(
C2_PARAMKEY_TEMPORAL_LAYERING, C2StreamTemporalLayeringTuning::CORE_INDEX);
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ mParamUpdater->supportWholeParam(
+ C2_PARAMKEY_QP_OFFSET_RECTS, C2StreamQpOffsetRects::CORE_INDEX);
+ }
mParamUpdater->addParamDesc(mReflector, mParamDescs);
// TEMP: add some standard fields even if not reflected
@@ -1871,6 +1885,39 @@
}
}
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ if (mDomain == (IS_VIDEO | IS_ENCODER)) {
+ AString qpOffsetRects;
+ if (params->findString(PARAMETER_KEY_QP_OFFSET_RECTS, &qpOffsetRects)) {
+ std::vector<C2QpOffsetRectStruct> c2QpOffsetRects;
+ char mutableStrQpOffsetRects[strlen(qpOffsetRects.c_str()) + 1];
+ strcpy(mutableStrQpOffsetRects, qpOffsetRects.c_str());
+ char* box = strtok(mutableStrQpOffsetRects, ";");
+ while (box != nullptr) {
+ int top, left, bottom, right, offset;
+ if (sscanf(box, "%d,%d-%d,%d=%d", &top, &left, &bottom, &right, &offset) == 5) {
+ left = c2_max(0, left);
+ top = c2_max(0, top);
+ if (right > left && bottom > top) {
+ C2Rect rect(right - left, bottom - top);
+ rect.at(left, top);
+ c2QpOffsetRects.push_back(C2QpOffsetRectStruct(rect, offset));
+ }
+ }
+ box = strtok(nullptr, ";");
+ }
+ if (c2QpOffsetRects.size() != 0) {
+ const std::unique_ptr<C2StreamQpOffsetRects::output> regions =
+ C2StreamQpOffsetRects::output::AllocUnique(
+ c2QpOffsetRects.size(), 0u, c2QpOffsetRects);
+ params->setBuffer(C2_PARAMKEY_QP_OFFSET_RECTS,
+ ABuffer::CreateAsCopy(regions.get(), regions->size()));
+ }
+ }
+ }
+ }
+
// this is to verify that we set proper signedness for standard parameters
bool beVeryStrict = property_get_bool("debug.stagefright.ccodec_strict_type", false);
// this is to allow vendors to use the wrong signedness for standard parameters
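
The string accepted for PARAMETER_KEY_QP_OFFSET_RECTS above is a run of semicolon-separated "top,left-bottom,right=qpOffset" entries; negative coordinates are clamped to zero and degenerate rects are dropped before conversion to C2StreamQpOffsetRects. A standalone, hedged sketch of that format (mirroring the sscanf/strtok logic, but returning plain structs rather than C2 params):

    // Hedged sketch: parse "top,left-bottom,right=qpOffset;" runs the same way the
    // hunk above does. For example, "0,0-16,16=-1;16,0-32,16=0;" yields two 16x16
    // rects, the first with QP offset -1.
    #include <algorithm>
    #include <cstdio>
    #include <cstring>
    #include <string>
    #include <vector>

    struct QpOffsetRect { int left, top, width, height, qpOffset; };

    static std::vector<QpOffsetRect> parseQpOffsetRects(const std::string &s) {
        std::vector<QpOffsetRect> rects;
        std::string copy(s);  // strtok modifies its input, so work on a copy
        char *box = strtok(copy.data(), ";");
        while (box != nullptr) {
            int top, left, bottom, right, offset;
            if (sscanf(box, "%d,%d-%d,%d=%d", &top, &left, &bottom, &right, &offset) == 5) {
                left = std::max(0, left);
                top = std::max(0, top);
                if (right > left && bottom > top) {
                    rects.push_back({left, top, right - left, bottom - top, offset});
                }
            }
            box = strtok(nullptr, ";");
        }
        return rects;
    }
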
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 9c514f2..2550dcf 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -179,10 +179,17 @@
if (!buffer
|| buffer->data().type() != C2BufferData::LINEAR
|| buffer->data().linearBlocks().size() != 1u) {
+ if (!buffer) {
+ ALOGD("ConstLinearBlockBuffer::Allocate: null buffer");
+ } else {
+ ALOGW("ConstLinearBlockBuffer::Allocate: type=%d # linear blocks=%zu",
+ buffer->data().type(), buffer->data().linearBlocks().size());
+ }
return nullptr;
}
C2ReadView readView(buffer->data().linearBlocks()[0].map().get());
if (readView.error() != C2_OK) {
+ ALOGW("ConstLinearBlockBuffer::Allocate: readView.error()=%d", readView.error());
return nullptr;
}
return new ConstLinearBlockBuffer(format, std::move(readView), buffer);
@@ -1137,7 +1144,7 @@
std::optional<Smpte2086> smpte2086;
status_t status = mapper.getSmpte2086(buffer.get(), &smpte2086);
- if (status != OK) {
+ if (status != OK || !smpte2086) {
err = C2_CORRUPTED;
} else {
if (smpte2086) {
@@ -1157,7 +1164,7 @@
std::optional<Cta861_3> cta861_3;
status = mapper.getCta861_3(buffer.get(), &cta861_3);
- if (status != OK) {
+ if (status != OK || !cta861_3) {
err = C2_CORRUPTED;
} else {
if (cta861_3) {
@@ -1176,7 +1183,7 @@
dynamicInfo->reset();
std::optional<std::vector<uint8_t>> vec;
status_t status = mapper.getSmpte2094_40(buffer.get(), &vec);
- if (status != OK) {
+ if (status != OK || !vec) {
dynamicInfo->reset();
err = C2_CORRUPTED;
} else {
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 37a7a4f..692f700 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -687,6 +687,11 @@
const MediaCodecsXmlParser::AttributeMap &attrMap = typeIt->second;
std::unique_ptr<MediaCodecInfo::CapabilitiesWriter> caps =
codecInfo->addMediaType(mediaType.c_str());
+
+ // Tunneled playback could be detected via the playback interface, but the advertised
+ // feature has never been derived that way, so for now rely only on the advertised feature.
+ bool canDoTunneledPlayback = false;
+
for (const auto &v : attrMap) {
std::string key = v.first;
std::string value = v.second;
@@ -707,6 +712,11 @@
// Ignore trailing bad characters and default to 0.
(void)sscanf(value.c_str(), "%d", &intValue);
caps->addDetail(key.c_str(), intValue);
+
+ if (key.compare(
+ MediaCodecInfo::Capabilities::FEATURE_TUNNELED_PLAYBACK) == 0) {
+ canDoTunneledPlayback = true;
+ }
} else {
caps->addDetail(key.c_str(), value.c_str());
}
@@ -774,6 +784,17 @@
}
}
}
+
+ if (android::media::codec::provider_->null_output_surface_support() &&
+ android::media::codec::provider_->null_output_surface()) {
+ // all non-tunneled video decoders support detached surface mode
+ if (trait.kind == C2Component::KIND_DECODER &&
+ trait.domain == C2Component::DOMAIN_VIDEO &&
+ !canDoTunneledPlayback) {
+ caps->addDetail(
+ MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE, 0);
+ }
+ }
}
}
}
diff --git a/media/codec2/sfplugin/tests/Android.bp b/media/codec2/sfplugin/tests/Android.bp
index 246e563..2739f44 100644
--- a/media/codec2/sfplugin/tests/Android.bp
+++ b/media/codec2/sfplugin/tests/Android.bp
@@ -42,6 +42,7 @@
],
static_libs: [
+ "android.media.codec-aconfig-cc",
"libcodec2_hidl@1.0",
"libstagefright_bufferpool@2.0",
],
diff --git a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
index 3615289..508bec2 100644
--- a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
@@ -20,6 +20,8 @@
#include <gtest/gtest.h>
+#include <android_media_codec.h>
+
#include <codec2/hidl/1.0/Configurable.h>
#include <codec2/hidl/client.h>
#include <util/C2InterfaceHelper.h>
@@ -235,6 +237,22 @@
})
.withSetter(Setter<C2StreamProfileLevelInfo::output>)
.build());
+
+ std::vector<C2QpOffsetRectStruct> c2QpOffsetRectsInfo;
+ addParameter(
+ DefineParam(mInputQpOffsetRects, C2_PARAMKEY_QP_OFFSET_RECTS)
+ .withDefault(C2StreamQpOffsetRects::output::AllocShared(
+ c2QpOffsetRectsInfo.size(), 0, c2QpOffsetRectsInfo))
+ .withFields({
+ C2F(mInputQpOffsetRects, m.values[0].qpOffset)
+ .inRange(-128, 127),
+ C2F(mInputQpOffsetRects, m.values[0].left).any(),
+ C2F(mInputQpOffsetRects, m.values[0].top).any(),
+ C2F(mInputQpOffsetRects, m.values[0].width).any(),
+ C2F(mInputQpOffsetRects, m.values[0].height).any(),
+ })
+ .withSetter(Setter<C2StreamQpOffsetRects::output>)
+ .build());
}
// TODO: more SDK params
@@ -254,6 +272,7 @@
std::shared_ptr<C2StreamBitrateInfo::output> mOutputBitrate;
std::shared_ptr<C2StreamProfileLevelInfo::input> mInputProfileLevel;
std::shared_ptr<C2StreamProfileLevelInfo::output> mOutputProfileLevel;
+ std::shared_ptr<C2StreamQpOffsetRects::output> mInputQpOffsetRects;
template<typename T>
static C2R Setter(bool, C2P<T> &) {
@@ -636,4 +655,56 @@
HdrProfilesTest,
::testing::ValuesIn(kHdrProfilesParams));
+TEST_F(CCodecConfigTest, SetRegionOfInterestParams) {
+ if (!android::media::codec::provider_->region_of_interest()
+ || !android::media::codec::provider_->region_of_interest_support()) {
+ GTEST_SKIP() << "Skipping the test as region_of_interest flags are not enabled.\n";
+ }
+
+ init(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER, MIMETYPE_VIDEO_VP9);
+
+ ASSERT_EQ(OK, mConfig.initialize(mReflector, mConfigurable));
+
+ const int kWidth = 32;
+ const int kHeight = 32;
+ const int kNumBlocks = ((kWidth + 15) / 16) * ((kHeight + 15) / 16);
+ int8_t mapInfo[kNumBlocks] = {-1, 0, 1, 1};
+ int top[kNumBlocks] = {0, 0, 16, 16};
+ int left[kNumBlocks] = {0, 16, 0, 16};
+ int bottom[kNumBlocks] = {16, 16, 32, 32};
+ int right[kNumBlocks] = {16, 32, 16, 32};
+ sp<AMessage> format{new AMessage};
+ format->setInt32(KEY_WIDTH, kWidth);
+ format->setInt32(KEY_HEIGHT, kHeight);
+ AString val;
+ for (int i = 0; i < kNumBlocks; i++) {
+ val.append(AStringPrintf("%d,%d-%d,%d=%d;", top[i], left[i], bottom[i],
+ right[i], mapInfo[i]));
+ }
+ format->setString(PARAMETER_KEY_QP_OFFSET_RECTS, val);
+
+ std::vector<std::unique_ptr<C2Param>> configUpdate;
+ ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(mConfigurable, format, D::CONFIG,
+ C2_MAY_BLOCK, &configUpdate));
+
+ EXPECT_EQ(1u, configUpdate.size());
+
+ C2StreamQpOffsetRects::output* qpRectParam =
+ FindParam<std::remove_pointer<decltype(qpRectParam)>::type>(configUpdate);
+ ASSERT_NE(nullptr, qpRectParam);
+ ASSERT_EQ(kNumBlocks, qpRectParam->flexCount());
+ for (auto i = 0; i < kNumBlocks; i++) {
+ EXPECT_EQ(mapInfo[i], (int8_t)qpRectParam->m.values[i].qpOffset)
+ << "qp offset for index " << i << " is not as expected ";
+ EXPECT_EQ(left[i], qpRectParam->m.values[i].left)
+ << "left for index " << i << " is not as expected ";
+ EXPECT_EQ(top[i], qpRectParam->m.values[i].top)
+ << "top for index " << i << " is not as expected ";
+ EXPECT_EQ(right[i] - left[i], qpRectParam->m.values[i].width)
+ << "width for index " << i << " is not as expected ";
+ EXPECT_EQ(bottom[i] - top[i], qpRectParam->m.values[i].height)
+ << "height for index " << i << " is not as expected ";
+ }
+}
+
} // namespace android
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 261fd05..75e9bbc 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -593,8 +593,6 @@
uint8_t *dstY, size_t dstStride, size_t dstVStride, size_t bufferSize,
const C2GraphicView &src, C2Color::matrix_t colorMatrix, C2Color::range_t colorRange) {
CHECK(dstY != nullptr);
- CHECK((src.width() & 1) == 0);
- CHECK((src.height() & 1) == 0);
if (dstStride * dstVStride * 3 / 2 > bufferSize) {
ALOGD("conversion buffer is too small for converting from RGB to YUV");
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index ff72b1f..7a33af4 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -32,10 +32,15 @@
namespace android {
-static bool isAtLeast(int version, const char *codeName) {
- char deviceCodeName[PROP_VALUE_MAX];
- __system_property_get("ro.build.version.codename", deviceCodeName);
- return android_get_device_api_level() >= version || !strcmp(deviceCodeName, codeName);
+static bool isAtLeast(int version, const std::string codeName) {
+ static std::once_flag sCheckOnce;
+ static std::string sDeviceCodeName;
+ static int sDeviceApiLevel;
+ std::call_once(sCheckOnce, [&](){
+ sDeviceCodeName = base::GetProperty("ro.build.version.codename", "");
+ sDeviceApiLevel = android_get_device_api_level();
+ });
+ return sDeviceApiLevel >= version || sDeviceCodeName == codeName;
}
bool isAtLeastT() {
@@ -46,6 +51,10 @@
return isAtLeast(__ANDROID_API_U__, "UpsideDownCake");
}
+bool isAtLeastV() {
+ return isAtLeast(__ANDROID_API_V__, "VanillaIceCream");
+}
+
static bool isP010Allowed() {
// The Vendor API level which is min(ro.product.first_api_level, ro.board.[first_]api_level).
// This is the api level to which VSR requirement the device conform.
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.h b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
index 9bb52bd..693b3db 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
@@ -25,6 +25,8 @@
bool isAtLeastU();
+bool isAtLeastV();
+
bool isVendorApiOrFirstApiAtLeastT();
/**
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 9f57bfd..dc06ee6 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -53,6 +53,7 @@
],
defaults: [
+ "aconfig_lib_cc_static_link.defaults",
"libcodec2_hal_selection",
],
diff --git a/media/libaaudio/examples/input_monitor/Android.bp b/media/libaaudio/examples/input_monitor/Android.bp
index 72adfd7..fc55290 100644
--- a/media/libaaudio/examples/input_monitor/Android.bp
+++ b/media/libaaudio/examples/input_monitor/Android.bp
@@ -11,7 +11,10 @@
name: "input_monitor",
gtest: false,
srcs: ["src/input_monitor.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
shared_libs: ["libaaudio"],
header_libs: ["libaaudio_example_utils"],
}
@@ -20,7 +23,10 @@
name: "input_monitor_callback",
gtest: false,
srcs: ["src/input_monitor_callback.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
shared_libs: ["libaaudio"],
header_libs: ["libaaudio_example_utils"],
}
diff --git a/media/libaaudio/examples/loopback/Android.bp b/media/libaaudio/examples/loopback/Android.bp
index b18aeec..bde1024 100644
--- a/media/libaaudio/examples/loopback/Android.bp
+++ b/media/libaaudio/examples/loopback/Android.bp
@@ -11,13 +11,16 @@
name: "aaudio_loopback",
gtest: false,
srcs: ["src/loopback.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
static_libs: ["libsndfile"],
include_dirs: ["external/oboe/apps/OboeTester/app/src/main/cpp"],
shared_libs: [
"libaaudio",
"libaudioutils",
- "liblog"
- ],
+ "liblog",
+ ],
header_libs: ["libaaudio_example_utils"],
}
diff --git a/media/libaaudio/examples/write_sine/Android.bp b/media/libaaudio/examples/write_sine/Android.bp
index 1c7e0f1..70b1764 100644
--- a/media/libaaudio/examples/write_sine/Android.bp
+++ b/media/libaaudio/examples/write_sine/Android.bp
@@ -10,7 +10,10 @@
cc_test {
name: "write_sine",
srcs: ["src/write_sine.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
shared_libs: ["libaaudio"],
header_libs: ["libaaudio_example_utils"],
}
@@ -18,7 +21,10 @@
cc_test {
name: "write_sine_callback",
srcs: ["src/write_sine_callback.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
shared_libs: ["libaaudio"],
header_libs: ["libaaudio_example_utils"],
}
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index 46c4148..e148a53 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -35,37 +35,37 @@
"libaaudio_headers",
],
shared_libs: [
- "libbinder",
+ "com.android.media.aaudio-aconfig-cc",
+ "libaudio_aidl_conversion_common_cpp",
+ "libaudioclient_aidl_conversion",
"libaudiomanager",
"libaudiopolicy",
- "libaudioclient_aidl_conversion",
- "libaudio_aidl_conversion_common_cpp",
+ "libbinder",
"libutils",
- "com.android.media.aaudio-aconfig-cc",
],
static_libs: [
- "liblog",
- "libcutils",
+ "aaudio-aidl-cpp",
+ "audioclient-types-aidl-cpp",
+ "audioflinger-aidl-cpp",
+ "audiopolicy-aidl-cpp",
+ "audiopolicy-types-aidl-cpp",
+ "av-types-aidl-cpp",
+ "framework-permission-aidl-cpp",
"libaaudio",
- "libjsoncpp",
+ "libaaudio_internal",
+ "libaudioclient",
+ "libaudioutils",
"libbase_ndk",
"libcgrouprc",
- "libaudioutils",
- "libaudioclient",
- "aaudio-aidl-cpp",
+ "libcgrouprc_format",
+ "libcutils",
+ "libjsoncpp",
+ "liblog",
"libmedia_helper",
"libmediametrics",
"libprocessgroup",
- "av-types-aidl-cpp",
- "libaaudio_internal",
- "libcgrouprc_format",
- "audiopolicy-aidl-cpp",
- "audioflinger-aidl-cpp",
- "audiopolicy-types-aidl-cpp",
- "audioclient-types-aidl-cpp",
- "shared-file-region-aidl-cpp",
- "framework-permission-aidl-cpp",
"mediametricsservice-aidl-cpp",
+ "shared-file-region-aidl-cpp",
],
fuzz_config: {
cc: [
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index fcb376c..8aaa4a0 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -56,10 +56,10 @@
"-bugprone-macro-parentheses", // found in SharedMemoryParcelable.h
"-bugprone-narrowing-conversions", // found in several interface from size_t to int32_t
- "-google-readability-casting", // C++ casts not always necessary and may be verbose
- "-google-readability-todo", // do not require TODO(info)
"-google-build-using-namespace", // Reenable and fix later.
"-google-global-names-in-headers", // found in several files
+ "-google-readability-casting", // C++ casts not always necessary and may be verbose
+ "-google-readability-todo", // do not require TODO(info)
"-misc-non-private-member-variables-in-classes", // found in aidl generated files
@@ -89,28 +89,27 @@
],
cflags: [
- "-Wthread-safety",
- "-Wno-unused-parameter",
"-Wall",
"-Werror",
- // By default, all symbols are hidden.
- // "-fvisibility=hidden",
+ "-Wno-unused-parameter",
+ "-Wthread-safety",
+
// AAUDIO_API is used to explicitly export a function or a variable as a visible symbol.
"-DAAUDIO_API=__attribute__((visibility(\"default\")))",
],
shared_libs: [
+ "framework-permission-aidl-cpp",
"libaaudio_internal",
"libaudioclient",
"libaudioutils",
+ "libbinder",
+ "libcutils",
+ "liblog",
"libmedia_helper",
"libmediametrics",
"libmediautils",
- "liblog",
- "libcutils",
"libutils",
- "libbinder",
- "framework-permission-aidl-cpp",
],
sanitize: {
@@ -128,7 +127,7 @@
tidy_checks_as_errors: tidy_errors,
tidy_flags: [
"-format-style=file",
- ]
+ ],
}
cc_library {
@@ -160,56 +159,49 @@
],
shared_libs: [
+ "aaudio-aidl-cpp",
+ "audioclient-types-aidl-cpp",
+ "com.android.media.aaudio-aconfig-cc",
+ "framework-permission-aidl-cpp",
"libaudioclient",
+ "libaudioclient_aidl_conversion",
"libaudioutils",
+ "libbinder",
+ "libcutils",
+ "liblog",
"libmedia_helper",
"libmediametrics",
"libmediautils",
- "liblog",
- "libcutils",
"libutils",
- "libbinder",
- "framework-permission-aidl-cpp",
- "aaudio-aidl-cpp",
- "audioclient-types-aidl-cpp",
- "libaudioclient_aidl_conversion",
- "com.android.media.aaudio-aconfig-cc",
],
cflags: [
- "-Wno-unused-parameter",
"-Wall",
"-Werror",
+ "-Wno-unused-parameter",
],
srcs: [
- "core/AudioGlobal.cpp",
- "core/AudioStream.cpp",
- "core/AudioStreamBuilder.cpp",
- "core/AAudioStreamParameters.cpp",
- "legacy/AudioStreamLegacy.cpp",
- "legacy/AudioStreamRecord.cpp",
- "legacy/AudioStreamTrack.cpp",
- "utility/AAudioUtilities.cpp",
- "utility/FixedBlockAdapter.cpp",
- "utility/FixedBlockReader.cpp",
- "utility/FixedBlockWriter.cpp",
- "fifo/FifoBuffer.cpp",
- "fifo/FifoControllerBase.cpp",
+ "binding/AAudioBinderAdapter.cpp",
+ "binding/AAudioBinderClient.cpp",
+ "binding/AAudioStreamConfiguration.cpp",
+ "binding/AAudioStreamRequest.cpp",
+ "binding/AudioEndpointParcelable.cpp",
+ "binding/RingBufferParcelable.cpp",
+ "binding/SharedMemoryParcelable.cpp",
+ "binding/SharedRegionParcelable.cpp",
"client/AAudioFlowGraph.cpp",
"client/AudioEndpoint.cpp",
"client/AudioStreamInternal.cpp",
"client/AudioStreamInternalCapture.cpp",
"client/AudioStreamInternalPlay.cpp",
"client/IsochronousClockModel.cpp",
- "binding/AudioEndpointParcelable.cpp",
- "binding/AAudioBinderAdapter.cpp",
- "binding/AAudioBinderClient.cpp",
- "binding/AAudioStreamRequest.cpp",
- "binding/AAudioStreamConfiguration.cpp",
- "binding/RingBufferParcelable.cpp",
- "binding/SharedMemoryParcelable.cpp",
- "binding/SharedRegionParcelable.cpp",
+ "core/AAudioStreamParameters.cpp",
+ "core/AudioGlobal.cpp",
+ "core/AudioStream.cpp",
+ "core/AudioStreamBuilder.cpp",
+ "fifo/FifoBuffer.cpp",
+ "fifo/FifoControllerBase.cpp",
"flowgraph/ChannelCountConverter.cpp",
"flowgraph/ClipToRange.cpp",
"flowgraph/FlowGraphNode.cpp",
@@ -217,20 +209,20 @@
"flowgraph/ManyToMultiConverter.cpp",
"flowgraph/MonoBlend.cpp",
"flowgraph/MonoToMultiConverter.cpp",
- "flowgraph/MultiToMonoConverter.cpp",
"flowgraph/MultiToManyConverter.cpp",
+ "flowgraph/MultiToMonoConverter.cpp",
"flowgraph/RampLinear.cpp",
"flowgraph/SampleRateConverter.cpp",
"flowgraph/SinkFloat.cpp",
+ "flowgraph/SinkI8_24.cpp",
"flowgraph/SinkI16.cpp",
"flowgraph/SinkI24.cpp",
"flowgraph/SinkI32.cpp",
- "flowgraph/SinkI8_24.cpp",
"flowgraph/SourceFloat.cpp",
+ "flowgraph/SourceI8_24.cpp",
"flowgraph/SourceI16.cpp",
"flowgraph/SourceI24.cpp",
"flowgraph/SourceI32.cpp",
- "flowgraph/SourceI8_24.cpp",
"flowgraph/resampler/IntegerRatio.cpp",
"flowgraph/resampler/LinearResampler.cpp",
"flowgraph/resampler/MultiChannelResampler.cpp",
@@ -239,6 +231,13 @@
"flowgraph/resampler/PolyphaseResamplerStereo.cpp",
"flowgraph/resampler/SincResampler.cpp",
"flowgraph/resampler/SincResamplerStereo.cpp",
+ "legacy/AudioStreamLegacy.cpp",
+ "legacy/AudioStreamRecord.cpp",
+ "legacy/AudioStreamTrack.cpp",
+ "utility/AAudioUtilities.cpp",
+ "utility/FixedBlockAdapter.cpp",
+ "utility/FixedBlockReader.cpp",
+ "utility/FixedBlockWriter.cpp",
],
sanitize: {
integer_overflow: true,
@@ -250,7 +249,7 @@
tidy_checks_as_errors: tidy_errors,
tidy_flags: [
"-format-style=file",
- ]
+ ],
}
aidl_interface {
@@ -262,20 +261,19 @@
],
srcs: [
"binding/aidl/aaudio/Endpoint.aidl",
+ "binding/aidl/aaudio/IAAudioClient.aidl",
+ "binding/aidl/aaudio/IAAudioService.aidl",
"binding/aidl/aaudio/RingBuffer.aidl",
"binding/aidl/aaudio/SharedRegion.aidl",
"binding/aidl/aaudio/StreamParameters.aidl",
"binding/aidl/aaudio/StreamRequest.aidl",
- "binding/aidl/aaudio/IAAudioClient.aidl",
- "binding/aidl/aaudio/IAAudioService.aidl",
],
imports: [
"audioclient-types-aidl",
- "shared-file-region-aidl",
"framework-permission-aidl",
+ "shared-file-region-aidl",
],
- backend:
- {
+ backend: {
java: {
sdk_version: "module_current",
},
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index d59afef..23cc28c 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -183,9 +183,9 @@
defaults: ["libaaudio_tests_defaults"],
srcs: ["test_full_queue.cpp"],
shared_libs: [
- "libaaudio",
- "liblog"
- ],
+ "libaaudio",
+ "liblog",
+ ],
}
cc_test {
@@ -205,9 +205,9 @@
srcs: ["test_steal_exclusive.cpp"],
shared_libs: [
"libaaudio",
- "liblog",
"libbinder",
"libcutils",
+ "liblog",
"libutils",
],
}
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 987a20c..ad717fa 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -29,14 +29,14 @@
static_libs: [
"audioflinger-aidl-cpp",
"audiopolicy-aidl-cpp",
- "spatializer-aidl-cpp",
"av-types-aidl-cpp",
+ "spatializer-aidl-cpp",
],
export_static_lib_headers: [
"audioflinger-aidl-cpp",
"audiopolicy-aidl-cpp",
- "spatializer-aidl-cpp",
"av-types-aidl-cpp",
+ "spatializer-aidl-cpp",
],
target: {
darwin: {
@@ -48,11 +48,11 @@
cc_library {
name: "libaudiopolicy",
srcs: [
- "VolumeGroupAttributes.cpp",
"AudioPolicy.cpp",
"AudioProductStrategy.cpp",
"AudioVolumeGroup.cpp",
- "PolicyAidlConversion.cpp"
+ "PolicyAidlConversion.cpp",
+ "VolumeGroupAttributes.cpp",
],
defaults: [
"latest_android_media_audio_common_types_cpp_export_shared",
@@ -63,8 +63,8 @@
"audiopolicy-aidl-cpp",
"audiopolicy-types-aidl-cpp",
"capture_state_listener-aidl-cpp",
- "libaudiofoundation",
"libaudioclient_aidl_conversion",
+ "libaudiofoundation",
"libaudioutils",
"libbinder",
"libcutils",
@@ -72,8 +72,8 @@
"libutils",
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
include_dirs: ["system/media/audio_utils/include"],
export_include_dirs: ["include"],
@@ -83,8 +83,8 @@
"audiopolicy-aidl-cpp",
"audiopolicy-types-aidl-cpp",
"capture_state_listener-aidl-cpp",
- "libaudiofoundation",
"libaudioclient_aidl_conversion",
+ "libaudiofoundation",
],
header_libs: ["libaudioclient_headers"],
}
@@ -112,9 +112,9 @@
"AudioTrack.cpp",
"AudioTrackShared.cpp",
"IAudioFlinger.cpp",
- "ToneGenerator.cpp",
"PlayerBase.cpp",
"RecordingActivityTracker.cpp",
+ "ToneGenerator.cpp",
"TrackPlayerBase.cpp",
],
defaults: [
@@ -124,16 +124,16 @@
"audioclient-types-aidl-cpp",
"audioflinger-aidl-cpp",
"audiopolicy-aidl-cpp",
- "spatializer-aidl-cpp",
"audiopolicy-types-aidl-cpp",
"av-types-aidl-cpp",
"capture_state_listener-aidl-cpp",
+ "framework-permission-aidl-cpp",
"libaudio_aidl_conversion_common_cpp",
"libaudioclient_aidl_conversion",
"libaudiofoundation",
- "libaudioutils",
- "libaudiopolicy",
"libaudiomanager",
+ "libaudiopolicy",
+ "libaudioutils",
"libbinder",
"libcutils",
"libdl",
@@ -145,24 +145,24 @@
"libprocessgroup",
"libshmemcompat",
"libutils",
- "framework-permission-aidl-cpp",
"packagemanager_aidl-cpp",
+ "spatializer-aidl-cpp",
],
export_shared_lib_headers: [
"audioflinger-aidl-cpp",
"audiopolicy-aidl-cpp",
- "spatializer-aidl-cpp",
"framework-permission-aidl-cpp",
"libbinder",
"libmediametrics",
+ "spatializer-aidl-cpp",
],
include_dirs: [
"frameworks/av/media/libnbaio/include_mono/",
],
local_include_dirs: [
- "include/media",
"aidl",
+ "include/media",
],
header_libs: [
"libaudioclient_headers",
@@ -189,8 +189,8 @@
],
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
},
}
@@ -227,8 +227,8 @@
filegroup {
name: "libaudioclient_aidl",
srcs: [
- "aidl/android/media/IPlayer.aidl",
"aidl/android/media/AudioHalVersion.aidl",
+ "aidl/android/media/IPlayer.aidl",
],
path: "aidl",
}
@@ -294,14 +294,14 @@
"aidl/android/media/AudioIoDescriptor.aidl",
"aidl/android/media/AudioPatchFw.aidl",
"aidl/android/media/AudioPolicyConfig.aidl",
- "aidl/android/media/AudioPortFw.aidl",
- "aidl/android/media/AudioPortSys.aidl",
"aidl/android/media/AudioPortConfigFw.aidl",
"aidl/android/media/AudioPortConfigSys.aidl",
"aidl/android/media/AudioPortDeviceExtSys.aidl",
"aidl/android/media/AudioPortExtSys.aidl",
+ "aidl/android/media/AudioPortFw.aidl",
"aidl/android/media/AudioPortMixExtSys.aidl",
"aidl/android/media/AudioPortRole.aidl",
+ "aidl/android/media/AudioPortSys.aidl",
"aidl/android/media/AudioPortType.aidl",
"aidl/android/media/AudioProfileSys.aidl",
"aidl/android/media/AudioRoute.aidl",
@@ -310,8 +310,8 @@
"aidl/android/media/AudioVibratorInfo.aidl",
"aidl/android/media/DeviceConnectedState.aidl",
"aidl/android/media/EffectDescriptor.aidl",
- "aidl/android/media/TrackSecondaryOutputInfo.aidl",
"aidl/android/media/SurroundSoundConfig.aidl",
+ "aidl/android/media/TrackSecondaryOutputInfo.aidl",
],
defaults: [
"latest_android_media_audio_common_types_import_interface",
@@ -332,6 +332,7 @@
},
},
}
+
aidl_interface {
name: "audiopolicy-types-aidl",
unstable: true,
@@ -342,14 +343,14 @@
srcs: [
"aidl/android/media/AudioAttributesEx.aidl",
"aidl/android/media/AudioMix.aidl",
- "aidl/android/media/AudioMixUpdate.aidl",
- "aidl/android/media/AudioMixerAttributesInternal.aidl",
- "aidl/android/media/AudioMixerBehavior.aidl",
"aidl/android/media/AudioMixCallbackFlag.aidl",
"aidl/android/media/AudioMixMatchCriterion.aidl",
"aidl/android/media/AudioMixMatchCriterionValue.aidl",
"aidl/android/media/AudioMixRouteFlag.aidl",
"aidl/android/media/AudioMixType.aidl",
+ "aidl/android/media/AudioMixUpdate.aidl",
+ "aidl/android/media/AudioMixerAttributesInternal.aidl",
+ "aidl/android/media/AudioMixerBehavior.aidl",
"aidl/android/media/AudioOffloadMode.aidl",
"aidl/android/media/AudioPolicyDeviceState.aidl",
"aidl/android/media/AudioPolicyForceUse.aidl",
@@ -398,8 +399,8 @@
"aidl/android/media/OpenOutputResponse.aidl",
"aidl/android/media/RenderPosition.aidl",
- "aidl/android/media/IAudioFlingerService.aidl",
"aidl/android/media/IAudioFlingerClient.aidl",
+ "aidl/android/media/IAudioFlingerService.aidl",
"aidl/android/media/IAudioRecord.aidl",
"aidl/android/media/IAudioTrack.aidl",
"aidl/android/media/IAudioTrackCallback.aidl",
@@ -415,8 +416,8 @@
"audioclient-types-aidl",
"av-types-aidl",
"effect-aidl",
- "shared-file-region-aidl",
"framework-permission-aidl",
+ "shared-file-region-aidl",
],
double_loadable: true,
backend: {
@@ -443,9 +444,9 @@
"aidl/android/media/GetInputForAttrResponse.aidl",
"aidl/android/media/GetOutputForAttrResponse.aidl",
"aidl/android/media/GetSpatializerResponse.aidl",
- "aidl/android/media/RecordClientInfo.aidl",
"aidl/android/media/IAudioPolicyService.aidl",
"aidl/android/media/IAudioPolicyServiceClient.aidl",
+ "aidl/android/media/RecordClientInfo.aidl",
],
defaults: [
"latest_android_media_audio_common_types_import_interface",
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index b7a30dc..a25d7ff 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -83,7 +83,7 @@
typename ServiceTraits>
class ServiceHandler {
public:
- sp<ServiceInterface> getService(bool canStartThreadPool = true)
+ sp<ServiceInterface> getService()
EXCLUDES(mMutex) NO_THREAD_SAFETY_ANALYSIS { // std::unique_ptr
sp<ServiceInterface> service;
sp<Client> client;
@@ -140,7 +140,7 @@
client = mClient;
service = mService;
// Make sure callbacks can be received by the client
- if (canStartThreadPool) {
+ if (mCanStartThreadPool) {
ProcessState::self()->startThreadPool();
}
ul.unlock();
@@ -183,6 +183,10 @@
if (mClient) ServiceTraits::onClearService(mClient);
}
+ void disableThreadPool() {
+ mCanStartThreadPool = false;
+ }
+
private:
std::mutex mSingleGetter;
std::mutex mMutex;
@@ -191,6 +195,7 @@
sp<ServiceInterface> mLocalService GUARDED_BY(mMutex);
sp<ServiceInterface> mService GUARDED_BY(mMutex);
sp<Client> mClient GUARDED_BY(mMutex);
+ std::atomic<bool> mCanStartThreadPool = true;
};
struct AudioFlingerTraits {
@@ -221,10 +226,6 @@
return gAudioFlingerServiceHandler.getService();
}
-sp<IAudioFlinger> AudioSystem::get_audio_flinger_for_fuzzer() {
- return gAudioFlingerServiceHandler.getService(false /* canStartThreadPool */);
-}
-
sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
return gAudioFlingerServiceHandler.getClient();
}
@@ -954,6 +955,11 @@
gAudioPolicyServiceHandler.clearService();
}
+void AudioSystem::disableThreadPool() {
+ gAudioFlingerServiceHandler.disableThreadPool();
+ gAudioPolicyServiceHandler.disableThreadPool();
+}
+
// ---------------------------------------------------------------------------
void AudioSystem::onNewAudioModulesAvailable() {
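
With the fuzzer-only getService(false) parameter gone, fuzzers opt out of the binder thread pool by calling the new AudioSystem::disableThreadPool() on both service handlers before the first service lookup. A hedged sketch of the intended call order (the real wiring is in audioflinger_aidl_fuzzer.cpp further below; this usage is inferred from the API shape):

    // Hedged sketch: replacement for the removed get_audio_flinger_for_fuzzer().
    #include <media/AudioSystem.h>

    static void setUpAudioSystemForFuzzing() {
        // Must run before the first getService() so neither handler starts the
        // binder thread pool inside the fuzzing process.
        android::AudioSystem::disableThreadPool();
        (void)android::AudioSystem::get_audio_flinger();  // safe to fetch now
    }
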
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 2afe80c..d6b1163 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1707,14 +1707,14 @@
mSelectedDeviceId = deviceId;
if (mStatus == NO_ERROR) {
if (isOffloadedOrDirect_l()) {
- if (mState == STATE_STOPPED || mState == STATE_FLUSHED) {
- ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
- result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
- } else {
+ if (isPlaying_l()) {
ALOGW("%s(%d). Offloaded or Direct track is not STOPPED or FLUSHED. "
"State: %s.",
__func__, mPortId, stateToString(mState));
result = INVALID_OPERATION;
+ } else {
+ ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
+ result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
}
} else {
// allow track invalidation when track is not playing to propagate
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index 6093933..1071beb 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -18,44 +18,45 @@
name: "libaudioclient_aidl_fuzzer_defaults",
static_libs: [
"android.hardware.audio.common@7.0-enums",
- "effect-aidl-cpp",
+ "libaudiomockhal",
"libcgrouprc",
"libcgrouprc_format",
"libfakeservicemanager",
"libjsoncpp",
"liblog",
- "libmediametricsservice",
"libmedia_helper",
+ "libmediametricsservice",
"libprocessgroup",
"shared-file-region-aidl-cpp",
],
shared_libs: [
"android.hardware.audio.common-util",
"audioclient-types-aidl-cpp",
+ "audioflinger-aidl-cpp",
"audiopolicy-aidl-cpp",
"audiopolicy-types-aidl-cpp",
"av-types-aidl-cpp",
"capture_state_listener-aidl-cpp",
+ "effect-aidl-cpp",
"framework-permission-aidl-cpp",
+ "libactivitymanager_aidl",
"libaudioclient",
- "audioflinger-aidl-cpp",
- "libaudioflinger",
"libaudioclient_aidl_conversion",
+ "libaudioflinger",
"libaudiofoundation",
+ "libaudiohal",
"libaudiomanager",
"libaudiopolicy",
- "libaudioutils",
- "libaudiopolicyservice",
"libaudiopolicymanagerdefault",
- "libaudiohal",
+ "libaudiopolicyservice",
"libaudioprocessing",
- "libactivitymanager_aidl",
+ "libaudioutils",
"libdl",
"libheadtracking",
- "libmediautils",
"libmediametrics",
- "libnblog",
+ "libmediautils",
"libnbaio",
+ "libnblog",
"libpowermanager",
"libvibrator",
"libvndksupport",
@@ -64,16 +65,16 @@
"packagemanager_aidl-cpp",
],
header_libs: [
- "libaudiopolicymanager_interface_headers",
+ "libaudioflinger_headers",
"libaudiofoundation_headers",
"libaudiohal_headers",
- "libaudioflinger_headers",
+ "libaudiopolicymanager_interface_headers",
"libbinder_headers",
"libmedia_headers",
],
- fuzz_config: {
+ fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-audio-fuzzing-reports@google.com",
],
componentid: 155276,
hotlists: ["4593311"],
@@ -89,7 +90,10 @@
name: "audioflinger_aidl_fuzzer",
srcs: ["audioflinger_aidl_fuzzer.cpp"],
defaults: [
+ "latest_android_hardware_audio_core_ndk_shared",
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
+ "latest_android_hardware_audio_effect_ndk_shared",
"libaudioclient_aidl_fuzzer_defaults",
- "service_fuzzer_defaults"
+ "service_fuzzer_defaults",
],
}
diff --git a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
index f99cc3b..c7a04da 100644
--- a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
+++ b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
@@ -17,8 +17,12 @@
#include <AudioFlinger.h>
#include <android-base/logging.h>
#include <android/binder_interface_utils.h>
+#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <android/media/IAudioPolicyService.h>
+#include <core-mock/ConfigMock.h>
+#include <core-mock/ModuleMock.h>
+#include <effect-mock/FactoryMock.h>
#include <fakeservicemanager/FakeServiceManager.h>
#include <fuzzbinder/libbinder_driver.h>
#include <fuzzbinder/random_binder.h>
@@ -32,6 +36,7 @@
[[clang::no_destroy]] static std::once_flag gSmOnce;
sp<FakeServiceManager> gFakeServiceManager;
+sp<AudioFlingerServerAdapter> gAudioFlingerServerAdapter;
bool addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
FuzzedDataProvider& fdp) {
@@ -43,46 +48,58 @@
return true;
}
+extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
+ /* Create a FakeServiceManager instance and add required services */
+ gFakeServiceManager = sp<FakeServiceManager>::make();
+ setDefaultServiceManager(gFakeServiceManager);
+
+ auto configService = ndk::SharedRefBase::make<ConfigMock>();
+ CHECK_EQ(NO_ERROR, AServiceManager_addService(configService.get()->asBinder().get(),
+ "android.hardware.audio.core.IConfig/default"));
+
+ auto factoryService = ndk::SharedRefBase::make<FactoryMock>();
+ CHECK_EQ(NO_ERROR,
+ AServiceManager_addService(factoryService.get()->asBinder().get(),
+ "android.hardware.audio.effect.IFactory/default"));
+
+ auto moduleService = ndk::SharedRefBase::make<ModuleMock>();
+ CHECK_EQ(NO_ERROR, AServiceManager_addService(moduleService.get()->asBinder().get(),
+ "android.hardware.audio.core.IModule/default"));
+
+ // Disable thread pool creation for the fuzzer's instances of the audio flinger and audio policy services
+ AudioSystem::disableThreadPool();
+
+ return 0;
+}
+
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
FuzzedDataProvider fdp(data, size);
- std::call_once(gSmOnce, [&] {
- /* Create a FakeServiceManager instance and add required services */
- gFakeServiceManager = sp<FakeServiceManager>::make();
- setDefaultServiceManager(gFakeServiceManager);
- });
- gFakeServiceManager->clear();
-
- for (const char* service :
- {"activity", "sensor_privacy", "permission", "scheduling_policy",
- "android.hardware.audio.core.IConfig", "batterystats", "media.metrics"}) {
+ for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
+ "batterystats", "media.metrics"}) {
if (!addService(String16(service), gFakeServiceManager, fdp)) {
return 0;
}
}
- const auto audioFlinger = sp<AudioFlinger>::make();
- const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+ // TODO(330882064): Initialize the AudioFlinger and AudioPolicy services on every fuzzer iteration
+ std::call_once(gSmOnce, [&] {
+ const auto audioFlinger = sp<AudioFlinger>::make();
+ gAudioFlingerServerAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+ CHECK_EQ(NO_ERROR,
+ gFakeServiceManager->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+ IInterface::asBinder(gAudioFlingerServerAdapter),
+ false /* allowIsolated */,
+ IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
- CHECK_EQ(NO_ERROR,
- gFakeServiceManager->addService(
- String16(IAudioFlinger::DEFAULT_SERVICE_NAME), IInterface::asBinder(afAdapter),
- false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+ const auto audioPolicyService = sp<AudioPolicyService>::make();
+ CHECK_EQ(NO_ERROR,
+ gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
+ false /* allowIsolated */,
+ IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+ });
- AudioSystem::get_audio_flinger_for_fuzzer();
- const auto audioPolicyService = sp<AudioPolicyService>::make();
-
- CHECK_EQ(NO_ERROR,
- gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
- false /* allowIsolated */,
- IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
-
- sp<IBinder> audioFlingerServiceBinder =
- gFakeServiceManager->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
- sp<media::IAudioFlingerService> audioFlingerService =
- interface_cast<media::IAudioFlingerService>(audioFlingerServiceBinder);
-
- fuzzService(media::IAudioFlingerService::asBinder(audioFlingerService), std::move(fdp));
+ fuzzService(media::IAudioFlingerService::asBinder(gAudioFlingerServerAdapter), std::move(fdp));
return 0;
}
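Taken together, the fuzzer now splits its work into one-time setup (mock HAL registration and thread-pool disabling in LLVMFuzzerInitialize, AudioFlinger/AudioPolicy creation guarded by std::call_once) and a per-iteration fuzzService() call against the cached adapter. A distilled sketch of that pattern, with a hypothetical MyService standing in for AudioFlinger (names are illustrative only, not part of this change):

    [[clang::no_destroy]] static std::once_flag gOnce;
    static sp<FakeServiceManager> gFakeSm;
    static sp<MyService> gService;

    extern "C" int LLVMFuzzerInitialize(int*, char***) {
        gFakeSm = sp<FakeServiceManager>::make();      // process-wide fake service manager
        setDefaultServiceManager(gFakeSm);
        // ...register mock HAL instances here, before any client can resolve them...
        return 0;
    }

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        FuzzedDataProvider fdp(data, size);
        std::call_once(gOnce, [] {                     // heavyweight service is built exactly once
            gService = sp<MyService>::make();
            gFakeSm->addService(String16("my.service"), IInterface::asBinder(gService));
        });
        fuzzService(IInterface::asBinder(gService), std::move(fdp));
        return 0;
    }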
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/Android.bp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/Android.bp
new file mode 100644
index 0000000..c4afffb
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/Android.bp
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_library {
+ name: "libaudiomockhal",
+
+ defaults: [
+ "latest_android_hardware_audio_core_ndk_shared",
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
+ "latest_android_hardware_audio_effect_ndk_shared",
+ ],
+ header_libs: [
+ "libbinder_headers",
+ ],
+ static_libs: [
+ "libbinder_random_parcel",
+ ],
+ shared_libs: [
+ "libbinder_ndk",
+ ],
+
+ host_supported: true,
+ srcs: [
+ "FactoryMock.cpp",
+ "ModuleMock.cpp",
+ "StreamInMock.cpp",
+ "StreamOutMock.cpp",
+ ],
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+ export_include_dirs: ["include"],
+}
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/FactoryMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/FactoryMock.cpp
new file mode 100644
index 0000000..ea07afc
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/FactoryMock.cpp
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "effect-mock/FactoryMock.h"
+#include "effect-mock/EffectMock.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+ndk::ScopedAStatus FactoryMock::createEffect(const AudioUuid&,
+ std::shared_ptr<IEffect>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<EffectMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/ModuleMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/ModuleMock.cpp
new file mode 100644
index 0000000..711924f
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/ModuleMock.cpp
@@ -0,0 +1,144 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "core-mock/ModuleMock.h"
+#include "core-mock/BluetoothA2dpMock.h"
+#include "core-mock/BluetoothLeMock.h"
+#include "core-mock/BluetoothMock.h"
+#include "core-mock/StreamInMock.h"
+#include "core-mock/StreamOutMock.h"
+#include "core-mock/TelephonyMock.h"
+#include "sounddose-mock/SoundDoseMock.h"
+
+namespace aidl::android::hardware::audio::core {
+
+ModuleMock::ModuleMock() {
+ // Device ports
+ auto outDevice = createPort(/* PortId */ 0, /* Name */ "Default",
+ /* Flags */ 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE,
+ /* isInput */ false,
+ createDeviceExt(
+ /* DeviceType */ AudioDeviceType::OUT_DEFAULT,
+ /* Flags */ AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE));
+ mPorts.push_back(outDevice);
+ auto inDevice = createPort(/* PortId */ 1, /* Name */ "Default",
+ /* Flags */ 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE,
+ /* isInput */ true,
+ createDeviceExt(
+ /* DeviceType */ AudioDeviceType::IN_DEFAULT,
+ /* Flags */ 0));
+ mPorts.push_back(inDevice);
+}
+
+ndk::ScopedAStatus ModuleMock::getTelephony(std::shared_ptr<ITelephony>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<TelephonyMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getBluetooth(std::shared_ptr<IBluetooth>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<BluetoothMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getBluetoothA2dp(std::shared_ptr<IBluetoothA2dp>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<BluetoothA2dpMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getBluetoothLe(std::shared_ptr<IBluetoothLe>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<BluetoothLeMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::openInputStream(const OpenInputStreamArguments&,
+ OpenInputStreamReturn* _aidl_return) {
+ _aidl_return->stream = ndk::SharedRefBase::make<StreamInMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::openOutputStream(const OpenOutputStreamArguments&,
+ OpenOutputStreamReturn* _aidl_return) {
+ _aidl_return->stream = ndk::SharedRefBase::make<StreamOutMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMasterMute(bool* _aidl_return) {
+ *_aidl_return = mMasterMute;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::setMasterMute(bool masterMute) {
+ mMasterMute = masterMute;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMasterVolume(float* _aidl_return) {
+ *_aidl_return = mMasterVolume;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::setMasterVolume(float masterVolume) {
+ mMasterVolume = masterVolume;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMicMute(bool* _aidl_return) {
+ *_aidl_return = mMicMute;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::setMicMute(bool micMute) {
+ mMicMute = micMute;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getSoundDose(std::shared_ptr<ISoundDose>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<SoundDoseMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMmapPolicyInfos(AudioMMapPolicyType,
+ std::vector<AudioMMapPolicyInfo>* _aidl_return) {
+ AudioMMapPolicyInfo never;
+ never.mmapPolicy = AudioMMapPolicy::NEVER;
+ _aidl_return->push_back(never);
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::supportsVariableLatency(bool* _aidl_return) {
+ *_aidl_return = false;
+ return ndk::ScopedAStatus::ok();
+}
+
+AudioPortExt ModuleMock::createDeviceExt(AudioDeviceType devType, int32_t flags) {
+ AudioPortDeviceExt deviceExt;
+ deviceExt.device.type.type = devType;
+ deviceExt.flags = flags;
+ return AudioPortExt::make<AudioPortExt::Tag::device>(deviceExt);
+}
+
+AudioPort ModuleMock::createPort(int32_t id, const std::string& name, int32_t flags, bool isInput,
+ const AudioPortExt& ext) {
+ AudioPort port;
+ port.id = id;
+ port.name = name;
+ port.flags = isInput ? AudioIoFlags::make<AudioIoFlags::Tag::input>(flags)
+ : AudioIoFlags::make<AudioIoFlags::Tag::output>(flags);
+ port.ext = ext;
+ return port;
+}
+
+} // namespace aidl::android::hardware::audio::core
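The createPort()/createDeviceExt() helpers at the end are what the constructor uses to publish the two default device ports. Additional ports could be appended the same way; a hypothetical speaker port, for illustration only (id and name are invented):

    mPorts.push_back(createPort(/* PortId */ 2, /* Name */ "Speaker", /* Flags */ 0,
                                /* isInput */ false,
                                createDeviceExt(AudioDeviceType::OUT_SPEAKER, /* Flags */ 0)));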
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamInMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamInMock.cpp
new file mode 100644
index 0000000..093a979
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamInMock.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "core-mock/StreamInMock.h"
+#include "core-mock/StreamCommonMock.h"
+
+namespace aidl::android::hardware::audio::core {
+
+ndk::ScopedAStatus StreamInMock::getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) {
+ if (!mStreamCommon) {
+ mStreamCommon = ndk::SharedRefBase::make<StreamCommonMock>();
+ }
+ *_aidl_return = mStreamCommon;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::getMicrophoneDirection(
+ IStreamIn::MicrophoneDirection* _aidl_return) {
+ *_aidl_return = mMicrophoneDirection;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::setMicrophoneDirection(
+ IStreamIn::MicrophoneDirection in_direction) {
+ mMicrophoneDirection = in_direction;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::getMicrophoneFieldDimension(float* _aidl_return) {
+ *_aidl_return = mMicrophoneFieldDimension;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::setMicrophoneFieldDimension(float in_zoom) {
+ mMicrophoneFieldDimension = in_zoom;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::getHwGain(std::vector<float>* _aidl_return) {
+ *_aidl_return = mHwGains;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::setHwGain(const std::vector<float>& in_channelGains) {
+ mHwGains = in_channelGains;
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamOutMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamOutMock.cpp
new file mode 100644
index 0000000..a71f954
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamOutMock.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "core-mock/StreamOutMock.h"
+#include "core-mock/StreamCommonMock.h"
+
+namespace aidl::android::hardware::audio::core {
+
+ndk::ScopedAStatus StreamOutMock::getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) {
+ if (!mStreamCommon) {
+ mStreamCommon = ndk::SharedRefBase::make<StreamCommonMock>();
+ }
+ *_aidl_return = mStreamCommon;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamOutMock::getHwVolume(std::vector<float>* _aidl_return) {
+ *_aidl_return = mHwVolume;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setHwVolume(const std::vector<float>& in_channelVolumes) {
+ mHwVolume = in_channelVolumes;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::getAudioDescriptionMixLevel(float* _aidl_return) {
+ *_aidl_return = mAudioDescriptionMixLeveldB;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setAudioDescriptionMixLevel(float in_leveldB) {
+ mAudioDescriptionMixLeveldB = in_leveldB;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::getDualMonoMode(AudioDualMonoMode* _aidl_return) {
+ *_aidl_return = mDualMonoMode;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setDualMonoMode(AudioDualMonoMode in_mode) {
+ mDualMonoMode = in_mode;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::getPlaybackRateParameters(AudioPlaybackRate* _aidl_return) {
+ *_aidl_return = mPlaybackRateParameters;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setPlaybackRateParameters(
+ const AudioPlaybackRate& in_playbackRate) {
+ mPlaybackRateParameters = in_playbackRate;
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothA2dpMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothA2dpMock.h
new file mode 100644
index 0000000..c4dd0d9
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothA2dpMock.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnBluetoothA2dp.h>
+
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class BluetoothA2dpMock : public BnBluetoothA2dp {
+ public:
+ ndk::ScopedAStatus isEnabled(bool* _aidl_return) override {
+ *_aidl_return = mEnabled;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setEnabled(bool enabled) override {
+ mEnabled = enabled;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus supportsOffloadReconfiguration(bool* _aidl_return) override {
+ *_aidl_return = kSupportsOffloadReconfiguration;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus reconfigureOffload(const std::vector<VendorParameter>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ private:
+ static constexpr bool kSupportsOffloadReconfiguration = true;
+ bool mEnabled = false;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothLeMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothLeMock.h
new file mode 100644
index 0000000..d58695a
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothLeMock.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnBluetoothLe.h>
+
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class BluetoothLeMock : public BnBluetoothLe {
+ public:
+ ndk::ScopedAStatus isEnabled(bool* _aidl_return) override {
+ *_aidl_return = mEnabled;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setEnabled(bool enabled) override {
+ mEnabled = enabled;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus supportsOffloadReconfiguration(bool* _aidl_return) override {
+ *_aidl_return = kSupportsOffloadReconfiguration;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus reconfigureOffload(const std::vector<VendorParameter>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ private:
+ static constexpr bool kSupportsOffloadReconfiguration = true;
+ bool mEnabled = false;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothMock.h
new file mode 100644
index 0000000..e805840
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothMock.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnBluetooth.h>
+
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class BluetoothMock : public BnBluetooth {
+ public:
+ ndk::ScopedAStatus setScoConfig(const IBluetooth::ScoConfig&, IBluetooth::ScoConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setHfpConfig(const IBluetooth::HfpConfig&, IBluetooth::HfpConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ConfigMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ConfigMock.h
new file mode 100644
index 0000000..f4031b5
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ConfigMock.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnConfig.h>
+
+using namespace aidl::android::media::audio::common;
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class ConfigMock : public BnConfig {
+ private:
+ ndk::ScopedAStatus getSurroundSoundConfig(SurroundSoundConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getEngineConfig(AudioHalEngineConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ModuleMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ModuleMock.h
new file mode 100644
index 0000000..d49203d
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ModuleMock.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnModule.h>
+
+using namespace aidl::android::media::audio::common;
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::hardware::audio::core::sounddose;
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::core {
+
+class ModuleMock : public BnModule {
+ public:
+ ModuleMock();
+
+ private:
+ ndk::ScopedAStatus getTelephony(std::shared_ptr<ITelephony>*) override;
+ ndk::ScopedAStatus getBluetooth(std::shared_ptr<IBluetooth>*) override;
+ ndk::ScopedAStatus getBluetoothA2dp(std::shared_ptr<IBluetoothA2dp>*) override;
+ ndk::ScopedAStatus getBluetoothLe(std::shared_ptr<IBluetoothLe>*) override;
+ ndk::ScopedAStatus openInputStream(const OpenInputStreamArguments&,
+ OpenInputStreamReturn*) override;
+ ndk::ScopedAStatus openOutputStream(const OpenOutputStreamArguments&,
+ OpenOutputStreamReturn*) override;
+ ndk::ScopedAStatus getMasterMute(bool*) override;
+ ndk::ScopedAStatus setMasterMute(bool) override;
+ ndk::ScopedAStatus getMasterVolume(float*) override;
+ ndk::ScopedAStatus setMasterVolume(float) override;
+ ndk::ScopedAStatus getMicMute(bool*) override;
+ ndk::ScopedAStatus setMicMute(bool) override;
+ ndk::ScopedAStatus getSoundDose(std::shared_ptr<ISoundDose>*) override;
+ ndk::ScopedAStatus getMmapPolicyInfos(AudioMMapPolicyType,
+ std::vector<AudioMMapPolicyInfo>*) override;
+ ndk::ScopedAStatus supportsVariableLatency(bool*) override;
+
+ ndk::ScopedAStatus setModuleDebug(const ModuleDebug&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus connectExternalDevice(const AudioPort&, AudioPort*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus disconnectExternalDevice(int32_t) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioPatches(std::vector<AudioPatch>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioPort(int32_t, AudioPort*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioPortConfigs(std::vector<AudioPortConfig>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioPorts(std::vector<AudioPort>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioRoutes(std::vector<AudioRoute>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioRoutesForAudioPort(int32_t, std::vector<AudioRoute>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getSupportedPlaybackRateFactors(SupportedPlaybackRateFactors*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setAudioPatch(const AudioPatch&, AudioPatch*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setAudioPortConfig(const AudioPortConfig&, AudioPortConfig*,
+ bool*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus resetAudioPatch(int32_t) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus resetAudioPortConfig(int32_t) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getMicrophones(std::vector<MicrophoneInfo>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus updateAudioMode(AudioMode) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus updateScreenRotation(ScreenRotation) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus updateScreenState(bool) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus generateHwAvSyncId(int32_t*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>&,
+ std::vector<VendorParameter>*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>&, bool) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus addDeviceEffect(int32_t, const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus removeDeviceEffect(int32_t, const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAAudioMixerBurstCount(int32_t*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus getAAudioHardwareBurstMinUsec(int32_t*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus prepareToDisconnectExternalDevice(int32_t) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ AudioPortExt createDeviceExt(AudioDeviceType devType, int32_t flags);
+ AudioPort createPort(int32_t id, const std::string& name, int32_t flags, bool isInput,
+ const AudioPortExt& ext);
+
+ bool mMasterMute = false;
+ float mMasterVolume = 0.f;
+ bool mMicMute = false;
+ std::vector<AudioPort> mPorts;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamCommonMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamCommonMock.h
new file mode 100644
index 0000000..25d53f8
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamCommonMock.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnStreamCommon.h>
+
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::core {
+
+class StreamCommonMock : public BnStreamCommon {
+ ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus prepareToClose() override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus updateHwAvSyncId(int32_t) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>&,
+ std::vector<VendorParameter>*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>&, bool) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus addEffect(const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus removeEffect(const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamInMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamInMock.h
new file mode 100644
index 0000000..5deab5b
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamInMock.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnStreamIn.h>
+
+using namespace aidl::android::hardware::audio::common;
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::media::audio::common;
+
+namespace aidl::android::hardware::audio::core {
+
+class StreamInMock : public BnStreamIn {
+ ndk::ScopedAStatus getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) override;
+ ndk::ScopedAStatus getMicrophoneDirection(
+ IStreamIn::MicrophoneDirection* _aidl_return) override;
+ ndk::ScopedAStatus setMicrophoneDirection(IStreamIn::MicrophoneDirection in_direction) override;
+ ndk::ScopedAStatus getMicrophoneFieldDimension(float* _aidl_return) override;
+ ndk::ScopedAStatus setMicrophoneFieldDimension(float in_zoom) override;
+ ndk::ScopedAStatus getHwGain(std::vector<float>* _aidl_return) override;
+ ndk::ScopedAStatus setHwGain(const std::vector<float>& in_channelGains) override;
+
+ ndk::ScopedAStatus getActiveMicrophones(std::vector<MicrophoneDynamicInfo>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus updateMetadata(const SinkMetadata&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ private:
+ IStreamIn::MicrophoneDirection mMicrophoneDirection = IStreamIn::MicrophoneDirection::UNSPECIFIED;
+ float mMicrophoneFieldDimension = 0.f;
+ std::vector<float> mHwGains;
+ std::shared_ptr<IStreamCommon> mStreamCommon;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamOutMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamOutMock.h
new file mode 100644
index 0000000..4d12815
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamOutMock.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnStreamOut.h>
+
+using namespace aidl::android::hardware::audio::common;
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::media::audio::common;
+
+namespace aidl::android::hardware::audio::core {
+
+class StreamOutMock : public BnStreamOut {
+ ndk::ScopedAStatus getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) override;
+ ndk::ScopedAStatus getHwVolume(std::vector<float>* _aidl_return) override;
+ ndk::ScopedAStatus setHwVolume(const std::vector<float>& in_channelVolumes) override;
+ ndk::ScopedAStatus getAudioDescriptionMixLevel(float* _aidl_return) override;
+ ndk::ScopedAStatus setAudioDescriptionMixLevel(float in_leveldB) override;
+ ndk::ScopedAStatus getDualMonoMode(AudioDualMonoMode* _aidl_return) override;
+ ndk::ScopedAStatus setDualMonoMode(AudioDualMonoMode in_mode) override;
+ ndk::ScopedAStatus getPlaybackRateParameters(AudioPlaybackRate* _aidl_return) override;
+ ndk::ScopedAStatus setPlaybackRateParameters(const AudioPlaybackRate& in_playbackRate) override;
+
+ ndk::ScopedAStatus updateMetadata(const SourceMetadata&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus updateOffloadMetadata(const AudioOffloadMetadata&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getRecommendedLatencyModes(std::vector<AudioLatencyMode>*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus setLatencyMode(AudioLatencyMode) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus selectPresentation(int32_t, int32_t) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+
+ private:
+ AudioPlaybackRate mPlaybackRateParameters;
+ AudioDualMonoMode mDualMonoMode = AudioDualMonoMode::OFF;
+ float mAudioDescriptionMixLeveldB = 0.f;
+ std::vector<float> mHwVolume;
+ std::shared_ptr<IStreamCommon> mStreamCommon;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/TelephonyMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/TelephonyMock.h
new file mode 100644
index 0000000..d56dee6
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/TelephonyMock.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnTelephony.h>
+
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::media::audio::common;
+
+namespace aidl::android::hardware::audio::core {
+
+class TelephonyMock : public BnTelephony {
+ public:
+ ndk::ScopedAStatus getSupportedAudioModes(std::vector<AudioMode>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus switchAudioMode(AudioMode) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus setTelecomConfig(const ITelephony::TelecomConfig&,
+ ITelephony::TelecomConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/EffectMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/EffectMock.h
new file mode 100644
index 0000000..db20cd8
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/EffectMock.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::effect {
+
+class EffectMock : public BnEffect {
+ public:
+ ndk::ScopedAStatus open(const Parameter::Common&, const std::optional<Parameter::Specific>&,
+ IEffect::OpenEffectReturn*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus command(CommandId) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getState(State*) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getDescriptor(Descriptor*) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus reopen(IEffect::OpenEffectReturn*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setParameter(const Parameter&) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getParameter(const Parameter::Id&, Parameter*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/FactoryMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/FactoryMock.h
new file mode 100644
index 0000000..57d58d5
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/FactoryMock.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/effect/BnFactory.h>
+
+using namespace aidl::android::media::audio::common;
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::effect {
+
+class FactoryMock : public BnFactory {
+ ndk::ScopedAStatus queryEffects(const std::optional<AudioUuid>&,
+ const std::optional<AudioUuid>&,
+ const std::optional<AudioUuid>&,
+ std::vector<Descriptor>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus queryProcessing(const std::optional<Processing::Type>&,
+ std::vector<Processing>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus destroyEffect(const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ ndk::ScopedAStatus createEffect(const AudioUuid&, std::shared_ptr<IEffect>*) override;
+};
+
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/sounddose-mock/SoundDoseMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/sounddose-mock/SoundDoseMock.h
new file mode 100644
index 0000000..5557b10
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/sounddose-mock/SoundDoseMock.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/sounddose/BnSoundDose.h>
+
+using namespace aidl::android::hardware::audio::core::sounddose;
+
+namespace aidl::android::hardware::audio::core::sounddose {
+
+class SoundDoseMock : public BnSoundDose {
+ ndk::ScopedAStatus setOutputRs2UpperBound(float in_rs2ValueDbA) override {
+ mOutputRs2UpperBound = in_rs2ValueDbA;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getOutputRs2UpperBound(float* _aidl_return) override {
+ *_aidl_return = mOutputRs2UpperBound;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus registerSoundDoseCallback(
+ const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ private:
+ float mOutputRs2UpperBound = 0.f;
+};
+
+} // namespace aidl::android::hardware::audio::core::sounddose
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index fd3b0a8..12eedca 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -41,9 +41,9 @@
"libcutils",
"libjsoncpp",
"liblog",
+ "libmedia_helper",
"libmediametrics",
"libmediametricsservice",
- "libmedia_helper",
"libprocessgroup",
"shared-file-region-aidl-cpp",
],
@@ -55,8 +55,9 @@
"audiopolicy-types-aidl-cpp",
"av-types-aidl-cpp",
"capture_state_listener-aidl-cpp",
- "libaudioclient_aidl_conversion",
+ "framework-permission-aidl-cpp",
"libaudio_aidl_conversion_common_cpp",
+ "libaudioclient_aidl_conversion",
"libaudioflinger",
"libaudiofoundation",
"libaudiomanager",
@@ -69,7 +70,6 @@
"libutils",
"libxml2",
"mediametricsservice-aidl-cpp",
- "framework-permission-aidl-cpp",
],
header_libs: [
"libaudiofoundation_headers",
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 77d686a..c238158 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -187,7 +187,10 @@
// helper function to obtain AudioFlinger service handle
static sp<IAudioFlinger> get_audio_flinger();
- static sp<IAudioFlinger> get_audio_flinger_for_fuzzer();
+
+ // Function to disable creation of the binder thread pool (used for testing).
+ // This should be called before get_audio_flinger() or get_audio_policy_service().
+ static void disableThreadPool();
static float linearToLog(int volume);
static int logToLinear(float volume);
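As the new comment notes, disableThreadPool() is only honored if it runs before the first service handle is cached. A minimal test/fuzzer call sequence, assuming the declarations above (hypothetical snippet, not part of the patch):

    android::AudioSystem::disableThreadPool();   // must precede the first service fetch
    const android::sp<android::IAudioFlinger> af =
            android::AudioSystem::get_audio_flinger();  // no binder thread pool is spawned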
diff --git a/media/libaudioclient/include/media/EffectClientAsyncProxy.h b/media/libaudioclient/include/media/EffectClientAsyncProxy.h
new file mode 100644
index 0000000..e7d6d80
--- /dev/null
+++ b/media/libaudioclient/include/media/EffectClientAsyncProxy.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/media/BnEffectClient.h>
+#include <audio_utils/CommandThread.h>
+
+namespace android::media {
+
+class EffectClientAsyncProxy : public IEffectClient {
+public:
+
+ /**
+ * Call this factory method to interpose a worker thread when a binder
+ * callback interface is invoked in-proc.
+ */
+ static sp<IEffectClient> makeIfNeeded(const sp<IEffectClient>& effectClient) {
+ if (isLocalBinder(effectClient)) {
+ return sp<EffectClientAsyncProxy>::make(effectClient);
+ }
+ return effectClient;
+ }
+
+ explicit EffectClientAsyncProxy(const sp<IEffectClient>& effectClient)
+ : mEffectClient(effectClient) {}
+
+ ::android::IBinder* onAsBinder() override {
+ return nullptr;
+ }
+
+ ::android::binder::Status controlStatusChanged(bool controlGranted) override {
+ getThread().add(__func__, [=, effectClient = mEffectClient]() {
+ effectClient->controlStatusChanged(controlGranted);
+ });
+ return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+ }
+
+ ::android::binder::Status enableStatusChanged(bool enabled) override {
+ getThread().add(__func__, [=, effectClient = mEffectClient]() {
+ effectClient->enableStatusChanged(enabled);
+ });
+ return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+ }
+
+ ::android::binder::Status commandExecuted(
+ int32_t cmdCode, const ::std::vector<uint8_t>& cmdData,
+ const ::std::vector<uint8_t>& replyData) override {
+ getThread().add(__func__, [=, effectClient = mEffectClient]() {
+ effectClient->commandExecuted(cmdCode, cmdData, replyData);
+ });
+ return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+ }
+
+ ::android::binder::Status framesProcessed(int32_t frames) override {
+ getThread().add(__func__, [=, effectClient = mEffectClient]() {
+ effectClient->framesProcessed(frames);
+ });
+ return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+ }
+
+ /**
+ * Returns true if the binder interface is local (in-proc).
+ *
+ * Move to a binder helper class?
+ */
+ static bool isLocalBinder(const sp<IInterface>& interface) {
+ const auto b = IInterface::asBinder(interface);
+ return b && b->localBinder();
+ }
+
+private:
+ const sp<IEffectClient> mEffectClient;
+
+ /**
+ * Returns the per-interface-descriptor CommandThread for in-proc binder transactions.
+ *
+ * Note: Remote RPC transactions to a given binder (kernel) node enter that node's
+ * async_todo list, which serializes all async operations to that binder node.
+ * Each transaction on the async_todo list must complete before the next one
+ * starts, even though there may be available threads in the process threadpool.
+ *
+ * For local transactions, we order all async requests entering
+ * the CommandThread. We do not maintain a threadpool, though a future implementation
+ * could use a shared ThreadPool.
+ *
+ * By using a static here, all in-proc binder interfaces made async with
+ * EffectClientAsyncProxy will get the same CommandThread.
+ *
+ * @return CommandThread to use.
+ */
+ static audio_utils::CommandThread& getThread() {
+ [[clang::no_destroy]] static audio_utils::CommandThread commandThread;
+ return commandThread;
+ }
+}; // class EffectClientAsyncProxy
+
+} // namespace android::media
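In short, the proxy only interposes when the callback binder is local; remote callers keep their ordinary oneway path, which the kernel already serializes per binder node. A hypothetical helper showing the intended call pattern (sketch only):

    sp<media::IEffectClient> wrapCallback(const sp<media::IEffectClient>& client) {
        // Local (in-proc) binders get the async proxy so callbacks are queued on the
        // shared CommandThread; remote binders are returned unchanged.
        return media::EffectClientAsyncProxy::makeIfNeeded(client);
    }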
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index 913bbb4..8bed4d4 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -22,8 +22,8 @@
],
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
},
}
@@ -31,8 +31,8 @@
cc_defaults {
name: "audio_aidl_conversion_test_defaults",
defaults: [
- "libaudioclient_tests_defaults",
"latest_android_media_audio_common_types_cpp_static",
+ "libaudioclient_tests_defaults",
],
static_libs: [
"audioclient-types-aidl-cpp",
@@ -109,9 +109,9 @@
"libcgrouprc",
"libdl",
"libmedia",
+ "libmedia_helper",
"libmediametrics",
"libmediautils",
- "libmedia_helper",
"libnblog",
"libprocessgroup",
"libshmemcompat",
diff --git a/media/libaudiofoundation/Android.bp b/media/libaudiofoundation/Android.bp
index c758fcd..576406d 100644
--- a/media/libaudiofoundation/Android.bp
+++ b/media/libaudiofoundation/Android.bp
@@ -87,7 +87,7 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
}
diff --git a/media/libaudiofoundation/tests/Android.bp b/media/libaudiofoundation/tests/Android.bp
index 82c7db7..0ca50ab 100644
--- a/media/libaudiofoundation/tests/Android.bp
+++ b/media/libaudiofoundation/tests/Android.bp
@@ -22,8 +22,8 @@
static_libs: [
"audioclient-types-aidl-cpp",
- "libaudioclient_aidl_conversion",
"libaudio_aidl_conversion_common_cpp",
+ "libaudioclient_aidl_conversion",
"libaudiofoundation",
"libstagefright_foundation",
],
@@ -37,8 +37,8 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
test_suites: ["device-tests"],
@@ -64,8 +64,8 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
test_suites: ["device-tests"],
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index b8d0998..639c7aa 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -18,8 +18,8 @@
cflags: [
"-Wall",
- "-Wextra",
"-Werror",
+ "-Wextra",
],
required: [
@@ -44,7 +44,7 @@
"libbase_headers",
"liberror_headers",
"libmediautils_headers",
- ]
+ ],
}
cc_library_shared {
@@ -61,12 +61,12 @@
shared_libs: [
"libhidlbase",
- "libutils",
"liblog",
+ "libutils",
],
header_libs: [
- "libaudiohal_headers"
+ "libaudiohal_headers",
],
}
diff --git a/media/libaudiohal/impl/AidlUtils.cpp b/media/libaudiohal/impl/AidlUtils.cpp
new file mode 100644
index 0000000..a916802
--- /dev/null
+++ b/media/libaudiohal/impl/AidlUtils.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "AidlUtils.h"
+
+#define LOG_TAG "AIDLUtils"
+#include <utils/Log.h>
+
+namespace android {
+
+//static
+HalDeathHandler& HalDeathHandler::getInstance() {
+ // never-delete singleton
+ static HalDeathHandler* instance = new HalDeathHandler;
+ return *instance;
+}
+
+//static
+void HalDeathHandler::OnBinderDied(void*) {
+ ALOGE("HAL instance died, audio server is restarting");
+ _exit(1); // Avoid calling atexit handlers, as this code runs on a thread from RPC threadpool.
+}
+
+HalDeathHandler::HalDeathHandler()
+ : mDeathRecipient(AIBinder_DeathRecipient_new(OnBinderDied)) {}
+
+bool HalDeathHandler::registerHandler(AIBinder* binder) {
+ binder_status_t status = AIBinder_linkToDeath(binder, mDeathRecipient.get(), nullptr);
+ if (status == STATUS_OK) return true;
+ ALOGE("%s: linkToDeath failed: %d", __func__, status);
+ return false;
+}
+
+} // namespace android
diff --git a/media/libaudiohal/impl/AidlUtils.h b/media/libaudiohal/impl/AidlUtils.h
new file mode 100644
index 0000000..97a5bba
--- /dev/null
+++ b/media/libaudiohal/impl/AidlUtils.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+#include <string>
+
+#include <android/binder_auto_utils.h>
+#include <android/binder_ibinder.h>
+#include <android/binder_manager.h>
+
+namespace android {
+
+class HalDeathHandler {
+ public:
+ static HalDeathHandler& getInstance();
+
+ bool registerHandler(AIBinder* binder);
+ private:
+ static void OnBinderDied(void*);
+
+ HalDeathHandler();
+
+ ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+};
+
+template<class Intf>
+std::shared_ptr<Intf> getServiceInstance(const std::string& instanceName) {
+ const std::string serviceName =
+ std::string(Intf::descriptor).append("/").append(instanceName);
+ std::shared_ptr<Intf> service;
+ while (!service) {
+ AIBinder* serviceBinder = nullptr;
+ while (!serviceBinder) {
+            // 'waitForService' may return a nullptr; treat this as a transient error and retry.
+ serviceBinder = AServiceManager_waitForService(serviceName.c_str());
+ }
+ // `fromBinder` may fail and return a nullptr if the service has died in the meantime.
+ service = Intf::fromBinder(ndk::SpAIBinder(serviceBinder));
+ if (service != nullptr) {
+ HalDeathHandler::getInstance().registerHandler(serviceBinder);
+ }
+ }
+ return service;
+}
+
+} // namespace android
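
Editor's note: getServiceInstance() above folds together two retry loops plus death-notification registration. The following is a self-contained model of that control flow only; FakeService, waitForService, makeClient and registerDeathHandler are stand-ins for the real AServiceManager_waitForService(), Intf::fromBinder() and HalDeathHandler::registerHandler() calls and are not part of this change.

#include <functional>
#include <memory>

struct FakeService {};

std::shared_ptr<FakeService> acquireService(
        const std::function<FakeService*()>& waitForService,
        const std::function<std::shared_ptr<FakeService>(FakeService*)>& makeClient,
        const std::function<void(FakeService*)>& registerDeathHandler) {
    std::shared_ptr<FakeService> service;
    while (!service) {
        FakeService* binder = nullptr;
        while (!binder) {
            binder = waitForService();     // may spuriously return nullptr; keep retrying
        }
        service = makeClient(binder);      // may fail if the service died in the meantime
        if (service) {
            registerDeathHandler(binder);  // arrange for a restart if the HAL dies later
        }
    }
    return service;
}
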
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index 4d81f77..1a6b949 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -49,8 +49,8 @@
cflags: [
"-Wall",
- "-Wextra",
"-Werror",
+ "-Wextra",
"-fvisibility=hidden",
],
shared_libs: [
@@ -211,10 +211,10 @@
"libbinder_ndk",
],
cflags: [
- "-DMAJOR_VERSION=7",
- "-DMINOR_VERSION=1",
"-DCOMMON_TYPES_MINOR_VERSION=0",
"-DCORE_TYPES_MINOR_VERSION=0",
+ "-DMAJOR_VERSION=7",
+ "-DMINOR_VERSION=1",
"-include common/all-versions/VersionMacro.h",
],
}
@@ -227,11 +227,11 @@
"latest_android_hardware_audio_core_sounddose_ndk_shared",
"latest_android_hardware_audio_effect_ndk_shared",
"latest_android_media_audio_common_types_ndk_shared",
+ "latest_av_audio_types_aidl_ndk_shared",
],
shared_libs: [
"android.hardware.common-V2-ndk",
"android.hardware.common.fmq-V1-ndk",
- "av-audio-types-aidl-V1-ndk",
"libaudio_aidl_conversion_common_cpp",
"libaudio_aidl_conversion_common_ndk",
"libaudio_aidl_conversion_common_ndk_cpp",
@@ -245,25 +245,26 @@
"libeffectsconfig_headers",
],
cflags: [
- "-Wall",
- "-Wextra",
- "-Werror",
- "-Wthread-safety",
"-DBACKEND_CPP_NDK",
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ "-Wthread-safety",
],
}
cc_library_shared {
name: "libaudiohal@aidl",
defaults: [
- "libaudiohal_default",
"libaudiohal_aidl_default",
+ "libaudiohal_default",
],
srcs: [
- "DevicesFactoryHalEntry.cpp",
- "EffectsFactoryHalEntry.cpp",
":audio_effect_hal_aidl_src_files",
":core_audio_hal_aidl_src_files",
+ "AidlUtils.cpp",
+ "DevicesFactoryHalEntry.cpp",
+ "EffectsFactoryHalEntry.cpp",
],
}
@@ -281,8 +282,9 @@
filegroup {
name: "audio_effect_hal_aidl_src_files",
srcs: [
- "EffectConversionHelperAidl.cpp",
+ ":audio_effectproxy_src_files",
"EffectBufferHalAidl.cpp",
+ "EffectConversionHelperAidl.cpp",
"EffectHalAidl.cpp",
"EffectsFactoryHalAidl.cpp",
"effectsAidlConversion/AidlConversionAec.cpp",
@@ -301,7 +303,6 @@
"effectsAidlConversion/AidlConversionVendorExtension.cpp",
"effectsAidlConversion/AidlConversionVirtualizer.cpp",
"effectsAidlConversion/AidlConversionVisualizer.cpp",
- ":audio_effectproxy_src_files",
],
}
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 14765f6b..2447b18 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -478,15 +478,21 @@
__func__, ret.desc.toString().c_str());
return NO_INIT;
}
- *outStream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
+ auto stream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
std::move(ret.stream), mVendorExt, this /*callbackBroker*/);
- void* cbCookie = (*outStream).get();
+ *outStream = stream;
+ /* StreamOutHalInterface* */ void* cbCookie = (*outStream).get();
{
std::lock_guard l(mLock);
mCallbacks.emplace(cbCookie, Callbacks{});
mMapper.addStream(*outStream, mixPortConfig.id, aidlPatch.id);
}
- if (streamCb) streamCb->setCookie(cbCookie);
+ if (streamCb) {
+ streamCb->setCookie(cbCookie);
+ // Although StreamOutHalAidl implements StreamOutHalInterfaceCallback,
+ // we always go via the CallbackBroker for consistency.
+ setStreamOutCallback(cbCookie, stream);
+ }
eventCb->setCookie(cbCookie);
cleanups.disarmAll();
return OK;
@@ -589,7 +595,6 @@
// that the HAL module uses `int32_t` for patch IDs. The following assert ensures
// that both the framework and the HAL use the same value for "no ID":
static_assert(AUDIO_PATCH_HANDLE_NONE == 0);
- int32_t aidlPatchId = static_cast<int32_t>(*patch);
// Upon conversion, mix port configs contain audio configuration, while
// device port configs contain device address. This data is used to find
@@ -611,11 +616,27 @@
::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
sinks[i], isInput, 0)));
}
+ int32_t aidlPatchId = static_cast<int32_t>(*patch);
Hal2AidlMapper::Cleanups cleanups(mMapperAccessor);
{
std::lock_guard l(mLock);
- RETURN_STATUS_IF_ERROR(mMapper.createOrUpdatePatch(
- aidlSources, aidlSinks, &aidlPatchId, &cleanups));
+        // Check for patches that only exist for the framework, or have a different HAL patch ID.
+ if (int32_t aidlHalPatchId = mMapper.findFwkPatch(aidlPatchId); aidlHalPatchId != 0) {
+ if (aidlHalPatchId == aidlPatchId) {
+ // This patch was previously released by the HAL. Thus we need to pass '0'
+ // to the HAL to obtain a new patch.
+ int32_t newAidlPatchId = 0;
+ RETURN_STATUS_IF_ERROR(mMapper.createOrUpdatePatch(
+ aidlSources, aidlSinks, &newAidlPatchId, &cleanups));
+ mMapper.updateFwkPatch(aidlPatchId, newAidlPatchId);
+ } else {
+ RETURN_STATUS_IF_ERROR(mMapper.createOrUpdatePatch(
+ aidlSources, aidlSinks, &aidlHalPatchId, &cleanups));
+ }
+ } else {
+ RETURN_STATUS_IF_ERROR(mMapper.createOrUpdatePatch(
+ aidlSources, aidlSinks, &aidlPatchId, &cleanups));
+ }
}
*patch = static_cast<audio_patch_handle_t>(aidlPatchId);
cleanups.disarmAll();
@@ -631,7 +652,19 @@
return BAD_VALUE;
}
std::lock_guard l(mLock);
- RETURN_STATUS_IF_ERROR(mMapper.releaseAudioPatch(static_cast<int32_t>(patch)));
+    // Check for patches that only exist for the framework, or have a different HAL patch ID.
+ int32_t aidlPatchId = static_cast<int32_t>(patch);
+ if (int32_t aidlHalPatchId = mMapper.findFwkPatch(aidlPatchId); aidlHalPatchId != 0) {
+ if (aidlHalPatchId == aidlPatchId) {
+ // This patch was previously released by the HAL, just need to finish its removal.
+ mMapper.eraseFwkPatch(aidlPatchId);
+ return OK;
+ } else {
+            // This patch is known to the HAL under a different patch ID; release it under that ID.
+ aidlPatchId = aidlHalPatchId;
+ }
+ }
+ RETURN_STATUS_IF_ERROR(mMapper.releaseAudioPatch(aidlPatchId));
return OK;
}
@@ -988,7 +1021,7 @@
if (mModule == nullptr) return NO_INIT;
{
std::lock_guard l(mLock);
- mMapper.resetUnusedPatchesPortConfigsAndPorts();
+ mMapper.resetUnusedPatchesAndPortConfigs();
}
ModuleDebug debug{ .simulateDeviceConnections = enabled };
status_t status = statusTFromBinderStatus(mModule->setModuleDebug(debug));
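
Editor's note: the patch-handle bookkeeping introduced above can be summarized with a small standalone model. This is a sketch only; the std::map here is a stand-in for Hal2AidlMapper's mFwkPatches (framework patch handle to HAL patch ID, with key == value meaning "already released by the HAL but still known to the framework").

#include <map>

using FwkPatches = std::map<int, int>;  // fwk patch handle -> HAL patch ID

int createOrUpdatePatch(FwkPatches& fwkPatches, int fwkId, int freshHalId) {
    auto it = fwkPatches.find(fwkId);
    if (it == fwkPatches.end()) {
        return fwkId;              // normal case: the HAL uses the framework's ID
    }
    if (it->second == fwkId) {     // the HAL already dropped this patch: create a fresh one
        it->second = freshHalId;
    }
    return it->second;             // re-patch under the HAL-side ID
}

bool releasePatch(FwkPatches& fwkPatches, int fwkId, int* halIdToRelease) {
    auto it = fwkPatches.find(fwkId);
    if (it == fwkPatches.end()) {
        *halIdToRelease = fwkId;   // normal case
        return true;
    }
    if (it->second == fwkId) {     // already released by the HAL; just forget it
        fwkPatches.erase(it);
        return false;              // nothing to release on the HAL side
    }
    *halIdToRelease = it->second;  // release under the HAL-side ID
    fwkPatches.erase(it);
    return true;
}
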
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index 478e0f0..ea4258c 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -752,10 +752,14 @@
// the attributes reported by `getParameters` API.
struct audio_port_v7 temp = *devicePort;
AudioProfileAttributesMultimap attrsFromDevice;
- status_t status = getAudioPort(&temp);
- if (status == NO_ERROR) {
- attrsFromDevice = createAudioProfilesAttrMap(temp.audio_profiles, 0 /*first*/,
- temp.num_audio_profiles);
+ bool supportsPatches;
+ if (supportsAudioPatches(&supportsPatches) == OK && supportsPatches) {
+        // Audio patches are supported since HAL 3.0, which is also the minimum HAL version
+        // required for the 'getAudioPort' API.
+ if (getAudioPort(&temp) == NO_ERROR) {
+ attrsFromDevice = createAudioProfilesAttrMap(temp.audio_profiles, 0 /*first*/,
+ temp.num_audio_profiles);
+ }
}
auto streamIt = mStreams.find(mixPort->ext.mix.handle);
if (streamIt == mStreams.end()) {
@@ -767,7 +771,7 @@
}
String8 formatsStr;
- status = getParametersFromStream(
+ status_t status = getParametersFromStream(
stream, AudioParameter::keyStreamSupportedFormats, nullptr /*extraParameters*/,
&formatsStr);
if (status != NO_ERROR) {
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
index 347afa6..68b650f 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
@@ -31,6 +31,7 @@
#include <media/AidlConversionUtil.h>
#include <utils/Log.h>
+#include "AidlUtils.h"
#include "DeviceHalAidl.h"
#include "DevicesFactoryHalAidl.h"
@@ -179,16 +180,8 @@
if (name == nullptr || device == nullptr) {
return BAD_VALUE;
}
- std::shared_ptr<IModule> service;
if (strcmp(name, "primary") == 0) name = "default";
- auto serviceName = std::string(IModule::descriptor) + "/" + name;
- service = IModule::fromBinder(
- ndk::SpAIBinder(AServiceManager_waitForService(serviceName.c_str())));
- if (service == nullptr) {
- ALOGE("%s fromBinder %s failed", __func__, serviceName.c_str());
- return NO_INIT;
- }
- *device = sp<DeviceHalAidl>::make(name, service, mVendorExt);
+ *device = sp<DeviceHalAidl>::make(name, getServiceInstance<IModule>(name), mVendorExt);
return OK;
}
@@ -229,14 +222,7 @@
// Main entry-point to the shared library.
extern "C" __attribute__((visibility("default"))) void* createIDevicesFactoryImpl() {
- auto serviceName = std::string(IConfig::descriptor) + "/default";
- auto service = IConfig::fromBinder(
- ndk::SpAIBinder(AServiceManager_waitForService(serviceName.c_str())));
- if (!service) {
- ALOGE("%s binder service %s not exist", __func__, serviceName.c_str());
- return nullptr;
- }
- return new DevicesFactoryHalAidl(service);
+ return new DevicesFactoryHalAidl(getServiceInstance<IConfig>("default"));
}
} // namespace android
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
index e1a82f8..ff6126d 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -181,7 +181,7 @@
State state;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
if (state == State::INIT) {
- ALOGI("%s at state %s, opening effect with input %s output %s", __func__,
+ ALOGD("%s at state %s, opening effect with input %s output %s", __func__,
android::internal::ToString(state).c_str(), common.input.toString().c_str(),
common.output.toString().c_str());
IEffect::OpenEffectReturn openReturn;
@@ -189,10 +189,12 @@
statusTFromBinderStatus(mEffect->open(common, std::nullopt, &openReturn)));
updateMqsAndEventFlags(openReturn);
} else if (mCommon != common) {
- ALOGI("%s at state %s, setParameter", __func__, android::internal::ToString(state).c_str());
+ ALOGV("%s at state %s, setCommonParameter %s", __func__,
+ android::internal::ToString(state).c_str(), common.toString().c_str());
Parameter aidlParam = UNION_MAKE(Parameter, common, common);
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
}
+ mOutputAccessMode = config->outputCfg.accessMode;
mCommon = common;
return *static_cast<int32_t*>(pReplyData) = OK;
@@ -397,12 +399,12 @@
effect_offload_param_t* offload = (effect_offload_param_t*)pCmdData;
// send to proxy to update active sub-effect
if (mIsProxyEffect) {
- ALOGI("%s offload param offload %s ioHandle %d", __func__,
+ ALOGV("%s offload param offload %s ioHandle %d", __func__,
offload->isOffload ? "true" : "false", offload->ioHandle);
const auto& effectProxy = std::static_pointer_cast<EffectProxy>(mEffect);
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(effectProxy->setOffloadParam(offload)));
if (mCommon.ioHandle != offload->ioHandle) {
- ALOGI("%s ioHandle update [%d to %d]", __func__, mCommon.ioHandle, offload->ioHandle);
+ ALOGV("%s ioHandle update [%d to %d]", __func__, mCommon.ioHandle, offload->ioHandle);
mCommon.ioHandle = offload->ioHandle;
Parameter aidlParam = UNION_MAKE(Parameter, common, mCommon);
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.h b/media/libaudiohal/impl/EffectConversionHelperAidl.h
index 0c0184e..29c5a83 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.h
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.h
@@ -49,6 +49,8 @@
::aidl::android::hardware::audio::effect::Descriptor getDescriptor() const;
status_t reopen();
+ uint8_t mOutputAccessMode = EFFECT_BUFFER_ACCESS_WRITE;
+
protected:
const int32_t mSessionId;
const int32_t mIoId;
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index ebda86a..3fe2046 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -20,6 +20,7 @@
#include <memory>
+#include <audio_utils/primitives.h>
#include <error/expected_utils.h>
#include <media/AidlConversionCppNdk.h>
#include <media/AidlConversionEffect.h>
@@ -56,7 +57,9 @@
using ::aidl::android::hardware::audio::effect::Descriptor;
using ::aidl::android::hardware::audio::effect::IEffect;
using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::kEventFlagDataMqNotEmpty;
using ::aidl::android::hardware::audio::effect::kEventFlagDataMqUpdate;
+using ::aidl::android::hardware::audio::effect::kEventFlagNotEmpty;
using ::aidl::android::hardware::audio::effect::kReopenSupportedVersion;
using ::aidl::android::hardware::audio::effect::State;
@@ -196,8 +199,9 @@
::android::OK == efGroup->wait(kEventFlagDataMqUpdate, &efState,
1 /* ns */, true /* retry */) &&
efState & kEventFlagDataMqUpdate) {
- ALOGI("%s %s V%d receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str(),
+ ALOGV("%s %s V%d receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str(),
halVersion);
+
mConversion->reopen();
}
auto statusQ = mConversion->getStatusMQ();
@@ -223,12 +227,22 @@
floatsToWrite, mInBuffer->audioBuffer(), inputQ->availableToWrite());
return INVALID_OPERATION;
}
- efGroup->wake(aidl::android::hardware::audio::effect::kEventFlagNotEmpty);
+
+    // For the V2 audio effect HAL, use a different EventFlag to avoid a bit conflict with FMQ_NOT_EMPTY.
+ efGroup->wake(halVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty
+ : kEventFlagNotEmpty);
IEffect::Status retStatus{};
- if (!statusQ->readBlocking(&retStatus, 1) || retStatus.status != OK ||
- (size_t)retStatus.fmqConsumed != floatsToWrite || retStatus.fmqProduced == 0) {
- ALOGE("%s read status failed: %s", __func__, retStatus.toString().c_str());
+ if (!statusQ->readBlocking(&retStatus, 1)) {
+ ALOGE("%s %s V%d read status from status FMQ failed", __func__, effectName.c_str(),
+ halVersion);
+ return INVALID_OPERATION;
+ }
+ if (retStatus.status != OK || (size_t)retStatus.fmqConsumed != floatsToWrite ||
+ retStatus.fmqProduced == 0) {
+ ALOGE("%s read status failed: %s, consumed %d (of %zu) produced %d", __func__,
+ retStatus.toString().c_str(), retStatus.fmqConsumed, floatsToWrite,
+ retStatus.fmqProduced);
return INVALID_OPERATION;
}
@@ -239,16 +253,23 @@
mOutBuffer->getSize() / sizeof(float), available);
return INVALID_OPERATION;
}
+
+ float *outputRawBuffer = mOutBuffer->audioBuffer()->f32;
+ std::vector<float> tempBuffer;
+ if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+ tempBuffer.resize(floatsToRead);
+ outputRawBuffer = tempBuffer.data();
+ }
// always read floating point data for AIDL
- if (!mOutBuffer->audioBuffer() ||
- !outputQ->read(mOutBuffer->audioBuffer()->f32, floatsToRead)) {
+ if (!outputQ->read(outputRawBuffer, floatsToRead)) {
ALOGE("%s failed to read %zu from outputQ to audioBuffer %p", __func__, floatsToRead,
mOutBuffer->audioBuffer());
return INVALID_OPERATION;
}
+ if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+ accumulate_float(mOutBuffer->audioBuffer()->f32, outputRawBuffer, floatsToRead);
+ }
- ALOGD("%s %s consumed %zu produced %zu", __func__, effectName.c_str(), floatsToWrite,
- floatsToRead);
return OK;
}
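
Editor's note: the EFFECT_BUFFER_ACCESS_ACCUMULATE handling above reads the HAL output into a temporary buffer and then mixes it into the existing output instead of overwriting it. A minimal sketch of that step; accumulateFloat() is a plain element-wise add standing in for accumulate_float() from audio_utils/primitives.h.

#include <cstddef>
#include <vector>

// Mix 'src' into 'dst' in place -- the behaviour selected by EFFECT_BUFFER_ACCESS_ACCUMULATE.
static void accumulateFloat(float* dst, const float* src, size_t count) {
    for (size_t i = 0; i < count; ++i) {
        dst[i] += src[i];
    }
}

static void deliverOutput(std::vector<float>& audioBuffer, const std::vector<float>& halOutput,
                          bool accumulate) {
    if (accumulate) {
        // ACCESS_ACCUMULATE: HAL output was read into a temporary buffer, now mix it in.
        accumulateFloat(audioBuffer.data(), halOutput.data(), halOutput.size());
    } else {
        // ACCESS_WRITE: HAL output simply replaces the buffer contents.
        audioBuffer.assign(halOutput.begin(), halOutput.end());
    }
}
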
diff --git a/media/libaudiohal/impl/EffectProxy.cpp b/media/libaudiohal/impl/EffectProxy.cpp
index 3d9832c..fb4658f 100644
--- a/media/libaudiohal/impl/EffectProxy.cpp
+++ b/media/libaudiohal/impl/EffectProxy.cpp
@@ -82,8 +82,7 @@
ndk::ScopedAStatus EffectProxy::setOffloadParam(const effect_offload_param_t* offload) {
const auto& itor = std::find_if(mSubEffects.begin(), mSubEffects.end(), [&](const auto& sub) {
const auto& desc = sub.descriptor;
- return offload->isOffload ==
- (desc.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL);
+ return offload->isOffload == desc.common.flags.offloadIndication;
});
if (itor == mSubEffects.end()) {
ALOGE("%s no %soffload sub-effect found", __func__, offload->isOffload ? "" : "non-");
@@ -93,7 +92,7 @@
}
mActiveSubIdx = std::distance(mSubEffects.begin(), itor);
- ALOGI("%s: active %soffload sub-effect %zu descriptor: %s", __func__,
+ ALOGI("%s: active %soffload sub-effect %zu: %s", __func__,
offload->isOffload ? "" : "non-", mActiveSubIdx,
::android::audio::utils::toString(mSubEffects[mActiveSubIdx].descriptor.common.id.uuid)
.c_str());
@@ -147,7 +146,7 @@
// close all opened effects if failure
if (!status.isOk()) {
- ALOGE("%s: closing all sub-effects with error %s", __func__,
+ ALOGW("%s: closing all sub-effects with error %s", __func__,
status.getDescription().c_str());
close();
}
@@ -163,7 +162,7 @@
ndk::ScopedAStatus EffectProxy::getDescriptor(Descriptor* desc) {
*desc = mSubEffects[mActiveSubIdx].descriptor;
- desc->common.id.uuid = desc->common.id.proxy.value();
+ desc->common = mDescriptorCommon;
return ndk::ScopedAStatus::ok();
}
@@ -185,42 +184,35 @@
return ndk::ScopedAStatus::ok();
}
+// Sub-effects are required to have identical features, so here we return the SW sub-effect
+// descriptor with the implementation UUID replaced by the proxy UUID, and with the flags set
+// to reflect all sub-effects.
Descriptor::Common EffectProxy::buildDescriptorCommon(
const AudioUuid& uuid, const std::vector<Descriptor>& subEffectDescs) {
- // initial flag values before we know which sub-effect to active (with setOffloadParam)
- // align to HIDL EffectProxy flags
- Descriptor::Common common = {.flags = {.type = Flags::Type::INSERT,
- .insert = Flags::Insert::LAST,
- .volume = Flags::Volume::CTRL}};
-
+ Descriptor::Common swCommon;
+ const Flags& firstFlag = subEffectDescs[0].common.flags;
+ bool offloadExist = false;
for (const auto& desc : subEffectDescs) {
- if (desc.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL) {
- common.flags.hwAcceleratorMode = Flags::HardwareAccelerator::TUNNEL;
+ if (desc.common.flags.offloadIndication) {
+ offloadExist = true;
+ } else {
+ swCommon = desc.common;
}
-
- // set indication if any sub-effect indication was set
- common.flags.offloadIndication |= desc.common.flags.offloadIndication;
- common.flags.deviceIndication |= desc.common.flags.deviceIndication;
- common.flags.audioModeIndication |= desc.common.flags.audioModeIndication;
- common.flags.audioSourceIndication |= desc.common.flags.audioSourceIndication;
- // Set to NONE if any sub-effect not supporting any Volume command
- if (desc.common.flags.volume == Flags::Volume::NONE) {
- common.flags.volume = Flags::Volume::NONE;
- }
- // set to AUXILIARY if any sub-effect is of AUXILIARY type
- if (desc.common.flags.type == Flags::Type::AUXILIARY) {
- common.flags.type = Flags::Type::AUXILIARY;
+ if (desc.common.flags.audioModeIndication != firstFlag.audioModeIndication ||
+ desc.common.flags.audioSourceIndication != firstFlag.audioSourceIndication ||
+ desc.common.flags.sinkMetadataIndication != firstFlag.sinkMetadataIndication ||
+ desc.common.flags.sourceMetadataIndication != firstFlag.sourceMetadataIndication ||
+ desc.common.flags.deviceIndication != firstFlag.deviceIndication) {
+ ALOGW("Inconsistent flags %s vs %s", desc.common.flags.toString().c_str(),
+ firstFlag.toString().c_str());
}
}
- // copy type UUID from any of sub-effects, all sub-effects should have same type
- common.id.type = subEffectDescs[0].common.id.type;
+ swCommon.flags.offloadIndication = offloadExist;
// replace implementation UUID with proxy UUID.
- common.id.uuid = uuid;
- common.id.proxy = std::nullopt;
- common.name = "Proxy";
- common.implementor = "AOSP";
- return common;
+ swCommon.id.uuid = uuid;
+ swCommon.id.proxy = std::nullopt;
+ return swCommon;
}
// Handle with active sub-effect first, only send to other sub-effects when success
@@ -259,7 +251,7 @@
std::function<ndk::ScopedAStatus(const std::shared_ptr<IEffect>&)> const& func) {
ndk::ScopedAStatus status = runWithActiveSubEffect(func);
if (!status.isOk()) {
- ALOGE("%s active sub-effect return error %s", __func__, status.getDescription().c_str());
+ ALOGW("%s active sub-effect return error %s", __func__, status.getDescription().c_str());
}
// proceed with others
@@ -268,7 +260,7 @@
continue;
}
if (!mSubEffects[i].handle) {
- ALOGE("%s null sub-effect interface for %s", __func__,
+ ALOGW("%s null sub-effect interface for %s", __func__,
mSubEffects[i].descriptor.common.id.uuid.toString().c_str());
continue;
}
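
Editor's note: buildDescriptorCommon() now starts from the software sub-effect's descriptor instead of synthesizing one. Below is a sketch of that selection over simplified stand-in types; the real code operates on the AIDL Descriptor::Common and Flags types.

#include <string>
#include <vector>

struct Flags { bool offloadIndication = false; };
struct Common { Flags flags; std::string uuid; std::string name; };

// Pick the SW (non-offload) sub-effect's descriptor as the base, mark the proxy as
// offload-capable if any sub-effect is, and replace the implementation UUID with the proxy UUID.
static Common buildProxyCommon(const std::string& proxyUuid, const std::vector<Common>& subs) {
    Common result;
    bool offloadExists = false;
    for (const auto& sub : subs) {
        if (sub.flags.offloadIndication) {
            offloadExists = true;
        } else {
            result = sub;  // the software sub-effect becomes the base descriptor
        }
    }
    result.flags.offloadIndication = offloadExists;
    result.uuid = proxyUuid;
    return result;
}
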
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 7d807b2..64cc7ed 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -31,6 +31,7 @@
#include <system/audio_aidl_utils.h>
#include <utils/Log.h>
+#include "AidlUtils.h"
#include "EffectBufferHalAidl.h"
#include "EffectHalAidl.h"
#include "EffectProxy.h"
@@ -120,8 +121,6 @@
}
*pNumEffects = mEffectCount;
- ALOGD("%s %u non %zu proxyMap %zu proxyDesc %zu", __func__, *pNumEffects,
- mNonProxyDescList.size(), mProxyUuidDescriptorMap.size(), mProxyDescList.size());
return OK;
}
@@ -178,7 +177,7 @@
if (sessionId == AUDIO_SESSION_DEVICE && ioId == AUDIO_IO_HANDLE_NONE) {
return INVALID_OPERATION;
}
- ALOGI("%s session %d ioId %d", __func__, sessionId, ioId);
+ ALOGV("%s session %d ioId %d", __func__, sessionId, ioId);
AudioUuid aidlUuid =
VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
@@ -189,7 +188,6 @@
aidlEffect = ndk::SharedRefBase::make<EffectProxy>(
aidlUuid, mProxyUuidDescriptorMap.at(aidlUuid) /* sub-effect descriptor list */,
mFactory);
- mProxyList.emplace_back(std::static_pointer_cast<EffectProxy>(aidlEffect));
} else {
RETURN_STATUS_IF_ERROR(
statusTFromBinderStatus(mFactory->createEffect(aidlUuid, &aidlEffect)));
@@ -206,25 +204,17 @@
}
status_t EffectsFactoryHalAidl::dumpEffects(int fd) {
- status_t ret = OK;
- // record the error ret and continue dump as many effects as possible
- for (const auto& proxy : mProxyList) {
- if (status_t temp = BAD_VALUE; proxy && (temp = proxy->dump(fd, nullptr, 0)) != OK) {
- ret = temp;
- }
- }
+ // TODO: b/333803769 improve the effect dump implementation
RETURN_STATUS_IF_ERROR(mFactory->dump(fd, nullptr, 0));
- return ret;
+ return OK;
}
status_t EffectsFactoryHalAidl::allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) {
- ALOGI("%s size %zu buffer %p", __func__, size, buffer);
return EffectBufferHalAidl::allocate(size, buffer);
}
status_t EffectsFactoryHalAidl::mirrorBuffer(void* external, size_t size,
sp<EffectBufferHalInterface>* buffer) {
- ALOGI("%s extern %p size %zu buffer %p", __func__, external, size, buffer);
return EffectBufferHalAidl::mirror(external, size, buffer);
}
@@ -245,7 +235,6 @@
ALOGE("%s UUID not found in HAL and proxy list %s", __func__, toString(uuid).c_str());
return NAME_NOT_FOUND;
}
- ALOGI("%s UUID impl found %s", __func__, toString(uuid).c_str());
*pDescriptor = VALUE_OR_RETURN_STATUS(
::aidl::android::aidl2legacy_Descriptor_effect_descriptor(*matchIt));
@@ -267,7 +256,6 @@
ALOGW("%s UUID type not found in HAL and proxy list %s", __func__, toString(type).c_str());
return BAD_VALUE;
}
- ALOGI("%s UUID type found %zu \n %s", __func__, result.size(), toString(type).c_str());
*descriptors = VALUE_OR_RETURN_STATUS(
aidl::android::convertContainer<std::vector<effect_descriptor_t>>(
@@ -362,14 +350,7 @@
// exports from a static library are optimized out unless actually used by
// the shared library. See EffectsFactoryHalEntry.cpp.
extern "C" void* createIEffectsFactoryImpl() {
- auto serviceName = std::string(IFactory::descriptor) + "/default";
- auto service = IFactory::fromBinder(
- ndk::SpAIBinder(AServiceManager_waitForService(serviceName.c_str())));
- if (!service) {
- ALOGE("%s binder service %s not exist", __func__, serviceName.c_str());
- return nullptr;
- }
- return new effect::EffectsFactoryHalAidl(service);
+ return new effect::EffectsFactoryHalAidl(getServiceInstance<IFactory>("default"));
}
} // namespace android
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.h b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
index 73089b0..3b8628c 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
@@ -84,9 +84,6 @@
// Query result of pre and post processing from effect factory
const std::vector<Processing> mAidlProcessings;
- // list of the EffectProxy instances
- std::list<std::shared_ptr<EffectProxy>> mProxyList;
-
virtual ~EffectsFactoryHalAidl() = default;
status_t getHalDescriptorWithImplUuid(
const ::aidl::android::media::audio::common::AudioUuid& uuid,
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index 2b7f298..453f9e2 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -102,8 +102,8 @@
}
void Hal2AidlMapper::addStream(
- const sp<StreamHalInterface>& stream, int32_t portConfigId, int32_t patchId) {
- mStreams.insert(std::pair(stream, std::pair(portConfigId, patchId)));
+ const sp<StreamHalInterface>& stream, int32_t mixPortConfigId, int32_t patchId) {
+ mStreams.insert(std::pair(stream, std::pair(mixPortConfigId, patchId)));
}
bool Hal2AidlMapper::audioDeviceMatches(const AudioDevice& device, const AudioPort& p) {
@@ -181,7 +181,9 @@
};
// When looking up port configs, the destinationPortId is only used for mix ports.
// Thus, we process device port configs first, and look up the destination port ID from them.
- bool sourceIsDevice = std::any_of(sources.begin(), sources.end(),
+ const bool sourceIsDevice = std::any_of(sources.begin(), sources.end(),
+ [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
+ const bool sinkIsDevice = std::any_of(sinks.begin(), sinks.end(),
[](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
const std::vector<AudioPortConfig>& devicePortConfigs =
sourceIsDevice ? sources : sinks;
@@ -202,7 +204,13 @@
existingPatchIt->second = patch;
} else {
bool created = false;
- RETURN_STATUS_IF_ERROR(findOrCreatePatch(patch, &patch, &created));
+ // When the framework does not specify a patch ID, only the mix port config
+ // is used for finding an existing patch. That's because the framework assumes
+ // that there can only be one patch for an I/O thread.
+ PatchMatch match = sourceIsDevice && sinkIsDevice ?
+ MATCH_BOTH : (sourceIsDevice ? MATCH_SINKS : MATCH_SOURCES);
+ RETURN_STATUS_IF_ERROR(findOrCreatePatch(patch, match,
+ &patch, &created));
// No cleanup of the patch is needed, it is managed by the framework.
*patchId = patch.id;
if (!created) {
@@ -274,18 +282,18 @@
}
status_t Hal2AidlMapper::findOrCreatePatch(
- const AudioPatch& requestedPatch, AudioPatch* patch, bool* created) {
+ const AudioPatch& requestedPatch, PatchMatch match, AudioPatch* patch, bool* created) {
std::set<int32_t> sourcePortConfigIds(requestedPatch.sourcePortConfigIds.begin(),
requestedPatch.sourcePortConfigIds.end());
std::set<int32_t> sinkPortConfigIds(requestedPatch.sinkPortConfigIds.begin(),
requestedPatch.sinkPortConfigIds.end());
- return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, patch, created);
+ return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, match, patch, created);
}
status_t Hal2AidlMapper::findOrCreatePatch(
const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
- AudioPatch* patch, bool* created) {
- auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds);
+ PatchMatch match, AudioPatch* patch, bool* created) {
+ auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds, match);
if (patchIt == mPatches.end()) {
AudioPatch requestedPatch, appliedPatch;
requestedPatch.sourcePortConfigIds.insert(requestedPatch.sourcePortConfigIds.end(),
@@ -450,13 +458,14 @@
*portConfig = it->second;
return OK;
}
- ALOGE("%s: could not find a configured device port for device %s",
+ ALOGE("%s: could not find a device port config for device %s",
__func__, device.toString().c_str());
return BAD_VALUE;
}
Hal2AidlMapper::Patches::iterator Hal2AidlMapper::findPatch(
- const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds) {
+ const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
+ PatchMatch match) {
return std::find_if(mPatches.begin(), mPatches.end(),
[&](const auto& pair) {
const auto& p = pair.second;
@@ -464,7 +473,15 @@
p.sourcePortConfigIds.begin(), p.sourcePortConfigIds.end());
std::set<int32_t> patchSinks(
p.sinkPortConfigIds.begin(), p.sinkPortConfigIds.end());
- return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks; });
+ switch (match) {
+ case MATCH_SOURCES:
+ return sourcePortConfigIds == patchSrcs;
+ case MATCH_SINKS:
+ return sinkPortConfigIds == patchSinks;
+ case MATCH_BOTH:
+ return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks;
+ }
+ });
}
Hal2AidlMapper::Ports::iterator Hal2AidlMapper::findPort(const AudioDevice& device) {
@@ -698,20 +715,23 @@
return OK;
}
-bool Hal2AidlMapper::isPortBeingHeld(int32_t portId) {
- // It is assumed that mStreams has already been cleaned up.
- for (const auto& s : mStreams) {
- if (portConfigBelongsToPort(s.second.first, portId)) return true;
- }
- for (const auto& [_, patch] : mPatches) {
+std::set<int32_t> Hal2AidlMapper::getPatchIdsByPortId(int32_t portId) {
+ std::set<int32_t> result;
+ for (const auto& [patchId, patch] : mPatches) {
for (int32_t id : patch.sourcePortConfigIds) {
- if (portConfigBelongsToPort(id, portId)) return true;
+ if (portConfigBelongsToPort(id, portId)) {
+ result.insert(patchId);
+ break;
+ }
}
for (int32_t id : patch.sinkPortConfigIds) {
- if (portConfigBelongsToPort(id, portId)) return true;
+ if (portConfigBelongsToPort(id, portId)) {
+ result.insert(patchId);
+ break;
+ }
}
}
- return false;
+ return result;
}
status_t Hal2AidlMapper::prepareToDisconnectExternalDevice(const AudioPort& devicePort) {
@@ -730,7 +750,7 @@
this, __func__, ioHandle, device.toString().c_str(),
flags.toString().c_str(), toString(source).c_str(),
config->toString().c_str(), mixPortConfig->toString().c_str());
- resetUnusedPatchesPortConfigsAndPorts();
+ resetUnusedPatchesAndPortConfigs();
const AudioConfig initialConfig = *config;
// Find / create AudioPortConfigs for the device port and the mix port,
// then find / create a patch between them, and open a stream on the mix port.
@@ -813,10 +833,10 @@
}
if (isInput) {
RETURN_STATUS_IF_ERROR(findOrCreatePatch(
- {devicePortConfigId}, {mixPortConfig->id}, patch, &created));
+ {devicePortConfigId}, {mixPortConfig->id}, MATCH_BOTH, patch, &created));
} else {
RETURN_STATUS_IF_ERROR(findOrCreatePatch(
- {mixPortConfig->id}, {devicePortConfigId}, patch, &created));
+ {mixPortConfig->id}, {devicePortConfigId}, MATCH_BOTH, patch, &created));
}
if (created) {
cleanups->add(&Hal2AidlMapper::resetPatch, patch->id);
@@ -843,39 +863,52 @@
return releaseAudioPatches({patchId});
}
+// Note: does not reset port configs.
+status_t Hal2AidlMapper::releaseAudioPatch(Patches::iterator it) {
+ const int32_t patchId = it->first;
+ if (ndk::ScopedAStatus status = mModule->resetAudioPatch(patchId); !status.isOk()) {
+ ALOGE("%s: error while resetting patch %d: %s",
+ __func__, patchId, status.getDescription().c_str());
+ return statusTFromBinderStatus(status);
+ }
+ mPatches.erase(it);
+ for (auto it = mFwkPatches.begin(); it != mFwkPatches.end(); ++it) {
+ if (it->second == patchId) {
+ mFwkPatches.erase(it);
+ break;
+ }
+ }
+ return OK;
+}
+
status_t Hal2AidlMapper::releaseAudioPatches(const std::set<int32_t>& patchIds) {
status_t result = OK;
for (const auto patchId : patchIds) {
if (auto it = mPatches.find(patchId); it != mPatches.end()) {
- mPatches.erase(it);
- if (ndk::ScopedAStatus status = mModule->resetAudioPatch(patchId); !status.isOk()) {
- ALOGE("%s: error while resetting patch %d: %s",
- __func__, patchId, status.getDescription().c_str());
- result = statusTFromBinderStatus(status);
- }
+ releaseAudioPatch(it);
} else {
ALOGE("%s: patch id %d not found", __func__, patchId);
result = BAD_VALUE;
}
}
- resetUnusedPortConfigsAndPorts();
+ resetUnusedPortConfigs();
return result;
}
void Hal2AidlMapper::resetPortConfig(int32_t portConfigId) {
if (auto it = mPortConfigs.find(portConfigId); it != mPortConfigs.end()) {
- mPortConfigs.erase(it);
if (ndk::ScopedAStatus status = mModule->resetAudioPortConfig(portConfigId);
!status.isOk()) {
ALOGE("%s: error while resetting port config %d: %s",
__func__, portConfigId, status.getDescription().c_str());
}
+ mPortConfigs.erase(it);
return;
}
ALOGE("%s: port config id %d not found", __func__, portConfigId);
}
-void Hal2AidlMapper::resetUnusedPatchesPortConfigsAndPorts() {
+void Hal2AidlMapper::resetUnusedPatchesAndPortConfigs() {
// Since patches can be created independently of streams via 'createOrUpdatePatch',
// here we only clean up patches for released streams.
std::set<int32_t> patchesToRelease;
@@ -889,52 +922,35 @@
it = mStreams.erase(it);
}
}
- // 'releaseAudioPatches' also resets unused port configs and ports.
+ // 'releaseAudioPatches' also resets unused port configs.
releaseAudioPatches(patchesToRelease);
}
-void Hal2AidlMapper::resetUnusedPortConfigsAndPorts() {
+void Hal2AidlMapper::resetUnusedPortConfigs() {
// The assumption is that port configs are used to create patches
// (or to open streams, but that involves creation of patches, too). Thus,
// orphaned port configs can and should be reset.
- std::map<int32_t, int32_t /*portID*/> portConfigIds;
+ std::set<int32_t> portConfigIdsToReset;
std::transform(mPortConfigs.begin(), mPortConfigs.end(),
- std::inserter(portConfigIds, portConfigIds.end()),
- [](const auto& pcPair) { return std::make_pair(pcPair.first, pcPair.second.portId); });
+ std::inserter(portConfigIdsToReset, portConfigIdsToReset.end()),
+ [](const auto& pcPair) { return pcPair.first; });
for (const auto& p : mPatches) {
- for (int32_t id : p.second.sourcePortConfigIds) portConfigIds.erase(id);
- for (int32_t id : p.second.sinkPortConfigIds) portConfigIds.erase(id);
+ for (int32_t id : p.second.sourcePortConfigIds) portConfigIdsToReset.erase(id);
+ for (int32_t id : p.second.sinkPortConfigIds) portConfigIdsToReset.erase(id);
}
for (int32_t id : mInitialPortConfigIds) {
- portConfigIds.erase(id);
+ portConfigIdsToReset.erase(id);
}
for (const auto& s : mStreams) {
- portConfigIds.erase(s.second.first);
+ portConfigIdsToReset.erase(s.second.first);
}
- std::set<int32_t> retryDeviceDisconnection;
- for (const auto& portConfigAndIdPair : portConfigIds) {
- resetPortConfig(portConfigAndIdPair.first);
- if (const auto it = mConnectedPorts.find(portConfigAndIdPair.second);
- it != mConnectedPorts.end() && it->second) {
- retryDeviceDisconnection.insert(portConfigAndIdPair.second);
- }
- }
- for (int32_t portId : retryDeviceDisconnection) {
- if (!isPortBeingHeld(portId)) {
- if (auto status = mModule->disconnectExternalDevice(portId); status.isOk()) {
- eraseConnectedPort(portId);
- ALOGD("%s: executed postponed external device disconnection for port ID %d",
- __func__, portId);
- }
- }
- }
- if (!retryDeviceDisconnection.empty()) {
- updateRoutes();
+ for (const auto& portConfigId : portConfigIdsToReset) {
+ resetPortConfig(portConfigId);
}
}
status_t Hal2AidlMapper::setDevicePortConnectedState(const AudioPort& devicePort, bool connected) {
- resetUnusedPatchesPortConfigsAndPorts();
+ resetUnusedPatchesAndPortConfigs();
if (connected) {
AudioDevice matchDevice = devicePort.ext.get<AudioPortExt::device>().device;
std::optional<AudioPort> templatePort;
@@ -980,7 +996,7 @@
"%s: module %s, duplicate port ID received from HAL: %s, existing port: %s",
__func__, mInstance.c_str(), connectedPort.toString().c_str(),
it->second.toString().c_str());
- mConnectedPorts[connectedPort.id] = false;
+ mConnectedPorts.insert(connectedPort.id);
if (erasePortAfterConnectionIt != mPorts.end()) {
mPorts.erase(erasePortAfterConnectionIt);
}
@@ -1007,17 +1023,34 @@
port.ext.get<AudioPortExt::Tag::device>().device = matchDevice;
port.profiles = portsIt->second.profiles;
}
- // Streams are closed by AudioFlinger independently from device disconnections.
- // It is possible that the stream has not been closed yet.
- if (!isPortBeingHeld(portId)) {
- RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
- mModule->disconnectExternalDevice(portId)));
- eraseConnectedPort(portId);
- } else {
- ALOGD("%s: since device port ID %d is used by a stream, "
- "external device disconnection postponed", __func__, portId);
- mConnectedPorts[portId] = true;
+
+ // Patches may still exist, the framework may reset or update them later.
+ // For disconnection to succeed, need to release these patches first.
+ if (std::set<int32_t> patchIdsToRelease = getPatchIdsByPortId(portId);
+ !patchIdsToRelease.empty()) {
+ FwkPatches releasedPatches;
+ status_t status = OK;
+ for (int32_t patchId : patchIdsToRelease) {
+ if (auto it = mPatches.find(patchId); it != mPatches.end()) {
+ if (status = releaseAudioPatch(it); status != OK) break;
+ releasedPatches.insert(std::make_pair(patchId, patchId));
+ }
+ }
+ resetUnusedPortConfigs();
+ // Patches created by Hal2AidlMapper during stream creation and not "claimed"
+ // by the framework must not be surfaced to it.
+ for (auto& s : mStreams) {
+ if (auto it = releasedPatches.find(s.second.second); it != releasedPatches.end()) {
+ releasedPatches.erase(it);
+ }
+ }
+ mFwkPatches.merge(releasedPatches);
+ LOG_ALWAYS_FATAL_IF(!releasedPatches.empty(),
+                    "mFwkPatches already contains some of the released patches");
+ if (status != OK) return status;
}
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->disconnectExternalDevice(portId)));
+ eraseConnectedPort(portId);
}
return updateRoutes();
}
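
Editor's note: the new PatchMatch mode controls which side of a patch is compared when searching mPatches. A self-contained sketch of the selection and comparison, mirroring createOrUpdatePatch() and findPatch() above, with port config IDs simplified to std::set<int>.

#include <set>

enum PatchMatch { MATCH_SOURCES, MATCH_SINKS, MATCH_BOTH };

// With no framework patch ID, only the mix-port side identifies the patch, because the
// framework assumes a single patch per I/O thread. Device-to-device patches match both sides.
static PatchMatch choosePatchMatch(bool sourceIsDevice, bool sinkIsDevice) {
    if (sourceIsDevice && sinkIsDevice) return MATCH_BOTH;
    return sourceIsDevice ? MATCH_SINKS : MATCH_SOURCES;
}

static bool patchMatches(const std::set<int>& srcIds, const std::set<int>& sinkIds,
                         const std::set<int>& patchSrcs, const std::set<int>& patchSinks,
                         PatchMatch match) {
    switch (match) {
        case MATCH_SOURCES: return srcIds == patchSrcs;
        case MATCH_SINKS:   return sinkIds == patchSinks;
        case MATCH_BOTH:    return srcIds == patchSrcs && sinkIds == patchSinks;
    }
    return false;
}
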
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.h b/media/libaudiohal/impl/Hal2AidlMapper.h
index f937173..c70c8af 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.h
+++ b/media/libaudiohal/impl/Hal2AidlMapper.h
@@ -49,7 +49,7 @@
const std::string& instance,
const std::shared_ptr<::aidl::android::hardware::audio::core::IModule>& module);
- void addStream(const sp<StreamHalInterface>& stream, int32_t portConfigId, int32_t patchId);
+ void addStream(const sp<StreamHalInterface>& stream, int32_t mixPortConfigId, int32_t patchId);
status_t createOrUpdatePatch(
const std::vector<::aidl::android::media::audio::common::AudioPortConfig>& sources,
const std::vector<::aidl::android::media::audio::common::AudioPortConfig>& sinks,
@@ -91,13 +91,32 @@
::aidl::android::media::audio::common::AudioPortConfig* portConfig,
Cleanups* cleanups = nullptr);
status_t releaseAudioPatch(int32_t patchId);
- void resetUnusedPatchesPortConfigsAndPorts();
+ void resetUnusedPatchesAndPortConfigs();
status_t setDevicePortConnectedState(
const ::aidl::android::media::audio::common::AudioPort& devicePort, bool connected);
+ // Methods to work with FwkPatches.
+ void eraseFwkPatch(int32_t fwkPatchId) { mFwkPatches.erase(fwkPatchId); }
+ int32_t findFwkPatch(int32_t fwkPatchId) {
+ const auto it = mFwkPatches.find(fwkPatchId);
+ return it != mFwkPatches.end() ? it->second : 0;
+ }
+ void updateFwkPatch(int32_t fwkPatchId, int32_t halPatchId) {
+ mFwkPatches[fwkPatchId] = halPatchId;
+ }
+
private:
- // IDs of ports for connected external devices, and whether they are held by streams.
- using ConnectedPorts = std::map<int32_t /*port ID*/, bool>;
+ // 'FwkPatches' is used to store patches that diverge from the framework's state.
+ // Uses framework patch ID (aka audio_patch_handle_t) values for indexing.
+    // When 'key == value', Hal2AidlMapper has removed this patch, and it is absent
+    // from 'mPatches', but it still "exists" for the framework. The framework will either
+    // remove it or re-patch. If the framework re-patches, it keeps using the same patch handle,
+    // but the HAL uses a new one (since the old patch was reset), thus 'key != value'
+ // for such patches. Since they "exist" both for the framework and the HAL, 'mPatches'
+ // contains their data under HAL patch ID ('value' of 'FwkPatches').
+ // To avoid confusion, all patchIDs used by Hal2AidlMapper are HAL IDs. Mapping between
+ // framework patch IDs and HAL patch IDs is done by DeviceHalAidl.
+ using FwkPatches = std::map<int32_t /*audio_patch_handle_t*/, int32_t /*patch ID*/>;
using Patches = std::map<int32_t /*patch ID*/,
::aidl::android::hardware::audio::core::AudioPatch>;
using PortConfigs = std::map<int32_t /*port config ID*/,
@@ -107,12 +126,14 @@
// Answers the question "whether portID 'first' is reachable from portID 'second'?"
// It's not a map because both portIDs are known. The matrix is symmetric.
using RoutingMatrix = std::set<std::pair<int32_t, int32_t>>;
- // There is always a port config ID set. The patch ID is set after stream
+ // There is always a mix port config ID set. The patch ID is set after stream
// creation, and can be set to '-1' later if the framework happens to create
// a patch between the same endpoints. In that case, the ownership of the patch
// is on the framework.
using Streams = std::map<wp<StreamHalInterface>,
- std::pair<int32_t /*port config ID*/, int32_t /*patch ID*/>>;
+ std::pair<int32_t /*mix port config ID*/, int32_t /*patch ID*/>>;
+
+ enum PatchMatch { MATCH_SOURCES, MATCH_SINKS, MATCH_BOTH };
const std::string mInstance;
const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
@@ -131,11 +152,13 @@
::aidl::android::media::audio::common::AudioPortConfig* result, bool *created);
void eraseConnectedPort(int32_t portId);
status_t findOrCreatePatch(
- const std::set<int32_t>& sourcePortConfigIds,
- const std::set<int32_t>& sinkPortConfigIds,
+ const std::set<int32_t>& sourcePortConfigIds,
+ const std::set<int32_t>& sinkPortConfigIds,
+ PatchMatch match,
::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
status_t findOrCreatePatch(
const ::aidl::android::hardware::audio::core::AudioPatch& requestedPatch,
+ PatchMatch match,
::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
status_t findOrCreateDevicePortConfig(
const ::aidl::android::media::audio::common::AudioDevice& device,
@@ -156,7 +179,7 @@
const std::set<int32_t>& destinationPortIds,
::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
Patches::iterator findPatch(const std::set<int32_t>& sourcePortConfigIds,
- const std::set<int32_t>& sinkPortConfigIds);
+ const std::set<int32_t>& sinkPortConfigIds, PatchMatch match);
Ports::iterator findPort(const ::aidl::android::media::audio::common::AudioDevice& device);
Ports::iterator findPort(
const ::aidl::android::media::audio::common::AudioConfig& config,
@@ -168,7 +191,7 @@
const std::optional<::aidl::android::media::audio::common::AudioConfig>& config,
const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
int32_t ioHandle);
- bool isPortBeingHeld(int32_t portId);
+ std::set<int32_t> getPatchIdsByPortId(int32_t portId);
status_t prepareToOpenStreamHelper(
int32_t ioHandle, int32_t devicePortId, int32_t devicePortConfigId,
const ::aidl::android::media::audio::common::AudioIoFlags& flags,
@@ -181,10 +204,11 @@
auto it = mPortConfigs.find(portConfigId);
return it != mPortConfigs.end() && it->second.portId == portId;
}
+ status_t releaseAudioPatch(Patches::iterator it);
status_t releaseAudioPatches(const std::set<int32_t>& patchIds);
void resetPatch(int32_t patchId) { (void)releaseAudioPatch(patchId); }
void resetPortConfig(int32_t portConfigId);
- void resetUnusedPortConfigsAndPorts();
+ void resetUnusedPortConfigs();
status_t updateAudioPort(
int32_t portId, ::aidl::android::media::audio::common::AudioPort* port);
status_t updateRoutes();
@@ -197,13 +221,14 @@
std::optional<::aidl::android::media::audio::common::AudioPort> mRemoteSubmixOut;
int32_t mDefaultInputPortId = -1;
int32_t mDefaultOutputPortId = -1;
+ FwkPatches mFwkPatches;
PortConfigs mPortConfigs;
std::set<int32_t> mInitialPortConfigIds;
Patches mPatches;
Routes mRoutes;
RoutingMatrix mRoutingMatrix;
Streams mStreams;
- ConnectedPorts mConnectedPorts;
+ std::set<int32_t> mConnectedPorts;
std::pair<int32_t, ::aidl::android::media::audio::common::AudioPort>
mDisconnectedPortReplacement;
std::set<int32_t> mDynamicMixPortIds;
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 2a8ebc6..4f01ec7 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -82,7 +82,12 @@
mConfig(configToBase(config)),
mContext(std::move(context)),
mStream(stream),
- mVendorExt(vext) {
+ mVendorExt(vext),
+ mLastReplyLifeTimeNs(
+ std::min(static_cast<size_t>(100),
+ 2 * mContext.getBufferDurationMs(mConfig.sample_rate))
+ * NANOS_PER_MILLISECOND)
+{
ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
{
std::lock_guard l(mLock);
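
Editor's note: the new mLastReplyLifeTimeNs member caps how long a cached command reply is considered fresh; updateCountersIfNeeded() further down only issues a getStatus command once this lifetime has elapsed. A sketch of the lifetime computation, assuming NANOS_PER_MILLISECOND is 1'000'000 as in audio_utils.

#include <algorithm>
#include <cstdint>

// Cached replies expire after min(100 ms, 2 * buffer duration), expressed in nanoseconds.
static int64_t replyLifetimeNs(int64_t bufferDurationMs) {
    constexpr int64_t kNanosPerMillisecond = 1'000'000;
    return std::min<int64_t>(100, 2 * bufferDurationMs) * kNanosPerMillisecond;
}
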
@@ -195,8 +200,12 @@
StreamDescriptor::Reply reply;
switch (state) {
case StreamDescriptor::State::ACTIVE:
+ case StreamDescriptor::State::DRAINING:
+ case StreamDescriptor::State::TRANSFERRING:
RETURN_STATUS_IF_ERROR(pause(&reply));
- if (reply.state != StreamDescriptor::State::PAUSED) {
+ if (reply.state != StreamDescriptor::State::PAUSED &&
+ reply.state != StreamDescriptor::State::DRAIN_PAUSED &&
+ reply.state != StreamDescriptor::State::TRANSFER_PAUSED) {
ALOGE("%s: unexpected stream state: %s (expected PAUSED)",
__func__, toString(reply.state).c_str());
return INVALID_OPERATION;
@@ -204,6 +213,7 @@
FALLTHROUGH_INTENDED;
case StreamDescriptor::State::PAUSED:
case StreamDescriptor::State::DRAIN_PAUSED:
+ case StreamDescriptor::State::TRANSFER_PAUSED:
if (mIsInput) return flush();
RETURN_STATUS_IF_ERROR(flush(&reply));
if (reply.state != StreamDescriptor::State::IDLE) {
@@ -271,11 +281,12 @@
return OK;
}
-status_t StreamHalAidl::getObservablePosition(int64_t *frames, int64_t *timestamp) {
+status_t StreamHalAidl::getObservablePosition(int64_t* frames, int64_t* timestamp,
+ StatePositions* statePositions) {
ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
if (!mStream) return NO_INIT;
StreamDescriptor::Reply reply;
- RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+ RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions));
*frames = std::max<int64_t>(0, reply.observable.frames);
*timestamp = std::max<int64_t>(0, reply.observable.timeNs);
return OK;
@@ -285,8 +296,7 @@
ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
if (!mStream) return NO_INIT;
StreamDescriptor::Reply reply;
- // TODO: switch to updateCountersIfNeeded once we sort out mWorkerTid initialization
- RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), &reply, true));
+ RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
*frames = std::max<int64_t>(0, reply.hardware.frames);
*timestamp = std::max<int64_t>(0, reply.hardware.timeNs);
return OK;
@@ -319,8 +329,11 @@
return INVALID_OPERATION;
}
}
+ StreamContextAidl::DataMQ::Error fmqError = StreamContextAidl::DataMQ::Error::NONE;
+ std::string fmqErrorMsg;
if (!mIsInput) {
- bytes = std::min(bytes, mContext.getDataMQ()->availableToWrite());
+ bytes = std::min(bytes,
+ mContext.getDataMQ()->availableToWrite(&fmqError, &fmqErrorMsg));
}
StreamDescriptor::Command burst =
StreamDescriptor::Command::make<StreamDescriptor::Command::Tag::burst>(bytes);
@@ -337,12 +350,14 @@
LOG_ALWAYS_FATAL_IF(*transferred > bytes,
"%s: HAL module read %zu bytes, which exceeds requested count %zu",
__func__, *transferred, bytes);
- if (auto toRead = mContext.getDataMQ()->availableToRead();
+ if (auto toRead = mContext.getDataMQ()->availableToRead(&fmqError, &fmqErrorMsg);
toRead != 0 && !mContext.getDataMQ()->read(static_cast<int8_t*>(buffer), toRead)) {
ALOGE("%s: failed to read %zu bytes to data MQ", __func__, toRead);
return NOT_ENOUGH_DATA;
}
}
+ LOG_ALWAYS_FATAL_IF(fmqError != StreamContextAidl::DataMQ::Error::NONE,
+ "%s", fmqErrorMsg.c_str());
mStreamPowerLog.log(buffer, *transferred);
return OK;
}
@@ -362,24 +377,28 @@
if (mIsInput) {
return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), reply);
} else {
- if (mContext.isAsynchronous()) {
+ if (const auto state = getState(); state == StreamDescriptor::State::IDLE) {
// Handle pause-flush-resume sequence. 'flush' from PAUSED goes to
// IDLE. We move here from IDLE to ACTIVE (same as 'start' from PAUSED).
- const auto state = getState();
- if (state == StreamDescriptor::State::IDLE) {
- StreamDescriptor::Reply localReply{};
- StreamDescriptor::Reply* innerReply = reply ?: &localReply;
- RETURN_STATUS_IF_ERROR(
- sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply));
- if (innerReply->state != StreamDescriptor::State::ACTIVE) {
- ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
- __func__, toString(innerReply->state).c_str());
- return INVALID_OPERATION;
- }
- return OK;
+ StreamDescriptor::Reply localReply{};
+ StreamDescriptor::Reply* innerReply = reply ?: &localReply;
+ RETURN_STATUS_IF_ERROR(
+ sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply));
+ if (innerReply->state != StreamDescriptor::State::ACTIVE) {
+ ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+ __func__, toString(innerReply->state).c_str());
+ return INVALID_OPERATION;
}
+ return OK;
+ } else if (state == StreamDescriptor::State::PAUSED ||
+ state == StreamDescriptor::State::TRANSFER_PAUSED ||
+ state == StreamDescriptor::State::DRAIN_PAUSED) {
+ return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
+ } else {
+ ALOGE("%s: unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
+ __func__, toString(state).c_str());
+ return INVALID_OPERATION;
}
- return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
}
}
@@ -409,6 +428,45 @@
return statusTFromBinderStatus(mStream->prepareToClose());
}
+void StreamHalAidl::onAsyncTransferReady() {
+ if (auto state = getState(); state == StreamDescriptor::State::TRANSFERRING) {
+ // Retrieve the current state together with position counters unconditionally
+ // to ensure that the state on our side gets updated.
+ sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
+ nullptr, true /*safeFromNonWorkerThread */);
+ } else {
+ ALOGW("%s: unexpected onTransferReady in the state %s", __func__, toString(state).c_str());
+ }
+}
+
+void StreamHalAidl::onAsyncDrainReady() {
+ if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
+ // Retrieve the current state together with position counters unconditionally
+ // to ensure that the state on our side gets updated.
+ sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), nullptr,
+ true /*safeFromNonWorkerThread */);
+ // For compatibility with HIDL behavior, apply a "soft" position reset
+ // after receiving the "drain ready" callback.
+ std::lock_guard l(mLock);
+ mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
+ } else {
+ ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
+ }
+}
+
+void StreamHalAidl::onAsyncError() {
+ std::lock_guard l(mLock);
+ if (mLastReply.state == StreamDescriptor::State::IDLE ||
+ mLastReply.state == StreamDescriptor::State::DRAINING ||
+ mLastReply.state == StreamDescriptor::State::TRANSFERRING) {
+ mLastReply.state = StreamDescriptor::State::ERROR;
+ ALOGW("%s: onError received", __func__);
+ } else {
+ ALOGW("%s: unexpected onError in the state %s", __func__,
+ toString(mLastReply.state).c_str());
+ }
+}
+
status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
struct audio_mmap_buffer_info *info) {
ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
@@ -457,9 +515,9 @@
}
status_t StreamHalAidl::sendCommand(
- const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+ const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
- bool safeFromNonWorkerThread) {
+ bool safeFromNonWorkerThread, StatePositions* statePositions) {
// TIME_CHECK(); // TODO(b/243839867) reenable only when optimized.
if (!safeFromNonWorkerThread) {
const pid_t workerTid = mWorkerTid.load(std::memory_order_acquire);
@@ -467,26 +525,48 @@
"%s %s: must be invoked from the worker thread (%d)",
__func__, command.toString().c_str(), workerTid);
}
- if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
- ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
- return NOT_ENOUGH_DATA;
- }
StreamDescriptor::Reply localReply{};
- if (reply == nullptr) {
- reply = &localReply;
- }
- if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
- ALOGE("%s: failed to read from reply MQ, command %s", __func__, command.toString().c_str());
- return NOT_ENOUGH_DATA;
- }
{
- std::lock_guard l(mLock);
- // Not every command replies with 'latencyMs' field filled out, substitute the last
- // returned value in that case.
- if (reply->latencyMs <= 0) {
- reply->latencyMs = mLastReply.latencyMs;
+ std::lock_guard l(mCommandReplyLock);
+ if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
+ ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
+ return NOT_ENOUGH_DATA;
}
- mLastReply = *reply;
+ if (reply == nullptr) {
+ reply = &localReply;
+ }
+ if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
+ ALOGE("%s: failed to read from reply MQ, command %s",
+ __func__, command.toString().c_str());
+ return NOT_ENOUGH_DATA;
+ }
+ {
+ std::lock_guard l(mLock);
+ // Not every command replies with 'latencyMs' field filled out, substitute the last
+ // returned value in that case.
+ if (reply->latencyMs <= 0) {
+ reply->latencyMs = mLastReply.latencyMs;
+ }
+ mLastReply = *reply;
+ mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
+ if (!mIsInput && reply->status == STATUS_OK) {
+ if (command.getTag() == StreamDescriptor::Command::standby &&
+ reply->state == StreamDescriptor::State::STANDBY) {
+ mStatePositions.framesAtStandby = reply->observable.frames;
+ } else if (command.getTag() == StreamDescriptor::Command::flush &&
+ reply->state == StreamDescriptor::State::IDLE) {
+ mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+ } else if (!mContext.isAsynchronous() &&
+ command.getTag() == StreamDescriptor::Command::drain &&
+ (reply->state == StreamDescriptor::State::IDLE ||
+ reply->state == StreamDescriptor::State::DRAINING)) {
+ mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+ } // for asynchronous drain, the frame count is saved in 'onAsyncDrainReady'
+ }
+ if (statePositions != nullptr) {
+ *statePositions = mStatePositions;
+ }
+ }
}
switch (reply->status) {
case STATUS_OK: return OK;
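
The sendCommand() change above makes the command write and the blocking reply read a single
critical section under mCommandReplyLock. A minimal standalone sketch of that pattern, assuming
a hypothetical SimpleQueue type rather than the AIDL FMQ API:

    #include <condition_variable>
    #include <deque>
    #include <mutex>

    // Hypothetical blocking queue standing in for the command/reply message queues.
    template <typename T>
    struct SimpleQueue {
        void write(const T& v) {
            std::lock_guard l(m);
            q.push_back(v);
            cv.notify_one();
        }
        T readBlocking() {
            std::unique_lock l(m);
            cv.wait(l, [this] { return !q.empty(); });
            T v = q.front();
            q.pop_front();
            return v;
        }
        std::mutex m;
        std::condition_variable cv;
        std::deque<T> q;
    };

    struct Transport {
        // Serializing the whole round trip guarantees that the reply read here belongs
        // to the command written here, even when several threads call send() concurrently.
        int send(int command) {
            std::lock_guard l(mCommandReplyLock);
            mCommandQueue.write(command);
            return mReplyQueue.readBlocking();
        }
        std::mutex mCommandReplyLock;
        SimpleQueue<int> mCommandQueue;
        SimpleQueue<int> mReplyQueue;
    };
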
@@ -501,17 +581,24 @@
}
status_t StreamHalAidl::updateCountersIfNeeded(
- ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply) {
- if (mWorkerTid.load(std::memory_order_acquire) == gettid()) {
- if (const auto state = getState(); state != StreamDescriptor::State::ACTIVE &&
- state != StreamDescriptor::State::DRAINING &&
- state != StreamDescriptor::State::TRANSFERRING) {
- return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), reply);
- }
+ ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
+ StatePositions* statePositions) {
+ bool doUpdate = false;
+ {
+ std::lock_guard l(mLock);
+ doUpdate = uptimeNanos() > mLastReplyExpirationNs;
}
- if (reply != nullptr) {
+ if (doUpdate) {
+        // Since updates are paced, it is OK to perform them from any thread; they should
+        // not interfere with the I/O operations of the worker.
+ return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
+ reply, true /*safeFromNonWorkerThread */, statePositions);
+ } else if (reply != nullptr) { // provide cached reply
std::lock_guard l(mLock);
*reply = mLastReply;
+ if (statePositions != nullptr) {
+ *statePositions = mStatePositions;
+ }
}
return OK;
}
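
updateCountersIfNeeded() above paces 'getStatus' queries by keeping the last reply together with
an expiration deadline. A minimal sketch of the same caching-with-expiration pattern, assuming
std::chrono::steady_clock in place of uptimeNanos() and a hypothetical queryHal() call:

    #include <chrono>
    #include <mutex>

    struct PacedCounters {
        using Clock = std::chrono::steady_clock;

        // Hypothetical expensive call standing in for the 'getStatus' HAL command.
        int queryHal() { return 42; }

        int get() {
            std::lock_guard l(mLock);
            const auto now = Clock::now();
            if (now > mExpiration) {
                mCached = queryHal();           // refresh at most once per lifetime
                mExpiration = now + kLifetime;
            }
            return mCached;                     // otherwise serve the cached value
        }

        static constexpr auto kLifetime = std::chrono::milliseconds(20);
        std::mutex mLock;
        int mCached = 0;
        Clock::time_point mExpiration{};        // epoch, so the first call always refreshes
    };
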
@@ -545,7 +632,7 @@
StreamOutHalAidl::~StreamOutHalAidl() {
if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
- broker->clearCallbacks(this);
+ broker->clearCallbacks(static_cast<StreamOutHalInterface*>(this));
}
}
@@ -569,7 +656,19 @@
status_t StreamOutHalAidl::setVolume(float left, float right) {
TIME_CHECK();
if (!mStream) return NO_INIT;
- return statusTFromBinderStatus(mStream->setHwVolume({left, right}));
+ size_t channelCount = audio_channel_count_from_out_mask(mConfig.channel_mask);
+ if (channelCount == 0) channelCount = 2;
+ std::vector<float> volumes(channelCount);
+ if (channelCount == 1) {
+ volumes[0] = (left + right) / 2;
+ } else {
+ volumes[0] = left;
+ volumes[1] = right;
+ for (size_t i = 2; i < channelCount; ++i) {
+ volumes[i] = (left + right) / 2;
+ }
+ }
+ return statusTFromBinderStatus(mStream->setHwVolume(volumes));
}
status_t StreamOutHalAidl::selectPresentation(int presentationId, int programId) {
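
setVolume() above expands the legacy (left, right) pair into the per-channel vector expected by
IStreamOut::setHwVolume. A standalone sketch of that mapping, with the same averaging fallback
for mono streams and for channels beyond the first two:

    #include <cstddef>
    #include <vector>

    // Expand a stereo volume pair to 'channelCount' per-channel volumes.
    // Mono gets the average; channels beyond L/R also get the average.
    std::vector<float> expandStereoVolume(float left, float right, size_t channelCount) {
        if (channelCount == 0) channelCount = 2;  // assume stereo when the mask is unknown
        std::vector<float> volumes(channelCount, (left + right) / 2);
        if (channelCount >= 2) {
            volumes[0] = left;
            volumes[1] = right;
        }
        return volumes;
    }

    // Example: expandStereoVolume(0.5f, 1.0f, 6) yields
    // {0.5, 1.0, 0.75, 0.75, 0.75, 0.75} for a 5.1 layout.
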
@@ -586,37 +685,36 @@
return transfer(const_cast<void*>(buffer), bytes, written);
}
-status_t StreamOutHalAidl::getRenderPosition(uint32_t *dspFrames) {
+status_t StreamOutHalAidl::getRenderPosition(uint64_t *dspFrames) {
if (dspFrames == nullptr) {
return BAD_VALUE;
}
int64_t aidlFrames = 0, aidlTimestamp = 0;
- RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
- *dspFrames = static_cast<uint32_t>(aidlFrames);
+ StatePositions statePositions{};
+ RETURN_STATUS_IF_ERROR(
+ getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
+ // Number of audio frames since the stream has exited standby.
+ // See the table at the start of 'StreamHalInterface' on when it needs to reset.
+ int64_t mostRecentResetPoint;
+ if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
+ mostRecentResetPoint = statePositions.framesAtStandby;
+ } else {
+ mostRecentResetPoint =
+ std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+ }
+ *dspFrames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
return OK;
}
-status_t StreamOutHalAidl::getNextWriteTimestamp(int64_t *timestamp __unused) {
- // Obsolete, use getPresentationPosition.
- return INVALID_OPERATION;
-}
-
status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
+ ALOGD("%p %s", this, __func__);
TIME_CHECK();
if (!mStream) return NO_INIT;
if (!mContext.isAsynchronous()) {
ALOGE("%s: the callback is intended for asynchronous streams only", __func__);
return INVALID_OPERATION;
}
- if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
- if (auto cb = callback.promote(); cb != nullptr) {
- broker->setStreamOutCallback(this, cb);
- } else {
- // It is expected that the framework never passes a null pointer.
- // In the AIDL model callbacks can't be "unregistered".
- LOG_ALWAYS_FATAL("%s: received an expired or null callback pointer", __func__);
- }
- }
+ mClientCallback = callback;
return OK;
}
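
getRenderPosition() above rebases the observable frame count onto the most recent reset point:
standby only for synchronous proportional (PCM) streams, otherwise the later of standby and
flush/drain. A minimal sketch of that rebasing, reusing the StatePositions fields as assumptions:

    #include <algorithm>
    #include <cstdint>

    struct StatePositions {
        int64_t framesAtFlushOrDrain = 0;
        int64_t framesAtStandby = 0;
    };

    // Frames rendered since the last reset point, clamped so it never goes negative.
    uint64_t framesSinceReset(int64_t observableFrames, const StatePositions& p,
                              bool proportionalAndSynchronous) {
        const int64_t resetPoint = proportionalAndSynchronous
                ? p.framesAtStandby  // PCM: only standby resets the count
                : std::max(p.framesAtStandby, p.framesAtFlushOrDrain);
        return observableFrames <= resetPoint ? 0 : observableFrames - resetPoint;
    }
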
@@ -661,13 +759,26 @@
return BAD_VALUE;
}
int64_t aidlFrames = 0, aidlTimestamp = 0;
- RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
- *frames = aidlFrames;
+ StatePositions statePositions{};
+ RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
+    // See the table at the start of 'StreamOutHalInterface'.
+ if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
+ *frames = aidlFrames;
+ } else {
+ const int64_t mostRecentResetPoint =
+ std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+ *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
+ }
timestamp->tv_sec = aidlTimestamp / NANOS_PER_SECOND;
timestamp->tv_nsec = aidlTimestamp - timestamp->tv_sec * NANOS_PER_SECOND;
return OK;
}
+status_t StreamOutHalAidl::presentationComplete() {
+ ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ return OK;
+}
+
status_t StreamOutHalAidl::updateSourceMetadata(
const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
TIME_CHECK();
@@ -739,7 +850,7 @@
TIME_CHECK();
if (!mStream) return NO_INIT;
if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
- broker->setStreamOutEventCallback(this, callback);
+ broker->setStreamOutEventCallback(static_cast<StreamOutHalInterface*>(this), callback);
}
return OK;
}
@@ -773,7 +884,8 @@
TIME_CHECK();
if (!mStream) return NO_INIT;
if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
- broker->setStreamOutLatencyModeCallback(this, callback);
+ broker->setStreamOutLatencyModeCallback(
+ static_cast<StreamOutHalInterface*>(this), callback);
}
return OK;
};
@@ -782,6 +894,27 @@
return StreamHalAidl::exit();
}
+void StreamOutHalAidl::onWriteReady() {
+ onAsyncTransferReady();
+ if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
+ clientCb->onWriteReady();
+ }
+}
+
+void StreamOutHalAidl::onDrainReady() {
+ onAsyncDrainReady();
+ if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
+ clientCb->onDrainReady();
+ }
+}
+
+void StreamOutHalAidl::onError() {
+ onAsyncError();
+ if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
+ clientCb->onError();
+ }
+}
+
status_t StreamOutHalAidl::filterAndUpdateOffloadMetadata(AudioParameter ¶meters) {
TIME_CHECK();
bool updateMetadata = false;
@@ -866,7 +999,9 @@
status_t StreamInHalAidl::setGain(float gain) {
TIME_CHECK();
if (!mStream) return NO_INIT;
- return statusTFromBinderStatus(mStream->setHwGain({gain}));
+ const size_t channelCount = audio_channel_count_from_in_mask(mConfig.channel_mask);
+ std::vector<float> gains(channelCount != 0 ? channelCount : 1, gain);
+ return statusTFromBinderStatus(mStream->setHwGain(gains));
}
status_t StreamInHalAidl::read(void *buffer, size_t bytes, size_t *read) {
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index 4acc6ac..fff7a92 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -32,6 +32,7 @@
#include <media/audiohal/StreamHalInterface.h>
#include <media/AidlConversionUtil.h>
#include <media/AudioParameter.h>
+#include <mediautils/Synchronization.h>
#include "ConversionHelperAidl.h"
#include "StreamPowerLog.h"
@@ -93,6 +94,9 @@
}
size_t getBufferSizeBytes() const { return mFrameSizeBytes * mBufferSizeFrames; }
size_t getBufferSizeFrames() const { return mBufferSizeFrames; }
+ size_t getBufferDurationMs(int32_t sampleRate) const {
+ return sampleRate != 0 ? mBufferSizeFrames * MILLIS_PER_SECOND / sampleRate : 0;
+ }
CommandMQ* getCommandMQ() const { return mCommandMQ.get(); }
DataMQ* getDataMQ() const { return mDataMQ.get(); }
size_t getFrameSizeBytes() const { return mFrameSizeBytes; }
@@ -190,6 +194,11 @@
// For tests.
friend class sp<StreamHalAidl>;
+ struct StatePositions {
+ int64_t framesAtFlushOrDrain;
+ int64_t framesAtStandby;
+ };
+
template<class T>
static std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> getStreamCommon(
const std::shared_ptr<T>& stream);
@@ -208,7 +217,8 @@
status_t getLatency(uint32_t *latency);
// Always returns non-negative values.
- status_t getObservablePosition(int64_t *frames, int64_t *timestamp);
+ status_t getObservablePosition(int64_t* frames, int64_t* timestamp,
+ StatePositions* statePositions = nullptr);
// Always returns non-negative values.
status_t getHardwarePosition(int64_t *frames, int64_t *timestamp);
@@ -232,9 +242,22 @@
status_t exit();
+ void onAsyncTransferReady();
+ void onAsyncDrainReady();
+ void onAsyncError();
+
const bool mIsInput;
const audio_config_base_t mConfig;
const StreamContextAidl mContext;
+ // This lock is used to make sending of a command and receiving a reply an atomic
+ // operation. Otherwise, when two threads are trying to send a command, they may both advance to
+ // reading of the reply once the HAL has consumed the command from the MQ, and that creates a
+ // race condition between them.
+ //
+ // Note that only access to command and reply MQs needs to be protected because the data MQ is
+ // only accessed by the I/O thread. Also, there is no need to protect lookup operations on the
+    // queues as they are thread-safe; only send/receive operations must be protected.
+ std::mutex mCommandReplyLock;
private:
static audio_config_base_t configToBase(const audio_config& config) {
@@ -248,17 +271,26 @@
std::lock_guard l(mLock);
return mLastReply.state;
}
+ // Note: Since `sendCommand` takes mLock while holding mCommandReplyLock, never call
+ // it with `mLock` being held.
status_t sendCommand(
- const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+ const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
- bool safeFromNonWorkerThread = false);
+ bool safeFromNonWorkerThread = false,
+ StatePositions* statePositions = nullptr);
status_t updateCountersIfNeeded(
- ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr);
+ ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
+ StatePositions* statePositions = nullptr);
const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> mStream;
const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
+ const int64_t mLastReplyLifeTimeNs;
std::mutex mLock;
::aidl::android::hardware::audio::core::StreamDescriptor::Reply mLastReply GUARDED_BY(mLock);
+ int64_t mLastReplyExpirationNs GUARDED_BY(mLock) = 0;
+ // Cached values of observable positions when the stream last entered certain state.
+ // Updated for output streams only.
+ StatePositions mStatePositions GUARDED_BY(mLock) = {};
// mStreamPowerLog is used for audio signal power logging.
StreamPowerLog mStreamPowerLog;
std::atomic<pid_t> mWorkerTid = -1;
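
The header comment above warns against calling sendCommand() with mLock held, because
sendCommand() acquires mLock while already holding mCommandReplyLock. A minimal sketch of the
resulting lock ordering rule (outer command/reply lock first, inner state lock second):

    #include <mutex>

    struct OrderedLocks {
        void sendLikeOperation() {
            std::lock_guard outer(mCommandReplyLock);  // 1st: serialize the MQ round trip
            // ... write command, read reply ...
            std::lock_guard inner(mLock);              // 2nd: update the cached state
            // ... update the cached reply ...
        }

        void readOnlyOperation() {
            std::lock_guard inner(mLock);  // fine on its own, but calling
            // sendLikeOperation() from here would invert the order
            // (mLock -> mCommandReplyLock) and can deadlock against another thread.
        }

        std::mutex mCommandReplyLock;
        std::mutex mLock;
    };
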
@@ -266,7 +298,9 @@
class CallbackBroker;
-class StreamOutHalAidl : public StreamOutHalInterface, public StreamHalAidl {
+class StreamOutHalAidl : public virtual StreamOutHalInterface,
+ public virtual StreamOutHalInterfaceCallback,
+ public StreamHalAidl {
public:
// Extract the output stream parameters and set by AIDL APIs.
status_t setParameters(const String8& kvPairs) override;
@@ -285,10 +319,7 @@
// Return the number of audio frames written by the audio dsp to DAC since
// the output has exited standby.
- status_t getRenderPosition(uint32_t *dspFrames) override;
-
- // Get the local time at which the next write to the audio driver will be presented.
- status_t getNextWriteTimestamp(int64_t *timestamp) override;
+ status_t getRenderPosition(uint64_t *dspFrames) override;
// Set the callback for notifying completion of non-blocking write and drain.
status_t setCallback(wp<StreamOutHalInterfaceCallback> callback) override;
@@ -308,12 +339,19 @@
// Requests notification when data buffered by the driver/hardware has been played.
status_t drain(bool earlyNotify) override;
- // Notifies to the audio driver to flush the queued data.
+    // Notifies the audio driver to flush (that is, drop) the queued data. Stream must
+ // already be paused before calling 'flush'.
status_t flush() override;
// Return a recent count of the number of audio frames presented to an external observer.
+ // This excludes frames which have been written but are still in the pipeline. See the
+ // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+ // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) override;
+ // Notifies the HAL layer that the framework considers the current playback as completed.
+ status_t presentationComplete() override;
+
// Called when the metadata of the stream's source has been changed.
status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
@@ -344,6 +382,11 @@
status_t exit() override;
+ // StreamOutHalInterfaceCallback
+ void onWriteReady() override;
+ void onDrainReady() override;
+ void onError() override;
+
private:
friend class sp<StreamOutHalAidl>;
@@ -352,6 +395,7 @@
const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut> mStream;
const wp<CallbackBroker> mCallbackBroker;
+ mediautils::atomic_wp<StreamOutHalInterfaceCallback> mClientCallback;
AudioOffloadMetadata mOffloadMetadata;
@@ -384,6 +428,7 @@
// Return a recent count of the number of audio frames received and
// the clock time associated with that frame count.
+ // The count must not reset to zero when a PCM input enters standby.
status_t getCapturePosition(int64_t *frames, int64_t *time) override;
// Get active microphones
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 77c75db..9e22700 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -17,6 +17,8 @@
#define LOG_TAG "StreamHalHidl"
//#define LOG_NDEBUG 0
+#include <cinttypes>
+
#include <android/hidl/manager/1.0/IServiceManager.h>
#include <hwbinder/IPCThreadState.h>
#include <media/AudioParameter.h>
@@ -589,32 +591,39 @@
return OK;
}
-status_t StreamOutHalHidl::getRenderPosition(uint32_t *dspFrames) {
+status_t StreamOutHalHidl::getRenderPosition(uint64_t *dspFrames) {
// TIME_CHECK(); // TODO(b/243839867) reenable only when optimized.
if (mStream == 0) return NO_INIT;
Result retval;
+ uint32_t halPosition = 0;
Return<void> ret = mStream->getRenderPosition(
[&](Result r, uint32_t d) {
retval = r;
if (retval == Result::OK) {
- *dspFrames = d;
+ halPosition = d;
}
});
- return processReturn("getRenderPosition", ret, retval);
-}
+ status_t status = processReturn("getRenderPosition", ret, retval);
+ if (status != OK) {
+ return status;
+ }
+ // Maintain a 64-bit render position using the 32-bit result from the HAL.
+ // This delta calculation relies on the arithmetic overflow behavior
+ // of integers. For example (100 - 0xFFFFFFF0) = 116.
+ std::lock_guard l(mPositionMutex);
+ const auto truncatedPosition = (uint32_t)mRenderPosition;
+    int32_t deltaHalPosition; // initialization not needed, overwritten by __builtin_sub_overflow()
+ (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
-status_t StreamOutHalHidl::getNextWriteTimestamp(int64_t *timestamp) {
- TIME_CHECK();
- if (mStream == 0) return NO_INIT;
- Result retval;
- Return<void> ret = mStream->getNextWriteTimestamp(
- [&](Result r, int64_t t) {
- retval = r;
- if (retval == Result::OK) {
- *timestamp = t;
- }
- });
- return processReturn("getRenderPosition", ret, retval);
+ if (deltaHalPosition >= 0) {
+ mRenderPosition += deltaHalPosition;
+ } else if (mExpectRetrograde) {
+ mExpectRetrograde = false;
+ mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
+ ALOGW("Retrograde motion of %" PRId32 " frames", -deltaHalPosition);
+ }
+ *dspFrames = mRenderPosition;
+ return OK;
}
status_t StreamOutHalHidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
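
The HIDL getRenderPosition() above maintains a 64-bit position from a wrapping 32-bit HAL counter
by accumulating signed deltas; the GCC/Clang builtin __builtin_sub_overflow relies on wrapping
subtraction, e.g. (100 - 0xFFFFFFF0) yields +116. A standalone sketch of the accumulator
(retrograde deltas are simply ignored here):

    #include <cstdint>

    struct RenderPosition64 {
        void update(uint32_t halPosition) {
            const uint32_t truncated = static_cast<uint32_t>(mPosition);
            int32_t delta;
            // Wrapping subtraction; the overflow flag is intentionally discarded.
            (void)__builtin_sub_overflow(halPosition, truncated, &delta);
            if (delta >= 0) {
                mPosition += delta;
            }
        }
        uint64_t mPosition = 0;
    };

    // Example: with mPosition at 0xFFFFFFF0, a HAL reading of 100 produces delta == 116,
    // so the 64-bit position advances to 0x100000064 instead of jumping back to 100.
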
@@ -667,9 +676,23 @@
status_t StreamOutHalHidl::flush() {
TIME_CHECK();
if (mStream == 0) return NO_INIT;
+ {
+ std::lock_guard l(mPositionMutex);
+ mRenderPosition = 0;
+ mExpectRetrograde = false;
+ }
return processReturn("pause", mStream->flush());
}
+status_t StreamOutHalHidl::standby() {
+ {
+ std::lock_guard l(mPositionMutex);
+ mRenderPosition = 0;
+ mExpectRetrograde = false;
+ }
+ return StreamHalHidl::standby();
+}
+
status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
// TIME_CHECK(); // TODO(b/243839867) reenable only when optimized.
if (mStream == 0) return NO_INIT;
@@ -696,6 +719,16 @@
}
}
+status_t StreamOutHalHidl::presentationComplete() {
+ // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
+ // transitioning between tracks.
+ // The HAL resets the frame position without flush/stop being called, but calls back prior to
+ // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
+ // mRenderPosition.
+ mExpectRetrograde = true;
+ return OK;
+}
+
#if MAJOR_VERSION == 2
status_t StreamOutHalHidl::updateSourceMetadata(
const StreamOutHalInterface::SourceMetadata& /* sourceMetadata */) {
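
presentationComplete() above arms mExpectRetrograde so that the next backwards jump of the HAL
frame counter (a gapless track transition) is accepted once instead of being suppressed. A
minimal sketch of that one-shot allowance on an otherwise monotonic counter:

    #include <cstdint>

    struct MonotonicWithReset {
        void expectReset() { mExpectRetrograde = true; }

        void advanceTo(uint64_t candidate) {
            if (candidate >= mPosition) {
                mPosition = candidate;
            } else if (mExpectRetrograde) {
                mExpectRetrograde = false;  // consume the one-shot permission
                mPosition = candidate;      // accept the expected reset
            }                               // otherwise treat it as a glitch, keep the old value
        }

        uint64_t mPosition = 0;
        bool mExpectRetrograde = false;
    };
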
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 48da633..433e0a3 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -18,10 +18,12 @@
#define ANDROID_HARDWARE_STREAM_HAL_HIDL_H
#include <atomic>
+#include <mutex>
#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStream.h)
#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamIn.h)
#include PATH(android/hardware/audio/FILE_VERSION/IStreamOut.h)
+#include <android-base/thread_annotations.h>
#include <fmq/EventFlag.h>
#include <fmq/MessageQueue.h>
#include <media/audiohal/EffectHalInterface.h>
@@ -119,6 +121,9 @@
class StreamOutHalHidl : public StreamOutHalInterface, public StreamHalHidl {
public:
+ // Put the audio hardware input/output into standby mode (from StreamHalInterface).
+ status_t standby() override;
+
// Return the frame size (number of bytes per sample) of a stream.
virtual status_t getFrameSize(size_t *size);
@@ -136,10 +141,7 @@
// Return the number of audio frames written by the audio dsp to DAC since
// the output has exited standby.
- virtual status_t getRenderPosition(uint32_t *dspFrames);
-
- // Get the local time at which the next write to the audio driver will be presented.
- virtual status_t getNextWriteTimestamp(int64_t *timestamp);
+ virtual status_t getRenderPosition(uint64_t *dspFrames);
// Set the callback for notifying completion of non-blocking write and drain.
virtual status_t setCallback(wp<StreamOutHalInterfaceCallback> callback);
@@ -159,12 +161,19 @@
// Requests notification when data buffered by the driver/hardware has been played.
virtual status_t drain(bool earlyNotify);
- // Notifies to the audio driver to flush the queued data.
+    // Notifies the audio driver to flush (that is, drop) the queued data. Stream must
+ // already be paused before calling 'flush'.
virtual status_t flush();
// Return a recent count of the number of audio frames presented to an external observer.
+ // This excludes frames which have been written but are still in the pipeline. See the
+ // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+ // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
+ // Notifies the HAL layer that the framework considers the current playback as completed.
+ status_t presentationComplete() override;
+
// Called when the metadata of the stream's source has been changed.
status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
@@ -221,6 +230,10 @@
std::unique_ptr<StatusMQ> mStatusMQ;
std::atomic<pid_t> mWriterClient;
EventFlag* mEfGroup;
+ std::mutex mPositionMutex;
+    // Used to correctly expand the 32-bit position reported by the HAL.
+ uint64_t mRenderPosition GUARDED_BY(mPositionMutex) = 0;
+ bool mExpectRetrograde GUARDED_BY(mPositionMutex) = false; // See 'presentationComplete'.
// Can not be constructed directly by clients.
StreamOutHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream);
@@ -250,6 +263,7 @@
// Return a recent count of the number of audio frames received and
// the clock time associated with that frame count.
+ // The count must not reset to zero when a PCM input enters standby.
virtual status_t getCapturePosition(int64_t *frames, int64_t *time);
// Get active microphones
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
index 3cac591..642c370 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
@@ -71,7 +71,6 @@
status_t AidlConversionPresetReverb::getParameter(EffectParamWriter& param) {
uint32_t type = 0;
uint16_t value = 0;
- ALOGE("%s enter %s", __func__, param.toString().c_str());
if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
OK != param.readFromParameter(&type)) {
ALOGE("%s invalid param %s", __func__, param.toString().c_str());
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
index d1794f0..c2aa278 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
@@ -142,15 +142,17 @@
toString(mode).c_str());
return status;
}
- ALOGI("%s %d: %s", __func__, __LINE__, aidlParam.toString().c_str());
aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer, headTrackingSensorId,
sensorId);
- ALOGI("%s %d: %s", __func__, __LINE__, aidlParam.toString().c_str());
return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
}
default: {
- ALOGE("%s %d invalid command %u", __func__, __LINE__, command);
- return BAD_VALUE;
+            // For a vendor extension, copy the data area into the DefaultExtension; the parameter is ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_VendorExtension(param));
+ aidlParam =
+ MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer, vendor, ext);
+ break;
}
}
} else {
@@ -158,7 +160,6 @@
::aidl::android::legacy2aidl_EffectParameterReader_Parameter(param));
}
- ALOGI("%s %d: %s", __func__, __LINE__, aidlParam.toString().c_str());
return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
}
@@ -178,17 +179,19 @@
if (!range) {
return BAD_VALUE;
}
+ std::vector<Spatialization::Level> levels;
for (const auto level : ::ndk::enum_range<Spatialization::Level>()) {
const auto spatializer =
Spatializer::make<Spatializer::spatializationLevel>(level);
if (spatializer >= range->min && spatializer <= range->max) {
- if (status_t status = param.writeToValue(&level); status != OK) {
- ALOGI("%s %d: write level %s to value failed %d", __func__, __LINE__,
- toString(level).c_str(), status);
- return status;
- }
+ levels.emplace_back(level);
}
}
+ const uint8_t num = levels.size();
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+ for (const auto level : levels) {
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&level));
+ }
return OK;
}
case SPATIALIZER_PARAM_LEVEL: {
@@ -200,7 +203,6 @@
const auto level = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
aidlParam, Spatializer, spatializer, Spatializer::spatializationLevel,
Spatialization::Level));
- ALOGI("%s %d: %s", __func__, __LINE__, aidlParam.toString().c_str());
return param.writeToValue(&level);
}
case SPATIALIZER_PARAM_HEADTRACKING_SUPPORTED: {
@@ -227,7 +229,6 @@
const auto mode = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
aidlParam, Spatializer, spatializer, Spatializer::headTrackingMode,
HeadTracking::Mode));
- ALOGI("%s %d: %s", __func__, __LINE__, aidlParam.toString().c_str());
return param.writeToValue(&mode);
}
case SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS: {
@@ -239,17 +240,15 @@
const auto& supportedLayouts = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
aidlParam, Spatializer, spatializer, Spatializer::supportedChannelLayout,
std::vector<AudioChannelLayout>));
+            // audio_channel_mask_t is a uint32_t-based enum, so write the count as a 32-bit value
+ const uint32_t num = supportedLayouts.size();
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
for (const auto& layout : supportedLayouts) {
audio_channel_mask_t mask = VALUE_OR_RETURN_STATUS(
::aidl::android::aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
layout, false /* isInput */));
- if (status_t status = param.writeToValue(&mask); status != OK) {
- ALOGI("%s %d: write mask %s to value failed %d", __func__, __LINE__,
- layout.toString().c_str(), status);
- return status;
- }
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&mask));
}
- ALOGI("%s %d: %s", __func__, __LINE__, aidlParam.toString().c_str());
return OK;
}
case SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES: {
@@ -258,17 +257,19 @@
if (!range) {
return BAD_VALUE;
}
+ std::vector<Spatialization::Mode> modes;
for (const auto mode : ::ndk::enum_range<Spatialization::Mode>()) {
if (const auto spatializer =
Spatializer::make<Spatializer::spatializationMode>(mode);
spatializer >= range->min && spatializer <= range->max) {
- if (status_t status = param.writeToValue(&mode); status != OK) {
- ALOGI("%s %d: write mode %s to value failed %d", __func__, __LINE__,
- toString(mode).c_str(), status);
- return status;
- }
+ modes.emplace_back(mode);
}
}
+ const uint8_t num = modes.size();
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+ for (const auto mode : modes) {
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&mode));
+ }
return OK;
}
case SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION: {
@@ -277,17 +278,18 @@
if (!range) {
return BAD_VALUE;
}
+ std::vector<HeadTracking::ConnectionMode> modes;
for (const auto mode : ::ndk::enum_range<HeadTracking::ConnectionMode>()) {
if (const auto spatializer =
Spatializer::make<Spatializer::headTrackingConnectionMode>(mode);
-                            spatializer < range->min || spatializer > range->max) {
+                            spatializer >= range->min && spatializer <= range->max) {
- continue;
+ modes.emplace_back(mode);
}
- if (status_t status = param.writeToValue(&mode); status != OK) {
- ALOGI("%s %d: write mode %s to value failed %d", __func__, __LINE__,
- toString(mode).c_str(), status);
- return status;
- }
+ }
+ const uint8_t num = modes.size();
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+ for (const auto mode : modes) {
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&mode));
}
return OK;
}
@@ -311,21 +313,19 @@
Spatializer::headTrackingSensorId, int32_t));
uint32_t modeInt32 = static_cast<int32_t>(mode);
if (status = param.writeToValue(&modeInt32); status != OK) {
- ALOGI("%s %d: write mode %s to value failed %d", __func__, __LINE__,
+ ALOGW("%s %d: write mode %s to value failed %d", __func__, __LINE__,
toString(mode).c_str(), status);
return status;
}
if (status = param.writeToValue(&sensorId); status != OK) {
- ALOGI("%s %d: write sensorId %d to value failed %d", __func__, __LINE__,
+ ALOGW("%s %d: write sensorId %d to value failed %d", __func__, __LINE__,
sensorId, status);
return status;
}
- ALOGI("%s %d: %s", __func__, __LINE__, aidlParam.toString().c_str());
return OK;
}
default: {
- ALOGE("%s %d invalid command %u", __func__, __LINE__, command);
- return BAD_VALUE;
+ VENDOR_EXTENSION_GET_AND_RETURN(Spatializer, spatializer, param);
}
}
} else {
@@ -343,8 +343,6 @@
idTag.extension.setParcelable(defaultExt);
Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, idTag);
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
- ALOGI("%s %d: %s", __func__, __LINE__,
- aidlParam.get<Parameter::specific>().toString().c_str());
// copy the AIDL extension data back to effect_param_t
return VALUE_OR_RETURN_STATUS(
::aidl::android::aidl2legacy_Parameter_EffectParameterWriter(aidlParam, param));
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
index cad0068..db5cb9a 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
@@ -133,7 +133,6 @@
const audio_channel_mask_t chMask = ::aidl::android::
aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
angle.channel, false);
- ALOGW("%s aidl %d ch %d", __func__, angle.channel, chMask);
if (OK != param.writeToValue(&chMask) ||
OK != param.writeToValue(&angle.azimuthDegree) ||
OK != param.writeToValue(&angle.elevationDegree)) {
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 37615af..585a895 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -135,6 +135,38 @@
virtual ~StreamOutHalInterfaceLatencyModeCallback() = default;
};
+/**
+ * On position reporting. There are two methods: 'getRenderPosition' and
+ * 'getPresentationPosition'. The first difference is that they may have a
+ * time offset because "render" position relates to what happens between
+ * ADSP and DAC, while "observable" position is relative to the external
+ * observer. The second difference is that 'getRenderPosition' always
+ * resets on standby (for all types of stream data) according to its
+ * definition. Since the original C definition of 'getRenderPosition' used
+ * 32-bit frame counters, and also because in complex playback chains that
+ * include wireless devices the "observable" position has more practical
+ * meaning, 'getRenderPosition' does not exist in the AIDL HAL interface.
+ * The table below summarizes frame count behavior for 'getPresentationPosition':
+ *
+ * | Mixed | Direct | Direct
+ * | | non-offload | offload
+ * ==============|============|==============|==============
+ * PCM and | Continuous | |
+ * encapsulated | | |
+ * bitstream | | |
+ * --------------|------------| Continuous† |
+ * Bitstream | | | Reset on
+ * encapsulated | | | flush, drain
+ * into PCM | | | and standby
+ * | Not | |
+ * --------------| supported |--------------|
+ * Bitstream | | Reset on |
+ * | | flush, drain |
+ * | | and standby |
+ * | | |
+ *
+ * † - on standby, reset of the frame count happens at the framework level.
+ */
class StreamOutHalInterface : public virtual StreamHalInterface {
public:
// Return the audio hardware driver estimated latency in milliseconds.
@@ -151,10 +183,7 @@
// Return the number of audio frames written by the audio dsp to DAC since
// the output has exited standby.
- virtual status_t getRenderPosition(uint32_t *dspFrames) = 0;
-
- // Get the local time at which the next write to the audio driver will be presented.
- virtual status_t getNextWriteTimestamp(int64_t *timestamp) = 0;
+ virtual status_t getRenderPosition(uint64_t *dspFrames) = 0;
// Set the callback for notifying completion of non-blocking write and drain.
// The callback must be owned by someone else. The output stream does not own it
@@ -176,12 +205,19 @@
// Requests notification when data buffered by the driver/hardware has been played.
virtual status_t drain(bool earlyNotify) = 0;
- // Notifies to the audio driver to flush the queued data.
+    // Notifies the audio driver to flush (that is, drop) the queued data. Stream must
+ // already be paused before calling 'flush'.
virtual status_t flush() = 0;
// Return a recent count of the number of audio frames presented to an external observer.
+ // This excludes frames which have been written but are still in the pipeline. See the
+ // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+ // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) = 0;
+ // Notifies the HAL layer that the framework considers the current playback as completed.
+ virtual status_t presentationComplete() = 0;
+
struct SourceMetadata {
std::vector<playback_track_metadata_v7_t> tracks;
};
@@ -270,6 +306,7 @@
// Return a recent count of the number of audio frames received and
// the clock time associated with that frame count.
+ // The count must not reset to zero when a PCM input enters standby.
virtual status_t getCapturePosition(int64_t *frames, int64_t *time) = 0;
// Get active microphones
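
The getPresentationPosition() contract documented above (frames seen by an external observer,
excluding frames still in the pipeline) is what lets a client estimate how much audio it has
buffered downstream. A small sketch under that assumption; the numbers are illustrative only:

    #include <cstdint>

    struct PipelineEstimate {
        uint64_t pipelineFrames;
        double bufferedMs;
    };

    // 'framesWritten' is what the client has handed to the stream so far,
    // 'framesPresented' comes from getPresentationPosition().
    PipelineEstimate estimatePipeline(uint64_t framesWritten, uint64_t framesPresented,
                                      uint32_t sampleRate) {
        PipelineEstimate e{};
        e.pipelineFrames =
                framesWritten > framesPresented ? framesWritten - framesPresented : 0;
        e.bufferedMs = sampleRate != 0
                ? static_cast<double>(e.pipelineFrames) * 1000.0 / sampleRate
                : 0.0;
        return e;
    }

    // Example: 48000 frames written, 43200 presented at 48 kHz ->
    // 4800 frames in flight, i.e. roughly 100 ms of buffered audio.
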
diff --git a/media/libaudiohal/tests/Android.bp b/media/libaudiohal/tests/Android.bp
index 1a54500..3c8e087 100644
--- a/media/libaudiohal/tests/Android.bp
+++ b/media/libaudiohal/tests/Android.bp
@@ -24,8 +24,8 @@
name: "libaudiohal_aidl_test_default",
test_suites: ["device-tests"],
defaults: [
- "libaudiohal_default",
"libaudiohal_aidl_default",
+ "libaudiohal_default",
],
shared_libs: [
"libaudiohal",
@@ -35,8 +35,8 @@
cc_test {
name: "CoreAudioHalAidlTest",
srcs: [
- "CoreAudioHalAidl_test.cpp",
":core_audio_hal_aidl_src_files",
+ "CoreAudioHalAidl_test.cpp",
],
defaults: ["libaudiohal_aidl_test_default"],
header_libs: ["libaudiohalimpl_headers"],
@@ -55,8 +55,8 @@
cc_test {
name: "EffectProxyTest",
srcs: [
- "EffectProxy_test.cpp",
":audio_effectproxy_src_files",
+ "EffectProxy_test.cpp",
],
defaults: [
"libaudiohal_aidl_test_default",
@@ -68,8 +68,8 @@
cc_test {
name: "EffectHalVersionCompatibilityTest",
srcs: [
- "EffectHalVersionCompatibility_test.cpp",
":audio_effect_hal_aidl_src_files",
+ "EffectHalVersionCompatibility_test.cpp",
],
defaults: ["libaudiohal_aidl_test_default"],
header_libs: ["libaudiohalimpl_headers"],
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 3541078..5106874 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -14,7 +14,9 @@
* limitations under the License.
*/
+#include <algorithm>
#include <memory>
+#include <mutex>
#include <string>
#include <vector>
@@ -22,6 +24,7 @@
#include <gtest/gtest.h>
#include <DeviceHalAidl.h>
+#include <Hal2AidlMapper.h>
#include <StreamHalAidl.h>
#include <aidl/android/hardware/audio/core/BnModule.h>
#include <aidl/android/hardware/audio/core/BnStreamCommon.h>
@@ -31,7 +34,24 @@
namespace {
+using ::aidl::android::hardware::audio::core::AudioPatch;
+using ::aidl::android::hardware::audio::core::AudioRoute;
using ::aidl::android::hardware::audio::core::VendorParameter;
+using ::aidl::android::media::audio::common::AudioChannelLayout;
+using ::aidl::android::media::audio::common::AudioConfig;
+using ::aidl::android::media::audio::common::AudioDeviceDescription;
+using ::aidl::android::media::audio::common::AudioDeviceType;
+using ::aidl::android::media::audio::common::AudioFormatDescription;
+using ::aidl::android::media::audio::common::AudioFormatType;
+using ::aidl::android::media::audio::common::AudioIoFlags;
+using ::aidl::android::media::audio::common::AudioPort;
+using ::aidl::android::media::audio::common::AudioPortConfig;
+using ::aidl::android::media::audio::common::AudioPortDeviceExt;
+using ::aidl::android::media::audio::common::AudioPortExt;
+using ::aidl::android::media::audio::common::AudioPortMixExt;
+using ::aidl::android::media::audio::common::AudioProfile;
+using ::aidl::android::media::audio::common::AudioSource;
+using ::aidl::android::media::audio::common::PcmType;
class VendorParameterMock {
public:
@@ -63,9 +83,105 @@
std::vector<VendorParameter> mSyncParameters;
};
+struct Configuration {
+ std::vector<AudioPort> ports;
+ std::vector<AudioPortConfig> portConfigs;
+ std::vector<AudioRoute> routes;
+ std::vector<AudioPatch> patches;
+ int32_t nextPortId = 1;
+ int32_t nextPatchId = 1;
+};
+
+void fillProfile(AudioProfile* profile, const std::vector<int32_t>& channelLayouts,
+ const std::vector<int32_t>& sampleRates) {
+ for (auto layout : channelLayouts) {
+ profile->channelMasks.push_back(
+ AudioChannelLayout::make<AudioChannelLayout::layoutMask>(layout));
+ }
+ profile->sampleRates.insert(profile->sampleRates.end(), sampleRates.begin(), sampleRates.end());
+}
+
+AudioProfile createProfile(PcmType pcmType, const std::vector<int32_t>& channelLayouts,
+ const std::vector<int32_t>& sampleRates) {
+ AudioProfile profile;
+ profile.format.type = AudioFormatType::PCM;
+ profile.format.pcm = pcmType;
+ fillProfile(&profile, channelLayouts, sampleRates);
+ return profile;
+}
+
+AudioPortExt createPortDeviceExt(AudioDeviceType devType, int32_t flags,
+ std::string connection = "") {
+ AudioPortDeviceExt deviceExt;
+ deviceExt.device.type.type = devType;
+ if (devType == AudioDeviceType::IN_MICROPHONE && connection.empty()) {
+ deviceExt.device.address = "bottom";
+ } else if (devType == AudioDeviceType::IN_MICROPHONE_BACK && connection.empty()) {
+ deviceExt.device.address = "back";
+ }
+ deviceExt.device.type.connection = std::move(connection);
+ deviceExt.flags = flags;
+ return AudioPortExt::make<AudioPortExt::device>(deviceExt);
+}
+
+AudioPortExt createPortMixExt(int32_t maxOpenStreamCount, int32_t maxActiveStreamCount) {
+ AudioPortMixExt mixExt;
+ mixExt.maxOpenStreamCount = maxOpenStreamCount;
+ mixExt.maxActiveStreamCount = maxActiveStreamCount;
+ return AudioPortExt::make<AudioPortExt::mix>(mixExt);
+}
+
+AudioPort createPort(int32_t id, const std::string& name, int32_t flags, bool isInput,
+ const AudioPortExt& ext) {
+ AudioPort port;
+ port.id = id;
+ port.name = name;
+ port.flags = isInput ? AudioIoFlags::make<AudioIoFlags::input>(flags)
+ : AudioIoFlags::make<AudioIoFlags::output>(flags);
+ port.ext = ext;
+ return port;
+}
+
+AudioRoute createRoute(const std::vector<AudioPort>& sources, const AudioPort& sink) {
+ AudioRoute route;
+ route.sinkPortId = sink.id;
+ std::transform(sources.begin(), sources.end(), std::back_inserter(route.sourcePortIds),
+ [](const auto& port) { return port.id; });
+ return route;
+}
+
+template <typename T>
+auto findById(std::vector<T>& v, int32_t id) {
+ return std::find_if(v.begin(), v.end(), [&](const auto& e) { return e.id == id; });
+}
+
+Configuration getTestConfiguration() {
+ const std::vector<AudioProfile> standardPcmAudioProfiles = {
+ createProfile(PcmType::INT_16_BIT, {AudioChannelLayout::LAYOUT_STEREO}, {48000})};
+ Configuration c;
+
+ AudioPort btOutDevice =
+ createPort(c.nextPortId++, "BT A2DP Out", 0, false,
+ createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
+ AudioDeviceDescription::CONNECTION_BT_A2DP));
+ btOutDevice.profiles = standardPcmAudioProfiles;
+ c.ports.push_back(btOutDevice);
+
+ AudioPort btOutMix =
+ createPort(c.nextPortId++, "a2dp output", 0, false, createPortMixExt(1, 1));
+ btOutMix.profiles = standardPcmAudioProfiles;
+ c.ports.push_back(btOutMix);
+
+ c.routes.push_back(createRoute({btOutMix}, btOutDevice));
+
+ return c;
+}
+
class ModuleMock : public ::aidl::android::hardware::audio::core::BnModule,
public VendorParameterMock {
public:
+ ModuleMock() = default;
+ explicit ModuleMock(const Configuration& config) : mConfig(config) {}
bool isScreenTurnedOn() const { return mIsScreenTurnedOn; }
ScreenRotation getScreenRotation() const { return mScreenRotation; }
@@ -91,35 +207,91 @@
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus connectExternalDevice(
- const ::aidl::android::media::audio::common::AudioPort&,
- ::aidl::android::media::audio::common::AudioPort*) override {
+ const ::aidl::android::media::audio::common::AudioPort& portIdAndData,
+ ::aidl::android::media::audio::common::AudioPort* port) override {
+ auto src = portIdAndData; // Make a copy to mimic RPC behavior.
+ auto iter = findById<AudioPort>(mConfig.ports, src.id);
+ if (iter == mConfig.ports.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ *port = *iter;
+ port->ext = src.ext;
+ port->id = mConfig.nextPortId++;
+ ALOGD("%s: returning %s", __func__, port->toString().c_str());
+ mConfig.ports.push_back(*port);
+ std::vector<AudioRoute> newRoutes;
+ for (auto& r : mConfig.routes) {
+ if (r.sinkPortId == src.id) {
+ newRoutes.push_back(AudioRoute{.sourcePortIds = r.sourcePortIds,
+ .sinkPortId = port->id,
+ .isExclusive = r.isExclusive});
+ } else if (std::find(r.sourcePortIds.begin(), r.sourcePortIds.end(), src.id) !=
+ r.sourcePortIds.end()) {
+ r.sourcePortIds.push_back(port->id);
+ }
+ }
+ mConfig.routes.insert(mConfig.routes.end(), newRoutes.begin(), newRoutes.end());
return ndk::ScopedAStatus::ok();
}
- ndk::ScopedAStatus disconnectExternalDevice(int32_t) override {
+ ndk::ScopedAStatus disconnectExternalDevice(int32_t portId) override {
+ auto iter = findById<AudioPort>(mConfig.ports, portId);
+ if (iter == mConfig.ports.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ mConfig.ports.erase(iter);
+ for (auto it = mConfig.routes.begin(); it != mConfig.routes.end();) {
+ if (it->sinkPortId == portId) {
+ it = mConfig.routes.erase(it);
+ } else {
+ if (auto srcIt =
+ std::find(it->sourcePortIds.begin(), it->sourcePortIds.end(), portId);
+ srcIt != it->sourcePortIds.end()) {
+ it->sourcePortIds.erase(srcIt);
+ }
+ ++it;
+ }
+ }
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus getAudioPatches(
- std::vector<::aidl::android::hardware::audio::core::AudioPatch>*) override {
+ std::vector<::aidl::android::hardware::audio::core::AudioPatch>* patches) override {
+ *patches = mConfig.patches;
return ndk::ScopedAStatus::ok();
}
- ndk::ScopedAStatus getAudioPort(int32_t,
- ::aidl::android::media::audio::common::AudioPort*) override {
+ ndk::ScopedAStatus getAudioPort(
+ int32_t portId, ::aidl::android::media::audio::common::AudioPort* port) override {
+ auto iter = findById<AudioPort>(mConfig.ports, portId);
+ if (iter == mConfig.ports.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ *port = *iter;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus getAudioPortConfigs(
- std::vector<::aidl::android::media::audio::common::AudioPortConfig>*) override {
+ std::vector<::aidl::android::media::audio::common::AudioPortConfig>* configs) override {
+ *configs = mConfig.portConfigs;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus getAudioPorts(
- std::vector<::aidl::android::media::audio::common::AudioPort>*) override {
+ std::vector<::aidl::android::media::audio::common::AudioPort>* ports) override {
+ *ports = mConfig.ports;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus getAudioRoutes(
- std::vector<::aidl::android::hardware::audio::core::AudioRoute>*) override {
+ std::vector<::aidl::android::hardware::audio::core::AudioRoute>* routes) override {
+ *routes = mConfig.routes;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus getAudioRoutesForAudioPort(
- int32_t, std::vector<::aidl::android::hardware::audio::core::AudioRoute>*) override {
+ int32_t portId,
+ std::vector<::aidl::android::hardware::audio::core::AudioRoute>* routes) override {
+ for (auto& r : mConfig.routes) {
+ const auto& srcs = r.sourcePortIds;
+ if (r.sinkPortId == portId ||
+ std::find(srcs.begin(), srcs.end(), portId) != srcs.end()) {
+ routes->push_back(r);
+ }
+ }
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus openInputStream(const OpenInputStreamArguments&,
@@ -133,17 +305,69 @@
ndk::ScopedAStatus getSupportedPlaybackRateFactors(SupportedPlaybackRateFactors*) override {
return ndk::ScopedAStatus::ok();
}
- ndk::ScopedAStatus setAudioPatch(const ::aidl::android::hardware::audio::core::AudioPatch&,
- ::aidl::android::hardware::audio::core::AudioPatch*) override {
+ ndk::ScopedAStatus setAudioPatch(
+ const ::aidl::android::hardware::audio::core::AudioPatch& requested,
+ ::aidl::android::hardware::audio::core::AudioPatch* patch) override {
+ if (requested.id == 0) {
+ *patch = requested;
+ patch->id = mConfig.nextPatchId++;
+ mConfig.patches.push_back(*patch);
+ ALOGD("%s: returning %s", __func__, patch->toString().c_str());
+ } else {
+ auto iter = findById<AudioPatch>(mConfig.patches, requested.id);
+ if (iter == mConfig.patches.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ *iter = *patch = requested;
+ ALOGD("%s: updated %s", __func__, patch->toString().c_str());
+ }
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus setAudioPortConfig(
- const ::aidl::android::media::audio::common::AudioPortConfig&,
- ::aidl::android::media::audio::common::AudioPortConfig*, bool*) override {
+ const ::aidl::android::media::audio::common::AudioPortConfig& requested,
+ ::aidl::android::media::audio::common::AudioPortConfig* config,
+ bool* applied) override {
+ *applied = false;
+ auto src = requested; // Make a copy to mimic RPC behavior.
+ if (src.id == 0) {
+ *config = src;
+ if (config->ext.getTag() == AudioPortExt::unspecified) {
+ auto iter = findById<AudioPort>(mConfig.ports, src.portId);
+ if (iter == mConfig.ports.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ config->ext = iter->ext;
+ }
+ config->id = mConfig.nextPortId++;
+ mConfig.portConfigs.push_back(*config);
+ ALOGD("%s: returning %s", __func__, config->toString().c_str());
+ } else {
+ auto iter = findById<AudioPortConfig>(mConfig.portConfigs, src.id);
+ if (iter == mConfig.portConfigs.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ *iter = *config = src;
+ ALOGD("%s: updated %s", __func__, config->toString().c_str());
+ }
+ *applied = true;
return ndk::ScopedAStatus::ok();
}
- ndk::ScopedAStatus resetAudioPatch(int32_t) override { return ndk::ScopedAStatus::ok(); }
- ndk::ScopedAStatus resetAudioPortConfig(int32_t) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus resetAudioPatch(int32_t patchId) override {
+ auto iter = findById<AudioPatch>(mConfig.patches, patchId);
+ if (iter == mConfig.patches.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ mConfig.patches.erase(iter);
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus resetAudioPortConfig(int32_t portConfigId) override {
+ auto iter = findById<AudioPortConfig>(mConfig.portConfigs, portConfigId);
+ if (iter == mConfig.portConfigs.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ mConfig.portConfigs.erase(iter);
+ return ndk::ScopedAStatus::ok();
+ }
ndk::ScopedAStatus getMasterMute(bool*) override { return ndk::ScopedAStatus::ok(); }
ndk::ScopedAStatus setMasterMute(bool) override { return ndk::ScopedAStatus::ok(); }
ndk::ScopedAStatus getMasterVolume(float*) override { return ndk::ScopedAStatus::ok(); }
@@ -205,6 +429,7 @@
return ndk::ScopedAStatus::ok();
}
+ Configuration mConfig;
bool mIsScreenTurnedOn = false;
ScreenRotation mScreenRotation = ScreenRotation::DEG_0;
};
@@ -398,6 +623,35 @@
using namespace android;
+namespace {
+
+class StreamHalMock : public virtual StreamHalInterface {
+ public:
+ StreamHalMock() = default;
+ ~StreamHalMock() override = default;
+ status_t getBufferSize(size_t*) override { return OK; }
+ status_t getAudioProperties(audio_config_base_t*) override { return OK; }
+ status_t setParameters(const String8&) override { return OK; }
+ status_t getParameters(const String8&, String8*) override { return OK; }
+ status_t getFrameSize(size_t*) override { return OK; }
+ status_t addEffect(sp<EffectHalInterface>) override { return OK; }
+ status_t removeEffect(sp<EffectHalInterface>) override { return OK; }
+ status_t standby() override { return OK; }
+ status_t dump(int, const Vector<String16>&) override { return OK; }
+ status_t start() override { return OK; }
+ status_t stop() override { return OK; }
+ status_t createMmapBuffer(int32_t, struct audio_mmap_buffer_info*) override { return OK; }
+ status_t getMmapPosition(struct audio_mmap_position*) override { return OK; }
+ status_t setHalThreadPriority(int) override { return OK; }
+ status_t legacyCreateAudioPatch(const struct audio_port_config&, std::optional<audio_source_t>,
+ audio_devices_t) override {
+ return OK;
+ }
+ status_t legacyReleaseAudioPatch() override { return OK; }
+};
+
+} // namespace
+
class DeviceHalAidlTest : public testing::Test {
public:
void SetUp() override {
@@ -593,3 +847,297 @@
EXPECT_EQ(0UL, mStreamCommon->getAsyncParameters().size());
EXPECT_EQ(0UL, mStreamCommon->getSyncParameters().size());
}
+
+class Hal2AidlMapperTest : public testing::Test {
+ public:
+ void SetUp() override {
+ mModule = ndk::SharedRefBase::make<ModuleMock>(getTestConfiguration());
+ mMapper = std::make_unique<Hal2AidlMapper>("test", mModule);
+ ASSERT_EQ(OK, mMapper->initialize());
+
+ mConnectedPort.ext = createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
+ AudioDeviceDescription::CONNECTION_BT_A2DP);
+ mConnectedPort.ext.get<AudioPortExt::device>().device.address = "00:11:22:33:44:55";
+ ASSERT_EQ(OK, mMapper->setDevicePortConnectedState(mConnectedPort, true /*connected*/));
+
+ std::mutex mutex; // Only needed for cleanups.
+ auto mapperAccessor = std::make_unique<LockedAccessor<Hal2AidlMapper>>(*mMapper, mutex);
+ Hal2AidlMapper::Cleanups cleanups(*mapperAccessor);
+ AudioConfig config;
+ config.base.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+ AudioChannelLayout::LAYOUT_STEREO);
+ config.base.format =
+ AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT};
+ config.base.sampleRate = 48000;
+ ASSERT_EQ(OK,
+ mMapper->prepareToOpenStream(
+ 42 /*ioHandle*/, mConnectedPort.ext.get<AudioPortExt::device>().device,
+ AudioIoFlags::make<AudioIoFlags::output>(0), AudioSource::DEFAULT,
+ &cleanups, &config, &mMixPortConfig, &mPatch));
+ cleanups.disarmAll();
+ ASSERT_NE(0, mPatch.id);
+ ASSERT_NE(0, mMixPortConfig.id);
+ mStream = sp<StreamHalMock>::make();
+ mMapper->addStream(mStream, mMixPortConfig.id, mPatch.id);
+
+ ASSERT_EQ(OK, mMapper->findPortConfig(mConnectedPort.ext.get<AudioPortExt::device>().device,
+ &mDevicePortConfig));
+ ASSERT_EQ(1UL, mPatch.sourcePortConfigIds.size());
+ ASSERT_EQ(mMixPortConfig.id, mPatch.sourcePortConfigIds[0]);
+ ASSERT_EQ(1UL, mPatch.sinkPortConfigIds.size());
+ ASSERT_EQ(mDevicePortConfig.id, mPatch.sinkPortConfigIds[0]);
+ }
+
+ void TearDown() override {
+ mStream.clear();
+ mMapper.reset();
+ mModule.reset();
+ }
+
+ protected:
+ void CloseDisconnectImpl() {
+ mStream.clear();
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ }
+
+ void ConnectAnotherDevice() {
+ mConnectedPort.ext.get<AudioPortExt::device>().device.address = "00:11:22:33:44:66";
+ ASSERT_EQ(OK, mMapper->setDevicePortConnectedState(mConnectedPort, true /*connected*/));
+ }
+
+ void CreateFwkPatch(int32_t* patchId) {
+ std::mutex mutex; // Only needed for cleanups.
+ auto mapperAccessor = std::make_unique<LockedAccessor<Hal2AidlMapper>>(*mMapper, mutex);
+ Hal2AidlMapper::Cleanups cleanups(*mapperAccessor);
+ ASSERT_EQ(OK, mMapper->createOrUpdatePatch({mMixPortConfig}, {mDevicePortConfig}, patchId,
+ &cleanups));
+ cleanups.disarmAll();
+ }
+
+ void DisconnectDevice() {
+ ASSERT_EQ(OK, mMapper->prepareToDisconnectExternalDevice(mConnectedPort));
+ ASSERT_EQ(OK, mMapper->setDevicePortConnectedState(mConnectedPort, false /*connected*/));
+ }
+
+ void ReleaseFwkOnlyPatch(int32_t patchId) {
+ // The patch only exists for the framework.
+ EXPECT_EQ(patchId, mMapper->findFwkPatch(patchId));
+ ASSERT_EQ(BAD_VALUE, mMapper->releaseAudioPatch(patchId));
+ mMapper->eraseFwkPatch(patchId);
+ // The patch is now erased.
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+ }
+
+ std::shared_ptr<ModuleMock> mModule;
+ std::unique_ptr<Hal2AidlMapper> mMapper;
+ AudioPort mConnectedPort;
+ AudioPortConfig mMixPortConfig;
+ AudioPortConfig mDevicePortConfig;
+ AudioPatch mPatch;
+ sp<StreamHalInterface> mStream;
+};
+
+/**
+ * External device connections and patches tests diagram.
+ *
+ * [Connect device] -> [Create Stream]
+ * |-> [ (1) Close Stream] -> [Disconnect Device]
+ * |-> [ (2) Disconnect Device]
+ * | |-> [ (3) Close Stream]
+ * | \-> [ (4) Connect Another Device]
+ * | |-> (1)
+ * | |-> (2) -> (3)
+ * | \-> (5) -> (7)
+ * \-> [ (5) Create/Update Fwk Patch]
+ * |-> [(6) Release Fwk Patch]
+ * | |-> (1)
+ * | \-> (2) (including reconnection)
+ * \-> [(7) Disconnect Device]
+ * |-> [Release Fwk Patch] -> [Close Stream]
+ * \-> (4) -> (5) -> (6) -> (1)
+ *
+ * Note that the test (acting on behalf of DeviceHalAidl) is responsible
+ * for calling `eraseFwkPatch` and `updateFwkPatch` when needed.
+ */
+
+// (1)
+TEST_F(Hal2AidlMapperTest, CloseDisconnect) {
+ ASSERT_NO_FATAL_FAILURE(CloseDisconnectImpl());
+ // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+}
+
+// (2) -> (3)
+TEST_F(Hal2AidlMapperTest, DisconnectClose) {
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ mStream.clear();
+}
+
+// (2) -> (4) -> (1)
+TEST_F(Hal2AidlMapperTest, DisconnectConnectCloseDisconnect) {
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+ ASSERT_NO_FATAL_FAILURE(CloseDisconnectImpl());
+ // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+}
+
+// (2) -> (4) -> (2) -> (3)
+TEST_F(Hal2AidlMapperTest, DisconnectConnectDisconnectClose) {
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ mStream.clear();
+}
+
+// (5) -> (6) -> (1)
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchReleaseCloseDisconnect) {
+ int32_t patchId;
+ ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+ // Must be the patch created during stream opening.
+ ASSERT_EQ(mPatch.id, patchId);
+ // The patch was not reset by HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_EQ(OK, mMapper->releaseAudioPatch(patchId));
+ // The patch does not exist both for the fwk and the HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+ ASSERT_NO_FATAL_FAILURE(CloseDisconnectImpl());
+ // The patch does not exist both for the fwk and the HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+}
+
+// (5) -> (6) -> (2) -> (3)
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchReleaseDisconnectClose) {
+ int32_t patchId;
+ ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+ // Must be the patch created during stream opening.
+ ASSERT_EQ(mPatch.id, patchId);
+ // The patch was not reset by HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_EQ(OK, mMapper->releaseAudioPatch(patchId));
+ // The patch does not exist both for the fwk and the HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch does not exist both for the fwk and the HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ mStream.clear();
+}
+
+// (5) -> (6) -> (2) -> (4) -> (2) -> (3)
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchReleaseDisconnectConnectDisconnectClose) {
+ int32_t patchId;
+ ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+ // Must be the patch created during stream opening.
+ ASSERT_EQ(mPatch.id, patchId);
+ // The patch was not reset by HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_EQ(OK, mMapper->releaseAudioPatch(patchId));
+ // The patch does not exist both for the fwk and the HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch does not exist both for the fwk and the HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch exists neither for the fwk nor for the HAL, so it must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ mStream.clear();
+}
+
+// (5) -> (7) -> Release -> Close
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchDisconnectReleaseClose) {
+ int32_t patchId;
+ ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+ // Must be the patch created during stream opening.
+ ASSERT_EQ(mPatch.id, patchId);
+ // The patch was not reset by HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ ASSERT_NO_FATAL_FAILURE(ReleaseFwkOnlyPatch(patchId));
+
+ mStream.clear();
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+}
+
+// (5) -> (7) -> (4) -> (5) -> (6) -> (1)
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchDisconnectConnectUpdateReleaseCloseDisconnect) {
+ int32_t patchId;
+ ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+ // Must be the patch created during stream opening.
+ ASSERT_EQ(mPatch.id, patchId);
+ // The patch was not reset by HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch now only exists for the framework.
+ EXPECT_EQ(mPatch.id, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+ // Change the device address locally, for patch update.
+ mDevicePortConfig.ext.get<AudioPortExt::device>().device.address =
+ mConnectedPort.ext.get<AudioPortExt::device>().device.address;
+ int32_t newPatchId = patchId;
+ ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&newPatchId));
+ EXPECT_NE(patchId, newPatchId);
+ mMapper->updateFwkPatch(patchId, newPatchId);
+ EXPECT_EQ(newPatchId, mMapper->findFwkPatch(patchId));
+ // Just in case, check that HAL patch ID is not listed as a fwk patch.
+ EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+ // Verify that device port config was updated.
+ ASSERT_EQ(OK, mMapper->findPortConfig(mConnectedPort.ext.get<AudioPortExt::device>().device,
+ &mDevicePortConfig));
+
+ ASSERT_EQ(OK, mMapper->releaseAudioPatch(newPatchId));
+ // The patch exists neither for the fwk nor for the HAL, so it must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+ // Just in case, check that HAL patch ID is not listed.
+ EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+
+ ASSERT_NO_FATAL_FAILURE(CloseDisconnectImpl());
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+ EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+}
+
+// (2) -> (4) -> (5) -> (7) -> Release -> Close
+TEST_F(Hal2AidlMapperTest, DisconnectConnectCreateFwkPatchDisconnectReleaseClose) {
+ const int32_t patchId = mPatch.id;
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+ // Change the device address locally, for patch update.
+ mDevicePortConfig.ext.get<AudioPortExt::device>().device.address =
+ mConnectedPort.ext.get<AudioPortExt::device>().device.address;
+ int32_t newPatchId = 0; // Use 0 since the fwk does not know about the HAL patch.
+ EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+ ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&newPatchId));
+ EXPECT_NE(0, newPatchId);
+ EXPECT_NE(patchId, newPatchId);
+ // Just in case, check that HAL patch ID is not listed as a fwk patch.
+ EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+ // Verify that device port config was updated.
+ ASSERT_EQ(OK, mMapper->findPortConfig(mConnectedPort.ext.get<AudioPortExt::device>().device,
+ &mDevicePortConfig));
+
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ ASSERT_NO_FATAL_FAILURE(ReleaseFwkOnlyPatch(newPatchId));
+
+ mStream.clear();
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+}
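The tests above pin down the framework-patch bookkeeping of Hal2AidlMapper across device disconnect/reconnect cycles: findFwkPatch() returns 0 for IDs that are not tracked, and updateFwkPatch() remaps a framework patch ID to the patch the HAL re-created. A minimal sketch of that bookkeeping, under the assumption that it is a simple fwk-ID-to-HAL-ID map; the class name and map layout below are illustrative only, not the mapper's real internals.

```cpp
// Illustrative toy model of the fwkPatches bookkeeping the tests above verify.
// The class name is hypothetical; only the behavior of findFwkPatch()/updateFwkPatch()
// mirrors the expectations asserted in the tests.
#include <cstdint>
#include <map>

class FwkPatchBookkeeping {
  public:
    // Called when a patch survives only on the framework side (HAL device disconnected).
    void rememberFwkOnlyPatch(int32_t fwkPatchId) { mFwkPatches[fwkPatchId] = fwkPatchId; }
    // Called after the patch has been re-created in the HAL under a new ID.
    void updateFwkPatch(int32_t fwkPatchId, int32_t halPatchId) {
        if (auto it = mFwkPatches.find(fwkPatchId); it != mFwkPatches.end()) {
            it->second = halPatchId;
        }
    }
    // 0 means "not listed under fwkPatches", as asserted throughout the tests.
    int32_t findFwkPatch(int32_t fwkPatchId) const {
        const auto it = mFwkPatches.find(fwkPatchId);
        return it == mFwkPatches.end() ? 0 : it->second;
    }
    // Called when the patch is released or the stream is closed.
    void eraseFwkPatch(int32_t fwkPatchId) { mFwkPatches.erase(fwkPatchId); }

  private:
    std::map<int32_t, int32_t> mFwkPatches;  // fwk patch ID -> current HAL patch ID
};
```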
diff --git a/media/libaudioprocessing/Android.bp b/media/libaudioprocessing/Android.bp
index 6160d7d..c84796e 100644
--- a/media/libaudioprocessing/Android.bp
+++ b/media/libaudioprocessing/Android.bp
@@ -22,10 +22,11 @@
],
cflags: [
- "-Werror",
"-Wall",
// uncomment to disable NEON on architectures that actually do support NEON, for benchmarking
+
+ "-Werror",
// "-DUSE_NEON=false",
],
@@ -62,7 +63,7 @@
header_libs: [
"libaudiohal_headers",
"libbase_headers",
- "libmedia_headers"
+ "libmedia_headers",
],
shared_libs: [
@@ -87,8 +88,8 @@
"AudioMixerBase.cpp",
"AudioResampler.cpp",
"AudioResamplerCubic.cpp",
- "AudioResamplerSinc.cpp",
"AudioResamplerDyn.cpp",
+ "AudioResamplerSinc.cpp",
],
arch: {
diff --git a/media/libaudioprocessing/AudioMixerBase.cpp b/media/libaudioprocessing/AudioMixerBase.cpp
index 3d11d92..7e362f7 100644
--- a/media/libaudioprocessing/AudioMixerBase.cpp
+++ b/media/libaudioprocessing/AudioMixerBase.cpp
@@ -1122,7 +1122,7 @@
aux = t->auxBuffer + numFrames;
}
for (int outFrames = frameCount; outFrames > 0; ) {
- // t->in == nullptr can happen if the track was flushed just after having
+ // t->mIn == nullptr can happen if the track was flushed just after having
// been enabled for mixing.
if (t->mIn == nullptr) {
break;
diff --git a/media/libaudioprocessing/audio-resampler/Android.bp b/media/libaudioprocessing/audio-resampler/Android.bp
index 4ea75e7..791ae37 100644
--- a/media/libaudioprocessing/audio-resampler/Android.bp
+++ b/media/libaudioprocessing/audio-resampler/Android.bp
@@ -13,12 +13,12 @@
srcs: ["AudioResamplerCoefficients.cpp"],
shared_libs: [
- "libutils",
"liblog",
+ "libutils",
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
}
diff --git a/media/libaudioprocessing/tests/Android.bp b/media/libaudioprocessing/tests/Android.bp
index ad402db..48c97ab 100644
--- a/media/libaudioprocessing/tests/Android.bp
+++ b/media/libaudioprocessing/tests/Android.bp
@@ -28,8 +28,8 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
}
diff --git a/media/libaudioprocessing/tests/fuzzer/Android.bp b/media/libaudioprocessing/tests/fuzzer/Android.bp
index 8fb6fff..e4780cf 100644
--- a/media/libaudioprocessing/tests/fuzzer/Android.bp
+++ b/media/libaudioprocessing/tests/fuzzer/Android.bp
@@ -8,23 +8,23 @@
}
cc_fuzz {
- name: "libaudioprocessing_resampler_fuzzer",
- srcs: [
- "libaudioprocessing_resampler_fuzzer.cpp",
- ],
- defaults: ["libaudioprocessing_test_defaults"],
- static_libs: [
- "libsndfile",
- ],
+ name: "libaudioprocessing_resampler_fuzzer",
+ srcs: [
+ "libaudioprocessing_resampler_fuzzer.cpp",
+ ],
+ defaults: ["libaudioprocessing_test_defaults"],
+ static_libs: [
+ "libsndfile",
+ ],
}
cc_fuzz {
- name: "libaudioprocessing_record_buffer_converter_fuzzer",
- srcs: [
- "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
- ],
- defaults: ["libaudioprocessing_test_defaults"],
- static_libs: [
- "libsndfile",
- ],
+ name: "libaudioprocessing_record_buffer_converter_fuzzer",
+ srcs: [
+ "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
+ ],
+ defaults: ["libaudioprocessing_test_defaults"],
+ static_libs: [
+ "libsndfile",
+ ],
}
diff --git a/media/libcpustats/Android.bp b/media/libcpustats/Android.bp
index 1ab1de0..2b134a7 100644
--- a/media/libcpustats/Android.bp
+++ b/media/libcpustats/Android.bp
@@ -24,8 +24,8 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
host_supported: true,
diff --git a/media/libeffects/config/Android.bp b/media/libeffects/config/Android.bp
index 293a9c2..1672797 100644
--- a/media/libeffects/config/Android.bp
+++ b/media/libeffects/config/Android.bp
@@ -20,11 +20,11 @@
],
shared_libs: [
+ "libcutils",
"liblog",
+ "libmedia_helper",
"libtinyxml2",
"libutils",
- "libmedia_helper",
- "libcutils",
],
header_libs: [
diff --git a/media/libeffects/data/Android.bp b/media/libeffects/data/Android.bp
new file mode 100644
index 0000000..2acf229
--- /dev/null
+++ b/media/libeffects/data/Android.bp
@@ -0,0 +1,19 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+prebuilt_etc {
+ name: "framework-audio_effects.xml",
+ src: "audio_effects.xml",
+ filename: "audio_effects.xml",
+}
diff --git a/media/libeffects/downmix/Android.bp b/media/libeffects/downmix/Android.bp
index b56872c..fda5acc 100644
--- a/media/libeffects/downmix/Android.bp
+++ b/media/libeffects/downmix/Android.bp
@@ -37,9 +37,9 @@
relative_install_path: "soundfx",
cflags: [
- "-fvisibility=hidden",
"-Wall",
"-Werror",
+ "-fvisibility=hidden",
],
header_libs: [
@@ -51,16 +51,16 @@
cc_library_shared {
name: "libdownmixaidl",
srcs: [
- "aidl/EffectDownmix.cpp",
- "aidl/DownmixContext.cpp",
":effectCommonFile",
+ "aidl/DownmixContext.cpp",
+ "aidl/EffectDownmix.cpp",
],
defaults: [
"aidlaudioeffectservice_defaults",
],
header_libs: [
"libaudioeffects",
- "libhardware_headers"
+ "libhardware_headers",
],
shared_libs: [
"libaudioutils",
diff --git a/media/libeffects/downmix/aidl/DownmixContext.cpp b/media/libeffects/downmix/aidl/DownmixContext.cpp
index 5fb44b5..593e16f 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.cpp
+++ b/media/libeffects/downmix/aidl/DownmixContext.cpp
@@ -76,18 +76,15 @@
DownmixContext::DownmixContext(int statusDepth, const Parameter::Common& common)
: EffectContext(statusDepth, common) {
- LOG(DEBUG) << __func__;
mState = DOWNMIX_STATE_UNINITIALIZED;
init_params(common);
}
DownmixContext::~DownmixContext() {
- LOG(DEBUG) << __func__;
mState = DOWNMIX_STATE_UNINITIALIZED;
}
RetCode DownmixContext::enable() {
- LOG(DEBUG) << __func__;
if (mState != DOWNMIX_STATE_INITIALIZED) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -96,7 +93,6 @@
}
RetCode DownmixContext::disable() {
- LOG(DEBUG) << __func__;
if (mState != DOWNMIX_STATE_ACTIVE) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -105,7 +101,6 @@
}
void DownmixContext::reset() {
- LOG(DEBUG) << __func__;
disable();
resetBuffer();
}
@@ -127,7 +122,6 @@
return status;
}
- LOG(DEBUG) << __func__ << " start processing";
bool accumulate = false;
int frames = samples * sizeof(float) / getInputFrameSize();
if (mType == Downmix::Type::STRIP) {
@@ -152,9 +146,6 @@
}
}
int producedSamples = (samples / mInputChannelCount) << 1;
- LOG(DEBUG) << __func__ << " done processing " << samples << " samples, generated "
- << producedSamples << " frameSize: " << getInputFrameSize() << " - "
- << getOutputFrameSize();
return {STATUS_OK, samples, producedSamples};
}
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
index 46156ce..883d41d 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.cpp
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -14,10 +14,12 @@
* limitations under the License.
*/
+#define ATRACE_TAG ATRACE_TAG_AUDIO
#define LOG_TAG "AHAL_DownmixImpl"
#include <android-base/logging.h>
#include <system/audio_effects/effect_uuid.h>
+#include <utils/Trace.h>
#include "EffectDownmix.h"
@@ -36,7 +38,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<DownmixImpl>();
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -66,7 +67,6 @@
ndk::ScopedAStatus DownmixImpl::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
- LOG(DEBUG) << __func__ << kDescriptor.toString();
*_aidl_return = kDescriptor;
return ndk::ScopedAStatus::ok();
}
@@ -171,12 +171,16 @@
}
void DownmixImpl::process() {
+ ATRACE_NAME("Downmix::process");
/**
* wait for the EventFlag without lock, it's ok because the mEfGroup pointer will not change
* in the life cycle of workerThread (threadLoop).
*/
uint32_t efState = 0;
- if (!mEventFlag || ::android::OK != mEventFlag->wait(kEventFlagNotEmpty, &efState)) {
+ if (!mEventFlag ||
+ ::android::OK != mEventFlag->wait(mDataMqNotEmptyEf, &efState, 0 /* no timeout */,
+ true /* retry */) ||
+ !(efState & mDataMqNotEmptyEf)) {
LOG(ERROR) << getEffectName() << __func__ << ": StatusEventFlag invalid";
}
@@ -203,8 +207,6 @@
IEffect::Status status = effectProcessImpl(buffer, buffer, processSamples);
outputMQ->write(buffer, status.fmqProduced);
statusMQ->writeBlocking(&status, 1);
- LOG(VERBOSE) << getEffectName() << __func__ << ": done processing, effect consumed "
- << status.fmqConsumed << " produced " << status.fmqProduced;
}
}
}
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.h b/media/libeffects/downmix/aidl/EffectDownmix.h
index 54557dc..b7d621a 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.h
+++ b/media/libeffects/downmix/aidl/EffectDownmix.h
@@ -28,11 +28,8 @@
public:
static const std::string kEffectName;
static const Descriptor kDescriptor;
- DownmixImpl() { LOG(DEBUG) << __func__; }
- ~DownmixImpl() {
- cleanUp();
- LOG(DEBUG) << __func__;
- }
+ DownmixImpl() = default;
+ ~DownmixImpl() { cleanUp(); }
ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
diff --git a/media/libeffects/downmix/tests/Android.bp b/media/libeffects/downmix/tests/Android.bp
index 392a6fa..8cecbe2 100644
--- a/media/libeffects/downmix/tests/Android.bp
+++ b/media/libeffects/downmix/tests/Android.bp
@@ -14,7 +14,7 @@
//
// Use "atest downmix_tests" to run.
cc_test {
- name:"downmix_tests",
+ name: "downmix_tests",
gtest: true,
host_supported: true,
vendor: true,
@@ -45,7 +45,7 @@
// test application and outputs then compares files in a local directory
// on device (/data/local/tmp/downmixtest/).
cc_test {
- name:"downmixtest",
+ name: "downmixtest",
host_supported: false,
proprietary: true,
diff --git a/media/libeffects/dynamicsproc/Android.bp b/media/libeffects/dynamicsproc/Android.bp
index e93a4e6..12477a4 100644
--- a/media/libeffects/dynamicsproc/Android.bp
+++ b/media/libeffects/dynamicsproc/Android.bp
@@ -33,7 +33,7 @@
}
cc_defaults {
- name : "dynamicsprocessingdefaults",
+ name: "dynamicsprocessingdefaults",
srcs: [
"dsp/DPBase.cpp",
"dsp/DPFrequency.cpp",
@@ -50,9 +50,9 @@
"libeigen",
],
cflags: [
- "-Wthread-safety",
"-Wall",
"-Werror",
+ "-Wthread-safety",
],
relative_install_path: "soundfx",
}
@@ -80,9 +80,9 @@
name: "libdynamicsprocessingaidl",
srcs: [
+ ":effectCommonFile",
"aidl/DynamicsProcessing.cpp",
"aidl/DynamicsProcessingContext.cpp",
- ":effectCommonFile",
],
defaults: [
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index 7e1549d..836e034 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -41,7 +41,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<DynamicsProcessingImpl>();
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -206,7 +205,6 @@
ndk::ScopedAStatus DynamicsProcessingImpl::open(const Parameter::Common& common,
const std::optional<Parameter::Specific>& specific,
OpenEffectReturn* ret) {
- LOG(DEBUG) << __func__;
// effect only support 32bits float
RETURN_IF(common.input.base.format.pcm != common.output.base.format.pcm ||
common.input.base.format.pcm != PcmType::FLOAT_32_BIT,
@@ -215,11 +213,12 @@
RETURN_OK_IF(mState != State::INIT);
mImplContext = createContext(common);
RETURN_IF(!mContext || !mImplContext, EX_NULL_POINTER, "createContextFailed");
- int version = 0;
- RETURN_IF(!getInterfaceVersion(&version).isOk(), EX_UNSUPPORTED_OPERATION,
+ RETURN_IF(!getInterfaceVersion(&mVersion).isOk(), EX_UNSUPPORTED_OPERATION,
"FailedToGetInterfaceVersion");
mImplContext->setVersion(mVersion);
mEventFlag = mImplContext->getStatusEventFlag();
+ mDataMqNotEmptyEf =
+ mVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty : kEventFlagNotEmpty;
if (specific.has_value()) {
RETURN_IF_ASTATUS_NOT_OK(setParameterSpecific(specific.value()), "setSpecParamErr");
@@ -233,14 +232,14 @@
mState = State::IDLE;
mContext->dupeFmq(ret);
- RETURN_IF(createThread(getEffectName()) != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION,
- "FailedToCreateWorker");
+ RETURN_IF(createThread(getEffectNameWithVersion()) != RetCode::SUCCESS,
+ EX_UNSUPPORTED_OPERATION, "FailedToCreateWorker");
+ LOG(INFO) << getEffectNameWithVersion() << __func__;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus DynamicsProcessingImpl::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
- LOG(DEBUG) << __func__ << kDescriptor.toString();
*_aidl_return = kDescriptor;
return ndk::ScopedAStatus::ok();
}
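The open() change above, together with the DownmixImpl::process() change earlier, replaces the fixed kEventFlagNotEmpty wait with a bit cached in mDataMqNotEmptyEf that depends on the negotiated interface version, and the worker now verifies the returned flag state. A condensed sketch of that pattern, written as free functions with the framework constants passed in so the snippet stands alone; it is not a verbatim copy of either effect.

```cpp
// Sketch only: version-dependent selection of the "data MQ not empty" bit and the
// wait-and-verify step from the worker loop.
#include <fmq/EventFlag.h>
#include <utils/Errors.h>

// open(): pick the flag bit once; reopen-capable clients use a dedicated data-MQ bit.
uint32_t pickDataMqNotEmptyFlag(int version, int reopenSupportedVersion,
                                uint32_t dataMqNotEmptyBit, uint32_t legacyNotEmptyBit) {
    return version >= reopenSupportedVersion ? dataMqNotEmptyBit : legacyNotEmptyBit;
}

// process(): wait for that bit, retry on spurious wake-ups, and confirm the bit is
// actually set in the returned state before touching the message queues.
bool waitForDataMq(::android::hardware::EventFlag* eventFlag, uint32_t dataMqNotEmptyEf) {
    uint32_t efState = 0;
    return eventFlag != nullptr &&
           ::android::OK == eventFlag->wait(dataMqNotEmptyEf, &efState, 0 /* no timeout */,
                                            true /* retry */) &&
           (efState & dataMqNotEmptyEf) != 0;
}
```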
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
index 4897888..e850ba4 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
@@ -30,11 +30,8 @@
static const Descriptor kDescriptor;
static const Capability kCapability;
- DynamicsProcessingImpl() { LOG(DEBUG) << __func__; }
- ~DynamicsProcessingImpl() {
- cleanUp();
- LOG(DEBUG) << __func__;
- }
+ DynamicsProcessingImpl() = default;
+ ~DynamicsProcessingImpl() { cleanUp(); }
ndk::ScopedAStatus open(const Parameter::Common& common,
const std::optional<Parameter::Specific>& specific,
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index 311d60a..ada301b 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -29,16 +29,10 @@
DynamicsProcessingContext::DynamicsProcessingContext(int statusDepth,
const Parameter::Common& common)
: EffectContext(statusDepth, common) {
- LOG(DEBUG) << __func__;
init();
}
-DynamicsProcessingContext::~DynamicsProcessingContext() {
- LOG(DEBUG) << __func__;
-}
-
RetCode DynamicsProcessingContext::enable() {
- std::lock_guard lg(mMutex);
if (mState != DYNAMICS_PROCESSING_STATE_INITIALIZED) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -47,7 +41,6 @@
}
RetCode DynamicsProcessingContext::disable() {
- std::lock_guard lg(mMutex);
if (mState != DYNAMICS_PROCESSING_STATE_ACTIVE) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -56,7 +49,6 @@
}
void DynamicsProcessingContext::reset() {
- std::lock_guard lg(mMutex);
if (mDpFreq != nullptr) {
mDpFreq.reset();
}
@@ -68,12 +60,10 @@
}
mCommon = common;
init();
- LOG(INFO) << __func__ << common.toString();
return RetCode::SUCCESS;
}
RetCode DynamicsProcessingContext::setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) {
- std::lock_guard lg(mMutex);
dp_fx::DPChannel* leftChannel = mDpFreq->getChannel(0);
dp_fx::DPChannel* rightChannel = mDpFreq->getChannel(1);
if (leftChannel != nullptr) {
@@ -99,8 +89,8 @@
int32_t sampleRate = mCommon.input.base.sampleRate;
int32_t minBlockSize = (int32_t)dp_fx::DPFrequency::getMinBockSize();
int32_t block = engine.preferredProcessingDurationMs * sampleRate / 1000.0f;
- LOG(INFO) << __func__ << " sampleRate " << sampleRate << " block length "
- << engine.preferredProcessingDurationMs << " ms (" << block << "samples)";
+ LOG(VERBOSE) << __func__ << " sampleRate " << sampleRate << " block length "
+ << engine.preferredProcessingDurationMs << " ms (" << block << "samples)";
if (block < minBlockSize) {
block = minBlockSize;
} else if (!powerof2(block)) {
@@ -112,7 +102,6 @@
RetCode DynamicsProcessingContext::setEngineArchitecture(
const DynamicsProcessing::EngineArchitecture& engineArchitecture) {
- std::lock_guard lg(mMutex);
if (!mEngineInited || mEngineArchitecture != engineArchitecture) {
if (engineArchitecture.resolutionPreference ==
DynamicsProcessing::ResolutionPreference::FAVOR_FREQUENCY_RESOLUTION) {
@@ -124,36 +113,26 @@
mEngineInited = true;
mEngineArchitecture = engineArchitecture;
}
- LOG(INFO) << __func__ << engineArchitecture.toString();
return RetCode::SUCCESS;
}
RetCode DynamicsProcessingContext::setPreEq(
const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
- std::lock_guard lg(mMutex);
- return setDpChannels_l<dp_fx::DPEq>(channels, mEngineArchitecture.preEqStage.inUse,
- StageType::PREEQ);
+ return setDpChannels_l<dp_fx::DPEq>(channels, StageType::PREEQ);
}
RetCode DynamicsProcessingContext::setPostEq(
const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
- std::lock_guard lg(mMutex);
- return setDpChannels_l<dp_fx::DPEq>(channels, mEngineArchitecture.postEqStage.inUse,
- StageType::POSTEQ);
+ return setDpChannels_l<dp_fx::DPEq>(channels, StageType::POSTEQ);
}
RetCode DynamicsProcessingContext::setMbc(
const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
- std::lock_guard lg(mMutex);
- return setDpChannels_l<dp_fx::DPMbc>(channels, mEngineArchitecture.mbcStage.inUse,
- StageType::MBC);
+ return setDpChannels_l<dp_fx::DPMbc>(channels, StageType::MBC);
}
RetCode DynamicsProcessingContext::setPreEqBand(
const std::vector<DynamicsProcessing::EqBandConfig>& bands) {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(!mEngineArchitecture.preEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
- "preEqNotInUse");
RETURN_VALUE_IF(
!validateBandConfig(bands, mChannelCount, mEngineArchitecture.preEqStage.bandCount),
RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
@@ -162,9 +141,6 @@
RetCode DynamicsProcessingContext::setPostEqBand(
const std::vector<DynamicsProcessing::EqBandConfig>& bands) {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(!mEngineArchitecture.postEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
- "postEqNotInUse");
RETURN_VALUE_IF(
!validateBandConfig(bands, mChannelCount, mEngineArchitecture.postEqStage.bandCount),
RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
@@ -173,9 +149,6 @@
RetCode DynamicsProcessingContext::setMbcBand(
const std::vector<DynamicsProcessing::MbcBandConfig>& bands) {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(!mEngineArchitecture.mbcStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
- "mbcNotInUse");
RETURN_VALUE_IF(
!validateBandConfig(bands, mChannelCount, mEngineArchitecture.mbcStage.bandCount),
RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
@@ -184,9 +157,6 @@
RetCode DynamicsProcessingContext::setLimiter(
const std::vector<DynamicsProcessing::LimiterConfig>& limiters) {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(!mEngineArchitecture.limiterInUse, RetCode::ERROR_ILLEGAL_PARAMETER,
- "limiterNotInUse");
RETURN_VALUE_IF(!validateLimiterConfig(limiters, mChannelCount),
RetCode::ERROR_ILLEGAL_PARAMETER, "limiterConfigNotValid");
return setBands_l<DynamicsProcessing::LimiterConfig>(limiters, StageType::LIMITER);
@@ -194,15 +164,12 @@
RetCode DynamicsProcessingContext::setInputGain(
const std::vector<DynamicsProcessing::InputGain>& inputGains) {
- std::lock_guard lg(mMutex);
RETURN_VALUE_IF(!validateInputGainConfig(inputGains, mChannelCount),
RetCode::ERROR_ILLEGAL_PARAMETER, "inputGainNotValid");
return setBands_l<DynamicsProcessing::InputGain>(inputGains, StageType::INPUTGAIN);
}
DynamicsProcessing::EngineArchitecture DynamicsProcessingContext::getEngineArchitecture() {
- std::lock_guard lg(mMutex);
- LOG(INFO) << __func__ << mEngineArchitecture.toString();
return mEngineArchitecture;
}
@@ -228,8 +195,6 @@
std::vector<DynamicsProcessing::MbcBandConfig> DynamicsProcessingContext::getMbcBand() {
std::vector<DynamicsProcessing::MbcBandConfig> bands;
-
- std::lock_guard lg(mMutex);
auto maxBand = mEngineArchitecture.mbcStage.bandCount;
for (int32_t ch = 0; ch < mChannelCount; ch++) {
auto mbc = getMbc_l(ch);
@@ -261,8 +226,6 @@
std::vector<DynamicsProcessing::LimiterConfig> DynamicsProcessingContext::getLimiter() {
std::vector<DynamicsProcessing::LimiterConfig> ret;
-
- std::lock_guard lg(mMutex);
for (int32_t ch = 0; ch < mChannelCount; ch++) {
auto limiter = getLimiter_l(ch);
if (!limiter) {
@@ -282,8 +245,6 @@
std::vector<DynamicsProcessing::InputGain> DynamicsProcessingContext::getInputGain() {
std::vector<DynamicsProcessing::InputGain> ret;
-
- std::lock_guard lg(mMutex);
for (int32_t ch = 0; ch < mChannelCount; ch++) {
auto channel = getChannel_l(ch);
if (!channel) {
@@ -295,26 +256,20 @@
}
IEffect::Status DynamicsProcessingContext::dpeProcess(float* in, float* out, int samples) {
- LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
IEffect::Status status = {EX_NULL_POINTER, 0, 0};
RETURN_VALUE_IF(!in, status, "nullInput");
RETURN_VALUE_IF(!out, status, "nullOutput");
status = {EX_ILLEGAL_STATE, 0, 0};
- LOG(DEBUG) << __func__ << " start processing";
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(mState != DynamicsProcessingState::DYNAMICS_PROCESSING_STATE_ACTIVE, status,
- "notInActiveState");
- RETURN_VALUE_IF(!mDpFreq, status, "engineNotInited");
- mDpFreq->processSamples(in, out, samples);
- }
+ RETURN_VALUE_IF(mState != DynamicsProcessingState::DYNAMICS_PROCESSING_STATE_ACTIVE, status,
+ "notInActiveState");
+ RETURN_VALUE_IF(!mDpFreq, status, "engineNotInited");
+ mDpFreq->processSamples(in, out, samples);
return {STATUS_OK, samples, samples};
}
void DynamicsProcessingContext::init() {
- std::lock_guard lg(mMutex);
if (mState == DYNAMICS_PROCESSING_STATE_UNINITIALIZED) {
mState = DYNAMICS_PROCESSING_STATE_INITIALIZED;
}
@@ -399,7 +354,6 @@
StageType type) {
std::vector<DynamicsProcessing::ChannelConfig> ret;
- std::lock_guard lg(mMutex);
for (int32_t ch = 0; ch < mChannelCount; ch++) {
auto stage = getStageWithType_l(type, ch);
if (!stage) {
@@ -414,7 +368,6 @@
StageType type) {
std::vector<DynamicsProcessing::EqBandConfig> eqBands;
- std::lock_guard lg(mMutex);
auto maxBand = mEngineArchitecture.preEqStage.bandCount;
for (int32_t ch = 0; ch < mChannelCount; ch++) {
auto eq = getEqWithType_l(type, ch);
@@ -455,9 +408,7 @@
}
freqs[band.band] = band.cutoffFrequencyHz;
}
- return std::is_sorted(freqs.begin(), freqs.end(), [](const auto& a, const auto& b) {
- return a.second <= b.second; //index is already sorted as map key
- });
+ return true;
}
bool DynamicsProcessingContext::validateLimiterConfig(
@@ -478,17 +429,10 @@
template <typename D>
RetCode DynamicsProcessingContext::setDpChannels_l(
- const std::vector<DynamicsProcessing::ChannelConfig>& channels, bool stageInUse,
- StageType type) {
+ const std::vector<DynamicsProcessing::ChannelConfig>& channels, StageType type) {
RetCode ret = RetCode::SUCCESS;
std::unordered_set<int> channelSet;
- if (!stageInUse) {
- LOG(WARNING) << __func__ << " not in use " << ::android::internal::ToString(channels);
- return RetCode::ERROR_ILLEGAL_PARAMETER;
- }
-
- RETURN_VALUE_IF(!stageInUse, RetCode::ERROR_ILLEGAL_PARAMETER, "stageNotInUse");
for (auto& it : channels) {
if (0 != channelSet.count(it.channel)) {
LOG(WARNING) << __func__ << " duplicated channel " << it.channel;
@@ -509,7 +453,6 @@
continue;
}
if (dp->isEnabled() != it.enable) {
- LOG(INFO) << __func__ << it.toString();
dp->setEnabled(it.enable);
}
}
@@ -590,7 +533,6 @@
ret = RetCode::ERROR_ILLEGAL_PARAMETER;
continue;
}
- LOG(INFO) << __func__ << it.toString();
}
return ret;
}
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
index 839c6dd..ce657db 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
@@ -16,7 +16,6 @@
#pragma once
-#include <android-base/thread_annotations.h>
#include <audio_effects/effect_dynamicsprocessing.h>
#include "effect-impl/EffectContext.h"
@@ -37,8 +36,7 @@
class DynamicsProcessingContext final : public EffectContext {
public:
DynamicsProcessingContext(int statusDepth, const Parameter::Common& common);
- ~DynamicsProcessingContext();
-
+ ~DynamicsProcessingContext() = default;
RetCode enable();
RetCode disable();
void reset();
@@ -73,12 +71,11 @@
private:
static constexpr float kPreferredProcessingDurationMs = 10.0f;
static constexpr int kBandCount = 5;
- std::mutex mMutex;
- int mChannelCount GUARDED_BY(mMutex) = 0;
- DynamicsProcessingState mState GUARDED_BY(mMutex) = DYNAMICS_PROCESSING_STATE_UNINITIALIZED;
- std::unique_ptr<dp_fx::DPFrequency> mDpFreq GUARDED_BY(mMutex) = nullptr;
- bool mEngineInited GUARDED_BY(mMutex) = false;
- DynamicsProcessing::EngineArchitecture mEngineArchitecture GUARDED_BY(mMutex) = {
+ int mChannelCount = 0;
+ DynamicsProcessingState mState = DYNAMICS_PROCESSING_STATE_UNINITIALIZED;
+ std::unique_ptr<dp_fx::DPFrequency> mDpFreq = nullptr;
+ bool mEngineInited = false;
+ DynamicsProcessing::EngineArchitecture mEngineArchitecture = {
.resolutionPreference =
DynamicsProcessing::ResolutionPreference::FAVOR_FREQUENCY_RESOLUTION,
.preferredProcessingDurationMs = kPreferredProcessingDurationMs,
@@ -92,22 +89,21 @@
void init();
- void dpSetFreqDomainVariant_l(const DynamicsProcessing::EngineArchitecture& engine)
- REQUIRES(mMutex);
- dp_fx::DPChannel* getChannel_l(int ch) REQUIRES(mMutex);
- dp_fx::DPEq* getPreEq_l(int ch) REQUIRES(mMutex);
- dp_fx::DPEq* getPostEq_l(int ch) REQUIRES(mMutex);
- dp_fx::DPMbc* getMbc_l(int ch) REQUIRES(mMutex);
- dp_fx::DPLimiter* getLimiter_l(int ch) REQUIRES(mMutex);
- dp_fx::DPBandStage* getStageWithType_l(StageType type, int ch) REQUIRES(mMutex);
- dp_fx::DPEq* getEqWithType_l(StageType type, int ch) REQUIRES(mMutex);
+ void dpSetFreqDomainVariant_l(const DynamicsProcessing::EngineArchitecture& engine);
+ dp_fx::DPChannel* getChannel_l(int ch);
+ dp_fx::DPEq* getPreEq_l(int ch);
+ dp_fx::DPEq* getPostEq_l(int ch);
+ dp_fx::DPMbc* getMbc_l(int ch);
+ dp_fx::DPLimiter* getLimiter_l(int ch);
+ dp_fx::DPBandStage* getStageWithType_l(StageType type, int ch);
+ dp_fx::DPEq* getEqWithType_l(StageType type, int ch);
template <typename D>
RetCode setDpChannels_l(const std::vector<DynamicsProcessing::ChannelConfig>& channels,
- bool stageInUse, StageType type) REQUIRES(mMutex);
+ StageType type);
template <typename T /* BandConfig */>
- RetCode setBands_l(const std::vector<T>& bands, StageType type) REQUIRES(mMutex);
+ RetCode setBands_l(const std::vector<T>& bands, StageType type);
RetCode setDpChannelBand_l(const std::any& anyConfig, StageType type,
- std::set<std::pair<int, int>>& chBandSet) REQUIRES(mMutex);
+ std::set<std::pair<int, int>>& chBandSet);
std::vector<DynamicsProcessing::EqBandConfig> getEqBandConfigs(StageType type);
std::vector<DynamicsProcessing::ChannelConfig> getChannelConfig(StageType type);
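A recurring pattern in the context changes above (and in the haptic generator and loudness enhancer contexts below) is the removal of the per-context mMutex together with its GUARDED_BY/REQUIRES annotations, while the implementation headers keep REQUIRES(mImplMutex) on their entry points; this suggests serialization now happens one level up, in the effect implementation. A minimal sketch of that shape with hypothetical class names; this is not the actual EffectImpl/EffectContext API.

```cpp
// Illustrative sketch: a single implementation-level mutex guards the context, so the
// context itself no longer carries a std::mutex or GUARDED_BY members.
#include <mutex>
#include <android-base/thread_annotations.h>

class ContextSketch {
  public:
    // Context methods assume the caller already serializes access.
    void setEngineArchitectureSketch(int bandCount) { mBandCount = bandCount; }

  private:
    int mBandCount = 0;  // no GUARDED_BY needed once the lock lives in the impl
};

class ImplSketch {
  public:
    // Entry points take the implementation lock, mirroring REQUIRES(mImplMutex) on
    // commandImpl() in the headers above.
    void setParameterSketch(int bandCount) {
        std::lock_guard lg(mImplMutex);
        mContext.setEngineArchitectureSketch(bandCount);
    }

  private:
    mutable std::mutex mImplMutex;
    ContextSketch mContext GUARDED_BY(mImplMutex);
};
```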
diff --git a/media/libeffects/factory/Android.bp b/media/libeffects/factory/Android.bp
index d94093e..01eb8ee 100644
--- a/media/libeffects/factory/Android.bp
+++ b/media/libeffects/factory/Android.bp
@@ -20,21 +20,21 @@
name: "libeffects",
vendor: true,
srcs: [
- "EffectsFactory.c",
- "EffectsConfigLoader.c",
- "EffectsFactoryState.c",
- "EffectsXmlConfigLoader.cpp",
+ "EffectsConfigLoader.c",
+ "EffectsFactory.c",
+ "EffectsFactoryState.c",
+ "EffectsXmlConfigLoader.cpp",
],
shared_libs: [
"libcutils",
- "liblog",
"libdl",
"libeffectsconfig",
+ "liblog",
],
cflags: ["-fvisibility=hidden"],
- local_include_dirs:["include/media"],
+ local_include_dirs: ["include/media"],
header_libs: [
"libaudioeffects",
@@ -53,13 +53,16 @@
cflags: [
"-Wall",
- "-Wextra",
"-Werror",
+ "-Wextra",
],
shared_libs: [
- "libeffectsconfig",
"libeffects",
+ "libeffectsconfig",
],
- local_include_dirs:[".", "include"],
+ local_include_dirs: [
+ ".",
+ "include",
+ ],
}
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
index 7d96b53..9975f75 100644
--- a/media/libeffects/hapticgenerator/Android.bp
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -23,7 +23,7 @@
}
cc_defaults {
- name : "hapticgeneratordefaults",
+ name: "hapticgeneratordefaults",
srcs: [
"Processors.cpp",
],
@@ -54,13 +54,15 @@
],
cflags: [
- "-O2", // Turning on the optimization in order to reduce effect processing time.
- // The latency is around 1/5 less than without the optimization.
+ // Turning on the optimization in order to reduce effect processing time.
+ // The latency is around 1/5 less than without the optimization.
+ "-O2",
"-Wall",
"-Werror",
- "-ffast-math", // This is needed for the non-zero coefficients optimization for
- // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
- // with/without `-ffast-math` for more context.
+ // This is needed for the non-zero coefficients optimization for
+ // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
+ // with/without `-ffast-math` for more context.
+ "-ffast-math",
"-fvisibility=hidden",
],
}
@@ -69,9 +71,9 @@
name: "libhapticgeneratoraidl",
srcs: [
+ ":effectCommonFile",
"aidl/EffectHapticGenerator.cpp",
"aidl/HapticGeneratorContext.cpp",
- ":effectCommonFile",
],
defaults: [
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
index 2d3bdd0..b803ee4 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
@@ -37,7 +37,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<HapticGeneratorImpl>();
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -67,7 +66,6 @@
ndk::ScopedAStatus HapticGeneratorImpl::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
- LOG(DEBUG) << __func__ << kDescriptor.toString();
*_aidl_return = kDescriptor;
return ndk::ScopedAStatus::ok();
}
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
index 53dcd49..a775f06 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
@@ -27,11 +27,8 @@
public:
static const std::string kEffectName;
static const Descriptor kDescriptor;
- HapticGeneratorImpl() { LOG(DEBUG) << __func__; }
- ~HapticGeneratorImpl() {
- cleanUp();
- LOG(DEBUG) << __func__;
- }
+ HapticGeneratorImpl() = default;
+ ~HapticGeneratorImpl() { cleanUp(); }
ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index e671543..e4b0484 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -28,7 +28,6 @@
HapticGeneratorContext::HapticGeneratorContext(int statusDepth, const Parameter::Common& common)
: EffectContext(statusDepth, common) {
- LOG(DEBUG) << __func__;
mState = HAPTIC_GENERATOR_STATE_UNINITIALIZED;
mSampleRate = common.input.base.sampleRate;
mFrameCount = common.input.frameCount;
@@ -36,7 +35,6 @@
}
HapticGeneratorContext::~HapticGeneratorContext() {
- LOG(DEBUG) << __func__;
mState = HAPTIC_GENERATOR_STATE_UNINITIALIZED;
}
@@ -70,7 +68,6 @@
RetCode HapticGeneratorContext::setHgHapticScales(
const std::vector<HapticGenerator::HapticScale>& hapticScales) {
- std::lock_guard lg(mMutex);
for (auto hapticScale : hapticScales) {
mParams.mHapticScales.insert_or_assign(hapticScale.id, hapticScale.scale);
}
@@ -82,13 +79,11 @@
}
HapticGenerator::VibratorInformation HapticGeneratorContext::getHgVibratorInformation() {
- std::lock_guard lg(mMutex);
return mParams.mVibratorInfo;
}
std::vector<HapticGenerator::HapticScale> HapticGeneratorContext::getHgHapticScales() {
std::vector<HapticGenerator::HapticScale> result;
- std::lock_guard lg(mMutex);
for (const auto& [id, vibratorScale] : mParams.mHapticScales) {
result.push_back({id, vibratorScale});
}
@@ -97,30 +92,23 @@
RetCode HapticGeneratorContext::setHgVibratorInformation(
const HapticGenerator::VibratorInformation& vibratorInfo) {
- {
- std::lock_guard lg(mMutex);
- mParams.mVibratorInfo = vibratorInfo;
+ mParams.mVibratorInfo = vibratorInfo;
- if (mProcessorsRecord.bpf != nullptr) {
- mProcessorsRecord.bpf->setCoefficients(
- ::android::audio_effect::haptic_generator::bpfCoefs(
- mParams.mVibratorInfo.resonantFrequencyHz, DEFAULT_BPF_Q, mSampleRate));
- }
- if (mProcessorsRecord.bsf != nullptr) {
- mProcessorsRecord.bsf->setCoefficients(
- ::android::audio_effect::haptic_generator::bsfCoefs(
- mParams.mVibratorInfo.resonantFrequencyHz,
- mParams.mVibratorInfo.qFactor, mParams.mVibratorInfo.qFactor / 2.0f,
- mSampleRate));
- }
+ if (mProcessorsRecord.bpf != nullptr) {
+ mProcessorsRecord.bpf->setCoefficients(::android::audio_effect::haptic_generator::bpfCoefs(
+ mParams.mVibratorInfo.resonantFrequencyHz, DEFAULT_BPF_Q, mSampleRate));
}
+ if (mProcessorsRecord.bsf != nullptr) {
+ mProcessorsRecord.bsf->setCoefficients(::android::audio_effect::haptic_generator::bsfCoefs(
+ mParams.mVibratorInfo.resonantFrequencyHz, mParams.mVibratorInfo.qFactor,
+ mParams.mVibratorInfo.qFactor / 2.0f, mSampleRate));
+ }
+
configure();
return RetCode::SUCCESS;
}
IEffect::Status HapticGeneratorContext::process(float* in, float* out, int samples) {
- LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
-
IEffect::Status status = {EX_NULL_POINTER, 0, 0};
RETURN_VALUE_IF(!in, status, "nullInput");
RETURN_VALUE_IF(!out, status, "nullOutput");
@@ -129,17 +117,11 @@
auto frameSize = getInputFrameSize();
RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
- LOG(DEBUG) << __func__ << " start processing";
// The audio data must not be modified but just written to
// output buffer according the access mode.
- bool accumulate = false;
if (in != out) {
for (int i = 0; i < samples; i++) {
- if (accumulate) {
- out[i] += in[i];
- } else {
- out[i] = in[i];
- }
+ out[i] = in[i];
}
}
@@ -147,7 +129,6 @@
return status;
}
- std::lock_guard lg(mMutex);
if (mParams.mMaxVibratorScale == HapticGenerator::VibratorScale::MUTE) {
// Haptic channels are muted, no need to generate haptic data.
return {STATUS_OK, samples, samples};
@@ -189,7 +170,6 @@
void HapticGeneratorContext::init_params(media::audio::common::AudioChannelLayout inputChMask,
media::audio::common::AudioChannelLayout outputChMask) {
- std::lock_guard lg(mMutex);
mParams.mMaxVibratorScale = HapticGenerator::VibratorScale::MUTE;
mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
@@ -210,7 +190,6 @@
float HapticGeneratorContext::getDistortionOutputGain() {
float distortionOutputGain = getFloatProperty(
"vendor.audio.hapticgenerator.distortion.output.gain", DEFAULT_DISTORTION_OUTPUT_GAIN);
- LOG(DEBUG) << "Using distortion output gain as " << distortionOutputGain;
return distortionOutputGain;
}
@@ -237,7 +216,6 @@
* Build haptic generator processing chain.
*/
void HapticGeneratorContext::buildProcessingChain() {
- std::lock_guard lg(mMutex);
const size_t channelCount = mParams.mHapticChannelCount;
float highPassCornerFrequency = 50.0f;
auto hpf = ::android::audio_effect::haptic_generator::createHPF2(highPassCornerFrequency,
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
index 8618b7b..3a2ad1c 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
@@ -16,7 +16,6 @@
#pragma once
-#include <android-base/thread_annotations.h>
#include <vibrator/ExternalVibrationUtils.h>
#include <map>
@@ -88,9 +87,8 @@
static constexpr float DEFAULT_DISTORTION_INPUT_GAIN = 0.3f;
static constexpr float DEFAULT_DISTORTION_CUBE_THRESHOLD = 0.1f;
- std::mutex mMutex;
HapticGeneratorState mState;
- HapticGeneratorParam mParams GUARDED_BY(mMutex);
+ HapticGeneratorParam mParams;
int mSampleRate;
int64_t mFrameCount = 0;
diff --git a/media/libeffects/loudness/Android.bp b/media/libeffects/loudness/Android.bp
index 46e4669..4f04ffb 100644
--- a/media/libeffects/loudness/Android.bp
+++ b/media/libeffects/loudness/Android.bp
@@ -48,10 +48,10 @@
cc_library_shared {
name: "libloudnessenhanceraidl",
srcs: [
+ ":effectCommonFile",
"aidl/EffectLoudnessEnhancer.cpp",
"aidl/LoudnessEnhancerContext.cpp",
"dsp/core/dynamic_range_compression.cpp",
- ":effectCommonFile",
],
defaults: [
"aidlaudioeffectservice_defaults",
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
index bcf0db6..f89606e 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
@@ -37,7 +37,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<LoudnessEnhancerImpl>();
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -67,7 +66,6 @@
ndk::ScopedAStatus LoudnessEnhancerImpl::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
- LOG(DEBUG) << __func__ << kDescriptor.toString();
*_aidl_return = kDescriptor;
return ndk::ScopedAStatus::ok();
}
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
index e2e716c..98bdc6b 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
@@ -27,11 +27,8 @@
public:
static const std::string kEffectName;
static const Descriptor kDescriptor;
- LoudnessEnhancerImpl() { LOG(DEBUG) << __func__; }
- ~LoudnessEnhancerImpl() {
- cleanUp();
- LOG(DEBUG) << __func__;
- }
+ LoudnessEnhancerImpl() = default;
+ ~LoudnessEnhancerImpl() { cleanUp(); }
ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
index be914bf..d8bcfc0 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
@@ -24,16 +24,10 @@
LoudnessEnhancerContext::LoudnessEnhancerContext(int statusDepth, const Parameter::Common& common)
: EffectContext(statusDepth, common) {
- LOG(DEBUG) << __func__;
init_params();
}
-LoudnessEnhancerContext::~LoudnessEnhancerContext() {
- LOG(DEBUG) << __func__;
-}
-
RetCode LoudnessEnhancerContext::enable() {
- std::lock_guard lg(mMutex);
if (mState != LOUDNESS_ENHANCER_STATE_INITIALIZED) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -42,7 +36,6 @@
}
RetCode LoudnessEnhancerContext::disable() {
- std::lock_guard lg(mMutex);
if (mState != LOUDNESS_ENHANCER_STATE_ACTIVE) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -52,7 +45,6 @@
void LoudnessEnhancerContext::reset() {
float targetAmp = pow(10, mGain / 2000.0f); // mB to linear amplification
- std::lock_guard lg(mMutex);
if (mCompressor != nullptr) {
// Get samplingRate from input
mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
@@ -66,8 +58,6 @@
}
IEffect::Status LoudnessEnhancerContext::process(float* in, float* out, int samples) {
- LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
-
IEffect::Status status = {EX_NULL_POINTER, 0, 0};
RETURN_VALUE_IF(!in, status, "nullInput");
RETURN_VALUE_IF(!out, status, "nullOutput");
@@ -76,11 +66,9 @@
auto frameSize = getInputFrameSize();
RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
- std::lock_guard lg(mMutex);
status = {STATUS_INVALID_OPERATION, 0, 0};
RETURN_VALUE_IF(mState != LOUDNESS_ENHANCER_STATE_ACTIVE, status, "stateNotActive");
- LOG(DEBUG) << __func__ << " start processing";
// PcmType is always expected to be Float 32 bit.
constexpr float scale = 1 << 15; // power of 2 is lossless conversion to int16_t range
constexpr float inverseScale = 1.f / scale;
@@ -124,9 +112,8 @@
mGain = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
float targetAmp = pow(10, mGain / 2000.0f); // mB to linear amplification
- LOG(DEBUG) << __func__ << "Target gain = " << mGain << "mB <=> factor = " << targetAmp;
+ LOG(VERBOSE) << __func__ << "Target gain = " << mGain << "mB <=> factor = " << targetAmp;
- std::lock_guard lg(mMutex);
mCompressor = std::make_unique<le_fx::AdaptiveDynamicRangeCompression>();
mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
mState = LOUDNESS_ENHANCER_STATE_INITIALIZED;
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
index fd688d7..192b212 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
@@ -16,7 +16,6 @@
#pragma once
-#include <android-base/thread_annotations.h>
#include <audio_effects/effect_loudnessenhancer.h>
#include "dsp/core/dynamic_range_compression.h"
@@ -33,7 +32,7 @@
class LoudnessEnhancerContext final : public EffectContext {
public:
LoudnessEnhancerContext(int statusDepth, const Parameter::Common& common);
- ~LoudnessEnhancerContext();
+ ~LoudnessEnhancerContext() = default;
RetCode enable();
RetCode disable();
@@ -45,12 +44,11 @@
IEffect::Status process(float* in, float* out, int samples);
private:
- std::mutex mMutex;
- LoudnessEnhancerState mState GUARDED_BY(mMutex) = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
+ LoudnessEnhancerState mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
int mGain = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
// In this implementation, there is no coupling between the compression on the left and right
// channels
- std::unique_ptr<le_fx::AdaptiveDynamicRangeCompression> mCompressor GUARDED_BY(mMutex);
+ std::unique_ptr<le_fx::AdaptiveDynamicRangeCompression> mCompressor;
void init_params();
};
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index 7998879..be20684 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -30,6 +30,60 @@
vendor: true,
host_supported: true,
srcs: [
+ "Bass/src/LVDBE_Control.cpp",
+ "Bass/src/LVDBE_Init.cpp",
+ "Bass/src/LVDBE_Process.cpp",
+ "Bass/src/LVDBE_Tables.cpp",
+ "Bundle/src/LVM_API_Specials.cpp",
+ "Bundle/src/LVM_Buffers.cpp",
+ "Bundle/src/LVM_Control.cpp",
+ "Bundle/src/LVM_Init.cpp",
+ "Bundle/src/LVM_Process.cpp",
+ "Bundle/src/LVM_Tables.cpp",
+ "Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp",
+ "Common/src/Add2_Sat_32x32.cpp",
+ "Common/src/Copy_16.cpp",
+ "Common/src/DC_2I_D16_TRC_WRA_01.cpp",
+ "Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp",
+ "Common/src/DelayMix_16x16.cpp",
+ "Common/src/From2iToMS_16x16.cpp",
+ "Common/src/From2iToMono_32.cpp",
+ "Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp",
+ "Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp",
+ "Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp",
+ "Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp",
+ "Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp",
+ "Common/src/LVC_MixInSoft_D16C31_SAT.cpp",
+ "Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp",
+ "Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp",
+ "Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp",
+ "Common/src/LVC_Mixer_GetCurrent.cpp",
+ "Common/src/LVC_Mixer_GetTarget.cpp",
+ "Common/src/LVC_Mixer_Init.cpp",
+ "Common/src/LVC_Mixer_SetTarget.cpp",
+ "Common/src/LVC_Mixer_SetTimeConstant.cpp",
+ "Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp",
+ "Common/src/LVM_Timer.cpp",
+ "Common/src/LVM_Timer_Init.cpp",
+ "Common/src/MSTo2i_Sat_16x16.cpp",
+ "Common/src/Mac3s_Sat_32x16.cpp",
+ "Common/src/MonoTo2I_32.cpp",
+ "Common/src/Mult3s_32x16.cpp",
+ "Common/src/NonLinComp_D16.cpp",
+ "Common/src/Shift_Sat_v16xv16.cpp",
+ "Common/src/Shift_Sat_v32xv32.cpp",
+ "Common/src/dB_to_Lin32.cpp",
+ "Eq/src/LVEQNB_CalcCoef.cpp",
+ "Eq/src/LVEQNB_Control.cpp",
+ "Eq/src/LVEQNB_Init.cpp",
+ "Eq/src/LVEQNB_Process.cpp",
+ "Eq/src/LVEQNB_Tables.cpp",
+ "SpectrumAnalyzer/src/LVPSA_Control.cpp",
+ "SpectrumAnalyzer/src/LVPSA_Init.cpp",
+ "SpectrumAnalyzer/src/LVPSA_Process.cpp",
+ "SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp",
+ "SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp",
+ "SpectrumAnalyzer/src/LVPSA_Tables.cpp",
"StereoWidening/src/LVCS_BypassMix.cpp",
"StereoWidening/src/LVCS_Control.cpp",
"StereoWidening/src/LVCS_Equaliser.cpp",
@@ -38,77 +92,23 @@
"StereoWidening/src/LVCS_ReverbGenerator.cpp",
"StereoWidening/src/LVCS_StereoEnhancer.cpp",
"StereoWidening/src/LVCS_Tables.cpp",
- "Bass/src/LVDBE_Control.cpp",
- "Bass/src/LVDBE_Init.cpp",
- "Bass/src/LVDBE_Process.cpp",
- "Bass/src/LVDBE_Tables.cpp",
- "Bundle/src/LVM_API_Specials.cpp",
- "Bundle/src/LVM_Buffers.cpp",
- "Bundle/src/LVM_Init.cpp",
- "Bundle/src/LVM_Process.cpp",
- "Bundle/src/LVM_Tables.cpp",
- "Bundle/src/LVM_Control.cpp",
- "SpectrumAnalyzer/src/LVPSA_Control.cpp",
- "SpectrumAnalyzer/src/LVPSA_Init.cpp",
- "SpectrumAnalyzer/src/LVPSA_Process.cpp",
- "SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp",
- "SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp",
- "SpectrumAnalyzer/src/LVPSA_Tables.cpp",
- "Eq/src/LVEQNB_CalcCoef.cpp",
- "Eq/src/LVEQNB_Control.cpp",
- "Eq/src/LVEQNB_Init.cpp",
- "Eq/src/LVEQNB_Process.cpp",
- "Eq/src/LVEQNB_Tables.cpp",
- "Common/src/DC_2I_D16_TRC_WRA_01.cpp",
- "Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp",
- "Common/src/Copy_16.cpp",
- "Common/src/MonoTo2I_32.cpp",
- "Common/src/dB_to_Lin32.cpp",
- "Common/src/Shift_Sat_v16xv16.cpp",
- "Common/src/Shift_Sat_v32xv32.cpp",
- "Common/src/From2iToMono_32.cpp",
- "Common/src/Mult3s_32x16.cpp",
- "Common/src/NonLinComp_D16.cpp",
- "Common/src/DelayMix_16x16.cpp",
- "Common/src/MSTo2i_Sat_16x16.cpp",
- "Common/src/From2iToMS_16x16.cpp",
- "Common/src/Mac3s_Sat_32x16.cpp",
- "Common/src/Add2_Sat_32x32.cpp",
- "Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp",
- "Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp",
- "Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp",
- "Common/src/LVC_Mixer_SetTimeConstant.cpp",
- "Common/src/LVC_Mixer_SetTarget.cpp",
- "Common/src/LVC_Mixer_GetTarget.cpp",
- "Common/src/LVC_Mixer_Init.cpp",
- "Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp",
- "Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp",
- "Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp",
- "Common/src/LVC_Mixer_GetCurrent.cpp",
- "Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp",
- "Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp",
- "Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp",
- "Common/src/LVC_MixInSoft_D16C31_SAT.cpp",
- "Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp",
- "Common/src/LVM_Timer.cpp",
- "Common/src/LVM_Timer_Init.cpp",
],
local_include_dirs: [
- "Eq/lib",
- "Eq/src",
"Bass/lib",
"Bass/src",
- "Common/src",
"Bundle/src",
+ "Common/src",
+ "Eq/lib",
+ "Eq/src",
"SpectrumAnalyzer/lib",
"SpectrumAnalyzer/src",
- "StereoWidening/src",
"StereoWidening/lib",
+ "StereoWidening/src",
],
export_include_dirs: [
- "Common/lib",
"Bundle/lib",
+ "Common/lib",
],
shared_libs: [
"liblog",
@@ -120,9 +120,9 @@
"libhardware_headers",
],
cppflags: [
- "-fvisibility=hidden",
"-Wall",
"-Werror",
+ "-fvisibility=hidden",
],
}
@@ -140,6 +140,26 @@
vendor: true,
host_supported: true,
srcs: [
+ "Common/src/Add2_Sat_32x32.cpp",
+ "Common/src/Copy_16.cpp",
+ "Common/src/Core_MixHard_2St_D32C31_SAT.cpp",
+ "Common/src/Core_MixInSoft_D32C31_SAT.cpp",
+ "Common/src/Core_MixSoft_1St_D32C31_WRA.cpp",
+ "Common/src/From2iToMono_32.cpp",
+ "Common/src/JoinTo2i_32x32.cpp",
+ "Common/src/LVM_FO_HPF.cpp",
+ "Common/src/LVM_FO_LPF.cpp",
+ "Common/src/LVM_GetOmega.cpp",
+ "Common/src/LVM_Mixer_TimeConstant.cpp",
+ "Common/src/LVM_Polynomial.cpp",
+ "Common/src/LVM_Power10.cpp",
+ "Common/src/Mac3s_Sat_32x16.cpp",
+ "Common/src/MixInSoft_D32C31_SAT.cpp",
+ "Common/src/MixSoft_1St_D32C31_WRA.cpp",
+ "Common/src/MixSoft_2St_D32C31_SAT.cpp",
+ "Common/src/MonoTo2I_32.cpp",
+ "Common/src/Mult3s_32x16.cpp",
+ "Common/src/Shift_Sat_v32xv32.cpp",
"Reverb/src/LVREV_ApplyNewSettings.cpp",
"Reverb/src/LVREV_ClearAudioBuffers.cpp",
"Reverb/src/LVREV_GetControlParameters.cpp",
@@ -147,42 +167,22 @@
"Reverb/src/LVREV_Process.cpp",
"Reverb/src/LVREV_SetControlParameters.cpp",
"Reverb/src/LVREV_Tables.cpp",
- "Common/src/From2iToMono_32.cpp",
- "Common/src/Mult3s_32x16.cpp",
- "Common/src/Copy_16.cpp",
- "Common/src/Mac3s_Sat_32x16.cpp",
- "Common/src/Shift_Sat_v32xv32.cpp",
- "Common/src/Add2_Sat_32x32.cpp",
- "Common/src/JoinTo2i_32x32.cpp",
- "Common/src/MonoTo2I_32.cpp",
- "Common/src/LVM_FO_HPF.cpp",
- "Common/src/LVM_FO_LPF.cpp",
- "Common/src/LVM_Polynomial.cpp",
- "Common/src/LVM_Power10.cpp",
- "Common/src/LVM_GetOmega.cpp",
- "Common/src/MixSoft_2St_D32C31_SAT.cpp",
- "Common/src/MixSoft_1St_D32C31_WRA.cpp",
- "Common/src/MixInSoft_D32C31_SAT.cpp",
- "Common/src/LVM_Mixer_TimeConstant.cpp",
- "Common/src/Core_MixHard_2St_D32C31_SAT.cpp",
- "Common/src/Core_MixSoft_1St_D32C31_WRA.cpp",
- "Common/src/Core_MixInSoft_D32C31_SAT.cpp",
],
local_include_dirs: [
- "Reverb/src",
"Common/src",
+ "Reverb/src",
],
export_include_dirs: [
- "Reverb/lib",
"Common/lib",
+ "Reverb/lib",
],
static_libs: [
"libaudioutils",
],
cppflags: [
- "-fvisibility=hidden",
"-Wall",
"-Werror",
+ "-fvisibility=hidden",
],
}
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 0568fbd..9bb3264 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -12,7 +12,7 @@
cc_test {
name: "EffectReverbTest",
defaults: [
- "libeffects-test-defaults",
+ "libeffects-test-defaults",
],
srcs: [
"EffectReverbTest.cpp",
@@ -29,7 +29,7 @@
cc_test {
name: "EffectBundleTest",
defaults: [
- "libeffects-test-defaults",
+ "libeffects-test-defaults",
],
srcs: [
"EffectBundleTest.cpp",
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index aa18deb..fff2feb 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -36,20 +36,16 @@
BundleContext::BundleContext(int statusDepth, const Parameter::Common& common,
const lvm::BundleEffectType& type)
: EffectContext(statusDepth, common), mType(type) {
- LOG(DEBUG) << __func__ << type;
-
int inputChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
common.input.base.channelMask);
mSamplesPerSecond = common.input.base.sampleRate * inputChannelCount;
}
BundleContext::~BundleContext() {
- LOG(DEBUG) << __func__;
deInit();
}
RetCode BundleContext::init() {
- std::lock_guard lg(mMutex);
// init with pre-defined preset NORMAL
for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
mBandGainmB[i] = lvm::kSoftPresets[0 /* normal */][i] * 100;
@@ -88,7 +84,6 @@
}
void BundleContext::deInit() {
- std::lock_guard lg(mMutex);
if (mInstance) {
LVM_DelInstanceHandle(&mInstance);
mInstance = nullptr;
@@ -102,27 +97,23 @@
bool tempDisabled = false;
switch (mType) {
case lvm::BundleEffectType::EQUALIZER:
- LOG(DEBUG) << __func__ << " enable bundle EQ";
if (mSamplesToExitCountEq <= 0) mNumberEffectsEnabled++;
mSamplesToExitCountEq = (mSamplesPerSecond * 0.1);
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::EQUALIZER));
break;
case lvm::BundleEffectType::BASS_BOOST:
- LOG(DEBUG) << __func__ << " enable bundle BB";
if (mSamplesToExitCountBb <= 0) mNumberEffectsEnabled++;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::BASS_BOOST));
mSamplesToExitCountBb = (mSamplesPerSecond * 0.1);
tempDisabled = mBassTempDisabled;
break;
case lvm::BundleEffectType::VIRTUALIZER:
- LOG(DEBUG) << __func__ << " enable bundle VR";
if (mSamplesToExitCountVirt <= 0) mNumberEffectsEnabled++;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VIRTUALIZER));
mSamplesToExitCountVirt = (mSamplesPerSecond * 0.1);
tempDisabled = mVirtualizerTempDisabled;
break;
case lvm::BundleEffectType::VOLUME:
- LOG(DEBUG) << __func__ << " enable bundle VOL";
if ((mEffectInDrain & (1 << int(lvm::BundleEffectType::VOLUME))) == 0)
mNumberEffectsEnabled++;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VOLUME));
@@ -134,30 +125,24 @@
RetCode BundleContext::enableOperatingMode() {
LVM_ControlParams_t params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, ¶ms),
- RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
- switch (mType) {
- case lvm::BundleEffectType::EQUALIZER:
- LOG(DEBUG) << __func__ << " enable bundle EQ";
- params.EQNB_OperatingMode = LVM_EQNB_ON;
- break;
- case lvm::BundleEffectType::BASS_BOOST:
- LOG(DEBUG) << __func__ << " enable bundle BB";
- params.BE_OperatingMode = LVM_BE_ON;
- break;
- case lvm::BundleEffectType::VIRTUALIZER:
- LOG(DEBUG) << __func__ << " enable bundle VR";
- params.VirtualizerOperatingMode = LVM_MODE_ON;
- break;
- case lvm::BundleEffectType::VOLUME:
- LOG(DEBUG) << __func__ << " enable bundle VOL";
- break;
- }
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
+ switch (mType) {
+ case lvm::BundleEffectType::EQUALIZER:
+ params.EQNB_OperatingMode = LVM_EQNB_ON;
+ break;
+ case lvm::BundleEffectType::BASS_BOOST:
+ params.BE_OperatingMode = LVM_BE_ON;
+ break;
+ case lvm::BundleEffectType::VIRTUALIZER:
+ params.VirtualizerOperatingMode = LVM_MODE_ON;
+ break;
+ case lvm::BundleEffectType::VOLUME:
+ break;
}
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
+
return limitLevel();
}
@@ -165,19 +150,15 @@
if (!mEnabled) return RetCode::ERROR_ILLEGAL_PARAMETER;
switch (mType) {
case lvm::BundleEffectType::EQUALIZER:
- LOG(DEBUG) << __func__ << " disable bundle EQ";
mEffectInDrain |= 1 << int(lvm::BundleEffectType::EQUALIZER);
break;
case lvm::BundleEffectType::BASS_BOOST:
- LOG(DEBUG) << __func__ << " disable bundle BB";
mEffectInDrain |= 1 << int(lvm::BundleEffectType::BASS_BOOST);
break;
case lvm::BundleEffectType::VIRTUALIZER:
- LOG(DEBUG) << __func__ << " disable bundle VR";
mEffectInDrain |= 1 << int(lvm::BundleEffectType::VIRTUALIZER);
break;
case lvm::BundleEffectType::VOLUME:
- LOG(DEBUG) << __func__ << " disable bundle VOL";
mEffectInDrain |= 1 << int(lvm::BundleEffectType::VOLUME);
break;
}
@@ -187,30 +168,23 @@
RetCode BundleContext::disableOperatingMode() {
LVM_ControlParams_t params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
- switch (mType) {
- case lvm::BundleEffectType::EQUALIZER:
- LOG(DEBUG) << __func__ << " disable bundle EQ";
- params.EQNB_OperatingMode = LVM_EQNB_OFF;
- break;
- case lvm::BundleEffectType::BASS_BOOST:
- LOG(DEBUG) << __func__ << " disable bundle BB";
- params.BE_OperatingMode = LVM_BE_OFF;
- break;
- case lvm::BundleEffectType::VIRTUALIZER:
- LOG(DEBUG) << __func__ << " disable bundle VR";
- params.VirtualizerOperatingMode = LVM_MODE_OFF;
- break;
- case lvm::BundleEffectType::VOLUME:
- LOG(DEBUG) << __func__ << " disable bundle VOL";
- break;
- }
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
+ switch (mType) {
+ case lvm::BundleEffectType::EQUALIZER:
+ params.EQNB_OperatingMode = LVM_EQNB_OFF;
+ break;
+ case lvm::BundleEffectType::BASS_BOOST:
+ params.BE_OperatingMode = LVM_BE_OFF;
+ break;
+ case lvm::BundleEffectType::VIRTUALIZER:
+ params.VirtualizerOperatingMode = LVM_MODE_OFF;
+ break;
+ case lvm::BundleEffectType::VOLUME:
+ break;
}
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
mEnabled = false;
return limitLevel();
}
@@ -223,89 +197,80 @@
float energyBassBoost = 0;
float crossCorrection = 0;
LVM_ControlParams_t params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- bool eqEnabled = params.EQNB_OperatingMode == LVM_EQNB_ON;
- bool bbEnabled = params.BE_OperatingMode == LVM_BE_ON;
- bool viEnabled = params.VirtualizerOperatingMode == LVM_MODE_ON;
+ bool eqEnabled = params.EQNB_OperatingMode == LVM_EQNB_ON;
+ bool bbEnabled = params.BE_OperatingMode == LVM_BE_ON;
+ bool viEnabled = params.VirtualizerOperatingMode == LVM_MODE_ON;
+
+ if (eqEnabled) {
+ for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+ float bandFactor = mBandGainmB[i] / 1500.0;
+ float bandCoefficient = lvm::kBandEnergyCoefficient[i];
+ float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
+ if (bandEnergy > 0) energyContribution += bandEnergy;
+ }
+
+ // cross EQ coefficients
+ float bandFactorSum = 0;
+ for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS - 1; i++) {
+ float bandFactor1 = mBandGainmB[i] / 1500.0;
+ float bandFactor2 = mBandGainmB[i + 1] / 1500.0;
+
+ if (bandFactor1 > 0 && bandFactor2 > 0) {
+ float crossEnergy =
+ bandFactor1 * bandFactor2 * lvm::kBandEnergyCrossCoefficient[i];
+ bandFactorSum += bandFactor1 * bandFactor2;
+
+ if (crossEnergy > 0) energyCross += crossEnergy;
+ }
+ }
+ bandFactorSum -= 1.0;
+ if (bandFactorSum > 0) crossCorrection = bandFactorSum * 0.7;
+ }
+ // BassBoost contribution
+ if (bbEnabled) {
+ float boostFactor = mBassStrengthSaved / 1000.0;
+ float boostCoefficient = lvm::kBassBoostEnergyCoefficient;
+
+ energyContribution += boostFactor * boostCoefficient * boostCoefficient;
if (eqEnabled) {
for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
float bandFactor = mBandGainmB[i] / 1500.0;
- float bandCoefficient = lvm::kBandEnergyCoefficient[i];
- float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
- if (bandEnergy > 0) energyContribution += bandEnergy;
- }
-
- // cross EQ coefficients
- float bandFactorSum = 0;
- for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS - 1; i++) {
- float bandFactor1 = mBandGainmB[i] / 1500.0;
- float bandFactor2 = mBandGainmB[i + 1] / 1500.0;
-
- if (bandFactor1 > 0 && bandFactor2 > 0) {
- float crossEnergy =
- bandFactor1 * bandFactor2 * lvm::kBandEnergyCrossCoefficient[i];
- bandFactorSum += bandFactor1 * bandFactor2;
-
- if (crossEnergy > 0) energyCross += crossEnergy;
- }
- }
- bandFactorSum -= 1.0;
- if (bandFactorSum > 0) crossCorrection = bandFactorSum * 0.7;
- }
- // BassBoost contribution
- if (bbEnabled) {
- float boostFactor = mBassStrengthSaved / 1000.0;
- float boostCoefficient = lvm::kBassBoostEnergyCoefficient;
-
- energyContribution += boostFactor * boostCoefficient * boostCoefficient;
-
- if (eqEnabled) {
- for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
- float bandFactor = mBandGainmB[i] / 1500.0;
- float bandCrossCoefficient = lvm::kBassBoostEnergyCrossCoefficient[i];
- float bandEnergy = boostFactor * bandFactor * bandCrossCoefficient;
- if (bandEnergy > 0) energyBassBoost += bandEnergy;
- }
+ float bandCrossCoefficient = lvm::kBassBoostEnergyCrossCoefficient[i];
+ float bandEnergy = boostFactor * bandFactor * bandCrossCoefficient;
+ if (bandEnergy > 0) energyBassBoost += bandEnergy;
}
}
- // Virtualizer contribution
- if (viEnabled) {
- energyContribution += lvm::kVirtualizerContribution * lvm::kVirtualizerContribution;
- }
+ }
+ // Virtualizer contribution
+ if (viEnabled) {
+ energyContribution += lvm::kVirtualizerContribution * lvm::kVirtualizerContribution;
+ }
- double totalEnergyEstimation =
- sqrt(energyContribution + energyCross + energyBassBoost) - crossCorrection;
- LOG(INFO) << " TOTAL energy estimation: " << totalEnergyEstimation << " dB";
+ double totalEnergyEstimation =
+ sqrt(energyContribution + energyCross + energyBassBoost) - crossCorrection;
- // roundoff
- int maxLevelRound = (int)(totalEnergyEstimation + 0.99);
- if (maxLevelRound + mVolumedB > 0) {
- gainCorrection = maxLevelRound + mVolumedB;
- }
+ // roundoff
+ int maxLevelRound = (int)(totalEnergyEstimation + 0.99);
+ if (maxLevelRound + mVolumedB > 0) {
+ gainCorrection = maxLevelRound + mVolumedB;
+ }
- params.VC_EffectLevel = mVolumedB - gainCorrection;
- if (params.VC_EffectLevel < -96) {
- params.VC_EffectLevel = -96;
- }
- LOG(INFO) << "\tVol: " << mVolumedB << ", GainCorrection: " << gainCorrection
- << ", Actual vol: " << params.VC_EffectLevel;
+ params.VC_EffectLevel = mVolumedB - gainCorrection;
+ if (params.VC_EffectLevel < -96) {
+ params.VC_EffectLevel = -96;
+ }
+ /* Activate the initial settings */
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- /* Activate the initial settings */
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
-
- if (mFirstVolume) {
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetVolumeNoSmoothing(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setVolumeNoSmoothingFailed");
- LOG(INFO) << "\tLVM_VOLUME: Disabling Smoothing for first volume change to remove "
- "spikes/clicks";
- mFirstVolume = false;
- }
+ if (mFirstVolume) {
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetVolumeNoSmoothing(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setVolumeNoSmoothingFailed");
+ mFirstVolume = false;
}
return RetCode::SUCCESS;
@@ -439,17 +404,13 @@
float maxdB = std::max(leftdB, rightdB);
float pandB = rightdB - leftdB;
setVolumeLevel(maxdB);
- LOG(DEBUG) << __func__ << " pandB: " << pandB << " maxdB " << maxdB;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, "");
- params.VC_Balance = pandB;
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "");
+ params.VC_Balance = pandB;
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, "");
- }
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "");
mVolumeStereo = volume;
return RetCode::SUCCESS;
}
@@ -469,7 +430,6 @@
RetCode ret = updateControlParameter(bandLevels);
if (RetCode::SUCCESS == ret) {
mCurPresetIdx = presetIdx;
- LOG(INFO) << __func__ << " success with " << presetIdx;
} else {
LOG(ERROR) << __func__ << " failed to setPreset " << presetIdx;
}
@@ -483,7 +443,6 @@
RetCode ret = updateControlParameter(bandLevels);
if (RetCode::SUCCESS == ret) {
mCurPresetIdx = lvm::PRESET_CUSTOM;
- LOG(INFO) << __func__ << " succeed with " << ::android::internal::ToString(bandLevels);
} else {
LOG(ERROR) << __func__ << " failed with " << ::android::internal::ToString(bandLevels);
}
@@ -502,14 +461,11 @@
std::vector<int32_t> BundleContext::getEqualizerCenterFreqs() {
std::vector<int32_t> freqs;
LVM_ControlParams_t params;
- {
- std::lock_guard lg(mMutex);
- /* Get the current settings */
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params), freqs,
- " getControlParamFailed");
- for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
- freqs.push_back((int32_t)params.pEQNB_BandDefinition[i].Frequency * 1000);
- }
+ /* Get the current settings */
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params), freqs,
+ " getControlParamFailed");
+ for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+ freqs.push_back((int32_t)params.pEQNB_BandDefinition[i].Frequency * 1000);
}
return freqs;
@@ -533,44 +489,36 @@
}
LVM_ControlParams_t params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
- params.pEQNB_BandDefinition[i].Frequency = lvm::kPresetsFrequencies[i];
- params.pEQNB_BandDefinition[i].QFactor = lvm::kPresetsQFactors[i];
- params.pEQNB_BandDefinition[i].Gain =
- tempLevel[i] > 0 ? (tempLevel[i] + 50) / 100 : (tempLevel[i] - 50) / 100;
- }
-
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+ for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+ params.pEQNB_BandDefinition[i].Frequency = lvm::kPresetsFrequencies[i];
+ params.pEQNB_BandDefinition[i].QFactor = lvm::kPresetsQFactors[i];
+ params.pEQNB_BandDefinition[i].Gain =
+ tempLevel[i] > 0 ? (tempLevel[i] + 50) / 100 : (tempLevel[i] - 50) / 100;
}
- mBandGainmB = tempLevel;
- LOG(DEBUG) << __func__ << " update bandGain to " << ::android::internal::ToString(mBandGainmB)
- << "mdB";
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
+ mBandGainmB = tempLevel;
return RetCode::SUCCESS;
}
RetCode BundleContext::setBassBoostStrength(int strength) {
// Update Control Parameter
LVM_ControlParams_t params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- params.BE_EffectLevel = (LVM_INT16)((15 * strength) / 1000);
- params.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
+ params.BE_EffectLevel = (LVM_INT16)((15 * strength) / 1000);
+ params.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
mBassStrengthSaved = strength;
- LOG(INFO) << __func__ << " success with strength " << strength;
return limitLevel();
}
@@ -580,7 +528,6 @@
} else {
mVolumedB = level;
}
- LOG(INFO) << __func__ << " success with level " << level;
return limitLevel();
}
@@ -602,29 +549,55 @@
RetCode BundleContext::setVirtualizerStrength(int strength) {
// Update Control Parameter
LVM_ControlParams_t params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- params.CS_EffectLevel = ((strength * 32767) / 1000);
+ params.CS_EffectLevel = ((strength * 32767) / 1000);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
mVirtStrengthSaved = strength;
- LOG(INFO) << __func__ << " success with strength " << strength;
return limitLevel();
}
RetCode BundleContext::setForcedDevice(
const ::aidl::android::media::audio::common::AudioDeviceDescription& device) {
- RETURN_VALUE_IF(true != isDeviceSupportedVirtualizer({device}), RetCode::ERROR_EFFECT_LIB_ERROR,
- " deviceNotSupportVirtualizer");
- mForceDevice = device;
- return RetCode::SUCCESS;
+ RetCode ret = RetCode::SUCCESS;
+ bool enableVirtualizer = mType == lvm::BundleEffectType::VIRTUALIZER && mEnabled;
+
+ if (isDeviceSupportedVirtualizer({device})) {
+ mVirtualizerForcedDevice = device;
+ } else {
+ // disabling forced virtualization mode
+ AudioDeviceDescription noneDevice;
+ if (device != noneDevice) {
+ // device is not supported, make it behave as a reset of forced mode but return an error
+ ret = RetCode::ERROR_ILLEGAL_PARAMETER;
+ }
+ // verify whether the virtualization should be enabled or disabled
+ if (!isDeviceSupportedVirtualizer(mOutputDevice)) {
+ enableVirtualizer = false;
+ }
+ mVirtualizerForcedDevice = noneDevice;
+ }
+
+ if (enableVirtualizer) {
+ if (mVirtualizerTempDisabled) {
+ LOG(VERBOSE) << __func__ << " re-enable virtualizer";
+ enableOperatingMode();
+ mVirtualizerTempDisabled = false;
+ }
+ } else {
+ if (!mVirtualizerTempDisabled) {
+ LOG(VERBOSE) << __func__ << " disable virtualizer";
+ disableOperatingMode();
+ mVirtualizerTempDisabled = true;
+ }
+ }
+
+ return ret;
}
RetCode BundleContext::initControlParameter(LVM_ControlParams_t& params) const {
@@ -760,29 +733,24 @@
auto frameSize = getInputFrameSize();
RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
- LOG(DEBUG) << __func__ << " start processing";
if ((mEffectProcessCalled & 1 << int(mType)) != 0) {
const int undrainedEffects = mEffectInDrain & ~mEffectProcessCalled;
if ((undrainedEffects & 1 << int(lvm::BundleEffectType::EQUALIZER)) != 0) {
- LOG(DEBUG) << "Draining EQUALIZER";
mSamplesToExitCountEq = 0;
--mNumberEffectsEnabled;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::EQUALIZER));
}
if ((undrainedEffects & 1 << int(lvm::BundleEffectType::BASS_BOOST)) != 0) {
- LOG(DEBUG) << "Draining BASS_BOOST";
mSamplesToExitCountBb = 0;
--mNumberEffectsEnabled;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::BASS_BOOST));
}
if ((undrainedEffects & 1 << int(lvm::BundleEffectType::VIRTUALIZER)) != 0) {
- LOG(DEBUG) << "Draining VIRTUALIZER";
mSamplesToExitCountVirt = 0;
--mNumberEffectsEnabled;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VIRTUALIZER));
}
if ((undrainedEffects & 1 << int(lvm::BundleEffectType::VOLUME)) != 0) {
- LOG(DEBUG) << "Draining VOLUME";
--mNumberEffectsEnabled;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VOLUME));
}
@@ -800,7 +768,6 @@
mNumberEffectsEnabled--;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::EQUALIZER));
}
- LOG(DEBUG) << "Effect_process() this is the last frame for EQUALIZER";
}
break;
case lvm::BundleEffectType::BASS_BOOST:
@@ -813,7 +780,6 @@
mNumberEffectsEnabled--;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::BASS_BOOST));
}
- LOG(DEBUG) << "Effect_process() this is the last frame for BASS_BOOST";
}
break;
case lvm::BundleEffectType::VIRTUALIZER:
@@ -826,7 +792,6 @@
mNumberEffectsEnabled--;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VIRTUALIZER));
}
- LOG(DEBUG) << "Effect_process() this is the last frame for VIRTUALIZER";
}
break;
case lvm::BundleEffectType::VOLUME:
@@ -835,14 +800,13 @@
mNumberEffectsEnabled--;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VOLUME));
}
- LOG(DEBUG) << "Effect_process() LVM_VOLUME Effect is not enabled";
break;
}
}
if (isDataAvailable) {
mNumberEffectsCalled++;
}
- bool accumulate = false;
+
if (mNumberEffectsCalled >= mNumberEffectsEnabled) {
// We expect the # effects called to be equal to # effects enabled in sequence (including
// draining effects). Warn if this is not the case due to inconsistent calls.
@@ -850,9 +814,6 @@
"%s Number of effects called %d is greater than number of effects enabled %d",
__func__, mNumberEffectsCalled, mNumberEffectsEnabled);
mEffectProcessCalled = 0; // reset our consistency check.
- if (!isDataAvailable) {
- LOG(DEBUG) << "Effect_process() processing last frame";
- }
mNumberEffectsCalled = 0;
int frames = samples * sizeof(float) / frameSize;
int bufferIndex = 0;
@@ -862,38 +823,24 @@
constexpr int kMaxBlockFrames =
(std::numeric_limits<int16_t>::max() / kBlockSizeMultiple) * kBlockSizeMultiple;
while (frames > 0) {
- float* outTmp = (accumulate ? getWorkBuffer() : out);
/* Process the samples */
LVM_ReturnStatus_en lvmStatus;
- {
- std::lock_guard lg(mMutex);
- int processFrames = std::min(frames, kMaxBlockFrames);
- lvmStatus = LVM_Process(mInstance, in + bufferIndex, outTmp + bufferIndex,
- processFrames, 0);
- if (lvmStatus != LVM_SUCCESS) {
- LOG(ERROR) << "LVM lib failed with error: " << lvmStatus;
- return {EX_UNSUPPORTED_OPERATION, 0, 0};
- }
- if (accumulate) {
- for (int i = 0; i < samples; i++) {
- out[i] += outTmp[i];
- }
- }
- frames -= processFrames;
- int processedSize = processFrames * frameSize / sizeof(float);
- bufferIndex += processedSize;
+ int processFrames = std::min(frames, kMaxBlockFrames);
+ lvmStatus = LVM_Process(mInstance, in + bufferIndex, out + bufferIndex,
+ processFrames, 0);
+ if (lvmStatus != LVM_SUCCESS) {
+ LOG(ERROR) << "LVM_Process failed with error: " << lvmStatus;
+ return {EX_UNSUPPORTED_OPERATION, 0, 0};
}
+ frames -= processFrames;
+ int processedSize = processFrames * frameSize / sizeof(float);
+ bufferIndex += processedSize;
}
} else {
for (int i = 0; i < samples; i++) {
- if (accumulate) {
- out[i] += in[i];
- } else {
- out[i] = in[i];
- }
+ out[i] = in[i];
}
}
- LOG(DEBUG) << __func__ << " done processing";
return {STATUS_OK, samples, samples};
}
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index d823030..044c8dd 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -17,7 +17,6 @@
#pragma once
#include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
#include <array>
#include <cstddef>
@@ -77,7 +76,7 @@
RetCode setForcedDevice(
const ::aidl::android::media::audio::common::AudioDeviceDescription& device);
aidl::android::media::audio::common::AudioDeviceDescription getForcedDevice() const {
- return mForceDevice;
+ return mVirtualizerForcedDevice;
}
std::vector<Virtualizer::ChannelAngle> getSpeakerAngles(
const Virtualizer::SpeakerAnglesPayload payload);
@@ -90,12 +89,9 @@
IEffect::Status processEffect(float* in, float* out, int sampleToProcess);
private:
- std::mutex mMutex;
const lvm::BundleEffectType mType;
bool mEnabled = false;
- LVM_Handle_t mInstance GUARDED_BY(mMutex);
-
- aidl::android::media::audio::common::AudioDeviceDescription mVirtualizerForcedDevice;
+ LVM_Handle_t mInstance;
int mSamplesPerSecond = 0;
int mSamplesToExitCountEq = 0;
@@ -122,7 +118,7 @@
// Virtualizer
int mVirtStrengthSaved = 0; /* Conversion between Get/Set */
bool mVirtualizerTempDisabled = false;
- ::aidl::android::media::audio::common::AudioDeviceDescription mForceDevice;
+ ::aidl::android::media::audio::common::AudioDeviceDescription mVirtualizerForcedDevice;
// Volume
float mLevelSaveddB = 0; /* for when mute is set, level must be saved */
float mVolumedB = 0;
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
index 755f57c..70c276d 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
@@ -55,7 +55,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<EffectBundleAidl>(*uuid);
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -83,7 +82,6 @@
namespace aidl::android::hardware::audio::effect {
EffectBundleAidl::EffectBundleAidl(const AudioUuid& uuid) {
- LOG(DEBUG) << __func__ << uuid.toString();
if (uuid == getEffectImplUuidEqualizerBundle()) {
mType = lvm::BundleEffectType::EQUALIZER;
mDescriptor = &lvm::kEqualizerDesc;
@@ -107,12 +105,10 @@
EffectBundleAidl::~EffectBundleAidl() {
cleanUp();
- LOG(DEBUG) << __func__;
}
ndk::ScopedAStatus EffectBundleAidl::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
- LOG(DEBUG) << _aidl_return->toString();
*_aidl_return = *mDescriptor;
return ndk::ScopedAStatus::ok();
}
@@ -154,7 +150,6 @@
}
ndk::ScopedAStatus EffectBundleAidl::setParameterSpecific(const Parameter::Specific& specific) {
- LOG(DEBUG) << __func__ << " specific " << specific.toString();
RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
auto tag = specific.getTag();
diff --git a/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h b/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h
index d31763b..ea1a8fe 100644
--- a/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h
+++ b/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h
@@ -21,7 +21,6 @@
#include <unordered_map>
#include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
#include "BundleContext.h"
#include "BundleTypes.h"
@@ -41,11 +40,6 @@
return instance;
}
- bool isSessionIdExist(int sessionId) {
- std::lock_guard lg(mMutex);
- return mSessionMap.count(sessionId);
- }
-
static bool findBundleTypeInList(std::vector<std::shared_ptr<BundleContext>>& list,
const lvm::BundleEffectType& type, bool remove = false) {
auto itor = std::find_if(list.begin(), list.end(),
@@ -69,8 +63,7 @@
std::shared_ptr<BundleContext> createSession(const lvm::BundleEffectType& type, int statusDepth,
const Parameter::Common& common) {
int sessionId = common.session;
- LOG(DEBUG) << __func__ << type << " with sessionId " << sessionId;
- std::lock_guard lg(mMutex);
+ LOG(VERBOSE) << __func__ << type << " with sessionId " << sessionId;
if (mSessionMap.count(sessionId) == 0 && mSessionMap.size() >= MAX_BUNDLE_SESSIONS) {
LOG(ERROR) << __func__ << " exceed max bundle session";
return nullptr;
@@ -97,8 +90,7 @@
}
void releaseSession(const lvm::BundleEffectType& type, int sessionId) {
- LOG(DEBUG) << __func__ << type << " sessionId " << sessionId;
- std::lock_guard lg(mMutex);
+ LOG(VERBOSE) << __func__ << type << " sessionId " << sessionId;
if (mSessionMap.count(sessionId)) {
auto& list = mSessionMap[sessionId];
if (!findBundleTypeInList(list, type, true /* remove */)) {
@@ -112,11 +104,9 @@
}
private:
- // Lock for mSessionMap access.
- std::mutex mMutex;
// Max session number supported.
static constexpr int MAX_BUNDLE_SESSIONS = 32;
std::unordered_map<int /* session ID */, std::vector<std::shared_ptr<BundleContext>>>
- mSessionMap GUARDED_BY(mMutex);
+ mSessionMap;
};
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index 62837b9..84b49f2 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -51,8 +51,8 @@
local_include_dirs: ["Bundle"],
header_libs: [
- "libhardware_headers",
"libaudioeffects",
+ "libhardware_headers",
],
}
@@ -92,8 +92,8 @@
export_include_dirs: ["Reverb"],
header_libs: [
- "libhardware_headers",
"libaudioeffects",
+ "libhardware_headers",
],
sanitize: {
@@ -104,9 +104,9 @@
cc_library_shared {
name: "libbundleaidl",
srcs: [
+ ":effectCommonFile",
"Aidl/BundleContext.cpp",
"Aidl/EffectBundleAidl.cpp",
- ":effectCommonFile",
],
static_libs: ["libmusicbundle"],
defaults: [
@@ -124,8 +124,8 @@
"libstagefright_foundation",
],
cflags: [
- "-Wthread-safety",
"-DBACKEND_NDK",
+ "-Wthread-safety",
],
relative_install_path: "soundfx",
visibility: [
@@ -136,9 +136,9 @@
cc_library_shared {
name: "libreverbaidl",
srcs: [
- "Reverb/aidl/ReverbContext.cpp",
- "Reverb/aidl/EffectReverb.cpp",
":effectCommonFile",
+ "Reverb/aidl/EffectReverb.cpp",
+ "Reverb/aidl/ReverbContext.cpp",
],
static_libs: ["libreverb"],
defaults: [
@@ -150,8 +150,8 @@
"libhardware_headers",
],
shared_libs: [
- "libbase",
"libaudioutils",
+ "libbase",
"libcutils",
"liblog",
],
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
index c714bc9..4d369b1 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
@@ -55,7 +55,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<EffectReverb>(*uuid);
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -82,7 +81,6 @@
namespace aidl::android::hardware::audio::effect {
EffectReverb::EffectReverb(const AudioUuid& uuid) {
- LOG(DEBUG) << __func__ << uuid.toString();
if (uuid == getEffectImplUuidAuxEnvReverb()) {
mType = lvm::ReverbEffectType::AUX_ENV;
mDescriptor = &lvm::kAuxEnvReverbDesc;
@@ -106,18 +104,16 @@
EffectReverb::~EffectReverb() {
cleanUp();
- LOG(DEBUG) << __func__;
}
ndk::ScopedAStatus EffectReverb::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
- LOG(DEBUG) << _aidl_return->toString();
*_aidl_return = *mDescriptor;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus EffectReverb::setParameterSpecific(const Parameter::Specific& specific) {
- LOG(DEBUG) << __func__ << " specific " << specific.toString();
+ LOG(VERBOSE) << __func__ << " specific " << specific.toString();
RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
auto tag = specific.getTag();
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 1c66c78..44ea2a4 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -68,24 +68,21 @@
// allocate lvm reverb instance
LVREV_ReturnStatus_en status = LVREV_SUCCESS;
- {
- std::lock_guard lg(mMutex);
- LVREV_InstanceParams_st params = {
- .MaxBlockSize = lvm::kMaxCallSize,
- // Max format, could be mono during process
- .SourceFormat = LVM_STEREO,
- .NumDelays = LVREV_DELAYLINES_4,
- };
- /* Init sets the instance handle */
- status = LVREV_GetInstanceHandle(&mInstance, &params);
- GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_GetInstanceHandleFailed");
+ LVREV_InstanceParams_st params = {
+ .MaxBlockSize = lvm::kMaxCallSize,
+ // Max format, could be mono during process
+ .SourceFormat = LVM_STEREO,
+ .NumDelays = LVREV_DELAYLINES_4,
+ };
+ /* Init sets the instance handle */
+ status = LVREV_GetInstanceHandle(&mInstance, &params);
+ GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_GetInstanceHandleFailed");
- // set control
- LVREV_ControlParams_st controlParams;
- initControlParameter(controlParams);
- status = LVREV_SetControlParameters(mInstance, &controlParams);
- GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_SetControlParametersFailed");
- }
+ // set control
+ LVREV_ControlParams_st controlParams;
+ initControlParameter(controlParams);
+ status = LVREV_SetControlParameters(mInstance, &controlParams);
+ GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_SetControlParametersFailed");
return RetCode::SUCCESS;
@@ -95,7 +92,6 @@
}
void ReverbContext::deInit() {
- std::lock_guard lg(mMutex);
if (mInstance) {
LVREV_FreeInstance(mInstance);
mInstance = nullptr;
@@ -143,19 +139,16 @@
RetCode ReverbContext::setEnvironmentalReverbRoomLevel(int roomLevel) {
// Update Control Parameter
LVREV_ControlParams_st params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- // Sum of room and reverb level controls
- // needs to subtract max levels for both room level and reverb level
- int combinedLevel = (roomLevel + mLevel) - lvm::kMaxReverbLevel;
- params.Level = convertLevel(combinedLevel);
+ // Sum of room and reverb level controls
+ // needs to subtract max levels for both room level and reverb level
+ int combinedLevel = (roomLevel + mLevel) - lvm::kMaxReverbLevel;
+ params.Level = convertLevel(combinedLevel);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
mRoomLevel = roomLevel;
return RetCode::SUCCESS;
}
@@ -163,16 +156,13 @@
RetCode ReverbContext::setEnvironmentalReverbRoomHfLevel(int roomHfLevel) {
// Update Control Parameter
LVREV_ControlParams_st params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- params.LPF = convertHfLevel(roomHfLevel);
+ params.LPF = convertHfLevel(roomHfLevel);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
mRoomHfLevel = roomHfLevel;
return RetCode::SUCCESS;
}
@@ -185,17 +175,15 @@
// Update Control Parameter
LVREV_ControlParams_st params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- params.T60 = (LVM_UINT16)time;
- mSamplesToExitCount = (params.T60 * mCommon.input.base.sampleRate) / 1000;
+ params.T60 = (LVM_UINT16)time;
+ mSamplesToExitCount = (params.T60 * mCommon.input.base.sampleRate) / 1000;
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
mDecayTime = time;
return RetCode::SUCCESS;
}
@@ -203,16 +191,13 @@
RetCode ReverbContext::setEnvironmentalReverbDecayHfRatio(int decayHfRatio) {
// Update Control Parameter
LVREV_ControlParams_st params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- params.Damping = (LVM_INT16)(decayHfRatio / 20);
+ params.Damping = (LVM_INT16)(decayHfRatio / 20);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
mDecayHfRatio = decayHfRatio;
return RetCode::SUCCESS;
}
@@ -220,19 +205,17 @@
RetCode ReverbContext::setEnvironmentalReverbLevel(int level) {
// Update Control Parameter
LVREV_ControlParams_st params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- // Sum of room and reverb level controls
- // needs to subtract max levels for both room level and level
- int combinedLevel = (level + mRoomLevel) - lvm::kMaxReverbLevel;
- params.Level = convertLevel(combinedLevel);
+ // Sum of room and reverb level controls
+ // needs to subtract max levels for both room level and level
+ int combinedLevel = (level + mRoomLevel) - lvm::kMaxReverbLevel;
+ params.Level = convertLevel(combinedLevel);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
mLevel = level;
return RetCode::SUCCESS;
}
@@ -245,16 +228,14 @@
RetCode ReverbContext::setEnvironmentalReverbDiffusion(int diffusion) {
// Update Control Parameter
LVREV_ControlParams_st params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- params.Density = (LVM_INT16)(diffusion / 10);
+ params.Density = (LVM_INT16)(diffusion / 10);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
mDiffusion = diffusion;
return RetCode::SUCCESS;
}
@@ -262,16 +243,14 @@
RetCode ReverbContext::setEnvironmentalReverbDensity(int density) {
// Update Control Parameter
LVREV_ControlParams_st params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- params.RoomSize = (LVM_INT16)(((density * 99) / 1000) + 1);
+ params.RoomSize = (LVM_INT16)(((density * 99) / 1000) + 1);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
mDensity = density;
return RetCode::SUCCESS;
}
@@ -362,9 +341,6 @@
RETURN_VALUE_IF(inputFrameCount != outputFrameCount, status, "FrameCountMismatch");
RETURN_VALUE_IF(0 == getInputFrameSize(), status, "zeroFrameSize");
- LOG(DEBUG) << __func__ << " start processing";
- std::lock_guard lg(mMutex);
-
int channels = ::aidl::android::hardware::audio::common::getChannelCount(
mCommon.input.base.channelMask);
int outChannels = ::aidl::android::hardware::audio::common::getChannelCount(
@@ -377,46 +353,62 @@
return status;
}
- std::vector<float> inFrames(samples);
- std::vector<float> outFrames(frameCount * FCC_2);
+ std::vector<float> inputSamples;
+ std::vector<float> outputSamples(frameCount * FCC_2);
if (isPreset() && mNextPreset != mPreset) {
loadPreset();
}
if (isAuxiliary()) {
- inFrames.assign(in, in + samples);
+ inputSamples.resize(samples);
+ inputSamples.assign(in, in + samples);
} else {
- // mono input is duplicated
+ // Resizing to stereo is required to duplicate mono input
+ inputSamples.resize(frameCount * FCC_2);
if (channels >= FCC_2) {
for (int i = 0; i < frameCount; i++) {
- inFrames[FCC_2 * i] = in[channels * i] * kSendLevel;
- inFrames[FCC_2 * i + 1] = in[channels * i + 1] * kSendLevel;
+ inputSamples[FCC_2 * i] = in[channels * i] * kSendLevel;
+ inputSamples[FCC_2 * i + 1] = in[channels * i + 1] * kSendLevel;
}
} else {
for (int i = 0; i < frameCount; i++) {
- inFrames[FCC_2 * i] = inFrames[FCC_2 * i + 1] = in[i] * kSendLevel;
+ inputSamples[FCC_2 * i] = inputSamples[FCC_2 * i + 1] = in[i] * kSendLevel;
}
}
}
if (isPreset() && mPreset == PresetReverb::Presets::NONE) {
- std::fill(outFrames.begin(), outFrames.end(), 0); // always stereo here
+ std::fill(outputSamples.begin(), outputSamples.end(), 0); // always stereo here
} else {
if (!mEnabled && mSamplesToExitCount > 0) {
- std::fill(outFrames.begin(), outFrames.end(), 0);
- LOG(VERBOSE) << "Zeroing " << channels << " samples per frame at the end of call ";
+ std::fill(outputSamples.begin(), outputSamples.end(), 0);
}
+ int inputBufferIndex = 0;
+ int outputBufferIndex = 0;
+
+ // LVREV library supports max of int16_t frames at a time
+ constexpr int kMaxBlockFrames = std::numeric_limits<int16_t>::max();
+ const auto inputFrameSize = getInputFrameSize();
+ const auto outputFrameSize = getOutputFrameSize();
/* Process the samples, producing a stereo output */
- LVREV_ReturnStatus_en lvrevStatus =
- LVREV_Process(mInstance, /* Instance handle */
- inFrames.data(), /* Input buffer */
- outFrames.data(), /* Output buffer */
- frameCount); /* Number of samples to read */
- if (lvrevStatus != LVREV_SUCCESS) {
- LOG(ERROR) << __func__ << lvrevStatus;
- return {EX_UNSUPPORTED_OPERATION, 0, 0};
+ for (int fc = frameCount; fc > 0;) {
+ int processFrames = std::min(fc, kMaxBlockFrames);
+ LVREV_ReturnStatus_en lvrevStatus =
+ LVREV_Process(mInstance, /* Instance handle */
+ inputSamples.data() + inputBufferIndex, /* Input buffer */
+ outputSamples.data() + outputBufferIndex, /* Output buffer */
+ processFrames); /* Number of samples to process */
+ if (lvrevStatus != LVREV_SUCCESS) {
+ LOG(ERROR) << __func__ << " LVREV_Process error: " << lvrevStatus;
+ return {EX_UNSUPPORTED_OPERATION, 0, 0};
+ }
+
+ fc -= processFrames;
+
+ inputBufferIndex += processFrames * inputFrameSize / sizeof(float);
+ outputBufferIndex += processFrames * outputFrameSize / sizeof(float);
}
}
// Convert to 16 bits
@@ -426,14 +418,14 @@
if (channels >= FCC_2) {
for (int i = 0; i < frameCount; i++) {
// Mix with dry input
- outFrames[FCC_2 * i] += in[channels * i];
- outFrames[FCC_2 * i + 1] += in[channels * i + 1];
+ outputSamples[FCC_2 * i] += in[channels * i];
+ outputSamples[FCC_2 * i + 1] += in[channels * i + 1];
}
} else {
for (int i = 0; i < frameCount; i++) {
// Mix with dry input
- outFrames[FCC_2 * i] += in[i];
- outFrames[FCC_2 * i + 1] += in[i];
+ outputSamples[FCC_2 * i] += in[i];
+ outputSamples[FCC_2 * i + 1] += in[i];
}
}
@@ -445,8 +437,8 @@
float incr = (mVolume.right - vr) / frameCount;
for (int i = 0; i < frameCount; i++) {
- outFrames[FCC_2 * i] *= vl;
- outFrames[FCC_2 * i + 1] *= vr;
+ outputSamples[FCC_2 * i] *= vl;
+ outputSamples[FCC_2 * i + 1] *= vr;
vl += incl;
vr += incr;
@@ -455,8 +447,8 @@
} else if (volumeMode != VOLUME_OFF) {
if (mVolume.left != kUnitVolume || mVolume.right != kUnitVolume) {
for (int i = 0; i < frameCount; i++) {
- outFrames[FCC_2 * i] *= mVolume.left;
- outFrames[FCC_2 * i + 1] *= mVolume.right;
+ outputSamples[FCC_2 * i] *= mVolume.left;
+ outputSamples[FCC_2 * i + 1] *= mVolume.right;
}
}
mPrevVolume = mVolume;
@@ -464,19 +456,10 @@
}
}
- bool accumulate = false;
if (outChannels > 2) {
- // Accumulate if required
- if (accumulate) {
- for (int i = 0; i < frameCount; i++) {
- out[outChannels * i] += outFrames[FCC_2 * i];
- out[outChannels * i + 1] += outFrames[FCC_2 * i + 1];
- }
- } else {
- for (int i = 0; i < frameCount; i++) {
- out[outChannels * i] = outFrames[FCC_2 * i];
- out[outChannels * i + 1] = outFrames[FCC_2 * i + 1];
- }
+ for (int i = 0; i < frameCount; i++) {
+ out[outChannels * i] = outputSamples[FCC_2 * i];
+ out[outChannels * i + 1] = outputSamples[FCC_2 * i + 1];
}
if (!isAuxiliary()) {
for (int i = 0; i < frameCount; i++) {
@@ -487,29 +470,15 @@
}
}
} else {
- if (accumulate) {
- if (outChannels == FCC_1) {
- for (int i = 0; i < frameCount; i++) {
- out[i] += ((outFrames[i * FCC_2] + outFrames[i * FCC_2 + 1]) * 0.5f);
- }
- } else {
- for (int i = 0; i < frameCount * FCC_2; i++) {
- out[i] += outFrames[i];
- }
- }
+ if (outChannels == FCC_1) {
+ From2iToMono_Float(outputSamples.data(), out, frameCount);
} else {
- if (outChannels == FCC_1) {
- From2iToMono_Float(outFrames.data(), out, frameCount);
- } else {
- for (int i = 0; i < frameCount * FCC_2; i++) {
- out[i] = outFrames[i];
- }
+ for (int i = 0; i < frameCount * FCC_2; i++) {
+ out[i] = outputSamples[i];
}
}
}
- LOG(DEBUG) << __func__ << " done processing";
-
if (!mEnabled && mSamplesToExitCount > 0) {
// signed - unsigned will trigger integer overflow if result becomes negative.
mSamplesToExitCount -= samples;
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
index 7d0ccff..44391f2 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
@@ -17,7 +17,6 @@
#pragma once
#include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
#include <unordered_map>
#include "ReverbTypes.h"
@@ -158,10 +157,9 @@
{-400, -600, 1800, 700, -2000, 30, -1400, 60, 1000, 1000}},
{PresetReverb::Presets::PLATE, {-400, -200, 1300, 900, 0, 2, 0, 10, 1000, 750}}};
- std::mutex mMutex;
const lvm::ReverbEffectType mType;
bool mEnabled = false;
- LVREV_Handle_t mInstance GUARDED_BY(mMutex) = LVM_NULL;
+ LVREV_Handle_t mInstance = LVM_NULL;
int mRoomLevel = 0;
int mRoomHfLevel = 0;
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index 994b061..ce45a19 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -62,30 +62,30 @@
cc_library_shared {
name: "libpreprocessingaidl",
srcs: [
- "aidl/PreProcessingContext.cpp",
- "aidl/EffectPreProcessing.cpp",
":effectCommonFile",
+ "aidl/EffectPreProcessing.cpp",
+ "aidl/PreProcessingContext.cpp",
],
defaults: [
"aidlaudioeffectservice_defaults",
],
local_include_dirs: ["aidl"],
shared_libs: [
+ "libaudioutils",
"liblog",
"libutils",
- "libaudioutils",
],
static_libs: [
"webrtc_audio_processing",
],
header_libs: [
- "libwebrtc_absl_headers",
"libaudioeffects",
"libhardware_headers",
+ "libwebrtc_absl_headers",
],
cflags: [
- "-Wthread-safety",
"-Wno-unused-parameter",
+ "-Wthread-safety",
],
relative_install_path: "soundfx",
visibility: [
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
index 1675d97..87d267b 100644
--- a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
@@ -50,7 +50,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<EffectPreProcessing>(*uuid);
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -78,7 +77,6 @@
namespace aidl::android::hardware::audio::effect {
EffectPreProcessing::EffectPreProcessing(const AudioUuid& uuid) {
- LOG(DEBUG) << __func__ << uuid.toString();
if (uuid == getEffectImplUuidAcousticEchoCancelerSw()) {
mType = PreProcessingEffectType::ACOUSTIC_ECHO_CANCELLATION;
mDescriptor = &kAcousticEchoCancelerDesc;
@@ -102,18 +100,16 @@
EffectPreProcessing::~EffectPreProcessing() {
cleanUp();
- LOG(DEBUG) << __func__;
}
ndk::ScopedAStatus EffectPreProcessing::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
- LOG(DEBUG) << _aidl_return->toString();
*_aidl_return = *mDescriptor;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus EffectPreProcessing::setParameterSpecific(const Parameter::Specific& specific) {
- LOG(DEBUG) << __func__ << " specific " << specific.toString();
+ LOG(VERBOSE) << __func__ << " specific " << specific.toString();
RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
auto tag = specific.getTag();
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
index 6f671f0..2d549ef 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
@@ -26,7 +26,6 @@
using aidl::android::media::audio::common::AudioDeviceType;
RetCode PreProcessingContext::init(const Parameter::Common& common) {
- std::lock_guard lg(mMutex);
webrtc::AudioProcessingBuilder apBuilder;
mAudioProcessingModule = apBuilder.Create();
if (mAudioProcessingModule == nullptr) {
@@ -64,7 +63,6 @@
}
RetCode PreProcessingContext::deInit() {
- std::lock_guard lg(mMutex);
mAudioProcessingModule = nullptr;
mState = PRE_PROC_STATE_UNINITIALIZED;
return RetCode::SUCCESS;
@@ -75,7 +73,6 @@
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
int typeMsk = (1 << int(mType));
- std::lock_guard lg(mMutex);
// Check if effect is already enabled.
if ((mEnabledMsk & typeMsk) == typeMsk) {
return RetCode::ERROR_ILLEGAL_PARAMETER;
@@ -110,7 +107,6 @@
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
int typeMsk = (1 << int(mType));
- std::lock_guard lg(mMutex);
// Check if effect is already disabled.
if ((mEnabledMsk & typeMsk) != typeMsk) {
return RetCode::ERROR_ILLEGAL_PARAMETER;
@@ -160,7 +156,6 @@
RetCode PreProcessingContext::setAcousticEchoCancelerEchoDelay(int echoDelayUs) {
mEchoDelayUs = echoDelayUs;
- std::lock_guard lg(mMutex);
mAudioProcessingModule->set_stream_delay_ms(mEchoDelayUs / 1000);
return RetCode::SUCCESS;
}
@@ -171,7 +166,6 @@
RetCode PreProcessingContext::setAcousticEchoCancelerMobileMode(bool mobileMode) {
mMobileMode = mobileMode;
- std::lock_guard lg(mMutex);
auto config = mAudioProcessingModule->GetConfig();
config.echo_canceller.mobile_mode = mobileMode;
mAudioProcessingModule->ApplyConfig(config);
@@ -184,7 +178,6 @@
RetCode PreProcessingContext::setAutomaticGainControlV1TargetPeakLevel(int targetPeakLevel) {
mTargetPeakLevel = targetPeakLevel;
- std::lock_guard lg(mMutex);
auto config = mAudioProcessingModule->GetConfig();
config.gain_controller1.target_level_dbfs = -(mTargetPeakLevel / 100);
mAudioProcessingModule->ApplyConfig(config);
@@ -197,7 +190,6 @@
RetCode PreProcessingContext::setAutomaticGainControlV1MaxCompressionGain(int maxCompressionGain) {
mMaxCompressionGain = maxCompressionGain;
- std::lock_guard lg(mMutex);
auto config = mAudioProcessingModule->GetConfig();
config.gain_controller1.compression_gain_db = mMaxCompressionGain / 100;
mAudioProcessingModule->ApplyConfig(config);
@@ -210,7 +202,6 @@
RetCode PreProcessingContext::setAutomaticGainControlV1EnableLimiter(bool enableLimiter) {
mEnableLimiter = enableLimiter;
- std::lock_guard lg(mMutex);
auto config = mAudioProcessingModule->GetConfig();
config.gain_controller1.enable_limiter = mEnableLimiter;
mAudioProcessingModule->ApplyConfig(config);
@@ -223,7 +214,6 @@
RetCode PreProcessingContext::setAutomaticGainControlV2DigitalGain(int gain) {
mDigitalGain = gain;
- std::lock_guard lg(mMutex);
auto config = mAudioProcessingModule->GetConfig();
config.gain_controller2.fixed_digital.gain_db = mDigitalGain;
mAudioProcessingModule->ApplyConfig(config);
@@ -256,7 +246,6 @@
RetCode PreProcessingContext::setNoiseSuppressionLevel(NoiseSuppression::Level level) {
mLevel = level;
- std::lock_guard lg(mMutex);
auto config = mAudioProcessingModule->GetConfig();
config.noise_suppression.level =
(webrtc::AudioProcessing::Config::NoiseSuppression::Level)level;
@@ -278,9 +267,6 @@
RETURN_VALUE_IF(inputFrameCount != outputFrameCount, status, "FrameCountMismatch");
RETURN_VALUE_IF(0 == getInputFrameSize(), status, "zeroFrameSize");
- LOG(DEBUG) << __func__ << " start processing";
- std::lock_guard lg(mMutex);
-
mProcessedMsk |= (1 << int(mType));
// webrtc implementation clear out was_stream_delay_set every time after ProcessStream() call
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.h b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
index 811bacf..11a2bea 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingContext.h
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
@@ -17,7 +17,6 @@
#pragma once
#include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
#include <audio_processing.h>
#include <unordered_map>
@@ -37,10 +36,9 @@
PreProcessingContext(int statusDepth, const Parameter::Common& common,
const PreProcessingEffectType& type)
: EffectContext(statusDepth, common), mType(type) {
- LOG(DEBUG) << __func__ << type;
mState = PRE_PROC_STATE_UNINITIALIZED;
}
- ~PreProcessingContext() override { LOG(DEBUG) << __func__; }
+ ~PreProcessingContext() = default;
RetCode init(const Parameter::Common& common);
RetCode deInit();
@@ -85,20 +83,19 @@
static constexpr inline webrtc::AudioProcessing::Config::NoiseSuppression::Level
kNsDefaultLevel = webrtc::AudioProcessing::Config::NoiseSuppression::kModerate;
- std::mutex mMutex;
const PreProcessingEffectType mType;
PreProcEffectState mState; // current state
// handle on webRTC audio processing module (APM)
- rtc::scoped_refptr<webrtc::AudioProcessing> mAudioProcessingModule GUARDED_BY(mMutex);
+ rtc::scoped_refptr<webrtc::AudioProcessing> mAudioProcessingModule;
- int mEnabledMsk GUARDED_BY(mMutex); // bit field containing IDs of enabled pre processors
- int mProcessedMsk GUARDED_BY(mMutex); // bit field containing IDs of pre processors already
+ int mEnabledMsk; // bit field containing IDs of enabled pre processors
+ int mProcessedMsk; // bit field containing IDs of pre processors already
// processed in current round
- int mRevEnabledMsk GUARDED_BY(mMutex); // bit field containing IDs of enabled pre processors
+ int mRevEnabledMsk; // bit field containing IDs of enabled pre processors
// with reverse channel
- int mRevProcessedMsk GUARDED_BY(mMutex); // bit field containing IDs of pre processors with
- // reverse channel already processed in current round
+ int mRevProcessedMsk; // bit field containing IDs of pre processors with
+ // reverse channel already processed in current round
webrtc::StreamConfig mInputConfig; // input stream configuration
webrtc::StreamConfig mOutputConfig; // output stream configuration
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingSession.h b/media/libeffects/preprocessing/aidl/PreProcessingSession.h
index 877292f..4a66e81 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingSession.h
+++ b/media/libeffects/preprocessing/aidl/PreProcessingSession.h
@@ -21,7 +21,6 @@
#include <unordered_map>
#include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
#include "PreProcessingContext.h"
#include "PreProcessingTypes.h"
@@ -67,7 +66,6 @@
const Parameter::Common& common) {
int sessionId = common.session;
LOG(DEBUG) << __func__ << type << " with sessionId " << sessionId;
- std::lock_guard lg(mMutex);
if (mSessionMap.count(sessionId) == 0 && mSessionMap.size() >= MAX_PRE_PROC_SESSIONS) {
LOG(ERROR) << __func__ << " exceed max bundle session";
return nullptr;
@@ -95,7 +93,6 @@
void releaseSession(const PreProcessingEffectType& type, int sessionId) {
LOG(DEBUG) << __func__ << type << " sessionId " << sessionId;
- std::lock_guard lg(mMutex);
if (mSessionMap.count(sessionId)) {
auto& list = mSessionMap[sessionId];
if (!findPreProcessingTypeInList(list, type, true /* remove */)) {
@@ -109,11 +106,9 @@
}
private:
- // Lock for mSessionMap access.
- std::mutex mMutex;
// Max session number supported.
static constexpr int MAX_PRE_PROC_SESSIONS = 8;
std::unordered_map<int /* session ID */, std::vector<std::shared_ptr<PreProcessingContext>>>
- mSessionMap GUARDED_BY(mMutex);
+ mSessionMap;
};
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/proxy/Android.bp b/media/libeffects/proxy/Android.bp
index 6256eda..95da4de 100644
--- a/media/libeffects/proxy/Android.bp
+++ b/media/libeffects/proxy/Android.bp
@@ -29,19 +29,19 @@
srcs: ["EffectProxy.cpp"],
cflags: [
- "-fvisibility=hidden",
"-Wall",
"-Werror",
+ "-fvisibility=hidden",
],
include_dirs: ["frameworks/av/media/libeffects/factory"],
header_libs: ["libaudioeffects"],
shared_libs: [
- "liblog",
"libcutils",
- "libutils",
"libdl",
"libeffects",
+ "liblog",
+ "libutils",
],
}
diff --git a/media/libeffects/spatializer/tests/Android.bp b/media/libeffects/spatializer/tests/Android.bp
index 704e873..818e094 100644
--- a/media/libeffects/spatializer/tests/Android.bp
+++ b/media/libeffects/spatializer/tests/Android.bp
@@ -12,7 +12,7 @@
cc_test {
name: "SpatializerTest",
defaults: [
- "libeffects-test-defaults",
+ "libeffects-test-defaults",
],
host_supported: false,
srcs: [
diff --git a/media/libeffects/testlibs/Android.bp b/media/libeffects/testlibs/Android.bp
index 5ba56bb..f5aad92 100644
--- a/media/libeffects/testlibs/Android.bp
+++ b/media/libeffects/testlibs/Android.bp
@@ -33,10 +33,10 @@
relative_install_path: "soundfx",
cflags: [
- "-fvisibility=hidden",
"-Wall",
"-Werror",
"-Wno-address-of-packed-member",
+ "-fvisibility=hidden",
],
header_libs: [
@@ -66,9 +66,9 @@
relative_install_path: "soundfx",
cflags: [
- "-fvisibility=hidden",
"-Wall",
"-Werror",
+ "-fvisibility=hidden",
],
header_libs: [
diff --git a/media/libeffects/visualizer/Android.bp b/media/libeffects/visualizer/Android.bp
index 66ceadf..8f1d8da 100644
--- a/media/libeffects/visualizer/Android.bp
+++ b/media/libeffects/visualizer/Android.bp
@@ -54,9 +54,9 @@
cc_library_shared {
name: "libvisualizeraidl",
srcs: [
+ ":effectCommonFile",
"aidl/Visualizer.cpp",
"aidl/VisualizerContext.cpp",
- ":effectCommonFile",
],
defaults: [
"aidlaudioeffectservice_defaults",
diff --git a/media/libeffects/visualizer/aidl/Visualizer.cpp b/media/libeffects/visualizer/aidl/Visualizer.cpp
index 9b1bac6..9b493d4 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.cpp
+++ b/media/libeffects/visualizer/aidl/Visualizer.cpp
@@ -37,7 +37,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<VisualizerImpl>();
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -73,7 +72,7 @@
.uuid = getEffectImplUuidVisualizer(),
.proxy = std::nullopt},
.flags = {.type = Flags::Type::INSERT,
- .insert = Flags::Insert::LAST,
+ .insert = Flags::Insert::FIRST,
.volume = Flags::Volume::NONE},
.name = VisualizerImpl::kEffectName,
.implementor = "The Android Open Source Project"},
diff --git a/media/libeffects/visualizer/aidl/Visualizer.h b/media/libeffects/visualizer/aidl/Visualizer.h
index b48c85e..3180972 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.h
+++ b/media/libeffects/visualizer/aidl/Visualizer.h
@@ -29,11 +29,8 @@
static const std::string kEffectName;
static const Capability kCapability;
static const Descriptor kDescriptor;
- VisualizerImpl() { LOG(DEBUG) << __func__; }
- ~VisualizerImpl() {
- cleanUp();
- LOG(DEBUG) << __func__;
- }
+ VisualizerImpl() = default;
+ ~VisualizerImpl() { cleanUp(); }
ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.cpp b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
index c763b1a..1e08674 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.cpp
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
@@ -38,14 +38,10 @@
}
VisualizerContext::~VisualizerContext() {
- std::lock_guard lg(mMutex);
- LOG(DEBUG) << __func__;
mState = State::UNINITIALIZED;
}
RetCode VisualizerContext::initParams(const Parameter::Common& common) {
- std::lock_guard lg(mMutex);
- LOG(DEBUG) << __func__;
if (common.input != common.output) {
LOG(ERROR) << __func__ << " mismatch input: " << common.input.toString()
<< " and output: " << common.output.toString();
@@ -66,7 +62,6 @@
}
RetCode VisualizerContext::enable() {
- std::lock_guard lg(mMutex);
if (mState != State::INITIALIZED) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -75,7 +70,6 @@
}
RetCode VisualizerContext::disable() {
- std::lock_guard lg(mMutex);
if (mState != State::ACTIVE) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -84,48 +78,39 @@
}
void VisualizerContext::reset() {
- std::lock_guard lg(mMutex);
std::fill(mCaptureBuf.begin(), mCaptureBuf.end(), 0x80);
}
RetCode VisualizerContext::setCaptureSamples(int samples) {
- std::lock_guard lg(mMutex);
mCaptureSamples = samples;
return RetCode::SUCCESS;
}
int32_t VisualizerContext::getCaptureSamples() {
- std::lock_guard lg(mMutex);
return mCaptureSamples;
}
RetCode VisualizerContext::setMeasurementMode(Visualizer::MeasurementMode mode) {
- std::lock_guard lg(mMutex);
mMeasurementMode = mode;
return RetCode::SUCCESS;
}
Visualizer::MeasurementMode VisualizerContext::getMeasurementMode() {
- std::lock_guard lg(mMutex);
return mMeasurementMode;
}
RetCode VisualizerContext::setScalingMode(Visualizer::ScalingMode mode) {
- std::lock_guard lg(mMutex);
mScalingMode = mode;
return RetCode::SUCCESS;
}
Visualizer::ScalingMode VisualizerContext::getScalingMode() {
- std::lock_guard lg(mMutex);
return mScalingMode;
}
RetCode VisualizerContext::setDownstreamLatency(int latency) {
- std::lock_guard lg(mMutex);
mDownstreamLatency = latency;
return RetCode::SUCCESS;
}
int VisualizerContext::getDownstreamLatency() {
- std::lock_guard lg(mMutex);
return mDownstreamLatency;
}
@@ -152,7 +137,6 @@
uint8_t nbValidMeasurements = 0;
{
- std::lock_guard lg(mMutex);
// reset measurements if last measurement was too long ago (which implies stored
// measurements aren't relevant anymore and shouldn't bias the new one)
const uint32_t delayMs = getDeltaTimeMsFromUpdatedTime_l();
@@ -185,13 +169,12 @@
// convert from I16 sample values to mB and write results
measure.rms = (rms < 0.000016f) ? -9600 : (int32_t)(2000 * log10(rms / 32767.0f));
measure.peak = (peakU16 == 0) ? -9600 : (int32_t)(2000 * log10(peakU16 / 32767.0f));
- LOG(INFO) << __func__ << " peak " << peakU16 << " (" << measure.peak << "mB), rms " << rms
- << " (" << measure.rms << "mB)";
+ LOG(VERBOSE) << __func__ << " peak " << peakU16 << " (" << measure.peak << "mB), rms " << rms
+ << " (" << measure.rms << "mB)";
return measure;
}
std::vector<uint8_t> VisualizerContext::capture() {
- std::lock_guard lg(mMutex);
uint32_t captureSamples = mCaptureSamples;
std::vector<uint8_t> result(captureSamples, 0x80);
// cts android.media.audio.cts.VisualizerTest expecting silence data when effect not running
@@ -205,7 +188,6 @@
// clear the capture buffer to return silence
if ((mLastCaptureIdx == mCaptureIdx) && (mBufferUpdateTime.tv_sec != 0) &&
(deltaMs > kMaxStallTimeMs)) {
- LOG(INFO) << __func__ << " capture going to idle";
mBufferUpdateTime.tv_sec = 0;
return result;
}
@@ -247,10 +229,8 @@
IEffect::Status result = {STATUS_NOT_ENOUGH_DATA, 0, 0};
RETURN_VALUE_IF(in == nullptr || out == nullptr || samples == 0, result, "dataBufferError");
- std::lock_guard lg(mMutex);
result.status = STATUS_INVALID_OPERATION;
RETURN_VALUE_IF(mState != State::ACTIVE, result, "stateNotActive");
- LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
// perform measurements if needed
if (mMeasurementMode == Visualizer::MeasurementMode::PEAK_RMS) {
// find the peak and RMS squared for the new buffer
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.h b/media/libeffects/visualizer/aidl/VisualizerContext.h
index b03e038..9715e20 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.h
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.h
@@ -16,7 +16,6 @@
#pragma once
-#include <android-base/thread_annotations.h>
#include <audio_effects/effect_dynamicsprocessing.h>
#include <system/audio_effects/effect_visualizer.h>
@@ -79,28 +78,26 @@
// note: buffer index is stored in uint8_t
static const uint32_t kMeasurementWindowMaxSizeInBuffers = 25;
- // serialize process() and parameter setting
- std::mutex mMutex;
- Parameter::Common mCommon GUARDED_BY(mMutex);
- State mState GUARDED_BY(mMutex) = State::UNINITIALIZED;
- uint32_t mCaptureIdx GUARDED_BY(mMutex) = 0;
- uint32_t mLastCaptureIdx GUARDED_BY(mMutex) = 0;
- Visualizer::ScalingMode mScalingMode GUARDED_BY(mMutex) = Visualizer::ScalingMode::NORMALIZED;
- struct timespec mBufferUpdateTime GUARDED_BY(mMutex);
+ Parameter::Common mCommon;
+ State mState = State::UNINITIALIZED;
+ uint32_t mCaptureIdx = 0;
+ uint32_t mLastCaptureIdx = 0;
+ Visualizer::ScalingMode mScalingMode = Visualizer::ScalingMode::NORMALIZED;
+ struct timespec mBufferUpdateTime;
// capture buf with 8 bits mono PCM samples
- std::array<uint8_t, kMaxCaptureBufSize> mCaptureBuf GUARDED_BY(mMutex);
- uint32_t mDownstreamLatency GUARDED_BY(mMutex) = 0;
- int32_t mCaptureSamples GUARDED_BY(mMutex) = kMaxCaptureBufSize;
+ std::array<uint8_t, kMaxCaptureBufSize> mCaptureBuf;
+ uint32_t mDownstreamLatency = 0;
+ int32_t mCaptureSamples = kMaxCaptureBufSize;
// to avoid recomputing it every time a buffer is processed
- uint8_t mChannelCount GUARDED_BY(mMutex) = 0;
- Visualizer::MeasurementMode mMeasurementMode GUARDED_BY(mMutex) =
+ uint8_t mChannelCount = 0;
+ Visualizer::MeasurementMode mMeasurementMode =
Visualizer::MeasurementMode::NONE;
uint8_t mMeasurementWindowSizeInBuffers = kMeasurementWindowMaxSizeInBuffers;
- uint8_t mMeasurementBufferIdx GUARDED_BY(mMutex) = 0;
+ uint8_t mMeasurementBufferIdx = 0;
std::array<BufferStats, kMeasurementWindowMaxSizeInBuffers> mPastMeasurements;
void init_params();
- uint32_t getDeltaTimeMsFromUpdatedTime_l() REQUIRES(mMutex);
+ uint32_t getDeltaTimeMsFromUpdatedTime_l();
};
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libheif/OWNERS b/media/libheif/OWNERS
new file mode 100644
index 0000000..a61ad21
--- /dev/null
+++ b/media/libheif/OWNERS
@@ -0,0 +1,2 @@
+include platform/frameworks/av:/media/janitors/avic_OWNERS
+include platform/frameworks/av:/media/janitors/codec_OWNERS
\ No newline at end of file
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 590a7b7..840897f 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -223,7 +223,6 @@
"com.android.media",
],
-
srcs: ["MidiIoWrapper.cpp"],
static_libs: [
@@ -278,6 +277,10 @@
"libutils",
],
+ static_libs: [
+ "android.media.codec-aconfig-cc",
+ ],
+
include_dirs: [
"system/libhidl/transport/token/1.0/utils/include",
],
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
index 86ad997..c45c5c3 100644
--- a/media/libmedia/MediaCodecInfo.cpp
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -36,6 +36,7 @@
constexpr char MediaCodecInfo::Capabilities::FEATURE_MULTIPLE_FRAMES[];
constexpr char MediaCodecInfo::Capabilities::FEATURE_SECURE_PLAYBACK[];
constexpr char MediaCodecInfo::Capabilities::FEATURE_TUNNELED_PLAYBACK[];
+constexpr char MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE[];
void MediaCodecInfo::Capabilities::getSupportedProfileLevels(
Vector<ProfileLevel> *profileLevels) const {
diff --git a/media/libmedia/include/media/MediaCodecInfo.h b/media/libmedia/include/media/MediaCodecInfo.h
index 54f565a..88a2dc4 100644
--- a/media/libmedia/include/media/MediaCodecInfo.h
+++ b/media/libmedia/include/media/MediaCodecInfo.h
@@ -69,6 +69,7 @@
constexpr static char FEATURE_MULTIPLE_FRAMES[] = "feature-multiple-frames";
constexpr static char FEATURE_SECURE_PLAYBACK[] = "feature-secure-playback";
constexpr static char FEATURE_TUNNELED_PLAYBACK[] = "feature-tunneled-playback";
+ constexpr static char FEATURE_DETACHED_SURFACE[] = "feature-detached-surface";
/**
* Returns the supported levels for each supported profile in a target array.
diff --git a/media/libmediametrics/Android.bp b/media/libmediametrics/Android.bp
index 8a38dd7..5214dfe 100644
--- a/media/libmediametrics/Android.bp
+++ b/media/libmediametrics/Android.bp
@@ -16,8 +16,8 @@
name: "libmediametrics",
srcs: [
- "MediaMetricsItem.cpp",
"MediaMetrics.cpp",
+ "MediaMetricsItem.cpp",
],
shared_libs: [
@@ -40,8 +40,8 @@
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
cfi: true,
},
@@ -50,8 +50,8 @@
stubs: {
symbol_file: "libmediametrics.map.txt",
versions: [
- "1" ,
- ]
+ "1",
+ ],
},
header_abi_checker: {
@@ -65,7 +65,7 @@
"//frameworks/base/apex/media/framework",
"//frameworks/base/core/jni",
"//frameworks/base/media/jni",
- "//packages/modules/Media/apex/framework",
+ "//packages/modules/Media/apex/framework",
],
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index bb49b5a..bd43fe2 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -2098,9 +2098,12 @@
displayHeight,
cropLeft, cropTop);
} else {
- CHECK(inputFormat->findInt32("width", &displayWidth));
- CHECK(inputFormat->findInt32("height", &displayHeight));
-
+ if (!inputFormat->findInt32("width", &displayWidth)
+ || !inputFormat->findInt32("height", &displayHeight)) {
+ ALOGW("Either video width or video height missing, reporting 0x0!");
+ notifyListener(MEDIA_SET_VIDEO_SIZE, 0, 0);
+ return;
+ }
ALOGV("Video input format %d x %d", displayWidth, displayHeight);
}
diff --git a/media/libnbaio/Android.bp b/media/libnbaio/Android.bp
index 434ae00..158900a 100644
--- a/media/libnbaio/Android.bp
+++ b/media/libnbaio/Android.bp
@@ -15,8 +15,8 @@
"NBAIO.cpp",
],
header_libs: [
- "libaudioclient_headers",
"libaudio_system_headers",
+ "libaudioclient_headers",
],
export_header_lib_headers: [
"libaudioclient_headers",
@@ -35,8 +35,8 @@
export_include_dirs: ["include_mono"],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
}
diff --git a/media/libnblog/Android.bp b/media/libnblog/Android.bp
index 8cfece6..b4d48b0 100644
--- a/media/libnblog/Android.bp
+++ b/media/libnblog/Android.bp
@@ -35,8 +35,8 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
include_dirs: ["system/media/audio_utils/include"],
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index f9ceef2..e06efac 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -21,6 +21,8 @@
#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
#endif
+#include <android_media_codec.h>
+
#include <inttypes.h>
#include <utils/Trace.h>
@@ -7573,6 +7575,22 @@
return true;
}
+ // When ACodec receives an error event in LoadedToIdleState, it does not release the
+ // allocated buffers, which causes a gralloc buffer leak. Release those buffers first
+ // and then process the error event.
+ case OMX_EventError:
+ {
+ if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
+ mCodec->freeBuffersOnPort(kPortIndexInput);
+ }
+
+ if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
+ mCodec->freeBuffersOnPort(kPortIndexOutput);
+ }
+
+ return BaseState::onOMXEvent(event, data1, data2);
+ }
+
default:
return BaseState::onOMXEvent(event, data1, data2);
}
@@ -9314,6 +9332,12 @@
// adaptive playback is not supported
caps->removeDetail(MediaCodecInfo::Capabilities::FEATURE_ADAPTIVE_PLAYBACK);
}
+
+ // all non-tunneled video decoders support detached surface mode
+ if (android::media::codec::provider_->null_output_surface_support() &&
+ android::media::codec::provider_->null_output_surface()) {
+ caps->addDetail(MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE, 0);
+ }
}
}
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 896e021..c9a2eea 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -318,7 +318,12 @@
"aconfig_mediacodec_flags_c_lib",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
+
static_libs: [
+ "android.media.codec-aconfig-cc",
"libstagefright_esds",
"libstagefright_color_conversion",
"libyuv_static",
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 1a0bb7f..46703bb 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -48,6 +48,9 @@
static const int64_t kBufferTimeOutUs = 10000LL; // 10 msec
static const size_t kRetryCount = 100; // must be >0
static const int64_t kDefaultSampleDurationUs = 33333LL; // 33ms
+// For a codec, 0 is the highest importance; the higher the number, the lower the importance.
+// To make the thumbnail codec less important, give it a value greater than 0.
+static const int kThumbnailImportance = 1;
sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
@@ -585,6 +588,9 @@
}
}
+ // Set the importance for the thumbnail.
+ videoFormat->setInt32(KEY_IMPORTANCE, kThumbnailImportance);
+
int32_t frameRate;
if (trackMeta()->findInt32(kKeyFrameRate, &frameRate) && frameRate > 0) {
mDefaultSampleDurationUs = 1000000LL / frameRate;
@@ -902,6 +908,10 @@
videoFormat->setInt32("android._num-input-buffers", 1);
videoFormat->setInt32("android._num-output-buffers", 1);
}
+
+ // Set the importance for the thumbnail.
+ videoFormat->setInt32(KEY_IMPORTANCE, kThumbnailImportance);
+
return videoFormat;
}
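
The kThumbnailImportance comment in the FrameDecoder change above explains the importance scale (0 is the highest; larger values deprioritize a codec). As a rough, hedged illustration of how a caller could carry the same hint on a format, here is a minimal sketch; it assumes KEY_IMPORTANCE is available from MediaCodecConstants.h and that a base video format already exists, so treat it as illustrative rather than part of the patch.

```
// Minimal sketch (not part of the patch): deprioritize a decoder used for
// thumbnail extraction by tagging its format, mirroring FrameDecoder above.
#include <media/stagefright/MediaCodecConstants.h>  // KEY_IMPORTANCE (assumed location)
#include <media/stagefright/foundation/AMessage.h>

using android::sp;
using android::AMessage;

sp<AMessage> makeThumbnailFormat(const sp<AMessage> &baseVideoFormat) {
    sp<AMessage> format = baseVideoFormat->dup();
    // 0 is the highest importance; any value above 0 marks this codec as
    // less important than real-time playback codecs.
    format->setInt32(KEY_IMPORTANCE, 1 /* same value as kThumbnailImportance */);
    return format;
}
```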
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index a18dbfe..e918b5e 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -2436,47 +2436,6 @@
return OK;
}
-bool MPEG4Writer::isSampleMetadataValid(size_t trackIndex, int64_t timeUs) {
- // Track Index starts from zero, so it should be at least 1 less than size.
- if (trackIndex >= mTracks.size()) {
- ALOGE("Incorrect trackIndex %zu, mTracks->size() %zu", trackIndex, mTracks.size());
- return false;
- }
-
- List<Track *>::iterator it = mTracks.begin();
-
- // (*it) is already pointing to trackIndex 0.
- for (int i = 1; i <= trackIndex; i++) {
- it++;
- }
-
- return (*it)->isTimestampValid(timeUs);
-}
-
-bool MPEG4Writer::Track::isTimestampValid(int64_t timeUs) {
- // No timescale if HEIF
- if (mIsHeif) {
- return true;
- }
-
- // Make sure abs(timeUs) does not overflow
- if (timeUs == INT64_MIN) {
- return false;
- }
-
- // Ensure that the timeUs value does not have extremely low or high values
- // that would cause an underflow or overflow, like in the calculation -
- // mdhdDuration = (trakDurationUs * mTimeScale + 5E5) / 1E6
- if (abs(timeUs) >= (INT64_MAX - 5E5) / mTimeScale) {
- return false;
- }
- // Limit check for calculations in ctts box
- if (abs(timeUs) + kMaxCttsOffsetTimeUs >= INT64_MAX / mTimeScale) {
- return false;
- }
- return true;
-}
-
bool MPEG4Writer::Track::isExifData(
MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const {
if (!mIsHeif) {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e4f3b83..0401e82 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -30,6 +30,8 @@
#include "include/SoftwareRenderer.h"
+#include <android_media_codec.h>
+
#include <android/api-level.h>
#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
@@ -3017,6 +3019,13 @@
return PostAndAwaitResponse(msg, &response);
}
+status_t MediaCodec::detachOutputSurface() {
+ sp<AMessage> msg = new AMessage(kWhatDetachSurface, this);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
status_t MediaCodec::setSurface(const sp<Surface> &surface) {
sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
msg->setObject("surface", surface);
@@ -3966,6 +3975,15 @@
switch (mState) {
case INITIALIZING:
{
+ // A resource error during the INITIALIZING state needs to be logged
+ // through metrics, so that such occurrences can be tracked.
+ if (isResourceError(err)) {
+ mediametrics_setInt32(mMetricsHandle, kCodecError, err);
+ mediametrics_setCString(mMetricsHandle, kCodecErrorState,
+ stateString(mState).c_str());
+ flushMediametrics();
+ initMediametrics();
+ }
setState(UNINITIALIZED);
break;
}
@@ -4676,7 +4694,7 @@
}
mResourceManagerProxy->removeClient();
- mReleaseSurface.reset();
+ mDetachedSurface.reset();
if (mReplyID != nullptr) {
postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
@@ -4849,6 +4867,23 @@
mFlags |= kFlagPushBlankBuffersOnShutdown;
}
+ uint32_t flags;
+ CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+ if (android::media::codec::provider_->null_output_surface_support()) {
+ if (obj == nullptr
+ && (flags & CONFIGURE_FLAG_DETACHED_SURFACE)
+ && !(flags & CONFIGURE_FLAG_ENCODE)) {
+ sp<Surface> surface = getOrCreateDetachedSurface();
+ if (surface == nullptr) {
+ mErrorLog.log(
+ LOG_TAG, "Detached surface mode is not supported by this codec");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ }
+ obj = surface;
+ }
+ }
+
if (obj != NULL) {
if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
// allow frame dropping by surface by default
@@ -4872,8 +4907,6 @@
mApiUsageMetrics.isUsingOutputSurface = true;
- uint32_t flags;
- CHECK(msg->findInt32("flags", (int32_t *)&flags));
if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL ||
flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
if (!(mFlags & kFlagIsAsync)) {
@@ -4888,8 +4921,8 @@
if (flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
mFlags |= kFlagUseCryptoAsync;
if ((mFlags & kFlagUseBlockModel)) {
- ALOGW("CrytoAsync not yet enabled for block model,\
- falling back to normal");
+ ALOGW("CrytoAsync not yet enabled for block model, "
+ "falling back to normal");
}
}
}
@@ -4946,8 +4979,7 @@
mDescrambler = static_cast<IDescrambler *>(descrambler);
mBufferChannel->setDescrambler(mDescrambler);
- if ((mFlags & kFlagUseCryptoAsync) &&
- mCrypto && (mDomain == DOMAIN_VIDEO)) {
+ if ((mFlags & kFlagUseCryptoAsync) && mCrypto) {
// set kFlagUseCryptoAsync but do-not use this for block model
// this is to propagate the error in onCryptoError()
// TODO (b/274628160): Enable Use of CONFIG_FLAG_USE_CRYPTO_ASYNC
@@ -4994,6 +5026,23 @@
break;
}
+ case kWhatDetachSurface:
+ {
+ // detaching the surface is equivalent to setSurface(mDetachedSurface)
+ sp<Surface> surface = getOrCreateDetachedSurface();
+
+ if (surface == nullptr) {
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ mErrorLog.log(LOG_TAG, "Detaching surface is not supported by the codec.");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ }
+
+ msg->setObject("surface", surface);
+ }
+ [[fallthrough]];
+
case kWhatSetSurface:
{
sp<AReplyToken> replyID;
@@ -5011,14 +5060,17 @@
sp<Surface> surface = static_cast<Surface *>(obj.get());
if (mSurface == NULL) {
// do not support setting surface if it was not set
- mErrorLog.log(LOG_TAG,
- "Cannot set surface if the codec is not configured with "
- "a surface already");
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Cannot %s surface if the codec is not configured with "
+ "a surface already",
+ msg->what() == kWhatDetachSurface ? "detach" : "set"));
err = INVALID_OPERATION;
} else if (obj == NULL) {
// do not support unsetting surface
mErrorLog.log(LOG_TAG, "Unsetting surface is not supported");
err = BAD_VALUE;
+ } else if (android::media::codec::provider_->null_output_surface_support()) {
+ err = handleSetSurface(surface, true /* callCodec */);
} else {
uint32_t generation;
err = connectToSurface(surface, &generation);
@@ -5052,7 +5104,8 @@
default:
mErrorLog.log(LOG_TAG, base::StringPrintf(
- "setSurface() is valid only at Executing states; currently %s",
+ "%sSurface() is valid only at Executing states; currently %s",
+ msg->what() == kWhatDetachSurface ? "detach" : "set",
apiStateString().c_str()));
err = INVALID_OPERATION;
break;
@@ -5273,30 +5326,40 @@
bool forceSync = false;
if (asyncNotify != nullptr && mSurface != NULL) {
- if (!mReleaseSurface) {
- uint64_t usage = 0;
- if (mSurface->getConsumerUsage(&usage) != OK) {
- usage = 0;
- }
- mReleaseSurface.reset(new ReleaseSurface(usage));
- }
- if (mSurface != mReleaseSurface->getSurface()) {
- uint32_t generation;
- status_t err = connectToSurface(mReleaseSurface->getSurface(), &generation);
- ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
- if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
- err = mCodec->setSurface(mReleaseSurface->getSurface(), generation);
- ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
- }
- if (err == OK) {
- (void)disconnectFromSurface();
- mSurface = mReleaseSurface->getSurface();
- mSurfaceGeneration = generation;
- } else {
- // We were not able to switch the surface, so force
+ if (android::media::codec::provider_->null_output_surface_support()) {
+ if (handleSetSurface(getOrCreateDetachedSurface(), true /* callCodec */,
+ true /* onShutDown */) != OK) {
+ // We were not able to detach the surface, so force
// synchronous release.
forceSync = true;
}
+ } else {
+ if (!mDetachedSurface) {
+ uint64_t usage = 0;
+ if (mSurface->getConsumerUsage(&usage) != OK) {
+ usage = 0;
+ }
+ mDetachedSurface.reset(new ReleaseSurface(usage));
+ }
+ if (mSurface != mDetachedSurface->getSurface()) {
+ uint32_t generation;
+ status_t err =
+ connectToSurface(mDetachedSurface->getSurface(), &generation);
+ ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
+ if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
+ err = mCodec->setSurface(mDetachedSurface->getSurface(), generation);
+ ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
+ }
+ if (err == OK) {
+ (void)disconnectFromSurface();
+ mSurface = mDetachedSurface->getSurface();
+ mSurfaceGeneration = generation;
+ } else {
+ // We were not able to switch the surface, so force
+ // synchronous release.
+ forceSync = true;
+ }
+ }
}
}
@@ -5997,6 +6060,10 @@
mErrorLog.clear();
}
+ if (android::media::codec::provider_->set_state_early()) {
+ mState = newState;
+ }
+
if (newState == UNINITIALIZED) {
// return any straggling buffers, e.g. if we got here on an error
returnBuffersToCodec();
@@ -6007,7 +6074,9 @@
mFlags &= ~kFlagSawMediaServerDie;
}
- mState = newState;
+ if (!android::media::codec::provider_->set_state_early()) {
+ mState = newState;
+ }
if (mBatteryChecker != nullptr) {
mBatteryChecker->setExecuting(isExecuting());
@@ -6208,15 +6277,8 @@
cryptoInfo->setInt32("secure", mFlags & kFlagIsSecure);
sp<RefBase> obj;
if (msg->findObject("cryptoInfos", &obj)) {
- sp<CryptoInfosWrapper> infos{(CryptoInfosWrapper*)obj.get()};
- sp<CryptoInfosWrapper> asyncInfos{
- new CryptoInfosWrapper(std::vector<std::unique_ptr<CodecCryptoInfo>>())};
- for (std::unique_ptr<CodecCryptoInfo> &info : infos->value) {
- if (info) {
- asyncInfos->value.emplace_back(new CryptoAsync::CryptoAsyncInfo(info));
- }
- }
- buffer->meta()->setObject("cryptoInfos", asyncInfos);
+ // this object is standalone when created (no copy required here)
+ buffer->meta()->setObject("cryptoInfos", obj);
} else {
size_t key_len = (key != nullptr)? 16 : 0;
size_t iv_len = (iv != nullptr)? 16 : 0;
@@ -6355,7 +6417,6 @@
}
}
if (mCryptoAsync) {
- // TODO b/316565675 - enable async path for audio
// prepare a message and enqueue
sp<AMessage> cryptoInfo = new AMessage();
buildCryptoInfoAMessage(cryptoInfo, CryptoAsync::kActionDecrypt);
@@ -6591,9 +6652,9 @@
CHECK_EQ(info, &mPortBuffers[portIndex][index]);
availBuffers->erase(availBuffers->begin());
- CHECK(!info->mOwnedByClient);
{
Mutex::Autolock al(mBufferLock);
+ CHECK(!info->mOwnedByClient);
info->mOwnedByClient = true;
// set image-data
@@ -6612,6 +6673,23 @@
return index;
}
+sp<Surface> MediaCodec::getOrCreateDetachedSurface() {
+ if (mDomain != DOMAIN_VIDEO || (mFlags & kFlagIsEncoder)) {
+ return nullptr;
+ }
+
+ if (!mDetachedSurface) {
+ uint64_t usage = 0;
+ if (!mSurface || mSurface->getConsumerUsage(&usage) != OK) {
+ // TODO: should we use a/the default consumer usage?
+ usage = 0;
+ }
+ mDetachedSurface.reset(new ReleaseSurface(usage));
+ }
+
+ return mDetachedSurface->getSurface();
+}
+
status_t MediaCodec::connectToSurface(const sp<Surface> &surface, uint32_t *generation) {
status_t err = OK;
if (surface != NULL) {
@@ -6685,7 +6763,56 @@
return err;
}
+status_t MediaCodec::handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutDown) {
+ uint32_t generation;
+ status_t err = OK;
+ if (surface != nullptr) {
+ err = connectToSurface(surface, &generation);
+ if (err == ALREADY_EXISTS) {
+ // reconnecting to same surface
+ return OK;
+ }
+
+ if (err == OK && callCodec) {
+ if (mFlags & kFlagUsesSoftwareRenderer) {
+ if (mSoftRenderer != NULL
+ && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
+ pushBlankBuffersToNativeWindow(mSurface.get());
+ }
+ // do not create a new software renderer on shutdown (release)
+ // as it will not be used anyway
+ if (!onShutDown) {
+ surface->setDequeueTimeout(-1);
+ mSoftRenderer = new SoftwareRenderer(surface);
+ // TODO: check if this was successful
+ }
+ } else {
+ err = mCodec->setSurface(surface, generation);
+ }
+
+ mReliabilityContextMetrics.setOutputSurfaceCount++;
+ }
+ }
+
+ if (err == OK) {
+ if (mSurface != NULL) {
+ (void)disconnectFromSurface();
+ }
+
+ if (surface != NULL) {
+ mSurface = surface;
+ mSurfaceGeneration = generation;
+ }
+ }
+
+ return err;
+}
+
status_t MediaCodec::handleSetSurface(const sp<Surface> &surface) {
+ if (android::media::codec::provider_->null_output_surface_support()) {
+ return handleSetSurface(surface, false /* callCodec */);
+ }
+
status_t err = OK;
if (mSurface != NULL) {
(void)disconnectFromSurface();
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 604dcb0..714e312 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -111,8 +111,9 @@
}
}
- int finalUsage = usage | consumerUsage;
- ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = %#x", usage, consumerUsage, finalUsage);
+ uint64_t finalUsage = (uint32_t) usage | (uint32_t) consumerUsage;
+ ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = 0x%" PRIx64,
+ usage, consumerUsage, finalUsage);
err = native_window_set_usage(nativeWindow, finalUsage);
if (err != NO_ERROR) {
ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
@@ -126,7 +127,7 @@
return err;
}
- ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage %#x",
+ ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage 0x%" PRIx64,
nativeWindow, width, height, format, rotation, finalUsage);
return NO_ERROR;
}
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index dd6da15..b7efbce 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -20,7 +20,6 @@
{
"exclude-annotation": "android.platform.test.annotations.RequiresDevice"
},
- // TODO: b/149314419
{
"exclude-filter": "android.media.audio.cts.AudioPlaybackCaptureTest"
},
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
index eb9ac0f..bf29b1d 100644
--- a/media/libstagefright/VideoRenderQualityTracker.cpp
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -302,13 +302,6 @@
mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
}
- // Now that a frame has been rendered, the previously skipped frames can be processed as skipped
- // frames since the app is not skipping them to terminate playback.
- for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
- processMetricsForSkippedFrame(contentTimeUs);
- }
- mPendingSkippedFrameContentTimeUsList = {};
-
// We can render a pending queued frame if it's the last frame of the video, so release it
// immediately.
if (contentTimeUs == mTunnelFrameQueuedContentTimeUs && mTunnelFrameQueuedContentTimeUs != -1) {
@@ -332,9 +325,25 @@
(long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs);
break;
}
+ // Process all skipped frames before the dropped frame.
+ while (!mPendingSkippedFrameContentTimeUsList.empty()) {
+ if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
+ break;
+ }
+ processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
+ mPendingSkippedFrameContentTimeUsList.pop_front();
+ }
processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs,
nextExpectedFrame.desiredRenderTimeUs);
}
+ // Process all skipped frames before the rendered frame.
+ while (!mPendingSkippedFrameContentTimeUsList.empty()) {
+ if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
+ break;
+ }
+ processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
+ mPendingSkippedFrameContentTimeUsList.pop_front();
+ }
processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs,
freezeEventOut, judderEventOut);
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index dc7d787..d50bc1e 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -139,6 +139,7 @@
<Limit name="bitrate" range="1-40000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.hevc.decoder" type="video/hevc" variant="slow-cpu,!slow-cpu">
@@ -160,6 +161,7 @@
<Limit name="bitrate" range="1-5000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.vp8.decoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
@@ -178,6 +180,7 @@
<Limit name="bitrate" range="1-40000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.vp9.decoder" type="video/x-vnd.on2.vp9" variant="slow-cpu,!slow-cpu">
@@ -197,6 +200,7 @@
<Limit name="bitrate" range="1-5000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
@@ -216,6 +220,8 @@
<Limit name="bitrate" range="1-5000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
+ <Feature name="low-latency" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
@@ -234,6 +240,8 @@
<Limit name="bitrate" range="1-5000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
+ <Feature name="low-latency" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.mpeg2.decoder" type="video/mpeg2" domain="tv">
@@ -335,7 +343,7 @@
</MediaCodec>
<MediaCodec name="c2.android.vp8.encoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
<Alias name="OMX.google.vp8.encoder" />
- <Limit name="alignment" value="2x2" />
+ <Limit name="alignment" value="1x1" />
<Limit name="block-size" value="16x16" />
<Variant name="!slow-cpu">
<Limit name="size" min="2x2" max="2048x2048" />
@@ -351,6 +359,7 @@
<Limit name="bitrate" range="1-20000000" />
</Variant>
<Feature name="bitrate-modes" value="VBR,CBR" />
+ <Feature name="qp-bounds" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.hevc.encoder" type="video/hevc" variant="!slow-cpu">
@@ -365,22 +374,24 @@
<Limit name="complexity" range="0-10" default="0" />
<Limit name="quality" range="0-100" default="80" />
<Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+ <Feature name="qp-bounds" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.vp9.encoder" type="video/x-vnd.on2.vp9" variant="!slow-cpu">
<Alias name="OMX.google.vp9.encoder" />
<!-- profiles and levels: ProfileMain : Level_Version0-3 -->
<Limit name="size" min="2x2" max="2048x2048" />
- <Limit name="alignment" value="2x2" />
+ <Limit name="alignment" value="1x1" />
<Limit name="block-size" value="16x16" />
<!-- 2016 devices can encode at about 8fps at this block count -->
<Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
<Limit name="bitrate" range="1-40000000" />
<Feature name="bitrate-modes" value="VBR,CBR" />
+ <Feature name="qp-bounds" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.av1.encoder" type="video/av01" enabled="false" minsdk="34" variant="slow-cpu,!slow-cpu">
- <Limit name="alignment" value="2x2" />
+ <Limit name="alignment" value="1x1" />
<Limit name="block-size" value="16x16" />
<Variant name="!slow-cpu">
<Limit name="size" min="2x2" max="1920x1920" />
@@ -395,6 +406,7 @@
<Limit name="quality" range="0-100" default="80" />
<Limit name="complexity" range="0-5" default="0" />
<Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+ <Feature name="qp-bounds" />
<Attribute name="software-codec" />
</MediaCodec>
</Encoders>
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 1ff8acf..054a4b8 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -77,9 +77,6 @@
virtual void setStartTimeOffsetMs(int ms) { mStartTimeOffsetMs = ms; }
virtual int32_t getStartTimeOffsetMs() const { return mStartTimeOffsetMs; }
virtual status_t setNextFd(int fd);
- // Returns true if the timestamp is valid which is compatible with the Mpeg4.
- // Note that this overloads that method in the base class.
- bool isSampleMetadataValid(size_t trackIndex, int64_t timeUs) override;
protected:
virtual ~MPEG4Writer();
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 9ecb12e..7169b1e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -96,6 +96,7 @@
CONFIGURE_FLAG_ENCODE = 1,
CONFIGURE_FLAG_USE_BLOCK_MODEL = 2,
CONFIGURE_FLAG_USE_CRYPTO_ASYNC = 4,
+ CONFIGURE_FLAG_DETACHED_SURFACE = 8,
};
enum BufferFlags {
@@ -274,6 +275,8 @@
status_t setSurface(const sp<Surface> &nativeWindow);
+ status_t detachOutputSurface();
+
status_t requestIDRFrame();
// Notification will be posted once there "is something to do", i.e.
@@ -368,6 +371,7 @@
kWhatInit = 'init',
kWhatConfigure = 'conf',
kWhatSetSurface = 'sSur',
+ kWhatDetachSurface = 'dSur',
kWhatCreateInputSurface = 'cisf',
kWhatSetInputSurface = 'sisf',
kWhatStart = 'strt',
@@ -474,6 +478,10 @@
uint32_t mSurfaceGeneration = 0;
SoftwareRenderer *mSoftRenderer;
+ // Get the detached BufferQueue surface for a video decoder, and create it
+ // if it does not yet exist.
+ sp<Surface> getOrCreateDetachedSurface();
+
Mutex mMetricsLock;
mediametrics_handle_t mMetricsHandle = 0;
bool mMetricsToUpload = false;
@@ -642,6 +650,13 @@
status_t queueCSDInputBuffer(size_t bufferIndex);
status_t handleSetSurface(const sp<Surface> &surface);
+
+ // Common reimplementation of changing the output surface.
+ // Handles setting a null surface, which is used during configure and init.
+ // Set |callCodec| to true if the codec needs to be notified (e.g. in the Executing state).
+ // Setting |onShutdown| to true avoids extra work when this is used for detaching on
+ // delayed release.
+ status_t handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutdown = false);
status_t connectToSurface(const sp<Surface> &surface, uint32_t *generation);
status_t disconnectFromSurface();
@@ -714,7 +729,7 @@
sp<AMessage> mMsgPollForRenderedBuffers;
class ReleaseSurface;
- std::unique_ptr<ReleaseSurface> mReleaseSurface;
+ std::unique_ptr<ReleaseSurface> mDetachedSurface;
std::list<sp<AMessage>> mLeftover;
status_t handleLeftover(size_t index);
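
The comments on getOrCreateDetachedSurface() and the new handleSetSurface() overload above describe how the detached-surface path is meant to behave. The following is a hedged sketch of the intended caller-side flow, using only names added in this diff (CONFIGURE_FLAG_DETACHED_SURFACE, setSurface, detachOutputSurface); the codec, format and display surface are assumed to be created elsewhere, and error handling is abbreviated.

```
// Sketch only (not part of the patch): expected call pattern for
// detached-surface mode on a video decoder.
using namespace android;

status_t runWithDetachableSurface(const sp<MediaCodec> &codec,
                                  const sp<AMessage> &format,
                                  const sp<Surface> &displaySurface) {
    // Configure with no surface but request detached-surface mode, so
    // MediaCodec substitutes its internal placeholder surface.
    status_t err = codec->configure(format, /* surface */ nullptr,
                                    /* crypto */ nullptr,
                                    MediaCodec::CONFIGURE_FLAG_DETACHED_SURFACE);
    if (err != OK) {
        return err;
    }

    err = codec->start();
    if (err != OK) {
        return err;
    }

    // Attach a real surface once output should become visible...
    err = codec->setSurface(displaySurface);
    if (err != OK) {
        return err;
    }

    // ...and detach it again later without tearing the codec down.
    return codec->detachOutputSurface();
}
```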
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 24ac2e8..72785d5 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -893,6 +893,8 @@
inline constexpr char PARAMETER_KEY_SUSPEND_TIME[] = "drop-start-time-us";
inline constexpr char PARAMETER_KEY_TUNNEL_PEEK[] = "tunnel-peek";
inline constexpr char PARAMETER_KEY_VIDEO_BITRATE[] = "video-bitrate";
+inline constexpr char PARAMETER_KEY_QP_OFFSET_MAP[] = "qp-offset-map";
+inline constexpr char PARAMETER_KEY_QP_OFFSET_RECTS[] = "qp-offset-rects";
}
diff --git a/media/libstagefright/include/media/stagefright/PersistentSurface.h b/media/libstagefright/include/media/stagefright/PersistentSurface.h
index f4943c3..554ee43 100644
--- a/media/libstagefright/include/media/stagefright/PersistentSurface.h
+++ b/media/libstagefright/include/media/stagefright/PersistentSurface.h
@@ -18,6 +18,8 @@
#define PERSISTENT_SURFACE_H_
+#include <android/binder_auto_utils.h>
+#include <android/binder_libbinder.h>
#include <binder/Parcel.h>
#include <hidl/HidlSupport.h>
#include <hidl/HybridInterface.h>
@@ -29,24 +31,43 @@
struct PersistentSurface : public RefBase {
PersistentSurface() {}
- // create a persistent surface
+ // create a persistent surface in HIDL
PersistentSurface(
const sp<IGraphicBufferProducer>& bufferProducer,
const sp<hidl::base::V1_0::IBase>& hidlTarget) :
mBufferProducer(bufferProducer),
- mHidlTarget(hidlTarget) { }
+ mHidlTarget(hidlTarget),
+ mAidlTarget(nullptr),
+ mAidl(false) { }
+
+ // create a persistent surface in AIDL
+ PersistentSurface(
+ const sp<IGraphicBufferProducer>& bufferProducer,
+ const ::ndk::SpAIBinder& aidlTarget) :
+ mBufferProducer(bufferProducer),
+ mHidlTarget(nullptr),
+ mAidlTarget(aidlTarget),
+ mAidl(true) { }
sp<IGraphicBufferProducer> getBufferProducer() const {
return mBufferProducer;
}
+ bool isTargetAidl() const {
+ return mAidl;
+ }
+
sp<hidl::base::V1_0::IBase> getHidlTarget() const {
- return mHidlTarget;
+ return mAidl ? nullptr : mHidlTarget;
+ }
+
+ ::ndk::SpAIBinder getAidlTarget() const {
+ return mAidl ? mAidlTarget : nullptr;
}
status_t writeToParcel(Parcel *parcel) const {
parcel->writeStrongBinder(IInterface::asBinder(mBufferProducer));
- // write hidl target
+ // write hidl target if available
if (mHidlTarget != nullptr) {
HalToken token;
bool result = createHalToken(mHidlTarget, &token);
@@ -57,6 +78,22 @@
} else {
parcel->writeBool(false);
}
+ // write aidl target if available
+ if (mAidl) {
+ AIBinder *binder = mAidlTarget.get();
+ if (binder != nullptr) {
+ ::android::sp<::android::IBinder> intf =
+ AIBinder_toPlatformBinder(binder);
+ if (intf) {
+ parcel->writeBool(true);
+ parcel->writeStrongBinder(intf);
+ } else {
+ parcel->writeBool(false);
+ }
+ } else {
+ parcel->writeBool(false);
+ }
+ }
return NO_ERROR;
}
@@ -65,21 +102,43 @@
parcel->readStrongBinder());
// read hidl target
bool haveHidlTarget = parcel->readBool();
+ mAidl = false;
if (haveHidlTarget) {
std::vector<uint8_t> tokenVector;
parcel->readByteVector(&tokenVector);
HalToken token = HalToken(tokenVector);
mHidlTarget = retrieveHalInterface(token);
deleteHalToken(token);
+ return NO_ERROR;
} else {
mHidlTarget.clear();
}
+
+ // read aidl target
+ bool haveAidlTarget = false;
+ if (parcel->readBool(&haveAidlTarget) != NO_ERROR) {
+ return NO_ERROR;
+ }
+ mAidl = true;
+ if (haveAidlTarget) {
+ ::android::sp<::android::IBinder> intf = parcel->readStrongBinder();
+ AIBinder *ndkBinder = AIBinder_fromPlatformBinder(intf);
+ if (ndkBinder) {
+ mAidlTarget.set(ndkBinder);
+ } else {
+ mAidlTarget.set(nullptr);
+ }
+ } else {
+ mAidlTarget.set(nullptr);
+ }
return NO_ERROR;
}
private:
sp<IGraphicBufferProducer> mBufferProducer;
sp<hidl::base::V1_0::IBase> mHidlTarget;
+ ::ndk::SpAIBinder mAidlTarget;
+ bool mAidl;
DISALLOW_EVIL_CONSTRUCTORS(PersistentSurface);
};
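
The PersistentSurface changes above add an AIDL code path alongside the existing HIDL one. As a brief, hedged illustration of the new constructor and accessors (nothing here beyond what the diff declares; the buffer producer and binder are assumed to come from the codec HAL):

```
// Sketch only (not part of the patch): building an AIDL-backed
// PersistentSurface and inspecting it.
using namespace android;

sp<PersistentSurface> makeAidlPersistentSurface(
        const sp<IGraphicBufferProducer> &bufferProducer,
        const ::ndk::SpAIBinder &aidlBinder) {
    sp<PersistentSurface> ps = new PersistentSurface(bufferProducer, aidlBinder);

    if (ps->isTargetAidl()) {
        // For AIDL targets the HIDL accessor intentionally returns nullptr,
        // so callers should branch on isTargetAidl() before picking one.
        ::ndk::SpAIBinder target = ps->getAidlTarget();
        (void)target;
    }
    return ps;
}
```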
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 3598e8d..22b13f6 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -118,3 +118,11 @@
local_include_dirs: ["include"],
export_include_dirs: ["include"],
}
+
+cc_library_headers {
+ name: "librenderfright_gl_headers",
+ export_include_dirs: ["gl"],
+ visibility: [
+ "//frameworks/av/media/libstagefright/renderfright/fuzzer:__subpackages__",
+ ],
+}
diff --git a/media/libstagefright/renderfright/fuzzer/Android.bp b/media/libstagefright/renderfright/fuzzer/Android.bp
new file mode 100644
index 0000000..574e49f
--- /dev/null
+++ b/media/libstagefright/renderfright/fuzzer/Android.bp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+ default_team: "trendy_team_android_media_codec_framework",
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
+cc_fuzz {
+ name: "libstagefright_renderfright_fuzzer",
+ srcs: [
+ "libstagefright_renderfright_fuzzer.cpp",
+ ],
+ static_libs: [
+ "librenderfright",
+ ],
+ header_libs: [
+ "librenderfright_gl_headers",
+ ],
+ shared_libs: [
+ "libcutils",
+ "libgui",
+ "liblog",
+ "libutils",
+ "libEGL",
+ "libGLESv1_CM",
+ "libGLESv2",
+ "libGLESv3",
+ "libui",
+ "libbase",
+ "libprocessgroup",
+ "libsync",
+ ],
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ hotlists: ["4593311"],
+ description: "The fuzzer targets the APIs of librenderfright",
+ vector: "local_no_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
+ },
+}
diff --git a/media/libstagefright/renderfright/fuzzer/README.md b/media/libstagefright/renderfright/fuzzer/README.md
new file mode 100644
index 0000000..742bfdc
--- /dev/null
+++ b/media/libstagefright/renderfright/fuzzer/README.md
@@ -0,0 +1,33 @@
+# Fuzzer for libstagefright_renderfright
+
+RenderFright supports the following parameters:
+1. SetContextPriority (parameter name: "kSetContextPriority")
+2. SetRenderEngineType (parameter name: "kSetRenderEngineType")
+3. CleanupMode (parameter name: "kCleanupMode")
+4. DataSpace (parameter name: "kDataSpace")
+5. ReadBufferUsage (parameter name: "kReadBufferUsage")
+6. WriteBufferUsage (parameter name: "kWriteBufferUsage")
+7. RenderBufferUsage (parameter name: "kRenderBufferUsage")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`kSetContextPriority`| 0. `RenderEngine::ContextPriority::LOW`<br/>1. `RenderEngine::ContextPriority::MEDIUM`<br/>2. `RenderEngine::ContextPriority::HIGH` |Value obtained from FuzzedDataProvider|
+|`kSetRenderEngineType`| 0. `RenderEngine::RenderEngineType::GLES`<br/>1. `RenderEngine::RenderEngineType::THREADED`|Value obtained from FuzzedDataProvider|
+|`kCleanupMode`| 0. `RenderEngine::CleanupMode::CLEAN_OUTPUT_RESOURCES`<br/>1. `RenderEngine::CleanupMode::CLEAN_ALL`|Value obtained from FuzzedDataProvider|
+|`kDataSpace`| 0. `ui::Dataspace::UNKNOWN`<br/>1. `ui::Dataspace::ARBITRARY`<br/>2. `ui::Dataspace::STANDARD_SHIFT`<br/>3. `ui::Dataspace::STANDARD_MASK`<br/>4. `ui::Dataspace::STANDARD_UNSPECIFIED`<br/>5. `ui::Dataspace::STANDARD_BT709`<br/>6. `ui::Dataspace::STANDARD_BT601_625`<br/>7. `ui::Dataspace::STANDARD_BT601_625_UNADJUSTED`<br/>8. `ui::Dataspace::STANDARD_BT601_525`<br/>9. `ui::Dataspace::STANDARD_BT601_525_UNADJUSTED`<br/>10. `ui::Dataspace::STANDARD_BT2020`<br/>11. `ui::Dataspace::STANDARD_BT2020_CONSTANT_LUMINANCE`<br/>12. `ui::Dataspace::STANDARD_BT470M`<br/>13. `ui::Dataspace::STANDARD_FILM`<br/>14. `ui::Dataspace::STANDARD_DCI_P3`<br/>15. `ui::Dataspace::STANDARD_ADOBE_RGB`<br/>16. `ui::Dataspace::TRANSFER_SHIFT`<br/>17. `ui::Dataspace::TRANSFER_MASK`<br/>18. `ui::Dataspace::TRANSFER_UNSPECIFIED`<br/>19. `ui::Dataspace::TRANSFER_LINEAR`<br/>20. `ui::Dataspace::TRANSFER_SRGB`<br/>21. `ui::Dataspace::TRANSFER_SMPTE_170M`<br/>22. `ui::Dataspace::TRANSFER_GAMMA2_2`<br/>23. `ui::Dataspace::TRANSFER_GAMMA2_6`<br/>24. `ui::Dataspace::TRANSFER_GAMMA2_8`<br/>25. `ui::Dataspace::TRANSFER_ST2084`<br/>26. `ui::Dataspace::TRANSFER_HLG`<br/>27. `ui::Dataspace::RANGE_SHIFT`<br/>28. `ui::Dataspace::RANGE_MASK`<br/>29. `ui::Dataspace::RANGE_UNSPECIFIED`<br/>30. `ui::Dataspace::RANGE_FULL`<br/>31. `ui::Dataspace::RANGE_LIMITED`<br/>32. `ui::Dataspace::RANGE_EXTENDED`<br/>33. `ui::Dataspace::SRGB_LINEAR`<br/>34. `ui::Dataspace::V0_SRGB_LINEAR`<br/>35. `ui::Dataspace::V0_SCRGB_LINEAR`<br/>36. `ui::Dataspace::SRGB`<br/>37. `ui::Dataspace::V0_SRGB`<br/>38. `ui::Dataspace::V0_SCRGB`<br/>39. `ui::Dataspace::JFIF`<br/>40. `ui::Dataspace::V0_JFIF`<br/>41. `ui::Dataspace::BT601_625`<br/>42. `ui::Dataspace::V0_BT601_625`<br/>43. `ui::Dataspace::BT601_525`<br/>44. `ui::Dataspace::V0_BT601_525`<br/>45. `ui::Dataspace::BT709`<br/>46. `ui::Dataspace::V0_BT709`<br/>47. `ui::Dataspace::DCI_P3_LINEAR`<br/>48. `ui::Dataspace::DCI_P3`<br/>49. `ui::Dataspace::DISPLAY_P3_LINEAR`<br/>50. `ui::Dataspace::DISPLAY_P3`<br/>51. `ui::Dataspace::ADOBE_RGB`<br/>52. `ui::Dataspace::BT2020_LINEAR`<br/>53. `ui::Dataspace::BT2020`<br/>54. `ui::Dataspace::BT2020_PQ`<br/>55. `ui::Dataspace::DEPTH`<br/>56. `ui::Dataspace::SENSOR`<br/>57. `ui::Dataspace::BT2020_ITU`<br/>58. `ui::Dataspace::BT2020_ITU_PQ`<br/>59. `ui::Dataspace::BT2020_ITU_HLG`<br/>60. `ui::Dataspace::BT2020_HLG`<br/>61. `ui::Dataspace::DISPLAY_BT2020`<br/>62. `ui::Dataspace::DYNAMIC_DEPTH`<br/>63. `ui::Dataspace::JPEG_APP_SEGMENTS`<br/>64. `ui::Dataspace::HEIF`|Value obtained from FuzzedDataProvider|
+|`kReadBufferUsage`| 0. `GRALLOC_USAGE_SW_READ_NEVER`<br/>1. `GRALLOC_USAGE_SW_READ_RARELY`<br/>2. `GRALLOC_USAGE_SW_READ_OFTEN`<br/>3. `GRALLOC_USAGE_SW_READ_MASK`|Value obtained from FuzzedDataProvider|
+|`kWriteBufferUsage`| 0. `GRALLOC_USAGE_SW_WRITE_NEVER`<br/>1. `GRALLOC_USAGE_SW_WRITE_RARELY`<br/>2. `GRALLOC_USAGE_SW_WRITE_OFTEN`<br/>3. `GRALLOC_USAGE_SW_WRITE_MASK`|Value obtained from FuzzedDataProvider|
+|`kRenderBufferUsage`| 0. `GRALLOC_USAGE_HW_TEXTURE`<br/>1. `GRALLOC_USAGE_HW_RENDER`<br/>2. `GRALLOC_USAGE_HW_2D`<br/>3. `GRALLOC_USAGE_HW_COMPOSER`<br/>4. `GRALLOC_USAGE_HW_FB`<br/>5. `GRALLOC_USAGE_EXTERNAL_DISP`<br/>6. `GRALLOC_USAGE_PROTECTED`<br/>7. `GRALLOC_USAGE_CURSOR`<br/>8. `GRALLOC_USAGE_HW_VIDEO_ENCODER`<br/>9. `GRALLOC_USAGE_HW_CAMERA_WRITE`<br/>10. `GRALLOC_USAGE_HW_CAMERA_READ`<br/>11. `GRALLOC_USAGE_HW_CAMERA_ZSL`<br/>12. `GRALLOC_USAGE_HW_CAMERA_MASK`<br/>13. `GRALLOC_USAGE_HW_MASK`<br/>14. `GRALLOC_USAGE_RENDERSCRIPT`<br/>15. `GRALLOC_USAGE_FOREIGN_BUFFERS`<br/>16. `GRALLOC_USAGE_HW_IMAGE_ENCODER`|Value obtained from FuzzedDataProvider|
+
+
+
+#### Steps to run
+1. Build the fuzzer
+```
+ $ mm -j$(nproc) libstagefright_renderfright_fuzzer
+```
+2. Run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/libstagefright_renderfright_fuzzer/libstagefright_renderfright_fuzzer
+```
diff --git a/media/libstagefright/renderfright/fuzzer/libstagefright_renderfright_fuzzer.cpp b/media/libstagefright/renderfright/fuzzer/libstagefright_renderfright_fuzzer.cpp
new file mode 100644
index 0000000..b0721e0
--- /dev/null
+++ b/media/libstagefright/renderfright/fuzzer/libstagefright_renderfright_fuzzer.cpp
@@ -0,0 +1,297 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <GLESRenderEngine.h>
+#include <GLFramebuffer.h>
+#include <GLImage.h>
+#include <Program.h>
+#include <ProgramCache.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <renderengine/RenderEngine.h>
+
+using namespace android::renderengine;
+using namespace android;
+
+static constexpr int32_t kMinRenderAPI = 0;
+static constexpr int32_t kMaxRenderAPI = 8;
+static constexpr int32_t kMaxTextureCount = 100;
+static constexpr int32_t KMaxDisplayWidth = 3840;
+static constexpr int32_t KMaxDisplayHeight = 2160;
+static constexpr int32_t kMinPixelFormat = 1;
+static constexpr int32_t kMaxPixelFormat = 55;
+static constexpr int32_t kMaxRenderLayer = 5;
+
+static constexpr ui::Dataspace kDataSpace[] = {
+ ui::Dataspace::UNKNOWN,
+ ui::Dataspace::ARBITRARY,
+ ui::Dataspace::STANDARD_SHIFT,
+ ui::Dataspace::STANDARD_MASK,
+ ui::Dataspace::STANDARD_UNSPECIFIED,
+ ui::Dataspace::STANDARD_BT709,
+ ui::Dataspace::STANDARD_BT601_625,
+ ui::Dataspace::STANDARD_BT601_625_UNADJUSTED,
+ ui::Dataspace::STANDARD_BT601_525,
+ ui::Dataspace::STANDARD_BT601_525_UNADJUSTED,
+ ui::Dataspace::STANDARD_BT2020,
+ ui::Dataspace::STANDARD_BT2020_CONSTANT_LUMINANCE,
+ ui::Dataspace::STANDARD_BT470M,
+ ui::Dataspace::STANDARD_FILM,
+ ui::Dataspace::STANDARD_DCI_P3,
+ ui::Dataspace::STANDARD_ADOBE_RGB,
+ ui::Dataspace::TRANSFER_SHIFT,
+ ui::Dataspace::TRANSFER_MASK,
+ ui::Dataspace::TRANSFER_UNSPECIFIED,
+ ui::Dataspace::TRANSFER_LINEAR,
+ ui::Dataspace::TRANSFER_SRGB,
+ ui::Dataspace::TRANSFER_SMPTE_170M,
+ ui::Dataspace::TRANSFER_GAMMA2_2,
+ ui::Dataspace::TRANSFER_GAMMA2_6,
+ ui::Dataspace::TRANSFER_GAMMA2_8,
+ ui::Dataspace::TRANSFER_ST2084,
+ ui::Dataspace::TRANSFER_HLG,
+ ui::Dataspace::RANGE_SHIFT,
+ ui::Dataspace::RANGE_MASK,
+ ui::Dataspace::RANGE_UNSPECIFIED,
+ ui::Dataspace::RANGE_FULL,
+ ui::Dataspace::RANGE_LIMITED,
+ ui::Dataspace::RANGE_EXTENDED,
+ ui::Dataspace::SRGB_LINEAR,
+ ui::Dataspace::V0_SRGB_LINEAR,
+ ui::Dataspace::V0_SCRGB_LINEAR,
+ ui::Dataspace::SRGB,
+ ui::Dataspace::V0_SRGB,
+ ui::Dataspace::V0_SCRGB,
+ ui::Dataspace::JFIF,
+ ui::Dataspace::V0_JFIF,
+ ui::Dataspace::BT601_625,
+ ui::Dataspace::V0_BT601_625,
+ ui::Dataspace::BT601_525,
+ ui::Dataspace::V0_BT601_525,
+ ui::Dataspace::BT709,
+ ui::Dataspace::V0_BT709,
+ ui::Dataspace::DCI_P3_LINEAR,
+ ui::Dataspace::DCI_P3,
+ ui::Dataspace::DISPLAY_P3_LINEAR,
+ ui::Dataspace::DISPLAY_P3,
+ ui::Dataspace::ADOBE_RGB,
+ ui::Dataspace::BT2020_LINEAR,
+ ui::Dataspace::BT2020,
+ ui::Dataspace::BT2020_PQ,
+ ui::Dataspace::DEPTH,
+ ui::Dataspace::SENSOR,
+ ui::Dataspace::BT2020_ITU,
+ ui::Dataspace::BT2020_ITU_PQ,
+ ui::Dataspace::BT2020_ITU_HLG,
+ ui::Dataspace::BT2020_HLG,
+ ui::Dataspace::DISPLAY_BT2020,
+ ui::Dataspace::DYNAMIC_DEPTH,
+ ui::Dataspace::JPEG_APP_SEGMENTS,
+ ui::Dataspace::HEIF,
+};
+
+static constexpr int32_t kReadBufferUsage[] = {
+ GRALLOC_USAGE_SW_READ_NEVER, GRALLOC_USAGE_SW_READ_RARELY, GRALLOC_USAGE_SW_READ_OFTEN,
+ GRALLOC_USAGE_SW_READ_MASK};
+
+static constexpr int32_t kWriteBufferUsage[] = {
+ GRALLOC_USAGE_SW_WRITE_NEVER, GRALLOC_USAGE_SW_WRITE_RARELY, GRALLOC_USAGE_SW_WRITE_OFTEN,
+ GRALLOC_USAGE_SW_WRITE_MASK};
+
+static constexpr int32_t kRenderBufferUsage[] = {
+ GRALLOC_USAGE_HW_TEXTURE,
+ GRALLOC_USAGE_HW_RENDER,
+ GRALLOC_USAGE_HW_2D,
+ GRALLOC_USAGE_HW_COMPOSER,
+ GRALLOC_USAGE_HW_FB,
+ GRALLOC_USAGE_EXTERNAL_DISP,
+ GRALLOC_USAGE_PROTECTED,
+ GRALLOC_USAGE_CURSOR,
+ GRALLOC_USAGE_HW_VIDEO_ENCODER,
+ GRALLOC_USAGE_HW_CAMERA_WRITE,
+ GRALLOC_USAGE_HW_CAMERA_READ,
+ GRALLOC_USAGE_HW_CAMERA_ZSL,
+ GRALLOC_USAGE_HW_CAMERA_MASK,
+ GRALLOC_USAGE_HW_MASK,
+ GRALLOC_USAGE_RENDERSCRIPT,
+ GRALLOC_USAGE_FOREIGN_BUFFERS,
+ GRALLOC_USAGE_HW_IMAGE_ENCODER,
+};
+
+static constexpr RenderEngine::ContextPriority kSetContextPriority[] = {
+ RenderEngine::ContextPriority::LOW, RenderEngine::ContextPriority::MEDIUM,
+ RenderEngine::ContextPriority::HIGH};
+
+static constexpr RenderEngine::RenderEngineType kSetRenderEngineType[] = {
+ RenderEngine::RenderEngineType::GLES, RenderEngine::RenderEngineType::THREADED};
+
+static constexpr RenderEngine::CleanupMode kCleanupMode[] = {
+ RenderEngine::CleanupMode::CLEAN_OUTPUT_RESOURCES, RenderEngine::CleanupMode::CLEAN_ALL};
+
+class RenderFrightFuzzer {
+ public:
+ RenderFrightFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+ void process();
+
+ private:
+ FuzzedDataProvider mFdp;
+ void getLayerSetting(renderengine::LayerSettings& layerSetting, sp<GraphicBuffer> buffer,
+ const Rect& sourceCrop, uint32_t textureName);
+};
+
+void RenderFrightFuzzer::getLayerSetting(renderengine::LayerSettings& layerSetting,
+ sp<GraphicBuffer> buffer, const Rect& sourceCrop,
+ uint32_t textureName) {
+ layerSetting.geometry.boundaries = sourceCrop.toFloatRect();
+ layerSetting.geometry.roundedCornersRadius = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.geometry.roundedCornersCrop = sourceCrop.toFloatRect();
+
+ layerSetting.alpha = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.sourceDataspace = mFdp.PickValueInArray(kDataSpace);
+ layerSetting.backgroundBlurRadius = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.source.buffer.buffer = buffer;
+ layerSetting.source.buffer.isOpaque = mFdp.ConsumeBool();
+ layerSetting.source.buffer.fence = Fence::NO_FENCE;
+ layerSetting.source.buffer.textureName = textureName;
+ layerSetting.source.buffer.usePremultipliedAlpha = mFdp.ConsumeBool();
+ layerSetting.source.buffer.isY410BT2020 =
+ (layerSetting.sourceDataspace == ui::Dataspace::BT2020_ITU_PQ ||
+ layerSetting.sourceDataspace == ui::Dataspace::BT2020_ITU_HLG);
+ layerSetting.source.buffer.maxMasteringLuminance = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.source.buffer.maxContentLuminance = mFdp.ConsumeFloatingPoint<float>();
+
+ layerSetting.shadow.lightPos =
+ vec3(mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>(), 0);
+ layerSetting.shadow.ambientColor = {
+ mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>(),
+ mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>()};
+ layerSetting.shadow.spotColor = {
+ mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>(),
+ mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>()};
+ layerSetting.shadow.length = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.shadow.casterIsTranslucent = mFdp.ConsumeBool();
+}
+
+void RenderFrightFuzzer::process() {
+ auto args = RenderEngineCreationArgs::Builder()
+ .setPixelFormat(mFdp.ConsumeIntegralInRange<int32_t>(kMinPixelFormat,
+ kMaxPixelFormat))
+ .setImageCacheSize(mFdp.ConsumeIntegral<uint32_t>())
+ .setUseColorManagerment(mFdp.ConsumeBool())
+ .setEnableProtectedContext(mFdp.ConsumeBool())
+ .setPrecacheToneMapperShaderOnly(mFdp.ConsumeBool())
+ .setSupportsBackgroundBlur(mFdp.ConsumeBool())
+ .setContextPriority(mFdp.PickValueInArray(kSetContextPriority))
+ .setRenderEngineType(mFdp.PickValueInArray(kSetRenderEngineType))
+ .build();
+ std::unique_ptr<RenderEngine> renderEngine = RenderEngine::create(args);
+
+ std::vector<uint32_t> textures;
+ size_t maxCount = mFdp.ConsumeIntegralInRange<size_t>(0, kMaxTextureCount);
+ for (size_t i = 0; i < maxCount; ++i) {
+ textures.push_back(mFdp.ConsumeIntegral<uint32_t>());
+ }
+
+ while (mFdp.remaining_bytes()) {
+ int32_t renderFrightAPIs =
+ mFdp.ConsumeIntegralInRange<int32_t>(kMinRenderAPI, kMaxRenderAPI);
+ switch (renderFrightAPIs) {
+ case 0: {
+ renderEngine->genTextures(textures.size(), textures.data());
+ break;
+ }
+ case 1: {
+ renderEngine->deleteTextures(textures.size(), textures.data());
+ break;
+ }
+ case 2: {
+ renderEngine->useProtectedContext(mFdp.ConsumeBool());
+ break;
+ }
+ case 3: {
+ renderEngine->cleanupPostRender(mFdp.PickValueInArray(kCleanupMode));
+ break;
+ }
+ case 4: {
+ renderEngine->unbindExternalTextureBuffer(mFdp.ConsumeIntegral<uint64_t>());
+ break;
+ }
+ case 5: {
+ renderEngine->primeCache();
+ break;
+ }
+ case 6: {
+ sp<Fence> fence = sp<Fence>::make();
+ sp<GraphicBuffer> buffer = sp<GraphicBuffer>::make();
+ renderEngine->bindExternalTextureBuffer(mFdp.ConsumeIntegral<uint32_t>(), buffer,
+ fence);
+ break;
+ }
+ case 7: {
+ sp<GraphicBuffer> buffer = sp<GraphicBuffer>::make();
+ renderEngine->cacheExternalTextureBuffer(buffer);
+ break;
+ }
+ case 8: {
+ std::vector<const renderengine::LayerSettings*> layers;
+ renderengine::LayerSettings layerSetting;
+ int32_t width = mFdp.ConsumeIntegralInRange<int32_t>(0, KMaxDisplayWidth);
+ int32_t height = mFdp.ConsumeIntegralInRange<int32_t>(0, KMaxDisplayHeight);
+ Rect sourceCrop(mFdp.ConsumeIntegralInRange<int32_t>(0, width),
+ mFdp.ConsumeIntegralInRange<int32_t>(0, height));
+ uint32_t textureName = 0;
+ /* Get a single texture name to pass to layers */
+ renderEngine->genTextures(1 /*numTextures*/, &textureName);
+ sp<GraphicBuffer> buffer;
+ const uint32_t usage = (mFdp.PickValueInArray(kReadBufferUsage) |
+ mFdp.PickValueInArray(kWriteBufferUsage) |
+ mFdp.PickValueInArray(kRenderBufferUsage));
+
+ for (int i = 0; i < kMaxRenderLayer; ++i) {
+ buffer = new GraphicBuffer(
+ width, height,
+ mFdp.ConsumeIntegralInRange<int32_t>(PIXEL_FORMAT_RGBA_8888,
+ PIXEL_FORMAT_RGBA_4444),
+ usage, "input");
+ getLayerSetting(layerSetting, buffer, sourceCrop, textureName);
+ layers.push_back(&layerSetting);
+ }
+
+ DisplaySettings settings;
+ settings.physicalDisplay = sourceCrop;
+ settings.clip = sourceCrop;
+ settings.outputDataspace = mFdp.PickValueInArray(kDataSpace);
+ settings.maxLuminance = mFdp.ConsumeFloatingPoint<float>();
+
+ sp<GraphicBuffer> dstBuffer =
+ new GraphicBuffer(width, height,
+ mFdp.ConsumeIntegralInRange<int32_t>(
+ PIXEL_FORMAT_RGBA_8888, PIXEL_FORMAT_RGBA_4444),
+ usage, "output");
+ base::unique_fd bufferFence;
+ base::unique_fd drawFence;
+
+ renderEngine->drawLayers(settings, layers, dstBuffer, mFdp.ConsumeBool(),
+ std::move(bufferFence),
+ (mFdp.ConsumeBool() ? nullptr : &drawFence));
+ }
+ }
+ }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ RenderFrightFuzzer renderFrightFuzzer(data, size);
+ renderFrightFuzzer.process();
+ return 0;
+}
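
The harness above follows the usual libFuzzer switch-dispatch shape: drain the input through a FuzzedDataProvider and pick one API case per iteration until the bytes run out. A stripped-down sketch of that shape, assuming only LLVM's FuzzedDataProvider.h and a hypothetical Api stand-in (not the real RenderEngine), looks like this:

#include <fuzzer/FuzzedDataProvider.h>

#include <cstddef>
#include <cstdint>

namespace {
// Hypothetical stand-in for the API under test; the real fuzzer drives RenderEngine.
struct Api {
    void reset() {}
    void push(uint32_t) {}
    void finish(bool) {}
};
constexpr int32_t kMinApi = 0;
constexpr int32_t kMaxApi = 2;
}  // namespace

extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
    FuzzedDataProvider fdp(data, size);
    Api api;
    // Each iteration consumes at least one byte to choose a case, so the loop terminates.
    while (fdp.remaining_bytes()) {
        switch (fdp.ConsumeIntegralInRange<int32_t>(kMinApi, kMaxApi)) {
            case 0: api.reset(); break;
            case 1: api.push(fdp.ConsumeIntegral<uint32_t>()); break;
            case 2: api.finish(fdp.ConsumeBool()); break;
        }
    }
    return 0;
}
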
diff --git a/media/libstagefright/rtsp/fuzzer/Android.bp b/media/libstagefright/rtsp/fuzzer/Android.bp
index a2791ba..ff64af5 100644
--- a/media/libstagefright/rtsp/fuzzer/Android.bp
+++ b/media/libstagefright/rtsp/fuzzer/Android.bp
@@ -29,11 +29,19 @@
header_libs: [
"libstagefright_rtsp_headers",
],
- fuzz_config:{
+ fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-media-playback@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "This fuzzer targets the APIs of libstagefright_rtsp",
+ vector: "local_privileges_required",
+ service_privilege: "privileged",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
@@ -44,7 +52,7 @@
],
defaults: [
"libstagefright_rtsp_fuzzer_defaults",
- ]
+ ],
}
cc_fuzz {
@@ -55,7 +63,7 @@
defaults: [
"libstagefright_rtsp_fuzzer_defaults",
],
- shared_libs:[
+ shared_libs: [
"libandroid_net",
"libbase",
"libstagefright",
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
index 2bcfd67..43542c5 100644
--- a/media/libstagefright/tests/fuzzers/Android.bp
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -32,6 +32,15 @@
"liblog",
"media_permission-aidl-cpp",
],
+ fuzz_config: {
+ componentid: 42195,
+ hotlists: ["4593311"],
+ description: "The fuzzer targets the APIs of libstagefright",
+ vector: "local_no_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
+ },
}
cc_fuzz {
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
index 4218d2d..3f850c2 100644
--- a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
@@ -24,61 +24,64 @@
namespace android {
-#define MAX_MEDIA_BUFFER_SIZE 2048
+static const android_pixel_format_t kColorFormats[] = {
+ HAL_PIXEL_FORMAT_RGBA_8888,
+ HAL_PIXEL_FORMAT_RGB_565,
+ HAL_PIXEL_FORMAT_BGRA_8888,
+ HAL_PIXEL_FORMAT_RGBA_1010102,
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, /* To cover the default case */
+};
-// Fuzzer entry point.
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
- // Init our wrapper
+static const MediaSource::ReadOptions::SeekMode kSeekModes[] = {
+ MediaSource::ReadOptions::SeekMode::SEEK_PREVIOUS_SYNC,
+ MediaSource::ReadOptions::SeekMode::SEEK_NEXT_SYNC,
+ MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST_SYNC,
+ MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST,
+ MediaSource::ReadOptions::SeekMode::SEEK_FRAME_INDEX,
+};
+
+static const std::string kComponentNames[] = {
+ "c2.android.avc.decoder", "c2.android.hevc.decoder", "c2.android.vp8.decoder",
+ "c2.android.vp9.decoder", "c2.android.av1.decoder", "c2.android.mpeg4.decoder",
+ "c2.android.h263.decoder",
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
FuzzedDataProvider fdp(data, size);
+ std::string component = fdp.PickValueInArray(kComponentNames);
+ AString componentName(component.c_str());
+ sp<MetaData> trackMeta = generateMetaData(&fdp, component);
+ sp<IMediaSource> source = sp<IMediaSourceFuzzImpl>::make(&fdp, gMaxMediaBufferSize);
- std::string name = fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
- AString componentName(name.c_str());
- sp<MetaData> trackMeta = generateMetaData(&fdp);
- sp<IMediaSource> source = new IMediaSourceFuzzImpl(&fdp, MAX_MEDIA_BUFFER_SIZE);
-
- // Image or video Decoder?
- sp<FrameDecoder> decoder;
- bool isVideoDecoder = fdp.ConsumeBool();
- if (isVideoDecoder) {
- decoder = new VideoFrameDecoder(componentName, trackMeta, source);
+ sp<FrameDecoder> decoder = nullptr;
+ if (fdp.ConsumeBool()) {
+ decoder = sp<MediaImageDecoder>::make(componentName, trackMeta, source);
} else {
- decoder = new MediaImageDecoder(componentName, trackMeta, source);
+ decoder = sp<VideoFrameDecoder>::make(componentName, trackMeta, source);
}
- while (fdp.remaining_bytes()) {
- uint8_t switchCase = fdp.ConsumeIntegralInRange<uint8_t>(0, 3);
- switch (switchCase) {
- case 0: {
- int64_t frameTimeUs = fdp.ConsumeIntegral<int64_t>();
- int option = fdp.ConsumeIntegral<int>();
- int colorFormat = fdp.ConsumeIntegral<int>();
- decoder->init(frameTimeUs, option, colorFormat);
- break;
- }
- case 1:
- decoder->extractFrame();
- break;
- case 2: {
- FrameRect rect;
- rect.left = fdp.ConsumeIntegral<int32_t>();
- rect.top = fdp.ConsumeIntegral<int32_t>();
- rect.right = fdp.ConsumeIntegral<int32_t>();
- rect.bottom = fdp.ConsumeIntegral<int32_t>();
- decoder->extractFrame(&rect);
- break;
- }
- case 3: {
- sp<MetaData> trackMeta = generateMetaData(&fdp);
- decoder->getMetadataOnly(trackMeta,
- /*colorFormat*/ fdp.ConsumeIntegral<int>(),
- /*thumbnail*/ fdp.ConsumeBool());
- break;
- }
- }
+ if (decoder.get() &&
+ decoder->init(fdp.ConsumeIntegral<uint64_t>() /* frameTimeUs */,
+ fdp.PickValueInArray(kSeekModes) /* option */,
+ fdp.PickValueInArray(kColorFormats) /* colorFormat */) == OK) {
+ auto frameDecoderAPI = fdp.PickValueInArray<const std::function<void()>>({
+ [&]() { decoder->extractFrame(); },
+ [&]() {
+ FrameRect rect(fdp.ConsumeIntegral<int32_t>() /* left */,
+ fdp.ConsumeIntegral<int32_t>() /* top */,
+ fdp.ConsumeIntegral<int32_t>() /* right */,
+ fdp.ConsumeIntegral<int32_t>() /* bottom */
+ );
+ decoder->extractFrame(&rect);
+ },
+ [&]() {
+ FrameDecoder::getMetadataOnly(
+ trackMeta, fdp.PickValueInArray(kColorFormats) /* colorFormat */,
+ fdp.ConsumeBool() /* thumbnail */);
+ },
+ });
+ frameDecoderAPI();
}
-
- generated_mime_types.clear();
-
return 0;
}
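
The rewritten FrameDecoderFuzzer replaces the old integer switch with a table of std::function lambdas picked via PickValueInArray, which keeps each API call and its argument consumption together. A self-contained sketch of that dispatch pattern, using a hypothetical Decoder stand-in rather than the real FrameDecoder, is:

#include <fuzzer/FuzzedDataProvider.h>

#include <cstdint>
#include <functional>

namespace {
// Hypothetical stand-in for FrameDecoder; only the dispatch pattern is illustrated.
struct Decoder {
    void extractFrame() {}
    void extractFrame(int32_t, int32_t, int32_t, int32_t) {}
    void metadataOnly(int, bool) {}
};
}  // namespace

extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
    FuzzedDataProvider fdp(data, size);
    Decoder decoder;
    // Pick exactly one lambda; each lambda pulls its own arguments from the provider.
    auto api = fdp.PickValueInArray<const std::function<void()>>({
            [&]() { decoder.extractFrame(); },
            [&]() {
                decoder.extractFrame(fdp.ConsumeIntegral<int32_t>(), fdp.ConsumeIntegral<int32_t>(),
                                     fdp.ConsumeIntegral<int32_t>(), fdp.ConsumeIntegral<int32_t>());
            },
            [&]() { decoder.metadataOnly(fdp.ConsumeIntegral<int>(), fdp.ConsumeBool()); },
    });
    api();
    return 0;
}
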
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h b/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
index 228c04a..5430530 100644
--- a/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
@@ -20,69 +20,100 @@
#include <media/stagefright/MetaData.h>
#include "MediaMimeTypes.h"
-#define MAX_METADATA_BUF_SIZE 512
-
namespace android {
std::vector<std::shared_ptr<char>> generated_mime_types;
+constexpr uint8_t kMinKeyHeight = 32;
+constexpr uint8_t kMinKeyWidth = 32;
+constexpr uint16_t kMaxKeyHeight = 2160;
+constexpr uint16_t kMaxKeyWidth = 3840;
+size_t gMaxMediaBufferSize = 0;
-sp<MetaData> generateMetaData(FuzzedDataProvider *fdp) {
- sp<MetaData> newMeta = new MetaData();
+sp<MetaData> generateMetaData(FuzzedDataProvider* fdp, std::string componentName = std::string()) {
+ sp<MetaData> newMeta = sp<MetaData>::make();
- // random MIME Type
- const char *mime_type;
- size_t index = fdp->ConsumeIntegralInRange<size_t>(0, kMimeTypes.size());
- // Let there be a chance of a true random string
- if (index == kMimeTypes.size()) {
- std::string mime_str = fdp->ConsumeRandomLengthString(64);
- std::shared_ptr<char> mime_cstr(new char[mime_str.length()+1]);
- generated_mime_types.push_back(mime_cstr);
- strncpy(mime_cstr.get(), mime_str.c_str(), mime_str.length()+1);
- mime_type = mime_cstr.get();
- } else {
- mime_type = kMimeTypes[index];
+ const char* mime;
+ if (!componentName.empty())
+ {
+ auto it = decoderToMediaType.find(componentName);
+ mime = it->second;
}
- newMeta->setCString(kKeyMIMEType, mime_type);
+ else {
+ size_t index = fdp->ConsumeIntegralInRange<size_t>(0, kMimeTypes.size());
+ // Let there be a chance of a true random string
+ if (index == kMimeTypes.size()) {
+ std::string mime_str = fdp->ConsumeRandomLengthString(64);
+ std::shared_ptr<char> mime_cstr(new char[mime_str.length()+1], std::default_delete<char[]>());
+ generated_mime_types.push_back(mime_cstr);
+ strncpy(mime_cstr.get(), mime_str.c_str(), mime_str.length()+1);
+ mime = mime_cstr.get();
+ } else {
+ mime = kMimeTypes[index];
+ }
+ }
+ newMeta->setCString(kKeyMIMEType, mime);
- // Thumbnail time
- newMeta->setInt64(kKeyThumbnailTime, fdp->ConsumeIntegral<int64_t>());
+ auto height = fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyHeight, kMaxKeyHeight);
+ auto width = fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyWidth, kMaxKeyWidth);
+ newMeta->setInt32(kKeyHeight, height);
+ newMeta->setInt32(kKeyWidth, width);
- // Values used by allocVideoFrame
- newMeta->setInt32(kKeyRotation, fdp->ConsumeIntegral<int32_t>());
- size_t profile_size =
- fdp->ConsumeIntegralInRange<size_t>(0, MAX_METADATA_BUF_SIZE);
- std::vector<uint8_t> profile_bytes =
- fdp->ConsumeBytes<uint8_t>(profile_size);
- newMeta->setData(kKeyIccProfile,
- fdp->ConsumeIntegral<int32_t>(),
- profile_bytes.empty() ? nullptr : profile_bytes.data(),
- profile_bytes.size());
- newMeta->setInt32(kKeySARWidth, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeySARHeight, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeyDisplayWidth, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeyDisplayHeight, fdp->ConsumeIntegral<int32_t>());
+ gMaxMediaBufferSize = height * width;
- // Values used by findThumbnailInfo
- newMeta->setInt32(kKeyThumbnailWidth, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeyThumbnailHeight, fdp->ConsumeIntegral<int32_t>());
- size_t thumbnail_size =
- fdp->ConsumeIntegralInRange<size_t>(0, MAX_METADATA_BUF_SIZE);
- std::vector<uint8_t> thumb_bytes =
- fdp->ConsumeBytes<uint8_t>(thumbnail_size);
- newMeta->setData(kKeyThumbnailHVCC,
- fdp->ConsumeIntegral<int32_t>(),
- thumb_bytes.empty() ? nullptr : thumb_bytes.data(),
- thumb_bytes.size());
+ if (fdp->ConsumeBool()) {
+ newMeta->setInt32(kKeyTileHeight,
+ fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyHeight, height));
+ newMeta->setInt32(kKeyTileWidth,
+ fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyWidth, width));
+ newMeta->setInt32(kKeyGridRows, fdp->ConsumeIntegral<uint8_t>());
+ newMeta->setInt32(kKeyGridCols, fdp->ConsumeIntegral<uint8_t>());
+ }
- // Values used by findGridInfo
- newMeta->setInt32(kKeyTileWidth, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeyTileHeight, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeyGridRows, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeyGridCols, fdp->ConsumeIntegral<int32_t>());
+ if (fdp->ConsumeBool()) {
+ newMeta->setInt32(kKeySARHeight, fdp->ConsumeIntegral<uint8_t>());
+ newMeta->setInt32(kKeySARWidth, fdp->ConsumeIntegral<uint8_t>());
+ }
- // A few functions perform a CHECK() that height/width are set
- newMeta->setInt32(kKeyHeight, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeyWidth, fdp->ConsumeIntegral<int32_t>());
+ if (fdp->ConsumeBool()) {
+ newMeta->setInt32(kKeyDisplayHeight,
+ fdp->ConsumeIntegralInRange<uint16_t>(height, UINT16_MAX));
+ newMeta->setInt32(kKeyDisplayWidth,
+ fdp->ConsumeIntegralInRange<uint16_t>(width, UINT16_MAX));
+ }
+
+ if (fdp->ConsumeBool()) {
+ newMeta->setRect(kKeyCropRect, fdp->ConsumeIntegral<int32_t>() /* left */,
+ fdp->ConsumeIntegral<int32_t>() /* top */,
+ fdp->ConsumeIntegral<int32_t>() /* right */,
+ fdp->ConsumeIntegral<int32_t>() /* bottom */);
+ }
+
+ if (fdp->ConsumeBool()) {
+ newMeta->setInt32(kKeyRotation, fdp->ConsumeIntegralInRange<uint8_t>(0, 3) * 90);
+ }
+
+ if (fdp->ConsumeBool()) {
+ newMeta->setInt64(kKeyThumbnailTime, fdp->ConsumeIntegral<uint64_t>());
+ newMeta->setInt32(kKeyThumbnailHeight, fdp->ConsumeIntegral<uint8_t>());
+ newMeta->setInt32(kKeyThumbnailWidth, fdp->ConsumeIntegral<uint8_t>());
+
+ size_t thumbnailSize = fdp->ConsumeIntegral<size_t>();
+ std::vector<uint8_t> thumbnailData = fdp->ConsumeBytes<uint8_t>(thumbnailSize);
+ if (mime == MEDIA_MIMETYPE_VIDEO_AV1) {
+ newMeta->setData(kKeyThumbnailAV1C, fdp->ConsumeIntegral<int32_t>() /* type */,
+ thumbnailData.data(), thumbnailData.size());
+ } else {
+ newMeta->setData(kKeyThumbnailHVCC, fdp->ConsumeIntegral<int32_t>() /* type */,
+ thumbnailData.data(), thumbnailData.size());
+ }
+ }
+
+ if (fdp->ConsumeBool()) {
+ size_t profileSize = fdp->ConsumeIntegral<size_t>();
+ std::vector<uint8_t> profileData = fdp->ConsumeBytes<uint8_t>(profileSize);
+ newMeta->setData(kKeyIccProfile, fdp->ConsumeIntegral<int32_t>() /* type */,
+ profileData.data(), profileData.size());
+ }
return newMeta;
}
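
generateMetaData() now gates each group of keys behind ConsumeBool(), so the decoder sees runs both with and without thumbnail, SAR, crop and rotation metadata. A minimal sketch of that optional-key pattern, assuming only FuzzedDataProvider.h and using a plain map as a stand-in for MetaData, is:

#include <fuzzer/FuzzedDataProvider.h>

#include <cstdint>
#include <map>
#include <string>

static void maybeAddThumbnailKeys(FuzzedDataProvider& fdp, std::map<std::string, int64_t>& meta) {
    if (!fdp.ConsumeBool()) {
        return;  // leave the keys unset on some runs
    }
    meta["thumbnail-time"] = fdp.ConsumeIntegral<int64_t>();
    meta["thumbnail-width"] = fdp.ConsumeIntegral<uint8_t>();
    meta["thumbnail-height"] = fdp.ConsumeIntegral<uint8_t>();
}
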
diff --git a/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h b/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
index e769950..7e6f662 100644
--- a/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
+++ b/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
@@ -19,31 +19,33 @@
#include <media/stagefright/MediaSource.h>
+#define MAX_FRAMES 5
+
namespace android {
class IMediaSourceFuzzImpl : public IMediaSource {
public:
- IMediaSourceFuzzImpl(FuzzedDataProvider *_fdp, size_t _max_buffer_size) :
- fdp(_fdp),
- max_buffer_size(_max_buffer_size) {}
- status_t start(MetaData*) override { return 0; }
- status_t stop() override { return 0; }
- sp<MetaData> getFormat() override { return nullptr; }
- status_t read(MediaBufferBase**,
- const MediaSource::ReadOptions*) override;
- status_t readMultiple(Vector<MediaBufferBase*>*, uint32_t,
- const MediaSource::ReadOptions*) override;
- bool supportReadMultiple() override { return true; }
- bool supportNonblockingRead() override { return true; }
- status_t pause() override { return 0; }
+ IMediaSourceFuzzImpl(FuzzedDataProvider* _fdp, size_t _max_buffer_size)
+ : frames_read(0), fdp(_fdp), min_buffer_size(32 * 32), max_buffer_size(_max_buffer_size) {}
+ status_t start(MetaData*) override { return 0; }
+ status_t stop() override { return 0; }
+ sp<MetaData> getFormat() override { return nullptr; }
+ status_t read(MediaBufferBase**, const MediaSource::ReadOptions*) override;
+ status_t readMultiple(Vector<MediaBufferBase*>*, uint32_t,
+ const MediaSource::ReadOptions*) override;
+ bool supportReadMultiple() override { return true; }
+ bool supportNonblockingRead() override { return true; }
+ status_t pause() override { return 0; }
protected:
IBinder* onAsBinder() { return nullptr; }
private:
- FuzzedDataProvider *fdp;
- std::vector<std::shared_ptr<MediaBufferBase>> buffer_bases;
- const size_t max_buffer_size;
+ uint8_t frames_read;
+ FuzzedDataProvider* fdp;
+ const size_t min_buffer_size;
+ const size_t max_buffer_size;
+ std::vector<uint8_t> buf;
};
// This class is simply to expose the destructor
@@ -53,32 +55,41 @@
~MediaBufferFuzzImpl() {}
};
-status_t IMediaSourceFuzzImpl::read(MediaBufferBase **buffer,
- const MediaSource::ReadOptions *options) {
+status_t IMediaSourceFuzzImpl::read(MediaBufferBase** buffer, const MediaSource::ReadOptions*) {
Vector<MediaBufferBase*> buffers;
- status_t ret = readMultiple(&buffers, 1, options);
+ status_t ret = readMultiple(&buffers, 1, nullptr);
*buffer = buffers.empty() ? nullptr : buffers[0];
return ret;
}
-status_t IMediaSourceFuzzImpl::readMultiple(Vector<MediaBufferBase*>* buffers,
- uint32_t maxNumBuffers, const MediaSource::ReadOptions*) {
- uint32_t num_buffers =
- fdp->ConsumeIntegralInRange<uint32_t>(0, maxNumBuffers);
- for(uint32_t i = 0; i < num_buffers; i++) {
- std::vector<uint8_t> buf = fdp->ConsumeBytes<uint8_t>(
- fdp->ConsumeIntegralInRange<size_t>(0, max_buffer_size));
+status_t IMediaSourceFuzzImpl::readMultiple(Vector<MediaBufferBase*>* buffers, uint32_t,
+ const MediaSource::ReadOptions*) {
+ if (++frames_read == MAX_FRAMES) {
+ auto size = fdp->ConsumeIntegralInRange<size_t>(min_buffer_size, max_buffer_size);
+ buf = fdp->ConsumeBytes<uint8_t>(size);
+ if (buf.size() < size) {
+ buf.resize(size, 0);
+ }
- std::shared_ptr<MediaBufferBase> mbb(
- new MediaBufferFuzzImpl(buf.data(), buf.size()));
+ MediaBufferBase* mbb = new MediaBufferFuzzImpl(buf.data(), buf.size());
+ mbb->meta_data().setInt64(kKeyTime, fdp->ConsumeIntegral<uint64_t>());
+ buffers->push_back(mbb);
- buffer_bases.push_back(mbb);
- buffers->push_back(mbb.get());
+ return ERROR_END_OF_STREAM;
}
- // STATUS_OK
- return 0;
+ auto size = fdp->ConsumeIntegralInRange<size_t>(min_buffer_size, max_buffer_size);
+ buf = fdp->ConsumeBytes<uint8_t>(size);
+ if (buf.size() < size) {
+ buf.resize(size, 0);
+ }
+
+ MediaBufferBase* mbb = new MediaBufferFuzzImpl(buf.data(), buf.size());
+ mbb->meta_data().setInt64(kKeyTime, fdp->ConsumeIntegral<uint64_t>());
+ buffers->push_back(mbb);
+
+ return OK;
}
} // namespace android
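
IMediaSourceFuzzImpl is now a counting stub: every read hands back a zero-padded, fuzz-sized buffer, and the fifth read additionally reports ERROR_END_OF_STREAM so the decoder's drain path runs. A standard-library-only sketch of that idea (the Android types are replaced with hypothetical ones) is:

#include <cstddef>
#include <cstdint>
#include <vector>

// Hypothetical stand-in for IMediaSourceFuzzImpl: hands out fixed-size frames and
// reports end-of-stream once the frame budget is used up.
class CountingSource {
  public:
    explicit CountingSource(uint8_t maxFrames) : mMaxFrames(maxFrames) {}

    // Returns false once the budget is exhausted, mirroring ERROR_END_OF_STREAM.
    bool read(std::vector<uint8_t>& out, size_t frameSize) {
        out.assign(frameSize, 0);  // zero-filled payload, like the resize(size, 0) above
        return ++mFramesRead < mMaxFrames;
    }

  private:
    uint8_t mFramesRead = 0;
    const uint8_t mMaxFrames;
};
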
diff --git a/media/libstagefright/tests/fuzzers/MediaMimeTypes.h b/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
index 9f337ac..de7814e 100644
--- a/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
+++ b/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
@@ -18,6 +18,7 @@
#define FUZZER_MEDIAMIMETYPES_H_
#include <media/stagefright/foundation/MediaDefs.h>
+#include <unordered_map>
namespace android {
@@ -80,6 +81,15 @@
MEDIA_MIMETYPE_DATA_TIMED_ID3
};
+static const std::unordered_map<std::string, const char*> decoderToMediaType = {
+ {"c2.android.vp8.decoder", MEDIA_MIMETYPE_VIDEO_VP8},
+ {"c2.android.vp9.decoder", MEDIA_MIMETYPE_VIDEO_VP9},
+ {"c2.android.av1.decoder", MEDIA_MIMETYPE_VIDEO_AV1},
+ {"c2.android.avc.decoder", MEDIA_MIMETYPE_VIDEO_AVC},
+ {"c2.android.hevc.decoder", MEDIA_MIMETYPE_VIDEO_HEVC},
+ {"c2.android.mpeg4.decoder", MEDIA_MIMETYPE_VIDEO_MPEG4},
+ {"c2.android.h263.decoder", MEDIA_MIMETYPE_VIDEO_H263}};
+
} // namespace android
#endif // FUZZER_MEDIAMIMETYPES_H_
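
decoderToMediaType lets the fuzzer derive a matching MIME type from the chosen c2.android.* component name. The helper in FrameDecoderHelpers.h dereferences find() directly because every entry in kComponentNames is present in the map; a more defensive lookup, sketched here with hypothetical names and a couple of illustrative entries, would fall back explicitly:

#include <string>
#include <unordered_map>

// Illustrative subset only; the real table maps every fuzzed component name.
static const std::unordered_map<std::string, const char*> kDecoderToMime = {
        {"c2.android.avc.decoder", "video/avc"},
        {"c2.android.vp9.decoder", "video/x-vnd.on2.vp9"},
};

// Returns nullptr for unknown components instead of dereferencing end().
static const char* mimeForDecoder(const std::string& component) {
    auto it = kDecoderToMime.find(component);
    return it != kDecoderToMime.end() ? it->second : nullptr;
}
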
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
index 70d73c8..5ac2a54 100644
--- a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
@@ -13,94 +13,221 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-// Authors: corbin.souffrant@leviathansecurity.com
-// dylan.katz@leviathansecurity.com
-#include <MediaMuxerFuzzer.h>
-#include <cutils/ashmem.h>
#include <fuzzer/FuzzedDataProvider.h>
#include <media/stagefright/MediaMuxer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/MediaDefs.h>
namespace android {
+const uint8_t kMinSize = 0;
+const uint8_t kMinTrackCount = 0;
-// Can't seem to get setBuffer or setString working. It always segfaults on a
-// null pointer read or memleaks. So that functionality is missing.
-void createMessage(AMessage *msg, FuzzedDataProvider *fdp) {
- size_t count = fdp->ConsumeIntegralInRange<size_t>(0, 32);
- while (fdp->remaining_bytes() > 0 && count > 0) {
- uint8_t function_id =
- fdp->ConsumeIntegralInRange<uint8_t>(0, amessage_setvals.size() - 1);
- amessage_setvals[function_id](msg, fdp);
- count--;
- }
+enum kBufferFlags { BUFFER_FLAG_SYNCFRAME = 1, BUFFER_FLAG_CODECCONFIG = 2, BUFFER_FLAG_EOS = 4 };
+
+constexpr char kMuxerFile[] = "MediaMuxer";
+
+const std::string kAudioMimeTypes[] = {
+ MEDIA_MIMETYPE_AUDIO_AMR_NB,
+ MEDIA_MIMETYPE_AUDIO_AMR_WB,
+ MEDIA_MIMETYPE_AUDIO_MPEG,
+ MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+ MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+ MEDIA_MIMETYPE_AUDIO_MIDI,
+ MEDIA_MIMETYPE_AUDIO_AAC,
+ MEDIA_MIMETYPE_AUDIO_QCELP,
+ MEDIA_MIMETYPE_AUDIO_VORBIS,
+ MEDIA_MIMETYPE_AUDIO_OPUS,
+ MEDIA_MIMETYPE_AUDIO_G711_ALAW,
+ MEDIA_MIMETYPE_AUDIO_G711_MLAW,
+ MEDIA_MIMETYPE_AUDIO_RAW,
+ MEDIA_MIMETYPE_AUDIO_FLAC,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS,
+ MEDIA_MIMETYPE_AUDIO_MSGSM,
+ MEDIA_MIMETYPE_AUDIO_AC3,
+ MEDIA_MIMETYPE_AUDIO_EAC3,
+ MEDIA_MIMETYPE_AUDIO_EAC3_JOC,
+ MEDIA_MIMETYPE_AUDIO_AC4,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L3,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L4,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L3,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L4,
+ MEDIA_MIMETYPE_AUDIO_SCRAMBLED,
+ MEDIA_MIMETYPE_AUDIO_ALAC,
+ MEDIA_MIMETYPE_AUDIO_WMA,
+ MEDIA_MIMETYPE_AUDIO_MS_ADPCM,
+ MEDIA_MIMETYPE_AUDIO_DVI_IMA_ADPCM,
+ MEDIA_MIMETYPE_AUDIO_DTS,
+ MEDIA_MIMETYPE_AUDIO_DTS_HD,
+ MEDIA_MIMETYPE_AUDIO_DTS_HD_MA,
+ MEDIA_MIMETYPE_AUDIO_DTS_UHD,
+ MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1,
+ MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2,
+ MEDIA_MIMETYPE_AUDIO_EVRC,
+ MEDIA_MIMETYPE_AUDIO_EVRCB,
+ MEDIA_MIMETYPE_AUDIO_EVRCWB,
+ MEDIA_MIMETYPE_AUDIO_EVRCNW,
+ MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS,
+ MEDIA_MIMETYPE_AUDIO_APTX,
+ MEDIA_MIMETYPE_AUDIO_DRA,
+ MEDIA_MIMETYPE_AUDIO_DOLBY_MAT,
+ MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_1_0,
+ MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_0,
+ MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_1,
+ MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD,
+ MEDIA_MIMETYPE_AUDIO_AAC_MP4,
+ MEDIA_MIMETYPE_AUDIO_AAC_MAIN,
+ MEDIA_MIMETYPE_AUDIO_AAC_LC,
+ MEDIA_MIMETYPE_AUDIO_AAC_SSR,
+ MEDIA_MIMETYPE_AUDIO_AAC_LTP,
+ MEDIA_MIMETYPE_AUDIO_AAC_HE_V1,
+ MEDIA_MIMETYPE_AUDIO_AAC_SCALABLE,
+ MEDIA_MIMETYPE_AUDIO_AAC_ERLC,
+ MEDIA_MIMETYPE_AUDIO_AAC_LD,
+ MEDIA_MIMETYPE_AUDIO_AAC_HE_V2,
+ MEDIA_MIMETYPE_AUDIO_AAC_ELD,
+ MEDIA_MIMETYPE_AUDIO_AAC_XHE,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADIF,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_MAIN,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SSR,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LTP,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SCALABLE,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ERLC,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LD,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ELD,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_XHE,
+ MEDIA_MIMETYPE_AUDIO_AAC_LATM_LC,
+ MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V1,
+ MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V2,
+ MEDIA_MIMETYPE_AUDIO_IEC61937,
+ MEDIA_MIMETYPE_AUDIO_IEC60958,
+};
+
+const std::string kVideoMimeTypes[] = {
+ MEDIA_MIMETYPE_VIDEO_VP8, MEDIA_MIMETYPE_VIDEO_VP9,
+ MEDIA_MIMETYPE_VIDEO_AV1, MEDIA_MIMETYPE_VIDEO_AVC,
+ MEDIA_MIMETYPE_VIDEO_HEVC, MEDIA_MIMETYPE_VIDEO_MPEG4,
+ MEDIA_MIMETYPE_VIDEO_H263, MEDIA_MIMETYPE_VIDEO_MPEG2,
+ MEDIA_MIMETYPE_VIDEO_RAW, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
+ MEDIA_MIMETYPE_VIDEO_SCRAMBLED, MEDIA_MIMETYPE_VIDEO_DIVX,
+ MEDIA_MIMETYPE_VIDEO_DIVX3, MEDIA_MIMETYPE_VIDEO_XVID,
+ MEDIA_MIMETYPE_VIDEO_MJPEG,
+};
+
+void getSampleAudioFormat(FuzzedDataProvider& fdp, AMessage* format) {
+ std::string mimeType = fdp.PickValueInArray(kAudioMimeTypes);
+ format->setString("mime", mimeType.c_str(), mimeType.length());
+ format->setInt32("sample-rate", fdp.ConsumeIntegral<int32_t>());
+ format->setInt32("channel-count", fdp.ConsumeIntegral<int32_t>());
+}
+
+void getSampleVideoFormat(FuzzedDataProvider& fdp, AMessage* format) {
+ std::string mimeType = fdp.PickValueInArray(kVideoMimeTypes);
+ format->setString("mime", mimeType.c_str(), mimeType.length());
+ format->setInt32("height", fdp.ConsumeIntegral<int32_t>());
+ format->setInt32("width", fdp.ConsumeIntegral<int32_t>());
+ format->setInt32("time-lapse-fps", fdp.ConsumeIntegral<int32_t>());
}
extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
- FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+ FuzzedDataProvider fdp(data, size);
- size_t data_size = fdp.ConsumeIntegralInRange<size_t>(0, size);
- int fd = ashmem_create_region("mediamuxer_fuzz_region", data_size);
- if (fd < 0)
+ // memfd_create() creates an anonymous file and returns a file
+ // descriptor that refers to it. MFD_ALLOW_SEALING allows sealing
+ // operations on this file.
+ int32_t fd = memfd_create(kMuxerFile, MFD_ALLOW_SEALING);
+ if (fd == -1) {
+ ALOGE("memfd_create failed: %s", strerror(errno));
+ return 0;
+ }
+
+ auto outputFormat = (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(
+ MediaMuxer::OutputFormat::OUTPUT_FORMAT_MPEG_4,
+ MediaMuxer::OutputFormat::OUTPUT_FORMAT_LIST_END);
+
+ sp<MediaMuxer> mMuxer = MediaMuxer::create(fd, outputFormat);
+ if (mMuxer == nullptr) {
+ close(fd);
+ return 0;
+ }
+
+ // Used to consume a maximum of 80% of the data to send buffer data to writeSampleData().
+ // This ensures that we don't completely exhaust the data and keeps the remaining 20%
+ // available for fuzzing the other APIs.
+ const size_t kMaxSize = (size * 80) / 100;
+ while (fdp.remaining_bytes()) {
+ auto invokeMediaMuxerAPI = fdp.PickValueInArray<const std::function<void()>>({
+ [&]() {
+ // Using 'return' here due to a timeout bug present in OGGWriter.cpp
+ // (b/310316183).
+ if (outputFormat == MediaMuxer::OutputFormat::OUTPUT_FORMAT_OGG) {
+ return;
+ }
+
+ sp<AMessage> format = sp<AMessage>::make();
+ fdp.ConsumeBool() ? getSampleAudioFormat(fdp, format.get())
+ : getSampleVideoFormat(fdp, format.get());
+
+ mMuxer->addTrack(fdp.ConsumeBool() ? format : nullptr);
+ },
+ [&]() {
+ mMuxer->setLocation(fdp.ConsumeIntegral<int32_t>() /* latitude */,
+ fdp.ConsumeIntegral<int32_t>() /* longitude */);
+ },
+ [&]() { mMuxer->setOrientationHint(fdp.ConsumeIntegral<int32_t>() /* degrees */); },
+ [&]() { mMuxer->start(); },
+ [&]() {
+ std::vector<uint8_t> sample = fdp.ConsumeBytes<uint8_t>(
+ fdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize));
+ sp<ABuffer> buffer = sp<ABuffer>::make(sample.data(), sample.size());
+
+ size_t offset = fdp.ConsumeIntegralInRange<size_t>(kMinSize, sample.size());
+ size_t length =
+ fdp.ConsumeIntegralInRange<size_t>(kMinSize, buffer->size() - offset);
+ buffer->setRange(offset, length);
+
+ sp<AMessage> meta = buffer->meta();
+ meta->setInt64("sample-file-offset", fdp.ConsumeIntegral<int64_t>());
+ meta->setInt64("last-sample-index-in-chunk", fdp.ConsumeIntegral<int64_t>());
+
+ uint32_t flags = 0;
+ if (fdp.ConsumeBool()) {
+ flags |= kBufferFlags::BUFFER_FLAG_SYNCFRAME;
+ }
+ if (fdp.ConsumeBool()) {
+ flags |= kBufferFlags::BUFFER_FLAG_CODECCONFIG;
+ }
+ if (fdp.ConsumeBool()) {
+ flags |= kBufferFlags::BUFFER_FLAG_EOS;
+ }
+
+ size_t trackIndex = fdp.ConsumeBool()
+ ? fdp.ConsumeIntegralInRange<size_t>(
+ kMinTrackCount, mMuxer->getTrackCount())
+ : fdp.ConsumeIntegral<size_t>();
+ int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
+ mMuxer->writeSampleData(fdp.ConsumeBool() ? buffer : nullptr, trackIndex,
+ timeUs, flags);
+ },
+ [&]() {
+ mMuxer->getTrackFormat(
+ fdp.ConsumeBool() ? fdp.ConsumeIntegralInRange<size_t>(
+ kMinTrackCount, mMuxer->getTrackCount())
+ : fdp.ConsumeIntegral<size_t>() /* idx */);
+ },
+ [&]() { mMuxer->stop(); },
+ });
+
+ invokeMediaMuxerAPI();
+ }
+
+ close(fd);
return 0;
-
- uint8_t *sh_data = static_cast<uint8_t *>(
- mmap(NULL, data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
- if (sh_data == MAP_FAILED)
- return 0;
-
- MediaMuxer::OutputFormat format =
- (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(0, 4);
- sp<MediaMuxer> mMuxer = MediaMuxer::create(fd, format);
- if (mMuxer == nullptr) {
- return 0;
- }
-
- while (fdp.remaining_bytes() > 1) {
- switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 4)) {
- case 0: {
- // For some reason it only likes mp4s here...
- if (format == 1 || format == 4)
- break;
-
- sp<AMessage> a_format(new AMessage);
- createMessage(a_format.get(), &fdp);
- mMuxer->addTrack(a_format);
- break;
- }
- case 1: {
- mMuxer->start();
- break;
- }
- case 2: {
- int degrees = fdp.ConsumeIntegral<int>();
- mMuxer->setOrientationHint(degrees);
- break;
- }
- case 3: {
- int latitude = fdp.ConsumeIntegral<int>();
- int longitude = fdp.ConsumeIntegral<int>();
- mMuxer->setLocation(latitude, longitude);
- break;
- }
- case 4: {
- size_t buf_size = fdp.ConsumeIntegralInRange<size_t>(0, data_size);
- sp<ABuffer> a_buffer(new ABuffer(buf_size));
-
- size_t trackIndex = fdp.ConsumeIntegral<size_t>();
- int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
- uint32_t flags = fdp.ConsumeIntegral<uint32_t>();
- mMuxer->writeSampleData(a_buffer, trackIndex, timeUs, flags);
- }
- }
- }
-
- if (fdp.ConsumeBool())
- mMuxer->stop();
-
- munmap(sh_data, data_size);
- close(fd);
- return 0;
}
} // namespace android
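
The muxer fuzzer now writes through an anonymous memfd instead of an ashmem region, so no mmap()/munmap() bookkeeping is needed; only the descriptor has to be closed. A minimal sketch of that descriptor lifecycle, assuming a Linux libc that exposes memfd_create() (glibc 2.27+ or bionic), is:

#include <sys/mman.h>
#include <unistd.h>

#include <cerrno>
#include <cstdio>
#include <cstring>

int main() {
    // MFD_ALLOW_SEALING keeps the option of sealing the file against later resizing/writes.
    int fd = memfd_create("example_region", MFD_ALLOW_SEALING);
    if (fd == -1) {
        std::fprintf(stderr, "memfd_create failed: %s\n", std::strerror(errno));
        return 1;
    }
    // ... hand fd to a writer, e.g. MediaMuxer::create(fd, format), and drive it ...
    close(fd);
    return 0;
}
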
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
deleted file mode 100644
index 7d4421d..0000000
--- a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-// Authors: corbin.souffrant@leviathansecurity.com
-// dylan.katz@leviathansecurity.com
-
-#pragma once
-
-#include <fuzzer/FuzzedDataProvider.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-namespace android {
-
-// Mappings vectors are the list of attributes that the MediaMuxer
-// class looks for in the message.
-static std::vector<const char *> floatMappings{
- "capture-rate",
- "time-lapse-fps",
- "frame-rate",
-};
-
-static std::vector<const char *> int64Mappings{
- "exif-offset", "exif-size", "target-time",
- "thumbnail-time", "timeUs", "durationUs",
-};
-
-static std::vector<const char *> int32Mappings{"loop",
- "time-scale",
- "crypto-mode",
- "crypto-default-iv-size",
- "crypto-encrypted-byte-block",
- "crypto-skip-byte-block",
- "frame-count",
- "max-bitrate",
- "pcm-big-endian",
- "temporal-layer-count",
- "temporal-layer-id",
- "thumbnail-width",
- "thumbnail-height",
- "track-id",
- "valid-samples",
- "color-format",
- "ca-system-id",
- "is-sync-frame",
- "bitrate",
- "max-bitrate",
- "width",
- "height",
- "sar-width",
- "sar-height",
- "display-width",
- "display-height",
- "is-default",
- "tile-width",
- "tile-height",
- "grid-rows",
- "grid-cols",
- "rotation-degrees",
- "channel-count",
- "sample-rate",
- "bits-per-sample",
- "channel-mask",
- "encoder-delay",
- "encoder-padding",
- "is-adts",
- "frame-rate",
- "max-height",
- "max-width",
- "max-input-size",
- "haptic-channel-count",
- "pcm-encoding",
- "aac-profile"};
-
-static const std::vector<std::function<void(AMessage *, FuzzedDataProvider *)>>
- amessage_setvals = {
- [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
- msg->setRect("crop", fdp->ConsumeIntegral<int32_t>(),
- fdp->ConsumeIntegral<int32_t>(),
- fdp->ConsumeIntegral<int32_t>(),
- fdp->ConsumeIntegral<int32_t>());
- },
- [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
- msg->setFloat(floatMappings[fdp->ConsumeIntegralInRange<size_t>(
- 0, floatMappings.size() - 1)],
- fdp->ConsumeFloatingPoint<float>());
- },
- [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
- msg->setInt64(int64Mappings[fdp->ConsumeIntegralInRange<size_t>(
- 0, int64Mappings.size() - 1)],
- fdp->ConsumeIntegral<int64_t>());
- },
- [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
- msg->setInt32(int32Mappings[fdp->ConsumeIntegralInRange<size_t>(
- 0, int32Mappings.size() - 1)],
- fdp->ConsumeIntegral<int32_t>());
- }};
-} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/corpus/0ef67b8a074fed50b8875df345ab2e62175c34c9 b/media/libstagefright/tests/fuzzers/corpus/0ef67b8a074fed50b8875df345ab2e62175c34c9
new file mode 100644
index 0000000..652581f
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/0ef67b8a074fed50b8875df345ab2e62175c34c9
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/60eb43c963545c0b2676dad3e4c38cfe87136bbc b/media/libstagefright/tests/fuzzers/corpus/60eb43c963545c0b2676dad3e4c38cfe87136bbc
new file mode 100644
index 0000000..60ca169
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/60eb43c963545c0b2676dad3e4c38cfe87136bbc
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/8c7cb9439f81a8e00b651b3658fe24116f37df7e b/media/libstagefright/tests/fuzzers/corpus/8c7cb9439f81a8e00b651b3658fe24116f37df7e
new file mode 100644
index 0000000..c03bcad
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/8c7cb9439f81a8e00b651b3658fe24116f37df7e
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/c624e73c16c59dfbc3c563416cfc962e3c3a96a0 b/media/libstagefright/tests/fuzzers/corpus/c624e73c16c59dfbc3c563416cfc962e3c3a96a0
new file mode 100644
index 0000000..52f2d5a
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/c624e73c16c59dfbc3c563416cfc962e3c3a96a0
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/c6aff0d7ccaf58a1964a6bcc51777bf1786503ca b/media/libstagefright/tests/fuzzers/corpus/c6aff0d7ccaf58a1964a6bcc51777bf1786503ca
new file mode 100644
index 0000000..83c522f
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/c6aff0d7ccaf58a1964a6bcc51777bf1786503ca
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/ec6bd6069f74a2f6e92442f88efb29288ad6f456 b/media/libstagefright/tests/fuzzers/corpus/ec6bd6069f74a2f6e92442f88efb29288ad6f456
new file mode 100644
index 0000000..62d259b
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/ec6bd6069f74a2f6e92442f88efb29288ad6f456
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/fbf47d9a9173df0a39285c94d89fcbc767d5e774 b/media/libstagefright/tests/fuzzers/corpus/fbf47d9a9173df0a39285c94d89fcbc767d5e774
new file mode 100644
index 0000000..db78b75
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/fbf47d9a9173df0a39285c94d89fcbc767d5e774
Binary files differ
diff --git a/media/libstagefright/timedtext/test/fuzzer/Android.bp b/media/libstagefright/timedtext/test/fuzzer/Android.bp
index 6590ebb..8724d51 100644
--- a/media/libstagefright/timedtext/test/fuzzer/Android.bp
+++ b/media/libstagefright/timedtext/test/fuzzer/Android.bp
@@ -48,8 +48,16 @@
],
fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-media-playback@google.com",
],
- componentid: 155276,
+ componentid: 42195,
+ hotlists: [
+ "4593311",
+ ],
+ description: "This fuzzer targets the APIs of libstagefright_timedtext",
+ vector: "local_no_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/libstagefright/webm/Android.bp b/media/libstagefright/webm/Android.bp
index 6ed3e0e..723131d 100644
--- a/media/libstagefright/webm/Android.bp
+++ b/media/libstagefright/webm/Android.bp
@@ -10,8 +10,6 @@
cc_library_static {
name: "libstagefright_webm",
- cppflags: ["-D__STDINT_LIMITS"],
-
cflags: [
"-Werror",
"-Wall",
diff --git a/media/module/aidlpersistentsurface/AidlGraphicBufferSource.cpp b/media/module/aidlpersistentsurface/AidlGraphicBufferSource.cpp
new file mode 100644
index 0000000..c208666
--- /dev/null
+++ b/media/module/aidlpersistentsurface/AidlGraphicBufferSource.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+
+#define LOG_TAG "AidlGraphicBufferSource"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <media/stagefright/bqhelper/ComponentWrapper.h>
+#include <media/stagefright/bqhelper/GraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+
+namespace android::media {
+
+namespace {
+
+class AidlComponentWrapper : public ComponentWrapper {
+public:
+ explicit AidlComponentWrapper(const sp<IAidlNodeWrapper> &node)
+ : mAidlNode(node) {}
+ virtual ~AidlComponentWrapper() = default;
+
+ status_t submitBuffer(
+ int32_t bufferId, const sp<GraphicBuffer> &buffer,
+ int64_t timestamp, int fenceFd) override {
+ return mAidlNode->submitBuffer(
+ bufferId, BUFFERFLAG_ENDOFFRAME, buffer, timestamp, fenceFd);
+ }
+
+ status_t submitEos(int32_t bufferId) override {
+ return mAidlNode->submitBuffer(
+ bufferId, BUFFERFLAG_ENDOFFRAME | BUFFERFLAG_EOS);
+ }
+
+ void dispatchDataSpaceChanged(
+ int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
+ mAidlNode->dispatchDataSpaceChanged(dataSpace, aspects, pixelFormat);
+ }
+
+private:
+ sp<IAidlNodeWrapper> mAidlNode;
+
+ DISALLOW_EVIL_CONSTRUCTORS(AidlComponentWrapper);
+};
+
+} // namespace
+
+::ndk::ScopedAStatus AidlGraphicBufferSource::onStart() {
+ status_t err = start();
+ return (OK == err) ? ::ndk::ScopedAStatus::ok() :
+ ::ndk::ScopedAStatus::fromServiceSpecificError(err);
+}
+
+::ndk::ScopedAStatus AidlGraphicBufferSource::onStop() {
+ status_t err = stop();
+ return (OK == err) ? ::ndk::ScopedAStatus::ok() :
+ ::ndk::ScopedAStatus::fromServiceSpecificError(err);
+}
+
+::ndk::ScopedAStatus AidlGraphicBufferSource::onRelease() {
+ status_t err = release();
+ return (OK == err) ? ::ndk::ScopedAStatus::ok() :
+ ::ndk::ScopedAStatus::fromServiceSpecificError(err);
+}
+
+status_t AidlGraphicBufferSource::configure(
+ const sp<IAidlNodeWrapper>& aidlNode,
+ int32_t dataSpace,
+ int32_t bufferCount,
+ uint32_t frameWidth,
+ uint32_t frameHeight,
+ uint64_t consumerUsage) {
+ if (aidlNode == NULL) {
+ return BAD_VALUE;
+ }
+
+ return GraphicBufferSource::configure(
+ new AidlComponentWrapper(aidlNode), dataSpace, bufferCount,
+ frameWidth, frameHeight, consumerUsage);
+}
+
+} // namespace android::media
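
onStart(), onStop() and onRelease() all reduce to the same translation: a status_t of OK becomes ScopedAStatus::ok(), anything else is surfaced as a service-specific error. A compact sketch of that mapping, compiled against the NDK binder and libutils headers this file already uses (startImpl() is a hypothetical stand-in for GraphicBufferSource::start()), is:

#include <android/binder_auto_utils.h>
#include <utils/Errors.h>

namespace example {

// Hypothetical stand-in for the underlying GraphicBufferSource call.
inline android::status_t startImpl() { return android::OK; }

inline ::ndk::ScopedAStatus onStartLike() {
    const android::status_t err = startImpl();
    return (err == android::OK) ? ::ndk::ScopedAStatus::ok()
                                : ::ndk::ScopedAStatus::fromServiceSpecificError(err);
}

}  // namespace example
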
diff --git a/media/module/aidlpersistentsurface/Android.bp b/media/module/aidlpersistentsurface/Android.bp
new file mode 100644
index 0000000..5c1a010
--- /dev/null
+++ b/media/module/aidlpersistentsurface/Android.bp
@@ -0,0 +1,69 @@
+aidl_interface {
+ name: "graphicbuffersource-aidl",
+ unstable: true,
+ local_include_dir: "aidl",
+ min_sdk_version: "29",
+ srcs: [
+ "aidl/android/media/AidlColorAspects.aidl",
+ "aidl/android/media/IAidlGraphicBufferSource.aidl",
+ "aidl/android/media/IAidlBufferSource.aidl",
+ "aidl/android/media/IAidlNode.aidl",
+ ],
+ headers: [
+ "HardwareBuffer_aidl",
+ ],
+ imports: [
+ "android.hardware.graphics.common-V5",
+ ],
+ include_dirs: [
+ "frameworks/native/aidl/gui",
+ ],
+ backend: {
+ cpp: {
+ enabled: false,
+ },
+ java: {
+ enabled: false,
+ },
+ ndk: {
+ enabled: true,
+ additional_shared_libraries: [
+ "libnativewindow",
+ ],
+ },
+ rust: {
+ // No users, and no rust implementation of android.os.Surface yet
+ enabled: false,
+ },
+ },
+}
+
+cc_library_shared {
+ name: "libstagefright_graphicbuffersource_aidl",
+ min_sdk_version: "29",
+ srcs: [
+ "AidlGraphicBufferSource.cpp",
+ "wrapper/WAidlGraphicBufferSource.cpp",
+ ],
+ export_include_dirs: [
+ "include",
+ ],
+ header_libs: [
+ "media_plugin_headers",
+ ],
+
+ export_header_lib_headers: [
+ "media_plugin_headers",
+ ],
+ shared_libs: [
+ "android.hardware.graphics.common-V5-ndk",
+ "graphicbuffersource-aidl-ndk",
+ "libbinder_ndk",
+ "libcutils",
+ "libgui",
+ "liblog",
+ "libnativewindow",
+ "libstagefright_bufferqueue_helper",
+ "libutils",
+ ],
+}
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/AidlColorAspects.aidl b/media/module/aidlpersistentsurface/aidl/android/media/AidlColorAspects.aidl
new file mode 100644
index 0000000..4edd6ce
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/AidlColorAspects.aidl
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Ref: frameworks/native/include/media/hardware/VideoAPI.h
+ *
+ * Framework defined color aspects. These are based mainly on ISO 23001-8 spec. As this standard
+ * continues to evolve, new values may be defined in the future. Use OTHER for these future values
+ * as well as for values not listed here, as those are not supported by the framework.
+ */
+parcelable AidlColorAspects {
+ @Backing(type="int")
+ enum Range {
+ UNSPECIFIED, // Unspecified
+ FULL, // Full range
+ LIMITED, // Limited range (if defined), or not full range
+
+ OTHER = 0xff, // Not one of the above values
+ }
+
+ // Color primaries
+ @Backing(type="int")
+ enum Primaries {
+ UNSPECIFIED, // Unspecified
+ BT709_5, // Rec.ITU-R BT.709-5 or equivalent
+ BT470_6M, // Rec.ITU-R BT.470-6 System M or equivalent
+ BT601_6_625, // Rec.ITU-R BT.601-6 625 or equivalent
+ BT601_6_525, // Rec.ITU-R BT.601-6 525 or equivalent
+ GENERIC_FILM, // Generic Film
+ BT2020, // Rec.ITU-R BT.2020 or equivalent
+
+ OTHER = 0xff, // Not one of the above values
+ }
+
+ // Transfer characteristics
+ @Backing(type="int")
+ enum Transfer {
+ UNSPECIFIED, // Unspecified
+ LINEAR, // Linear transfer characteristics
+ SRGB, // sRGB or equivalent
+ SMPTE170M, // SMPTE 170M or equivalent (e.g. BT.601/709/2020)
+ GAMMA22, // Assumed display gamma 2.2
+ GAMMA28, // Assumed display gamma 2.8
+ ST2084, // SMPTE ST 2084 for 10/12/14/16 bit systems
+ HLG, // ARIB STD-B67 hybrid-log-gamma
+
+ // values unlikely to be required by Android follow here
+ SMPTE240M = 0x40, // SMPTE 240M
+ XVYCC, // IEC 61966-2-4
+ BT1361, // Rec.ITU-R BT.1361 extended gamut
+ ST428, // SMPTE ST 428-1
+
+ OTHER = 0xff, // Not one of the above values
+ }
+
+ // YUV <-> RGB conversion
+ @Backing(type="int")
+ enum MatrixCoeffs {
+ UNSPECIFIED, // Unspecified
+ BT709_5, // Rec.ITU-R BT.709-5 or equivalent
+ BT470_6M, // KR=0.30, KB=0.11 or equivalent
+ BT601_6, // Rec.ITU-R BT.601-6 625 or equivalent
+ SMPTE240M, // SMPTE 240M or equivalent
+ BT2020, // Rec.ITU-R BT.2020 non-constant luminance
+ BT2020CONSTANT, // Rec.ITU-R BT.2020 constant luminance
+
+ OTHER = 0xff, // Not one of the above values
+ }
+
+ Range range;
+ Primaries primaries;
+ Transfer transfer;
+ MatrixCoeffs matrixCoeffs;
+}
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl
new file mode 100644
index 0000000..d428e99
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.ParcelFileDescriptor;
+
+/**
+ * Binder interface for controlling and handling IAidlGraphicBufferSource
+ * from the process which owns IAidlNode.
+ *
+ * In order to support Persistent InputSurface and/or MediaRecorder
+ */
+interface IAidlBufferSource {
+ /**
+ * This is called when IAidlGraphicBufferSource can start handling buffers.
+ * If we already have buffers of data sitting in the BufferQueue,
+ * this will send them to the codec.
+ */
+ void onStart();
+
+ /**
+ * This is called when IAidlGraphicBufferSource indicates that
+ * the codec is meant to return all buffers back to the client for them
+ * to be freed. Do NOT submit any more buffers to the component.
+ */
+ void onStop();
+
+ /**
+ * This is called when IAidlGraphicBufferSource indicates that
+ * we are shutting down.
+ */
+ void onRelease();
+
+ /**
+ * A "codec buffer", i.e. a buffer that can be used to pass data into
+ * the encoder, has been allocated.
+ */
+ void onInputBufferAdded(int bufferID);
+
+ /**
+ * If we have a BQ buffer available,
+ * fill it with a new frame of data; otherwise, just mark it as available.
+ *
+ * fence contains the fence's fd that the callee should wait on before
+ * using the buffer (or pass on to the user of the buffer, if the user supports
+ * fences). Callee takes ownership of the fence fd even if it fails.
+ */
+ void onInputBufferEmptied(int bufferID, in @nullable ParcelFileDescriptor fence);
+}
+
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlGraphicBufferSource.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlGraphicBufferSource.aidl
new file mode 100644
index 0000000..6642e89
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlGraphicBufferSource.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.hardware.graphics.common.Dataspace;
+import android.media.AidlColorAspects;
+import android.media.IAidlNode;
+
+/**
+ * Binder interface for configuring/controlling a Codec2 AIDL encoder instance
+ * on behalf of a Surface which will produce input buffers.
+ *
+ * In order to support Persistent InputSurface and/or MediaRecorder.
+ */
+interface IAidlGraphicBufferSource {
+ void configure(IAidlNode node, Dataspace dataSpace);
+ void setSuspend(boolean suspend, long suspendTimeUs);
+ void setRepeatPreviousFrameDelayUs(long repeatAfterUs);
+ void setMaxFps(float maxFps);
+ void setTimeLapseConfig(double fps, double captureFps);
+ void setStartTimeUs(long startTimeUs);
+ void setStopTimeUs(long stopTimeUs);
+ long getStopTimeOffsetUs();
+ void setColorAspects(in AidlColorAspects aspects);
+ void setTimeOffsetUs(long timeOffsetUs);
+ void signalEndOfInputStream();
+}
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
new file mode 100644
index 0000000..cf880c2
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.hardware.HardwareBuffer;
+import android.media.IAidlBufferSource;
+import android.os.ParcelFileDescriptor;
+
+/**
+ * Binder interface abstraction for codec2 encoder instance.
+ *
+ * In order to support Persistent InputSurface and/or MediaRecorder.
+ */
+interface IAidlNode {
+
+ /**
+ * Input buffer parameters retrieved from the node.
+ */
+ parcelable InputBufferParams {
+ int bufferCountActual;
+ int frameWidth;
+ int frameHeight;
+ }
+
+ void freeNode();
+ long getConsumerUsage();
+ InputBufferParams getInputBufferParams();
+ void setConsumerUsage(long usage);
+ void setAdjustTimestampGapUs(int gapUs);
+ void setInputSurface(IAidlBufferSource bufferSource);
+ void submitBuffer(
+ int buffer,
+ in HardwareBuffer hBuffer,
+ int flags,
+ long timestampUs,
+ in @nullable ParcelFileDescriptor fence);
+ void onDataSpaceChanged(int dataSpace, int aspects, int pixelFormat);
+}
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h
new file mode 100644
index 0000000..85de688
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <media/stagefright/bqhelper/GraphicBufferSource.h>
+#include <media/stagefright/foundation/ABase.h>
+
+#include <media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h>
+
+#include <utils/Errors.h>
+
+#include <aidl/android/media/BnAidlBufferSource.h>
+
+namespace android::media {
+
+/*
+ * This class is used to feed codec encoders from a Surface via BufferQueue or
+ * HW producer using AIDL binder interfaces.
+ *
+ * See media/stagefright/bqhelper/GraphicBufferSource.h for documentation.
+ */
+class AidlGraphicBufferSource : public GraphicBufferSource {
+public:
+ AidlGraphicBufferSource() = default;
+ virtual ~AidlGraphicBufferSource() = default;
+
+ // For IAidlBufferSource interface
+ // ------------------------------
+
+ // When we can start handling buffers. If we already have buffers of data
+ // sitting in the BufferQueue, this will send them to the codec.
+ ::ndk::ScopedAStatus onStart();
+
+ // When the codec is meant to return all buffers back to the client for
+ // them to be freed. Do NOT submit any more buffers to the component.
+ ::ndk::ScopedAStatus onStop();
+
+ // When we are shutting down.
+ ::ndk::ScopedAStatus onRelease();
+
+ // Rest of the interface in GraphicBufferSource.
+
+ // IAidlGraphicBufferSource interface
+ // ------------------------------
+
+ // Configure the buffer source to be used with a codec2 aidl node given
+ // parameters.
+ status_t configure(
+ const sp<IAidlNodeWrapper> &aidlNode,
+ int32_t dataSpace,
+ int32_t bufferCount,
+ uint32_t frameWidth,
+ uint32_t frameHeight,
+ uint64_t consumerUsage);
+
+ // Rest of the interface in GraphicBufferSource.
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(AidlGraphicBufferSource);
+};
+
+} // namespace android::media
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h
new file mode 100644
index 0000000..364efe2
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+namespace android::media {
+
+// Node definitions for aidl input surface.
+//
+// Copied from the non-AIDL implementation. Definitions that the
+// input surface implementation does not need have been omitted.
+
+enum C2NodeBufferFlag : uint32_t {
+ BUFFERFLAG_EOS = 1,
+ BUFFERFLAG_ENDOFFRAME = (1 << 4)
+};
+
+} // namespace android::media
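
For reference, the two flags combine the way AidlComponentWrapper::submitEos() uses them: an end-of-stream submission carries both bits, i.e. 0x11.

#include <cassert>
#include <cstdint>

int main() {
    constexpr uint32_t kEos = 1;              // BUFFERFLAG_EOS
    constexpr uint32_t kEndOfFrame = 1 << 4;  // BUFFERFLAG_ENDOFFRAME
    assert((kEndOfFrame | kEos) == 0x11);     // value passed for an EOS submission
    return 0;
}
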
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h
new file mode 100644
index 0000000..f23b5e4
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <utils/RefBase.h>
+#include <utils/StrongPointer.h>
+#include <ui/GraphicBuffer.h>
+
+#include <stdint.h>
+
+namespace android::media {
+
+struct IAidlNodeWrapper : public RefBase {
+ virtual status_t submitBuffer(
+ int32_t bufferId, uint32_t flags,
+ const sp<GraphicBuffer> &buffer = nullptr,
+ int64_t timestamp = 0, int fenceFd = -1) = 0;
+ virtual void dispatchDataSpaceChanged(
+ int32_t dataSpace, int32_t aspects, int32_t pixelFormat) = 0;
+};
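+
+// Note (illustrative): the defaulted arguments above allow flag-only
+// submissions (a null GraphicBuffer with, e.g., an EOS flag), which the
+// wrapper in WAidlGraphicBufferSource.cpp forwards as an empty HardwareBuffer.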
+
+} // namespace android::media
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/Conversion.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/Conversion.h
new file mode 100644
index 0000000..dcb83f6
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/Conversion.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/binder_auto_utils.h>
+#include <ui/GraphicBuffer.h>
+#include <utils/Errors.h>
+
+#include <aidl/android/hardware/graphics/common/Dataspace.h>
+#include <aidl/android/hardware/graphics/common/PixelFormat.h>
+#include <aidl/android/media/AidlColorAspects.h>
+
+namespace android::media::aidl_conversion {
+
+inline status_t fromAidlStatus(const ::ndk::ScopedAStatus &status) {
+ if (!status.isOk()) {
+ if (status.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+ return static_cast<status_t>(status.getServiceSpecificError());
+ } else {
+ return static_cast<status_t>(FAILED_TRANSACTION);
+ }
+ }
+ return NO_ERROR;
+}
+
+inline ::ndk::ScopedAStatus toAidlStatus(status_t status) {
+ if (status == NO_ERROR) {
+ return ::ndk::ScopedAStatus::ok();
+ }
+ return ::ndk::ScopedAStatus::fromServiceSpecificError(status);
+}
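+
+// For reference: status_t values are carried across the binder boundary as
+// service-specific errors, so fromAidlStatus(toAidlStatus(BAD_VALUE)) yields
+// BAD_VALUE again, while a transport-level failure surfaces as FAILED_TRANSACTION.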
+
+inline int32_t compactFromAidlColorAspects(::aidl::android::media::AidlColorAspects const& s) {
+ return static_cast<int32_t>(
+ (static_cast<uint32_t>(s.range) << 24) |
+ (static_cast<uint32_t>(s.primaries) << 16) |
+ (static_cast<uint32_t>(s.transfer)) |
+ (static_cast<uint32_t>(s.matrixCoeffs) << 8));
+}
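+
+// Resulting packed layout (derived from the shifts above): bits 31..24 = range,
+// bits 23..16 = primaries, bits 15..8 = matrixCoeffs, bits 7..0 = transfer.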
+
+inline int32_t rawFromAidlDataspace(
+ ::aidl::android::hardware::graphics::common::Dataspace const& s) {
+ return static_cast<int32_t>(s);
+}
+
+} // namespace android::media::aidl_conversion
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h
new file mode 100644
index 0000000..f4d7fe8
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <utils/RefBase.h>
+#include <aidl/android/hardware/graphics/common/Dataspace.h>
+#include <aidl/android/media/IAidlBufferSource.h>
+#include <aidl/android/media/IAidlNode.h>
+#include <aidl/android/media/BnAidlGraphicBufferSource.h>
+
+namespace android::media {
+
+class AidlGraphicBufferSource;
+
+using ::android::sp;
+
+/**
+ * AIDL wrapper implementation of IAidlGraphicBufferSource.
+ */
+class WAidlGraphicBufferSource : public ::aidl::android::media::BnAidlGraphicBufferSource {
+public:
+
+ struct WAidlNodeWrapper;
+ class WAidlBufferSource;
+
+ sp<AidlGraphicBufferSource> mBase;
+ std::shared_ptr<::aidl::android::media::IAidlBufferSource> mBufferSource;
+
+ WAidlGraphicBufferSource(sp<AidlGraphicBufferSource> const& base);
+ ::ndk::ScopedAStatus configure(
+ const std::shared_ptr<::aidl::android::media::IAidlNode>& node,
+ aidl::android::hardware::graphics::common::Dataspace dataspace) override;
+ ::ndk::ScopedAStatus setSuspend(bool suspend, int64_t timeUs) override;
+ ::ndk::ScopedAStatus setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs) override;
+ ::ndk::ScopedAStatus setMaxFps(float maxFps) override;
+ ::ndk::ScopedAStatus setTimeLapseConfig(double fps, double captureFps) override;
+ ::ndk::ScopedAStatus setStartTimeUs(int64_t startTimeUs) override;
+ ::ndk::ScopedAStatus setStopTimeUs(int64_t stopTimeUs) override;
+ ::ndk::ScopedAStatus getStopTimeOffsetUs(int64_t *_aidl_return) override;
+ ::ndk::ScopedAStatus setColorAspects(
+ const ::aidl::android::media::AidlColorAspects& aspects) override;
+ ::ndk::ScopedAStatus setTimeOffsetUs(int64_t timeOffsetUs) override;
+ ::ndk::ScopedAStatus signalEndOfInputStream() override;
+};
+
+} // namespace android::media
diff --git a/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp b/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp
new file mode 100644
index 0000000..5526b10
--- /dev/null
+++ b/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "WAidlGraphicBufferSource"
+#include <android/hardware_buffer_aidl.h>
+#include <private/android/AHardwareBufferHelpers.h>
+#include <utils/Log.h>
+
+#include <aidl/android/media/BnAidlBufferSource.h>
+#include <aidl/android/media/IAidlNode.h>
+
+#include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/Conversion.h>
+
+namespace android::media {
+using ::android::binder::unique_fd;
+using ::aidl::android::hardware::graphics::common::PixelFormat;
+using ::aidl::android::hardware::graphics::common::Dataspace;
+using ::aidl::android::media::AidlColorAspects;
+using ::aidl::android::media::IAidlNode;
+using ::aidl::android::media::BnAidlBufferSource;
+
+// Conversion
+using ::android::media::aidl_conversion::fromAidlStatus;
+using ::android::media::aidl_conversion::toAidlStatus;
+using ::android::media::aidl_conversion::compactFromAidlColorAspects;
+using ::android::media::aidl_conversion::rawFromAidlDataspace;
+
+struct WAidlGraphicBufferSource::WAidlNodeWrapper : public IAidlNodeWrapper {
+ std::shared_ptr<IAidlNode> mNode;
+
+ WAidlNodeWrapper(const std::shared_ptr<IAidlNode> &node): mNode(node) {
+ }
+
+ virtual status_t submitBuffer(
+ int32_t bufferId, uint32_t flags,
+ const sp<GraphicBuffer> &buffer,
+ int64_t timestamp, int fenceFd) override {
+ AHardwareBuffer *ahwBuffer = nullptr;
+ ::aidl::android::hardware::HardwareBuffer hBuffer;
+ if (buffer.get()) {
+ ahwBuffer = AHardwareBuffer_from_GraphicBuffer(buffer.get());
+ AHardwareBuffer_acquire(ahwBuffer);
+ hBuffer.reset(ahwBuffer);
+ }
+
+ ::ndk::ScopedFileDescriptor fence(fenceFd);
+
+ return fromAidlStatus(mNode->submitBuffer(
+ bufferId,
+ hBuffer,
+ flags,
+ timestamp,
+ fence));
+ }
+
+ virtual void dispatchDataSpaceChanged(
+ int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
+ ::ndk::ScopedAStatus err = mNode->onDataSpaceChanged(
+ dataSpace, aspects, pixelFormat);
+ status_t status = fromAidlStatus(err);
+ if (status != NO_ERROR) {
+ ALOGE("WAidlNodeWrapper failed to change dataspace (%d): "
+ "dataSpace = %ld, aspects = %ld, pixelFormat = %ld",
+ static_cast<int>(status),
+ static_cast<long>(dataSpace),
+ static_cast<long>(aspects),
+ static_cast<long>(pixelFormat));
+ }
+ }
+};
+
+class WAidlGraphicBufferSource::WAidlBufferSource : public BnAidlBufferSource {
+ sp<AidlGraphicBufferSource> mSource;
+
+public:
+ WAidlBufferSource(const sp<AidlGraphicBufferSource> &source): mSource(source) {
+ }
+
+ ::ndk::ScopedAStatus onStart() override {
+ mSource->onStart();
+ return ::ndk::ScopedAStatus::ok();
+ }
+
+ ::ndk::ScopedAStatus onStop() override {
+ mSource->onStop();
+ return ::ndk::ScopedAStatus::ok();
+ }
+
+ ::ndk::ScopedAStatus onRelease() override {
+ mSource->onRelease();
+ return ::ndk::ScopedAStatus::ok();
+ }
+
+ ::ndk::ScopedAStatus onInputBufferAdded(int32_t bufferId) override {
+ mSource->onInputBufferAdded(bufferId);
+ return ::ndk::ScopedAStatus::ok();
+ }
+
+ ::ndk::ScopedAStatus onInputBufferEmptied(
+ int32_t bufferId, const ::ndk::ScopedFileDescriptor& fence) override {
+ mSource->onInputBufferEmptied(bufferId, ::dup(fence.get()));
+ return ::ndk::ScopedAStatus::ok();
+ }
+};
+
+// WAidlGraphicBufferSource
+WAidlGraphicBufferSource::WAidlGraphicBufferSource(
+ sp<AidlGraphicBufferSource> const& base) :
+ mBase(base),
+ mBufferSource(::ndk::SharedRefBase::make<WAidlBufferSource>(base)) {
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::configure(
+ const std::shared_ptr<IAidlNode>& node, Dataspace dataspace) {
+ if (node == NULL) {
+ return toAidlStatus(BAD_VALUE);
+ }
+
+ // Do setInputSurface() first; the node will try to enable metadata
+ // mode on input and perform the necessary error checking. If this fails,
+ // we can't use this input surface on the node.
+ ::ndk::ScopedAStatus err = node->setInputSurface(mBufferSource);
+ status_t fnStatus = fromAidlStatus(err);
+ if (fnStatus != NO_ERROR) {
+ ALOGE("Unable to set input surface: %d", fnStatus);
+ return err;
+ }
+
+ // Use the consumer usage bits queried from the encoder, but always add
+ // HW_VIDEO_ENCODER for backward compatibility.
+ int64_t consumerUsage;
+ fnStatus = OK;
+ err = node->getConsumerUsage(&consumerUsage);
+ fnStatus = fromAidlStatus(err);
+ if (fnStatus != NO_ERROR) {
+ if (fnStatus == FAILED_TRANSACTION) {
+ return err;
+ }
+ consumerUsage = 0;
+ }
+
+ IAidlNode::InputBufferParams rDef;
+ err = node->getInputBufferParams(&rDef);
+ fnStatus = fromAidlStatus(err);
+ if (fnStatus != NO_ERROR) {
+ ALOGE("Failed to get port definition: %d", fnStatus);
+ return toAidlStatus(fnStatus);
+ }
+
+ return toAidlStatus(mBase->configure(
+ new WAidlNodeWrapper(node),
+ rawFromAidlDataspace(dataspace),
+ rDef.bufferCountActual,
+ rDef.frameWidth,
+ rDef.frameHeight,
+ static_cast<uint64_t>(consumerUsage)));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setSuspend(
+ bool suspend, int64_t timeUs) {
+ return toAidlStatus(mBase->setSuspend(suspend, timeUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setRepeatPreviousFrameDelayUs(
+ int64_t repeatAfterUs) {
+ return toAidlStatus(mBase->setRepeatPreviousFrameDelayUs(repeatAfterUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setMaxFps(float maxFps) {
+ return toAidlStatus(mBase->setMaxFps(maxFps));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setTimeLapseConfig(
+ double fps, double captureFps) {
+ return toAidlStatus(mBase->setTimeLapseConfig(fps, captureFps));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setStartTimeUs(int64_t startTimeUs) {
+ return toAidlStatus(mBase->setStartTimeUs(startTimeUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setStopTimeUs(int64_t stopTimeUs) {
+ return toAidlStatus(mBase->setStopTimeUs(stopTimeUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::getStopTimeOffsetUs(int64_t* _aidl_return) {
+ status_t status = mBase->getStopTimeOffsetUs(_aidl_return);
+ return toAidlStatus(status);
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setColorAspects(
+ const AidlColorAspects& aspects) {
+ return toAidlStatus(mBase->setColorAspects(compactFromAidlColorAspects(aspects)));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setTimeOffsetUs(int64_t timeOffsetUs) {
+ return toAidlStatus(mBase->setTimeOffsetUs(timeOffsetUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::signalEndOfInputStream() {
+ return toAidlStatus(mBase->signalEndOfInputStream());
+}
+
+} // namespace android::media
diff --git a/media/module/bufferpool/1.0/vts/multi.cpp b/media/module/bufferpool/1.0/vts/multi.cpp
index d8cc285..21f47d3 100644
--- a/media/module/bufferpool/1.0/vts/multi.cpp
+++ b/media/module/bufferpool/1.0/vts/multi.cpp
@@ -24,6 +24,7 @@
#include <hidl/HidlSupport.h>
#include <hidl/HidlTransportSupport.h>
#include <hidl/LegacySupport.h>
+#include <hidl/ServiceManagement.h>
#include <hidl/Status.h>
#include <signal.h>
#include <sys/types.h>
@@ -36,6 +37,7 @@
using android::hardware::configureRpcThreadpool;
using android::hardware::hidl_handle;
+using android::hardware::isHidlSupported;
using android::hardware::media::bufferpool::V1_0::IClientManager;
using android::hardware::media::bufferpool::V1_0::ResultStatus;
using android::hardware::media::bufferpool::V1_0::implementation::BufferId;
@@ -178,6 +180,7 @@
ResultStatus status;
PipeMessage message;
+ if (!isHidlSupported()) GTEST_SKIP() << "HIDL is not supported on this device";
ASSERT_TRUE(receiveMessage(mResultPipeFds, &message));
android::sp<IClientManager> receiver = IClientManager::getService();
diff --git a/media/module/bufferpool/2.0/AccessorImpl.cpp b/media/module/bufferpool/2.0/AccessorImpl.cpp
index 1d2562e..202d803 100644
--- a/media/module/bufferpool/2.0/AccessorImpl.cpp
+++ b/media/module/bufferpool/2.0/AccessorImpl.cpp
@@ -609,7 +609,7 @@
}
if (ret == false) {
ALOGW("buffer status message processing failure - message : %d connection : %lld",
- message.newStatus, (long long)message.connectionId);
+ (int)message.newStatus, (long long)message.connectionId);
}
}
messages.clear();
diff --git a/media/module/codecs/amrwb/enc/Android.bp b/media/module/codecs/amrwb/enc/Android.bp
index 8780136..04f36b5 100644
--- a/media/module/codecs/amrwb/enc/Android.bp
+++ b/media/module/codecs/amrwb/enc/Android.bp
@@ -79,67 +79,31 @@
arch: {
arm: {
srcs: [
- "src/asm/ARMV5E/convolve_opt.s",
- "src/asm/ARMV5E/cor_h_vec_opt.s",
- "src/asm/ARMV5E/Deemph_32_opt.s",
- "src/asm/ARMV5E/Dot_p_opt.s",
- "src/asm/ARMV5E/Filt_6k_7k_opt.s",
- "src/asm/ARMV5E/Norm_Corr_opt.s",
- "src/asm/ARMV5E/pred_lt4_1_opt.s",
- "src/asm/ARMV5E/residu_asm_opt.s",
- "src/asm/ARMV5E/scale_sig_opt.s",
- "src/asm/ARMV5E/Syn_filt_32_opt.s",
- "src/asm/ARMV5E/syn_filt_opt.s",
+ "src/asm/ARMV7/convolve_neon.s",
+ "src/asm/ARMV7/cor_h_vec_neon.s",
+ "src/asm/ARMV7/Deemph_32_neon.s",
+ "src/asm/ARMV7/Dot_p_neon.s",
+ "src/asm/ARMV7/Filt_6k_7k_neon.s",
+ "src/asm/ARMV7/Norm_Corr_neon.s",
+ "src/asm/ARMV7/pred_lt4_1_neon.s",
+ "src/asm/ARMV7/residu_asm_neon.s",
+ "src/asm/ARMV7/scale_sig_neon.s",
+ "src/asm/ARMV7/Syn_filt_32_neon.s",
+ "src/asm/ARMV7/syn_filt_neon.s",
],
cflags: [
"-DARM",
+ "-DARMV7",
"-DASM_OPT",
+ // don't actually generate neon instructions, see bug 26932980
+ "-mfpu=vfpv3",
],
- local_include_dirs: ["src/asm/ARMV5E"],
+ local_include_dirs: [
+ "src/asm/ARMV7",
+ ],
instruction_set: "arm",
-
- neon: {
- exclude_srcs: [
- "src/asm/ARMV5E/convolve_opt.s",
- "src/asm/ARMV5E/cor_h_vec_opt.s",
- "src/asm/ARMV5E/Deemph_32_opt.s",
- "src/asm/ARMV5E/Dot_p_opt.s",
- "src/asm/ARMV5E/Filt_6k_7k_opt.s",
- "src/asm/ARMV5E/Norm_Corr_opt.s",
- "src/asm/ARMV5E/pred_lt4_1_opt.s",
- "src/asm/ARMV5E/residu_asm_opt.s",
- "src/asm/ARMV5E/scale_sig_opt.s",
- "src/asm/ARMV5E/Syn_filt_32_opt.s",
- "src/asm/ARMV5E/syn_filt_opt.s",
- ],
-
- srcs: [
- "src/asm/ARMV7/convolve_neon.s",
- "src/asm/ARMV7/cor_h_vec_neon.s",
- "src/asm/ARMV7/Deemph_32_neon.s",
- "src/asm/ARMV7/Dot_p_neon.s",
- "src/asm/ARMV7/Filt_6k_7k_neon.s",
- "src/asm/ARMV7/Norm_Corr_neon.s",
- "src/asm/ARMV7/pred_lt4_1_neon.s",
- "src/asm/ARMV7/residu_asm_neon.s",
- "src/asm/ARMV7/scale_sig_neon.s",
- "src/asm/ARMV7/Syn_filt_32_neon.s",
- "src/asm/ARMV7/syn_filt_neon.s",
- ],
-
- // don't actually generate neon instructions, see bug 26932980
- cflags: [
- "-DARMV7",
- "-mfpu=vfpv3",
- ],
- local_include_dirs: [
- "src/asm/ARMV5E",
- "src/asm/ARMV7",
- ],
- },
-
},
},
diff --git a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
index 4fbfab1..6df9dc8 100644
--- a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
+++ b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
@@ -49,6 +49,7 @@
using ::android::hardware::Return;
using ::android::sp;
using ::ndk::ScopedAStatus;
+namespace c2_hidl_V1_0 = ::android::hardware::media::c2::V1_0;
namespace c2_hidl = ::android::hardware::media::c2::V1_2;
namespace c2_aidl = ::aidl::android::hardware::media::c2;
@@ -734,6 +735,46 @@
} // unnamed namespace
+static android::sp<c2_hidl_V1_0::IComponentStore> getDeclaredHidlSwcodec(
+ const std::shared_ptr<C2ComponentStore>& store) {
+ using ::android::hidl::manager::V1_2::IServiceManager;
+ using namespace ::android::hardware::media::c2;
+
+ int platformVersion = android_get_device_api_level();
+ // STOPSHIP: Remove code name checking once platform version bumps up to 35.
+ std::string codeName = android::base::GetProperty("ro.build.version.codename", "");
+
+ if (codeName == "VanillaIceCream") {
+ platformVersion = __ANDROID_API_V__;
+ }
+ IServiceManager::Transport transport =
+ android::hardware::defaultServiceManager1_2()->getTransport(
+ V1_2::IComponentStore::descriptor, "software");
+ if (transport == IServiceManager::Transport::HWBINDER) {
+ if (platformVersion < __ANDROID_API_S__) {
+ LOG(ERROR) << "We don't expect V1.2::IComponentStore to be declared on this device";
+ }
+ return ::android::sp<V1_2::utils::ComponentStore>::make(store);
+ }
+ transport = android::hardware::defaultServiceManager1_2()->getTransport(
+ V1_1::IComponentStore::descriptor, "software");
+ if (transport == IServiceManager::Transport::HWBINDER) {
+ if (platformVersion != __ANDROID_API_R__) {
+ LOG(ERROR) << "We don't expect V1.1::IComponentStore to be declared on this device";
+ }
+ return ::android::sp<V1_1::utils::ComponentStore>::make(store);
+ }
+ transport = android::hardware::defaultServiceManager1_2()->getTransport(
+ V1_0::IComponentStore::descriptor, "software");
+ if (transport == IServiceManager::Transport::HWBINDER) {
+ if (platformVersion != __ANDROID_API_Q__) {
+ LOG(ERROR) << "We don't expect V1.0::IComponentStore to be declared on this device";
+ }
+ return ::android::sp<V1_0::utils::ComponentStore>::make(store);
+ }
+ return nullptr;
+}
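+
+// For reference: the helper above returns a wrapper for the highest software
+// HIDL IComponentStore version declared in the vintf manifest (V1.2, then V1.1,
+// then V1.0), or nullptr when none is declared; the platform-version checks
+// only log unexpected combinations.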
+
extern "C" void RegisterCodecServices() {
const bool aidlSelected = c2_aidl::utils::IsSelected();
constexpr int kThreadCount = 64;
@@ -751,33 +792,6 @@
using namespace ::android::hardware::media::c2;
- int platformVersion = android_get_device_api_level();
- // STOPSHIP: Remove code name checking once platform version bumps up to 35.
- std::string codeName =
- android::base::GetProperty("ro.build.version.codename", "");
- if (codeName == "VanillaIceCream") {
- platformVersion = __ANDROID_API_V__;
- }
-
- android::sp<V1_0::IComponentStore> hidlStore;
- std::shared_ptr<c2_aidl::IComponentStore> aidlStore;
- const char *hidlVer = "(unknown)";
- if (aidlSelected) {
- aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(store);
- } else if (platformVersion >= __ANDROID_API_S__) {
- hidlStore = ::android::sp<V1_2::utils::ComponentStore>::make(store);
- hidlVer = "1.2";
- } else if (platformVersion == __ANDROID_API_R__) {
- hidlStore = ::android::sp<V1_1::utils::ComponentStore>::make(store);
- hidlVer = "1.1";
- } else if (platformVersion == __ANDROID_API_Q__) {
- hidlStore = ::android::sp<V1_0::utils::ComponentStore>::make(store);
- hidlVer = "1.0";
- } else { // platformVersion < __ANDROID_API_Q__
- LOG(ERROR) << "The platform version " << platformVersion <<
- " is not supported.";
- return;
- }
if (!ionPropertiesDefined()) {
using IComponentStore =
::android::hardware::media::c2::V1_0::IComponentStore;
@@ -823,7 +837,10 @@
std::string(c2_aidl::IComponentStore::descriptor) + "/software";
if (__builtin_available(android __ANDROID_API_S__, *)) {
if (AServiceManager_isDeclared(aidlServiceName.c_str())) {
- if (!aidlStore) {
+ std::shared_ptr<c2_aidl::IComponentStore> aidlStore;
+ if (aidlSelected) {
+ aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(store);
+ } else {
aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(
std::make_shared<H2C2ComponentStore>(nullptr));
}
@@ -837,22 +854,23 @@
}
}
+ android::sp<V1_0::IComponentStore> hidlStore = getDeclaredHidlSwcodec(store);
// If the software component store isn't declared in the manifest, we don't
// need to create the service and register it.
- using ::android::hidl::manager::V1_2::IServiceManager;
- IServiceManager::Transport transport =
- android::hardware::defaultServiceManager1_2()->getTransport(
- V1_2::utils::ComponentStore::descriptor, "software");
- if (transport == IServiceManager::Transport::HWBINDER) {
- if (!hidlStore) {
+ if (hidlStore) {
+ if (registered && aidlSelected) {
+ LOG(INFO) << "Both HIDL and AIDL software codecs are declared in the vintf "
+ << "manifest, but AIDL was selected. "
+ << "Creating a null HIDL service so it's not accidentally "
+ << "used. The AIDL software codec is already registered.";
hidlStore = ::android::sp<V1_2::utils::ComponentStore>::make(
std::make_shared<H2C2ComponentStore>(nullptr));
- hidlVer = "1.2";
}
if (hidlStore->registerAsService("software") == android::OK) {
registered = true;
} else {
- LOG(ERROR) << "Cannot register software Codec2 v" << hidlVer << " service.";
+ LOG(ERROR) << "Cannot register software Codec2 " << hidlStore->descriptor
+ << " service.";
}
} else {
LOG(INFO) << "The HIDL software Codec2 service is deprecated"
diff --git a/media/module/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
index d096d63..7a49d8e 100644
--- a/media/module/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -134,6 +134,8 @@
],
dictionary: "mp4_extractor_fuzzer.dict",
+
+ corpus: ["corpus_mp4/*"],
}
cc_fuzz {
@@ -202,7 +204,6 @@
"ogg_extractor_fuzzer.cpp",
],
-
static_libs: [
"libstagefright_metadatautils",
"libvorbisidec",
diff --git a/media/module/extractors/fuzzers/corpus_mp4/164a5bad5340b262316f93932c4160813657e1e0 b/media/module/extractors/fuzzers/corpus_mp4/164a5bad5340b262316f93932c4160813657e1e0
new file mode 100644
index 0000000..c17251b
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/164a5bad5340b262316f93932c4160813657e1e0
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/24f242f3b30fd5c2ff0f9aebed4375a3ab5cdceb b/media/module/extractors/fuzzers/corpus_mp4/24f242f3b30fd5c2ff0f9aebed4375a3ab5cdceb
new file mode 100644
index 0000000..16907fd
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/24f242f3b30fd5c2ff0f9aebed4375a3ab5cdceb
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/2a017927fdab79a8cc3b0bb75224cb44f4c1b35b b/media/module/extractors/fuzzers/corpus_mp4/2a017927fdab79a8cc3b0bb75224cb44f4c1b35b
new file mode 100644
index 0000000..2ec7881
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/2a017927fdab79a8cc3b0bb75224cb44f4c1b35b
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/58b3155e64ac16e4e6c68b68257871bcd769b92f b/media/module/extractors/fuzzers/corpus_mp4/58b3155e64ac16e4e6c68b68257871bcd769b92f
new file mode 100644
index 0000000..cd1fdcc
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/58b3155e64ac16e4e6c68b68257871bcd769b92f
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/64093d4da00ba406310c7679cd8b37562e6344b5 b/media/module/extractors/fuzzers/corpus_mp4/64093d4da00ba406310c7679cd8b37562e6344b5
new file mode 100644
index 0000000..f1ea812
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/64093d4da00ba406310c7679cd8b37562e6344b5
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/7355066d4975de07e9b6d0e9907c896eeb90577a b/media/module/extractors/fuzzers/corpus_mp4/7355066d4975de07e9b6d0e9907c896eeb90577a
new file mode 100644
index 0000000..c5d3eb2
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/7355066d4975de07e9b6d0e9907c896eeb90577a
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/7a48b0237581c794097a15add08517b3c6dc0aa2 b/media/module/extractors/fuzzers/corpus_mp4/7a48b0237581c794097a15add08517b3c6dc0aa2
new file mode 100644
index 0000000..1f6c29d
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/7a48b0237581c794097a15add08517b3c6dc0aa2
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/8706f07041a0cf828a7b40d727533d6c732b5ebc b/media/module/extractors/fuzzers/corpus_mp4/8706f07041a0cf828a7b40d727533d6c732b5ebc
new file mode 100644
index 0000000..40d639d
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/8706f07041a0cf828a7b40d727533d6c732b5ebc
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/911a46d40d60b9a806dbdc70799048df2f546615 b/media/module/extractors/fuzzers/corpus_mp4/911a46d40d60b9a806dbdc70799048df2f546615
new file mode 100644
index 0000000..2056348
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/911a46d40d60b9a806dbdc70799048df2f546615
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/97b1d0e78525c793574cce3e66f86564c2a10271 b/media/module/extractors/fuzzers/corpus_mp4/97b1d0e78525c793574cce3e66f86564c2a10271
new file mode 100644
index 0000000..f50d4f4
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/97b1d0e78525c793574cce3e66f86564c2a10271
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/d52e7095534fdf1f040b10a80df4cbc069a97a4e b/media/module/extractors/fuzzers/corpus_mp4/d52e7095534fdf1f040b10a80df4cbc069a97a4e
new file mode 100644
index 0000000..25ea55b
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/d52e7095534fdf1f040b10a80df4cbc069a97a4e
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/ec6bd6069f74a2f6e92442f88efb29288ad6f456 b/media/module/extractors/fuzzers/corpus_mp4/ec6bd6069f74a2f6e92442f88efb29288ad6f456
new file mode 100644
index 0000000..62d259b
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/ec6bd6069f74a2f6e92442f88efb29288ad6f456
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/faa22bcb745206340d5d411b498a3868d2b1feec b/media/module/extractors/fuzzers/corpus_mp4/faa22bcb745206340d5d411b498a3868d2b1feec
new file mode 100644
index 0000000..d649632
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/faa22bcb745206340d5d411b498a3868d2b1feec
Binary files differ
diff --git a/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict b/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict
index 3683649..b48c854 100644
--- a/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict
+++ b/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict
@@ -246,3 +246,4 @@
kw245="iso5"
kw246="resv"
kw247="iso6"
+kw248="clap"
diff --git a/media/module/extractors/mkv/MatroskaExtractor.cpp b/media/module/extractors/mkv/MatroskaExtractor.cpp
index 6900341..f326db1 100644
--- a/media/module/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/module/extractors/mkv/MatroskaExtractor.cpp
@@ -1769,6 +1769,30 @@
}
+status_t MatroskaExtractor::synthesizeVP9(TrackInfo* trackInfo, size_t index) {
+ BlockIterator iter(this, trackInfo->mTrackNum, index);
+ if (iter.eos()) {
+ return ERROR_MALFORMED;
+ }
+
+ const mkvparser::Block* block = iter.block();
+ if (block->GetFrameCount() <= 0) {
+ return ERROR_MALFORMED;
+ }
+
+ const mkvparser::Block::Frame& frame = block->GetFrame(0);
+ auto tmpData = heapbuffer<unsigned char>(frame.len);
+ long n = frame.Read(mReader, tmpData.get());
+ if (n != 0) {
+ return ERROR_MALFORMED;
+ }
+
+ if (!MakeVP9CodecSpecificData(trackInfo->mMeta, tmpData.get(), frame.len)) {
+ return ERROR_MALFORMED;
+ }
+
+ return OK;
+}
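+
+// For reference: this mirrors the existing synthesizeAVCC()/synthesizeMPEG4()
+// helpers. It reads the first frame of the track and derives the VP9
+// CodecPrivate via MakeVP9CodecSpecificData() when the container did not
+// provide one.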
static inline bool isValidInt32ColourValue(long long value) {
return value != mkvparser::Colour::kValueNotPresent
@@ -2002,6 +2026,8 @@
// specified in http://www.webmproject.org/vp9/profiles/.
AMediaFormat_setBuffer(meta,
AMEDIAFORMAT_KEY_CSD_0, codecPrivate, codecPrivateSize);
+ } else {
+ isSetCsdFrom1stFrame = true;
}
} else if (!strcmp("V_AV1", codecID)) {
AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_VIDEO_AV1);
@@ -2254,6 +2280,13 @@
mTracks.pop();
continue;
}
+ } else if ((!strcmp("V_VP9", codecID) && codecPrivateSize == 0) ||
+ (!strcmp(mimetype, MEDIA_MIMETYPE_VIDEO_VP9) && isSetCsdFrom1stFrame)) {
+ // Attempt to recover from VP9 track without codec private data
+ err = synthesizeVP9(trackInfo, n);
+ if (err != OK) {
+ ALOGW("ignoring error %d in synthesizeVP9", err);
+ }
}
// the TrackInfo owns the metadata now
meta = nullptr;
@@ -2279,6 +2312,8 @@
int64_t thumbnailTimeUs = 0;
size_t maxBlockSize = 0;
while (!iter.eos() && j < 20) {
+ int64_t blockTimeUs = iter.blockTimeUs();
+
if (iter.block()->IsKey()) {
++j;
@@ -2289,9 +2324,13 @@
if (blockSize > maxBlockSize) {
maxBlockSize = blockSize;
- thumbnailTimeUs = iter.blockTimeUs();
+ thumbnailTimeUs = blockTimeUs;
}
}
+ // Exit after 20s if we've already found at least one key frame.
+ if (blockTimeUs > 20000000 && maxBlockSize > 0) {
+ break;
+ }
iter.advance();
}
AMediaFormat_setInt64(info->mMeta,
diff --git a/media/module/extractors/mkv/include/MatroskaExtractor.h b/media/module/extractors/mkv/include/MatroskaExtractor.h
index 99fad17..2e4d955 100644
--- a/media/module/extractors/mkv/include/MatroskaExtractor.h
+++ b/media/module/extractors/mkv/include/MatroskaExtractor.h
@@ -95,6 +95,7 @@
status_t synthesizeAVCC(TrackInfo *trackInfo, size_t index);
status_t synthesizeMPEG2(TrackInfo *trackInfo, size_t index);
status_t synthesizeMPEG4(TrackInfo *trackInfo, size_t index);
+ status_t synthesizeVP9(TrackInfo* trackInfo, size_t index);
status_t initTrackInfo(
const mkvparser::Track *track,
AMediaFormat *meta,
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index b3707c8..cb2994e 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -1615,39 +1615,6 @@
mLastTrack->timescale = ntohl(timescale);
- // 14496-12 says all ones means indeterminate, but some files seem to use
- // 0 instead. We treat both the same.
- int64_t duration = 0;
- if (version == 1) {
- if (mDataSource->readAt(
- timescale_offset + 4, &duration, sizeof(duration))
- < (ssize_t)sizeof(duration)) {
- return ERROR_IO;
- }
- if (duration != -1) {
- duration = ntoh64(duration);
- }
- } else {
- uint32_t duration32;
- if (mDataSource->readAt(
- timescale_offset + 4, &duration32, sizeof(duration32))
- < (ssize_t)sizeof(duration32)) {
- return ERROR_IO;
- }
- if (duration32 != 0xffffffff) {
- duration = ntohl(duration32);
- }
- }
- if (duration != 0 && mLastTrack->timescale != 0) {
- long double durationUs = ((long double)duration * 1000000) / mLastTrack->timescale;
- if (durationUs < 0 || durationUs > INT64_MAX) {
- ALOGE("cannot represent %lld * 1000000 / %lld in 64 bits",
- (long long) duration, (long long) mLastTrack->timescale);
- return ERROR_MALFORMED;
- }
- AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs);
- }
-
uint8_t lang[2];
off64_t lang_offset;
if (version == 1) {
@@ -3907,17 +3874,18 @@
}
int32_t id;
+ int64_t duration;
if (version == 1) {
// we can get ctime value from U64_AT(&buffer[4])
// we can get mtime value from U64_AT(&buffer[12])
id = U32_AT(&buffer[20]);
- // we can get duration value from U64_AT(&buffer[28])
+ duration = U64_AT(&buffer[28]);
} else if (version == 0) {
// we can get ctime value from U32_AT(&buffer[4])
// we can get mtime value from U32_AT(&buffer[8])
id = U32_AT(&buffer[12]);
- // we can get duration value from U32_AT(&buffer[20])
+ duration = U32_AT(&buffer[20]);
} else {
return ERROR_UNSUPPORTED;
}
@@ -3926,6 +3894,15 @@
return ERROR_MALFORMED;
AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_TRACK_ID, id);
+ if (duration != 0 && mHeaderTimescale != 0) {
+ long double durationUs = ((long double)duration * 1000000) / mHeaderTimescale;
+ if (durationUs < 0 || durationUs > INT64_MAX) {
+ ALOGE("cannot represent %lld * 1000000 / %lld in 64 bits",
+ (long long) duration, (long long) mHeaderTimescale);
+ return ERROR_MALFORMED;
+ }
+ AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs);
+ }
size_t matrixOffset = dynSize + 16;
int32_t a00 = U32_AT(&buffer[matrixOffset]);
diff --git a/media/module/metadatautils/MetaDataUtils.cpp b/media/module/metadatautils/MetaDataUtils.cpp
index db60f04..0895bb5 100644
--- a/media/module/metadatautils/MetaDataUtils.cpp
+++ b/media/module/metadatautils/MetaDataUtils.cpp
@@ -81,6 +81,177 @@
return true;
}
+// Check whether the next 24 bits are the VP9 SYNC_CODE (0x49 0x83 0x42).
+static bool isVp9SyncCode(ABitReader &bits) {
+ if (bits.numBitsLeft() < 24) {
+ return false;
+ }
+ return bits.getBits(24) == 0x498342;
+}
+
+// Parses the bit depth and chroma subsampling from a VP9 uncompressed header
+// (see the bitdepth_colorspace_sampling section in 6.2 of the VP9 bitstream spec).
+static bool getVp9BitdepthChromaSubSampling(ABitReader &bits,
+ int32_t profile,
+ int32_t *bitDepth,
+ int32_t *chromaSubsampling) {
+ if (profile >= 2) {
+ if (bits.numBitsLeft() < 1) {
+ return false;
+ }
+ *bitDepth = bits.getBits(1) ? 12 : 10;
+ } else {
+ *bitDepth = 8;
+ }
+
+ uint32_t colorspace;
+ if (!bits.getBitsGraceful(3, &colorspace)) {
+ return false;
+ }
+
+ *chromaSubsampling = -1;
+ if (colorspace != 7 /*SRGB*/) {
+ // Skip yuv_range_flag
+ if (!bits.skipBits(1)) {
+ return false;
+ }
+ // Check for subsampling only for profiles 1 and 3.
+ if (profile == 1 || profile == 3) {
+ uint32_t ss_x;
+ uint32_t ss_y;
+ if (bits.getBitsGraceful(1, &ss_x) && bits.getBitsGraceful(1, &ss_y)) {
+ *chromaSubsampling = (ss_x << 1) | ss_y;
+ } else {
+ return false;
+ }
+ } else {
+ *chromaSubsampling = 3;
+ }
+ } else {
+ if (profile == 1 || profile == 3) {
+ *chromaSubsampling = 0;
+ }
+ }
+ return true;
+}
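+
+// For reference: the packed chromaSubsampling value is
+// (subsampling_x << 1) | subsampling_y, i.e. 3 = 4:2:0, 2 = 4:2:2, 0 = 4:4:4;
+// -1 means the header did not carry the information, and the subsampling
+// feature is then omitted from the CSD.
+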
+// The data parameter contains the first frame, starting with the uncompressed frame
+// header. This uncompressed header (see section 6.2 of the VP9 bitstream spec) is
+// parsed for the profile, bit depth and chroma subsampling.
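+//
+// For example, a profile 0, 8-bit, 4:2:0 key frame yields the 9-byte CSD
+// 01 01 00  03 01 08  04 01 03, i.e. one {ID, length, value} triple each for
+// profile, bit depth and chroma subsampling.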
+bool MakeVP9CodecSpecificData(AMediaFormat* meta, const uint8_t* data, size_t size) {
+ if (meta == nullptr || data == nullptr || size == 0) {
+ return false;
+ }
+
+ ABitReader bits(data, size);
+
+ // First 2 bits of the uncompressed header should be the frame_marker.
+ if (bits.getBits(2) != 0b10) {
+ return false;
+ }
+
+ int32_t profileLowBit = bits.getBits(1);
+ int32_t profileHighBit = bits.getBits(1);
+ int32_t profile = profileHighBit * 2 + profileLowBit;
+
+ // One reserved '0' bit if profile is 3.
+ if (profile == 3 && bits.getBits(1) != 0) {
+ return false;
+ }
+
+ // If show_existing_frame is set, we get no more data. Since this is
+ // expected to be the first frame, we can return false which will cascade
+ // into ERROR_MALFORMED.
+ if (bits.getBits(1)) {
+ return false;
+ }
+
+ int32_t frame_type = bits.getBits(1);
+
+ // Up to 7 bits may have been read so far, which were guaranteed to be available
+ // since size > 0. Check that enough bits are available before reading any more.
+ if (bits.numBitsLeft() < 2) {
+ return false;
+ }
+
+ int32_t show_frame = bits.getBits(1);
+ int32_t error_resilient_mode = bits.getBits(1);
+ int32_t bitDepth = 8;
+ int32_t chromaSubsampling = -1;
+
+ if (frame_type == 0 /* KEY_FRAME */) {
+ // Check for sync code.
+ if (!isVp9SyncCode(bits)) {
+ return false;
+ }
+
+ if (!getVp9BitdepthChromaSubSampling(bits, profile, &bitDepth, &chromaSubsampling)) {
+ return false;
+ }
+ } else {
+ int32_t intra_only = 0;
+ if (!show_frame) {
+ if (bits.numBitsLeft() < 1) {
+ return false;
+ }
+ intra_only = bits.getBits(1);
+ }
+
+ if (!error_resilient_mode) {
+ if (bits.numBitsLeft() < 2) {
+ return false;
+ }
+ // ignore reset_frame_context
+ bits.skipBits(2);
+ }
+
+ if (!intra_only) {
+ // Require the first frame to be either a KEY_FRAME or an INTER_FRAME with intra_only set to true
+ return false;
+ }
+
+ // Check for sync code.
+ if (!isVp9SyncCode(bits)) {
+ return false;
+ }
+
+ if (profile > 0) {
+ if (!getVp9BitdepthChromaSubSampling(bits, profile, &bitDepth, &chromaSubsampling)) {
+ return false;
+ }
+ } else {
+ bitDepth = 8;
+ chromaSubsampling = 3;
+ }
+ }
+ int32_t csdSize = 6;
+ if (chromaSubsampling != -1) {
+ csdSize += 3;
+ }
+
+ // Create VP9 Codec Feature Metadata (CodecPrivate) that can be parsed
+ // https://www.webmproject.org/docs/container/#vp9-codec-feature-metadata-codecprivate
+ sp<ABuffer> csd = sp<ABuffer>::make(csdSize);
+ uint8_t* csdData = csd->data();
+
+ *csdData++ = 0x01 /* FEATURE PROFILE */;
+ *csdData++ = 0x01 /* length */;
+ *csdData++ = profile;
+
+ *csdData++ = 0x03 /* FEATURE BITDEPTH */;
+ *csdData++ = 0x01 /* length */;
+ *csdData++ = bitDepth;
+
+ // csdSize more than 6 means chroma subsampling data was found.
+ if (csdSize > 6) {
+ *csdData++ = 0x04 /* FEATURE SUBSAMPLING */;
+ *csdData++ = 0x01 /* length */;
+ *csdData++ = chromaSubsampling;
+ }
+
+ AMediaFormat_setBuffer(meta, AMEDIAFORMAT_KEY_CSD_0, csd->data(), csd->size());
+ return true;
+}
+
bool MakeAACCodecSpecificData(MetaDataBase &meta, const uint8_t *data, size_t size) {
if (data == nullptr || size < 7) {
return false;
diff --git a/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
index dcaf27f..69cf21a 100644
--- a/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
+++ b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
@@ -38,6 +38,8 @@
void parseVorbisComment(
AMediaFormat *fileMeta, const char *comment, size_t commentLength);
+bool MakeVP9CodecSpecificData(AMediaFormat* meta, const uint8_t* data, size_t size);
+
} // namespace android
#endif // META_DATA_UTILS_H_
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 9ec7700..3d873df 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -192,7 +192,6 @@
header_libs: [
"libstagefright_headers",
"libmedia_headers",
- "libstagefright_headers",
],
shared_libs: [
diff --git a/media/tests/benchmark/src/native/decoder/C2Decoder.cpp b/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
index 6539f24..f9a6b1c 100644
--- a/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
+++ b/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
@@ -106,7 +106,7 @@
work->input.ordinal.frameIndex = mNumInputFrame;
work->input.buffers.clear();
int size = frameInfo[mNumInputFrame].size;
- int alignedSize = ALIGN(size, PAGE_SIZE);
+ int alignedSize = ALIGN(size, getpagesize());
if (size) {
std::shared_ptr<C2LinearBlock> block;
status = mLinearPool->fetchLinearBlock(
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 2946398..b85d9de 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+//#define LOG_NDEBUG 0
#define LOG_TAG "ServiceUtilities"
#include <audio_utils/clock.h>
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index afd28e5..34395d8 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -66,12 +66,12 @@
// Remove some pedantic stylistic requirements.
"-google-readability-casting", // C++ casts not always necessary and may be verbose
- "-google-readability-todo", // do not require TODO(info)
+ "-google-readability-todo", // do not require TODO(info)
- "-bugprone-unhandled-self-assignment",
- "-bugprone-suspicious-string-compare",
- "-cert-oop54-cpp", // found in TransactionLog.h
"-bugprone-narrowing-conversions", // b/182410845
+ "-bugprone-suspicious-string-compare",
+ "-bugprone-unhandled-self-assignment",
+ "-cert-oop54-cpp", // found in TransactionLog.h
]
// TODO(b/275642749) Reenable these warnings
@@ -101,9 +101,9 @@
"-Wall",
"-Wdeprecated",
"-Werror",
+ "-Werror=conditional-uninitialized",
"-Werror=implicit-fallthrough",
"-Werror=sometimes-uninitialized",
- "-Werror=conditional-uninitialized",
"-Wextra",
// suppress some warning chatter.
@@ -113,7 +113,6 @@
"-Wredundant-decls",
"-Wshadow",
"-Wstrict-aliasing",
- "-fstrict-aliasing",
"-Wthread-safety",
//"-Wthread-safety-negative", // experimental - looks broken in R.
"-Wunreachable-code",
@@ -121,6 +120,7 @@
"-Wunreachable-code-return",
"-Wunused",
"-Wused-but-marked-unused",
+ "-fstrict-aliasing",
]
// Eventually use common tidy defaults
@@ -134,7 +134,7 @@
tidy_checks: audioflinger_tidy_errors,
tidy_checks_as_errors: audioflinger_tidy_errors,
tidy_flags: [
- "-format-style=file",
+ "-format-style=file",
],
}
@@ -142,48 +142,47 @@
name: "libaudioflinger_dependencies",
shared_libs: [
- "audioflinger-aidl-cpp",
"audioclient-types-aidl-cpp",
+ "audioflinger-aidl-cpp",
"av-types-aidl-cpp",
"effect-aidl-cpp",
- "libaudioclient_aidl_conversion",
"libactivitymanager_aidl",
+ "libaudioclient",
+ "libaudioclient_aidl_conversion",
"libaudioflinger_datapath",
"libaudioflinger_fastpath",
"libaudioflinger_timing",
"libaudioflinger_utils",
"libaudiofoundation",
"libaudiohal",
+ "libaudiomanager",
"libaudioprocessing",
"libaudioutils",
- "libcutils",
- "libutils",
- "liblog",
"libbinder",
"libbinder_ndk",
- "libaudioclient",
- "libaudiomanager",
+ "libcutils",
+ "liblog",
+ "libmedia_helper",
"libmediametrics",
"libmediautils",
+ "libmemunreachable",
"libnbaio",
"libnblog",
"libpermission",
"libpowermanager",
- "libmemunreachable",
- "libmedia_helper",
"libshmemcompat",
"libsounddose",
+ "libutils",
"libvibrator",
"packagemanager_aidl-cpp",
],
static_libs: [
- "libmedialogservice",
"libaudiospdif",
+ "libmedialogservice",
],
}
-
cc_library {
name: "libaudioflinger",
@@ -230,9 +229,9 @@
],
cflags: [
- "-fvisibility=hidden",
- "-Werror",
"-Wall",
+ "-Werror",
+ "-fvisibility=hidden",
],
sanitize: {
integer_overflow: true,
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index c5424a2..4e50114 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1922,10 +1922,11 @@
if (mPrimaryHardwareDev == nullptr) {
return 0;
}
+ if (mInputBufferSizeOrderedDevs.empty()) {
+ return 0;
+ }
mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE;
- sp<DeviceHalInterface> dev = mPrimaryHardwareDev.load()->hwDevice();
-
std::vector<audio_channel_mask_t> channelMasks = {channelMask};
if (channelMask != AUDIO_CHANNEL_IN_MONO) {
channelMasks.push_back(AUDIO_CHANNEL_IN_MONO);
@@ -1955,6 +1956,22 @@
mHardwareStatus = AUDIO_HW_IDLE;
+ auto getInputBufferSize = [](const sp<DeviceHalInterface>& dev, audio_config_t config,
+ size_t* bytes) -> status_t {
+ if (!dev) {
+ return BAD_VALUE;
+ }
+ status_t result = dev->getInputBufferSize(&config, bytes);
+ if (result == BAD_VALUE) {
+ // Retry with the config suggested by the HAL.
+ result = dev->getInputBufferSize(&config, bytes);
+ }
+ if (result != OK || *bytes == 0) {
+ return BAD_VALUE;
+ }
+ return result;
+ };
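+
+ // The loop below tries this helper on each HAL module in
+ // mInputBufferSizeOrderedDevs order and keeps the first buffer size reported;
+ // it only moves on to the next candidate configuration if no module accepts
+ // the current one.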
+
// Change parameters of the configuration each iteration until we find a
// configuration that the device will support, or HAL suggests what it supports.
audio_config_t config = AUDIO_CONFIG_INITIALIZER;
@@ -1966,16 +1983,15 @@
config.sample_rate = testSampleRate;
size_t bytes = 0;
- audio_config_t loopConfig = config;
- status_t result = dev->getInputBufferSize(&config, &bytes);
- if (result == BAD_VALUE) {
- // Retry with the config suggested by the HAL.
- result = dev->getInputBufferSize(&config, &bytes);
+ ret = BAD_VALUE;
+ for (const AudioHwDevice* dev : mInputBufferSizeOrderedDevs) {
+ ret = getInputBufferSize(dev->hwDevice(), config, &bytes);
+ if (ret == OK) {
+ break;
+ }
}
- if (result != OK || bytes == 0) {
- config = loopConfig;
- continue;
- }
+ if (ret == BAD_VALUE) continue;
+
if (config.sample_rate != sampleRate || config.channel_mask != channelMask ||
config.format != format) {
uint32_t dstChannelCount = audio_channel_count_from_in_mask(channelMask);
@@ -2551,6 +2567,7 @@
bool mm;
if (OK == dev->getMasterMute(&mm)) {
mMasterMute = mm;
+ ALOGI_IF(mMasterMute, "%s: applying mute from HAL %s", __func__, name);
}
}
@@ -2602,12 +2619,43 @@
}
mAudioHwDevs.add(handle, audioDevice);
+ if (strcmp(name, AUDIO_HARDWARE_MODULE_ID_STUB) != 0) {
+ mInputBufferSizeOrderedDevs.insert(audioDevice);
+ }
ALOGI("loadHwModule() Loaded %s audio interface, handle %d", name, handle);
return audioDevice;
}
+// Sort the AudioHwDevice instances so that getInputBufferSize() traverses them in the
+// following order: primary, USB, Bluetooth, A2DP, other modules, remote submix.
+/* static */
+bool AudioFlinger::inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs) {
+ static const std::map<std::string_view, int> kPriorities = {
+ { AUDIO_HARDWARE_MODULE_ID_PRIMARY, 0 }, { AUDIO_HARDWARE_MODULE_ID_USB, 1 },
+ { AUDIO_HARDWARE_MODULE_ID_BLUETOOTH, 2 }, { AUDIO_HARDWARE_MODULE_ID_A2DP, 3 },
+ { AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX, std::numeric_limits<int>::max() }
+ };
+
+ const std::string_view lhsName = lhs->moduleName();
+ const std::string_view rhsName = rhs->moduleName();
+
+ auto lhsPriority = std::numeric_limits<int>::max() - 1;
+ if (const auto lhsIt = kPriorities.find(lhsName); lhsIt != kPriorities.end()) {
+ lhsPriority = lhsIt->second;
+ }
+ auto rhsPriority = std::numeric_limits<int>::max() - 1;
+ if (const auto rhsIt = kPriorities.find(rhsName); rhsIt != kPriorities.end()) {
+ rhsPriority = rhsIt->second;
+ }
+
+ if (lhsPriority != rhsPriority) {
+ return lhsPriority < rhsPriority;
+ }
+ return lhsName < rhsName;
+}
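+
+// For example, with the primary, usb, r_submix and a vendor "foo" module loaded,
+// the traversal order is: primary, usb, foo, r_submix; modules not listed in
+// kPriorities keep a stable alphabetical order between a2dp and r_submix.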
+
// ----------------------------------------------------------------------------
uint32_t AudioFlinger::getPrimaryOutputSamplingRate() const
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 0f75d6e..39462fc 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -629,6 +629,10 @@
DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*> mAudioHwDevs
GUARDED_BY(hardwareMutex()) {nullptr /* defValue */};
+ static bool inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs);
+ std::set<AudioHwDevice*, decltype(&inputBufferSizeDevsCmp)>
+ mInputBufferSizeOrderedDevs GUARDED_BY(hardwareMutex()) {inputBufferSizeDevsCmp};
+
const sp<DevicesFactoryHalInterface> mDevicesFactoryHal =
DevicesFactoryHalInterface::create();
/* const */ sp<DevicesFactoryHalCallback> mDevicesFactoryHalCallback; // set onFirstRef().
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 73a89e5..ae55329 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -31,6 +31,7 @@
#include <media/AudioContainers.h>
#include <media/AudioDeviceTypeAddr.h>
#include <media/AudioEffect.h>
+#include <media/EffectClientAsyncProxy.h>
#include <media/ShmemCompat.h>
#include <media/TypeConverter.h>
#include <media/audiohal/EffectHalInterface.h>
@@ -1691,7 +1692,8 @@
const sp<media::IEffectClient>& effectClient,
int32_t priority, bool notifyFramesProcessed)
: BnEffect(),
- mEffect(effect), mEffectClient(effectClient), mClient(client), mCblk(NULL),
+ mEffect(effect), mEffectClient(media::EffectClientAsyncProxy::makeIfNeeded(effectClient)),
+ mClient(client), mCblk(nullptr),
mPriority(priority), mHasControl(false), mEnabled(false), mDisconnected(false),
mNotifyFramesProcessed(notifyFramesProcessed)
{
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index ddef7f3..60abb58 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -721,8 +721,9 @@
{
audio_utils::unique_lock _l(event->mutex());
while (event->mWaitStatus) {
- if (event->mCondition.wait_for(_l, std::chrono::nanoseconds(kConfigEventTimeoutNs))
- == std::cv_status::timeout) {
+ if (event->mCondition.wait_for(
+ _l, std::chrono::nanoseconds(kConfigEventTimeoutNs), getTid())
+ == std::cv_status::timeout) {
event->mStatus = TIMED_OUT;
event->mWaitStatus = false;
}
@@ -2689,14 +2690,17 @@
}
}
- // Set DIRECT flag if current thread is DirectOutputThread. This can
- // happen when the playback is rerouted to direct output thread by
+ // Set DIRECT/OFFLOAD flag if current thread is DirectOutputThread/OffloadThread.
+ // This can happen when the playback is rerouted to direct output/offload thread by
// dynamic audio policy.
// Do NOT report the flag changes back to client, since the client
- // doesn't explicitly request a direct flag.
+ // doesn't explicitly request a direct/offload flag.
audio_output_flags_t trackFlags = *flags;
if (mType == DIRECT) {
trackFlags = static_cast<audio_output_flags_t>(trackFlags | AUDIO_OUTPUT_FLAG_DIRECT);
+ } else if (mType == OFFLOAD) {
+ trackFlags = static_cast<audio_output_flags_t>(trackFlags |
+ AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT);
}
*afTrackFlags = trackFlags;
@@ -3356,9 +3360,9 @@
return NO_ERROR;
} else {
status_t status;
- uint32_t frames;
+ uint64_t frames = 0;
status = mOutput->getRenderPosition(&frames);
- *dspFrames = (size_t)frames;
+ *dspFrames = (uint32_t)frames;
return status;
}
}
@@ -5902,7 +5906,7 @@
vaf = v * sendLevel * (1. / MAX_GAIN_INT);
}
- track->setFinalVolume(vrf, vlf);
+ track->setFinalVolume(vlf, vrf);
// Delegate volume control to effect in track effect chain if needed
if (chain != 0 && chain->setVolume_l(&vl, &vr)) {
@@ -8152,7 +8156,6 @@
inputStandBy();
reacquire_wakelock:
- sp<IAfRecordTrack> activeTrack;
{
audio_utils::lock_guard _l(mutex());
acquireWakeLock_l();
@@ -8168,6 +8171,8 @@
// loop while there is work to do
for (int64_t loopCount = 0;; ++loopCount) { // loopCount used for statistics tracking
+ // Note: these sp<> are released at the end of the for loop outside of the mutex() lock.
+ sp<IAfRecordTrack> activeTrack;
Vector<sp<IAfEffectChain>> effectChains;
// activeTracks accumulates a copy of a subset of mActiveTracks
@@ -11060,7 +11065,7 @@
char *endptr;
unsigned long ul = strtoul(value, &endptr, 0);
if (*endptr == '\0' && ul != 0) {
- ALOGD("Silence is golden");
+ ALOGW("%s: mute from ro.audio.silent. Silence is golden", __func__);
// The setprop command will not allow a property to be changed after
// the first time it is set, so we don't have to worry about un-muting.
setMasterMute_l(true);
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 4e82173..77abaf6 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1674,7 +1674,7 @@
if (result == OK) {
ALOGI("%s(%d): processed mute state for port ID %d from %d to %d", __func__, id(), mPortId,
- int(muteState), int(mMuteState));
+ static_cast<int>(mMuteState), static_cast<int>(muteState));
mMuteState = muteState;
} else {
ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d", __func__, id(),
@@ -3554,6 +3554,8 @@
}
if (result == OK) {
+ ALOGI("%s(%d): processed mute state for port ID %d from %d to %d", __func__, id(), mPortId,
+ static_cast<int>(mMuteState), static_cast<int>(muteState));
mMuteState = muteState;
} else {
ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d",
diff --git a/services/audioflinger/afutils/Android.bp b/services/audioflinger/afutils/Android.bp
index 5e29ce9..e147266 100644
--- a/services/audioflinger/afutils/Android.bp
+++ b/services/audioflinger/afutils/Android.bp
@@ -23,7 +23,7 @@
tidy_checks: audioflinger_utils_tidy_errors,
tidy_checks_as_errors: audioflinger_utils_tidy_errors,
tidy_flags: [
- "-format-style=file",
+ "-format-style=file",
],
}
@@ -64,10 +64,10 @@
],
header_libs: [
- "libaaudio_headers", // PropertyUtils.cpp
+ "libaaudio_headers", // PropertyUtils.cpp
],
include_dirs: [
- "frameworks/av/services/audioflinger", // for configuration
+ "frameworks/av/services/audioflinger", // for configuration
],
}
diff --git a/services/audioflinger/afutils/NBAIO_Tee.cpp b/services/audioflinger/afutils/NBAIO_Tee.cpp
index 86fb128..cdc8e95 100644
--- a/services/audioflinger/afutils/NBAIO_Tee.cpp
+++ b/services/audioflinger/afutils/NBAIO_Tee.cpp
@@ -514,6 +514,12 @@
return NO_ERROR; // return full path
}
+/* static */
+NBAIO_Tee::RunningTees& NBAIO_Tee::getRunningTees() {
+ [[clang::no_destroy]] static RunningTees runningTees;
+ return runningTees;
+}
+
} // namespace android
#endif // TEE_SINK
diff --git a/services/audioflinger/afutils/NBAIO_Tee.h b/services/audioflinger/afutils/NBAIO_Tee.h
index a5c544e..5ab1949 100644
--- a/services/audioflinger/afutils/NBAIO_Tee.h
+++ b/services/audioflinger/afutils/NBAIO_Tee.h
@@ -310,10 +310,7 @@
};
// singleton
- static RunningTees &getRunningTees() {
- static RunningTees runningTees;
- return runningTees;
- }
+ static RunningTees& getRunningTees();
// The NBAIO TeeImpl may have lifetime longer than NBAIO_Tee if
// RunningTees::dump() is being called simultaneous to ~NBAIO_Tee().
diff --git a/services/audioflinger/datapath/Android.bp b/services/audioflinger/datapath/Android.bp
index 4235f14..6918881 100644
--- a/services/audioflinger/datapath/Android.bp
+++ b/services/audioflinger/datapath/Android.bp
@@ -29,7 +29,7 @@
tidy_checks: audioflinger_datapath_tidy_errors,
tidy_checks_as_errors: audioflinger_datapath_tidy_errors,
tidy_flags: [
- "-format-style=file",
+ "-format-style=file",
],
}
@@ -70,6 +70,6 @@
],
include_dirs: [
- "frameworks/av/services/audioflinger", // for configuration
+ "frameworks/av/services/audioflinger", // for configuration
],
}
diff --git a/services/audioflinger/datapath/AudioStreamIn.cpp b/services/audioflinger/datapath/AudioStreamIn.cpp
index 76618f4..165ac25 100644
--- a/services/audioflinger/datapath/AudioStreamIn.cpp
+++ b/services/audioflinger/datapath/AudioStreamIn.cpp
@@ -58,7 +58,7 @@
if (mHalFormatHasProportionalFrames &&
(flags & AUDIO_INPUT_FLAG_DIRECT) == AUDIO_INPUT_FLAG_DIRECT) {
- // For DirectRecord reset timestamp to 0 on standby.
+ // For DirectRecord reset position to 0 on standby.
const uint64_t adjustedPosition = (halPosition <= mFramesReadAtStandby) ?
0 : (halPosition - mFramesReadAtStandby);
// Scale from HAL sample rate to application rate.
diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
index 9851f3a..a686ff6 100644
--- a/services/audioflinger/datapath/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -51,42 +51,17 @@
return NO_INIT;
}
- uint32_t halPosition = 0;
+ uint64_t halPosition = 0;
const status_t status = stream->getRenderPosition(&halPosition);
if (status != NO_ERROR) {
return status;
}
-
- // Maintain a 64-bit render position using the 32-bit result from the HAL.
- // This delta calculation relies on the arithmetic overflow behavior
- // of integers. For example (100 - 0xFFFFFFF0) = 116.
- const auto truncatedPosition = (uint32_t)mRenderPosition;
- int32_t deltaHalPosition; // initialization not needed, overwitten by __builtin_sub_overflow()
- (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
-
- if (deltaHalPosition > 0) {
- mRenderPosition += deltaHalPosition;
- } else if (mExpectRetrograde) {
- mExpectRetrograde = false;
- mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
- }
// Scale from HAL sample rate to application rate.
- *frames = mRenderPosition / mRateMultiplier;
+ *frames = halPosition / mRateMultiplier;
return status;
}
-// return bottom 32-bits of the render position
-status_t AudioStreamOut::getRenderPosition(uint32_t *frames)
-{
- uint64_t position64 = 0;
- const status_t status = getRenderPosition(&position64);
- if (status == NO_ERROR) {
- *frames = (uint32_t)position64;
- }
- return status;
-}
-
status_t AudioStreamOut::getPresentationPosition(uint64_t *frames, struct timespec *timestamp)
{
if (stream == nullptr) {
@@ -101,7 +76,7 @@
if (mHalFormatHasProportionalFrames &&
(flags & AUDIO_OUTPUT_FLAG_DIRECT) == AUDIO_OUTPUT_FLAG_DIRECT) {
- // For DirectTrack reset timestamp to 0 on standby.
+ // For DirectTrack reset position to 0 on standby.
const uint64_t adjustedPosition = (halPosition <= mFramesWrittenAtStandby) ?
0 : (halPosition - mFramesWrittenAtStandby);
// Scale from HAL sample rate to application rate.
@@ -179,8 +154,6 @@
int AudioStreamOut::flush()
{
- mRenderPosition = 0;
- mExpectRetrograde = false;
mFramesWritten = 0;
mFramesWrittenAtStandby = 0;
const status_t result = stream->flush();
@@ -189,12 +162,14 @@
int AudioStreamOut::standby()
{
- mRenderPosition = 0;
- mExpectRetrograde = false;
mFramesWrittenAtStandby = mFramesWritten;
return stream->standby();
}
+void AudioStreamOut::presentationComplete() {
+ stream->presentationComplete();
+}
+
ssize_t AudioStreamOut::write(const void *buffer, size_t numBytes)
{
size_t bytesWritten;
diff --git a/services/audioflinger/datapath/AudioStreamOut.h b/services/audioflinger/datapath/AudioStreamOut.h
index ea41bba..2c9fb3e 100644
--- a/services/audioflinger/datapath/AudioStreamOut.h
+++ b/services/audioflinger/datapath/AudioStreamOut.h
@@ -51,9 +51,6 @@
virtual ~AudioStreamOut();
- // Get the bottom 32-bits of the 64-bit render position.
- status_t getRenderPosition(uint32_t *frames);
-
virtual status_t getRenderPosition(uint64_t *frames);
virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
@@ -91,21 +88,14 @@
virtual status_t flush();
virtual status_t standby();
- // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
- // transitioning between tracks.
- // The HAL resets the frame position without flush/stop being called, but calls back prior to
- // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
- // mRenderPosition.
- virtual void presentationComplete() { mExpectRetrograde = true; }
+ virtual void presentationComplete();
protected:
uint64_t mFramesWritten = 0; // reset by flush
uint64_t mFramesWrittenAtStandby = 0;
- uint64_t mRenderPosition = 0; // reset by flush, standby, or presentation complete
int mRateMultiplier = 1;
bool mHalFormatHasProportionalFrames = false;
size_t mHalFrameSize = 0;
- bool mExpectRetrograde = false; // see presentationComplete
};
} // namespace android
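The AudioStreamOut hunks above delete the 32-to-64-bit render position bookkeeping (mRenderPosition, mExpectRetrograde, the uint32_t overload) now that the HAL reports a 64-bit position directly. For reference, here is a rough sketch of the removed technique: extending a wrapping 32-bit counter into a monotonic 64-bit one via the wrapped signed delta. Position64 and its members are illustrative names, not the original code.

```cpp
#include <cstdint>

// Illustrative only: extends a wrapping 32-bit HAL counter into a monotonic
// 64-bit position, the way the removed AudioStreamOut code did before the
// HAL started reporting 64 bits directly.
class Position64 {
public:
    uint64_t update(uint32_t hal32) {
        const auto truncated = static_cast<uint32_t>(mPosition);
        int32_t delta;  // wrapped signed difference, e.g. 100 - 0xFFFFFFF0 == 116
        (void) __builtin_sub_overflow(hal32, truncated, &delta);
        if (delta > 0) {
            mPosition += delta;  // normal forward progress, wrap-tolerant
        }
        // Negative deltas (retrograde motion) were only honoured right after
        // presentationComplete(), which is why mExpectRetrograde existed.
        return mPosition;
    }

private:
    uint64_t mPosition = 0;
};
```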
diff --git a/services/audioflinger/fastpath/Android.bp b/services/audioflinger/fastpath/Android.bp
index 84a580f..5ebc583 100644
--- a/services/audioflinger/fastpath/Android.bp
+++ b/services/audioflinger/fastpath/Android.bp
@@ -24,7 +24,7 @@
tidy_checks: fastpath_tidy_errors,
tidy_checks_as_errors: fastpath_tidy_errors,
tidy_flags: [
- "-format-style=file",
+ "-format-style=file",
],
}
diff --git a/services/audioflinger/sounddose/Android.bp b/services/audioflinger/sounddose/Android.bp
index 2cab5d1..884622e 100644
--- a/services/audioflinger/sounddose/Android.bp
+++ b/services/audioflinger/sounddose/Android.bp
@@ -29,7 +29,7 @@
tidy_checks: audioflinger_sounddose_tidy_errors,
tidy_checks_as_errors: audioflinger_sounddose_tidy_errors,
tidy_flags: [
- "-format-style=file",
+ "-format-style=file",
],
}
@@ -40,9 +40,9 @@
defaults: [
"audioflinger_sounddose_flags_defaults",
- "latest_android_media_audio_common_types_ndk_shared",
"latest_android_hardware_audio_core_sounddose_ndk_shared",
"latest_android_hardware_audio_sounddose_ndk_shared",
+ "latest_android_media_audio_common_types_ndk_shared",
],
srcs: [
@@ -66,9 +66,9 @@
],
cflags: [
+ "-DBACKEND_NDK",
"-Wall",
"-Werror",
- "-DBACKEND_NDK",
],
}
diff --git a/services/audioflinger/sounddose/tests/Android.bp b/services/audioflinger/sounddose/tests/Android.bp
index 2a2addf..b963d25 100644
--- a/services/audioflinger/sounddose/tests/Android.bp
+++ b/services/audioflinger/sounddose/tests/Android.bp
@@ -11,13 +11,13 @@
name: "sounddosemanager_tests",
srcs: [
- "sounddosemanager_tests.cpp"
+ "sounddosemanager_tests.cpp",
],
defaults: [
- "latest_android_media_audio_common_types_ndk_static",
"latest_android_hardware_audio_core_sounddose_ndk_static",
"latest_android_hardware_audio_sounddose_ndk_static",
+ "latest_android_media_audio_common_types_ndk_static",
],
shared_libs: [
@@ -42,10 +42,10 @@
],
cflags: [
+ "-DBACKEND_NDK",
"-Wall",
"-Werror",
"-Wextra",
- "-DBACKEND_NDK",
],
test_suites: [
diff --git a/services/audioflinger/timing/Android.bp b/services/audioflinger/timing/Android.bp
index 30ebca0..2666ddb 100644
--- a/services/audioflinger/timing/Android.bp
+++ b/services/audioflinger/timing/Android.bp
@@ -29,7 +29,7 @@
tidy_checks: audioflinger_timing_tidy_errors,
tidy_checks_as_errors: audioflinger_timing_tidy_errors,
tidy_flags: [
- "-format-style=file",
+ "-format-style=file",
],
}
diff --git a/services/audioflinger/timing/tests/Android.bp b/services/audioflinger/timing/tests/Android.bp
index d1e5563..94eaa6a 100644
--- a/services/audioflinger/timing/tests/Android.bp
+++ b/services/audioflinger/timing/tests/Android.bp
@@ -13,7 +13,7 @@
host_supported: true,
srcs: [
- "mediasyncevent_tests.cpp"
+ "mediasyncevent_tests.cpp",
],
header_libs: [
@@ -38,7 +38,7 @@
host_supported: true,
srcs: [
- "monotonicframecounter_tests.cpp"
+ "monotonicframecounter_tests.cpp",
],
static_libs: [
@@ -54,26 +54,26 @@
}
cc_test {
- name: "synchronizedrecordstate_tests",
+ name: "synchronizedrecordstate_tests",
- host_supported: true,
+ host_supported: true,
- srcs: [
- "synchronizedrecordstate_tests.cpp"
- ],
+ srcs: [
+ "synchronizedrecordstate_tests.cpp",
+ ],
- header_libs: [
- "libaudioclient_headers",
- ],
+ header_libs: [
+ "libaudioclient_headers",
+ ],
- static_libs: [
- "liblog",
- "libutils", // RefBase
- ],
+ static_libs: [
+ "liblog",
+ "libutils", // RefBase
+ ],
- cflags: [
- "-Wall",
- "-Werror",
- "-Wextra",
- ],
- }
\ No newline at end of file
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+}
diff --git a/services/audioparameterparser/Android.bp b/services/audioparameterparser/Android.bp
index b3da333..1c1c1e1 100644
--- a/services/audioparameterparser/Android.bp
+++ b/services/audioparameterparser/Android.bp
@@ -35,18 +35,18 @@
name: "android.hardware.audio.parameter_parser.example_defaults",
defaults: [
"latest_android_hardware_audio_core_ndk_shared",
+ "latest_av_audio_types_aidl_ndk_shared",
],
shared_libs: [
- "av-audio-types-aidl-V1-ndk",
"libbase",
"libbinder_ndk",
],
cflags: [
"-Wall",
- "-Wextra",
"-Werror",
+ "-Wextra",
"-Wthread-safety",
],
}
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 13b70e5..c8b2962 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -16,7 +16,6 @@
#pragma once
-#define __STDC_LIMIT_MACROS
#include <inttypes.h>
#include <sys/types.h>
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index d027564..747af4a 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -561,6 +561,7 @@
audio_port_config config = {};
devicePort->toAudioPortConfig(&config);
config.config_mask = AUDIO_PORT_CONFIG_GAIN;
+ config.gain.mode = gains[0]->getMode();
config.gain.values[0] = gainValueMb;
return mClientInterface->setAudioPortConfig(&config, 0) == NO_ERROR;
}
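The one-line AudioOutputDescriptor fix above fills in config.gain.mode before handing the port config to setAudioPortConfig(); with AUDIO_PORT_CONFIG_GAIN set but the mode left zeroed, the gain request named no valid mode. A small sketch of the idea, using simplified stand-in structs rather than the real system/audio.h types, so every name below is hypothetical:

```cpp
// Stand-in types: field names are illustrative, not the exact
// system/audio.h layout.
struct GainConfig {
    int mode = 0;        // e.g. joint vs. per-channel gain; 0 names nothing valid
    int values[8] = {};  // gain per channel, in millibels
};
struct PortConfig {
    unsigned config_mask = 0;
    GainConfig gain;
};
constexpr unsigned PORT_CONFIG_GAIN = 0x1;

// Before the fix only the mask and values[0] were populated, so the HAL saw
// mode == 0. Copying the mode advertised by the device port's gain descriptor
// keeps the request self-consistent.
PortConfig makeGainRequest(int gainValueMb, int advertisedMode) {
    PortConfig config;
    config.config_mask = PORT_CONFIG_GAIN;
    config.gain.mode = advertisedMode;
    config.gain.values[0] = gainValueMb;
    return config;
}
```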
diff --git a/services/audiopolicy/config/Android.bp b/services/audiopolicy/config/Android.bp
index 86600f4..321181d 100644
--- a/services/audiopolicy/config/Android.bp
+++ b/services/audiopolicy/config/Android.bp
@@ -31,41 +31,49 @@
vendor: true,
src: ":a2dp_in_audio_policy_configuration",
}
+
prebuilt_etc {
name: "a2dp_audio_policy_configuration.xml",
vendor: true,
src: ":a2dp_audio_policy_configuration",
}
+
prebuilt_etc {
name: "audio_policy_configuration.xml",
vendor: true,
src: ":audio_policy_configuration_generic",
}
+
prebuilt_etc {
name: "r_submix_audio_policy_configuration.xml",
vendor: true,
src: ":r_submix_audio_policy_configuration",
}
+
prebuilt_etc {
name: "audio_policy_volumes.xml",
vendor: true,
src: ":audio_policy_volumes",
}
+
prebuilt_etc {
name: "default_volume_tables.xml",
vendor: true,
src: ":default_volume_tables",
}
+
prebuilt_etc {
name: "surround_sound_configuration_5_0.xml",
vendor: true,
src: ":surround_sound_configuration_5_0",
}
+
prebuilt_etc {
name: "usb_audio_policy_configuration.xml",
vendor: true,
src: ":usb_audio_policy_configuration",
}
+
prebuilt_etc {
name: "primary_audio_policy_configuration.xml",
src: ":primary_audio_policy_configuration",
@@ -76,50 +84,62 @@
name: "a2dp_in_audio_policy_configuration",
srcs: ["a2dp_in_audio_policy_configuration.xml"],
}
+
filegroup {
name: "a2dp_audio_policy_configuration",
srcs: ["a2dp_audio_policy_configuration.xml"],
}
+
filegroup {
name: "primary_audio_policy_configuration",
srcs: ["primary_audio_policy_configuration.xml"],
}
+
filegroup {
name: "surround_sound_configuration_5_0",
srcs: ["surround_sound_configuration_5_0.xml"],
}
+
filegroup {
name: "default_volume_tables",
srcs: ["default_volume_tables.xml"],
}
+
filegroup {
name: "audio_policy_volumes",
srcs: ["audio_policy_volumes.xml"],
}
+
filegroup {
name: "audio_policy_configuration_generic",
srcs: ["audio_policy_configuration_generic.xml"],
}
+
filegroup {
name: "audio_policy_configuration_generic_configurable",
srcs: ["audio_policy_configuration_generic_configurable.xml"],
}
+
filegroup {
name: "usb_audio_policy_configuration",
srcs: ["usb_audio_policy_configuration.xml"],
}
+
filegroup {
name: "r_submix_audio_policy_configuration",
srcs: ["r_submix_audio_policy_configuration.xml"],
}
+
filegroup {
name: "bluetooth_audio_policy_configuration_7_0",
srcs: ["bluetooth_audio_policy_configuration_7_0.xml"],
}
+
filegroup {
name: "bluetooth_with_le_audio_policy_configuration_7_0",
srcs: ["bluetooth_with_le_audio_policy_configuration_7_0.xml"],
}
+
filegroup {
name: "hearing_aid_audio_policy_configuration_7_0",
srcs: ["hearing_aid_audio_policy_configuration_7_0.xml"],
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
index 0034a04..878e0e9 100644
--- a/services/audiopolicy/engine/common/Android.bp
+++ b/services/audiopolicy/engine/common/Android.bp
@@ -31,10 +31,10 @@
name: "libaudiopolicyengine_common",
srcs: [
"src/EngineBase.cpp",
+ "src/LastRemovableMediaDevices.cpp",
"src/ProductStrategy.cpp",
"src/VolumeCurve.cpp",
"src/VolumeGroup.cpp",
- "src/LastRemovableMediaDevices.cpp",
],
cflags: [
"-Wall",
@@ -42,10 +42,10 @@
"-Wextra",
],
header_libs: [
- "libbase_headers",
"libaudiopolicycommon",
"libaudiopolicyengine_common_headers",
"libaudiopolicyengine_interface_headers",
+ "libbase_headers",
],
export_header_lib_headers: [
"libaudiopolicyengine_common_headers",
@@ -58,7 +58,10 @@
"libaudiopolicycomponents",
],
whole_static_libs: [
- "server_configurable_flags",
"com.android.media.audio-aconfig-cc",
+ "server_configurable_flags",
+ ],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
],
}
diff --git a/services/audiopolicy/engine/common/src/VolumeCurve.cpp b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
index fccbc60..9411155 100644
--- a/services/audiopolicy/engine/common/src/VolumeCurve.cpp
+++ b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
@@ -69,7 +69,7 @@
return mCurvePoints[nbCurvePoints - 1].mAttenuationInMb / 100.0f;
}
if (indexInUiPosition == 0) {
- if (indexInUiPosition != mCurvePoints[0].mIndex) {
+ if ((size_t)volIdx != mCurvePoints[0].mIndex) {
return VOLUME_MIN_DB; // out of bounds
}
return mCurvePoints[0].mAttenuationInMb / 100.0f;
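The VolumeCurve fix above compares the requested volume index itself against the first curve point's index, instead of the insertion position that is already known to be zero on this branch, so an index below the first defined point now takes the out-of-bounds path. A rough standalone sketch of that boundary handling, with made-up types and only the curve edges shown:

```cpp
#include <vector>

struct CurvePoint { size_t index; int attenuationMb; };  // illustrative

// Returns attenuation in dB for volIdx, or a floor value when volIdx falls
// below the first defined curve point. This mirrors the fixed branch: the
// comparison is against the requested index, not against the (already zero)
// position where that index would be inserted into the curve.
float volIndexToDb(const std::vector<CurvePoint>& points, size_t volIdx,
                   float volumeMinDb) {
    if (points.empty()) return volumeMinDb;
    if (volIdx <= points.front().index) {
        if (volIdx != points.front().index) {
            return volumeMinDb;  // below the curve: treat as out of bounds
        }
        return points.front().attenuationMb / 100.0f;
    }
    // ... interpolation between neighbouring points elided ...
    return points.back().attenuationMb / 100.0f;
}
```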
diff --git a/services/audiopolicy/engine/config/Android.bp b/services/audiopolicy/engine/config/Android.bp
index 12597de..05434bc 100644
--- a/services/audiopolicy/engine/config/Android.bp
+++ b/services/audiopolicy/engine/config/Android.bp
@@ -32,7 +32,7 @@
],
header_libs: [
"libaudio_system_headers",
- "libmedia_headers",
"libaudioclient_headers",
+ "libmedia_headers",
],
}
diff --git a/services/audiopolicy/engine/config/tests/Android.bp b/services/audiopolicy/engine/config/tests/Android.bp
index 5d1aa16..fd2ded8 100644
--- a/services/audiopolicy/engine/config/tests/Android.bp
+++ b/services/audiopolicy/engine/config/tests/Android.bp
@@ -27,8 +27,8 @@
data: [":audiopolicy_engineconfig_files"],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
test_suites: ["device-tests"],
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index eb2e2f4..2c3c4be 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -19,8 +19,8 @@
srcs: [
"src/Engine.cpp",
"src/EngineInstance.cpp",
- "src/Stream.cpp",
"src/InputSource.cpp",
+ "src/Stream.cpp",
],
cflags: [
"-Wall",
@@ -29,10 +29,10 @@
],
local_include_dirs: ["include"],
header_libs: [
- "libbase_headers",
"libaudiopolicycommon",
"libaudiopolicyengine_interface_headers",
"libaudiopolicyengineconfigurable_interface_headers",
+ "libbase_headers",
],
static_libs: [
"libaudiopolicyengine_common",
@@ -40,17 +40,20 @@
"libaudiopolicyengineconfigurable_pfwwrapper",
],
- shared_libs: [
+ shared_libs: [
"libaudio_aidl_conversion_common_cpp",
"libaudiofoundation",
+ "libaudiopolicy",
"libaudiopolicycomponents",
"libbase",
- "liblog",
"libcutils",
- "libutils",
+ "liblog",
"libmedia_helper",
- "libaudiopolicy",
"libparameter",
+ "libutils",
"libxml2",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
}
diff --git a/services/audiopolicy/engineconfigurable/config/Android.bp b/services/audiopolicy/engineconfigurable/config/Android.bp
index b3d1f97..a733c3f 100644
--- a/services/audiopolicy/engineconfigurable/config/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/Android.bp
@@ -30,10 +30,12 @@
vendor: true,
src: ":audio_policy_engine_criteria",
}
+
filegroup {
name: "audio_policy_engine_criterion_types_template",
srcs: ["example/common/audio_policy_engine_criterion_types.xml.in"],
}
+
filegroup {
name: "audio_policy_engine_criteria",
srcs: ["example/common/audio_policy_engine_criteria.xml"],
diff --git a/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp b/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
index e46b60f..f0926eb 100644
--- a/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
@@ -36,22 +36,25 @@
vendor: true,
src: ":audio_policy_engine_configuration",
required: [
- ":audio_policy_engine_criterion_types.xml",
":audio_policy_engine_criteria.xml",
+ ":audio_policy_engine_criterion_types.xml",
":audio_policy_engine_product_strategies.xml",
":audio_policy_engine_volumes.xml",
],
}
+
prebuilt_etc {
name: "audio_policy_engine_product_strategies.xml",
vendor: true,
src: "audio_policy_engine_product_strategies.xml",
}
+
prebuilt_etc {
name: "audio_policy_engine_volumes.xml",
vendor: true,
src: ":audio_policy_engine_volumes",
}
+
prebuilt_etc {
name: "audio_policy_engine_criterion_types.xml",
vendor: true,
@@ -65,39 +68,44 @@
name: "audio_policy_engine_criterion_types",
defaults: ["buildpolicycriteriontypesrule"],
srcs: [
- ":audio_policy_configuration_top_file",
":audio_policy_configuration_files",
+ ":audio_policy_configuration_top_file",
],
}
+
filegroup {
name: "audio_policy_configuration_files",
srcs: [
- ":r_submix_audio_policy_configuration",
- ":default_volume_tables",
":audio_policy_volumes",
- ":surround_sound_configuration_5_0",
+ ":default_volume_tables",
":primary_audio_policy_configuration",
+ ":r_submix_audio_policy_configuration",
+ ":surround_sound_configuration_5_0",
],
}
+
filegroup {
- name : "audio_policy_configuration_top_file",
+ name: "audio_policy_configuration_top_file",
srcs: [":audio_policy_configuration_generic"],
}
+
filegroup {
name: "audio_policy_engine_configuration",
srcs: ["audio_policy_engine_configuration.xml"],
}
+
filegroup {
name: "audio_policy_engine_volumes",
srcs: ["audio_policy_engine_volumes.xml"],
}
+
filegroup {
name: "audio_policy_engine_configuration_files",
srcs: [
":audio_policy_engine_configuration",
- "audio_policy_engine_product_strategies.xml",
- ":audio_policy_engine_volumes",
- ":audio_policy_engine_criterion_types",
":audio_policy_engine_criteria",
+ ":audio_policy_engine_criterion_types",
+ ":audio_policy_engine_volumes",
+ "audio_policy_engine_product_strategies.xml",
],
}
diff --git a/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp b/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
index ad6eeb1..981b5a7 100644
--- a/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
@@ -18,8 +18,8 @@
soong_namespace {
imports: [
- "frameworks/av/services/audiopolicy/engineconfigurable/config/example/automotive",
"frameworks/av/services/audiopolicy/config",
+ "frameworks/av/services/audiopolicy/engineconfigurable/config/example/automotive",
],
}
@@ -37,17 +37,19 @@
vendor: true,
src: ":audio_policy_engine_configuration",
required: [
- "audio_policy_engine_criterion_types.xml",
- "audio_policy_engine_criteria.xml",
- "audio_policy_engine_product_strategies.xml",
":audio_policy_engine_volumes.xml",
+ "audio_policy_engine_criteria.xml",
+ "audio_policy_engine_criterion_types.xml",
+ "audio_policy_engine_product_strategies.xml",
],
}
+
prebuilt_etc {
name: "audio_policy_engine_product_strategies.xml",
vendor: true,
src: "audio_policy_engine_product_strategies.xml",
}
+
prebuilt_etc {
name: "audio_policy_engine_criterion_types.xml",
vendor: true,
@@ -61,31 +63,34 @@
name: "audio_policy_engine_criterion_types",
defaults: ["buildpolicycriteriontypesrule"],
srcs: [
- ":audio_policy_configuration_top_file",
":audio_policy_configuration_files",
+ ":audio_policy_configuration_top_file",
],
}
+
filegroup {
name: "audio_policy_configuration_files",
srcs: [
- ":r_submix_audio_policy_configuration",
- ":default_volume_tables",
":audio_policy_volumes",
- ":surround_sound_configuration_5_0",
+ ":default_volume_tables",
":primary_audio_policy_configuration",
+ ":r_submix_audio_policy_configuration",
+ ":surround_sound_configuration_5_0",
],
}
+
filegroup {
- name : "audio_policy_configuration_top_file",
+ name: "audio_policy_configuration_top_file",
srcs: [":audio_policy_configuration_generic"],
}
+
filegroup {
name: "audio_policy_engine_configuration_files",
srcs: [
":audio_policy_engine_configuration",
- "audio_policy_engine_product_strategies.xml",
- ":audio_policy_engine_volumes",
- ":audio_policy_engine_criterion_types",
":audio_policy_engine_criteria",
+ ":audio_policy_engine_criterion_types",
+ ":audio_policy_engine_volumes",
+ "audio_policy_engine_product_strategies.xml",
],
}
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp b/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
index 773a99a..9f44bd6 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
@@ -36,27 +36,31 @@
vendor: true,
src: ":audio_policy_engine_configuration",
required: [
- ":audio_policy_engine_criterion_types.xml",
":audio_policy_engine_criteria.xml",
+ ":audio_policy_engine_criterion_types.xml",
":audio_policy_engine_product_strategies.xml",
":audio_policy_engine_volumes.xml",
],
}
+
prebuilt_etc {
name: "audio_policy_engine_product_strategies.xml",
vendor: true,
src: "audio_policy_engine_product_strategies.xml",
}
+
prebuilt_etc {
name: "audio_policy_engine_stream_volumes.xml",
vendor: true,
src: ":audio_policy_engine_stream_volumes",
}
+
prebuilt_etc {
name: "audio_policy_engine_default_stream_volumes.xml",
vendor: true,
src: ":audio_policy_engine_default_stream_volumes",
}
+
prebuilt_etc {
name: "audio_policy_engine_criterion_types.xml",
vendor: true,
@@ -70,44 +74,50 @@
name: "audio_policy_engine_criterion_types",
defaults: ["buildpolicycriteriontypesrule"],
srcs: [
- ":audio_policy_configuration_top_file",
":audio_policy_configuration_files",
+ ":audio_policy_configuration_top_file",
],
}
+
filegroup {
name: "audio_policy_configuration_files",
srcs: [
- ":r_submix_audio_policy_configuration",
- ":default_volume_tables",
":audio_policy_volumes",
- ":surround_sound_configuration_5_0",
+ ":default_volume_tables",
":primary_audio_policy_configuration",
+ ":r_submix_audio_policy_configuration",
+ ":surround_sound_configuration_5_0",
],
}
+
filegroup {
- name : "audio_policy_configuration_top_file",
+ name: "audio_policy_configuration_top_file",
srcs: [":audio_policy_configuration_generic"],
}
+
filegroup {
name: "audio_policy_engine_configuration",
srcs: ["audio_policy_engine_configuration.xml"],
}
+
filegroup {
name: "audio_policy_engine_stream_volumes",
srcs: ["audio_policy_engine_stream_volumes.xml"],
}
+
filegroup {
name: "audio_policy_engine_default_stream_volumes",
srcs: ["audio_policy_engine_default_stream_volumes.xml"],
}
+
filegroup {
name: "audio_policy_engine_configuration_files",
srcs: [
":audio_policy_engine_configuration",
- "audio_policy_engine_product_strategies.xml",
- ":audio_policy_engine_stream_volumes",
- ":audio_policy_engine_default_stream_volumes",
- ":audio_policy_engine_criterion_types",
":audio_policy_engine_criteria",
+ ":audio_policy_engine_criterion_types",
+ ":audio_policy_engine_default_stream_volumes",
+ ":audio_policy_engine_stream_volumes",
+ "audio_policy_engine_product_strategies.xml",
],
}
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
index ee62d5e..98b8e78 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
@@ -31,18 +31,21 @@
src: ":PolicyClass",
sub_dir: "parameter-framework/Structure/Policy",
}
+
prebuilt_etc {
name: "PolicySubsystem.xml",
vendor: true,
src: ":PolicySubsystem",
sub_dir: "parameter-framework/Structure/Policy",
}
+
prebuilt_etc {
name: "PolicySubsystem-CommonTypes.xml",
vendor: true,
src: ":buildcommontypesstructure_gen",
sub_dir: "parameter-framework/Structure/Policy",
}
+
genrule {
name: "buildcommontypesstructure_gen",
defaults: ["buildcommontypesstructurerule"],
@@ -52,34 +55,42 @@
name: "product_strategies_structure_template",
srcs: ["examples/common/Structure/ProductStrategies.xml.in"],
}
+
filegroup {
name: "PolicySubsystem",
srcs: ["examples/common/Structure/PolicySubsystem.xml"],
}
+
filegroup {
name: "PolicySubsystem-no-strategy",
srcs: ["examples/common/Structure/PolicySubsystem-no-strategy.xml"],
}
+
filegroup {
name: "common_types_structure_template",
srcs: ["examples/common/Structure/PolicySubsystem-CommonTypes.xml.in"],
}
+
filegroup {
name: "PolicyClass",
srcs: ["examples/common/Structure/PolicyClass.xml"],
}
+
filegroup {
name: "volumes.pfw",
srcs: ["examples/Settings/volumes.pfw"],
}
+
filegroup {
name: "device_for_input_source.pfw",
srcs: ["examples/Settings/device_for_input_source.pfw"],
}
+
filegroup {
name: "ParameterFrameworkConfigurationPolicy.userdebug.xml",
srcs: ["examples/ParameterFrameworkConfigurationPolicy.userdebug.xml"],
}
+
filegroup {
name: "ParameterFrameworkConfigurationPolicy.user.xml",
srcs: ["examples/ParameterFrameworkConfigurationPolicy.user.xml"],
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
index 7d2d293..7329032 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
@@ -18,8 +18,8 @@
soong_namespace {
imports: [
- "frameworks/av/services/audiopolicy/engineconfigurable/config/example/automotive",
"frameworks/av/services/audiopolicy/config",
+ "frameworks/av/services/audiopolicy/engineconfigurable/config/example/automotive",
],
}
@@ -42,6 +42,7 @@
sub_dir: "parameter-framework/Structure/Policy",
required: ["libpolicy-subsystem"],
}
+
genrule {
name: "buildstrategiesstructure_gen",
defaults: ["buildstrategiesstructurerule"],
@@ -61,23 +62,25 @@
src: ":domaingeneratorpolicyrule_gen",
sub_dir: "parameter-framework/Settings/Policy",
required: [
- "ProductStrategies.xml",
"PolicyClass.xml",
- "PolicySubsystem.xml",
"PolicySubsystem-CommonTypes.xml",
+ "PolicySubsystem.xml",
+ "ProductStrategies.xml",
],
}
+
genrule {
name: "domaingeneratorpolicyrule_gen",
enabled: false, // TODO: This module fails to build
defaults: ["domaingeneratorpolicyrule"],
srcs: [
- ":audio_policy_pfw_toplevel",
- ":audio_policy_pfw_structure_files",
":audio_policy_engine_criterion_types",
+ ":audio_policy_pfw_structure_files",
+ ":audio_policy_pfw_toplevel",
":edd_files",
],
}
+
filegroup {
name: "edd_files",
srcs: [
@@ -86,11 +89,13 @@
"Settings/device_for_product_strategies.pfw",
],
}
+
// This is for Settings generation, must use socket port, so userdebug version is required
filegroup {
name: "audio_policy_pfw_toplevel",
srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
}
+
filegroup {
name: "audio_policy_pfw_structure_files",
srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
index f825e5f..6715e06 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
@@ -18,9 +18,9 @@
soong_namespace {
imports: [
+ "frameworks/av/services/audiopolicy/config",
"frameworks/av/services/audiopolicy/engineconfigurable/config/example/caremu",
"frameworks/av/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car",
- "frameworks/av/services/audiopolicy/config",
],
}
@@ -43,6 +43,7 @@
sub_dir: "parameter-framework/Structure/Policy",
required: ["libpolicy-subsystem"],
}
+
genrule {
name: "buildstrategiesstructure_gen",
defaults: ["buildstrategiesstructurerule"],
@@ -62,23 +63,25 @@
src: ":domaingeneratorpolicyrule_gen",
sub_dir: "parameter-framework/Settings/Policy",
required: [
- "ProductStrategies.xml",
"PolicyClass.xml",
- "PolicySubsystem.xml",
"PolicySubsystem-CommonTypes.xml",
+ "PolicySubsystem.xml",
+ "ProductStrategies.xml",
],
}
+
genrule {
name: "domaingeneratorpolicyrule_gen",
enabled: false, // TODO: This module fails to build
defaults: ["domaingeneratorpolicyrule"],
srcs: [
- ":audio_policy_pfw_toplevel",
- ":audio_policy_pfw_structure_files",
":audio_policy_engine_criterion_types",
+ ":audio_policy_pfw_structure_files",
+ ":audio_policy_pfw_toplevel",
":edd_files",
],
}
+
filegroup {
name: "edd_files",
srcs: [
@@ -87,11 +90,13 @@
"Settings/device_for_product_strategies.pfw",
],
}
+
// This is for Settings generation, must use socket port, so userdebug version is required
filegroup {
name: "audio_policy_pfw_toplevel",
srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
}
+
filegroup {
name: "audio_policy_pfw_structure_files",
srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
index 4a83cbc..bd401d0 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
@@ -18,8 +18,8 @@
soong_namespace {
imports: [
- "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
"frameworks/av/services/audiopolicy/config",
+ "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
],
}
@@ -42,6 +42,7 @@
sub_dir: "parameter-framework/Structure/Policy",
required: ["libpolicy-subsystem"],
}
+
genrule {
name: "buildstrategiesstructure_gen",
defaults: ["buildstrategiesstructurerule"],
@@ -61,45 +62,49 @@
src: ":domaingeneratorpolicyrule_gen",
sub_dir: "parameter-framework/Settings/Policy",
required: [
- "ProductStrategies.xml",
"PolicyClass.xml",
- "PolicySubsystem.xml",
"PolicySubsystem-CommonTypes.xml",
+ "PolicySubsystem.xml",
+ "ProductStrategies.xml",
],
}
+
genrule {
name: "domaingeneratorpolicyrule_gen",
enabled: false, // TODO: This module fails to build
defaults: ["domaingeneratorpolicyrule"],
srcs: [
- ":audio_policy_pfw_toplevel",
- ":audio_policy_pfw_structure_files",
":audio_policy_engine_criterion_types",
+ ":audio_policy_pfw_structure_files",
+ ":audio_policy_pfw_toplevel",
":edd_files",
],
}
+
filegroup {
name: "edd_files",
srcs: [
":device_for_input_source.pfw",
":volumes.pfw",
- "Settings/device_for_product_strategy_media.pfw",
"Settings/device_for_product_strategy_accessibility.pfw",
"Settings/device_for_product_strategy_dtmf.pfw",
"Settings/device_for_product_strategy_enforced_audible.pfw",
+ "Settings/device_for_product_strategy_media.pfw",
+ "Settings/device_for_product_strategy_patch.pfw",
"Settings/device_for_product_strategy_phone.pfw",
+ "Settings/device_for_product_strategy_rerouting.pfw",
"Settings/device_for_product_strategy_sonification.pfw",
"Settings/device_for_product_strategy_sonification_respectful.pfw",
"Settings/device_for_product_strategy_transmitted_through_speaker.pfw",
- "Settings/device_for_product_strategy_rerouting.pfw",
- "Settings/device_for_product_strategy_patch.pfw",
],
}
+
// This is for Settings generation, must use socket port, so userdebug version is required
filegroup {
name: "audio_policy_pfw_toplevel",
srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
}
+
filegroup {
name: "audio_policy_pfw_structure_files",
srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
index 89ab892..7c6fc54 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
@@ -18,8 +18,8 @@
soong_namespace {
imports: [
- "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
"frameworks/av/services/audiopolicy/config",
+ "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
],
}
@@ -41,8 +41,8 @@
sub_dir: "parameter-framework/Settings/Policy",
required: [
"PolicyClass.xml",
- "PolicySubsystem.xml",
"PolicySubsystem-CommonTypes.xml",
+ "PolicySubsystem.xml",
],
}
@@ -51,16 +51,18 @@
enabled: false, // TODO: This module fails to build
defaults: ["domaingeneratorpolicyrule"],
srcs: [
- ":audio_policy_pfw_toplevel",
- ":audio_policy_pfw_structure_files",
":audio_policy_engine_criterion_types",
+ ":audio_policy_pfw_structure_files",
+ ":audio_policy_pfw_toplevel",
":edd_files",
],
}
+
filegroup {
name: "audio_policy_pfw_toplevel",
srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
}
+
filegroup {
name: "audio_policy_pfw_structure_files",
srcs: [
@@ -69,13 +71,15 @@
":buildcommontypesstructure_gen",
],
}
+
filegroup {
name: "edd_files",
srcs: [
- "device_for_input_source.pfw",
":volumes.pfw",
+ "device_for_input_source.pfw",
],
}
+
prebuilt_etc {
name: "PolicySubsystem.xml",
vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
index 4880547..f1348df 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
@@ -18,8 +18,8 @@
soong_namespace {
imports: [
- "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
"frameworks/av/services/audiopolicy/config",
+ "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
],
}
@@ -41,25 +41,28 @@
sub_dir: "parameter-framework/Settings/Policy",
required: [
"PolicyClass.xml",
- "PolicySubsystem.xml",
"PolicySubsystem-CommonTypes.xml",
+ "PolicySubsystem.xml",
],
}
+
genrule {
name: "domaingeneratorpolicyrule_gen",
enabled: false, // TODO: This module fails to build
defaults: ["domaingeneratorpolicyrule"],
srcs: [
- ":audio_policy_pfw_toplevel",
- ":audio_policy_pfw_structure_files",
":audio_policy_engine_criterion_types",
+ ":audio_policy_pfw_structure_files",
+ ":audio_policy_pfw_toplevel",
":edd_files",
],
}
+
filegroup {
name: "audio_policy_pfw_toplevel",
srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
}
+
filegroup {
name: "audio_policy_pfw_structure_files",
srcs: [
@@ -68,14 +71,16 @@
":buildcommontypesstructure_gen",
],
}
+
filegroup {
name: "edd_files",
srcs: [
- "device_for_strategies.pfw",
- ":volumes.pfw",
":device_for_input_source.pfw",
+ ":volumes.pfw",
+ "device_for_strategies.pfw",
],
}
+
prebuilt_etc {
name: "PolicySubsystem.xml",
vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
index f7159c5..1495b46 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
@@ -10,11 +10,11 @@
cc_library_shared {
name: "libpolicy-subsystem",
srcs: [
- "PolicySubsystemBuilder.cpp",
- "PolicySubsystem.cpp",
"InputSource.cpp",
- "Stream.cpp",
+ "PolicySubsystem.cpp",
+ "PolicySubsystemBuilder.cpp",
"ProductStrategy.cpp",
+ "Stream.cpp",
],
cflags: [
"-Wall",
@@ -24,11 +24,11 @@
"-fvisibility=hidden",
],
header_libs: [
- "libbase_headers",
- "libaudiopolicycommon",
"libaudioclient_headers",
+ "libaudiopolicycommon",
"libaudiopolicyengine_interface_headers",
"libaudiopolicyengineconfigurable_interface_headers",
+ "libbase_headers",
],
static_libs: [
"libaudiopolicyengine_common",
@@ -38,8 +38,8 @@
"libaudiopolicycomponents",
"libaudiopolicyengineconfigurable",
"liblog",
- "libutils",
"libmedia_helper",
- "libparameter"
+ "libparameter",
+ "libutils",
],
}
diff --git a/services/audiopolicy/engineconfigurable/tools/Android.bp b/services/audiopolicy/engineconfigurable/tools/Android.bp
index 3aec064..95c2fb6 100644
--- a/services/audiopolicy/engineconfigurable/tools/Android.bp
+++ b/services/audiopolicy/engineconfigurable/tools/Android.bp
@@ -36,13 +36,13 @@
name: "buildpolicycriteriontypesrule",
tools: ["buildPolicyCriterionTypes"],
cmd: "cp $(locations :audio_policy_configuration_files) $(genDir)/. && " +
- "cp $(location :audio_policy_configuration_top_file) $(genDir)/audio_policy_configuration.xml && " +
- "$(location buildPolicyCriterionTypes) " +
- " --androidaudiobaseheader $(location :libaudio_system_audio_base) " +
- " --androidaudiocommonbaseheader $(location :libaudio_system_audio_common_base) " +
- "--audiopolicyconfigurationfile $(genDir)/audio_policy_configuration.xml " +
- "--criteriontypes $(location :audio_policy_engine_criterion_types_template) " +
- "--outputfile $(out)",
+ "cp $(location :audio_policy_configuration_top_file) $(genDir)/audio_policy_configuration.xml && " +
+ "$(location buildPolicyCriterionTypes) " +
+ " --androidaudiobaseheader $(location :libaudio_system_audio_base) " +
+ " --androidaudiocommonbaseheader $(location :libaudio_system_audio_common_base) " +
+ "--audiopolicyconfigurationfile $(genDir)/audio_policy_configuration.xml " +
+ "--criteriontypes $(location :audio_policy_engine_criterion_types_template) " +
+ "--outputfile $(out)",
srcs: [
// The commented inputs must be provided to use this genrule_defaults
// @todo uncomment if 1428659 is merged":android_audio_base_header_file",
@@ -66,8 +66,8 @@
],
libs: [
"EddParser.py",
- "hostConfig.py",
"PFWScriptGenerator.py",
+ "hostConfig.py",
],
required: [
"domainGeneratorConnector",
@@ -77,21 +77,21 @@
genrule_defaults {
name: "domaingeneratorpolicyrule",
tools: [
- "domainGeneratorPolicy",
"domainGeneratorConnector",
+ "domainGeneratorPolicy",
],
cmd: "mkdir -p $(genDir)/Structure/Policy && " +
- "cp $(locations :audio_policy_pfw_structure_files) $(genDir)/Structure/Policy && " +
- "cp $(location :audio_policy_pfw_toplevel) $(genDir)/top_level && " +
- "$(location domainGeneratorPolicy) " +
- "--validate " +
- "--domain-generator-tool $(location domainGeneratorConnector) " +
- "--toplevel-config $(genDir)/top_level " +
- "--criteria $(location :audio_policy_engine_criteria) " +
- "--criteriontypes $(location :audio_policy_engine_criterion_types) " +
- "--add-edds $(locations :edd_files) " +
- "--schemas-dir external/parameter-framework/upstream/schemas " +
- " > $(out)",
+ "cp $(locations :audio_policy_pfw_structure_files) $(genDir)/Structure/Policy && " +
+ "cp $(location :audio_policy_pfw_toplevel) $(genDir)/top_level && " +
+ "$(location domainGeneratorPolicy) " +
+ "--validate " +
+ "--domain-generator-tool $(location domainGeneratorConnector) " +
+ "--toplevel-config $(genDir)/top_level " +
+ "--criteria $(location :audio_policy_engine_criteria) " +
+ "--criteriontypes $(location :audio_policy_engine_criterion_types) " +
+ "--add-edds $(locations :edd_files) " +
+ "--schemas-dir external/parameter-framework/upstream/schemas " +
+ " > $(out)",
srcs: [
// The commented inputs must be provided to use this genrule_defaults
// ":audio_policy_pfw_toplevel",
@@ -118,11 +118,11 @@
genrule_defaults {
name: "buildstrategiesstructurerule",
tools: ["buildStrategiesStructureFile"],
- cmd: "cp $(locations :audio_policy_engine_configuration_files) $(genDir) && ls -l $(genDir) &&"+
- "$(location buildStrategiesStructureFile) " +
- "--audiopolicyengineconfigurationfile $(genDir)/audio_policy_engine_configuration.xml "+
- "--productstrategiesstructurefile $(location :product_strategies_structure_template) " +
- "--outputfile $(out)",
+ cmd: "cp $(locations :audio_policy_engine_configuration_files) $(genDir) && ls -l $(genDir) &&" +
+ "$(location buildStrategiesStructureFile) " +
+ "--audiopolicyengineconfigurationfile $(genDir)/audio_policy_engine_configuration.xml " +
+ "--productstrategiesstructurefile $(location :product_strategies_structure_template) " +
+ "--outputfile $(out)",
srcs: [
// The commented inputs must be provided to use this genrule_defaults
// ":audio_policy_engine_configuration_files",
@@ -146,9 +146,9 @@
name: "buildcommontypesstructurerule",
tools: ["buildCommonTypesStructureFile"],
cmd: "$(location buildCommonTypesStructureFile) " +
- "--androidaudiobaseheader $(location :libaudio_system_audio_base) " +
- "--commontypesstructure $(location :common_types_structure_template) " +
- "--outputfile $(out)",
+ "--androidaudiobaseheader $(location :libaudio_system_audio_base) " +
+ "--commontypesstructure $(location :common_types_structure_template) " +
+ "--outputfile $(out)",
srcs: [
":common_types_structure_template",
":libaudio_system_audio_base",
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.bp b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
index 0ef0b82..770e56c 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.bp
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
@@ -17,14 +17,14 @@
"-Wextra",
],
header_libs: [
- "libbase_headers",
- "libaudiopolicycommon",
"libaudiofoundation_headers",
+ "libaudiopolicycommon",
+ "libbase_headers",
],
shared_libs: [
"liblog",
- "libutils",
"libmedia_helper",
"libparameter",
+ "libutils",
],
}
diff --git a/services/audiopolicy/enginedefault/Android.bp b/services/audiopolicy/enginedefault/Android.bp
index 7d4ccab..f5958ba 100644
--- a/services/audiopolicy/enginedefault/Android.bp
+++ b/services/audiopolicy/enginedefault/Android.bp
@@ -14,15 +14,15 @@
"src/EngineInstance.cpp",
],
cflags: [
- "-fvisibility=hidden",
"-Wall",
"-Werror",
"-Wextra",
+ "-fvisibility=hidden",
],
header_libs: [
- "libbase_headers",
"libaudiopolicycommon",
"libaudiopolicyengine_interface_headers",
+ "libbase_headers",
],
static_libs: [
"libaudiopolicyengine_common",
@@ -31,13 +31,16 @@
shared_libs: [
"libaudio_aidl_conversion_common_cpp",
"libaudiofoundation",
+ "libaudiopolicy",
"libaudiopolicycomponents",
"libbase",
- "liblog",
"libcutils",
- "libutils",
+ "liblog",
"libmedia_helper",
- "libaudiopolicy",
+ "libutils",
"libxml2",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
}
diff --git a/services/audiopolicy/enginedefault/config/example/Android.bp b/services/audiopolicy/enginedefault/config/example/Android.bp
index 59a704b..679b455 100644
--- a/services/audiopolicy/enginedefault/config/example/Android.bp
+++ b/services/audiopolicy/enginedefault/config/example/Android.bp
@@ -33,21 +33,24 @@
vendor: true,
src: "phone/audio_policy_engine_configuration.xml",
required: [
- ":audio_policy_engine_stream_volumes.xml",
":audio_policy_engine_default_stream_volumes.xml",
":audio_policy_engine_product_strategies.xml",
+ ":audio_policy_engine_stream_volumes.xml",
],
}
+
prebuilt_etc {
name: "audio_policy_engine_product_strategies.xml",
vendor: true,
src: "phone/audio_policy_engine_product_strategies.xml",
}
+
prebuilt_etc {
name: "audio_policy_engine_stream_volumes.xml",
vendor: true,
src: "phone/audio_policy_engine_stream_volumes.xml",
}
+
prebuilt_etc {
name: "audio_policy_engine_default_stream_volumes.xml",
vendor: true,
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index fd240e3..d276a76 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -36,22 +36,22 @@
shared_libs: [
"android.hardware.audio.common-util",
"capture_state_listener-aidl-cpp",
+ "framework-permission-aidl-cpp",
"libaudioclient",
"libaudiofoundation",
+ "libaudiopolicy",
"libaudiopolicycomponents",
+ "libaudiopolicymanagerdefault",
"libbase",
+ "libbinder",
"libcutils",
- "libhidlbase",
"libdl",
+ "libhidlbase",
"liblog",
"libmedia_helper",
"libmediametrics",
"libutils",
"libxml2",
- "libbinder",
- "libaudiopolicy",
- "libaudiopolicymanagerdefault",
- "framework-permission-aidl-cpp",
],
static_libs: [
"android.hardware.audio.common@7.0-enums",
diff --git a/services/audiopolicy/fuzzer/aidl/Android.bp b/services/audiopolicy/fuzzer/aidl/Android.bp
index 38a2cde..1227db9 100644
--- a/services/audiopolicy/fuzzer/aidl/Android.bp
+++ b/services/audiopolicy/fuzzer/aidl/Android.bp
@@ -22,30 +22,31 @@
"audiopolicy-aidl-cpp",
"audiopolicy-types-aidl-cpp",
"framework-permission-aidl-cpp",
+ "libactivitymanager_aidl",
+ "libaudioclient",
+ "libaudioflinger",
+ "libaudiohal",
"libaudiopolicy",
"libaudiopolicymanagerdefault",
- "libactivitymanager_aidl",
- "libaudiohal",
"libaudiopolicyservice",
- "libaudioflinger",
- "libaudioclient",
"libaudioprocessing",
"libhidlbase",
"liblog",
"libmediautils",
- "libnblog",
"libnbaio",
+ "libnblog",
"libpowermanager",
"libvibrator",
"packagemanager_aidl-cpp",
],
static_libs: [
+ "libaudiomockhal",
"libfakeservicemanager",
"libmediaplayerservice",
],
header_libs: [
- "libaudiohal_headers",
"libaudioflinger_headers",
+ "libaudiohal_headers",
"libaudiopolicymanager_interface_headers",
"libbinder_headers",
"libmedia_headers",
@@ -69,6 +70,9 @@
srcs: ["audiopolicy_aidl_fuzzer.cpp"],
defaults: [
"audiopolicy_aidl_fuzzer_defaults",
+ "latest_android_hardware_audio_core_ndk_shared",
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
+ "latest_android_hardware_audio_effect_ndk_shared",
"service_fuzzer_defaults",
],
}
diff --git a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
index ca79c49..f5e72f5 100644
--- a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
@@ -18,8 +18,12 @@
#include <AudioFlinger.h>
#include <android-base/logging.h>
#include <android/binder_interface_utils.h>
+#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <android/media/IAudioPolicyService.h>
+#include <core-mock/ConfigMock.h>
+#include <core-mock/ModuleMock.h>
+#include <effect-mock/FactoryMock.h>
#include <fakeservicemanager/FakeServiceManager.h>
#include <fuzzbinder/libbinder_driver.h>
#include <fuzzbinder/random_binder.h>
@@ -34,6 +38,7 @@
[[clang::no_destroy]] static std::once_flag gSmOnce;
sp<FakeServiceManager> gFakeServiceManager;
+sp<AudioPolicyService> gAudioPolicyService;
bool addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
FuzzedDataProvider& fdp) {
@@ -45,42 +50,58 @@
return true;
}
+extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
+ /* Create a FakeServiceManager instance and add required services */
+ gFakeServiceManager = sp<FakeServiceManager>::make();
+ setDefaultServiceManager(gFakeServiceManager);
+
+ auto configService = ndk::SharedRefBase::make<ConfigMock>();
+ CHECK_EQ(NO_ERROR, AServiceManager_addService(configService.get()->asBinder().get(),
+ "android.hardware.audio.core.IConfig/default"));
+
+ auto factoryService = ndk::SharedRefBase::make<FactoryMock>();
+ CHECK_EQ(NO_ERROR,
+ AServiceManager_addService(factoryService.get()->asBinder().get(),
+ "android.hardware.audio.effect.IFactory/default"));
+
+ auto moduleService = ndk::SharedRefBase::make<ModuleMock>();
+ CHECK_EQ(NO_ERROR, AServiceManager_addService(moduleService.get()->asBinder().get(),
+ "android.hardware.audio.core.IModule/default"));
+
+ // Disable creating thread pool for fuzzer instance of audio flinger and audio policy services
+ AudioSystem::disableThreadPool();
+
+ return 0;
+}
+
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
FuzzedDataProvider fdp(data, size);
- std::call_once(gSmOnce, [&] {
- /* Create a FakeServiceManager instance and add required services */
- gFakeServiceManager = sp<FakeServiceManager>::make();
- setDefaultServiceManager(gFakeServiceManager);
- });
- gFakeServiceManager->clear();
-
- for (const char* service :
- {"activity", "sensor_privacy", "permission", "scheduling_policy",
- "android.hardware.audio.core.IConfig", "batterystats", "media.metrics"}) {
+ for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
+ "batterystats", "media.metrics"}) {
if (!addService(String16(service), gFakeServiceManager, fdp)) {
return 0;
}
}
- const auto audioFlinger = sp<AudioFlinger>::make();
- const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+ // TODO(330882064) : Initialise Audio Flinger and Audio Policy services every time
+ std::call_once(gSmOnce, [&] {
+ const auto audioFlinger = sp<AudioFlinger>::make();
+ const auto audioFlingerServerAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+ CHECK_EQ(NO_ERROR,
+ gFakeServiceManager->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+ IInterface::asBinder(audioFlingerServerAdapter),
+ false /* allowIsolated */,
+ IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
- CHECK_EQ(NO_ERROR,
- gFakeServiceManager->addService(
- String16(IAudioFlinger::DEFAULT_SERVICE_NAME), IInterface::asBinder(afAdapter),
- false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+ gAudioPolicyService = sp<AudioPolicyService>::make();
+ CHECK_EQ(NO_ERROR,
+ gFakeServiceManager->addService(String16("media.audio_policy"),
+ gAudioPolicyService, false /* allowIsolated */,
+ IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+ });
- AudioSystem::get_audio_flinger_for_fuzzer();
- const auto audioPolicyService = sp<AudioPolicyService>::make();
-
- CHECK_EQ(NO_ERROR,
- gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
- false /* allowIsolated */,
- IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
-
- fuzzService(media::IAudioPolicyService::asBinder(audioPolicyService),
- FuzzedDataProvider(data, size));
+ fuzzService(media::IAudioPolicyService::asBinder(gAudioPolicyService), std::move(fdp));
return 0;
}
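The fuzzer rework above moves process-wide setup (fake service manager, mock IConfig/IModule/IFactory HAL services) into LLVMFuzzerInitialize() and creates the AudioFlinger and AudioPolicyService instances once under std::call_once, leaving LLVMFuzzerTestOneInput() to feed data to the already-registered binder object. A minimal sketch of that libFuzzer structure with placeholder setup, not the real services:

```cpp
#include <cstddef>
#include <cstdint>
#include <mutex>

// Placeholder for expensive, process-wide state (stands in for the fake
// service manager plus the mock HAL services registered by the real fuzzer).
struct Environment {
    bool ready = false;
};

static Environment gEnv;
static std::once_flag gOnce;

// Runs exactly once, before the first fuzz iteration.
extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
    gEnv.ready = true;  // e.g. register mock services, disable thread pools
    return 0;
}

// Called for every input; per-iteration work only.
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
    // One-time creation of long-lived objects that do not belong in
    // LLVMFuzzerInitialize; kept identical across iterations for now,
    // mirroring the TODO left in the patch.
    std::call_once(gOnce, [] { /* construct the service under test */ });
    (void)data;
    (void)size;
    return 0;  // libFuzzer expects 0 here
}
```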
diff --git a/services/audiopolicy/managerdefault/Android.bp b/services/audiopolicy/managerdefault/Android.bp
index a1785da..a3acdc7 100644
--- a/services/audiopolicy/managerdefault/Android.bp
+++ b/services/audiopolicy/managerdefault/Android.bp
@@ -23,25 +23,25 @@
shared_libs: [
"libaudiofoundation",
+ "libaudiopolicy",
"libaudiopolicycomponents",
+ "libbinder",
"libcutils",
"libdl",
- "libutils",
+ "libhidlbase",
"liblog",
- "libaudiopolicy",
"libmedia_helper",
"libmediametrics",
- "libbinder",
- "libhidlbase",
+ "libutils",
"libxml2",
// The default audio policy engine is always present in the system image.
// libaudiopolicyengineconfigurable can be built in addition by specifying
// a dependency on it in the device makefile. There will be no build time
// conflict with libaudiopolicyenginedefault.
- "libaudiopolicyenginedefault",
+ "audioclient-types-aidl-cpp",
"framework-permission-aidl-cpp",
"libaudioclient_aidl_conversion",
- "audioclient-types-aidl-cpp",
+ "libaudiopolicyenginedefault",
],
header_libs: [
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 42afa1e..5c4ab7b 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -123,8 +123,8 @@
device->toAudioPort(&devicePort);
if (status_t status = mpClientInterface->setDeviceConnectedState(&devicePort, state);
status != OK) {
- ALOGE("Error %d while setting connected state for device %s",
- static_cast<int>(state),
+ ALOGE("Error %d while setting connected state %d for device %s",
+ status, static_cast<int>(state),
device->getDeviceTypeAddr().toString(false).c_str());
}
}
@@ -212,9 +212,9 @@
if (checkOutputsForDevice(device, state, outputs) != NO_ERROR) {
mAvailableOutputDevices.remove(device);
- mHwModules.cleanUpForDevice(device);
-
broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);
+
+ mHwModules.cleanUpForDevice(device);
return INVALID_OPERATION;
}
@@ -2086,7 +2086,14 @@
// sampling rate match
if (samplingRate > SAMPLE_RATE_HZ_DEFAULT) {
- currentMatchCriteria[4] = outputDesc->getSamplingRate();
+ int diff; // avoid unsigned integer overflow.
+ __builtin_sub_overflow(outputDesc->getSamplingRate(), samplingRate, &diff);
+
+ // prefer the closest output sampling rate greater than or equal to target
+ // if none exists, prefer the closest output sampling rate less than target.
+ //
+ // criteria is offset to make non-negative.
+ currentMatchCriteria[4] = diff >= 0 ? -diff + 200'000'000 : diff + 100'000'000;
}
// performance flags match
@@ -6390,6 +6397,15 @@
if ((desc->mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0
&& !isOutputOnlyAvailableRouteToSomeDevice(desc)) {
outputsClosed.push_back(desc->mIoHandle);
+ nextAudioPortGeneration();
+ ssize_t index = mAudioPatches.indexOfKey(desc->getPatchHandle());
+ if (index >= 0) {
+ sp<AudioPatch> patchDesc = mAudioPatches.valueAt(index);
+ (void) /*status_t status*/ mpClientInterface->releaseAudioPatch(
+ patchDesc->getAfHandle(), 0);
+ mAudioPatches.removeItemsAt(index);
+ mpClientInterface->onAudioPatchListUpdate();
+ }
desc->close();
}
}
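The first AudioPolicyManager hunk above turns the sampling-rate criterion into a ranked score: outputs at or above the requested rate always beat outputs below it, closeness wins within each group, and the subtraction goes through __builtin_sub_overflow so a lower output rate cannot underflow an unsigned value. A standalone sketch of that scoring, reusing the 200'000'000 / 100'000'000 offsets from the patch:

```cpp
#include <cstdint>
#include <cstdio>

// Higher score == better match. Outputs at or above the requested rate score
// in the 200'000'000 band (exact match highest); outputs below the request
// score in the 100'000'000 band, so any >= candidate beats every < candidate.
int rateMatchScore(uint32_t outputRate, uint32_t requestedRate) {
    int diff;  // signed difference, wrapped instead of overflowing
    (void) __builtin_sub_overflow(outputRate, requestedRate, &diff);
    return diff >= 0 ? -diff + 200'000'000 : diff + 100'000'000;
}

int main() {
    // 48 kHz request: 48 kHz beats 96 kHz, and both beat 44.1 kHz.
    std::printf("%d %d %d\n",
                rateMatchScore(48000, 48000),   // 200000000
                rateMatchScore(96000, 48000),   // 199952000
                rateMatchScore(44100, 48000));  // 99996100
    return 0;
}
```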
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index fb55225..1a0bf3c 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -11,6 +11,14 @@
name: "libaudiopolicyservice_dependencies",
shared_libs: [
+ "audioclient-types-aidl-cpp",
+ "audioflinger-aidl-cpp",
+ "audiopolicy-aidl-cpp",
+ "audiopolicy-types-aidl-cpp",
+ "capture_state_listener-aidl-cpp",
+ "com.android.media.audio-aconfig-cc",
+ "framework-permission-aidl-cpp",
+ "libPlatformProperties",
"libactivitymanager_aidl",
"libaudioclient",
"libaudioclient_aidl_conversion",
@@ -31,54 +39,45 @@
"libmediametrics",
"libmediautils",
"libpermission",
- "libPlatformProperties",
"libsensor",
"libsensorprivacy",
"libshmemcompat",
"libstagefright_foundation",
"libutils",
"libxml2",
- "audioclient-types-aidl-cpp",
- "audioflinger-aidl-cpp",
- "audiopolicy-aidl-cpp",
- "audiopolicy-types-aidl-cpp",
- "capture_state_listener-aidl-cpp",
- "com.android.media.audio-aconfig-cc",
- "framework-permission-aidl-cpp",
"packagemanager_aidl-cpp",
"spatializer-aidl-cpp",
],
static_libs: [
- "libeffectsconfig",
"libaudiopolicycomponents",
- ]
+ "libeffectsconfig",
+ ],
}
cc_library {
name: "libaudiopolicyservice",
defaults: [
- "libaudiopolicyservice_dependencies",
"latest_android_media_audio_common_types_cpp_shared",
+ "libaudiopolicyservice_dependencies",
],
srcs: [
- "AudioRecordClient.cpp",
"AudioPolicyClientImpl.cpp",
"AudioPolicyEffects.cpp",
"AudioPolicyInterfaceImpl.cpp",
"AudioPolicyService.cpp",
+ "AudioRecordClient.cpp",
"CaptureStateNotifier.cpp",
"Spatializer.cpp",
"SpatializerPoseController.cpp",
],
include_dirs: [
- "frameworks/av/services/audioflinger"
+ "frameworks/av/services/audioflinger",
],
-
static_libs: [
"framework-permission-aidl-cpp",
],
@@ -92,18 +91,18 @@
],
cflags: [
- "-fvisibility=hidden",
- "-Werror",
"-Wall",
+ "-Werror",
"-Wthread-safety",
+ "-fvisibility=hidden",
],
export_shared_lib_headers: [
+ "framework-permission-aidl-cpp",
"libactivitymanager_aidl",
"libaudiousecasevalidation",
"libheadtracking",
"libheadtracking-binding",
"libsensorprivacy",
- "framework-permission-aidl-cpp",
],
}
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 42f7899..d67ddb6 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -42,55 +42,25 @@
// ----------------------------------------------------------------------------
AudioPolicyEffects::AudioPolicyEffects(const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
+ // Note: clang thread-safety permits the ctor to call guarded _l methods without
+ // acquiring the associated mutex capability as standard practice is to assume
+ // single threaded construction and destruction.
+
// load xml config with effectsFactoryHal
- status_t loadResult = loadAudioEffectConfig(effectsFactoryHal);
+ status_t loadResult = loadAudioEffectConfig_ll(effectsFactoryHal);
if (loadResult < 0) {
ALOGW("Failed to query effect configuration, fallback to load .conf");
// load automatic audio effect modules
if (access(AUDIO_EFFECT_VENDOR_CONFIG_FILE, R_OK) == 0) {
- loadAudioEffectConfigLegacy(AUDIO_EFFECT_VENDOR_CONFIG_FILE);
+ loadAudioEffectConfigLegacy_l(AUDIO_EFFECT_VENDOR_CONFIG_FILE);
} else if (access(AUDIO_EFFECT_DEFAULT_CONFIG_FILE, R_OK) == 0) {
- loadAudioEffectConfigLegacy(AUDIO_EFFECT_DEFAULT_CONFIG_FILE);
+ loadAudioEffectConfigLegacy_l(AUDIO_EFFECT_DEFAULT_CONFIG_FILE);
}
} else if (loadResult > 0) {
ALOGE("Effect config is partially invalid, skipped %d elements", loadResult);
}
}
-void AudioPolicyEffects::setDefaultDeviceEffects() {
- mDefaultDeviceEffectFuture = std::async(
- std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
-}
-
-AudioPolicyEffects::~AudioPolicyEffects()
-{
- size_t i = 0;
- // release audio input processing resources
- for (i = 0; i < mInputSources.size(); i++) {
- delete mInputSources.valueAt(i);
- }
- mInputSources.clear();
-
- for (i = 0; i < mInputSessions.size(); i++) {
- mInputSessions.valueAt(i)->mEffects.clear();
- delete mInputSessions.valueAt(i);
- }
- mInputSessions.clear();
-
- // release audio output processing resources
- for (i = 0; i < mOutputStreams.size(); i++) {
- delete mOutputStreams.valueAt(i);
- }
- mOutputStreams.clear();
-
- for (i = 0; i < mOutputSessions.size(); i++) {
- mOutputSessions.valueAt(i)->mEffects.clear();
- delete mOutputSessions.valueAt(i);
- }
- mOutputSessions.clear();
-}
-
-
status_t AudioPolicyEffects::addInputEffects(audio_io_handle_t input,
audio_source_t inputSource,
audio_session_t audioSession)
@@ -102,47 +72,42 @@
AUDIO_SOURCE_VOICE_RECOGNITION : inputSource;
audio_utils::lock_guard _l(mMutex);
- ssize_t index = mInputSources.indexOfKey(aliasSource);
- if (index < 0) {
+ auto sourceIt = mInputSources.find(aliasSource);
+ if (sourceIt == mInputSources.end()) {
ALOGV("addInputEffects(): no processing needs to be attached to this source");
return status;
}
- ssize_t idx = mInputSessions.indexOfKey(audioSession);
- EffectVector *sessionDesc;
- if (idx < 0) {
- sessionDesc = new EffectVector(audioSession);
- mInputSessions.add(audioSession, sessionDesc);
- } else {
- // EffectVector is existing and we just need to increase ref count
- sessionDesc = mInputSessions.valueAt(idx);
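+ // operator[] inserts a null shared_ptr for a new session; allocate the EffectVector on first use.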
+ std::shared_ptr<EffectVector>& sessionDesc = mInputSessions[audioSession];
+ if (sessionDesc == nullptr) {
+ sessionDesc = std::make_shared<EffectVector>(audioSession);
}
sessionDesc->mRefCount++;
ALOGV("addInputEffects(): input: %d, refCount: %d", input, sessionDesc->mRefCount);
if (sessionDesc->mRefCount == 1) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- Vector <EffectDesc *> effects = mInputSources.valueAt(index)->mEffects;
- for (size_t i = 0; i < effects.size(); i++) {
- EffectDesc *effect = effects[i];
+ const std::shared_ptr<EffectDescVector>& effects = sourceIt->second;
+ for (const std::shared_ptr<EffectDesc>& effect : *effects) {
AttributionSourceState attributionSource;
attributionSource.packageName = "android";
attributionSource.token = sp<BBinder>::make();
- sp<AudioEffect> fx = new AudioEffect(attributionSource);
+ auto fx = sp<AudioEffect>::make(attributionSource);
fx->set(nullptr /*type */, &effect->mUuid, -1 /* priority */, nullptr /* callback */,
audioSession, input);
status_t status = fx->initCheck();
if (status != NO_ERROR && status != ALREADY_EXISTS) {
ALOGW("addInputEffects(): failed to create Fx %s on source %d",
- effect->mName, (int32_t)aliasSource);
+ effect->mName.c_str(), (int32_t)aliasSource);
// fx goes out of scope and strong ref on AudioEffect is released
continue;
}
for (size_t j = 0; j < effect->mParams.size(); j++) {
- fx->setParameter(effect->mParams[j]);
+ // const_cast needed because setParameter() takes a non-const effect_param_t*.
+ fx->setParameter(const_cast<effect_param_t*>(effect->mParams[j].get()));
}
ALOGV("addInputEffects(): added Fx %s on source: %d",
- effect->mName, (int32_t)aliasSource);
- sessionDesc->mEffects.add(fx);
+ effect->mName.c_str(), (int32_t)aliasSource);
+ sessionDesc->mEffects.push_back(std::move(fx));
}
sessionDesc->setProcessorEnabled(true);
IPCThreadState::self()->restoreCallingIdentity(token);
@@ -157,17 +122,16 @@
status_t status = NO_ERROR;
audio_utils::lock_guard _l(mMutex);
- ssize_t index = mInputSessions.indexOfKey(audioSession);
- if (index < 0) {
+ auto it = mInputSessions.find(audioSession);
+ if (it == mInputSessions.end()) {
return status;
}
- EffectVector *sessionDesc = mInputSessions.valueAt(index);
+ std::shared_ptr<EffectVector> sessionDesc = it->second;
sessionDesc->mRefCount--;
ALOGV("releaseInputEffects(): input: %d, refCount: %d", input, sessionDesc->mRefCount);
if (sessionDesc->mRefCount == 0) {
sessionDesc->setProcessorEnabled(false);
- delete sessionDesc;
- mInputSessions.removeItemsAt(index);
+ mInputSessions.erase(it);
ALOGV("releaseInputEffects(): all effects released");
}
return status;
@@ -180,23 +144,15 @@
status_t status = NO_ERROR;
audio_utils::lock_guard _l(mMutex);
- size_t index;
- for (index = 0; index < mInputSessions.size(); index++) {
- if (mInputSessions.valueAt(index)->mSessionId == audioSession) {
- break;
- }
- }
- if (index == mInputSessions.size()) {
+ auto it = mInputSessions.find(audioSession);
+ if (it == mInputSessions.end()) {
*count = 0;
return BAD_VALUE;
}
- Vector< sp<AudioEffect> > effects = mInputSessions.valueAt(index)->mEffects;
-
- for (size_t i = 0; i < effects.size(); i++) {
- effect_descriptor_t desc = effects[i]->descriptor();
- if (i < *count) {
- descriptors[i] = desc;
- }
+ const std::vector<sp<AudioEffect>>& effects = it->second->mEffects;
+ const size_t copysize = std::min(effects.size(), (size_t)*count);
+ for (size_t i = 0; i < copysize; i++) {
+ descriptors[i] = effects[i]->descriptor();
}
if (effects.size() > *count) {
status = NO_MEMORY;
@@ -213,23 +169,15 @@
status_t status = NO_ERROR;
audio_utils::lock_guard _l(mMutex);
- size_t index;
- for (index = 0; index < mOutputSessions.size(); index++) {
- if (mOutputSessions.valueAt(index)->mSessionId == audioSession) {
- break;
- }
- }
- if (index == mOutputSessions.size()) {
+ auto it = mOutputSessions.find(audioSession);
+ if (it == mOutputSessions.end()) {
*count = 0;
return BAD_VALUE;
}
- Vector< sp<AudioEffect> > effects = mOutputSessions.valueAt(index)->mEffects;
-
- for (size_t i = 0; i < effects.size(); i++) {
- effect_descriptor_t desc = effects[i]->descriptor();
- if (i < *count) {
- descriptors[i] = desc;
- }
+ const std::vector<sp<AudioEffect>>& effects = it->second->mEffects;
+ const size_t copysize = std::min(effects.size(), (size_t)*count);
+ for (size_t i = 0; i < copysize; i++) {
+ descriptors[i] = effects[i]->descriptor();
}
if (effects.size() > *count) {
status = NO_MEMORY;
@@ -252,20 +200,15 @@
if (stream >= AUDIO_STREAM_PUBLIC_CNT) {
stream = AUDIO_STREAM_MUSIC;
}
- ssize_t index = mOutputStreams.indexOfKey(stream);
- if (index < 0) {
+ auto it = mOutputStreams.find(stream);
+ if (it == mOutputStreams.end()) {
ALOGV("addOutputSessionEffects(): no output processing needed for this stream");
return NO_ERROR;
}
- ssize_t idx = mOutputSessions.indexOfKey(audioSession);
- EffectVector *procDesc;
- if (idx < 0) {
- procDesc = new EffectVector(audioSession);
- mOutputSessions.add(audioSession, procDesc);
- } else {
- // EffectVector is existing and we just need to increase ref count
- procDesc = mOutputSessions.valueAt(idx);
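+ // As in addInputEffects(), operator[] creates a null entry for a new session; allocate on first use.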
+ std::shared_ptr<EffectVector>& procDesc = mOutputSessions[audioSession];
+ if (procDesc == nullptr) {
+ procDesc = std::make_shared<EffectVector>(audioSession);
}
procDesc->mRefCount++;
@@ -274,25 +217,24 @@
if (procDesc->mRefCount == 1) {
// make sure effects are associated to audio server even if we are executing a binder call
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- Vector <EffectDesc *> effects = mOutputStreams.valueAt(index)->mEffects;
- for (size_t i = 0; i < effects.size(); i++) {
- EffectDesc *effect = effects[i];
+ const std::shared_ptr<EffectDescVector>& effects = it->second;
+ for (const std::shared_ptr<EffectDesc>& effect : *effects) {
AttributionSourceState attributionSource;
attributionSource.packageName = "android";
attributionSource.token = sp<BBinder>::make();
- sp<AudioEffect> fx = new AudioEffect(attributionSource);
+ auto fx = sp<AudioEffect>::make(attributionSource);
fx->set(nullptr /* type */, &effect->mUuid, 0 /* priority */, nullptr /* callback */,
audioSession, output);
status_t status = fx->initCheck();
if (status != NO_ERROR && status != ALREADY_EXISTS) {
ALOGE("addOutputSessionEffects(): failed to create Fx %s on session %d",
- effect->mName, audioSession);
+ effect->mName.c_str(), audioSession);
// fx goes out of scope and strong ref on AudioEffect is released
continue;
}
ALOGV("addOutputSessionEffects(): added Fx %s on session: %d for stream: %d",
- effect->mName, audioSession, (int32_t)stream);
- procDesc->mEffects.add(fx);
+ effect->mName.c_str(), audioSession, (int32_t)stream);
+ procDesc->mEffects.push_back(std::move(fx));
}
procDesc->setProcessorEnabled(true);
@@ -305,30 +247,28 @@
audio_stream_type_t stream,
audio_session_t audioSession)
{
- status_t status = NO_ERROR;
(void) output; // argument not used for now
(void) stream; // argument not used for now
audio_utils::lock_guard _l(mMutex);
- ssize_t index = mOutputSessions.indexOfKey(audioSession);
- if (index < 0) {
+ auto it = mOutputSessions.find(audioSession);
+ if (it == mOutputSessions.end()) {
ALOGV("releaseOutputSessionEffects: no output processing was attached to this stream");
return NO_ERROR;
}
- EffectVector *procDesc = mOutputSessions.valueAt(index);
+ std::shared_ptr<EffectVector> procDesc = it->second;
procDesc->mRefCount--;
ALOGV("releaseOutputSessionEffects(): session: %d, refCount: %d",
audioSession, procDesc->mRefCount);
if (procDesc->mRefCount == 0) {
procDesc->setProcessorEnabled(false);
procDesc->mEffects.clear();
- delete procDesc;
- mOutputSessions.removeItemsAt(index);
+ mOutputSessions.erase(it);
ALOGV("releaseOutputSessionEffects(): output processing released from session: %d",
audioSession);
}
- return status;
+ return NO_ERROR;
}
status_t AudioPolicyEffects::addSourceDefaultEffect(const effect_uuid_t *type,
@@ -373,14 +313,9 @@
audio_utils::lock_guard _l(mMutex);
// Find the EffectDescVector for the given source type, or create a new one if necessary.
- ssize_t index = mInputSources.indexOfKey(source);
- EffectDescVector *desc = NULL;
- if (index < 0) {
- // No effects for this source type yet.
- desc = new EffectDescVector();
- mInputSources.add(source, desc);
- } else {
- desc = mInputSources.valueAt(index);
+ std::shared_ptr<EffectDescVector>& desc = mInputSources[source];
+ if (desc == nullptr) {
+ desc = std::make_shared<EffectDescVector>();
}
// Create a new effect and add it to the vector.
@@ -389,9 +324,9 @@
ALOGE("addSourceDefaultEffect(): failed to get new unique id.");
return res;
}
- EffectDesc *effect = new EffectDesc(
+ std::shared_ptr<EffectDesc> effect = std::make_shared<EffectDesc>(
descriptor.name, descriptor.type, opPackageName, descriptor.uuid, priority, *id);
- desc->mEffects.add(effect);
+ desc->push_back(std::move(effect));
// TODO(b/71813697): Support setting params as well.
// TODO(b/71814300): Retroactively attach to any existing sources of the given type.
@@ -438,14 +373,10 @@
audio_utils::lock_guard _l(mMutex);
// Find the EffectDescVector for the given stream type, or create a new one if necessary.
- ssize_t index = mOutputStreams.indexOfKey(stream);
- EffectDescVector *desc = NULL;
- if (index < 0) {
+ std::shared_ptr<EffectDescVector>& desc = mOutputStreams[stream];
+ if (desc == nullptr) {
// No effects for this stream type yet.
- desc = new EffectDescVector();
- mOutputStreams.add(stream, desc);
- } else {
- desc = mOutputStreams.valueAt(index);
+ desc = std::make_shared<EffectDescVector>();
}
// Create a new effect and add it to the vector.
@@ -454,9 +385,9 @@
ALOGE("addStreamDefaultEffect(): failed to get new unique id.");
return res;
}
- EffectDesc *effect = new EffectDesc(
+ std::shared_ptr<EffectDesc> effect = std::make_shared<EffectDesc>(
descriptor.name, descriptor.type, opPackageName, descriptor.uuid, priority, *id);
- desc->mEffects.add(effect);
+ desc->push_back(std::move(effect));
// TODO(b/71813697): Support setting params as well.
// TODO(b/71814300): Retroactively attach to any existing streams of the given type.
@@ -478,15 +409,13 @@
audio_utils::lock_guard _l(mMutex);
// Check each source type.
- size_t numSources = mInputSources.size();
- for (size_t i = 0; i < numSources; ++i) {
+ for (auto& [source, descVector] : mInputSources) {
// Check each effect for each source.
- EffectDescVector* descVector = mInputSources[i];
- for (auto desc = descVector->mEffects.begin(); desc != descVector->mEffects.end(); ++desc) {
+ for (auto desc = descVector->begin(); desc != descVector->end(); ++desc) {
if ((*desc)->mId == id) {
// Found it!
// TODO(b/71814300): Remove from any sources the effect was attached to.
- descVector->mEffects.erase(desc);
+ descVector->erase(desc);
// Handles are unique; there can only be one match, so return early.
return NO_ERROR;
}
@@ -509,15 +438,13 @@
audio_utils::lock_guard _l(mMutex);
// Check each stream type.
- size_t numStreams = mOutputStreams.size();
- for (size_t i = 0; i < numStreams; ++i) {
+ for (auto& [stream, descVector] : mOutputStreams) {
// Check each effect for each stream.
- EffectDescVector* descVector = mOutputStreams[i];
- for (auto desc = descVector->mEffects.begin(); desc != descVector->mEffects.end(); ++desc) {
+ for (auto desc = descVector->begin(); desc != descVector->end(); ++desc) {
if ((*desc)->mId == id) {
// Found it!
// TODO(b/71814300): Remove from any streams the effect was attached to.
- descVector->mEffects.erase(desc);
+ descVector->erase(desc);
// Handles are unique; there can only be one match, so return early.
return NO_ERROR;
}
@@ -530,8 +457,8 @@
void AudioPolicyEffects::EffectVector::setProcessorEnabled(bool enabled)
{
- for (size_t i = 0; i < mEffects.size(); i++) {
- mEffects.itemAt(i)->setEnabled(enabled);
+ for (const auto& effect : mEffects) {
+ effect->setEnabled(enabled);
}
}
@@ -540,7 +467,8 @@
// Audio processing configuration
// ----------------------------------------------------------------------------
-/*static*/ const char * const AudioPolicyEffects::kInputSourceNames[AUDIO_SOURCE_CNT -1] = {
+// We keep const char* rather than std::string_view, as C-string comparison is believed to be faster here.
+constexpr const char* kInputSourceNames[AUDIO_SOURCE_CNT - 1] = {
MIC_SRC_TAG,
VOICE_UL_SRC_TAG,
VOICE_DL_SRC_TAG,
@@ -567,7 +495,8 @@
return (audio_source_t)i;
}
-const char *AudioPolicyEffects::kStreamNames[AUDIO_STREAM_PUBLIC_CNT+1] = {
+// +1 as enum starts from -1
+constexpr const char* kStreamNames[AUDIO_STREAM_PUBLIC_CNT + 1] = {
AUDIO_STREAM_DEFAULT_TAG,
AUDIO_STREAM_VOICE_CALL_TAG,
AUDIO_STREAM_SYSTEM_TAG,
@@ -584,6 +513,7 @@
// returns the audio_stream_t enum corresponding to the output stream name or
// AUDIO_STREAM_PUBLIC_CNT if no match is found
+/* static */
audio_stream_type_t AudioPolicyEffects::streamNameToEnum(const char *name)
{
int i;
@@ -600,6 +530,7 @@
// Audio Effect Config parser
// ----------------------------------------------------------------------------
+/* static */
size_t AudioPolicyEffects::growParamSize(char **param,
size_t size,
size_t *curSize,
@@ -623,7 +554,7 @@
return pos;
}
-
+/* static */
size_t AudioPolicyEffects::readParamValue(cnode *node,
char **param,
size_t *curSize,
@@ -692,7 +623,8 @@
return len;
}
-effect_param_t *AudioPolicyEffects::loadEffectParameter(cnode *root)
+/* static */
+std::shared_ptr<const effect_param_t> AudioPolicyEffects::loadEffectParameter(cnode* root)
{
cnode *param;
cnode *value;
@@ -722,7 +654,7 @@
*ptr = atoi(param->value);
fx_param->psize = sizeof(int);
fx_param->vsize = sizeof(int);
- return fx_param;
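+ // fx_param was malloc'ed; hand it to the shared_ptr with free() as the custom deleter.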
+ return {fx_param, free};
}
}
if (param == NULL || value == NULL) {
@@ -760,42 +692,43 @@
value = value->next;
}
- return fx_param;
+ return {fx_param, free};
error:
free(fx_param);
return NULL;
}
-void AudioPolicyEffects::loadEffectParameters(cnode *root, Vector <effect_param_t *>& params)
+/* static */
+void AudioPolicyEffects::loadEffectParameters(
+ cnode* root, std::vector<std::shared_ptr<const effect_param_t>>& params)
{
cnode *node = root->first_child;
while (node) {
ALOGV("loadEffectParameters() loading param %s", node->name);
- effect_param_t *param = loadEffectParameter(node);
- if (param != NULL) {
- params.add(param);
+ const auto param = loadEffectParameter(node);
+ if (param != nullptr) {
+ params.push_back(param);
}
node = node->next;
}
}
-
-AudioPolicyEffects::EffectDescVector *AudioPolicyEffects::loadEffectConfig(
- cnode *root,
- const Vector <EffectDesc *>& effects)
+/* static */
+std::shared_ptr<AudioPolicyEffects::EffectDescVector> AudioPolicyEffects::loadEffectConfig(
+ cnode* root, const EffectDescVector& effects)
{
cnode *node = root->first_child;
if (node == NULL) {
ALOGW("loadInputSource() empty element %s", root->name);
return NULL;
}
- EffectDescVector *desc = new EffectDescVector();
+ auto desc = std::make_shared<EffectDescVector>();
while (node) {
size_t i;
for (i = 0; i < effects.size(); i++) {
- if (strncmp(effects[i]->mName, node->name, EFFECT_STRING_LEN_MAX) == 0) {
+ if (effects[i]->mName == node->name) {
ALOGV("loadEffectConfig() found effect %s in list", node->name);
break;
}
@@ -805,23 +738,22 @@
node = node->next;
continue;
}
- EffectDesc *effect = new EffectDesc(*effects[i]); // deep copy
+ auto effect = std::make_shared<EffectDesc>(*effects[i]); // copy the descriptor; const params are shared
loadEffectParameters(node, effect->mParams);
ALOGV("loadEffectConfig() adding effect %s uuid %08x",
- effect->mName, effect->mUuid.timeLow);
- desc->mEffects.add(effect);
+ effect->mName.c_str(), effect->mUuid.timeLow);
+ desc->push_back(std::move(effect));
node = node->next;
}
- if (desc->mEffects.size() == 0) {
+ if (desc->empty()) {
ALOGW("loadEffectConfig() no valid effects found in config %s", root->name);
- delete desc;
- return NULL;
+ return nullptr;
}
return desc;
}
-status_t AudioPolicyEffects::loadInputEffectConfigurations(cnode *root,
- const Vector <EffectDesc *>& effects)
+status_t AudioPolicyEffects::loadInputEffectConfigurations_l(cnode* root,
+ const EffectDescVector& effects)
{
cnode *node = config_find(root, PREPROCESSING_TAG);
if (node == NULL) {
@@ -831,24 +763,24 @@
while (node) {
audio_source_t source = inputSourceNameToEnum(node->name);
if (source == AUDIO_SOURCE_CNT) {
- ALOGW("loadInputSources() invalid input source %s", node->name);
+ ALOGW("%s() invalid input source %s", __func__, node->name);
node = node->next;
continue;
}
- ALOGV("loadInputSources() loading input source %s", node->name);
- EffectDescVector *desc = loadEffectConfig(node, effects);
+ ALOGV("%s() loading input source %s", __func__, node->name);
+ auto desc = loadEffectConfig(node, effects);
if (desc == NULL) {
node = node->next;
continue;
}
- mInputSources.add(source, desc);
+ mInputSources[source] = std::move(desc);
node = node->next;
}
return NO_ERROR;
}
-status_t AudioPolicyEffects::loadStreamEffectConfigurations(cnode *root,
- const Vector <EffectDesc *>& effects)
+status_t AudioPolicyEffects::loadStreamEffectConfigurations_l(cnode* root,
+ const EffectDescVector& effects)
{
cnode *node = config_find(root, OUTPUT_SESSION_PROCESSING_TAG);
if (node == NULL) {
@@ -858,23 +790,24 @@
while (node) {
audio_stream_type_t stream = streamNameToEnum(node->name);
if (stream == AUDIO_STREAM_PUBLIC_CNT) {
- ALOGW("loadStreamEffectConfigurations() invalid output stream %s", node->name);
+ ALOGW("%s() invalid output stream %s", __func__, node->name);
node = node->next;
continue;
}
- ALOGV("loadStreamEffectConfigurations() loading output stream %s", node->name);
- EffectDescVector *desc = loadEffectConfig(node, effects);
+ ALOGV("%s() loading output stream %s", __func__, node->name);
+ std::shared_ptr<EffectDescVector> desc = loadEffectConfig(node, effects);
if (desc == NULL) {
node = node->next;
continue;
}
- mOutputStreams.add(stream, desc);
+ mOutputStreams[stream] = std::move(desc);
node = node->next;
}
return NO_ERROR;
}
-AudioPolicyEffects::EffectDesc *AudioPolicyEffects::loadEffect(cnode *root)
+/* static */
+std::shared_ptr<AudioPolicyEffects::EffectDesc> AudioPolicyEffects::loadEffect(cnode* root)
{
cnode *node = config_find(root, UUID_TAG);
if (node == NULL) {
@@ -885,30 +818,33 @@
ALOGW("loadEffect() invalid uuid %s", node->value);
return NULL;
}
- return new EffectDesc(root->name, uuid);
+ return std::make_shared<EffectDesc>(root->name, uuid);
}
-status_t AudioPolicyEffects::loadEffects(cnode *root, Vector <EffectDesc *>& effects)
+/* static */
+android::AudioPolicyEffects::EffectDescVector AudioPolicyEffects::loadEffects(cnode *root)
{
+ EffectDescVector effects;
cnode *node = config_find(root, EFFECTS_TAG);
if (node == NULL) {
- return -ENOENT;
+ ALOGW("%s() Cannot find %s configuration", __func__, EFFECTS_TAG);
+ return effects;
}
node = node->first_child;
while (node) {
ALOGV("loadEffects() loading effect %s", node->name);
- EffectDesc *effect = loadEffect(node);
+ auto effect = loadEffect(node);
if (effect == NULL) {
node = node->next;
continue;
}
- effects.add(effect);
+ effects.push_back(std::move(effect));
node = node->next;
}
- return NO_ERROR;
+ return effects;
}
-status_t AudioPolicyEffects::loadAudioEffectConfig(
+status_t AudioPolicyEffects::loadAudioEffectConfig_ll(
const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
if (!effectsFactoryHal) {
ALOGE("%s Null EffectsFactoryHalInterface", __func__);
@@ -924,11 +860,12 @@
auto loadProcessingChain = [](auto& processingChain, auto& streams) {
for (auto& stream : processingChain) {
- auto effectDescs = std::make_unique<EffectDescVector>();
+ auto effectDescs = std::make_shared<EffectDescVector>();
for (auto& effect : stream.effects) {
- effectDescs->mEffects.add(new EffectDesc{effect->name.c_str(), effect->uuid});
+ effectDescs->push_back(
+ std::make_shared<EffectDesc>(effect->name, effect->uuid));
}
- streams.add(stream.type, effectDescs.release());
+ streams[stream.type] = std::move(effectDescs);
}
};
@@ -936,26 +873,26 @@
for (auto& deviceProcess : processingChain) {
auto effectDescs = std::make_unique<EffectDescVector>();
for (auto& effect : deviceProcess.effects) {
- effectDescs->mEffects.add(new EffectDesc{effect->name.c_str(), effect->uuid});
+ effectDescs->push_back(
+ std::make_shared<EffectDesc>(effect->name, effect->uuid));
}
- auto deviceEffects = std::make_unique<DeviceEffects>(
+ auto devEffects = std::make_unique<DeviceEffects>(
std::move(effectDescs), deviceProcess.type, deviceProcess.address);
- devicesEffects.emplace(deviceProcess.address, std::move(deviceEffects));
+ devicesEffects.emplace(deviceProcess.address, std::move(devEffects));
}
};
+ // Access to mInputSources and mOutputStreams requires mMutex.
loadProcessingChain(processings->preprocess, mInputSources);
loadProcessingChain(processings->postprocess, mOutputStreams);
- {
- audio_utils::lock_guard _l(mMutex);
- loadDeviceProcessingChain(processings->deviceprocess, mDeviceEffects);
- }
+ // Access to mDeviceEffects requires mDeviceEffectsMutex.
+ loadDeviceProcessingChain(processings->deviceprocess, mDeviceEffects);
return skippedElements;
}
-status_t AudioPolicyEffects::loadAudioEffectConfigLegacy(const char *path)
+status_t AudioPolicyEffects::loadAudioEffectConfigLegacy_l(const char *path)
{
cnode *root;
char *data;
@@ -967,15 +904,11 @@
root = config_node("", "");
config_load(root, data);
- Vector <EffectDesc *> effects;
- loadEffects(root, effects);
- loadInputEffectConfigurations(root, effects);
- loadStreamEffectConfigurations(root, effects);
+ const EffectDescVector effects = loadEffects(root);
- for (size_t i = 0; i < effects.size(); i++) {
- delete effects[i];
- }
-
+ // requires mMutex
+ loadInputEffectConfigurations_l(root, effects);
+ loadStreamEffectConfigurations_l(root, effects);
config_free(root);
free(root);
free(data);
@@ -985,14 +918,14 @@
void AudioPolicyEffects::initDefaultDeviceEffects()
{
- audio_utils::lock_guard _l(mMutex);
+ std::lock_guard _l(mDeviceEffectsMutex);
for (const auto& deviceEffectsIter : mDeviceEffects) {
const auto& deviceEffects = deviceEffectsIter.second;
- for (const auto& effectDesc : deviceEffects->mEffectDescriptors->mEffects) {
+ for (const auto& effectDesc : *deviceEffects->mEffectDescriptors) {
AttributionSourceState attributionSource;
attributionSource.packageName = "android";
attributionSource.token = sp<BBinder>::make();
- sp<AudioEffect> fx = new AudioEffect(attributionSource);
+ sp<AudioEffect> fx = sp<AudioEffect>::make(attributionSource);
fx->set(EFFECT_UUID_NULL, &effectDesc->mUuid, 0 /* priority */, nullptr /* callback */,
AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
AudioDeviceTypeAddr{deviceEffects->getDeviceType(),
@@ -1000,16 +933,16 @@
status_t status = fx->initCheck();
if (status != NO_ERROR && status != ALREADY_EXISTS) {
ALOGE("%s(): failed to create Fx %s on port type=%d address=%s", __func__,
- effectDesc->mName, deviceEffects->getDeviceType(),
+ effectDesc->mName.c_str(), deviceEffects->getDeviceType(),
deviceEffects->getDeviceAddress().c_str());
// fx goes out of scope and strong ref on AudioEffect is released
continue;
}
fx->setEnabled(true);
ALOGV("%s(): create Fx %s added on port type=%d address=%s", __func__,
- effectDesc->mName, deviceEffects->getDeviceType(),
+ effectDesc->mName.c_str(), deviceEffects->getDeviceType(),
deviceEffects->getDeviceAddress().c_str());
- deviceEffects->mEffects.push_back(fx);
+ deviceEffects->mEffects.push_back(std::move(fx));
}
}
}
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index 7f41f09..259b84a 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -14,8 +14,7 @@
* limitations under the License.
*/
-#ifndef ANDROID_AUDIOPOLICYEFFECTS_H
-#define ANDROID_AUDIOPOLICYEFFECTS_H
+#pragma once
#include <stdlib.h>
#include <stdio.h>
@@ -57,7 +56,6 @@
// First it will look whether vendor specific file exists,
// otherwise it will parse the system default file.
explicit AudioPolicyEffects(const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
- virtual ~AudioPolicyEffects();
// NOTE: methods on AudioPolicyEffects should never be called with the AudioPolicyService
// main mutex (mMutex) held as they will indirectly call back into AudioPolicyService when
@@ -67,34 +65,34 @@
// associated with audioSession
status_t queryDefaultInputEffects(audio_session_t audioSession,
effect_descriptor_t *descriptors,
- uint32_t *count);
+ uint32_t* count) EXCLUDES_AudioPolicyEffects_Mutex;
// Add all input effects associated with this input
// Effects are attached depending on the audio_source_t
status_t addInputEffects(audio_io_handle_t input,
audio_source_t inputSource,
- audio_session_t audioSession);
+ audio_session_t audioSession) EXCLUDES_AudioPolicyEffects_Mutex;
// Add all input effects associated to this input
status_t releaseInputEffects(audio_io_handle_t input,
- audio_session_t audioSession);
+ audio_session_t audioSession) EXCLUDES_AudioPolicyEffects_Mutex;
// Return a list of effect descriptors for default output effects
// associated with audioSession
status_t queryDefaultOutputSessionEffects(audio_session_t audioSession,
effect_descriptor_t *descriptors,
- uint32_t *count);
+ uint32_t* count) EXCLUDES_AudioPolicyEffects_Mutex;
// Add all output effects associated to this output
// Effects are attached depending on the audio_stream_type_t
status_t addOutputSessionEffects(audio_io_handle_t output,
audio_stream_type_t stream,
- audio_session_t audioSession);
+ audio_session_t audioSession) EXCLUDES_AudioPolicyEffects_Mutex;
// release all output effects associated with this output stream and audiosession
status_t releaseOutputSessionEffects(audio_io_handle_t output,
audio_stream_type_t stream,
- audio_session_t audioSession);
+ audio_session_t audioSession) EXCLUDES_AudioPolicyEffects_Mutex;
// Add the effect to the list of default effects for sources of type |source|.
status_t addSourceDefaultEffect(const effect_uuid_t *type,
@@ -102,7 +100,7 @@
const effect_uuid_t *uuid,
int32_t priority,
audio_source_t source,
- audio_unique_id_t* id);
+ audio_unique_id_t* id) EXCLUDES_AudioPolicyEffects_Mutex;
// Add the effect to the list of default effects for streams of a given usage.
status_t addStreamDefaultEffect(const effect_uuid_t *type,
@@ -110,36 +108,37 @@
const effect_uuid_t *uuid,
int32_t priority,
audio_usage_t usage,
- audio_unique_id_t* id);
+ audio_unique_id_t* id) EXCLUDES_AudioPolicyEffects_Mutex;
// Remove the default source effect from wherever it's attached.
- status_t removeSourceDefaultEffect(audio_unique_id_t id);
+ status_t removeSourceDefaultEffect(audio_unique_id_t id) EXCLUDES_AudioPolicyEffects_Mutex;
// Remove the default stream effect from wherever it's attached.
- status_t removeStreamDefaultEffect(audio_unique_id_t id);
+ status_t removeStreamDefaultEffect(audio_unique_id_t id) EXCLUDES_AudioPolicyEffects_Mutex;
- void setDefaultDeviceEffects();
+ // Initializes the Effects (AudioSystem must be ready as this creates audio client objects).
+ void initDefaultDeviceEffects() EXCLUDES(mDeviceEffectsMutex) EXCLUDES_EffectHandle_Mutex;
private:
- void initDefaultDeviceEffects();
// class to store the description of an effects and its parameters
// as defined in audio_effects.conf
class EffectDesc {
public:
- EffectDesc(const char *name,
+ EffectDesc(std::string_view name,
const effect_uuid_t& typeUuid,
const String16& opPackageName,
const effect_uuid_t& uuid,
uint32_t priority,
audio_unique_id_t id) :
- mName(strdup(name)),
+ mName(name),
mTypeUuid(typeUuid),
mOpPackageName(opPackageName),
mUuid(uuid),
mPriority(priority),
mId(id) { }
- EffectDesc(const char *name, const effect_uuid_t& uuid) :
+ // Modern EffectDesc usage:
+ EffectDesc(std::string_view name, const effect_uuid_t& uuid) :
EffectDesc(name,
*EFFECT_UUID_NULL,
String16(""),
@@ -147,67 +146,36 @@
0,
AUDIO_UNIQUE_ID_ALLOCATE) { }
EffectDesc(const EffectDesc& orig) :
- mName(strdup(orig.mName)),
+ mName(orig.mName),
mTypeUuid(orig.mTypeUuid),
mOpPackageName(orig.mOpPackageName),
mUuid(orig.mUuid),
mPriority(orig.mPriority),
- mId(orig.mId) {
- // deep copy mParams
- for (size_t k = 0; k < orig.mParams.size(); k++) {
- effect_param_t *origParam = orig.mParams[k];
- // psize and vsize are rounded up to an int boundary for allocation
- size_t origSize = sizeof(effect_param_t) +
- ((origParam->psize + 3) & ~3) +
- ((origParam->vsize + 3) & ~3);
- effect_param_t *dupParam = (effect_param_t *) malloc(origSize);
- memcpy(dupParam, origParam, origSize);
- // This works because the param buffer allocation is also done by
- // multiples of 4 bytes originally. In theory we should memcpy only
- // the actual param size, that is without rounding vsize.
- mParams.add(dupParam);
- }
- }
- /*virtual*/ ~EffectDesc() {
- free(mName);
- for (size_t k = 0; k < mParams.size(); k++) {
- free(mParams[k]);
- }
- }
- char *mName;
- effect_uuid_t mTypeUuid;
- String16 mOpPackageName;
- effect_uuid_t mUuid;
- int32_t mPriority;
- audio_unique_id_t mId;
- Vector <effect_param_t *> mParams;
+ mId(orig.mId),
+ mParams(orig.mParams) { }
+
+ const std::string mName;
+ const effect_uuid_t mTypeUuid;
+ const String16 mOpPackageName;
+ const effect_uuid_t mUuid;
+ const int32_t mPriority;
+ const audio_unique_id_t mId;
+ std::vector<std::shared_ptr<const effect_param_t>> mParams;
};
- // class to store voctor of EffectDesc
- class EffectDescVector {
- public:
- EffectDescVector() {}
- /*virtual*/ ~EffectDescVector() {
- for (size_t j = 0; j < mEffects.size(); j++) {
- delete mEffects[j];
- }
- }
- Vector <EffectDesc *> mEffects;
- };
+ using EffectDescVector = std::vector<std::shared_ptr<EffectDesc>>;
- // class to store voctor of AudioEffects
class EffectVector {
public:
- explicit EffectVector(audio_session_t session) : mSessionId(session), mRefCount(0) {}
- /*virtual*/ ~EffectVector() {}
+ explicit EffectVector(audio_session_t session) : mSessionId(session) {}
// Enable or disable all effects in effect vector
void setProcessorEnabled(bool enabled);
const audio_session_t mSessionId;
// AudioPolicyManager keeps mMutex, no need for lock on reference count here
- int mRefCount;
- Vector< sp<AudioEffect> >mEffects;
+ int mRefCount = 0;
+ std::vector<sp<AudioEffect>> mEffects;
};
/**
@@ -216,12 +184,11 @@
class DeviceEffects {
public:
DeviceEffects(std::unique_ptr<EffectDescVector> effectDescriptors,
- audio_devices_t device, const std::string& address) :
+ audio_devices_t device, std::string_view address) :
mEffectDescriptors(std::move(effectDescriptors)),
mDeviceType(device), mDeviceAddress(address) {}
- /*virtual*/ ~DeviceEffects() = default;
- std::vector< sp<AudioEffect> > mEffects;
+ std::vector<sp<AudioEffect>> mEffects;
audio_devices_t getDeviceType() const { return mDeviceType; }
std::string getDeviceAddress() const { return mDeviceAddress; }
const std::unique_ptr<EffectDescVector> mEffectDescriptors;
@@ -232,65 +199,81 @@
};
- static const char * const kInputSourceNames[AUDIO_SOURCE_CNT -1];
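+ // The _ll suffix indicates that both mMutex and mDeviceEffectsMutex capabilities are required.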
+ status_t loadAudioEffectConfig_ll(const sp<EffectsFactoryHalInterface>& effectsFactoryHal)
+ REQUIRES(mMutex, mDeviceEffectsMutex);
+
+ // Legacy: Begin methods below.
+ // Parse audio_effects.conf - called from constructor.
+ status_t loadAudioEffectConfigLegacy_l(const char* path) REQUIRES(mMutex);
+
+ // Legacy: Load all automatic effect configurations
+ status_t loadInputEffectConfigurations_l(cnode* root,
+ const EffectDescVector& effects) REQUIRES(mMutex);
+ status_t loadStreamEffectConfigurations_l(cnode* root,
+ const EffectDescVector& effects) REQUIRES(mMutex);
+
+ // Legacy: static methods below.
+
static audio_source_t inputSourceNameToEnum(const char *name);
- static const char *kStreamNames[AUDIO_STREAM_PUBLIC_CNT+1]; //+1 required as streams start from -1
- audio_stream_type_t streamNameToEnum(const char *name);
-
- // Parse audio_effects.conf
- status_t loadAudioEffectConfigLegacy(const char *path);
- status_t loadAudioEffectConfig(const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
+ static audio_stream_type_t streamNameToEnum(const char* name);
// Load all effects descriptors in configuration file
- status_t loadEffects(cnode *root, Vector <EffectDesc *>& effects);
- EffectDesc *loadEffect(cnode *root);
-
- // Load all automatic effect configurations
- status_t loadInputEffectConfigurations(cnode *root, const Vector <EffectDesc *>& effects);
- status_t loadStreamEffectConfigurations(cnode *root, const Vector <EffectDesc *>& effects);
- EffectDescVector *loadEffectConfig(cnode *root, const Vector <EffectDesc *>& effects);
+ static EffectDescVector loadEffects(cnode* root);
+ static std::shared_ptr<AudioPolicyEffects::EffectDesc> loadEffect(cnode* root);
+ static std::shared_ptr<EffectDescVector> loadEffectConfig(cnode* root,
+ const EffectDescVector& effects);
// Load all automatic effect parameters
- void loadEffectParameters(cnode *root, Vector <effect_param_t *>& params);
- effect_param_t *loadEffectParameter(cnode *root);
- size_t readParamValue(cnode *node,
+ static void loadEffectParameters(
+ cnode* root, std::vector<std::shared_ptr<const effect_param_t>>& params);
+
+ // loadEffectParameter returns a shared_ptr instead of a unique_ptr as there may
+ // be multiple references to the same effect parameter.
+ static std::shared_ptr<const effect_param_t> loadEffectParameter(cnode* root);
+ static size_t readParamValue(cnode* node,
char **param,
size_t *curSize,
size_t *totSize);
- size_t growParamSize(char **param,
+ static size_t growParamSize(char** param,
size_t size,
size_t *curSize,
size_t *totSize);
+ // Legacy: End methods above.
+
+ // Note: The association of Effects to audio source, session, or stream
+ // is done through std::map instead of std::unordered_map. This gives
+ // better reproducibility of issues, since map is ordered and more predictable
+ // in enumeration.
+
// protects access to mInputSources, mInputSessions, mOutputStreams, mOutputSessions
// never hold AudioPolicyService::mMutex when calling AudioPolicyEffects methods as
// those can call back into AudioPolicyService methods and try to acquire the mutex
mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kAudioPolicyEffects_Mutex};
// Automatic input effects are configured per audio_source_t
- KeyedVector< audio_source_t, EffectDescVector* > mInputSources;
- // Automatic input effects are unique for audio_io_handle_t
- KeyedVector< audio_session_t, EffectVector* > mInputSessions;
+ std::map<audio_source_t, std::shared_ptr<EffectDescVector>> mInputSources
+ GUARDED_BY(mMutex);
+ // Automatic input effects are unique for an audio_session_t.
+ std::map<audio_session_t, std::shared_ptr<EffectVector>> mInputSessions
+ GUARDED_BY(mMutex);
// Automatic output effects are organized per audio_stream_type_t
- KeyedVector< audio_stream_type_t, EffectDescVector* > mOutputStreams;
- // Automatic output effects are unique for audiosession ID
- KeyedVector< audio_session_t, EffectVector* > mOutputSessions;
+ std::map<audio_stream_type_t, std::shared_ptr<EffectDescVector>> mOutputStreams
+ GUARDED_BY(mMutex);
+ // Automatic output effects are unique for an audio_session_t.
+ std::map<audio_session_t, std::shared_ptr<EffectVector>> mOutputSessions
+ GUARDED_BY(mMutex);
/**
* @brief mDeviceEffects map of device effects indexed by the device address
*/
- std::map<std::string, std::unique_ptr<DeviceEffects>> mDeviceEffects GUARDED_BY(mMutex);
- /**
- * Device Effect initialization must be asynchronous: the audio_policy service parses and init
- * effect on first reference. AudioFlinger will handle effect creation and register these
- * effect on audio_policy service.
- * We must store the reference of the furture garantee real asynchronous operation.
- */
- std::future<void> mDefaultDeviceEffectFuture;
+ // mDeviceEffects is only populated during construction and read in initDefaultDeviceEffects();
+ // it is not accessed by the other AudioPolicyEffects methods.
+ // We keep a separate mutex here to catch future methods attempting to access this variable.
+ std::mutex mDeviceEffectsMutex;
+ std::map<std::string, std::unique_ptr<DeviceEffects>> mDeviceEffects
+ GUARDED_BY(mDeviceEffectsMutex);
};
} // namespace android
-
-#endif // ANDROID_AUDIOPOLICYEFFECTS_H
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 835a617..717640f 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -18,9 +18,6 @@
//#define LOG_NDEBUG 0
#include "Configuration.h"
-#undef __STRICT_ANSI__
-#define __STDINT_LIMITS
-#define __STDC_LIMIT_MACROS
#include <stdint.h>
#include <sys/time.h>
#include <dlfcn.h>
@@ -280,9 +277,9 @@
// load audio processing modules
const sp<EffectsFactoryHalInterface> effectsFactoryHal = EffectsFactoryHalInterface::create();
- sp<AudioPolicyEffects> audioPolicyEffects = new AudioPolicyEffects(effectsFactoryHal);
- sp<UidPolicy> uidPolicy = new UidPolicy(this);
- sp<SensorPrivacyPolicy> sensorPrivacyPolicy = new SensorPrivacyPolicy(this);
+ auto audioPolicyEffects = sp<AudioPolicyEffects>::make(effectsFactoryHal);
+ auto uidPolicy = sp<UidPolicy>::make(this);
+ auto sensorPrivacyPolicy = sp<SensorPrivacyPolicy>::make(this);
{
audio_utils::lock_guard _l(mMutex);
mAudioPolicyEffects = audioPolicyEffects;
@@ -312,9 +309,16 @@
}
}
AudioSystem::audioPolicyReady();
- // AudioFlinger will handle effect creation and register these effects on audio_policy
- // service. Hence, audio_policy service must be ready.
- audioPolicyEffects->setDefaultDeviceEffects();
+}
+
+void AudioPolicyService::onAudioSystemReady() {
+ sp<AudioPolicyEffects> audioPolicyEffects;
+ {
+ audio_utils::lock_guard _l(mMutex);
+
+ audioPolicyEffects = mAudioPolicyEffects;
+ }
+ audioPolicyEffects->initDefaultDeviceEffects();
}
void AudioPolicyService::unloadAudioPolicyManager()
@@ -2481,7 +2485,7 @@
while (command->mWaitStatus) {
nsecs_t timeOutNs = kAudioCommandTimeoutNs + milliseconds(delayMs);
if (command->mCond.wait_for(
- ul, std::chrono::nanoseconds(timeOutNs)) == std::cv_status::timeout) {
+ ul, std::chrono::nanoseconds(timeOutNs), getTid()) == std::cv_status::timeout) {
command->mStatus = TIMED_OUT;
command->mWaitStatus = false;
}
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 9a8a056..5adedc6 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -320,6 +320,9 @@
// RefBase
virtual void onFirstRef();
+ // Commence initialization when AudioSystem is ready.
+ void onAudioSystemReady();
+
//
// Helpers for the struct audio_policy_service_ops implementation.
// This is used by the audio policy manager for certain operations that
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index cc191ca..b6b9720 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -49,12 +49,12 @@
using aidl_utils::statusTFromBinderStatus;
using android::content::AttributionSourceState;
using binder::Status;
+using internal::ToString;
using media::HeadTrackingMode;
using media::Pose3f;
using media::SensorPoseProvider;
using media::audio::common::HeadTracking;
using media::audio::common::Spatialization;
-using ::android::internal::ToString;
using namespace std::chrono_literals;
@@ -348,7 +348,8 @@
bool activeLevelFound = false;
for (const auto spatializationLevel : spatializationLevels) {
if (!aidl_utils::isValidEnum(spatializationLevel)) {
- ALOGW("%s: ignoring spatializationLevel:%d", __func__, (int)spatializationLevel);
+ ALOGW("%s: ignoring spatializationLevel:%s", __func__,
+ ToString(spatializationLevel).c_str());
continue;
}
if (spatializationLevel == Spatialization::Level::NONE) {
@@ -375,7 +376,8 @@
for (const auto spatializationMode : spatializationModes) {
if (!aidl_utils::isValidEnum(spatializationMode)) {
- ALOGW("%s: ignoring spatializationMode:%d", __func__, (int)spatializationMode);
+ ALOGW("%s: ignoring spatializationMode:%s", __func__,
+ ToString(spatializationMode).c_str());
continue;
}
// we don't detect duplicates.
@@ -406,27 +408,26 @@
return BAD_VALUE;
}
- //TODO b/273373363: use AIDL enum when available
if (com::android::media::audio::dsa_over_bt_le_audio()
&& mSupportsHeadTracking) {
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED;
- std::vector<uint8_t> headtrackingConnectionModes;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
+ std::vector<HeadTracking::ConnectionMode> headtrackingConnectionModes;
status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION,
&headtrackingConnectionModes);
if (status == NO_ERROR) {
for (const auto htConnectionMode : headtrackingConnectionModes) {
- if (htConnectionMode < HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED ||
- htConnectionMode > HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL) {
- ALOGW("%s: ignoring HT connection mode:%d", __func__, (int)htConnectionMode);
+ if (htConnectionMode < HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED ||
+ htConnectionMode > HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL) {
+ ALOGW("%s: ignoring HT connection mode:%s", __func__,
+ ToString(htConnectionMode).c_str());
continue;
}
- mSupportedHeadtrackingConnectionModes.insert(
- static_cast<headtracking_connection_t> (htConnectionMode));
+ mSupportedHeadtrackingConnectionModes.insert(htConnectionMode);
}
ALOGW_IF(mSupportedHeadtrackingConnectionModes.find(
- HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED)
- == mSupportedHeadtrackingConnectionModes.end(),
- "%s: HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED not reported", __func__);
+ HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED) ==
+ mSupportedHeadtrackingConnectionModes.end(),
+ "%s: Headtracking FRAMEWORK_PROCESSED not reported", __func__);
}
}
@@ -553,12 +554,12 @@
}
audio_utils::lock_guard lock(mMutex);
*level = mLevel;
- ALOGV("%s level %d", __func__, (int)*level);
+ ALOGV("%s level %s", __func__, ToString(*level).c_str());
return Status::ok();
}
Status Spatializer::isHeadTrackingSupported(bool *supports) {
- ALOGV("%s mSupportsHeadTracking %d", __func__, mSupportsHeadTracking);
+ ALOGV("%s mSupportsHeadTracking %s", __func__, ToString(mSupportsHeadTracking).c_str());
if (supports == nullptr) {
return binderStatusFromStatusT(BAD_VALUE);
}
@@ -853,7 +854,7 @@
}
void Spatializer::onActualModeChange(HeadTrackingMode mode) {
- std::string modeStr = media::toString(mode);
+ std::string modeStr = ToString(mode);
ALOGV("%s(%s)", __func__, modeStr.c_str());
sp<AMessage> msg = new AMessage(EngineCallbackHandler::kWhatOnActualModeChange, mHandler);
msg->setInt32(EngineCallbackHandler::kModeKey, static_cast<int>(mode));
@@ -861,7 +862,7 @@
}
void Spatializer::onActualModeChangeMsg(HeadTrackingMode mode) {
- ALOGV("%s(%d)", __func__, (int) mode);
+ ALOGV("%s(%s)", __func__, ToString(mode).c_str());
sp<media::ISpatializerHeadTrackingCallback> callback;
HeadTracking::Mode spatializerMode;
{
@@ -880,7 +881,7 @@
spatializerMode = HeadTracking::Mode::RELATIVE_SCREEN;
break;
default:
- LOG_ALWAYS_FATAL("Unknown mode: %d", static_cast<int>(mode));
+ LOG_ALWAYS_FATAL("Unknown mode: %s", ToString(mode).c_str());
}
}
mActualHeadTrackingMode = spatializerMode;
@@ -894,7 +895,7 @@
}
}
callback = mHeadTrackingCallback;
- mLocalLog.log("%s: updating mode to %s", __func__, media::toString(mode).c_str());
+ mLocalLog.log("%s: updating mode to %s", __func__, ToString(mode).c_str());
}
if (callback != nullptr) {
callback->onHeadTrackingModeChanged(spatializerMode);
@@ -1052,24 +1053,23 @@
}
}
-//TODO b/273373363: use AIDL enum when available
audio_latency_mode_t Spatializer::selectHeadtrackingConnectionMode_l() {
if (!com::android::media::audio::dsa_over_bt_le_audio()) {
return AUDIO_LATENCY_MODE_LOW;
}
// mSupportedLatencyModes is ordered according to system preferences loaded in
// mOrderedLowLatencyModes
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
audio_latency_mode_t requestedLatencyMode = mSupportedLatencyModes[0];
if (requestedLatencyMode == AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE) {
if (mSupportedHeadtrackingConnectionModes.find(
- HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL)
+ HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL)
!= mSupportedHeadtrackingConnectionModes.end()) {
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL;
} else if (mSupportedHeadtrackingConnectionModes.find(
- HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_SW)
+ HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW)
!= mSupportedHeadtrackingConnectionModes.end()) {
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_SW;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW;
} else {
// if the engine does not support direct reading of IMU data, do not allow
// DYNAMIC_SPATIAL_AUDIO_HARDWARE mode and fallback to next mode
@@ -1213,7 +1213,7 @@
base::StringAppendF(&ss, " %s", ToString(mode).c_str());
}
base::StringAppendF(&ss, "], Desired: %s, Actual %s\n",
- media::toString(mDesiredHeadTrackingMode).c_str(),
+ ToString(mDesiredHeadTrackingMode).c_str(),
ToString(mActualHeadTrackingMode).c_str());
base::StringAppendF(&ss, "%smSpatializationModes: [", prefixSpace.c_str());
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 24788dc..355df18 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -486,11 +486,13 @@
bool mSupportsHeadTracking;
/** List of supported headtracking connection modes reported by the spatializer.
* If the list is empty, the spatializer does not support any optional connection
- * mode and mode HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED is assumed.
+ * mode and mode HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED is assumed.
*/
- std::unordered_set<headtracking_connection_t> mSupportedHeadtrackingConnectionModes;
+ std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
+ mSupportedHeadtrackingConnectionModes;
/** Selected HT connection mode when several modes are supported by the spatializer */
- headtracking_connection_t mHeadtrackingConnectionMode;
+ media::audio::common::HeadTracking::ConnectionMode mHeadtrackingConnectionMode =
+ media::audio::common::HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
// Looper thread for mEngine callbacks
class EngineCallbackHandler;
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index a4a0cd4..4af66bc 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -27,11 +27,11 @@
"libbase",
"libbinder",
"libcutils",
+ "libcutils",
"libhidlbase",
"liblog",
"libmedia_helper",
"libutils",
- "libcutils",
"libxml2",
],
@@ -49,21 +49,20 @@
srcs: ["audiopolicymanager_tests.cpp"],
- data: [":audiopolicytest_configuration_files",],
+ data: [":audiopolicytest_configuration_files"],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
test_suites: [
- "device-tests",
"automotive-tests",
+ "device-tests",
],
}
-
cc_test {
name: "audio_health_tests",
@@ -98,8 +97,8 @@
srcs: ["audio_health_tests.cpp"],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
test_suites: ["device-tests"],
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 74d3474..8642fd4 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -2373,6 +2373,116 @@
)
);
+namespace {
+
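+// Test client whose openOutput()/openInput() can be made to fail, to exercise
+// setDeviceConnectionState() error paths.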
+class AudioPolicyManagerTestClientOpenFails : public AudioPolicyManagerTestClient {
+ public:
+ status_t openOutput(audio_module_handle_t module,
+ audio_io_handle_t *output,
+ audio_config_t * halConfig,
+ audio_config_base_t * mixerConfig,
+ const sp<DeviceDescriptorBase>& device,
+ uint32_t * latencyMs,
+ audio_output_flags_t flags) override {
+ return mSimulateFailure ? BAD_VALUE :
+ AudioPolicyManagerTestClient::openOutput(
+ module, output, halConfig, mixerConfig, device, latencyMs, flags);
+ }
+
+ status_t openInput(audio_module_handle_t module,
+ audio_io_handle_t *input,
+ audio_config_t * config,
+ audio_devices_t * device,
+ const String8 & address,
+ audio_source_t source,
+ audio_input_flags_t flags) override {
+ return mSimulateFailure ? BAD_VALUE :
+ AudioPolicyManagerTestClient::openInput(
+ module, input, config, device, address, source, flags);
+ }
+
+ void setSimulateFailure(bool simulateFailure) { mSimulateFailure = simulateFailure; }
+
+ private:
+ bool mSimulateFailure = false;
+};
+
+} // namespace
+
+using DeviceConnectionWithFormatTestParams =
+ std::tuple<audio_devices_t /*type*/, std::string /*name*/, std::string /*address*/,
+ audio_format_t /*format*/>;
+
+class AudioPolicyManagerTestDeviceConnectionFailed :
+ public AudioPolicyManagerTestWithConfigurationFile,
+ public testing::WithParamInterface<DeviceConnectionWithFormatTestParams> {
+ protected:
+ std::string getConfigFile() override { return sBluetoothConfig; }
+ AudioPolicyManagerTestClient* getClient() override {
+ mFullClient = new AudioPolicyManagerTestClientOpenFails;
+ return mFullClient;
+ }
+ void setSimulateOpenFailure(bool simulateFailure) {
+ mFullClient->setSimulateFailure(simulateFailure); }
+
+ static const std::string sBluetoothConfig;
+
+ private:
+ AudioPolicyManagerTestClientOpenFails* mFullClient;
+};
+
+const std::string AudioPolicyManagerTestDeviceConnectionFailed::sBluetoothConfig =
+ AudioPolicyManagerTestDeviceConnectionFailed::sExecutableDir +
+ "test_audio_policy_configuration_bluetooth.xml";
+
+TEST_P(AudioPolicyManagerTestDeviceConnectionFailed, SetDeviceConnectedStateHasAddress) {
+ const audio_devices_t type = std::get<0>(GetParam());
+ const std::string name = std::get<1>(GetParam());
+ const std::string address = std::get<2>(GetParam());
+ const audio_format_t format = std::get<3>(GetParam());
+
+ EXPECT_EQ(0, mClient->getConnectedDevicePortCount());
+ EXPECT_EQ(0, mClient->getDisconnectedDevicePortCount());
+
+ setSimulateOpenFailure(true);
+ ASSERT_EQ(INVALID_OPERATION, mManager->setDeviceConnectionState(
+ type, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ address.c_str(), name.c_str(), format));
+
+ // Since the failure happens when opening input/output, the device must be connected
+ // first and then disconnected.
+ EXPECT_EQ(1, mClient->getConnectedDevicePortCount());
+ EXPECT_EQ(1, mClient->getDisconnectedDevicePortCount());
+
+ if (mClient->getConnectedDevicePortCount() > 0) {
+ auto port = mClient->getLastConnectedDevicePort();
+ EXPECT_EQ(type, port->ext.device.type);
+ EXPECT_EQ(0, strncmp(port->ext.device.address, address.c_str(),
+ AUDIO_DEVICE_MAX_ADDRESS_LEN)) << "\"" << port->ext.device.address << "\"";
+ }
+ if (mClient->getDisconnectedDevicePortCount() > 0) {
+ auto port = mClient->getLastDisconnectedDevicePort();
+ EXPECT_EQ(type, port->ext.device.type);
+ EXPECT_EQ(0, strncmp(port->ext.device.address, address.c_str(),
+ AUDIO_DEVICE_MAX_ADDRESS_LEN)) << "\"" << port->ext.device.address << "\"";
+ }
+}
+
+INSTANTIATE_TEST_CASE_P(
+ DeviceConnectionFailure,
+ AudioPolicyManagerTestDeviceConnectionFailed,
+ testing::Values(
+ DeviceConnectionWithFormatTestParams({AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET,
+ "bt_hfp_in", "00:11:22:33:44:55", AUDIO_FORMAT_DEFAULT}),
+ DeviceConnectionWithFormatTestParams({AUDIO_DEVICE_OUT_BLUETOOTH_SCO,
+ "bt_hfp_out", "00:11:22:33:44:55", AUDIO_FORMAT_DEFAULT}),
+ DeviceConnectionWithFormatTestParams({AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+ "bt_a2dp_out", "00:11:22:33:44:55", AUDIO_FORMAT_DEFAULT}),
+ DeviceConnectionWithFormatTestParams({AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+ "bt_a2dp_out", "00:11:22:33:44:66", AUDIO_FORMAT_LDAC})
+ )
+ );
+
class AudioPolicyManagerCarTest : public AudioPolicyManagerTestDynamicPolicy {
protected:
std::string getConfigFile() override { return sCarConfig; }
@@ -3283,4 +3393,4 @@
// unregister effect should succeed since effect shall have been restore on the client session
ASSERT_EQ(NO_ERROR, mManager->unregisterEffect(effectId));
-}
\ No newline at end of file
+}
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index 5e71210..535dd7a 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -11,10 +11,11 @@
name: "audiopolicytest_configuration_files",
srcs: [
"test_audio_policy_configuration.xml",
+ "test_audio_policy_configuration_bluetooth.xml",
"test_audio_policy_primary_only_configuration.xml",
"test_car_ap_atmos_offload_configuration.xml",
"test_invalid_audio_policy_configuration.xml",
- "test_tv_apm_configuration.xml",
"test_settop_box_surround_configuration.xml",
+ "test_tv_apm_configuration.xml",
],
}
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration_bluetooth.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration_bluetooth.xml
new file mode 100644
index 0000000..0cf1688
--- /dev/null
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration_bluetooth.xml
@@ -0,0 +1,157 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<audioPolicyConfiguration version="7.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <globalConfiguration speaker_drc_enabled="true"/>
+
+ <modules>
+ <!-- Primary module -->
+ <module name="primary" halVersion="2.0">
+ <attachedDevices>
+ <item>Speaker</item>
+ <item>Built-In Mic</item>
+ </attachedDevices>
+ <defaultOutputDevice>Speaker</defaultOutputDevice>
+ <mixPorts>
+ <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="primary input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bt_hfp_output" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bt_hfp_input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 11025 16000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_MONO"/>
+ </mixPort>
+ <mixPort name="voip_tx" role="sink"
+ flags="AUDIO_INPUT_FLAG_VOIP_TX">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 16000 32000 48000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+ </mixPort>
+ <mixPort name="voip_rx" role="source"
+ flags="AUDIO_OUTPUT_FLAG_VOIP_RX">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 16000 32000 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
+ </devicePort>
+ <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+ </devicePort>
+ <devicePort tagName="Hdmi" type="AUDIO_DEVICE_OUT_HDMI" role="sink"
+ encodedFormats="AUDIO_FORMAT_AC3">
+ </devicePort>
+ <devicePort tagName="Hdmi-In Mic" type="AUDIO_DEVICE_IN_HDMI" role="source">
+ </devicePort>
+ <devicePort tagName="BT SCO" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO" role="sink" />
+ <devicePort tagName="BT SCO Headset Mic" type="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET"
+ role="source" />
+ <devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink"
+ encodedFormats="AUDIO_FORMAT_SBC">
+ <profile name="" format="AUDIO_FORMAT_PCM_8_BIT"
+ samplingRates="44100 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </devicePort>
+ <devicePort tagName="USB Device Out" type="AUDIO_DEVICE_OUT_USB_DEVICE" role="sink">
+ </devicePort>
+ <devicePort tagName="USB Device In" type="AUDIO_DEVICE_IN_USB_DEVICE" role="source">
+ </devicePort>
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="Speaker"
+ sources="primary output,voip_rx"/>
+ <route type="mix" sink="primary input"
+ sources="Built-In Mic,Hdmi-In Mic,USB Device In"/>
+ <route type="mix" sink="voip_tx"
+ sources="Built-In Mic"/>
+ <route type="mix" sink="Hdmi"
+ sources="primary output"/>
+ <route type="mix" sink="BT SCO"
+ sources="mixport_bt_hfp_output"/>
+ <route type="mix" sink="mixport_bt_hfp_input"
+ sources="BT SCO Headset Mic"/>
+ <route type="mix" sink="BT A2DP Out"
+ sources="primary output"/>
+ <route type="mix" sink="USB Device Out"
+ sources="primary output"/>
+ </routes>
+ </module>
+
+ <!-- Remote Submix module -->
+ <module name="r_submix" halVersion="2.0">
+ <attachedDevices>
+ <item>Remote Submix In</item>
+ </attachedDevices>
+ <mixPorts>
+ <mixPort name="r_submix output" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="r_submix input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <devicePort tagName="Remote Submix Out" type="AUDIO_DEVICE_OUT_REMOTE_SUBMIX" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </devicePort>
+ <devicePort tagName="Remote Submix In" type="AUDIO_DEVICE_IN_REMOTE_SUBMIX" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </devicePort>
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="Remote Submix Out"
+ sources="r_submix output"/>
+ <route type="mix" sink="r_submix input"
+ sources="Remote Submix In"/>
+ </routes>
+ </module>
+
+ <!-- Software Bluetooth Module -->
+ <module name="bluetooth" halVersion="2.0">
+ <mixPorts>
+ <mixPort name="a2dp_sw_output" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="44100 48000 88200 96000"
+ channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <devicePort tagName="BTS A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="44100 48000 88200 96000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </devicePort>
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="BTS A2DP Out"
+ sources="a2dp_sw_output"/>
+ </routes>
+ </module>
+
+ </modules>
+</audioPolicyConfiguration>
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index a126f61..959bd3c 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -135,16 +135,17 @@
return NO_INIT;
}
+ // Verify ops permissions
+ res = TClientBase::startCameraOps();
+ if (res != OK) {
+ TClientBase::finishCameraOps();
+ return res;
+ }
+
res = mDevice->initialize(providerPtr, monitorTags);
if (res != OK) {
ALOGE("%s: Camera %s: unable to initialize device: %s (%d)",
__FUNCTION__, TClientBase::mCameraIdStr.c_str(), strerror(-res), res);
- return res;
- }
-
- // Verify ops permissions
- res = TClientBase::startCameraOps();
- if (res != OK) {
TClientBase::finishCameraOps();
return res;
}
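For reference, a minimal standalone sketch of the ordering the hunk above applies: start the camera-ops bookkeeping first, then run the heavier device initialization, and call the finish routine on every failure path. The names below (startOps, finishOps, initDevice) are hypothetical stand-ins, not the Camera2ClientBase API.

    #include <cstdio>

    enum Status { STATUS_OK = 0, STATUS_NO_INIT = -1 };

    // Hypothetical stand-ins for startCameraOps()/finishCameraOps() and
    // the device initialization step.
    static Status startOps() { return STATUS_OK; }
    static void finishOps() { std::puts("ops finished"); }
    static Status initDevice(bool fail) { return fail ? STATUS_NO_INIT : STATUS_OK; }

    static Status initializeClient(bool deviceFails) {
        Status res = startOps();          // verify ops permissions first
        if (res != STATUS_OK) {
            finishOps();                  // keep ops bookkeeping balanced
            return res;
        }
        res = initDevice(deviceFails);
        if (res != STATUS_OK) {
            finishOps();                  // device init failed after ops started
            return res;
        }
        return STATUS_OK;
    }

    int main() { return initializeClient(/*deviceFails=*/false) == STATUS_OK ? 0 : 1; }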
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 28b2d78..58b3e51 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -205,6 +205,8 @@
return res;
}
+ setCameraMuteLocked(mCameraMuteInitial);
+
mPreparerThread = new PreparerThread();
internalUpdateStatusLocked(STATUS_UNCONFIGURED);
@@ -3647,19 +3649,18 @@
cleanUpFailedRequests(/*sendRequestError*/ true);
// Check if any stream is abandoned.
checkAndStopRepeatingRequest();
+ // Inform waitUntilRequestProcessed thread of a failed request ID
+ wakeupLatestRequest(/*failedRequestId*/true, latestRequestId);
return true;
} else if (res != OK) {
cleanUpFailedRequests(/*sendRequestError*/ false);
+ // Inform waitUntilRequestProcessed thread of a failed request ID
+ wakeupLatestRequest(/*failedRequestId*/true, latestRequestId);
return false;
}
// Inform waitUntilRequestProcessed thread of a new request ID
- {
- Mutex::Autolock al(mLatestRequestMutex);
-
- mLatestRequestId = latestRequestId;
- mLatestRequestSignal.signal();
- }
+ wakeupLatestRequest(/*failedRequestId*/false, latestRequestId);
// Submit a batch of requests to HAL.
// Use flush lock only when submitting multilple requests in a batch.
@@ -4391,12 +4392,7 @@
hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
captureRequest->mResultExtras);
}
- {
- Mutex::Autolock al(mLatestRequestMutex);
-
- mLatestFailedRequestId = captureRequest->mResultExtras.requestId;
- mLatestRequestSignal.signal();
- }
+ wakeupLatestRequest(/*failedRequestId*/true, captureRequest->mResultExtras.requestId);
}
// Remove yet-to-be submitted inflight request from inflightMap
@@ -5058,6 +5054,20 @@
return OK;
}
+void Camera3Device::RequestThread::wakeupLatestRequest(
+ bool latestRequestFailed,
+ int32_t latestRequestId) {
+ Mutex::Autolock al(mLatestRequestMutex);
+
+ if (latestRequestFailed) {
+ mLatestFailedRequestId = latestRequestId;
+ } else {
+ mLatestRequestId = latestRequestId;
+ }
+ mLatestRequestSignal.signal();
+}
+
+
/**
* PreparerThread inner class methods
*/
@@ -5526,10 +5536,19 @@
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
+ return setCameraMuteLocked(enabled);
+}
- if (mRequestThread == nullptr || !mSupportCameraMute) {
+status_t Camera3Device::setCameraMuteLocked(bool enabled) {
+ if (mRequestThread == nullptr) {
+ mCameraMuteInitial = enabled;
+ return OK;
+ }
+
+ if (!mSupportCameraMute) {
return INVALID_OPERATION;
}
+
int32_t muteMode =
!enabled ? ANDROID_SENSOR_TEST_PATTERN_MODE_OFF :
mSupportTestPatternSolidColor ? ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR :
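The setCameraMuteLocked() split above lets a mute request arrive before the request thread exists; the value is cached in mCameraMuteInitial and replayed from initialize(). Below is a minimal sketch of that deferral pattern, using hypothetical Device/Worker names rather than the Camera3Device types.

    #include <cassert>
    #include <memory>

    // Hypothetical stand-in for the request thread.
    struct Worker {
        bool muted = false;
        void setMute(bool m) { muted = m; }
    };

    class Device {
    public:
        void setMute(bool enabled) {
            if (!mWorker) {               // worker not running yet: cache the value
                mMuteInitial = enabled;
                return;
            }
            mWorker->setMute(enabled);
        }

        void initialize() {               // worker comes up: replay the cached state
            mWorker = std::make_unique<Worker>();
            mWorker->setMute(mMuteInitial);
        }

        bool isMuted() const { return mWorker ? mWorker->muted : mMuteInitial; }

    private:
        std::unique_ptr<Worker> mWorker;
        bool mMuteInitial = false;
    };

    int main() {
        Device d;
        d.setMute(true);                  // arrives before initialize()
        d.initialize();
        assert(d.isMuted());              // cached mute state was applied
        return 0;
    }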
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index c1b173e..1820702 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -301,6 +301,15 @@
status_t setCameraMute(bool enabled);
/**
+ * Mute the camera.
+ *
+ * When muted, black image data is output on all output streams.
+ * This method assumes the caller already acquired the 'mInterfaceLock'
+ * and 'mLock' locks.
+ */
+ status_t setCameraMuteLocked(bool enabled);
+
+ /**
* Enables/disables camera service watchdog
*/
status_t setCameraServiceWatchdog(bool enabled);
@@ -1017,6 +1026,11 @@
const sp<CaptureRequest> &request,
const CameraMetadata& injectedSessionParams);
+ /**
+ * Update the latest (or latest failed) request ID under mLatestRequestMutex
+ * and signal mLatestRequestSignal to wake up waiting threads.
+ **/
+ void wakeupLatestRequest(bool latestRequestFailed, int32_t latestRequestId);
+
protected:
virtual bool threadLoop();
@@ -1499,6 +1513,10 @@
// Auto framing override value
camera_metadata_enum_android_control_autoframing mAutoframingOverride;
+ // Initial camera mute state stored before the request thread
+ // is active.
+ bool mCameraMuteInitial = false;
+
// Settings override value
int32_t mSettingsOverride; // -1 = use original, otherwise
// the settings override to use.
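The new wakeupLatestRequest() helper centralizes the update-then-signal sequence that was previously duplicated at each call site. A simplified sketch of that shape follows, using std::mutex and std::condition_variable instead of the Mutex/Condition wrappers in the diff; the waitFor() predicate is only an illustrative assumption, not the actual waitUntilRequestProcessed logic.

    #include <condition_variable>
    #include <cstdint>
    #include <mutex>

    class LatestRequestNotifier {
    public:
        // One entry point for both success and failure wakeups.
        void wakeup(bool failed, int32_t requestId) {
            std::lock_guard<std::mutex> lock(mMutex);
            if (failed) {
                mLatestFailedId = requestId;
            } else {
                mLatestId = requestId;
            }
            mSignal.notify_all();
        }

        // Blocks until the given request ID is observed as completed or failed.
        // The >= comparison is an assumption made for this sketch.
        void waitFor(int32_t requestId) {
            std::unique_lock<std::mutex> lock(mMutex);
            mSignal.wait(lock, [&] {
                return mLatestId >= requestId || mLatestFailedId >= requestId;
            });
        }

    private:
        std::mutex mMutex;
        std::condition_variable mSignal;
        int32_t mLatestId = -1;
        int32_t mLatestFailedId = -1;
    };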
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index aef6531..11ef9b7 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -1133,7 +1133,7 @@
}
void filterParameters(const CameraMetadata& src, const CameraMetadata& deviceInfo,
- int vendorTagId, CameraMetadata& dst) {
+ metadata_vendor_id_t vendorTagId, CameraMetadata& dst) {
const CameraMetadata params(src);
camera_metadata_ro_entry_t availableSessionKeys = deviceInfo.find(
ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 29e3eca..5b2ea5c 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -177,7 +177,7 @@
aidl::android::hardware::camera::device::RequestTemplate* tempId /*out*/);
void filterParameters(const CameraMetadata& src, const CameraMetadata& deviceInfo,
- int vendorTagId, CameraMetadata& dst);
+ metadata_vendor_id_t vendorTagId, CameraMetadata& dst);
constexpr int32_t MAX_SURFACES_PER_STREAM = 4;
diff --git a/services/camera/virtualcamera/VirtualCameraService.cc b/services/camera/virtualcamera/VirtualCameraService.cc
index 370a5a8..7907cdb 100644
--- a/services/camera/virtualcamera/VirtualCameraService.cc
+++ b/services/camera/virtualcamera/VirtualCameraService.cc
@@ -158,6 +158,7 @@
mVirtualCameraProvider->removeCamera(it->second);
+ mTokenToCameraName.erase(it);
return ndk::ScopedAStatus::ok();
}
diff --git a/services/camera/virtualcamera/VirtualCameraStream.cc b/services/camera/virtualcamera/VirtualCameraStream.cc
index 03da171..fad6cac 100644
--- a/services/camera/virtualcamera/VirtualCameraStream.cc
+++ b/services/camera/virtualcamera/VirtualCameraStream.cc
@@ -26,8 +26,6 @@
#include "EGL/egl.h"
#include "aidl/android/hardware/camera/device/Stream.h"
-#include "aidl/android/hardware/camera/device/StreamBuffer.h"
-#include "aidl/android/hardware/graphics/common/PixelFormat.h"
#include "aidlcommonsupport/NativeHandle.h"
#include "android/hardware_buffer.h"
#include "cutils/native_handle.h"
@@ -39,52 +37,33 @@
namespace virtualcamera {
using ::aidl::android::hardware::camera::device::Stream;
-using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::common::NativeHandle;
-using ::aidl::android::hardware::graphics::common::PixelFormat;
namespace {
-sp<GraphicBuffer> createBlobGraphicBuffer(GraphicBufferMapper& mapper,
- buffer_handle_t bufferHandle) {
- uint64_t allocationSize;
- uint64_t usage;
- uint64_t layerCount;
- if (mapper.getAllocationSize(bufferHandle, &allocationSize) != NO_ERROR ||
- mapper.getUsage(bufferHandle, &usage) != NO_ERROR ||
- mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR) {
- ALOGE("Error fetching metadata for the imported BLOB buffer handle.");
- return nullptr;
- }
-
- return sp<GraphicBuffer>::make(
- bufferHandle, GraphicBuffer::HandleWrapMethod::TAKE_HANDLE,
- allocationSize, /*height=*/1, static_cast<int>(ui::PixelFormat::BLOB),
- layerCount, usage, 0);
-}
-
-sp<GraphicBuffer> createYCbCr420GraphicBuffer(GraphicBufferMapper& mapper,
- buffer_handle_t bufferHandle) {
+sp<GraphicBuffer> createGraphicBuffer(GraphicBufferMapper& mapper,
+ const buffer_handle_t bufferHandle) {
uint64_t width;
uint64_t height;
uint64_t usage;
uint64_t layerCount;
+ ui::PixelFormat pixelFormat;
if (mapper.getWidth(bufferHandle, &width) != NO_ERROR ||
mapper.getHeight(bufferHandle, &height) != NO_ERROR ||
mapper.getUsage(bufferHandle, &usage) != NO_ERROR ||
- mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR) {
+ mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR ||
+ mapper.getPixelFormatRequested(bufferHandle, &pixelFormat) != NO_ERROR) {
ALOGE("Error fetching metadata for the imported YCbCr420 buffer handle.");
return nullptr;
}
return sp<GraphicBuffer>::make(
bufferHandle, GraphicBuffer::HandleWrapMethod::TAKE_HANDLE, width, height,
- static_cast<int>(ui::PixelFormat::YCBCR_420_888), /*layers=*/1, usage,
- width);
+ static_cast<int>(pixelFormat), layerCount, usage, width);
}
std::shared_ptr<AHardwareBuffer> importBufferInternal(
- const NativeHandle& aidlHandle, const Stream& streamConfig) {
+ const NativeHandle& aidlHandle) {
if (aidlHandle.fds.empty()) {
ALOGE("Empty handle - nothing to import");
return nullptr;
@@ -103,12 +82,9 @@
return nullptr;
}
- sp<GraphicBuffer> buf =
- streamConfig.format == PixelFormat::BLOB
- ? createBlobGraphicBuffer(mapper, bufferHandle)
- : createYCbCr420GraphicBuffer(mapper, bufferHandle);
+ sp<GraphicBuffer> buf = createGraphicBuffer(mapper, bufferHandle);
- if (buf->initCheck() != NO_ERROR) {
+ if (buf == nullptr || buf->initCheck() != NO_ERROR) {
ALOGE("Imported graphic buffer is not correctly initialized.");
return nullptr;
}
@@ -128,7 +104,7 @@
std::shared_ptr<AHardwareBuffer> VirtualCameraStream::importBuffer(
const ::aidl::android::hardware::camera::device::StreamBuffer& buffer) {
- auto hwBufferPtr = importBufferInternal(buffer.buffer, mStreamConfig);
+ auto hwBufferPtr = importBufferInternal(buffer.buffer);
if (hwBufferPtr != nullptr) {
std::lock_guard<std::mutex> lock(mLock);
mBuffers.emplace(std::piecewise_construct,
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index 8088ef0..fdb56e5 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -30,7 +30,7 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
}
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
index c96c37b..84a1ce6 100644
--- a/services/medialog/fuzzer/Android.bp
+++ b/services/medialog/fuzzer/Android.bp
@@ -30,12 +30,12 @@
"frameworks/av/services/medialog",
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-audio-fuzzing-reports@google.com",
],
componentid: 155276,
hotlists: [
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 630a436..7dc445b 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -549,7 +549,7 @@
int slot = 1;
std::stringstream ss;
- ss << "AudioPowerUsage:\n";
+ ss << "AudioPowerUsage interval " << mIntervalHours << " hours:\n";
for (const auto &item : mItems) {
if (slot >= limit - 1) {
ss << "-- AudioPowerUsage may be truncated!\n";
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
index 433332c..c6793a9 100644
--- a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -33,6 +33,8 @@
constexpr size_t kLogItemsLowWater = 1;
// high water mark
constexpr size_t kLogItemsHighWater = 2;
+constexpr size_t kMaxItemLength = 16;
+constexpr size_t kMaxApis = 64;
class MediaMetricsServiceFuzzer {
public:
@@ -304,10 +306,11 @@
}
FuzzedDataProvider fdp2 = FuzzedDataProvider(data, size);
-
- while (fdp2.remaining_bytes()) {
+ size_t apiCount = 0;
+ while (fdp2.remaining_bytes() && ++apiCount <= kMaxApis) {
// make a test item
- auto item = std::make_shared<mediametrics::Item>(fdp2.ConsumeRandomLengthString().c_str());
+ auto item = std::make_shared<mediametrics::Item>(
+ fdp2.ConsumeRandomLengthString(kMaxItemLength).c_str());
(*item).set("event", fdp2.ConsumeRandomLengthString().c_str());
// get the actions and execute them
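The kMaxApis/kMaxItemLength bounds added above keep a single fuzz input from looping or allocating indefinitely. A self-contained sketch of the same bounding pattern follows; processItem() is a hypothetical stand-in for the item construction and submission done by the real fuzzer.

    #include <fuzzer/FuzzedDataProvider.h>
    #include <cstddef>
    #include <cstdint>
    #include <string>

    constexpr size_t kMaxItemLength = 16;
    constexpr size_t kMaxApis = 64;

    static void processItem(const std::string& /*name*/, const std::string& /*event*/) {
        // Hypothetical stand-in for building and submitting a mediametrics item.
    }

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        FuzzedDataProvider fdp(data, size);
        size_t apiCount = 0;
        // Bound both the iteration count and the consumed string lengths.
        while (fdp.remaining_bytes() && ++apiCount <= kMaxApis) {
            processItem(fdp.ConsumeRandomLengthString(kMaxItemLength),
                        fdp.ConsumeRandomLengthString(kMaxItemLength));
        }
        return 0;
    }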
diff --git a/services/mediametrics/include/mediametricsservice/TimedAction.h b/services/mediametrics/include/mediametricsservice/TimedAction.h
index c7ef585..8b53ded 100644
--- a/services/mediametrics/include/mediametricsservice/TimedAction.h
+++ b/services/mediametrics/include/mediametricsservice/TimedAction.h
@@ -25,6 +25,12 @@
namespace android::mediametrics {
class TimedAction {
+ // Use system_clock instead of steady_clock to include suspend time.
+ using TimerClock = class std::chrono::system_clock;
+
+ // Define granularity of wakeup to prevent delayed events if
+ // device is suspended.
+ static constexpr auto kWakeupInterval = std::chrono::minutes(3);
public:
TimedAction() : mThread{[this](){threadLoop();}} {}
@@ -35,7 +41,7 @@
// TODO: return a handle for cancelling the action?
template <typename T> // T is in units of std::chrono::duration.
void postIn(const T& time, std::function<void()> f) {
- postAt(std::chrono::steady_clock::now() + time, f);
+ postAt(TimerClock::now() + time, f);
}
template <typename T> // T is in units of std::chrono::time_point
@@ -75,16 +81,21 @@
void threadLoop() NO_THREAD_SAFETY_ANALYSIS { // thread safety doesn't cover unique_lock
std::unique_lock l(mLock);
while (!mQuit) {
- auto sleepUntilTime = std::chrono::time_point<std::chrono::steady_clock>::max();
+ auto sleepUntilTime = std::chrono::time_point<TimerClock>::max();
if (!mMap.empty()) {
sleepUntilTime = mMap.begin()->first;
- if (sleepUntilTime <= std::chrono::steady_clock::now()) {
+ const auto now = TimerClock::now();
+ if (sleepUntilTime <= now) {
auto node = mMap.extract(mMap.begin()); // removes from mMap.
l.unlock();
node.mapped()();
l.lock();
continue;
}
+ // Bionic uses CLOCK_MONOTONIC for pthread condition waits regardless
+ // of the REALTIME clock specification, so cap the sleep at
+ // kWakeupInterval to keep a minimum wakeup granularity across suspend.
+ sleepUntilTime = std::min(sleepUntilTime, now + kWakeupInterval);
}
mCondition.wait_until(l, sleepUntilTime);
}
@@ -93,7 +104,7 @@
mutable std::mutex mLock;
std::condition_variable mCondition GUARDED_BY(mLock);
bool mQuit GUARDED_BY(mLock) = false;
- std::multimap<std::chrono::time_point<std::chrono::steady_clock>, std::function<void()>>
+ std::multimap<std::chrono::time_point<TimerClock>, std::function<void()>>
mMap GUARDED_BY(mLock); // multiple functions could execute at the same time.
// needs to be initialized after the variables above, done in constructor initializer list.
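The TimedAction change schedules against system_clock and caps each wait at kWakeupInterval, so deadlines are re-evaluated shortly after resume even though the underlying condition wait does not advance during suspend. Below is a simplified, self-contained sketch of that bounded-wait loop; it is a stand-in under those assumptions, not the TimedAction class itself.

    #include <algorithm>
    #include <chrono>
    #include <condition_variable>
    #include <functional>
    #include <map>
    #include <mutex>

    class BoundedScheduler {
        using Clock = std::chrono::system_clock;      // includes suspend time
        static constexpr auto kWakeupInterval = std::chrono::minutes(3);

    public:
        void postAt(Clock::time_point when, std::function<void()> f) {
            std::lock_guard<std::mutex> lock(mLock);
            mMap.emplace(when, std::move(f));
            mCondition.notify_all();
        }

        // One pass of the worker loop body (normally run on a dedicated thread).
        void runOnce() {
            std::unique_lock<std::mutex> lock(mLock);
            auto sleepUntil = Clock::time_point::max();
            if (!mMap.empty()) {
                sleepUntil = mMap.begin()->first;
                const auto now = Clock::now();
                if (sleepUntil <= now) {
                    auto node = mMap.extract(mMap.begin());
                    lock.unlock();
                    node.mapped()();          // run the due action unlocked
                    return;
                }
                // Cap the sleep so a monotonic condition wait cannot delay
                // execution much past the intended deadline after suspend.
                sleepUntil = std::min(sleepUntil, now + kWakeupInterval);
            }
            mCondition.wait_until(lock, sleepUntil);
        }

    private:
        std::mutex mLock;
        std::condition_variable mCondition;
        std::multimap<Clock::time_point, std::function<void()>> mMap;
    };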
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index cd00937..a8a1de1 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -109,23 +109,17 @@
return CodecBucketUnspecified;
}
-static bool getLogMessage(int hwCount, int swCount, std::stringstream& logMsg) {
- bool update = false;
- logMsg.clear();
+static std::string getLogMessage(const std::string& firstKey, const long& firstValue,
+ const std::string& secondKey, const long& secondValue) {
- if (hwCount > 0) {
- logMsg << " HW: " << hwCount;
- update = true;
+ std::stringstream logMsg;
+ if (firstValue > 0) {
+ logMsg << firstKey << firstValue;
}
- if (swCount > 0) {
- logMsg << " SW: " << swCount;
- update = true;
+ if (secondValue > 0) {
+ logMsg << secondKey << secondValue;
}
-
- if (update) {
- logMsg << " ] ";
- }
- return update;
+ return logMsg.str();
}
ResourceManagerMetrics::ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo) {
@@ -364,6 +358,15 @@
std::scoped_lock lock(mLock);
// post MediaCodecConcurrentUsageReported for this terminated pid.
pushConcurrentUsageReport(pid, uid);
+ // Remove all the metrics associated with this process.
+ std::map<int32_t, ConcurrentCodecs>::iterator it1 = mProcessConcurrentCodecsMap.find(pid);
+ if (it1 != mProcessConcurrentCodecsMap.end()) {
+ mProcessConcurrentCodecsMap.erase(it1);
+ }
+ std::map<int32_t, PixelCount>::iterator it2 = mProcessPixelsMap.find(pid);
+ if (it2 != mProcessPixelsMap.end()) {
+ mProcessPixelsMap.erase(it2);
+ }
}
void ResourceManagerMetrics::pushConcurrentUsageReport(int32_t pid, uid_t uid) {
@@ -400,24 +403,30 @@
std::stringstream peakCodecLog;
peakCodecLog << "Peak { ";
- std::stringstream logMsg;
- if (getLogMessage(peakHwAudioEncoderCount, peakSwAudioEncoderCount, logMsg)) {
- peakCodecLog << "AudioEnc[" << logMsg.str();
+ std::string logMsg;
+ logMsg = getLogMessage(" HW: ", peakHwAudioEncoderCount, " SW: ", peakSwAudioEncoderCount);
+ if (!logMsg.empty()) {
+ peakCodecLog << "AudioEnc[" << logMsg << " ] ";
}
- if (getLogMessage(peakHwAudioDecoderCount, peakSwAudioDecoderCount, logMsg)) {
- peakCodecLog << "AudioDec[" << logMsg.str();
+ logMsg = getLogMessage(" HW: ", peakHwAudioDecoderCount, " SW: ", peakSwAudioDecoderCount);
+ if (!logMsg.empty()) {
+ peakCodecLog << "AudioDec[" << logMsg << " ] ";
}
- if (getLogMessage(peakHwVideoEncoderCount, peakSwVideoEncoderCount, logMsg)) {
- peakCodecLog << "VideoEnc[" << logMsg.str();
+ logMsg = getLogMessage(" HW: ", peakHwVideoEncoderCount, " SW: ", peakSwVideoEncoderCount);
+ if (!logMsg.empty()) {
+ peakCodecLog << "VideoEnc[" << logMsg << " ] ";
}
- if (getLogMessage(peakHwVideoDecoderCount, peakSwVideoDecoderCount, logMsg)) {
- peakCodecLog << "VideoDec[" << logMsg.str();
+ logMsg = getLogMessage(" HW: ", peakHwVideoDecoderCount, " SW: ", peakSwVideoDecoderCount);
+ if (!logMsg.empty()) {
+ peakCodecLog << "VideoDec[" << logMsg << " ] ";
}
- if (getLogMessage(peakHwImageEncoderCount, peakSwImageEncoderCount, logMsg)) {
- peakCodecLog << "ImageEnc[" << logMsg.str();
+ logMsg = getLogMessage(" HW: ", peakHwImageEncoderCount, " SW: ", peakSwImageEncoderCount);
+ if (!logMsg.empty()) {
+ peakCodecLog << "ImageEnc[" << logMsg << " ] ";
}
- if (getLogMessage(peakHwImageDecoderCount, peakSwImageDecoderCount, logMsg)) {
- peakCodecLog << "ImageDec[" << logMsg.str();
+ logMsg = getLogMessage(" HW: ", peakHwImageDecoderCount, " SW: ", peakSwImageDecoderCount);
+ if (!logMsg.empty()) {
+ peakCodecLog << "ImageDec[" << logMsg << " ] ";
}
peakCodecLog << "}";
@@ -705,4 +714,114 @@
return 0;
}
+static std::string getConcurrentInstanceCount(const std::map<std::string, int>& resourceMap) {
+ if (resourceMap.empty()) {
+ return "";
+ }
+ std::stringstream concurrentInstanceInfo;
+ for (const auto& [name, count] : resourceMap) {
+ if (count > 0) {
+ concurrentInstanceInfo << " Name: " << name << " Instances: " << count << "\n";
+ }
+ }
+
+ std::string info = concurrentInstanceInfo.str();
+ if (info.empty()) {
+ return "";
+ }
+ return " Current Concurrent Codec Instances:\n" + info;
+}
+
+static std::string getAppsPixelCount(const std::map<int32_t, PixelCount>& pixelMap) {
+ if (pixelMap.empty()) {
+ return "";
+ }
+ std::stringstream pixelInfo;
+ for (const auto& [pid, pixelCount] : pixelMap) {
+ std::string logMsg = getLogMessage(" Current Pixels: ", pixelCount.mCurrent,
+ " Peak Pixels: ", pixelCount.mPeak);
+ if (!logMsg.empty()) {
+ pixelInfo << " PID[" << pid << "]: {" << logMsg << " }\n";
+ }
+ }
+
+ return " Applications Pixel Usage:\n" + pixelInfo.str();
+}
+
+static std::string getCodecUsageMetrics(const ConcurrentCodecsMap& codecsMap) {
+ int peakHwAudioEncoderCount = codecsMap[HwAudioEncoder];
+ int peakHwAudioDecoderCount = codecsMap[HwAudioDecoder];
+ int peakHwVideoEncoderCount = codecsMap[HwVideoEncoder];
+ int peakHwVideoDecoderCount = codecsMap[HwVideoDecoder];
+ int peakHwImageEncoderCount = codecsMap[HwImageEncoder];
+ int peakHwImageDecoderCount = codecsMap[HwImageDecoder];
+ int peakSwAudioEncoderCount = codecsMap[SwAudioEncoder];
+ int peakSwAudioDecoderCount = codecsMap[SwAudioDecoder];
+ int peakSwVideoEncoderCount = codecsMap[SwVideoEncoder];
+ int peakSwVideoDecoderCount = codecsMap[SwVideoDecoder];
+ int peakSwImageEncoderCount = codecsMap[SwImageEncoder];
+ int peakSwImageDecoderCount = codecsMap[SwImageDecoder];
+ std::stringstream usageMetrics;
+ std::string logMsg;
+ logMsg = getLogMessage(" HW: ", peakHwAudioEncoderCount, " SW: ", peakSwAudioEncoderCount);
+ if (!logMsg.empty()) {
+ usageMetrics << "AudioEnc[" << logMsg << " ] ";
+ }
+ logMsg = getLogMessage(" HW: ", peakHwAudioDecoderCount, " SW: ", peakSwAudioDecoderCount);
+ if (!logMsg.empty()) {
+ usageMetrics << "AudioDec[" << logMsg << " ] ";
+ }
+ logMsg = getLogMessage(" HW: ", peakHwVideoEncoderCount, " SW: ", peakSwVideoEncoderCount);
+ if (!logMsg.empty()) {
+ usageMetrics << "VideoEnc[" << logMsg << " ] ";
+ }
+ logMsg = getLogMessage(" HW: ", peakHwVideoDecoderCount, " SW: ", peakSwVideoDecoderCount);
+ if (!logMsg.empty()) {
+ usageMetrics << "VideoDec[" << logMsg << " ] ";
+ }
+ logMsg = getLogMessage(" HW: ", peakHwImageEncoderCount, " SW: ", peakSwImageEncoderCount);
+ if (!logMsg.empty()) {
+ usageMetrics << "ImageEnc[" << logMsg << " ] ";
+ }
+ logMsg = getLogMessage(" HW: ", peakHwImageDecoderCount, " SW: ", peakSwImageDecoderCount);
+ if (!logMsg.empty()) {
+ usageMetrics << "ImageDec[" << logMsg << " ] ";
+ }
+
+ return usageMetrics.str();
+}
+
+static std::string getAppsCodecUsageMetrics(
+ const std::map<int32_t, ConcurrentCodecs>& processCodecsMap) {
+ if (processCodecsMap.empty()) {
+ return "";
+ }
+ std::stringstream codecUsage;
+ std::string info;
+ for (const auto& [pid, codecMap] : processCodecsMap) {
+ codecUsage << " PID[" << pid << "]: ";
+ info = getCodecUsageMetrics(codecMap.mCurrent);
+ if (!info.empty()) {
+ codecUsage << "Current Codec Usage: { " << info << "} ";
+ }
+ info = getCodecUsageMetrics(codecMap.mPeak);
+ if (!info.empty()) {
+ codecUsage << "Peak Codec Usage: { " << info << "}";
+ }
+ codecUsage << "\n";
+ }
+
+ return " Applications Codec Usage:\n" + codecUsage.str();
+}
+
+
+std::string ResourceManagerMetrics::dump() const {
+ std::string metricsLog(" Metrics logs:\n");
+ metricsLog += getConcurrentInstanceCount(mConcurrentResourceCountMap);
+ metricsLog += getAppsPixelCount(mProcessPixelsMap);
+ metricsLog += getAppsCodecUsageMetrics(mProcessConcurrentCodecsMap);
+
+ return metricsLog;
+}
+
} // namespace android
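The refactored getLogMessage() returns an empty string when both counters are zero, so callers can skip the surrounding bracketed section entirely; dump() then concatenates the per-category helpers. A small usage sketch of that contract (counter values hardcoded for illustration):

    #include <iostream>
    #include <sstream>
    #include <string>

    static std::string getLogMessage(const std::string& firstKey, const long& firstValue,
                                     const std::string& secondKey, const long& secondValue) {
        std::stringstream logMsg;
        if (firstValue > 0) logMsg << firstKey << firstValue;
        if (secondValue > 0) logMsg << secondKey << secondValue;
        return logMsg.str();
    }

    int main() {
        std::stringstream peakCodecLog;
        peakCodecLog << "Peak { ";
        const std::string audio = getLogMessage(" HW: ", 2, " SW: ", 0);   // " HW: 2"
        if (!audio.empty()) peakCodecLog << "AudioDec[" << audio << " ] ";
        const std::string video = getLogMessage(" HW: ", 0, " SW: ", 0);   // ""
        if (!video.empty()) peakCodecLog << "VideoDec[" << video << " ] "; // skipped
        peakCodecLog << "}";
        std::cout << peakCodecLog.str() << "\n";   // Peak { AudioDec[ HW: 2 ] }
        return 0;
    }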
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
index 7a5a89f..9904f7d 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.h
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -171,6 +171,9 @@
// Get the current concurrent pixel count (associated with the video codecs) for the process.
long getCurrentConcurrentPixelCount(int pid) const;
+ // Retrieves the formatted metrics log.
+ std::string dump() const;
+
private:
ResourceManagerMetrics(const ResourceManagerMetrics&) = delete;
ResourceManagerMetrics(ResourceManagerMetrics&&) = delete;
@@ -204,9 +207,9 @@
// Map of resources (name) and number of concurrent instances
std::map<std::string, int> mConcurrentResourceCountMap;
- // Map of concurrent codes by CodecBucket across the system.
+ // Map of concurrent codecs by CodecBucket across the system.
ConcurrentCodecsMap mConcurrentCodecsMap;
- // Map of concurrent and peak codes by CodecBucket for each process/application.
+ // Map of concurrent and peak codecs by CodecBucket for each process/application.
std::map<int32_t, ConcurrentCodecs> mProcessConcurrentCodecsMap;
// Uid Observer to monitor the application termination.
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index d37d893..9c2fb7c 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -108,10 +108,17 @@
serviceLog = mServiceLog->toString(" " /* linePrefix */);
}
- // Get all the resource (and overload pid) logs
+ // Get the resource (and overload pid) log.
std::string resourceLog;
getResourceDump(resourceLog);
+ // Get all the metrics log.
+ std::string metricsLog;
+ {
+ std::scoped_lock lock{mLock};
+ metricsLog = mResourceManagerMetrics->dump();
+ }
+
const size_t SIZE = 256;
char buffer[SIZE];
snprintf(buffer, SIZE, "ResourceManagerService: %p\n", this);
@@ -123,11 +130,16 @@
supportsSecureWithNonSecureCodec);
result.append(buffer);
+ // Add resource log.
result.append(resourceLog.c_str());
+ // Add service log.
result.append(" Events logs (most recent at top):\n");
result.append(serviceLog);
+ // Add metrics log.
+ result.append(metricsLog.c_str());
+
write(fd, result.c_str(), result.size());
return OK;
}
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 6a64823..5dfec30 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -29,6 +29,9 @@
"libactivitymanager_aidl",
"server_configurable_flags",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
include_dirs: [
"frameworks/av/include",
"frameworks/av/services/mediaresourcemanager",
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 9fe06b7..e3601a1 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -69,10 +69,10 @@
"-android-cloexec-dup", // found in AAudioServiceEndpointMMAP.cpp
"-bugprone-narrowing-conversions", // found in several interface from size_t to int32_t
- "-google-readability-casting", // C++ casts not always necessary and may be verbose
- "-google-readability-todo", // do not require TODO(info)
"-google-build-using-namespace", // Reenable and fix later.
"-google-global-names-in-headers", // found in several files
+ "-google-readability-casting", // C++ casts not always necessary and may be verbose
+ "-google-readability-todo", // do not require TODO(info)
"-misc-non-private-member-variables-in-classes", // found in aidl generated files
@@ -82,26 +82,27 @@
name: "libaaudioservice_dependencies",
shared_libs: [
+ "aaudio-aidl-cpp",
+ "com.android.media.aaudio-aconfig-cc",
+ "framework-permission-aidl-cpp",
"libaaudio_internal",
"libaudioclient",
+ "libaudioclient_aidl_conversion",
"libaudioutils",
- "libmedia_helper",
- "libmediametrics",
- "libmediautils",
"libbase",
"libbinder",
"libcutils",
"liblog",
+ "libmedia_helper",
+ "libmediametrics",
+ "libmediautils",
"libutils",
- "aaudio-aidl-cpp",
- "framework-permission-aidl-cpp",
- "libaudioclient_aidl_conversion",
"packagemanager_aidl-cpp",
],
static_libs: [
"libaudioflinger",
- ]
+ ],
}
cc_library_static {
@@ -109,8 +110,8 @@
name: "libaaudioservice",
defaults: [
- "libaaudioservice_dependencies",
"latest_android_media_audio_common_types_cpp_shared",
+ "libaaudioservice_dependencies",
],
srcs: [
@@ -136,15 +137,15 @@
],
cflags: [
- "-Wthread-safety",
- "-Wno-unused-parameter",
"-Wall",
"-Werror",
+ "-Wno-unused-parameter",
+ "-Wthread-safety",
],
export_shared_lib_headers: [
- "libaaudio_internal",
"framework-permission-aidl-cpp",
+ "libaaudio_internal",
],
header_libs: [
@@ -152,8 +153,8 @@
],
include_dirs: [
- "frameworks/av/media/libnbaio/include_mono",
"frameworks/av/media/libnbaio/include",
+ "frameworks/av/media/libnbaio/include_mono",
],
tidy: true,
@@ -161,5 +162,5 @@
tidy_checks_as_errors: tidy_errors,
tidy_flags: [
"-format-style=file",
- ]
+ ],
}
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
index 0230935..36a91a9 100644
--- a/services/oboeservice/fuzzer/Android.bp
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -36,21 +36,21 @@
"oboeservice_fuzzer.cpp",
],
shared_libs: [
+ "aaudio-aidl-cpp",
+ "framework-permission-aidl-cpp",
"libaaudio_internal",
"libaudioclient",
+ "libaudioclient_aidl_conversion",
"libaudioflinger",
"libaudioutils",
- "libmedia_helper",
- "libmediametrics",
- "libmediautils",
"libbase",
"libbinder",
"libcutils",
"liblog",
+ "libmedia_helper",
+ "libmediametrics",
+ "libmediautils",
"libutils",
- "aaudio-aidl-cpp",
- "framework-permission-aidl-cpp",
- "libaudioclient_aidl_conversion",
],
static_libs: [
"libaaudioservice",