Merge "libaudioclient: fix AudioTrack blocking write after underrun" into main
diff --git a/Android.bp b/Android.bp
index 0c7ed6e..c716a06 100644
--- a/Android.bp
+++ b/Android.bp
@@ -135,6 +135,7 @@
aidl_interface {
name: "av-audio-types-aidl",
+ unstable: true,
host_supported: true,
vendor_available: true,
double_loadable: true,
@@ -154,28 +155,4 @@
sdk_version: "module_current",
},
},
- versions_with_info: [
- {
- version: "1",
- imports: ["android.hardware.audio.core-V2"],
- },
- ],
- frozen: false,
-
-}
-
-latest_av_audio_types_aidl = "av-audio-types-aidl-V2"
-
-cc_defaults {
- name: "latest_av_audio_types_aidl_ndk_shared",
- shared_libs: [
- latest_av_audio_types_aidl + "-ndk",
- ],
-}
-
-cc_defaults {
- name: "latest_av_audio_types_aidl_ndk_static",
- static_libs: [
- latest_av_audio_types_aidl + "-ndk",
- ],
}
diff --git a/aidl_api/av-audio-types-aidl/1/.hash b/aidl_api/av-audio-types-aidl/1/.hash
deleted file mode 100644
index 0002682..0000000
--- a/aidl_api/av-audio-types-aidl/1/.hash
+++ /dev/null
@@ -1 +0,0 @@
-ef1bc5ed9db445fbfc116cdec6e6ad081458ee40
diff --git a/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl
deleted file mode 100644
index a9aa2c1..0000000
--- a/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-///////////////////////////////////////////////////////////////////////////////
-// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
-///////////////////////////////////////////////////////////////////////////////
-
-// This file is a snapshot of an AIDL file. Do not edit it manually. There are
-// two cases:
-// 1). this is a frozen version file - do not edit this in any case.
-// 2). this is a 'current' file. If you make a backwards compatible change to
-// the interface (from the latest frozen version), the build system will
-// prompt you to update this file with `m <name>-update-api`.
-//
-// You must not make a backward incompatible change to any AIDL file built
-// with the aidl_interface module type with versions property set. The module
-// type is used to build AIDL files in a way that they can be used across
-// independently updatable components of the system. If a device is shipped
-// with such a backward incompatible change, it has a high risk of breaking
-// later when a module using the interface is updated, e.g., Mainline modules.
-
-package android.media.audio;
-/* @hide */
-interface IHalAdapterVendorExtension {
- @utf8InCpp String[] parseVendorParameterIds(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeys);
- void parseVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeysAndValues, out android.hardware.audio.core.VendorParameter[] syncParameters, out android.hardware.audio.core.VendorParameter[] asyncParameters);
- android.hardware.audio.core.VendorParameter[] parseBluetoothA2dpReconfigureOffload(in @utf8InCpp String rawValue);
- android.hardware.audio.core.VendorParameter[] parseBluetoothLeReconfigureOffload(in @utf8InCpp String rawValue);
- @utf8InCpp String processVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in android.hardware.audio.core.VendorParameter[] parameters);
- enum ParameterScope {
- MODULE = 0,
- STREAM = 1,
- }
-}
diff --git a/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl
deleted file mode 100644
index a9aa2c1..0000000
--- a/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-///////////////////////////////////////////////////////////////////////////////
-// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
-///////////////////////////////////////////////////////////////////////////////
-
-// This file is a snapshot of an AIDL file. Do not edit it manually. There are
-// two cases:
-// 1). this is a frozen version file - do not edit this in any case.
-// 2). this is a 'current' file. If you make a backwards compatible change to
-// the interface (from the latest frozen version), the build system will
-// prompt you to update this file with `m <name>-update-api`.
-//
-// You must not make a backward incompatible change to any AIDL file built
-// with the aidl_interface module type with versions property set. The module
-// type is used to build AIDL files in a way that they can be used across
-// independently updatable components of the system. If a device is shipped
-// with such a backward incompatible change, it has a high risk of breaking
-// later when a module using the interface is updated, e.g., Mainline modules.
-
-package android.media.audio;
-/* @hide */
-interface IHalAdapterVendorExtension {
- @utf8InCpp String[] parseVendorParameterIds(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeys);
- void parseVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeysAndValues, out android.hardware.audio.core.VendorParameter[] syncParameters, out android.hardware.audio.core.VendorParameter[] asyncParameters);
- android.hardware.audio.core.VendorParameter[] parseBluetoothA2dpReconfigureOffload(in @utf8InCpp String rawValue);
- android.hardware.audio.core.VendorParameter[] parseBluetoothLeReconfigureOffload(in @utf8InCpp String rawValue);
- @utf8InCpp String processVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in android.hardware.audio.core.VendorParameter[] parameters);
- enum ParameterScope {
- MODULE = 0,
- STREAM = 1,
- }
-}
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index 754f066..eaa5bd5 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -288,10 +288,10 @@
}
DrmStatus DrmHal::getSupportedSchemes(std::vector<uint8_t>& schemes) const {
- status_t statusResult;
- statusResult = mDrmHalAidl->getSupportedSchemes(schemes);
- if (statusResult == OK) return statusResult;
- return mDrmHalHidl->getSupportedSchemes(schemes);
+ const DrmStatus statusResultAidl = mDrmHalAidl->getSupportedSchemes(schemes);
+ const DrmStatus statusResultHidl = mDrmHalHidl->getSupportedSchemes(schemes);
+ if (statusResultHidl == OK || statusResultAidl == OK) return OK;
+ return statusResultAidl;
}
} // namespace android
diff --git a/media/OWNERS b/media/OWNERS
index b926075..5e32047 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -1,6 +1,7 @@
# Bug component: 1344
elaurent@google.com
essick@google.com
+atneya@google.com
hunga@google.com
jiabin@google.com
jmtrivi@google.com
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index 96fb3e3..af82982 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -29,6 +29,13 @@
}
flag {
+ name: "codec_availability_support"
+ namespace: "codec_fwk"
+ description: "Feature flag for codec availability HAL API implementation"
+ bug: "363282971"
+}
+
+flag {
name: "codec_buffer_state_cleanup"
namespace: "codec_fwk"
description: "Bugfix flag for more buffer state cleanup in MediaCodec"
diff --git a/media/aconfig/swcodec_flags.aconfig b/media/aconfig/swcodec_flags.aconfig
index a435a43..9dd1fdd 100644
--- a/media/aconfig/swcodec_flags.aconfig
+++ b/media/aconfig/swcodec_flags.aconfig
@@ -12,3 +12,12 @@
description: "Feature flag for APV Software C2 codec"
bug: "376770121"
}
+
+flag {
+ name: "mpeg2_keep_threads_active"
+ is_exported: true
+ is_fixed_read_only: true
+ namespace: "codec_fwk"
+ description: "Enable keep_threads_active in mpeg2 decoder"
+ bug: "343793479"
+}
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 40d5f5f..0682f65 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -76,8 +76,6 @@
using media::audio::common::AudioOffloadInfo;
using media::audio::common::AudioOutputFlags;
using media::audio::common::AudioPlaybackRate;
-using media::audio::common::AudioPolicyForcedConfig;
-using media::audio::common::AudioPolicyForceUse;
using media::audio::common::AudioPort;
using media::audio::common::AudioPortConfig;
using media::audio::common::AudioPortDeviceExt;
@@ -3322,138 +3320,6 @@
return OK;
}
-ConversionResult<audio_policy_force_use_t>
-aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(AudioPolicyForceUse aidl) {
- switch (aidl) {
- case AudioPolicyForceUse::COMMUNICATION:
- return AUDIO_POLICY_FORCE_FOR_COMMUNICATION;
- case AudioPolicyForceUse::MEDIA:
- return AUDIO_POLICY_FORCE_FOR_MEDIA;
- case AudioPolicyForceUse::RECORD:
- return AUDIO_POLICY_FORCE_FOR_RECORD;
- case AudioPolicyForceUse::DOCK:
- return AUDIO_POLICY_FORCE_FOR_DOCK;
- case AudioPolicyForceUse::SYSTEM:
- return AUDIO_POLICY_FORCE_FOR_SYSTEM;
- case AudioPolicyForceUse::HDMI_SYSTEM_AUDIO:
- return AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO;
- case AudioPolicyForceUse::ENCODED_SURROUND:
- return AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND;
- case AudioPolicyForceUse::VIBRATE_RINGING:
- return AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING;
- }
- return unexpected(BAD_VALUE);
-}
-
-ConversionResult<AudioPolicyForceUse>
-legacy2aidl_audio_policy_force_use_t_AudioPolicyForceUse(audio_policy_force_use_t legacy) {
- switch (legacy) {
- case AUDIO_POLICY_FORCE_FOR_COMMUNICATION:
- return AudioPolicyForceUse::COMMUNICATION;
- case AUDIO_POLICY_FORCE_FOR_MEDIA:
- return AudioPolicyForceUse::MEDIA;
- case AUDIO_POLICY_FORCE_FOR_RECORD:
- return AudioPolicyForceUse::RECORD;
- case AUDIO_POLICY_FORCE_FOR_DOCK:
- return AudioPolicyForceUse::DOCK;
- case AUDIO_POLICY_FORCE_FOR_SYSTEM:
- return AudioPolicyForceUse::SYSTEM;
- case AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO:
- return AudioPolicyForceUse::HDMI_SYSTEM_AUDIO;
- case AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND:
- return AudioPolicyForceUse::ENCODED_SURROUND;
- case AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING:
- return AudioPolicyForceUse::VIBRATE_RINGING;
- case AUDIO_POLICY_FORCE_USE_CNT:
- break;
- }
- return unexpected(BAD_VALUE);
-}
-
-ConversionResult<audio_policy_forced_cfg_t>
-aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(AudioPolicyForcedConfig aidl) {
- switch (aidl) {
- case AudioPolicyForcedConfig::NONE:
- return AUDIO_POLICY_FORCE_NONE;
- case AudioPolicyForcedConfig::SPEAKER:
- return AUDIO_POLICY_FORCE_SPEAKER;
- case AudioPolicyForcedConfig::HEADPHONES:
- return AUDIO_POLICY_FORCE_HEADPHONES;
- case AudioPolicyForcedConfig::BT_SCO:
- return AUDIO_POLICY_FORCE_BT_SCO;
- case AudioPolicyForcedConfig::BT_A2DP:
- return AUDIO_POLICY_FORCE_BT_A2DP;
- case AudioPolicyForcedConfig::WIRED_ACCESSORY:
- return AUDIO_POLICY_FORCE_WIRED_ACCESSORY;
- case AudioPolicyForcedConfig::BT_CAR_DOCK:
- return AUDIO_POLICY_FORCE_BT_CAR_DOCK;
- case AudioPolicyForcedConfig::BT_DESK_DOCK:
- return AUDIO_POLICY_FORCE_BT_DESK_DOCK;
- case AudioPolicyForcedConfig::ANALOG_DOCK:
- return AUDIO_POLICY_FORCE_ANALOG_DOCK;
- case AudioPolicyForcedConfig::DIGITAL_DOCK:
- return AUDIO_POLICY_FORCE_DIGITAL_DOCK;
- case AudioPolicyForcedConfig::NO_BT_A2DP:
- return AUDIO_POLICY_FORCE_NO_BT_A2DP;
- case AudioPolicyForcedConfig::SYSTEM_ENFORCED:
- return AUDIO_POLICY_FORCE_SYSTEM_ENFORCED;
- case AudioPolicyForcedConfig::HDMI_SYSTEM_AUDIO_ENFORCED:
- return AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED;
- case AudioPolicyForcedConfig::ENCODED_SURROUND_NEVER:
- return AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER;
- case AudioPolicyForcedConfig::ENCODED_SURROUND_ALWAYS:
- return AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS;
- case AudioPolicyForcedConfig::ENCODED_SURROUND_MANUAL:
- return AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL;
- case AudioPolicyForcedConfig::BT_BLE:
- return AUDIO_POLICY_FORCE_BT_BLE;
- }
- return unexpected(BAD_VALUE);
-}
-
-ConversionResult<AudioPolicyForcedConfig>
-legacy2aidl_audio_policy_forced_cfg_t_AudioPolicyForcedConfig(audio_policy_forced_cfg_t legacy) {
- switch (legacy) {
- case AUDIO_POLICY_FORCE_NONE:
- return AudioPolicyForcedConfig::NONE;
- case AUDIO_POLICY_FORCE_SPEAKER:
- return AudioPolicyForcedConfig::SPEAKER;
- case AUDIO_POLICY_FORCE_HEADPHONES:
- return AudioPolicyForcedConfig::HEADPHONES;
- case AUDIO_POLICY_FORCE_BT_SCO:
- return AudioPolicyForcedConfig::BT_SCO;
- case AUDIO_POLICY_FORCE_BT_A2DP:
- return AudioPolicyForcedConfig::BT_A2DP;
- case AUDIO_POLICY_FORCE_WIRED_ACCESSORY:
- return AudioPolicyForcedConfig::WIRED_ACCESSORY;
- case AUDIO_POLICY_FORCE_BT_CAR_DOCK:
- return AudioPolicyForcedConfig::BT_CAR_DOCK;
- case AUDIO_POLICY_FORCE_BT_DESK_DOCK:
- return AudioPolicyForcedConfig::BT_DESK_DOCK;
- case AUDIO_POLICY_FORCE_ANALOG_DOCK:
- return AudioPolicyForcedConfig::ANALOG_DOCK;
- case AUDIO_POLICY_FORCE_DIGITAL_DOCK:
- return AudioPolicyForcedConfig::DIGITAL_DOCK;
- case AUDIO_POLICY_FORCE_NO_BT_A2DP:
- return AudioPolicyForcedConfig::NO_BT_A2DP;
- case AUDIO_POLICY_FORCE_SYSTEM_ENFORCED:
- return AudioPolicyForcedConfig::SYSTEM_ENFORCED;
- case AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED:
- return AudioPolicyForcedConfig::HDMI_SYSTEM_AUDIO_ENFORCED;
- case AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER:
- return AudioPolicyForcedConfig::ENCODED_SURROUND_NEVER;
- case AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS:
- return AudioPolicyForcedConfig::ENCODED_SURROUND_ALWAYS;
- case AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL:
- return AudioPolicyForcedConfig::ENCODED_SURROUND_MANUAL;
- case AUDIO_POLICY_FORCE_BT_BLE:
- return AudioPolicyForcedConfig::BT_BLE;
- case AUDIO_POLICY_FORCE_CFG_CNT:
- break;
- }
- return unexpected(BAD_VALUE);
-}
-
} // namespace android
#undef GET_DEVICE_DESC_CONNECTION
diff --git a/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h b/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
index 9dfb7e7..7268464 100644
--- a/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
+++ b/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
@@ -58,8 +58,6 @@
#include PREFIX(android/media/audio/common/AudioMode.h)
#include PREFIX(android/media/audio/common/AudioOffloadInfo.h)
#include PREFIX(android/media/audio/common/AudioOutputFlags.h)
-#include PREFIX(android/media/audio/common/AudioPolicyForceUse.h)
-#include PREFIX(android/media/audio/common/AudioPolicyForcedConfig.h)
#include PREFIX(android/media/audio/common/AudioPort.h)
#include PREFIX(android/media/audio/common/AudioPortConfig.h)
#include PREFIX(android/media/audio/common/AudioPortExt.h)
@@ -78,7 +76,6 @@
#include <system/audio.h>
#include <system/audio_effect.h>
-#include <system/audio_policy.h>
#if defined(BACKEND_NDK_IMPL)
namespace aidl {
@@ -457,18 +454,6 @@
media::audio::common::MicrophoneInfo* aidlInfo,
media::audio::common::MicrophoneDynamicInfo* aidlDynamic);
-ConversionResult<audio_policy_forced_cfg_t>
-aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(
- media::audio::common::AudioPolicyForcedConfig aidl);
-ConversionResult<media::audio::common::AudioPolicyForcedConfig>
-legacy2aidl_audio_policy_forced_cfg_t_AudioPolicyForcedConfig(audio_policy_forced_cfg_t legacy);
-
-ConversionResult<audio_policy_force_use_t>
-aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(
- media::audio::common::AudioPolicyForceUse aidl);
-ConversionResult<media::audio::common::AudioPolicyForceUse>
-legacy2aidl_audio_policy_force_use_t_AudioPolicyForceUse(audio_policy_force_use_t legacy);
-
} // namespace android
#if defined(BACKEND_NDK_IMPL)
diff --git a/media/codec2/components/apv/C2SoftApvEnc.cpp b/media/codec2/components/apv/C2SoftApvEnc.cpp
index d6a9597..9c5e0b2 100644
--- a/media/codec2/components/apv/C2SoftApvEnc.cpp
+++ b/media/codec2/components/apv/C2SoftApvEnc.cpp
@@ -958,108 +958,98 @@
finish(workIndex, fillWork);
}
}
-void C2SoftApvEnc::createCsdData(const std::unique_ptr<C2Work>& work, oapv_bitb_t* bitb,
- uint32_t encodedSize) {
- uint32_t csdStart = 0, csdEnd = 0;
- uint32_t bitOffset = 0;
- uint8_t* buf = (uint8_t*)bitb->addr + csdStart;
- if (encodedSize == 0) {
- ALOGE("the first frame size is zero, so no csd data will be created.");
+void C2SoftApvEnc::createCsdData(const std::unique_ptr<C2Work>& work,
+ oapv_bitb_t* bitb,
+ uint32_t encodedSize) {
+ if (encodedSize < 31) {
+ ALOGE("the first frame size is too small, so no csd data will be created.");
return;
}
- ABitReader reader(buf, encodedSize);
+ ABitReader reader((uint8_t*)bitb->addr, encodedSize);
+
+ uint8_t number_of_configuration_entry = 0;
+ uint8_t pbu_type = 0;
+ uint8_t number_of_frame_info = 0;
+ bool color_description_present_flag = false;
+ bool capture_time_distance_ignored = false;
+ uint8_t profile_idc = 0;
+ uint8_t level_idc = 0;
+ uint8_t band_idc = 0;
+ uint32_t frame_width_minus1 = 0;
+ uint32_t frame_height_minus1 = 0;
+ uint8_t chroma_format_idc = 0;
+ uint8_t bit_depth_minus8 = 0;
+ uint8_t capture_time_distance = 0;
+ uint8_t color_primaries = 0;
+ uint8_t transfer_characteristics = 0;
+ uint8_t matrix_coefficients = 0;
/* pbu_header() */
- reader.skipBits(32);
- bitOffset += 32; // pbu_size
- reader.skipBits(32);
- bitOffset += 32; // currReadSize
- csdStart = bitOffset / 8;
-
- int32_t pbu_type = reader.getBits(8);
- bitOffset += 8; // pbu_type
- reader.skipBits(16);
- bitOffset += 16; // group_id
- reader.skipBits(8);
- bitOffset += 8; // reserved_zero_8bits
+ reader.skipBits(32); // pbu_size
+ reader.skipBits(32); // currReadSize
+ pbu_type = reader.getBits(8); // pbu_type
+ reader.skipBits(16); // group_id
+ reader.skipBits(8); // reserved_zero_8bits
/* frame info() */
- int32_t profile_idc = reader.getBits(8);
- bitOffset += 8; // profile_idc
- int32_t level_idc = reader.getBits(8);
- bitOffset += 8; // level_idc
- int32_t band_idc = reader.getBits(3);
- bitOffset += 3; // band_idc
- reader.skipBits(5);
- bitOffset += 5; // reserved_zero_5bits
- int32_t width = reader.getBits(32);
- bitOffset += 32; // width
- int32_t height = reader.getBits(32);
- bitOffset += 32; // height
- int32_t chroma_idc = reader.getBits(4);
- bitOffset += 4; // chroma_format_idc
- reader.skipBits(4);
- bitOffset += 4; // bit_depth
- reader.skipBits(8);
- bitOffset += 8; // capture_time_distance
- reader.skipBits(8);
- bitOffset += 8; // reserved_zero_8bits
+ profile_idc = reader.getBits(8); // profile_idc
+ level_idc = reader.getBits(8); // level_idc
+ band_idc = reader.getBits(3); // band_idc
+ reader.skipBits(5); // reserved_zero_5bits
+ frame_width_minus1 = reader.getBits(32); // width
+ frame_height_minus1 = reader.getBits(32); // height
+ chroma_format_idc = reader.getBits(4); // chroma_format_idc
+ bit_depth_minus8 = reader.getBits(4); // bit_depth
+ capture_time_distance = reader.getBits(8); // capture_time_distance
+ reader.skipBits(8); // reserved_zero_8bits
/* frame header() */
- reader.skipBits(8);
- bitOffset += 8; // reserved_zero_8bit
- bool color_description_present_flag = reader.getBits(1);
- bitOffset += 1; // color_description_present_flag
+ reader.skipBits(8); // reserved_zero_8bit
+ color_description_present_flag = reader.getBits(1); // color_description_present_flag
if (color_description_present_flag) {
- reader.skipBits(8);
- bitOffset += 8; // color_primaries
- reader.skipBits(8);
- bitOffset += 8; // transfer_characteristics
- reader.skipBits(8);
- bitOffset += 8; // matrix_coefficients
- }
- bool use_q_matrix = reader.getBits(1);
- bitOffset += 1; // use_q_matrix
- if (use_q_matrix) {
- /* quantization_matrix() */
- int32_t numComp = chroma_idc == 0 ? 1
- : chroma_idc == 2 ? 3
- : chroma_idc == 3 ? 3
- : chroma_idc == 4 ? 4
- : -1;
- int32_t needBitsForQ = 64 * 8 * numComp;
- reader.skipBits(needBitsForQ);
- bitOffset += needBitsForQ;
+ color_primaries = reader.getBits(8); // color_primaries
+ transfer_characteristics = reader.getBits(8); // transfer_characteristics
+ matrix_coefficients = reader.getBits(8); // matrix_coefficients
}
- /* tile_info() */
- int32_t tile_width_in_mbs_minus1 = reader.getBits(28);
- bitOffset += 28;
- int32_t tile_height_in_mbs_minus1 = reader.getBits(28);
- bitOffset += 28;
- bool tile_size_present_in_fh_flag = reader.getBits(1);
- bitOffset += 1;
- if (tile_size_present_in_fh_flag) {
- int32_t numTiles = ceil((double)width / (double)tile_width_in_mbs_minus1) *
- ceil((double)height / (double)tile_height_in_mbs_minus1);
- reader.skipBits(32 * numTiles);
- bitOffset += (32 * numTiles);
- }
+ number_of_configuration_entry = 1; // The real-time encoding on the device is assumed to be 1.
+ number_of_frame_info = 1; // The real-time encoding on the device is assumed to be 1.
- reader.skipBits(8);
- bitOffset += 8; // reserved_zero_8bits
+ std::vector<uint8_t> csdData;
+ csdData.push_back((uint8_t)0x1);
+ csdData.push_back(number_of_configuration_entry);
- /* byte_alignmenet() */
- while (bitOffset % 8) {
- reader.skipBits(1);
- bitOffset += 1;
+ for (uint8_t i = 0; i < number_of_configuration_entry; i++) {
+ csdData.push_back(pbu_type);
+ csdData.push_back(number_of_frame_info);
+ for (uint8_t j = 0; j < number_of_frame_info; j++) {
+ csdData.push_back((uint8_t)((color_description_present_flag << 1) |
+ capture_time_distance_ignored));
+ csdData.push_back(profile_idc);
+ csdData.push_back(level_idc);
+ csdData.push_back(band_idc);
+ csdData.push_back((uint8_t)((frame_width_minus1 >> 24) & 0xff));
+ csdData.push_back((uint8_t)((frame_width_minus1 >> 16) & 0xff));
+ csdData.push_back((uint8_t)((frame_width_minus1 >> 8) & 0xff));
+ csdData.push_back((uint8_t)(frame_width_minus1 & 0xff));
+ csdData.push_back((uint8_t)((frame_height_minus1 >> 24) & 0xff));
+ csdData.push_back((uint8_t)((frame_height_minus1 >> 16) & 0xff));
+ csdData.push_back((uint8_t)((frame_height_minus1 >> 8) & 0xff));
+ csdData.push_back((uint8_t)(frame_height_minus1 & 0xff));
+ csdData.push_back((uint8_t)(((chroma_format_idc << 4) & 0xf0) |
+ (bit_depth_minus8 & 0xf)));
+ csdData.push_back((uint8_t)(capture_time_distance));
+ if (color_description_present_flag) {
+ csdData.push_back(color_primaries);
+ csdData.push_back(transfer_characteristics);
+ csdData.push_back(matrix_coefficients);
+ }
+ }
}
- csdEnd = bitOffset / 8;
- int32_t csdSize = csdEnd - csdStart + 1;
std::unique_ptr<C2StreamInitDataInfo::output> csd =
- C2StreamInitDataInfo::output::AllocUnique(csdSize, 0u);
+ C2StreamInitDataInfo::output::AllocUnique(csdData.size(), 0u);
if (!csd) {
ALOGE("CSD allocation failed");
mSignalledError = true;
@@ -1068,10 +1058,10 @@
return;
}
- buf = buf + csdStart;
- memcpy(csd->m.value, buf, csdSize);
+ memcpy(csd->m.value, csdData.data(), csdData.size());
work->worklets.front()->output.configUpdate.push_back(std::move(csd));
}
+
c2_status_t C2SoftApvEnc::drainInternal(uint32_t drainMode,
const std::shared_ptr<C2BlockPool>& pool,
const std::unique_ptr<C2Work>& work) {
diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp
index e644ee3..ed711ee 100644
--- a/media/codec2/components/mpeg2/Android.bp
+++ b/media/codec2/components/mpeg2/Android.bp
@@ -14,11 +14,10 @@
"libcodec2_soft_sanitize_signed-defaults",
],
- cflags: [
- "-DKEEP_THREADS_ACTIVE=0",
- ],
-
srcs: ["C2SoftMpeg2Dec.cpp"],
- static_libs: ["libmpeg2dec"],
+ static_libs: [
+ "libmpeg2dec",
+ "android.media.swcodec.flags-aconfig-cc",
+ ],
}
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 52920c2..64e4bf0 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -16,11 +16,10 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftMpeg2Dec"
-#ifndef KEEP_THREADS_ACTIVE
-#define KEEP_THREADS_ACTIVE 0
-#endif
#include <log/log.h>
+#include <android_media_swcodec_flags.h>
+
#include <media/stagefright/foundation/MediaDefs.h>
#include <C2Debug.h>
@@ -320,14 +319,7 @@
c2_node_id_t id,
const std::shared_ptr<IntfImpl> &intfImpl)
: SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
- mIntf(intfImpl),
- mDecHandle(nullptr),
- mMemRecords(nullptr),
- mOutBufferDrain(nullptr),
- mIvColorformat(IV_YUV_420P),
- mWidth(320),
- mHeight(240),
- mOutIndex(0u) {
+ mIntf(intfImpl) {
// If input dump is enabled, then create an empty file
GENERATE_FILE_NAMES();
CREATE_DUMP_FILE(mInFile);
@@ -436,7 +428,7 @@
s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = sizeof(ivdext_fill_mem_rec_ip_t);
s_fill_mem_ip.u4_share_disp_buf = 0;
- s_fill_mem_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
+ s_fill_mem_ip.u4_keep_threads_active = mKeepThreadsActive;
s_fill_mem_ip.e_output_format = mIvColorformat;
s_fill_mem_ip.u4_deinterlace = 1;
s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
@@ -478,7 +470,7 @@
s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight;
s_init_ip.u4_share_disp_buf = 0;
s_init_ip.u4_deinterlace = 1;
- s_init_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
+ s_init_ip.u4_keep_threads_active = mKeepThreadsActive;
s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorformat;
s_init_op.s_ivd_init_op_t.u4_size = sizeof(ivdext_init_op_t);
@@ -571,6 +563,7 @@
status_t ret = getNumMemRecords();
if (OK != ret) return ret;
+ mKeepThreadsActive = android::media::swcodec::flags::mpeg2_keep_threads_active();
ret = fillMemRecords();
if (OK != ret) return ret;
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
index 3965bcc..6d09694 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
@@ -144,21 +144,22 @@
};
std::shared_ptr<IntfImpl> mIntf;
- iv_obj_t *mDecHandle;
- iv_mem_rec_t *mMemRecords;
- size_t mNumMemRecords;
+ iv_obj_t *mDecHandle = nullptr;
+ iv_mem_rec_t *mMemRecords = nullptr;
+ size_t mNumMemRecords = 0;
std::shared_ptr<C2GraphicBlock> mOutBlock;
- uint8_t *mOutBufferDrain;
+ uint8_t *mOutBufferDrain = nullptr;
- size_t mNumCores;
- IV_COLOR_FORMAT_T mIvColorformat;
+ size_t mNumCores = 1;
+ IV_COLOR_FORMAT_T mIvColorformat = IV_YUV_420P;
- uint32_t mWidth;
- uint32_t mHeight;
- uint32_t mStride;
- bool mSignalledOutputEos;
- bool mSignalledError;
- std::atomic_uint64_t mOutIndex;
+ uint32_t mWidth = 320;
+ uint32_t mHeight = 240;
+ uint32_t mStride = 0;
+ bool mSignalledOutputEos = false;
+ bool mSignalledError = false;
+ bool mKeepThreadsActive = false;
+ std::atomic_uint64_t mOutIndex = 0;
// Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
// converting them to C2 values for each frame
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 069d6ad..fa5ce77 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -70,6 +70,7 @@
enum platform_level_t : uint32_t; ///< platform level
enum prepend_header_mode_t : uint32_t; ///< prepend header operational modes
enum profile_t : uint32_t; ///< coding profile
+ enum resource_kind_t : uint32_t; ///< resource kinds
enum scaling_method_t : uint32_t; ///< scaling methods
enum scan_order_t : uint32_t; ///< scan orders
enum secure_mode_t : uint32_t; ///< secure/protected modes
@@ -101,6 +102,7 @@
kParamIndexMasteringDisplayColorVolume,
kParamIndexChromaOffset,
kParamIndexGopLayer,
+ kParamIndexSystemResource,
/* =================================== parameter indices =================================== */
@@ -167,6 +169,10 @@
/* Region of Interest Encoding parameters */
kParamIndexQpOffsetMapBuffer, // info-buffer, used to signal qp-offset map for a frame
+ /* resource capacity and resources excluded */
+ kParamIndexResourcesCapacity,
+ kParamIndexResourcesExcluded,
+
// deprecated
kParamIndexDelayRequest = kParamIndexDelay | C2Param::CoreIndex::IS_REQUEST_FLAG,
@@ -1257,21 +1263,114 @@
/* ----------------------------------------- resources ----------------------------------------- */
/**
- * Resources needed and resources reserved for current configuration.
- *
- * Resources are tracked as a vector of positive numbers. Available resources are defined by
- * the vendor.
- *
- * By default, no resources are reserved for a component. If resource reservation is successful,
- * the component shall be able to use those resources exclusively. If however, the component is
- * not using all of the reserved resources, those may be shared with other components.
- *
- * TODO: define some of the resources.
+ * Resource kind.
*/
-typedef C2GlobalParam<C2Tuning, C2Uint64Array, kParamIndexResourcesNeeded> C2ResourcesNeededTuning;
-typedef C2GlobalParam<C2Tuning, C2Uint64Array, kParamIndexResourcesReserved>
- C2ResourcesReservedTuning;
+C2ENUM(C2Config::resource_kind_t, uint32_t,
+ CONST,
+ PER_FRAME,
+ PER_INPUT_BLOCK,
+ PER_OUTPUT_BLOCK
+)
+
+/**
+ * Definition of a system resource use.
+ *
+ * [PROPOSED]
+ *
+ * System resources are defined by the default component store.
+ * They represent any physical or abstract entities of limited availability
+ * that are required for a component instance to execute and process work.
+ *
+ * Each defined resource has an id.
+ * The use of a resource is specified by the amount and the kind (e.g. whether the amount
+ * of resources is required for each frame processed, or whether they are required
+ * regardless of the processing rate (const amount)).
+ *
+ * Note: implementations can shadow this structure with their own custom resource
+ * structure where a uint32_t based enum is used for id.
+ * This can be used to provide a name for each resource, via parameter descriptors.
+ */
+
+struct C2SystemResourceStruct {
+ C2SystemResourceStruct(uint32_t id_,
+ C2Config::resource_kind_t kind_,
+ uint64_t amount_)
+ : id(id_), kind(kind_), amount(amount_) { }
+ uint32_t id;
+ C2Config::resource_kind_t kind;
+ uint64_t amount;
+
+ DEFINE_AND_DESCRIBE_C2STRUCT(SystemResource)
+ C2FIELD(id, "id")
+ C2FIELD(kind, "kind")
+ C2FIELD(amount, "amount")
+};
+
+/**
+ * Total system resource capacity.
+ *
+ * [PROPOSED]
+ *
+ * This setting is implemented by the default component store.
+ * The total resource capacity is specified as the maximum amount for each resource ID
+ * that is supported by the device hardware or firmware.
+ * As such, the kind must be CONST for each element.
+ */
+typedef C2GlobalParam<C2Tuning,
+ C2SimpleArrayStruct<C2SystemResourceStruct>,
+ kParamIndexResourcesCapacity> C2ResourcesCapacityTuning;
+constexpr char C2_PARAMKEY_RESOURCES_CAPACITY[] = "resources.capacity";
+
+/**
+ * Excluded system resources.
+ *
+ * [PROPOSED]
+ *
+ * This setting is implemented by the default component store.
+ * Some system resources may be used by components and not tracked by the Codec 2.0 API.
+ * This is communicated by this tuning.
+ * Excluded resources are the total resources that are used by non-Codec 2.0 components.
+ * It is specified as the excluded amount for each resource ID that is used by
+ * a non-Codec 2.0 component. As such, the kind must be CONST for each element.
+ *
+ * The platform can calculate the available resources as total capacity minus
+ * excluded resources minus the sum of needed resources for each component.
+ */
+typedef C2GlobalParam<C2Tuning,
+ C2SimpleArrayStruct<C2SystemResourceStruct>,
+ kParamIndexResourcesExcluded> C2ResourcesExcludedTuning;
+constexpr char C2_PARAMKEY_RESOURCES_EXCLUDED[] = "resources.excluded";
+
+/**
+ * System resources needed for the current configuration.
+ *
+ * [PROPOSED]
+ *
+ * Resources are tracked as a list of individual resource use specifications.
+ * The resource kind can be CONST, PER_FRAME, PER_INPUT_BLOCK or PER_OUTPUT_BLOCK.
+ */
+typedef C2GlobalParam<C2Tuning,
+ C2SimpleArrayStruct<C2SystemResourceStruct>,
+ kParamIndexResourcesNeeded> C2ResourcesNeededTuning;
constexpr char C2_PARAMKEY_RESOURCES_NEEDED[] = "resources.needed";
+
+/**
+ * System resources reserved for this component.
+ *
+ * [FUTURE]
+ *
+ * This allows the platform to set aside system resources for the component.
+ * Since this is a static resource reservation, kind must be CONST for each element.
+ * This resource reservation only considers CONST and PER_FRAME use.
+ *
+ * By default, no resources are reserved for a component.
+ * If resource reservation is successful, the component shall be able to use those
+ * resources exclusively. If however, the component is not using all of the
+ * reserved resources, those may be shared with other components.
+ */
+typedef C2GlobalParam<C2Tuning,
+ C2SimpleArrayStruct<C2SystemResourceStruct>,
+ kParamIndexResourcesReserved> C2ResourcesReservedTuning;
constexpr char C2_PARAMKEY_RESOURCES_RESERVED[] = "resources.reserved";
/**
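
As a rough illustration of the resource tunings introduced above (a minimal sketch, not part of this change): a component or component store could fill C2ResourcesNeededTuning with C2SystemResourceStruct entries along these lines, assuming the usual C2Param flexible-array helpers (AllocUnique and the m.values member) and purely hypothetical resource ids and amounts.

#include <memory>
#include <C2Config.h>

// Hypothetical sketch: report one constant hardware instance (resource id 1) and a
// per-frame bandwidth-like resource (resource id 2). Ids and amounts are invented
// for illustration only; real values are defined by the default component store.
std::unique_ptr<C2ResourcesNeededTuning> makeNeededResources() {
    std::unique_ptr<C2ResourcesNeededTuning> needed =
            C2ResourcesNeededTuning::AllocUnique(2u /* flexCount */);
    needed->m.values[0].id = 1u;                     // e.g. one decoder HW instance
    needed->m.values[0].kind = C2Config::CONST;      // needed regardless of frame rate
    needed->m.values[0].amount = 1u;
    needed->m.values[1].id = 2u;                     // e.g. a per-frame throughput budget
    needed->m.values[1].kind = C2Config::PER_FRAME;  // scales with processing rate
    needed->m.values[1].amount = 8160u;
    return needed;
}

The platform side can then derive per-id availability as capacity minus excluded minus the sum of each component's needed amounts, as described in the comments above.
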
diff --git a/media/codec2/hal/client/Android.bp b/media/codec2/hal/client/Android.bp
index 864eeb8..029044f 100644
--- a/media/codec2/hal/client/Android.bp
+++ b/media/codec2/hal/client/Android.bp
@@ -23,6 +23,7 @@
name: "libcodec2_client",
srcs: [
+ "ApexCodecsLazy.cpp",
"GraphicBufferAllocator.cpp",
"GraphicsTracker.cpp",
"client.cpp",
@@ -41,17 +42,18 @@
cpp_std: "gnu++20",
header_libs: [
+ "libapexcodecs-header",
"libcodec2_internal", // private
],
shared_libs: [
"android.hardware.graphics.bufferqueue@1.0",
+ "android.hardware.media.bufferpool2-V2-ndk",
"android.hardware.media.bufferpool@2.0",
+ "android.hardware.media.c2-V1-ndk",
"android.hardware.media.c2@1.0",
"android.hardware.media.c2@1.1",
"android.hardware.media.c2@1.2",
- "android.hardware.media.bufferpool2-V2-ndk",
- "android.hardware.media.c2-V1-ndk",
"libbase",
"libbinder",
"libbinder_ndk",
@@ -79,6 +81,10 @@
"include",
],
+ export_header_lib_headers: [
+ "libapexcodecs-header",
+ ],
+
export_shared_lib_headers: [
"android.hardware.media.c2@1.0",
"android.hardware.media.c2@1.1",
@@ -89,5 +95,4 @@
"libcodec2_hidl_client@1.2",
"libcodec2_vndk",
],
-
}
diff --git a/media/codec2/hal/client/ApexCodecsLazy.cpp b/media/codec2/hal/client/ApexCodecsLazy.cpp
new file mode 100644
index 0000000..cd7953e
--- /dev/null
+++ b/media/codec2/hal/client/ApexCodecsLazy.cpp
@@ -0,0 +1,295 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ApexCodecsLazy"
+#include <log/log.h>
+
+#include <mutex>
+
+#include <dlfcn.h>
+
+#include <android-base/no_destructor.h>
+#include <apex/ApexCodecs.h>
+#include <utils/RWLock.h>
+
+using android::RWLock;
+
+namespace {
+
+// This file provides a lazy interface to libapexcodecs.so to address early boot dependencies.
+
+// Method pointers to libapexcodecs methods are held in an array which simplifies checking
+// all pointers are initialized.
+enum MethodIndex {
+ k_ApexCodec_Component_create,
+ k_ApexCodec_Component_destroy,
+ k_ApexCodec_Component_flush,
+ k_ApexCodec_Component_getConfigurable,
+ k_ApexCodec_Component_process,
+ k_ApexCodec_Component_start,
+ k_ApexCodec_Component_reset,
+ k_ApexCodec_Configurable_config,
+ k_ApexCodec_Configurable_query,
+ k_ApexCodec_Configurable_querySupportedParams,
+ k_ApexCodec_Configurable_querySupportedValues,
+ k_ApexCodec_GetComponentStore,
+ k_ApexCodec_ParamDescriptors_getDescriptor,
+ k_ApexCodec_ParamDescriptors_getIndices,
+ k_ApexCodec_ParamDescriptors_release,
+ k_ApexCodec_SettingResults_getResultAtIndex,
+ k_ApexCodec_SettingResults_release,
+ k_ApexCodec_SupportedValues_getTypeAndValues,
+ k_ApexCodec_SupportedValues_release,
+ k_ApexCodec_Traits_get,
+
+ // Marker for count of methods
+ k_MethodCount
+};
+
+class ApexCodecsLazyLoader {
+public:
+ ApexCodecsLazyLoader() = default;
+
+ static ApexCodecsLazyLoader &Get() {
+ static ::android::base::NoDestructor<ApexCodecsLazyLoader> sLoader;
+ return *sLoader;
+ }
+
+ void *getMethodAt(enum MethodIndex index) {
+ RWLock::AutoRLock l(mLock);
+ if (mInit) {
+ return mMethods[index];
+ } else {
+ mLock.unlock();
+ if (!init()) {
+ return nullptr;
+ }
+ mLock.readLock();
+ return mMethods[index];
+ }
+ }
+
+private:
+ static void* LoadLibapexcodecs(int dlopen_flags) {
+ return dlopen("libapexcodecs.so", dlopen_flags);
+ }
+
+ // Initialization and symbol binding.
+ void bindSymbol_l(void* handle, const char* name, enum MethodIndex index) {
+ void* symbol = dlsym(handle, name);
+ ALOGI_IF(symbol == nullptr, "Failed to find symbol '%s' in libapexcodecs.so: %s",
+ name, dlerror());
+ mMethods[index] = symbol;
+ }
+
+ bool init() {
+ {
+ RWLock::AutoRLock l(mLock);
+ if (mInit) {
+ return true;
+ }
+ }
+ void* handle = LoadLibapexcodecs(RTLD_NOW);
+ if (handle == nullptr) {
+ ALOGI("Failed to load libapexcodecs.so: %s", dlerror());
+ return false;
+ }
+
+ RWLock::AutoWLock l(mLock);
+#undef BIND_SYMBOL
+#define BIND_SYMBOL(name) bindSymbol_l(handle, #name, k_##name);
+ BIND_SYMBOL(ApexCodec_Component_create);
+ BIND_SYMBOL(ApexCodec_Component_destroy);
+ BIND_SYMBOL(ApexCodec_Component_flush);
+ BIND_SYMBOL(ApexCodec_Component_getConfigurable);
+ BIND_SYMBOL(ApexCodec_Component_process);
+ BIND_SYMBOL(ApexCodec_Component_start);
+ BIND_SYMBOL(ApexCodec_Component_reset);
+ BIND_SYMBOL(ApexCodec_Configurable_config);
+ BIND_SYMBOL(ApexCodec_Configurable_query);
+ BIND_SYMBOL(ApexCodec_Configurable_querySupportedParams);
+ BIND_SYMBOL(ApexCodec_Configurable_querySupportedValues);
+ BIND_SYMBOL(ApexCodec_GetComponentStore);
+ BIND_SYMBOL(ApexCodec_ParamDescriptors_getDescriptor);
+ BIND_SYMBOL(ApexCodec_ParamDescriptors_getIndices);
+ BIND_SYMBOL(ApexCodec_ParamDescriptors_release);
+ BIND_SYMBOL(ApexCodec_SettingResults_getResultAtIndex);
+ BIND_SYMBOL(ApexCodec_SettingResults_release);
+ BIND_SYMBOL(ApexCodec_SupportedValues_getTypeAndValues);
+ BIND_SYMBOL(ApexCodec_SupportedValues_release);
+ BIND_SYMBOL(ApexCodec_Traits_get);
+#undef BIND_SYMBOL
+
+ // Check every symbol is bound.
+ for (int i = 0; i < k_MethodCount; ++i) {
+ if (mMethods[i] == nullptr) {
+ ALOGI("Uninitialized method in libapexcodecs_lazy at index: %d", i);
+ return false;
+ }
+ }
+ mInit = true;
+ return true;
+ }
+
+ RWLock mLock;
+ // Table of methods pointers in libapexcodecs APIs.
+ void* mMethods[k_MethodCount];
+ bool mInit{false};
+};
+
+} // anonymous namespace
+
+#define INVOKE_METHOD(name, returnIfNull, args...) \
+ do { \
+ void* method = ApexCodecsLazyLoader::Get().getMethodAt(k_##name); \
+ if (!method) return (returnIfNull); \
+ return reinterpret_cast<decltype(&name)>(method)(args); \
+ } while (0)
+
+//
+// Forwarding for methods in ApexCodecs.h.
+//
+
+ApexCodec_ComponentStore *ApexCodec_GetComponentStore() {
+ INVOKE_METHOD(ApexCodec_GetComponentStore, nullptr);
+}
+
+ApexCodec_ComponentTraits *ApexCodec_Traits_get(
+ ApexCodec_ComponentStore *store, size_t index) {
+ INVOKE_METHOD(ApexCodec_Traits_get, nullptr, store, index);
+}
+
+ApexCodec_Status ApexCodec_Component_create(
+ ApexCodec_ComponentStore *store, const char *name, ApexCodec_Component **comp) {
+ INVOKE_METHOD(ApexCodec_Component_create, APEXCODEC_STATUS_OMITTED, store, name, comp);
+}
+
+void ApexCodec_Component_destroy(ApexCodec_Component *comp) {
+ INVOKE_METHOD(ApexCodec_Component_destroy, void(), comp);
+}
+
+ApexCodec_Status ApexCodec_Component_start(ApexCodec_Component *comp) {
+ INVOKE_METHOD(ApexCodec_Component_start, APEXCODEC_STATUS_OMITTED, comp);
+}
+
+ApexCodec_Status ApexCodec_Component_flush(ApexCodec_Component *comp) {
+ INVOKE_METHOD(ApexCodec_Component_flush, APEXCODEC_STATUS_OMITTED, comp);
+}
+
+ApexCodec_Status ApexCodec_Component_reset(ApexCodec_Component *comp) {
+ INVOKE_METHOD(ApexCodec_Component_reset, APEXCODEC_STATUS_OMITTED, comp);
+}
+
+ApexCodec_Configurable *ApexCodec_Component_getConfigurable(
+ ApexCodec_Component *comp) {
+ INVOKE_METHOD(ApexCodec_Component_getConfigurable, nullptr, comp);
+}
+
+ApexCodec_Status ApexCodec_SupportedValues_getTypeAndValues(
+ ApexCodec_SupportedValues *supportedValues,
+ ApexCodec_SupportedValuesType *type,
+ ApexCodec_SupportedValuesNumberType *numberType,
+ ApexCodec_Value **values,
+ uint32_t *numValues) {
+ INVOKE_METHOD(ApexCodec_SupportedValues_getTypeAndValues, APEXCODEC_STATUS_OMITTED,
+ supportedValues, type, numberType, values, numValues);
+}
+
+void ApexCodec_SupportedValues_release(ApexCodec_SupportedValues *values) {
+ INVOKE_METHOD(ApexCodec_SupportedValues_release, void(), values);
+}
+
+ApexCodec_Status ApexCodec_SettingResults_getResultAtIndex(
+ ApexCodec_SettingResults *results,
+ size_t index,
+ ApexCodec_SettingResultFailure *failure,
+ ApexCodec_ParamFieldValues *field,
+ ApexCodec_ParamFieldValues **conflicts,
+ size_t *numConflicts) {
+ INVOKE_METHOD(ApexCodec_SettingResults_getResultAtIndex, APEXCODEC_STATUS_OMITTED,
+ results, index, failure, field, conflicts, numConflicts);
+}
+
+void ApexCodec_SettingResults_release(ApexCodec_SettingResults *results) {
+ INVOKE_METHOD(ApexCodec_SettingResults_release, void(), results);
+}
+
+ApexCodec_Status ApexCodec_Component_process(
+ ApexCodec_Component *comp,
+ const ApexCodec_Buffer *input,
+ ApexCodec_Buffer *output,
+ size_t *consumed,
+ size_t *produced) {
+ INVOKE_METHOD(ApexCodec_Component_process, APEXCODEC_STATUS_OMITTED,
+ comp, input, output, consumed, produced);
+}
+
+ApexCodec_Status ApexCodec_Configurable_config(
+ ApexCodec_Configurable *comp,
+ ApexCodec_LinearBuffer *config,
+ ApexCodec_SettingResults **results) {
+ INVOKE_METHOD(ApexCodec_Configurable_config, APEXCODEC_STATUS_OMITTED, comp, config, results);
+}
+
+ApexCodec_Status ApexCodec_Configurable_query(
+ ApexCodec_Configurable *comp,
+ uint32_t indices[],
+ size_t numIndices,
+ ApexCodec_LinearBuffer *config,
+ size_t *writtenOrRequested) {
+ INVOKE_METHOD(ApexCodec_Configurable_query, APEXCODEC_STATUS_OMITTED,
+ comp, indices, numIndices, config, writtenOrRequested);
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_getIndices(
+ ApexCodec_ParamDescriptors *descriptors,
+ uint32_t **indices,
+ size_t *numIndices) {
+ INVOKE_METHOD(ApexCodec_ParamDescriptors_getIndices, APEXCODEC_STATUS_OMITTED,
+ descriptors, indices, numIndices);
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_getDescriptor(
+ ApexCodec_ParamDescriptors *descriptors,
+ uint32_t index,
+ ApexCodec_ParamAttribute *attr,
+ const char **name,
+ uint32_t **dependencies,
+ size_t *numDependencies) {
+ INVOKE_METHOD(ApexCodec_ParamDescriptors_getDescriptor, APEXCODEC_STATUS_OMITTED,
+ descriptors, index, attr, name, dependencies, numDependencies);
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_release(
+ ApexCodec_ParamDescriptors *descriptors) {
+ INVOKE_METHOD(ApexCodec_ParamDescriptors_release, APEXCODEC_STATUS_OMITTED, descriptors);
+}
+
+ApexCodec_Status ApexCodec_Configurable_querySupportedParams(
+ ApexCodec_Configurable *comp,
+ ApexCodec_ParamDescriptors **descriptors) {
+ INVOKE_METHOD(ApexCodec_Configurable_querySupportedParams, APEXCODEC_STATUS_OMITTED,
+ comp, descriptors);
+}
+
+ApexCodec_Status ApexCodec_Configurable_querySupportedValues(
+ ApexCodec_Configurable *comp,
+ ApexCodec_SupportedValuesQuery *queries,
+ size_t numQueries) {
+ INVOKE_METHOD(ApexCodec_Configurable_querySupportedValues, APEXCODEC_STATUS_OMITTED,
+ comp, queries, numQueries);
+}
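
Because every ApexCodecs.h entry point is forwarded through the lazy loader above, callers never touch dlopen directly; a missing or incomplete libapexcodecs.so simply surfaces as a null pointer or APEXCODEC_STATUS_OMITTED. A minimal caller-side sketch (not part of this change; the helper name is hypothetical):

#include <apex/ApexCodecs.h>

// Hypothetical probe for whether the APEX codec store is usable on this device.
// The first call performs the lazy dlopen("libapexcodecs.so"); if the library or
// any expected symbol is missing, the forwarder returns nullptr instead of aborting.
bool isApexCodecStoreAvailable() {
    return ApexCodec_GetComponentStore() != nullptr;
}
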
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 6348e42..9ee9b9e 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -20,6 +20,8 @@
#include <android-base/logging.h>
#include <utils/Trace.h>
+#include <android_media_codec.h>
+
#include <codec2/aidl/GraphicBufferAllocator.h>
#include <codec2/common/HalSelection.h>
#include <codec2/hidl/client.h>
@@ -55,7 +57,9 @@
#include <android/binder_ibinder.h>
#include <android/binder_manager.h>
#include <android-base/properties.h>
+#include <android-base/scopeguard.h>
#include <android-base/stringprintf.h>
+#include <apex/ApexCodecs.h>
#include <bufferpool/ClientManager.h>
#include <bufferpool2/ClientManager.h>
#include <codec2/aidl/BufferTypes.h>
@@ -64,14 +68,14 @@
#include <codec2/hidl/1.1/types.h>
#include <codec2/hidl/1.2/types.h>
#include <codec2/hidl/output.h>
-
#include <cutils/native_handle.h>
#include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
#include <gui/bufferqueue/2.0/H2BGraphicBufferProducer.h>
#include <hardware/gralloc.h> // for GRALLOC_USAGE_*
#include <hidl/HidlSupport.h>
-#include <system/window.h> // for NATIVE_WINDOW_QUERY_*
#include <media/stagefright/foundation/ADebug.h> // for asString(status_t)
+#include <private/android/AHardwareBufferHelpers.h>
+#include <system/window.h> // for NATIVE_WINDOW_QUERY_*
#include <deque>
#include <iterator>
@@ -799,6 +803,386 @@
return status;
}
+// Codec2ConfigurableClient::ApexImpl
+
+struct Codec2ConfigurableClient::ApexImpl : public Codec2ConfigurableClient::ImplBase {
+ ApexImpl(ApexCodec_Configurable *base, const C2String &name);
+
+ const C2String& getName() const override {
+ return mName;
+ }
+
+ c2_status_t query(
+ const std::vector<C2Param*>& stackParams,
+ const std::vector<C2Param::Index> &heapParamIndices,
+ c2_blocking_t mayBlock,
+ std::vector<std::unique_ptr<C2Param>>* const heapParams) const override;
+
+ c2_status_t config(
+ const std::vector<C2Param*> ¶ms,
+ c2_blocking_t mayBlock,
+ std::vector<std::unique_ptr<C2SettingResult>>* const failures) override;
+
+ c2_status_t querySupportedParams(
+ std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
+ ) const override;
+
+ c2_status_t querySupportedValues(
+ std::vector<C2FieldSupportedValuesQuery>& fields,
+ c2_blocking_t mayBlock) const override;
+
+private:
+ ApexCodec_Configurable* mBase;
+ const C2String mName;
+};
+
+Codec2ConfigurableClient::ApexImpl::ApexImpl(ApexCodec_Configurable *base, const C2String &name)
+ : mBase{base},
+ mName{name} {
+}
+
+c2_status_t Codec2ConfigurableClient::ApexImpl::query(
+ const std::vector<C2Param*> &stackParams,
+ const std::vector<C2Param::Index> &heapParamIndices,
+ [[maybe_unused]] c2_blocking_t mayBlock,
+ std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
+ if (mBase == nullptr) {
+ return C2_OMITTED;
+ }
+
+ if (__builtin_available(android 36, *)) {
+ std::vector<uint32_t> indices(
+ stackParams.size() + heapParamIndices.size());
+ size_t numIndices = 0;
+ for (C2Param* const& stackParam : stackParams) {
+ if (!stackParam) {
+ LOG(WARNING) << "query -- null stack param encountered.";
+ continue;
+ }
+ indices[numIndices++] = uint32_t(stackParam->index());
+ }
+ size_t numStackIndices = numIndices;
+ for (const C2Param::Index& index : heapParamIndices) {
+ indices[numIndices++] = uint32_t(index);
+ }
+ indices.resize(numIndices);
+ if (heapParams) {
+ heapParams->reserve(heapParams->size() + numIndices);
+ }
+ if (numIndices == 0) {
+ return C2_OK;
+ }
+ thread_local std::vector<uint8_t> configBuffer(1024);
+ if (configBuffer.capacity() < numIndices * 16u) {
+ configBuffer.resize(numIndices * 16u);
+ }
+ ApexCodec_LinearBuffer config{configBuffer.data(), configBuffer.capacity()};
+ size_t writtenOrRequested = 0;
+ ApexCodec_Status status = ApexCodec_Configurable_query(
+ mBase, indices.data(), indices.size(), &config, &writtenOrRequested);
+ if (status == APEXCODEC_STATUS_NO_MEMORY) {
+ size_t requested = writtenOrRequested;
+ configBuffer.resize(align(requested, 1024));
+ config.data = configBuffer.data();
+ config.size = configBuffer.capacity();
+ status = ApexCodec_Configurable_query(
+ mBase, indices.data(), indices.size(), &config, &writtenOrRequested);
+ }
+ size_t written = writtenOrRequested;
+ if (status != APEXCODEC_STATUS_OK && status != APEXCODEC_STATUS_BAD_INDEX) {
+ written = 0;
+ }
+ configBuffer.resize(written);
+ std::vector<C2Param*> paramPointers;
+ if (!::android::parseParamsBlob(¶mPointers, configBuffer)) {
+ LOG(ERROR) << "query -- error while parsing params.";
+ return C2_CORRUPTED;
+ }
+ size_t i = 0;
+ size_t numQueried = 0;
+ for (auto it = paramPointers.begin(); it != paramPointers.end(); ) {
+ C2Param* paramPointer = *it;
+ if (numStackIndices > 0) {
+ --numStackIndices;
+ if (!paramPointer) {
+ LOG(DEBUG) << "query -- null stack param.";
+ ++it;
+ continue;
+ }
+ for (; i < stackParams.size() && !stackParams[i]; ) {
+ ++i;
+ }
+ if (i >= stackParams.size()) {
+ LOG(ERROR) << "query -- unexpected error.";
+ status = APEXCODEC_STATUS_CORRUPTED;
+ break;
+ }
+ if (stackParams[i]->index() != paramPointer->index()) {
+ LOG(DEBUG) << "query -- param skipped: "
+ "index = "
+ << stackParams[i]->index() << ".";
+ stackParams[i++]->invalidate();
+ // this means that the param could not be queried.
+ // signalling C2_BAD_INDEX to the client.
+ status = APEXCODEC_STATUS_BAD_INDEX;
+ continue;
+ }
+ if (stackParams[i++]->updateFrom(*paramPointer)) {
+ ++numQueried;
+ } else {
+ LOG(WARNING) << "query -- param update failed: "
+ "index = "
+ << paramPointer->index() << ".";
+ }
+ } else {
+ if (!paramPointer) {
+ LOG(DEBUG) << "query -- null heap param.";
+ ++it;
+ continue;
+ }
+ if (!heapParams) {
+ LOG(WARNING) << "query -- "
+ "unexpected extra stack param.";
+ } else {
+ heapParams->emplace_back(C2Param::Copy(*paramPointer));
+ ++numQueried;
+ }
+ }
+ ++it;
+ }
+ if (status == APEXCODEC_STATUS_OK && indices.size() != numQueried) {
+ status = APEXCODEC_STATUS_BAD_INDEX;
+ }
+ return (c2_status_t)status;
+ } else {
+ return C2_OMITTED;
+ }
+}
+
+namespace {
+struct ParamOrField : public C2ParamField {
+ explicit ParamOrField(const ApexCodec_ParamFieldValues& field)
+ : C2ParamField(field.index, field.offset, field.size) {}
+};
+
+static bool FromApex(
+ ApexCodec_SupportedValues *apexValues,
+ C2FieldSupportedValues* c2Values) {
+ if (__builtin_available(android 36, *)) {
+ if (apexValues == nullptr) {
+ c2Values->type = C2FieldSupportedValues::EMPTY;
+ return true;
+ }
+ ApexCodec_SupportedValuesType type = APEXCODEC_SUPPORTED_VALUES_EMPTY;
+ ApexCodec_SupportedValuesNumberType numberType = APEXCODEC_SUPPORTED_VALUES_TYPE_NONE;
+ ApexCodec_Value* values = nullptr;
+ uint32_t numValues = 0;
+ ApexCodec_SupportedValues_getTypeAndValues(
+ apexValues, &type, &numberType, &values, &numValues);
+ c2Values->type = (C2FieldSupportedValues::type_t)type;
+ std::function<C2Value::Primitive(const ApexCodec_Value &)> getPrimitive;
+ switch (numberType) {
+ case APEXCODEC_SUPPORTED_VALUES_TYPE_NONE:
+ getPrimitive = [](const ApexCodec_Value &) -> C2Value::Primitive {
+ return C2Value::Primitive();
+ };
+ break;
+ case APEXCODEC_SUPPORTED_VALUES_TYPE_INT32:
+ getPrimitive = [](const ApexCodec_Value &value) -> C2Value::Primitive {
+ return C2Value::Primitive(value.i32);
+ };
+ break;
+ case APEXCODEC_SUPPORTED_VALUES_TYPE_UINT32:
+ getPrimitive = [](const ApexCodec_Value &value) -> C2Value::Primitive {
+ return C2Value::Primitive(value.u32);
+ };
+ break;
+ case APEXCODEC_SUPPORTED_VALUES_TYPE_INT64:
+ getPrimitive = [](const ApexCodec_Value &value) -> C2Value::Primitive {
+ return C2Value::Primitive(value.i64);
+ };
+ break;
+ case APEXCODEC_SUPPORTED_VALUES_TYPE_UINT64:
+ getPrimitive = [](const ApexCodec_Value &value) -> C2Value::Primitive {
+ return C2Value::Primitive(value.u64);
+ };
+ break;
+ case APEXCODEC_SUPPORTED_VALUES_TYPE_FLOAT:
+ getPrimitive = [](const ApexCodec_Value &value) -> C2Value::Primitive {
+ return C2Value::Primitive(value.f);
+ };
+ break;
+ default:
+ LOG(ERROR) << "Unsupported number type: " << numberType;
+ return false;
+ }
+ switch (type) {
+ case APEXCODEC_SUPPORTED_VALUES_EMPTY:
+ break;
+ case APEXCODEC_SUPPORTED_VALUES_RANGE:
+ c2Values->range.min = getPrimitive(values[0]);
+ c2Values->range.max = getPrimitive(values[1]);
+ c2Values->range.step = getPrimitive(values[2]);
+ c2Values->range.num = getPrimitive(values[3]);
+ c2Values->range.denom = getPrimitive(values[4]);
+ break;
+ case APEXCODEC_SUPPORTED_VALUES_VALUES:
+ case APEXCODEC_SUPPORTED_VALUES_FLAGS:
+ c2Values->values.clear();
+ for (uint32_t i = 0; i < numValues; ++i) {
+ c2Values->values.push_back(getPrimitive(values[i]));
+ }
+ break;
+ default:
+ LOG(ERROR) << "Unsupported supported values type: " << type;
+ return false;
+ }
+ return true;
+ } else {
+ return false;
+ }
+}
+
+} // anonymous namespace
+
+c2_status_t Codec2ConfigurableClient::ApexImpl::config(
+ const std::vector<C2Param*> ¶ms,
+ c2_blocking_t mayBlock,
+ std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
+ (void)mayBlock;
+ if (mBase == nullptr) {
+ return C2_OMITTED;
+ }
+
+ if (__builtin_available(android 36, *)) {
+ std::vector<uint8_t> configBuffer;
+ if (!::android::_createParamsBlob(&configBuffer, params)) {
+ LOG(ERROR) << "config -- bad input.";
+ return C2_TRANSACTION_FAILED;
+ }
+ ApexCodec_SettingResults* result = nullptr;
+ ApexCodec_LinearBuffer config{configBuffer.data(), configBuffer.size()};
+ ApexCodec_Status status = ApexCodec_Configurable_config(
+ mBase, &config, &result);
+ base::ScopeGuard guard([result] {
+ if (result) {
+ ApexCodec_SettingResults_release(result);
+ }
+ });
+ size_t index = 0;
+ ApexCodec_SettingResultFailure failure;
+ ApexCodec_ParamFieldValues field;
+ ApexCodec_ParamFieldValues* conflicts = nullptr;
+ size_t numConflicts = 0;
+ ApexCodec_Status getResultStatus = ApexCodec_SettingResults_getResultAtIndex(
+ result, 0, &failure, &field, &conflicts, &numConflicts);
+ while (getResultStatus == APEXCODEC_STATUS_OK) {
+ std::unique_ptr<C2SettingResult> settingResult;
+ settingResult.reset(new C2SettingResult{
+ C2SettingResult::Failure(failure), C2ParamFieldValues(ParamOrField(field)), {}
+ });
+ // TODO: settingResult->field.values = ?
+ for (size_t i = 0; i < numConflicts; ++i) {
+ settingResult->conflicts.emplace_back(ParamOrField(conflicts[i]));
+ C2ParamFieldValues& conflict = settingResult->conflicts.back();
+ conflict.values = std::make_unique<C2FieldSupportedValues>();
+ FromApex(conflicts[i].values, conflict.values.get());
+ }
+ failures->push_back(std::move(settingResult));
+ getResultStatus = ApexCodec_SettingResults_getResultAtIndex(
+ result, ++index, &failure, &field, &conflicts, &numConflicts);
+ }
+ if (!::android::updateParamsFromBlob(params, configBuffer)) {
+ LOG(ERROR) << "config -- "
+ << "failed to parse returned params.";
+ status = APEXCODEC_STATUS_CORRUPTED;
+ }
+ return (c2_status_t)status;
+ } else {
+ return C2_OMITTED;
+ }
+}
+
+c2_status_t Codec2ConfigurableClient::ApexImpl::querySupportedParams(
+ std::vector<std::shared_ptr<C2ParamDescriptor>>* const params) const {
+ if (mBase == nullptr) {
+ return C2_OMITTED;
+ }
+
+ if (__builtin_available(android 36, *)) {
+ // TODO: Cache and query properly!
+ ApexCodec_ParamDescriptors* paramDescs = nullptr;
+ ApexCodec_Configurable_querySupportedParams(mBase, ¶mDescs);
+ base::ScopeGuard guard([paramDescs] {
+ if (paramDescs) {
+ ApexCodec_ParamDescriptors_release(paramDescs);
+ }
+ });
+ uint32_t *indices = nullptr;
+ size_t numIndices = 0;
+ ApexCodec_Status status = ApexCodec_ParamDescriptors_getIndices(
+ paramDescs, &indices, &numIndices);
+ if (status != APEXCODEC_STATUS_OK) {
+ return (c2_status_t)status;
+ }
+ if (numIndices > 0) {
+ for (int i = 0; i < numIndices; ++i) {
+ uint32_t index = indices[i];
+ ApexCodec_ParamAttribute attr = (ApexCodec_ParamAttribute)0;
+ const char* name = nullptr;
+ uint32_t* dependencies = nullptr;
+ size_t numDependencies = 0;
+ ApexCodec_Status descStatus = ApexCodec_ParamDescriptors_getDescriptor(
+ paramDescs, index, &attr, &name, &dependencies, &numDependencies);
+ if (descStatus != APEXCODEC_STATUS_OK) {
+ LOG(WARNING) << "querySupportedParams -- "
+ << "failed to get descriptor for index "
+ << std::hex << index << std::dec << " with status " << descStatus;
+ continue;
+ }
+ params->push_back(std::make_shared<C2ParamDescriptor>(
+ C2Param::Index(index), C2ParamDescriptor::attrib_t(attr), name,
+ std::vector<C2Param::Index>(dependencies, dependencies + numDependencies)));
+ }
+ }
+ return (c2_status_t)status;
+ } else {
+ return C2_OMITTED;
+ }
+}
+
+c2_status_t Codec2ConfigurableClient::ApexImpl::querySupportedValues(
+ std::vector<C2FieldSupportedValuesQuery>& fields,
+ [[maybe_unused]] c2_blocking_t mayBlock) const {
+ if (mBase == nullptr) {
+ return C2_OMITTED;
+ }
+
+ if (__builtin_available(android 36, *)) {
+ std::vector<ApexCodec_SupportedValuesQuery> queries(fields.size());
+ for (size_t i = 0; i < fields.size(); ++i) {
+ queries[i].index = _C2ParamInspector::GetIndex(fields[i].field());
+ queries[i].offset = _C2ParamInspector::GetOffset(fields[i].field());
+ queries[i].type = (ApexCodec_SupportedValuesQueryType)fields[i].type();
+ queries[i].status = APEXCODEC_STATUS_OK;
+ queries[i].values = nullptr;
+ }
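+ // Issue a single batched query; each entry carries its own status and values, which
+ // are converted back into the C2 representation afterwards.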
+ ApexCodec_Status status = ApexCodec_Configurable_querySupportedValues(
+ mBase, queries.data(), queries.size());
+ for (size_t i = 0; i < fields.size(); ++i) {
+ fields[i].status = (c2_status_t)queries[i].status;
+ FromApex(queries[i].values, &fields[i].values);
+ if (queries[i].values) {
+ ApexCodec_SupportedValues_release(queries[i].values);
+ queries[i].values = nullptr;
+ }
+ }
+ return (c2_status_t)status;
+ } else {
+ return C2_OMITTED;
+ }
+}
+
// Codec2ConfigurableClient
Codec2ConfigurableClient::Codec2ConfigurableClient(const sp<HidlBase> &hidlBase)
@@ -810,6 +1194,11 @@
: mImpl(new Codec2ConfigurableClient::AidlImpl(aidlBase)) {
}
+Codec2ConfigurableClient::Codec2ConfigurableClient(
+ ApexCodec_Configurable *apexBase, const C2String &name)
+ : mImpl(new Codec2ConfigurableClient::ApexImpl(apexBase, name)) {
+}
+
const C2String& Codec2ConfigurableClient::getName() const {
return mImpl->getName();
}
@@ -1035,6 +1424,393 @@
};
+// Codec2Client::Component::ApexHandler
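+// The ApexHandler drives an in-process ApexCodec_Component: work queued by the client is
+// processed on a dedicated thread via ApexCodec_Component_process(), and completed work is
+// reported back through the listener, mirroring the behavior of the remote HIDL/AIDL paths.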
+class Codec2Client::Component::ApexHandler {
+public:
+ ApexHandler(ApexCodec_Component *apexComponent,
+ const std::shared_ptr<Listener> &listener,
+ const std::shared_ptr<Component> &comp)
+ : mApexComponent(apexComponent),
+ mListener(listener),
+ mComponent(comp),
+ mStopped(false),
+ mOutputBufferType(APEXCODEC_BUFFER_TYPE_INVALID) {
+ }
+
+ void start() {
+ std::shared_ptr<Component> comp = mComponent.lock();
+ if (!comp) {
+ LOG(ERROR) << "ApexHandler::start -- component died.";
+ return;
+ }
+ C2ComponentDomainSetting domain;
+ C2ComponentKindSetting kind;
+ c2_status_t status = comp->query({&domain, &kind}, {}, C2_MAY_BLOCK, {});
+ if (status != C2_OK) {
+ LOG(ERROR) << "ApexHandler::start -- failed to query component domain and kind";
+ return;
+ }
+ if (kind.value != C2Component::KIND_DECODER
+ && kind.value != C2Component::KIND_ENCODER) {
+ LOG(ERROR) << "ApexHandler::start -- unrecognized component kind " << kind.value;
+ return;
+ }
+ ApexCodec_BufferType outputBufferType = APEXCODEC_BUFFER_TYPE_INVALID;
+ if (domain.value == C2Component::DOMAIN_AUDIO) {
+ // For both encoders and decoders the output buffer type is linear.
+ outputBufferType = APEXCODEC_BUFFER_TYPE_LINEAR;
+ } else if (domain.value == C2Component::DOMAIN_VIDEO
+ || domain.value == C2Component::DOMAIN_IMAGE) {
+ // For video / image domain the decoder outputs a graphic buffer, and the encoder
+ // outputs a linear buffer.
+ outputBufferType = (kind.value == C2Component::KIND_DECODER)
+ ? APEXCODEC_BUFFER_TYPE_GRAPHIC : APEXCODEC_BUFFER_TYPE_LINEAR;
+ } else {
+ LOG(ERROR) << "ApexHandler::start -- unrecognized component domain " << domain.value;
+ return;
+ }
+ {
+ std::unique_lock<std::mutex> l(mMutex);
+ mStopped = false;
+ mOutputBufferType = outputBufferType;
+ }
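+ // Launch the worker thread; it drains mWorkQueue and performs the synchronous
+ // ApexCodec_Component_process() calls in run()/handleWork().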
+ mThread = std::thread([this]() {
+ run();
+ });
+ }
+
+ void queue(std::list<std::unique_ptr<C2Work>>& workItems) {
+ std::unique_lock<std::mutex> l(mMutex);
+ mWorkQueue.splice(mWorkQueue.end(), workItems);
+ mCondition.notify_all();
+ }
+
+ void stop() {
+ std::unique_lock<std::mutex> l(mMutex);
+ mStopped = true;
+ mCondition.notify_all();
+ l.unlock();
+ if (mThread.joinable()) {
+ mThread.join();
+ }
+ }
+
+private:
+ void run() {
+ while (true) {
+ std::unique_lock<std::mutex> l(mMutex);
+ mCondition.wait(l, [this]() {
+ return !mWorkQueue.empty() || mStopped;
+ });
+ if (mStopped) {
+ break;
+ }
+ if (mWorkQueue.empty()) {
+ continue;
+ }
+ std::list<std::unique_ptr<C2Work>> workItems;
+ mWorkQueue.swap(workItems);
+ for (std::unique_ptr<C2Work>& workItem : workItems) {
+ if (mStopped) {
+ break;
+ }
+ l.unlock();
+ handleWork(std::move(workItem));
+ l.lock();
+ }
+ }
+ mWorkQueue.clear();
+ mWorkMap.clear();
+ }
+
+ void handleWork(std::unique_ptr<C2Work> &&workItem) {
+ if (__builtin_available(android 36, *)) {
+ std::shared_ptr<Listener> listener = mListener.lock();
+ if (!listener) {
+ LOG(DEBUG) << "handleWork -- listener died.";
+ return;
+ }
+ ApexCodec_Buffer input;
+ input.flags = (ApexCodec_BufferFlags)workItem->input.flags;
+ input.frameIndex = workItem->input.ordinal.frameIndex.peekll();
+ input.timestampUs = workItem->input.ordinal.timestamp.peekll();
+
+ if (workItem->input.buffers.size() > 1) {
+ LOG(ERROR) << "handleWork -- input buffer size is "
+ << workItem->input.buffers.size();
+ return;
+ }
+ std::shared_ptr<C2Buffer> buffer;
+ std::optional<C2ReadView> linearView;
+ if (!workItem->input.buffers.empty()) {
+ buffer = workItem->input.buffers[0];
+ }
+ if (!FillMemory(buffer, &input, &linearView)) {
+ LOG(ERROR) << "handleWork -- failed to map input";
+ return;
+ }
+
+ std::vector<uint8_t> configUpdatesVector;
+ if (!_createParamsBlob(&configUpdatesVector, workItem->input.configUpdate)) {
+ listener->onError(mComponent, C2_CORRUPTED);
+ return;
+ }
+ input.configUpdates.data = configUpdatesVector.data();
+ input.configUpdates.size = configUpdatesVector.size();
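+ // Track the pending work item by frame index so that outputs reported by the codec
+ // (possibly split across multiple process() calls) can be matched back to it.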
+ mWorkMap.insert_or_assign(
+ workItem->input.ordinal.frameIndex.peekll(), std::move(workItem));
+
+ std::list<std::unique_ptr<C2Work>> workItems;
+ bool inputDrained = false;
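+ // Keep feeding the remaining input until the codec has consumed it all, allocating a
+ // fresh output buffer for each process() call.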
+ while (!inputDrained) {
+ ApexCodec_Buffer output;
+ std::shared_ptr<C2LinearBlock> linearBlock;
+ std::optional<C2WriteView> linearView;
+ std::shared_ptr<C2GraphicBlock> graphicBlock;
+ allocOutputBuffer(&output, &linearBlock, &linearView, &graphicBlock);
+ size_t consumed = 0;
+ size_t produced = 0;
+ ApexCodec_Status status = ApexCodec_Component_process(
+ mApexComponent, &input, &output, &consumed, &produced);
+ if (status == APEXCODEC_STATUS_NO_MEMORY) {
+ continue;
+ }
+ if (produced > 0) {
+ auto it = mWorkMap.find(output.frameIndex);
+ std::unique_ptr<C2Work> outputWorkItem;
+ if (it != mWorkMap.end()) {
+ if (output.flags & APEXCODEC_FLAG_INCOMPLETE) {
+ outputWorkItem = std::make_unique<C2Work>();
+ outputWorkItem->input.ordinal = it->second->input.ordinal;
+ outputWorkItem->input.flags = it->second->input.flags;
+ } else {
+ outputWorkItem = std::move(it->second);
+ mWorkMap.erase(it);
+ }
+ } else {
+ LOG(WARNING) << "handleWork -- no work item found for output frame index "
+ << output.frameIndex;
+ outputWorkItem = std::make_unique<C2Work>();
+ outputWorkItem->input.ordinal.frameIndex = output.frameIndex;
+ outputWorkItem->input.ordinal.timestamp = output.timestampUs;
+ }
+ outputWorkItem->worklets.emplace_back(new C2Worklet);
+ const std::unique_ptr<C2Worklet> &worklet = outputWorkItem->worklets.front();
+ if (worklet == nullptr) {
+ LOG(ERROR) << "handleWork -- output work item has null worklet";
+ return;
+ }
+ worklet->output.ordinal.frameIndex = output.frameIndex;
+ worklet->output.ordinal.timestamp = output.timestampUs;
+ // non-owning hidl_vec<> to wrap around the output config updates
+ hidl_vec<uint8_t> outputConfigUpdates;
+ outputConfigUpdates.setToExternal(
+ output.configUpdates.data, output.configUpdates.size);
+ std::vector<C2Param*> outputConfigUpdatePtrs;
+ parseParamsBlob(&outputConfigUpdatePtrs, outputConfigUpdates);
+ worklet->output.configUpdate.clear();
+ std::ranges::transform(
+ outputConfigUpdatePtrs,
+ std::back_inserter(worklet->output.configUpdate),
+ [](C2Param* param) { return C2Param::Copy(*param); });
+ worklet->output.flags = (C2FrameData::flags_t)output.flags;
+
+ workItems.push_back(std::move(outputWorkItem));
+ }
+
+ // determine whether the input buffer is drained
+ if (input.type == APEXCODEC_BUFFER_TYPE_LINEAR) {
+ if (input.memory.linear.size < consumed) {
+ LOG(WARNING) << "handleWork -- component consumed more bytes "
+ << "than the input buffer size";
+ inputDrained = true;
+ } else {
+ input.memory.linear.data += consumed;
+ input.memory.linear.size -= consumed;
+ }
+ } else if (input.type == APEXCODEC_BUFFER_TYPE_GRAPHIC) {
+ inputDrained = (consumed > 0);
+ }
+ }
+
+ if (!workItems.empty()) {
+ listener->onWorkDone(mComponent, workItems);
+ }
+ }
+ }
+
+ bool ensureBlockPool() {
+ std::shared_ptr<Component> comp = mComponent.lock();
+ if (!comp) {
+ return false;
+ }
+ std::vector<std::unique_ptr<C2Param>> heapParams;
+ comp->query({}, {C2PortBlockPoolsTuning::output::PARAM_TYPE}, C2_MAY_BLOCK, &heapParams);
+ if (heapParams.size() != 1) {
+ return false;
+ }
+ const C2Param* param = heapParams[0].get();
+ if (param->type() != C2PortBlockPoolsTuning::output::PARAM_TYPE) {
+ return false;
+ }
+ const C2PortBlockPoolsTuning::output *blockPools =
+ static_cast<const C2PortBlockPoolsTuning::output *>(param);
+ if (blockPools->flexCount() == 0) {
+ return false;
+ }
+ C2BlockPool::local_id_t blockPoolId = blockPools->m.values[0];
+ if (mBlockPool && mBlockPool->getLocalId() == blockPoolId) {
+ // no need to update
+ return true;
+ }
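+ // The configured output block pool id is new or has changed; acquire the matching
+ // client-side pool.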
+ return C2_OK == GetCodec2BlockPool(blockPoolId, nullptr, &mBlockPool);
+ }
+
+ void allocOutputBuffer(
+ ApexCodec_Buffer* output,
+ std::shared_ptr<C2LinearBlock> *linearBlock,
+ std::optional<C2WriteView> *linearView,
+ std::shared_ptr<C2GraphicBlock> *graphicBlock) {
+ if (mOutputBufferType == APEXCODEC_BUFFER_TYPE_LINEAR) {
+ if (!ensureBlockPool()) {
+ return;
+ }
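+ // Linear output: size the block using the component's max buffer size setting,
+ // falling back to 1 MiB when it is not reported.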
+ {
+ std::shared_ptr<Component> comp = mComponent.lock();
+ if (!comp) {
+ return;
+ }
+ C2StreamMaxBufferSizeInfo::output maxBufferSize(0u /* stream */);
+ comp->query({&maxBufferSize}, {}, C2_MAY_BLOCK, {});
+ mLinearBlockCapacity = maxBufferSize ? maxBufferSize.value : 1024 * 1024;
+ }
+ output->type = APEXCODEC_BUFFER_TYPE_LINEAR;
+ c2_status_t status = mBlockPool->fetchLinearBlock(
+ mLinearBlockCapacity,
+ C2MemoryUsage(C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE),
+ linearBlock);
+ if (status != C2_OK || !(*linearBlock)) {
+ return;
+ }
+ linearView->emplace((*linearBlock)->map().get());
+ if ((*linearView)->error() != C2_OK) {
+ return;
+ }
+ output->memory.linear.data = (*linearView)->data();
+ output->memory.linear.size = (*linearView)->capacity();
+ } else if (mOutputBufferType == APEXCODEC_BUFFER_TYPE_GRAPHIC) {
+ if (!ensureBlockPool()) {
+ return;
+ }
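+ // Graphic output: derive the allocation dimensions and pixel format from the
+ // component's picture size and pixel format settings, defaulting to YCbCr 420 888.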
+ {
+ std::shared_ptr<Component> comp = mComponent.lock();
+ if (!comp) {
+ return;
+ }
+ C2StreamMaxPictureSizeTuning::output maxPictureSize(0u /* stream */);
+ C2StreamPictureSizeInfo::output pictureSize(0u /* stream */);
+ C2StreamPixelFormatInfo::output pixelFormat(0u /* stream */);
+ comp->query({&maxPictureSize, &pictureSize, &pixelFormat}, {}, C2_MAY_BLOCK, {});
+ mWidth = maxPictureSize ? maxPictureSize.width : pictureSize.width;
+ mHeight = maxPictureSize ? maxPictureSize.height : pictureSize.height;
+ mFormat = pixelFormat ? pixelFormat.value : HAL_PIXEL_FORMAT_YCBCR_420_888;
+ }
+ output->type = APEXCODEC_BUFFER_TYPE_GRAPHIC;
+ c2_status_t status = mBlockPool->fetchGraphicBlock(
+ mWidth, mHeight, mFormat,
+ C2MemoryUsage(C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE),
+ graphicBlock);
+ if (status != C2_OK || !(*graphicBlock)) {
+ return;
+ }
+ const C2Handle *handle = (*graphicBlock)->handle();
+ uint32_t width, height, format, stride, igbp_slot, generation;
+ uint64_t usage, igbp_id;
+ _UnwrapNativeCodec2GrallocMetadata(
+ handle, &width, &height, &format, &usage, &stride, &generation,
+ &igbp_id, &igbp_slot);
+ native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(handle);
+ sp<GraphicBuffer> graphicBuffer = new GraphicBuffer(
+ grallocHandle, GraphicBuffer::CLONE_HANDLE,
+ width, height, format, 1, usage, stride);
+ native_handle_delete(grallocHandle);
+ AHardwareBuffer *hardwareBuffer =
+ AHardwareBuffer_from_GraphicBuffer(graphicBuffer.get());
+ AHardwareBuffer_acquire(hardwareBuffer);
+ output->memory.graphic = hardwareBuffer;
+ } else {
+ LOG(ERROR) << "allocOutputBuffer -- unsupported output buffer type: "
+ << mOutputBufferType;
+ return;
+ }
+ }
+
+ static bool FillMemory(
+ const std::shared_ptr<C2Buffer>& buffer,
+ ApexCodec_Buffer* apexBuffer,
+ std::optional<C2ReadView>* linearView) {
+ if (!buffer) {
+ // A work item with no input buffer (e.g. EOS-only or config-only input) is
+ // treated here as an empty linear payload.
+ apexBuffer->type = APEXCODEC_BUFFER_TYPE_LINEAR;
+ apexBuffer->memory.linear.data = nullptr;
+ apexBuffer->memory.linear.size = 0;
+ return true;
+ }
+ if (buffer->data().type() == C2BufferData::LINEAR) {
+ apexBuffer->type = APEXCODEC_BUFFER_TYPE_LINEAR;
+ if (buffer->data().linearBlocks().empty()) {
+ apexBuffer->memory.linear.data = nullptr;
+ apexBuffer->memory.linear.size = 0;
+ return true;
+ } else if (buffer->data().linearBlocks().size() > 1) {
+ return false;
+ }
+ linearView->emplace(buffer->data().linearBlocks().front().map().get());
+ if ((*linearView)->error() != C2_OK) {
+ return false;
+ }
+ apexBuffer->memory.linear.data = const_cast<uint8_t*>((*linearView)->data());
+ apexBuffer->memory.linear.size = (*linearView)->capacity();
+ return true;
+ } else if (buffer->data().type() == C2BufferData::GRAPHIC) {
+ apexBuffer->type = APEXCODEC_BUFFER_TYPE_GRAPHIC;
+ if (buffer->data().graphicBlocks().empty()) {
+ apexBuffer->memory.graphic = nullptr;
+ return true;
+ } else if (buffer->data().graphicBlocks().size() > 1) {
+ return false;
+ }
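+ // Wrap the block's gralloc handle in an AHardwareBuffer, which is the form
+ // ApexCodec_Buffer expects for graphic memory.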
+ const C2Handle *handle = buffer->data().graphicBlocks().front().handle();
+ uint32_t width, height, format, stride, igbp_slot, generation;
+ uint64_t usage, igbp_id;
+ _UnwrapNativeCodec2GrallocMetadata(
+ handle, &width, &height, &format, &usage, &stride, &generation,
+ &igbp_id, &igbp_slot);
+ native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(handle);
+ sp<GraphicBuffer> graphicBuffer = new GraphicBuffer(
+ grallocHandle, GraphicBuffer::CLONE_HANDLE,
+ width, height, format, 1, usage, stride);
+ native_handle_delete(grallocHandle);
+ AHardwareBuffer *hardwareBuffer =
+ AHardwareBuffer_from_GraphicBuffer(graphicBuffer.get());
+ AHardwareBuffer_acquire(hardwareBuffer);
+ apexBuffer->memory.graphic = hardwareBuffer;
+ return true;
+ }
+ return false;
+ }
+
+ ApexCodec_Component *mApexComponent;
+ std::weak_ptr<Listener> mListener;
+ std::weak_ptr<Component> mComponent;
+
+ std::thread mThread;
+ std::mutex mMutex;
+ std::condition_variable mCondition;
+ bool mStopped;
+ ApexCodec_BufferType mOutputBufferType;
+
+ size_t mLinearBlockCapacity{0};
+ uint32_t mWidth{0};
+ uint32_t mHeight{0};
+ uint32_t mFormat{0};
+
+ std::shared_ptr<C2BlockPool> mBlockPool;
+ std::list<std::unique_ptr<C2Work>> mWorkQueue;
+ std::map<uint64_t, std::unique_ptr<C2Work>> mWorkMap;
+};
+
// Codec2Client::Component::HidlBufferPoolSender
struct Codec2Client::Component::HidlBufferPoolSender :
hardware::media::c2::V1_1::utils::DefaultBufferPoolSender {
@@ -1168,6 +1944,13 @@
}
}
+Codec2Client::Codec2Client(ApexCodec_ComponentStore *base,
+ size_t serviceIndex)
+ : Configurable{nullptr, "android.componentStore.apexCodecs"},
+ mApexBase{base},
+ mServiceIndex{serviceIndex} {
+}
+
sp<Codec2Client::HidlBase> const& Codec2Client::getHidlBase() const {
return mHidlBase1_0;
}
@@ -1196,36 +1979,71 @@
const C2String& name,
const std::shared_ptr<Codec2Client::Listener>& listener,
std::shared_ptr<Codec2Client::Component>* const component) {
- if (mAidlBase) {
- std::shared_ptr<Component::AidlListener> aidlListener =
- Component::AidlListener::make<Component::AidlListener>();
- aidlListener->base = listener;
- std::shared_ptr<c2_aidl::IComponent> aidlComponent;
- ::ndk::ScopedAStatus transStatus = mAidlBase->createComponent(
- name,
- aidlListener,
- bufferpool2_aidl::implementation::ClientManager::getInstance(),
- &aidlComponent);
- c2_status_t status = GetC2Status(transStatus, "createComponent");
- if (status != C2_OK) {
- return status;
- } else if (!aidlComponent) {
- LOG(ERROR) << "createComponent(" << name.c_str()
- << ") -- null component.";
- return C2_CORRUPTED;
- }
- *component = std::make_shared<Codec2Client::Component>(aidlComponent);
- status = (*component)->setDeathListener((*component), listener);
- if (status != C2_OK) {
- LOG(ERROR) << "createComponent(" << name.c_str()
- << ") -- failed to set up death listener: "
- << status << ".";
- }
- (*component)->mAidlBufferPoolSender->setReceiver(mAidlHostPoolManager);
- aidlListener->component = *component;
- return status;
+ if (mApexBase) {
+ return createComponent_apex(name, listener, component);
+ } else if (mAidlBase) {
+ return createComponent_aidl(name, listener, component);
+ } else {
+ return createComponent_hidl(name, listener, component);
}
+}
+c2_status_t Codec2Client::createComponent_apex(
+ const C2String& name,
+ const std::shared_ptr<Codec2Client::Listener>& listener,
+ std::shared_ptr<Codec2Client::Component>* const component) {
+ if (__builtin_available(android 36, *)) {
+ ApexCodec_Component *apexComponent = nullptr;
+ ApexCodec_Status status = ApexCodec_Component_create(
+ mApexBase, name.c_str(), &apexComponent);
+ if (status != APEXCODEC_STATUS_OK) {
+ return (c2_status_t)status;
+ }
+ *component = std::make_shared<Codec2Client::Component>(apexComponent, name);
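+ // The APEX component runs in-process, so no death listener is registered; the
+ // ApexHandler created here drives processing instead.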
+ (*component)->initApexHandler(listener, *component);
+ return C2_OK;
+ } else {
+ return C2_OMITTED;
+ }
+}
+
+c2_status_t Codec2Client::createComponent_aidl(
+ const C2String& name,
+ const std::shared_ptr<Codec2Client::Listener>& listener,
+ std::shared_ptr<Codec2Client::Component>* const component) {
+ std::shared_ptr<Component::AidlListener> aidlListener =
+ Component::AidlListener::make<Component::AidlListener>();
+ aidlListener->base = listener;
+ std::shared_ptr<c2_aidl::IComponent> aidlComponent;
+ ::ndk::ScopedAStatus transStatus = mAidlBase->createComponent(
+ name,
+ aidlListener,
+ bufferpool2_aidl::implementation::ClientManager::getInstance(),
+ &aidlComponent);
+ c2_status_t status = GetC2Status(transStatus, "createComponent");
+ if (status != C2_OK) {
+ return status;
+ } else if (!aidlComponent) {
+ LOG(ERROR) << "createComponent(" << name.c_str()
+ << ") -- null component.";
+ return C2_CORRUPTED;
+ }
+ *component = std::make_shared<Codec2Client::Component>(aidlComponent);
+ status = (*component)->setDeathListener((*component), listener);
+ if (status != C2_OK) {
+ LOG(ERROR) << "createComponent(" << name.c_str()
+ << ") -- failed to set up death listener: "
+ << status << ".";
+ }
+ (*component)->mAidlBufferPoolSender->setReceiver(mAidlHostPoolManager);
+ aidlListener->component = *component;
+ return status;
+}
+
+c2_status_t Codec2Client::createComponent_hidl(
+ const C2String& name,
+ const std::shared_ptr<Codec2Client::Listener>& listener,
+ std::shared_ptr<Codec2Client::Component>* const component) {
c2_status_t status;
sp<Component::HidlListener> hidlListener = new Component::HidlListener{};
hidlListener->base = listener;
@@ -1593,6 +2411,13 @@
return a < b;
});
+ if (__builtin_available(android 36, *)) {
+ if (android::media::codec::provider_->in_process_sw_audio_codec_support()
+ && nullptr != ApexCodec_GetComponentStore()) {
+ names.push_back("__ApexCodecs__");
+ }
+ }
+
// Summarize to logcat.
if (names.empty()) {
LOG(INFO) << "No Codec2 services declared in the manifest.";
@@ -1649,7 +2474,13 @@
std::string const& name = GetServiceNames()[index];
LOG(VERBOSE) << "Creating a Codec2 client to service \"" << name << "\"";
- if (c2_aidl::utils::IsSelected()) {
+ if (name == "__ApexCodecs__") {
+ if (__builtin_available(android 36, *)) {
+ return std::make_shared<Codec2Client>(ApexCodec_GetComponentStore(), index);
+ } else {
+ LOG(FATAL) << "ApexCodecs not supported on Android version older than 36";
+ }
+ } else if (c2_aidl::utils::IsSelected()) {
if (__builtin_available(android __ANDROID_API_S__, *)) {
std::string instanceName =
::android::base::StringPrintf("%s/%s", AidlBase::descriptor, name.c_str());
@@ -2054,16 +2885,41 @@
mGraphicBufferAllocators{std::make_unique<GraphicBufferAllocators>()} {
}
+Codec2Client::Component::Component(ApexCodec_Component *base, const C2String &name)
+ : Configurable{[base]() -> ApexCodec_Configurable * {
+ if (__builtin_available(android 36, *)) {
+ return ApexCodec_Component_getConfigurable(base);
+ } else {
+ return nullptr;
+ }
+ }(), name},
+ mApexBase{base} {
+}
+
Codec2Client::Component::~Component() {
if (mAidlDeathSeq) {
GetAidlDeathManager()->unlinkToDeath(*mAidlDeathSeq, mAidlBase);
}
+ if (mApexBase) {
+ if (__builtin_available(android 36, *)) {
+ ApexCodec_Component_destroy(mApexBase);
+ }
+ mApexBase = nullptr;
+ }
}
c2_status_t Codec2Client::Component::createBlockPool(
C2Allocator::id_t id,
C2BlockPool::local_id_t* blockPoolId,
std::shared_ptr<Codec2Client::Configurable>* configurable) {
+ if (mApexBase) {
+ std::shared_ptr<C2BlockPool> blockPool;
+ c2_status_t status = CreateCodec2BlockPool(id, nullptr, &blockPool);
+ if (status != C2_OK || !blockPool) {
+ return status == C2_OK ? C2_CORRUPTED : status;
+ }
+ *blockPoolId = blockPool->getLocalId();
+ *configurable = nullptr;
+ mBlockPools[*blockPoolId] = blockPool;
+ return C2_OK;
+ }
if (mAidlBase) {
c2_aidl::IComponent::BlockPool aidlBlockPool;
c2_status_t status = C2_OK;
@@ -2134,6 +2990,10 @@
c2_status_t Codec2Client::Component::destroyBlockPool(
C2BlockPool::local_id_t localId) {
+ if (mApexBase) {
+ mBlockPools.erase(localId);
+ return C2_OK;
+ }
if (mAidlBase) {
mGraphicBufferAllocators->remove(localId);
::ndk::ScopedAStatus transStatus = mAidlBase->destroyBlockPool(localId);
@@ -2150,7 +3010,10 @@
void Codec2Client::Component::handleOnWorkDone(
const std::list<std::unique_ptr<C2Work>> &workItems) {
- if (mAidlBase) {
+ if (mApexBase) {
+ // no-op
+ return;
+ } else if (mAidlBase) {
holdIgbaBlocks(workItems);
} else {
// Output bufferqueue-based blocks' lifetime management
@@ -2160,6 +3023,10 @@
c2_status_t Codec2Client::Component::queue(
std::list<std::unique_ptr<C2Work>>* const items) {
+ if (mApexBase) {
+ mApexHandler->queue(*items);
+ return C2_OK;
+ }
if (mAidlBase) {
c2_aidl::WorkBundle workBundle;
if (!c2_aidl::utils::ToAidl(&workBundle, *items, mAidlBufferPoolSender.get())) {
@@ -2191,6 +3058,13 @@
C2Component::flush_mode_t mode,
std::list<std::unique_ptr<C2Work>>* const flushedWork) {
(void)mode; // Flush mode isn't supported in HIDL/AIDL yet.
+ if (mApexBase) {
+ if (__builtin_available(android 36, *)) {
+ return (c2_status_t)ApexCodec_Component_flush(mApexBase);
+ } else {
+ return C2_OMITTED;
+ }
+ }
c2_status_t status = C2_OK;
if (mAidlBase) {
c2_aidl::WorkBundle workBundle;
@@ -2250,6 +3124,9 @@
}
c2_status_t Codec2Client::Component::drain(C2Component::drain_mode_t mode) {
+ if (mApexBase) {
+ return C2_OMITTED;
+ }
if (mAidlBase) {
::ndk::ScopedAStatus transStatus = mAidlBase->drain(
mode == C2Component::DRAIN_COMPONENT_WITH_EOS);
@@ -2270,6 +3147,10 @@
}
c2_status_t Codec2Client::Component::start() {
+ if (mApexBase) {
+ // no-op
+ return C2_OK;
+ }
if (mAidlBase) {
::ndk::ScopedAStatus transStatus = mAidlBase->start();
return GetC2Status(transStatus, "start");
@@ -2306,6 +3187,13 @@
}
c2_status_t Codec2Client::Component::reset() {
+ if (mApexBase) {
+ if (__builtin_available(android 36, *)) {
+ return (c2_status_t)ApexCodec_Component_reset(mApexBase);
+ } else {
+ return C2_OMITTED;
+ }
+ }
if (mAidlBase) {
::ndk::ScopedAStatus transStatus = mAidlBase->reset();
return GetC2Status(transStatus, "reset");
@@ -2324,6 +3212,13 @@
}
c2_status_t Codec2Client::Component::release() {
+ if (mApexBase) {
+ if (__builtin_available(android 36, *)) {
+ return (c2_status_t)ApexCodec_Component_reset(mApexBase);
+ } else {
+ return C2_OMITTED;
+ }
+ }
if (mAidlBase) {
::ndk::ScopedAStatus transStatus = mAidlBase->release();
return GetC2Status(transStatus, "release");
@@ -2345,6 +3240,10 @@
uint32_t avSyncHwId,
native_handle_t** sidebandHandle) {
*sidebandHandle = nullptr;
+ if (mApexBase) {
+ // tunneling is not supported in APEX
+ return C2_OMITTED;
+ }
if (mAidlBase) {
::aidl::android::hardware::common::NativeHandle handle;
::ndk::ScopedAStatus transStatus = mAidlBase->configureVideoTunnel(avSyncHwId, &handle);
@@ -2616,6 +3515,10 @@
c2_status_t Codec2Client::Component::connectToInputSurface(
const std::shared_ptr<InputSurface>& inputSurface,
std::shared_ptr<InputSurfaceConnection>* connection) {
+ if (mApexBase) {
+ // FIXME
+ return C2_OMITTED;
+ }
if (mAidlBase) {
// FIXME
return C2_OMITTED;
@@ -2644,6 +3547,10 @@
const sp<HGraphicBufferProducer1>& producer,
const sp<HGraphicBufferSource>& source,
std::shared_ptr<InputSurfaceConnection>* connection) {
+ if (mApexBase) {
+ LOG(WARNING) << "Connecting to OMX input surface is not supported for AIDL C2 HAL";
+ return C2_OMITTED;
+ }
if (mAidlBase) {
LOG(WARNING) << "Connecting to OMX input surface is not supported for AIDL C2 HAL";
return C2_OMITTED;
@@ -2669,6 +3576,10 @@
}
c2_status_t Codec2Client::Component::disconnectFromInputSurface() {
+ if (mApexBase) {
+ // FIXME
+ return C2_OMITTED;
+ }
if (mAidlBase) {
// FIXME
return C2_OMITTED;
@@ -2693,6 +3604,16 @@
return sManager;
}
+c2_status_t Codec2Client::Component::initApexHandler(
+ const std::shared_ptr<Listener> &listener,
+ const std::shared_ptr<Component> &comp) {
+ if (!mApexBase) {
+ return C2_BAD_STATE;
+ }
+ mApexHandler = std::make_unique<ApexHandler>(mApexBase, listener, comp);
+ return C2_OK;
+}
+
c2_status_t Codec2Client::Component::setDeathListener(
const std::shared_ptr<Component>& component,
const std::shared_ptr<Listener>& listener) {
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index 7923f04..35c87e0 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -112,6 +112,10 @@
struct IGraphicBufferSource;
} // namespace android::hardware::media::omx::V1_0
+struct ApexCodec_ComponentStore;
+struct ApexCodec_Component;
+struct ApexCodec_Configurable;
+
namespace android {
// This class is supposed to be called Codec2Client::Configurable, but forward
@@ -148,6 +152,7 @@
explicit Codec2ConfigurableClient(const sp<HidlBase> &hidlBase);
explicit Codec2ConfigurableClient(const std::shared_ptr<AidlBase> &aidlBase);
+ Codec2ConfigurableClient(ApexCodec_Configurable *base, const C2String &name);
const C2String& getName() const;
@@ -172,6 +177,7 @@
private:
struct HidlImpl;
struct AidlImpl;
+ struct ApexImpl;
const std::unique_ptr<ImplBase> mImpl;
};
@@ -282,12 +288,16 @@
std::shared_ptr<AidlBase> const& base,
std::shared_ptr<Codec2ConfigurableClient::AidlBase> const& configurable,
size_t serviceIndex);
+ Codec2Client(
+ ApexCodec_ComponentStore* base,
+ size_t serviceIndex);
protected:
sp<HidlBase1_0> mHidlBase1_0;
sp<HidlBase1_1> mHidlBase1_1;
sp<HidlBase1_2> mHidlBase1_2;
std::shared_ptr<AidlBase> mAidlBase;
+ ApexCodec_ComponentStore* mApexBase{nullptr};
// Finds the first store where the predicate returns C2_OK and returns the
// last predicate result. The predicate will be tried on all stores. The
@@ -325,6 +335,20 @@
std::vector<C2Component::Traits> _listComponents(bool* success) const;
class Cache;
+
+private:
+ c2_status_t createComponent_aidl(
+ C2String const& name,
+ std::shared_ptr<Listener> const& listener,
+ std::shared_ptr<Component>* const component);
+ c2_status_t createComponent_hidl(
+ C2String const& name,
+ std::shared_ptr<Listener> const& listener,
+ std::shared_ptr<Component>* const component);
+ c2_status_t createComponent_apex(
+ C2String const& name,
+ std::shared_ptr<Listener> const& listener,
+ std::shared_ptr<Component>* const component);
};
struct Codec2Client::Interface : public Codec2Client::Configurable {
@@ -508,11 +532,16 @@
c2_status_t disconnectFromInputSurface();
+ c2_status_t initApexHandler(
+ const std::shared_ptr<Listener> &listener,
+ const std::shared_ptr<Component> &comp);
+
// base cannot be null.
Component(const sp<HidlBase>& base);
Component(const sp<HidlBase1_1>& base);
Component(const sp<HidlBase1_2>& base);
Component(const std::shared_ptr<AidlBase>& base);
+ Component(ApexCodec_Component* base, const C2String& name);
~Component();
@@ -521,12 +550,16 @@
sp<HidlBase1_1> mHidlBase1_1;
sp<HidlBase1_2> mHidlBase1_2;
std::shared_ptr<AidlBase> mAidlBase;
+ ApexCodec_Component *mApexBase{nullptr};
struct HidlBufferPoolSender;
struct AidlBufferPoolSender;
std::unique_ptr<HidlBufferPoolSender> mHidlBufferPoolSender;
std::unique_ptr<AidlBufferPoolSender> mAidlBufferPoolSender;
+ class ApexHandler;
+ std::unique_ptr<ApexHandler> mApexHandler;
+
struct OutputBufferQueue;
std::unique_ptr<OutputBufferQueue> mOutputBufferQueue;
@@ -547,6 +580,11 @@
const std::shared_ptr<Listener>& listener);
sp<::android::hardware::hidl_death_recipient> mDeathRecipient;
+ // This is a map of block pools created for APEX components in the client.
+ // Note that the APEX codec API requires output buffers to be passed from the client,
+ // so the client creates and keeps track of the block pools here.
+ std::map<C2BlockPool::local_id_t, std::shared_ptr<C2BlockPool>> mBlockPools;
+
friend struct Codec2Client;
struct HidlListener;
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index fd242a1..0f5cdd6 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -16,6 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "Codec2InfoBuilder"
+
+#include <cstdlib>
+
#include <log/log.h>
#include <strings.h>
@@ -508,6 +511,10 @@
&& !hasPrefix(v.first, "domain-")
&& !hasPrefix(v.first, "variant-")) {
writer->addGlobalSetting(v.first.c_str(), v.second.c_str());
+ if (v.first == "max-concurrent-instances") {
+ MediaCodecInfoWriter::SetMaxSupportedInstances(
+ (int32_t)strtol(v.second.c_str(), NULL, 10));
+ }
}
}
@@ -797,6 +804,7 @@
}
}
}
+ codecInfo->createCodecCaps();
}
}
return OK;
diff --git a/media/janitors/better_together_OWNERS b/media/janitors/better_together_OWNERS
new file mode 100644
index 0000000..70723cb
--- /dev/null
+++ b/media/janitors/better_together_OWNERS
@@ -0,0 +1,5 @@
+# Bug component: 137631
+
+aquilescanta@google.com
+asapperstein@google.com
+halliwell@google.com
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 61204ae..6dfb327 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -359,6 +359,8 @@
"aidl/android/media/AudioMixerBehavior.aidl",
"aidl/android/media/AudioOffloadMode.aidl",
"aidl/android/media/AudioPolicyDeviceState.aidl",
+ "aidl/android/media/AudioPolicyForceUse.aidl",
+ "aidl/android/media/AudioPolicyForcedConfig.aidl",
"aidl/android/media/AudioProductStrategy.aidl",
"aidl/android/media/AudioVolumeGroup.aidl",
"aidl/android/media/DeviceRole.aidl",
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 85f4a86..3f4fcfd 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -60,8 +60,6 @@
using media::audio::common::AudioMMapPolicyInfo;
using media::audio::common::AudioMMapPolicyType;
using media::audio::common::AudioOffloadInfo;
-using media::audio::common::AudioPolicyForceUse;
-using media::audio::common::AudioPolicyForcedConfig;
using media::audio::common::AudioSource;
using media::audio::common::AudioStreamType;
using media::audio::common::AudioUsage;
@@ -1061,9 +1059,9 @@
if (aps == 0) return AUDIO_POLICY_FORCE_NONE;
auto result = [&]() -> ConversionResult<audio_policy_forced_cfg_t> {
- AudioPolicyForceUse usageAidl = VALUE_OR_RETURN(
+ media::AudioPolicyForceUse usageAidl = VALUE_OR_RETURN(
legacy2aidl_audio_policy_force_use_t_AudioPolicyForceUse(usage));
- AudioPolicyForcedConfig configAidl;
+ media::AudioPolicyForcedConfig configAidl;
RETURN_IF_ERROR(statusTFromBinderStatus(
aps->getForceUse(usageAidl, &configAidl)));
return aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(configAidl);
diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
index a414cb7..163a359 100644
--- a/media/libaudioclient/PolicyAidlConversion.cpp
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -296,6 +296,138 @@
return unexpected(BAD_VALUE);
}
+ConversionResult<audio_policy_force_use_t>
+aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(media::AudioPolicyForceUse aidl) {
+ switch (aidl) {
+ case media::AudioPolicyForceUse::COMMUNICATION:
+ return AUDIO_POLICY_FORCE_FOR_COMMUNICATION;
+ case media::AudioPolicyForceUse::MEDIA:
+ return AUDIO_POLICY_FORCE_FOR_MEDIA;
+ case media::AudioPolicyForceUse::RECORD:
+ return AUDIO_POLICY_FORCE_FOR_RECORD;
+ case media::AudioPolicyForceUse::DOCK:
+ return AUDIO_POLICY_FORCE_FOR_DOCK;
+ case media::AudioPolicyForceUse::SYSTEM:
+ return AUDIO_POLICY_FORCE_FOR_SYSTEM;
+ case media::AudioPolicyForceUse::HDMI_SYSTEM_AUDIO:
+ return AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO;
+ case media::AudioPolicyForceUse::ENCODED_SURROUND:
+ return AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND;
+ case media::AudioPolicyForceUse::VIBRATE_RINGING:
+ return AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioPolicyForceUse>
+legacy2aidl_audio_policy_force_use_t_AudioPolicyForceUse(audio_policy_force_use_t legacy) {
+ switch (legacy) {
+ case AUDIO_POLICY_FORCE_FOR_COMMUNICATION:
+ return media::AudioPolicyForceUse::COMMUNICATION;
+ case AUDIO_POLICY_FORCE_FOR_MEDIA:
+ return media::AudioPolicyForceUse::MEDIA;
+ case AUDIO_POLICY_FORCE_FOR_RECORD:
+ return media::AudioPolicyForceUse::RECORD;
+ case AUDIO_POLICY_FORCE_FOR_DOCK:
+ return media::AudioPolicyForceUse::DOCK;
+ case AUDIO_POLICY_FORCE_FOR_SYSTEM:
+ return media::AudioPolicyForceUse::SYSTEM;
+ case AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO:
+ return media::AudioPolicyForceUse::HDMI_SYSTEM_AUDIO;
+ case AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND:
+ return media::AudioPolicyForceUse::ENCODED_SURROUND;
+ case AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING:
+ return media::AudioPolicyForceUse::VIBRATE_RINGING;
+ case AUDIO_POLICY_FORCE_USE_CNT:
+ break;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_policy_forced_cfg_t>
+aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(media::AudioPolicyForcedConfig aidl) {
+ switch (aidl) {
+ case media::AudioPolicyForcedConfig::NONE:
+ return AUDIO_POLICY_FORCE_NONE;
+ case media::AudioPolicyForcedConfig::SPEAKER:
+ return AUDIO_POLICY_FORCE_SPEAKER;
+ case media::AudioPolicyForcedConfig::HEADPHONES:
+ return AUDIO_POLICY_FORCE_HEADPHONES;
+ case media::AudioPolicyForcedConfig::BT_SCO:
+ return AUDIO_POLICY_FORCE_BT_SCO;
+ case media::AudioPolicyForcedConfig::BT_A2DP:
+ return AUDIO_POLICY_FORCE_BT_A2DP;
+ case media::AudioPolicyForcedConfig::WIRED_ACCESSORY:
+ return AUDIO_POLICY_FORCE_WIRED_ACCESSORY;
+ case media::AudioPolicyForcedConfig::BT_CAR_DOCK:
+ return AUDIO_POLICY_FORCE_BT_CAR_DOCK;
+ case media::AudioPolicyForcedConfig::BT_DESK_DOCK:
+ return AUDIO_POLICY_FORCE_BT_DESK_DOCK;
+ case media::AudioPolicyForcedConfig::ANALOG_DOCK:
+ return AUDIO_POLICY_FORCE_ANALOG_DOCK;
+ case media::AudioPolicyForcedConfig::DIGITAL_DOCK:
+ return AUDIO_POLICY_FORCE_DIGITAL_DOCK;
+ case media::AudioPolicyForcedConfig::NO_BT_A2DP:
+ return AUDIO_POLICY_FORCE_NO_BT_A2DP;
+ case media::AudioPolicyForcedConfig::SYSTEM_ENFORCED:
+ return AUDIO_POLICY_FORCE_SYSTEM_ENFORCED;
+ case media::AudioPolicyForcedConfig::HDMI_SYSTEM_AUDIO_ENFORCED:
+ return AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED;
+ case media::AudioPolicyForcedConfig::ENCODED_SURROUND_NEVER:
+ return AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER;
+ case media::AudioPolicyForcedConfig::ENCODED_SURROUND_ALWAYS:
+ return AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS;
+ case media::AudioPolicyForcedConfig::ENCODED_SURROUND_MANUAL:
+ return AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL;
+ case media::AudioPolicyForcedConfig::BT_BLE:
+ return AUDIO_POLICY_FORCE_BT_BLE;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioPolicyForcedConfig>
+legacy2aidl_audio_policy_forced_cfg_t_AudioPolicyForcedConfig(audio_policy_forced_cfg_t legacy) {
+ switch (legacy) {
+ case AUDIO_POLICY_FORCE_NONE:
+ return media::AudioPolicyForcedConfig::NONE;
+ case AUDIO_POLICY_FORCE_SPEAKER:
+ return media::AudioPolicyForcedConfig::SPEAKER;
+ case AUDIO_POLICY_FORCE_HEADPHONES:
+ return media::AudioPolicyForcedConfig::HEADPHONES;
+ case AUDIO_POLICY_FORCE_BT_SCO:
+ return media::AudioPolicyForcedConfig::BT_SCO;
+ case AUDIO_POLICY_FORCE_BT_A2DP:
+ return media::AudioPolicyForcedConfig::BT_A2DP;
+ case AUDIO_POLICY_FORCE_WIRED_ACCESSORY:
+ return media::AudioPolicyForcedConfig::WIRED_ACCESSORY;
+ case AUDIO_POLICY_FORCE_BT_CAR_DOCK:
+ return media::AudioPolicyForcedConfig::BT_CAR_DOCK;
+ case AUDIO_POLICY_FORCE_BT_DESK_DOCK:
+ return media::AudioPolicyForcedConfig::BT_DESK_DOCK;
+ case AUDIO_POLICY_FORCE_ANALOG_DOCK:
+ return media::AudioPolicyForcedConfig::ANALOG_DOCK;
+ case AUDIO_POLICY_FORCE_DIGITAL_DOCK:
+ return media::AudioPolicyForcedConfig::DIGITAL_DOCK;
+ case AUDIO_POLICY_FORCE_NO_BT_A2DP:
+ return media::AudioPolicyForcedConfig::NO_BT_A2DP;
+ case AUDIO_POLICY_FORCE_SYSTEM_ENFORCED:
+ return media::AudioPolicyForcedConfig::SYSTEM_ENFORCED;
+ case AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED:
+ return media::AudioPolicyForcedConfig::HDMI_SYSTEM_AUDIO_ENFORCED;
+ case AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER:
+ return media::AudioPolicyForcedConfig::ENCODED_SURROUND_NEVER;
+ case AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS:
+ return media::AudioPolicyForcedConfig::ENCODED_SURROUND_ALWAYS;
+ case AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL:
+ return media::AudioPolicyForcedConfig::ENCODED_SURROUND_MANUAL;
+ case AUDIO_POLICY_FORCE_BT_BLE:
+ return media::AudioPolicyForcedConfig::BT_BLE;
+ case AUDIO_POLICY_FORCE_CFG_CNT:
+ break;
+ }
+ return unexpected(BAD_VALUE);
+}
+
ConversionResult<device_role_t>
aidl2legacy_DeviceRole_device_role_t(media::DeviceRole aidl) {
switch (aidl) {
diff --git a/media/libaudioclient/aidl/android/media/AudioPolicyForceUse.aidl b/media/libaudioclient/aidl/android/media/AudioPolicyForceUse.aidl
new file mode 100644
index 0000000..9bb0605
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPolicyForceUse.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioPolicyForceUse {
+ COMMUNICATION = 0,
+ MEDIA = 1,
+ RECORD = 2,
+ DOCK = 3,
+ SYSTEM = 4,
+ HDMI_SYSTEM_AUDIO = 5,
+ ENCODED_SURROUND = 6,
+ VIBRATE_RINGING = 7,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPolicyForcedConfig.aidl b/media/libaudioclient/aidl/android/media/AudioPolicyForcedConfig.aidl
new file mode 100644
index 0000000..111bb2f
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPolicyForcedConfig.aidl
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioPolicyForcedConfig {
+ NONE = 0,
+ SPEAKER = 1,
+ HEADPHONES = 2,
+ BT_SCO = 3,
+ BT_A2DP = 4,
+ WIRED_ACCESSORY = 5,
+ BT_CAR_DOCK = 6,
+ BT_DESK_DOCK = 7,
+ ANALOG_DOCK = 8,
+ DIGITAL_DOCK = 9,
+ NO_BT_A2DP = 10, /* A2DP sink is not preferred to speaker or wired HS */
+ SYSTEM_ENFORCED = 11,
+ HDMI_SYSTEM_AUDIO_ENFORCED = 12,
+ ENCODED_SURROUND_NEVER = 13,
+ ENCODED_SURROUND_ALWAYS = 14,
+ ENCODED_SURROUND_MANUAL = 15,
+ BT_BLE = 16,
+}
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 7f5e8e2..ac42ea9 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -25,6 +25,8 @@
import android.media.AudioOffloadMode;
import android.media.AudioPatchFw;
import android.media.AudioPolicyDeviceState;
+import android.media.AudioPolicyForcedConfig;
+import android.media.AudioPolicyForceUse;
import android.media.AudioPortFw;
import android.media.AudioPortConfigFw;
import android.media.AudioPortRole;
@@ -47,8 +49,6 @@
import android.media.audio.common.AudioDeviceDescription;
import android.media.audio.common.AudioFormatDescription;
import android.media.audio.common.AudioMode;
-import android.media.audio.common.AudioPolicyForcedConfig;
-import android.media.audio.common.AudioPolicyForceUse;
import android.media.audio.common.AudioProfile;
import android.media.audio.common.AudioOffloadInfo;
import android.media.audio.common.AudioPort;
diff --git a/media/libaudioclient/include/media/PolicyAidlConversion.h b/media/libaudioclient/include/media/PolicyAidlConversion.h
index 1b90d6b..ed9ddd6 100644
--- a/media/libaudioclient/include/media/PolicyAidlConversion.h
+++ b/media/libaudioclient/include/media/PolicyAidlConversion.h
@@ -28,6 +28,8 @@
#include <android/media/AudioMixRouteFlag.h>
#include <android/media/AudioMixType.h>
#include <android/media/AudioOffloadMode.h>
+#include <android/media/AudioPolicyForceUse.h>
+#include <android/media/AudioPolicyForcedConfig.h>
#include <android/media/DeviceRole.h>
#include <media/AidlConversionUtil.h>
@@ -82,6 +84,16 @@
ConversionResult<media::AudioPolicyDeviceState>
legacy2aidl_audio_policy_dev_state_t_AudioPolicyDeviceState(audio_policy_dev_state_t legacy);
+ConversionResult<audio_policy_force_use_t>
+aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(media::AudioPolicyForceUse aidl);
+ConversionResult<media::AudioPolicyForceUse>
+legacy2aidl_audio_policy_force_use_t_AudioPolicyForceUse(audio_policy_force_use_t legacy);
+
+ConversionResult<audio_policy_forced_cfg_t>
+aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(media::AudioPolicyForcedConfig aidl);
+ConversionResult<media::AudioPolicyForcedConfig>
+legacy2aidl_audio_policy_forced_cfg_t_AudioPolicyForcedConfig(audio_policy_forced_cfg_t legacy);
+
ConversionResult<device_role_t>
aidl2legacy_DeviceRole_device_role_t(media::DeviceRole aidl);
ConversionResult<media::DeviceRole>
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index f5dec56..0dd0f74 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -201,11 +201,11 @@
"latest_android_hardware_audio_core_sounddose_ndk_shared",
"latest_android_hardware_audio_effect_ndk_shared",
"latest_android_media_audio_common_types_ndk_shared",
- "latest_av_audio_types_aidl_ndk_shared",
],
shared_libs: [
"android.hardware.common-V2-ndk",
"android.hardware.common.fmq-V1-ndk",
+ "av-audio-types-aidl-ndk",
"libaudio_aidl_conversion_common_cpp",
"libaudio_aidl_conversion_common_ndk",
"libaudio_aidl_conversion_common_ndk_cpp",
diff --git a/media/libmedia/CodecCapabilities.cpp b/media/libmedia/CodecCapabilities.cpp
index 87eb4bc..407d376 100644
--- a/media/libmedia/CodecCapabilities.cpp
+++ b/media/libmedia/CodecCapabilities.cpp
@@ -17,6 +17,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "CodecCapabilities"
+#include <android-base/strings.h>
#include <utils/Log.h>
#include <media/CodecCapabilities.h>
#include <media/CodecCapabilitiesUtils.h>
@@ -25,6 +26,58 @@
namespace android {
+static const int32_t HEVCHighTierLevels =
+ HEVCHighTierLevel1 | HEVCHighTierLevel2 | HEVCHighTierLevel21 | HEVCHighTierLevel3 |
+ HEVCHighTierLevel31 | HEVCHighTierLevel4 | HEVCHighTierLevel41 | HEVCHighTierLevel5 |
+ HEVCHighTierLevel51 | HEVCHighTierLevel52 | HEVCHighTierLevel6 | HEVCHighTierLevel61 |
+ HEVCHighTierLevel62;
+
+static const int32_t DEFAULT_MAX_SUPPORTED_INSTANCES = 32;
+static const int32_t MAX_SUPPORTED_INSTANCES_LIMIT = 256;
+
+// must not contain KEY_PROFILE
+static const std::set<std::pair<std::string, AMessage::Type>> AUDIO_LEVEL_CRITICAL_FORMAT_KEYS = {
+ // We don't set level-specific limits for audio codecs today. Key candidates would
+ // be sample rate, bit rate or channel count.
+ // MediaFormat.KEY_SAMPLE_RATE,
+ // MediaFormat.KEY_CHANNEL_COUNT,
+ // MediaFormat.KEY_BIT_RATE,
+ { KEY_MIME, AMessage::kTypeString }
+};
+
+// CodecCapabilities Features
+static const std::vector<Feature> DECODER_FEATURES = {
+ Feature(FEATURE_AdaptivePlayback, (1 << 0), true),
+ Feature(FEATURE_SecurePlayback, (1 << 1), false),
+ Feature(FEATURE_TunneledPlayback, (1 << 2), false),
+ Feature(FEATURE_PartialFrame, (1 << 3), false),
+ Feature(FEATURE_FrameParsing, (1 << 4), false),
+ Feature(FEATURE_MultipleFrames, (1 << 5), false),
+ Feature(FEATURE_DynamicTimestamp, (1 << 6), false),
+ Feature(FEATURE_LowLatency, (1 << 7), true),
+ // feature to exclude codec from REGULAR codec list
+ Feature(FEATURE_SpecialCodec, (1 << 30), false, true),
+};
+static const std::vector<Feature> ENCODER_FEATURES = {
+ Feature(FEATURE_IntraRefresh, (1 << 0), false),
+ Feature(FEATURE_MultipleFrames, (1 << 1), false),
+ Feature(FEATURE_DynamicTimestamp, (1 << 2), false),
+ Feature(FEATURE_QpBounds, (1 << 3), false),
+ Feature(FEATURE_EncodingStatistics, (1 << 4), false),
+ Feature(FEATURE_HdrEditing, (1 << 5), false),
+ // feature to exclude codec from REGULAR codec list
+ Feature(FEATURE_SpecialCodec, (1 << 30), false, true),
+};
+
+// must not contain KEY_PROFILE
+static const std::set<std::pair<std::string, AMessage::Type>> VIDEO_LEVEL_CRITICAL_FORMAT_KEYS = {
+ { KEY_WIDTH, AMessage::kTypeInt32 },
+ { KEY_HEIGHT, AMessage::kTypeInt32 },
+ { KEY_FRAME_RATE, AMessage::kTypeInt32 },
+ { KEY_BIT_RATE, AMessage::kTypeInt32 },
+ { KEY_MIME, AMessage::kTypeString }
+};
+
bool CodecCapabilities::SupportsBitrate(Range<int32_t> bitrateRange,
const sp<AMessage> &format) {
// consider max bitrate over average bitrate for support
@@ -46,6 +99,212 @@
return true;
}
+bool CodecCapabilities::isFeatureSupported(const std::string &name) const {
+ return mFeaturesSupported.contains(name);
+}
+
+bool CodecCapabilities::isFeatureRequired(const std::string &name) const {
+ return mFeaturesRequired.contains(name);
+}
+
+std::vector<std::string> CodecCapabilities::validFeatures() const {
+ std::vector<std::string> res;
+ for (const Feature& feature : getValidFeatures()) {
+ if (!feature.mInternal) {
+ res.push_back(feature.mName);
+ }
+ }
+ return res;
+}
+
+std::vector<Feature> CodecCapabilities::getValidFeatures() const {
+ if (isEncoder()) {
+ return ENCODER_FEATURES;
+ } else {
+ return DECODER_FEATURES;
+ }
+}
+
+bool CodecCapabilities::isRegular() const {
+ // regular codecs only require default features
+ std::vector<Feature> features = getValidFeatures();
+ return std::all_of(features.begin(), features.end(),
+ [this](Feature feat){ return (feat.mDefault || !isFeatureRequired(feat.mName)); });
+}
+
+bool CodecCapabilities::isFormatSupported(const sp<AMessage> &format) const {
+ AString mediaType;
+ // mediaType must match if present
+ if (format->findString(KEY_MIME, &mediaType)
+ && !base::EqualsIgnoreCase(mMediaType, mediaType.c_str())) {
+ return false;
+ }
+
+ // check feature support
+ for (Feature feat: getValidFeatures()) {
+ if (feat.mInternal) {
+ continue;
+ }
+
+ int32_t yesNo;
+ std::string key = KEY_FEATURE_;
+ key = key + feat.mName;
+ if (!format->findInt32(key.c_str(), &yesNo)) {
+ // the format does not specify this feature; nothing to check
+ continue;
+ }
+ if ((yesNo == 1 && !isFeatureSupported(feat.mName)) ||
+ (yesNo == 0 && isFeatureRequired(feat.mName))) {
+ return false;
+ }
+ }
+
+ int32_t profile;
+ if (format->findInt32(KEY_PROFILE, &profile)) {
+ int32_t level = -1;
+ format->findInt32(KEY_LEVEL, &level);
+ if (!supportsProfileLevel(profile, level)) {
+ return false;
+ }
+
+ // If we recognize this profile, check that this format is supported by the
+ // highest level supported by the codec for that profile. (Ignore specified
+ // level beyond the above profile/level check as level is only used as a
+ // guidance. E.g. AVC Level 1 CIF format is supported if codec supports level 1.1
+ // even though max size for Level 1 is QCIF. However, MPEG2 Simple Profile
+ // 1080p format is not supported even if codec supports Main Profile Level High,
+ // as Simple Profile does not support 1080p.
+ int32_t maxLevel = 0;
+ for (ProfileLevel pl : mProfileLevels) {
+ if (pl.mProfile == profile && pl.mLevel > maxLevel) {
+ // H.263 levels are not completely ordered:
+ // Level45 support only implies Level10 support
+ if (!base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_H263)
+ || pl.mLevel != H263Level45
+ || maxLevel == H263Level10) {
+ maxLevel = pl.mLevel;
+ }
+ }
+ }
+ std::shared_ptr<CodecCapabilities> levelCaps
+ = CreateFromProfileLevel(mMediaType, profile, maxLevel);
+ // We must remove the profile from this format otherwise levelCaps.isFormatSupported
+ // will get into this same condition and loop forever. Furthermore, since levelCaps
+ // does not contain features and bitrate specific keys, keep only keys relevant for
+ // a level check.
+ sp<AMessage> levelCriticalFormat = new AMessage;
+
+ // critical keys will always contain KEY_MIME, but should also contain others to be
+ // meaningful
+ if ((isVideo() || isAudio()) && levelCaps != nullptr) {
+ const std::set<std::pair<std::string, AMessage::Type>> criticalKeys =
+ isVideo() ? VIDEO_LEVEL_CRITICAL_FORMAT_KEYS : AUDIO_LEVEL_CRITICAL_FORMAT_KEYS;
+ for (std::pair<std::string, AMessage::Type> key : criticalKeys) {
+ if (format->contains(key.first.c_str())) {
+ // AMessage::ItemData value = format->findItem(key.c_str());
+ // levelCriticalFormat->setItem(key.c_str(), value);
+ switch (key.second) {
+ case AMessage::kTypeInt32: {
+ int32_t value;
+ format->findInt32(key.first.c_str(), &value);
+ levelCriticalFormat->setInt32(key.first.c_str(), value);
+ break;
+ }
+ case AMessage::kTypeString: {
+ AString value;
+ format->findString(key.first.c_str(), &value);
+ levelCriticalFormat->setString(key.first.c_str(), value);
+ break;
+ }
+ default:
+ ALOGE("Unsupported type");
+ }
+ }
+ }
+ if (!levelCaps->isFormatSupported(levelCriticalFormat)) {
+ return false;
+ }
+ }
+ }
+ if (mAudioCaps && !mAudioCaps->supportsFormat(format)) {
+ return false;
+ }
+ if (mVideoCaps && !mVideoCaps->supportsFormat(format)) {
+ return false;
+ }
+ if (mEncoderCaps && !mEncoderCaps->supportsFormat(format)) {
+ return false;
+ }
+ return true;
+}
+
+bool CodecCapabilities::supportsProfileLevel(int32_t profile, int32_t level) const {
+ for (ProfileLevel pl: mProfileLevels) {
+ if (pl.mProfile != profile) {
+ continue;
+ }
+
+ // No specific level requested
+ if (level == -1) {
+ return true;
+ }
+
+ // AAC doesn't use levels
+ if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AAC)) {
+ return true;
+ }
+
+ // DTS doesn't use levels
+ if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS)
+ || base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS_HD)
+ || base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS_UHD)) {
+ return true;
+ }
+
+ // H.263 levels are not completely ordered:
+ // Level45 support only implies Level10 support
+ if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_H263)) {
+ if (pl.mLevel != level && pl.mLevel == H263Level45
+ && level > H263Level10) {
+ continue;
+ }
+ }
+
+ // MPEG4 levels are not completely ordered:
+ // Level1 support only implies Level0 (and not Level0b) support
+ if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_MPEG4)) {
+ if (pl.mLevel != level && pl.mLevel == MPEG4Level1
+ && level > MPEG4Level0) {
+ continue;
+ }
+ }
+
+ // HEVC levels incorporate both tiers and levels. Verify tier support.
+ if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_VIDEO_HEVC)) {
+ bool supportsHighTier =
+ (pl.mLevel & HEVCHighTierLevels) != 0;
+ bool checkingHighTier = (level & HEVCHighTierLevels) != 0;
+ // high tier levels are only supported by other high tier levels
+ if (checkingHighTier && !supportsHighTier) {
+ continue;
+ }
+ }
+
+ if (pl.mLevel >= level) {
+ // if we recognize the listed profile/level, we must also recognize the
+ // profile/level arguments.
+ if (CreateFromProfileLevel(mMediaType, profile, pl.mLevel) != nullptr) {
+ return CreateFromProfileLevel(mMediaType, profile, level) != nullptr;
+ }
+ return true;
+ }
+ }
+ return false;
+}
+
+sp<AMessage> CodecCapabilities::getDefaultFormat() const {
+ return mDefaultFormat;
+}
+
const std::string& CodecCapabilities::getMediaType() {
return mMediaType;
}
@@ -54,4 +313,130 @@
return mProfileLevels;
}
+std::vector<uint32_t> CodecCapabilities::getColorFormats() const {
+ return mColorFormats;
+}
+
+int32_t CodecCapabilities::getMaxSupportedInstances() const {
+ return mMaxSupportedInstances;
+}
+
+bool CodecCapabilities::isAudio() const {
+ return mAudioCaps != nullptr;
+}
+
+std::shared_ptr<AudioCapabilities>
+ CodecCapabilities::getAudioCapabilities() const {
+ return mAudioCaps;
+}
+
+bool CodecCapabilities::isEncoder() const {
+ return mEncoderCaps != nullptr;
+}
+
+std::shared_ptr<EncoderCapabilities>
+ CodecCapabilities::getEncoderCapabilities() const {
+ return mEncoderCaps;
+}
+
+bool CodecCapabilities::isVideo() const {
+ return mVideoCaps != nullptr;
+}
+
+std::shared_ptr<VideoCapabilities> CodecCapabilities::getVideoCapabilities() const {
+ return mVideoCaps;
+}
+
+// static
+std::shared_ptr<CodecCapabilities> CodecCapabilities::CreateFromProfileLevel(
+ std::string mediaType, int32_t profile, int32_t level, int32_t maxConcurrentInstances) {
+ ProfileLevel pl;
+ pl.mProfile = profile;
+ pl.mLevel = level;
+ sp<AMessage> defaultFormat = new AMessage;
+ defaultFormat->setString(KEY_MIME, mediaType.c_str());
+
+ std::vector<ProfileLevel> pls;
+ pls.push_back(pl);
+ std::vector<uint32_t> colFmts;
+ sp<AMessage> capabilitiesInfo = new AMessage;
+ std::shared_ptr<CodecCapabilities> ret(new CodecCapabilities());
+ ret->init(pls, colFmts, true /* encoder */, defaultFormat, capabilitiesInfo,
+ maxConcurrentInstances);
+ if (ret->getErrors() != 0) {
+ return nullptr;
+ }
+ return ret;
+}
+
+void CodecCapabilities::init(std::vector<ProfileLevel> profLevs, std::vector<uint32_t> colFmts,
+ bool encoder, sp<AMessage> &defaultFormat, sp<AMessage> &capabilitiesInfo,
+ int32_t maxConcurrentInstances) {
+ mColorFormats = colFmts;
+ mDefaultFormat = defaultFormat;
+ mCapabilitiesInfo = capabilitiesInfo;
+
+ AString mediaTypeAStr;
+ mDefaultFormat->findString(KEY_MIME, &mediaTypeAStr);
+ mMediaType = mediaTypeAStr.c_str();
+
+ /* VP9 introduced profiles around 2016, so some VP9 codecs may not advertise any
+ supported profiles. Determine the level for them using the info they provide. */
+ if (profLevs.size() == 0 && mMediaType == MIMETYPE_VIDEO_VP9) {
+ ProfileLevel profLev;
+ profLev.mProfile = VP9Profile0;
+ profLev.mLevel = VideoCapabilities::EquivalentVP9Level(capabilitiesInfo);
+ profLevs.push_back(profLev);
+ }
+ mProfileLevels = profLevs;
+
+ if (mediaTypeAStr.startsWithIgnoreCase("audio/")) {
+ mAudioCaps = AudioCapabilities::Create(mMediaType, profLevs, capabilitiesInfo);
+ mAudioCaps->getDefaultFormat(mDefaultFormat);
+ } else if (mediaTypeAStr.startsWithIgnoreCase("video/")
+ || mediaTypeAStr.equalsIgnoreCase(MIMETYPE_IMAGE_ANDROID_HEIC)) {
+ mVideoCaps = VideoCapabilities::Create(mMediaType, profLevs, capabilitiesInfo);
+ }
+
+ if (encoder) {
+ mEncoderCaps = EncoderCapabilities::Create(mMediaType, profLevs, capabilitiesInfo);
+ mEncoderCaps->getDefaultFormat(mDefaultFormat);
+ }
+
+ mMaxSupportedInstances = maxConcurrentInstances > 0
+ ? maxConcurrentInstances : DEFAULT_MAX_SUPPORTED_INSTANCES;
+
+ int32_t maxInstances = mMaxSupportedInstances;
+ capabilitiesInfo->findInt32("max-concurrent-instances", &maxInstances);
+ mMaxSupportedInstances =
+ Range(1, MAX_SUPPORTED_INSTANCES_LIMIT).clamp(maxInstances);
+
+ mFeaturesRequired.clear();
+ mFeaturesSupported.clear();
+ for (Feature feat: getValidFeatures()) {
+ std::string key = KEY_FEATURE_;
+ key = key + feat.mName;
+ int yesNo = -1;
+ if (!capabilitiesInfo->findInt32(key.c_str(), &yesNo)) {
+ continue;
+ }
+ if (yesNo > 0) {
+ mFeaturesRequired.insert(feat.mName);
+ }
+ mFeaturesSupported.insert(feat.mName);
+ if (!feat.mInternal) {
+ mDefaultFormat->setInt32(key.c_str(), 1);
+ }
+ }
+}
+
+int32_t CodecCapabilities::getErrors() const {
+ if (mAudioCaps) {
+ return mAudioCaps->mError;
+ } else if (mVideoCaps) {
+ return mVideoCaps->mError;
+ }
+ return 0;
+}
+
} // namespace android
\ No newline at end of file
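Illustrative sketch (not part of this change): how a caller might use the new CreateFromProfileLevel() factory to probe whether a media type / profile / level combination is understood by the framework. The AVC constants are assumed to come from MediaCodecConstants.h; error handling is omitted.

// Hypothetical caller-side check; only CreateFromProfileLevel() comes from this patch.
#include <media/CodecCapabilities.h>
#include <media/stagefright/MediaCodecConstants.h>

using namespace android;

static bool isAvcHighLevel4Understood() {
    // Returns nullptr when the media type or the profile/level combination is unknown.
    std::shared_ptr<CodecCapabilities> caps = CodecCapabilities::CreateFromProfileLevel(
            MIMETYPE_VIDEO_AVC, AVCProfileHigh, AVCLevel4);
    return caps != nullptr;
}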
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
index d5d1a09..3834278 100644
--- a/media/libmedia/MediaCodecInfo.cpp
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -26,6 +26,9 @@
namespace android {
+// Initialize the max supported instances with the default value.
+int32_t MediaCodecInfo::sMaxSupportedInstances = 0;
+
/** This redundant redeclaration is needed for C++ pre 14 */
constexpr char MediaCodecInfo::Capabilities::FEATURE_ADAPTIVE_PLAYBACK[];
constexpr char MediaCodecInfo::Capabilities::FEATURE_DYNAMIC_TIMESTAMP[];
@@ -169,6 +172,15 @@
return NULL;
}
+const std::shared_ptr<CodecCapabilities> MediaCodecInfo::getCodecCapsFor(
+ const char *mediaType) const {
+ ssize_t ix = getCodecCapIndex(mediaType);
+ if (ix >= 0) {
+ return mCodecCaps.valueAt(ix);
+ }
+ return nullptr;
+}
+
const char *MediaCodecInfo::getCodecName() const {
return mName.c_str();
}
@@ -179,6 +191,7 @@
// static
sp<MediaCodecInfo> MediaCodecInfo::FromParcel(const Parcel &parcel) {
+ sMaxSupportedInstances = parcel.readInt32();
AString name = AString::FromParcel(parcel);
AString owner = AString::FromParcel(parcel);
Attributes attributes = static_cast<Attributes>(parcel.readInt32());
@@ -201,12 +214,17 @@
return NULL;
if (info != NULL) {
info->mCaps.add(mediaType, caps);
+ std::shared_ptr<CodecCapabilities> codecCaps
+ = MediaCodecInfoWriter::BuildCodecCapabilities(
+ mediaType.c_str(), caps, info->isEncoder());
+ info->mCodecCaps.add(mediaType, codecCaps);
}
}
return info;
}
status_t MediaCodecInfo::writeToParcel(Parcel *parcel) const {
+ parcel->writeInt32(sMaxSupportedInstances);
mName.writeToParcel(parcel);
mOwner.writeToParcel(parcel);
parcel->writeInt32(mAttributes);
@@ -234,6 +252,25 @@
return -1;
}
+ssize_t MediaCodecInfo::getCodecCapIndex(const char *mediaType) const {
+ if (mediaType == nullptr) {
+ return -1;
+ }
+
+ if (mCodecCaps.size() != mCaps.size()) {
+ ALOGE("Size of mCodecCaps and mCaps do not match, which are %zu and %zu",
+ mCodecCaps.size(), mCaps.size());
+ }
+
+ for (size_t ix = 0; ix < mCodecCaps.size(); ix++) {
+ if (mCodecCaps.keyAt(ix).equalsIgnoreCase(mediaType)) {
+ return ix;
+ }
+ }
+
+ return -1;
+}
+
MediaCodecInfo::MediaCodecInfo()
: mAttributes((MediaCodecInfo::Attributes)0),
mRank(0x100) {
@@ -283,6 +320,52 @@
return false;
}
+void MediaCodecInfoWriter::createCodecCaps() {
+ mInfo->mCodecCaps.clear();
+ for (size_t ix = 0; ix < mInfo->mCaps.size(); ix++) {
+ AString mediaType = mInfo->mCaps.keyAt(ix);
+ sp<MediaCodecInfo::Capabilities> caps = mInfo->mCaps.valueAt(ix);
+ mInfo->mCodecCaps.add(mediaType,
+ BuildCodecCapabilities(mediaType.c_str(), caps, mInfo->isEncoder(),
+ MediaCodecInfo::sMaxSupportedInstances));
+ }
+}
+
+// static
+std::shared_ptr<CodecCapabilities> MediaCodecInfoWriter::BuildCodecCapabilities(
+ const char *mediaType, sp<MediaCodecInfo::Capabilities> caps, bool isEncoder,
+ int32_t maxSupportedInstances) {
+ Vector<ProfileLevel> profileLevels_;
+ Vector<uint32_t> colorFormats_;
+ caps->getSupportedProfileLevels(&profileLevels_);
+ caps->getSupportedColorFormats(&colorFormats_);
+
+ std::vector<ProfileLevel> profileLevels;
+ std::vector<uint32_t> colorFormats;
+ for (ProfileLevel pl : profileLevels_) {
+ profileLevels.push_back(pl);
+ }
+ for (uint32_t cf : colorFormats_) {
+ colorFormats.push_back(cf);
+ }
+
+ sp<AMessage> defaultFormat = new AMessage();
+ defaultFormat->setString("mime", mediaType);
+
+ sp<AMessage> capabilitiesInfo = caps->getDetails();
+
+ std::shared_ptr<CodecCapabilities> codecCaps = std::make_shared<CodecCapabilities>();
+ codecCaps->init(profileLevels, colorFormats, isEncoder, defaultFormat,
+ capabilitiesInfo, maxSupportedInstances);
+
+ return codecCaps;
+}
+
+// static
+void MediaCodecInfoWriter::SetMaxSupportedInstances(int32_t maxSupportedInstances) {
+ MediaCodecInfo::sMaxSupportedInstances = maxSupportedInstances;
+}
+
MediaCodecInfoWriter::MediaCodecInfoWriter(MediaCodecInfo* info) :
mInfo(info) {
}
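A hedged sketch of consuming the new per-media-type cache. The helper name and the way 'info' is obtained are assumptions for illustration; only getCodecCapsFor() and getMaxSupportedInstances() come from this change.

#define LOG_TAG "CodecCapsSketch"
#include <media/CodecCapabilities.h>
#include <media/MediaCodecInfo.h>
#include <utils/Log.h>

using namespace android;

// 'info' is assumed to come from an existing MediaCodecList lookup (not shown here).
static void logMaxInstances(const sp<MediaCodecInfo>& info, const char* mediaType) {
    std::shared_ptr<CodecCapabilities> caps = info->getCodecCapsFor(mediaType);
    if (caps == nullptr) {
        ALOGW("no CodecCapabilities cached for %s", mediaType);
        return;
    }
    ALOGI("%s supports at most %d concurrent instances",
            mediaType, caps->getMaxSupportedInstances());
}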
diff --git a/media/libmedia/include/media/CodecCapabilities.h b/media/libmedia/include/media/CodecCapabilities.h
index 7c631d1..0611d8c 100644
--- a/media/libmedia/include/media/CodecCapabilities.h
+++ b/media/libmedia/include/media/CodecCapabilities.h
@@ -40,6 +40,26 @@
const sp<AMessage> &format);
/**
+ * Retrieve the codec capabilities for a certain {@code mime type}, {@code
+ * profile} and {@code level}. If the type or profile-level combination
+ * is not understood by the framework, it returns null.
+ * <p class=note> In {@link android.os.Build.VERSION_CODES#M}, calling this
+ * method without calling any method of the {@link MediaCodecList} class beforehand
+ * results in a {@link NullPointerException}.</p>
+ */
+ static std::shared_ptr<CodecCapabilities> CreateFromProfileLevel(std::string mediaType,
+ int32_t profile, int32_t level, int32_t maxConcurrentInstances = -1);
+
+ CodecCapabilities() {};
+
+ /**
+ * Init CodecCapabilities with settings.
+ */
+ void init(std::vector<ProfileLevel> profLevs, std::vector<uint32_t> colFmts, bool encoder,
+ sp<AMessage> &defaultFormat, sp<AMessage> &capabilitiesInfo,
+ int32_t maxConcurrentInstances = 0);
+
+ /**
* Returns the media type for which this codec-capability object was created.
*/
const std::string& getMediaType();
@@ -49,13 +69,209 @@
*/
const std::vector<ProfileLevel>& getProfileLevels();
+ /**
+ * Returns the supported color formats.
+ */
+ std::vector<uint32_t> getColorFormats() const;
+
+ /**
+ * Returns a media format with default values for configurations that have defaults.
+ */
+ sp<AMessage> getDefaultFormat() const;
+
+ /**
+ * Returns the maximum number of supported concurrent codec instances.
+ * <p>
+ * This is a hint for an upper bound. Applications should not expect to successfully
+ * operate more instances than the returned value, but the actual number of
+ * concurrently operable instances may be less as it depends on the available
+ * resources at time of use.
+ */
+ int32_t getMaxSupportedInstances() const;
+
+ /**
+ * Returns the audio capabilities or {@code null} if this is not an audio codec.
+ */
+ std::shared_ptr<AudioCapabilities> getAudioCapabilities() const;
+
+ /**
+ * Returns the video capabilities or {@code null} if this is not a video codec.
+ */
+ std::shared_ptr<VideoCapabilities> getVideoCapabilities() const;
+
+ /**
+ * Returns the encoding capabilities or {@code null} if this is not an encoder.
+ */
+ std::shared_ptr<EncoderCapabilities> getEncoderCapabilities() const;
+
+ std::vector<std::string> validFeatures() const;
+
+ /**
+ * Query codec feature capabilities.
+ * <p>
+ * These are the features that can be used with the codec. These
+ * include optional features that can be turned on, as well as
+ * features that are always on.
+ */
+ bool isFeatureSupported(const std::string &name) const;
+
+ /**
+ * Query codec feature requirements.
+ * <p>
+ * These features are required to be used by the codec, and as such,
+ * they are always turned on.
+ */
+ bool isFeatureRequired(const std::string &name) const;
+
+ bool isRegular() const;
+
+ /**
+ * Query whether codec supports a given {@link MediaFormat}.
+ *
+ * <p class=note>
+ * <strong>Note:</strong> On {@link android.os.Build.VERSION_CODES#LOLLIPOP},
+ * {@code format} must not contain a {@linkplain MediaFormat#KEY_FRAME_RATE
+ * frame rate}. Use
+ * <code class=prettyprint>format.setString(MediaFormat.KEY_FRAME_RATE, null)</code>
+ * to clear any existing frame rate setting in the format.
+ * <p>
+ *
+ * The following table summarizes the format keys considered by this method.
+ * This is especially important to consider when targeting a higher SDK version than the
+ * minimum SDK version, as this method will disregard some keys on devices below the target
+ * SDK version.
+ *
+ * <table style="width: 0%">
+ * <thead>
+ * <tr>
+ * <th rowspan=3>OS Version(s)</th>
+ * <th colspan=3>{@code MediaFormat} keys considered for</th>
+ * </tr><tr>
+ * <th>Audio Codecs</th>
+ * <th>Video Codecs</th>
+ * <th>Encoders</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP}</td>
+ * <td rowspan=3>{@link MediaFormat#KEY_MIME}<sup>*</sup>,<br>
+ * {@link MediaFormat#KEY_SAMPLE_RATE},<br>
+ * {@link MediaFormat#KEY_CHANNEL_COUNT},</td>
+ * <td>{@link MediaFormat#KEY_MIME}<sup>*</sup>,<br>
+ * {@link CodecCapabilities#FEATURE_AdaptivePlayback}<sup>D</sup>,<br>
+ * {@link CodecCapabilities#FEATURE_SecurePlayback}<sup>D</sup>,<br>
+ * {@link CodecCapabilities#FEATURE_TunneledPlayback}<sup>D</sup>,<br>
+ * {@link MediaFormat#KEY_WIDTH},<br>
+ * {@link MediaFormat#KEY_HEIGHT},<br>
+ * <strong>no</strong> {@code KEY_FRAME_RATE}</td>
+ * <td rowspan=10>as to the left, plus<br>
+ * {@link MediaFormat#KEY_BITRATE_MODE},<br>
+ * {@link MediaFormat#KEY_PROFILE}
+ * (and/or {@link MediaFormat#KEY_AAC_PROFILE}<sup>~</sup>),<br>
+ * <!-- {link MediaFormat#KEY_QUALITY},<br> -->
+ * {@link MediaFormat#KEY_COMPLEXITY}
+ * (and/or {@link MediaFormat#KEY_FLAC_COMPRESSION_LEVEL}<sup>~</sup>)</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP_MR1}</td>
+ * <td rowspan=2>as above, plus<br>
+ * {@link MediaFormat#KEY_FRAME_RATE}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#M}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#N}</td>
+ * <td rowspan=2>as above, plus<br>
+ * {@link MediaFormat#KEY_PROFILE},<br>
+ * <!-- {link MediaFormat#KEY_MAX_BIT_RATE},<br> -->
+ * {@link MediaFormat#KEY_BIT_RATE}</td>
+ * <td rowspan=2>as above, plus<br>
+ * {@link MediaFormat#KEY_PROFILE},<br>
+ * {@link MediaFormat#KEY_LEVEL}<sup>+</sup>,<br>
+ * <!-- {link MediaFormat#KEY_MAX_BIT_RATE},<br> -->
+ * {@link MediaFormat#KEY_BIT_RATE},<br>
+ * {@link CodecCapabilities#FEATURE_IntraRefresh}<sup>E</sup></td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#N_MR1}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#O}</td>
+ * <td rowspan=3 colspan=2>as above, plus<br>
+ * {@link CodecCapabilities#FEATURE_PartialFrame}<sup>D</sup></td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#O_MR1}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#P}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#Q}</td>
+ * <td colspan=2>as above, plus<br>
+ * {@link CodecCapabilities#FEATURE_FrameParsing}<sup>D</sup>,<br>
+ * {@link CodecCapabilities#FEATURE_MultipleFrames},<br>
+ * {@link CodecCapabilities#FEATURE_DynamicTimestamp}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#R}</td>
+ * <td colspan=2>as above, plus<br>
+ * {@link CodecCapabilities#FEATURE_LowLatency}<sup>D</sup></td>
+ * </tr>
+ * <tr>
+ * <td colspan=4>
+ * <p class=note><strong>Notes:</strong><br>
+ * *: must be specified; otherwise, method returns {@code false}.<br>
+ * +: method does not verify that the format parameters are supported
+ * by the specified level.<br>
+ * D: decoders only<br>
+ * E: encoders only<br>
+ * ~: if both keys are provided, the values must match
+ * </td>
+ * </tr>
+ * </tbody>
+ * </table>
+ *
+ * @param format media format with optional feature directives.
+ * @return whether the codec capabilities support the given format
+ * and feature requests.
+ */
+ bool isFormatSupported(const sp<AMessage> &format) const;
+
+ /**
+ * Whether this CodecCapabilities object contains an AudioCapabilities.
+ *
+ * Not a public API for users.
+ */
+ bool isAudio() const;
+
+ /**
+ * Whether this CodecCapabilities object contains a VideoCapabilities.
+ *
+ * Not a public API for users.
+ */
+ bool isVideo() const;
+
+ /**
+ * Whether this CodecCapabilities object contains an EncoderCapabilities.
+ *
+ * Not a public API for users.
+ */
+ bool isEncoder() const;
+
private:
std::string mMediaType;
std::vector<ProfileLevel> mProfileLevels;
+ std::vector<uint32_t> mColorFormats;
+ int32_t mMaxSupportedInstances;
+
+ sp<AMessage> mDefaultFormat;
+ sp<AMessage> mCapabilitiesInfo;
+
+ // Features
+ std::set<std::string> mFeaturesSupported;
+ std::set<std::string> mFeaturesRequired;
std::shared_ptr<AudioCapabilities> mAudioCaps;
std::shared_ptr<VideoCapabilities> mVideoCaps;
std::shared_ptr<EncoderCapabilities> mEncoderCaps;
+
+ bool supportsProfileLevel(int32_t profile, int32_t level) const;
+ std::vector<Feature> getValidFeatures() const;
+ int32_t getErrors() const;
};
} // namespace android
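Illustrative only: querying feature support through the new API. FEATURE_AdaptivePlayback is an existing constant from MediaCodecConstants.h; 'caps' is assumed to have been obtained via getCodecCapsFor() or CreateFromProfileLevel().

#define LOG_TAG "FeatureQuerySketch"
#include <media/CodecCapabilities.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <utils/Log.h>

using namespace android;

static void logAdaptivePlayback(const std::shared_ptr<CodecCapabilities>& caps) {
    if (caps->isFeatureSupported(FEATURE_AdaptivePlayback)) {
        // isFeatureRequired() reports features that are always turned on.
        ALOGI("adaptive-playback: %s",
                caps->isFeatureRequired(FEATURE_AdaptivePlayback) ? "always on" : "optional");
    }
}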
diff --git a/media/libmedia/include/media/MediaCodecInfo.h b/media/libmedia/include/media/MediaCodecInfo.h
index 72aca98..60e383a 100644
--- a/media/libmedia/include/media/MediaCodecInfo.h
+++ b/media/libmedia/include/media/MediaCodecInfo.h
@@ -192,6 +192,7 @@
Attributes getAttributes() const;
void getSupportedMediaTypes(Vector<AString> *mediaTypes) const;
const sp<Capabilities> getCapabilitiesFor(const char *mediaType) const;
+ const std::shared_ptr<CodecCapabilities> getCodecCapsFor(const char *mediaType) const;
const char *getCodecName() const;
/**
@@ -229,14 +230,21 @@
status_t writeToParcel(Parcel *parcel) const;
private:
+ /**
+ * Max supported instances, taken from the MediaCodecList global setting.
+ */
+ static int32_t sMaxSupportedInstances;
+
AString mName;
AString mOwner;
Attributes mAttributes;
KeyedVector<AString, sp<Capabilities> > mCaps;
+ KeyedVector<AString, std::shared_ptr<CodecCapabilities>> mCodecCaps;
Vector<AString> mAliases;
uint32_t mRank;
ssize_t getCapabilityIndex(const char *mediaType) const;
+ ssize_t getCodecCapIndex(const char *mediaType) const;
/**
* Construct an `MediaCodecInfo` object. After the construction, its
@@ -264,6 +272,15 @@
*/
struct MediaCodecInfoWriter {
/**
+ * Build a CodecCapabilities object from a Capabilities object.
+ */
+ static std::shared_ptr<CodecCapabilities> BuildCodecCapabilities(const char *mediaType,
+ sp<MediaCodecInfo::Capabilities> caps, bool isEncoder, int maxSupportedInstances = 0);
+ /**
+ * Set the max supported instances global setting from MediaCodecList.
+ */
+ static void SetMaxSupportedInstances(int32_t maxSupportedInstances);
+ /**
* Set the name of the codec.
*
* @param name The new name.
@@ -319,6 +336,10 @@
* @param rank The rank of the component.
*/
void setRank(uint32_t rank);
+ /**
+ * Create the CodecCapabilities map from the Capabilities map.
+ */
+ void createCodecCaps();
private:
/**
* The associated `MediaCodecInfo`.
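A minimal sketch, assuming a writer wrapping a MediaCodecInfo whose Capabilities have already been populated, of the order in which a codec-list builder is expected to use the two new hooks; the OmxInfoBuilder change below follows this pattern.

#include <media/MediaCodecInfo.h>

// Hypothetical helper; 'writer' and 'globalMaxInstances' are assumptions for illustration.
static void finalizeCodecInfo(android::MediaCodecInfoWriter* writer,
                              int32_t globalMaxInstances) {
    // Publish the "max-concurrent-instances" global setting first ...
    android::MediaCodecInfoWriter::SetMaxSupportedInstances(globalMaxInstances);
    // ... then build the per-media-type CodecCapabilities cache from the
    // Capabilities that were added earlier.
    writer->createCodecCaps();
}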
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index d084f10..9ed5343 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -99,6 +99,7 @@
static_libs: [
"libstagefright_esds",
+ "android.media.extractor.flags-aconfig-cc",
],
export_include_dirs: [
@@ -321,6 +322,7 @@
static_libs: [
"android.media.codec-aconfig-cc",
+ "android.media.extractor.flags-aconfig-cc",
"com.android.media.flags.editing-aconfig-cc",
"libstagefright_esds",
"libstagefright_color_conversion",
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 8cb1674..a374bf4 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -6090,9 +6090,9 @@
}
if (csd->size() == 0) {
ALOGW("csd-%zu size is 0", i);
+ } else {
+ mCSD.push_back(csd);
}
-
- mCSD.push_back(csd);
++i;
}
diff --git a/media/libstagefright/OmxInfoBuilder.cpp b/media/libstagefright/OmxInfoBuilder.cpp
index 79ffdeb..1cb8f14 100644
--- a/media/libstagefright/OmxInfoBuilder.cpp
+++ b/media/libstagefright/OmxInfoBuilder.cpp
@@ -21,6 +21,8 @@
#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
#endif
+#include <cstdlib>
+
#include <android-base/properties.h>
#include <utils/Log.h>
@@ -131,6 +133,10 @@
for (const auto& p : serviceAttributes) {
writer->addGlobalSetting(
p.key.c_str(), p.value.c_str());
+ if (p.key == "max-concurrent-instances") {
+ MediaCodecInfoWriter::SetMaxSupportedInstances(
+ (int32_t)strtol(p.value.c_str(), NULL, 10));
+ }
}
// Convert roles to lists of codecs
@@ -217,6 +223,8 @@
ALOGW("Fail to add media type %s to codec %s",
typeName.c_str(), nodeName.c_str());
info->removeMediaType(typeName.c_str());
+ } else {
+ info->createCodecCaps();
}
}
}
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 86741a6..50eeb62 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -44,6 +44,8 @@
#include <media/AudioParameter.h>
#include <system/audio.h>
+#include <com_android_media_extractor_flags.h>
+
// TODO : Remove the defines once mainline media is built against NDK >= 31.
// The mp4 extractor is part of mainline and builds against NDK 29 as of
// writing. These keys are available only from NDK 31:
@@ -1443,6 +1445,17 @@
buffer->meta()->setInt64("timeUs", 0);
msg->setBuffer("csd-0", buffer);
parseAV1ProfileLevelFromCsd(buffer, msg);
+ } else if (com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+ meta->findData(kKeyAPVC, &type, &data, &size)) {
+ sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
+ if (buffer.get() == NULL || buffer->base() == NULL) {
+ return NO_MEMORY;
+ }
+ memcpy(buffer->data(), data, size);
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-0", buffer);
} else if (meta->findData(kKeyESDS, &type, &data, &size)) {
ESDS esds((const char *)data, size);
if (esds.InitCheck() != (status_t)OK) {
@@ -2091,6 +2104,9 @@
} else if (mime == MEDIA_MIMETYPE_VIDEO_AV1 ||
mime == MEDIA_MIMETYPE_IMAGE_AVIF) {
meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
+ } else if (com::android::media::extractor::flags::extractor_mp4_enable_apv() &&
+ mime == MEDIA_MIMETYPE_VIDEO_APV) {
+ meta->setData(kKeyAPVC, 0, csd0->data(), csd0->size());
} else if (mime == MEDIA_MIMETYPE_VIDEO_DOLBY_VISION) {
int32_t profile = -1;
uint8_t blCompatibilityId = -1;
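For reference, a hedged sketch of the aconfig gate used in the two hunks above; both the header and the flag accessor are the ones pulled in by this change's new build dependency.

#include <com_android_media_extractor_flags.h>

// Returns true only when the extractor_mp4_enable_apv aconfig flag is enabled,
// i.e. when the kKeyAPVC <-> csd-0 conversion above is active.
static bool apvInMp4Enabled() {
    return com::android::media::extractor::flags::extractor_mp4_enable_apv();
}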
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index 72a2551..2fb2d59 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -101,6 +101,7 @@
<Limit name="bitrate" range="1-240000000"/>
<Limit name="block-size" value="16x16" />
<Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec"/>
</MediaCodec>
</Decoders>
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index 20c97dc..c79ac5c 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -262,6 +262,7 @@
<Limit name="block-size" value="16x16" />
<Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec"/>
</MediaCodec>
</Decoders>
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 8f2f162..b0f671d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -854,12 +854,17 @@
}
inline constexpr char FEATURE_AdaptivePlayback[] = "adaptive-playback";
+inline constexpr char FEATURE_DynamicTimestamp[] = "dynamic-timestamp";
inline constexpr char FEATURE_EncodingStatistics[] = "encoding-statistics";
+inline constexpr char FEATURE_FrameParsing[] = "frame-parsing";
+inline constexpr char FEATURE_HdrEditing[] = "hdr-editing";
inline constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
+inline constexpr char FEATURE_LowLatency[] = "low-latency";
inline constexpr char FEATURE_MultipleFrames[] = "multiple-frames";
inline constexpr char FEATURE_PartialFrame[] = "partial-frame";
inline constexpr char FEATURE_QpBounds[] = "qp-bounds";
inline constexpr char FEATURE_SecurePlayback[] = "secure-playback";
+inline constexpr char FEATURE_SpecialCodec[] = "special-codec";
inline constexpr char FEATURE_TunneledPlayback[] = "tunneled-playback";
// from MediaFormat.java
diff --git a/media/module/libapexcodecs/Android.bp b/media/module/libapexcodecs/Android.bp
index 790b749..dbda81b 100644
--- a/media/module/libapexcodecs/Android.bp
+++ b/media/module/libapexcodecs/Android.bp
@@ -40,6 +40,15 @@
}
+cc_library_headers {
+ name: "libapexcodecs-header",
+ visibility: [
+ "//frameworks/av/apex:__subpackages__",
+ "//frameworks/av/media/codec2/hal/client",
+ ],
+ export_include_dirs: ["include"],
+}
+
cc_library {
name: "libapexcodecs-testing",
defaults: ["libapexcodecs-defaults"],
diff --git a/media/module/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp b/media/module/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
index d21908f..7905e4f 100644
--- a/media/module/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
+++ b/media/module/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
@@ -458,6 +458,12 @@
if (bufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
LOG(DEBUG) << "EOS from decoder.";
+ // NOTE - b/360057459
+ // There is a synchronization problem between feeding the frame to the encoder input surface
+ // and signaling end of stream.
+ // Wait before signaling end of stream so that the input surface has time to feed the
+ // remaining frames to the encoder.
+ std::this_thread::sleep_for(std::chrono::milliseconds(10));
media_status_t status = AMediaCodec_signalEndOfInputStream(mEncoder->getCodec());
if (status != AMEDIA_OK) {
LOG(ERROR) << "SignalEOS on encoder returned error: " << status;
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 7b19ac0..7797841 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -655,6 +655,28 @@
}
}
+media_status_t
+AImageReader::setUsage(uint64_t usage) {
+ Mutex::Autolock _l(mLock);
+ if (!mIsOpen || mBufferItemConsumer == nullptr) {
+ ALOGE("not ready to perform setUsage()");
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ if (mUsage == usage) {
+ return AMEDIA_OK;
+ }
+
+ uint64_t halUsage = AHardwareBuffer_convertToGrallocUsageBits(mUsage);
+ status_t ret = mBufferItemConsumer->setConsumerUsageBits(halUsage);
+ if (ret != OK) {
+ ALOGE("setConsumerUsageBits() failed %d", ret);
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+ mUsage = usage;
+ mHalUsage = halUsage;
+ return AMEDIA_OK;
+}
+
static
media_status_t validateParameters(int32_t width, int32_t height, int32_t format,
uint64_t usage, int32_t maxImages,
@@ -912,3 +934,14 @@
reader->setBufferRemovedListener(listener);
return AMEDIA_OK;
}
+
+EXPORT
+media_status_t AImageReader_setUsage(
+ AImageReader *reader, uint64_t usage) {
+ ALOGV("%s", __FUNCTION__);
+ if (reader == nullptr) {
+ ALOGE("%s: invalid argument! reader %p", __FUNCTION__, reader);
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ return reader->setUsage(usage);
+}
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index 0199616..1c50d83 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -20,6 +20,7 @@
#include <inttypes.h>
#include <media/NdkImageReader.h>
+#include <media-vndk/VndkImageReader.h>
#include <utils/List.h>
#include <utils/Mutex.h>
@@ -67,6 +68,7 @@
media_status_t setImageListener(AImageReader_ImageListener* listener);
media_status_t setBufferRemovedListener(AImageReader_BufferRemovedListener* listener);
+ media_status_t setUsage(uint64_t usage);
media_status_t acquireNextImage(/*out*/AImage** image, /*out*/int* fenceFd);
media_status_t acquireLatestImage(/*out*/AImage** image, /*out*/int* fenceFd);
@@ -120,7 +122,7 @@
const int32_t mWidth;
const int32_t mHeight;
int32_t mFormat;
- const uint64_t mUsage; // AHARDWAREBUFFER_USAGE_* flags.
+ uint64_t mUsage; // AHARDWAREBUFFER_USAGE_* flags.
const int32_t mMaxImages;
// TODO(jwcai) Seems completely unused in AImageReader class.
diff --git a/media/ndk/include/media-vndk/VndkImageReader.h b/media/ndk/include/media-vndk/VndkImageReader.h
new file mode 100644
index 0000000..c67a38c
--- /dev/null
+++ b/media/ndk/include/media-vndk/VndkImageReader.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _VNDK_IMAGE_READER_H
+#define _VNDK_IMAGE_READER_H
+
+// vndk is a superset of the NDK
+#include <media/NdkImageReader.h>
+
+__BEGIN_DECLS
+
+/**
+ * Set the usage of this image reader.
+ *
+ * <p>Note that calling this method will replace the previously set usage.</p>
+ *
+ * <p>Note: This will trigger re-allocation and could cause producer failures mid-stream
+ * if the new usage combination isn't supported; it should therefore be avoided
+ * whenever possible.</p>
+ *
+ * Available since API level 36.
+ *
+ * @param reader The image reader of interest.
+ * @param usage specifies how the consumer will access the AImage.
+ * See {@link AImageReader_newWithUsage} parameter description for more details.
+ * @return <ul>
+ * <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ * <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader is NULL.</li>
+ * <li>{@link AMEDIA_ERROR_UNKNOWN} if the method fails for some other reason.</li></ul>
+ *
+ * @see AImage_getHardwareBuffer
+ */
+media_status_t AImageReader_setUsage(
+ AImageReader* _Nonnull reader, uint64_t usage) __INTRODUCED_IN(36);
+
+__END_DECLS
+
+#endif //_VNDK_IMAGE_READER_H
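Illustrative only: a vendor-side caller of the new entry point. AImageReader creation and the AHARDWAREBUFFER_USAGE_* flags are existing NDK APIs and are assumptions of this sketch, not part of the change.

#include <android/hardware_buffer.h>
#include <media-vndk/VndkImageReader.h>

// 'reader' is assumed to have been created with AImageReader_newWithUsage() elsewhere.
static media_status_t switchToCpuReadUsage(AImageReader* reader) {
    // Replaces the previously set usage and may trigger buffer re-allocation.
    return AImageReader_setUsage(reader, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
}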
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 262c169..8fb203f 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -16,6 +16,7 @@
AImageReader_newWithDataSpace; # introduced=UpsideDownCake
AImageReader_setBufferRemovedListener; # introduced=26
AImageReader_setImageListener; # introduced=24
+ AImageReader_setUsage; # introduced=36 llndk
AImage_delete; # introduced=24
AImage_deleteAsync; # introduced=26
AImage_getCropRect; # introduced=24
diff --git a/media/utils/EventLogTags.logtags b/media/utils/EventLogTags.logtags
index c397f34..5b98b0f 100644
--- a/media/utils/EventLogTags.logtags
+++ b/media/utils/EventLogTags.logtags
@@ -31,7 +31,7 @@
# 6: Percent
# Default value for data of type int/long is 2 (bytes).
#
-# See system/core/logcat/event.logtags for the original definition of the tags.
+# See system/logging/logcat/event.logtags for the original definition of the tags.
# 61000 - 61199 reserved for audioserver
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 8215247..282f3fa 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -98,10 +98,6 @@
static constexpr char kAudioServiceName[] = "audio";
-// In order to avoid invalidating offloaded tracks each time a Visualizer is turned on and off
-// we define a minimum time during which a global effect is considered enabled.
-static const nsecs_t kMinGlobalEffectEnabletimeNs = seconds(7200);
-
// Keep a strong reference to media.log service around forever.
// The service is within our parent process so it can never die in a way that we could observe.
// These two variables are const after initialization.
@@ -4842,11 +4838,6 @@
bool AudioFlinger::isNonOffloadableGlobalEffectEnabled_l() const
{
- if (mGlobalEffectEnableTime != 0 &&
- ((systemTime() - mGlobalEffectEnableTime) < kMinGlobalEffectEnabletimeNs)) {
- return true;
- }
-
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
const auto thread = mPlaybackThreads.valueAt(i);
audio_utils::lock_guard l(thread->mutex());
@@ -4862,8 +4853,6 @@
{
audio_utils::lock_guard _l(mutex());
- mGlobalEffectEnableTime = systemTime();
-
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
const sp<IAfPlaybackThread> t = mPlaybackThreads.valueAt(i);
if (t->type() == IAfThreadBase::OFFLOAD) {
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 6777075..c229e83 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -760,9 +760,6 @@
std::atomic<size_t> mClientSharedHeapSize = kMinimumClientSharedHeapSizeBytes;
static constexpr size_t kMinimumClientSharedHeapSizeBytes = 1024 * 1024; // 1MB
- // when a global effect was last enabled
- nsecs_t mGlobalEffectEnableTime GUARDED_BY(mutex()) = 0;
-
/* const */ sp<IAfPatchPanel> mPatchPanel;
const sp<EffectsFactoryHalInterface> mEffectsFactoryHal =
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index b7c0bb3..b82a9af 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -3520,26 +3520,8 @@
void PlaybackThread::checkSilentMode_l()
{
- if (!mMasterMute) {
- char value[PROPERTY_VALUE_MAX];
- if (mOutDeviceTypeAddrs.empty()) {
- ALOGD("ro.audio.silent is ignored since no output device is set");
- return;
- }
- if (isSingleDeviceType(outDeviceTypes_l(), AUDIO_DEVICE_OUT_REMOTE_SUBMIX)) {
- ALOGD("ro.audio.silent will be ignored for threads on AUDIO_DEVICE_OUT_REMOTE_SUBMIX");
- return;
- }
- if (property_get("ro.audio.silent", value, "0") > 0) {
- char *endptr;
- unsigned long ul = strtoul(value, &endptr, 0);
- if (*endptr == '\0' && ul != 0) {
- ALOGW("%s: mute from ro.audio.silent. Silence is golden", __func__);
- // The setprop command will not allow a property to be changed after
- // the first time it is set, so we don't have to worry about un-muting.
- setMasterMute_l(true);
- }
- }
+ if (property_get_bool("ro.audio.silent", false)) {
+ ALOGW("ro.audio.silent is now ignored");
}
}
@@ -8436,7 +8418,6 @@
}
if (invalidate) {
activeTrack->invalidate();
- ALOG_ASSERT(fastTrackToRemove == 0);
fastTrackToRemove = activeTrack;
removeTrack_l(activeTrack);
mActiveTracks.remove(activeTrack);
@@ -11201,18 +11182,8 @@
void MmapPlaybackThread::checkSilentMode_l()
{
- if (!mMasterMute) {
- char value[PROPERTY_VALUE_MAX];
- if (property_get("ro.audio.silent", value, "0") > 0) {
- char *endptr;
- unsigned long ul = strtoul(value, &endptr, 0);
- if (*endptr == '\0' && ul != 0) {
- ALOGW("%s: mute from ro.audio.silent. Silence is golden", __func__);
- // The setprop command will not allow a property to be changed after
- // the first time it is set, so we don't have to worry about un-muting.
- setMasterMute_l(true);
- }
- }
+ if (property_get_bool("ro.audio.silent", false)) {
+ ALOGW("ro.audio.silent is now ignored");
}
}
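A minimal sketch of the property check that the two checkSilentMode_l() implementations above now share; property_get_bool() is the existing libcutils helper and returns the supplied default when the property is unset.

#include <cutils/properties.h>

// ro.audio.silent no longer mutes the master output; the threads above only log a warning.
static bool silentPropertyIsSet() {
    return property_get_bool("ro.audio.silent", false);
}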
diff --git a/services/audioparameterparser/Android.bp b/services/audioparameterparser/Android.bp
index 1c1c1e1..0b2c1ba 100644
--- a/services/audioparameterparser/Android.bp
+++ b/services/audioparameterparser/Android.bp
@@ -35,10 +35,10 @@
name: "android.hardware.audio.parameter_parser.example_defaults",
defaults: [
"latest_android_hardware_audio_core_ndk_shared",
- "latest_av_audio_types_aidl_ndk_shared",
],
shared_libs: [
+ "av-audio-types-aidl-ndk",
"libbase",
"libbinder_ndk",
],
diff --git a/services/audiopolicy/engineconfigurable/config/example/common/audio_policy_engine_criterion_types_aidl.xml.in b/services/audiopolicy/engineconfigurable/config/example/common/audio_policy_engine_criterion_types_aidl.xml.in
index dc2517b..424c983 100644
--- a/services/audiopolicy/engineconfigurable/config/example/common/audio_policy_engine_criterion_types_aidl.xml.in
+++ b/services/audiopolicy/engineconfigurable/config/example/common/audio_policy_engine_criterion_types_aidl.xml.in
@@ -19,76 +19,77 @@
<criterion_type name="OutputDevicesAddressesType" type="inclusive">
<values>
<!-- legacy remote submix -->
- <value literal="0" numerical="1"/>
+ <value literal="0"/>
</values>
</criterion_type>
<criterion_type name="InputDevicesAddressesType" type="inclusive">
<values>
<!-- legacy remote submix -->
- <value literal="0" numerical="1"/>
+ <value literal="0"/>
</values>
</criterion_type>
<criterion_type name="AndroidModeType" type="exclusive"/>
<criterion_type name="ForceUseForCommunicationType" type="exclusive">
<values>
- <value literal="NONE" numerical="0"/>
- <value literal="SPEAKER" numerical="1"/>
- <value literal="BT_SCO" numerical="3"/>
+ <value literal="NONE"/>
+ <value literal="SPEAKER"/>
+ <value literal="BT_SCO"/>
</values>
</criterion_type>
<criterion_type name="ForceUseForMediaType" type="exclusive">
<values>
- <value literal="NONE" numerical="0"/>
- <value literal="SPEAKER" numerical="1"/>
- <value literal="HEADPHONES" numerical="2"/>
- <value literal="BT_A2DP" numerical="4"/>
- <value literal="WIRED_ACCESSORY" numerical="5"/>
- <value literal="ANALOG_DOCK" numerical="8"/>
- <value literal="DIGITAL_DOCK" numerical="9"/>
- <value literal="NO_BT_A2DP" numerical="10"/>
+ <value literal="NONE"/>
+ <value literal="SPEAKER"/>
+ <value literal="HEADPHONES"/>
+ <value literal="BT_A2DP"/>
+ <value literal="ANALOG_DOCK"/>
+ <value literal="DIGITAL_DOCK"/>
+ <value literal="WIRED_ACCESSORY"/>
+ <value literal="NO_BT_A2DP"/>
</values>
</criterion_type>
<criterion_type name="ForceUseForRecordType" type="exclusive">
<values>
- <value literal="NONE" numerical="0"/>
- <value literal="BT_SCO" numerical="3"/>
- <value literal="WIRED_ACCESSORY" numerical="5"/>
+ <value literal="NONE"/>
+ <value literal="BT_SCO"/>
+ <value literal="WIRED_ACCESSORY"/>
</values>
</criterion_type>
<criterion_type name="ForceUseForDockType" type="exclusive">
<values>
- <value literal="NONE" numerical="0"/>
- <value literal="WIRED_ACCESSORY" numerical="5"/>
- <value literal="BT_CAR_DOCK" numerical="6"/>
- <value literal="BT_DESK_DOCK" numerical="7"/>
- <value literal="ANALOG_DOCK" numerical="8"/>
- <value literal="DIGITAL_DOCK" numerical="9"/>
+ <value literal="NONE"/>
+ <value literal="BT_CAR_DOCK"/>
+ <value literal="BT_DESK_DOCK"/>
+ <value literal="ANALOG_DOCK"/>
+ <value literal="DIGITAL_DOCK"/>
+ <value literal="WIRED_ACCESSORY"/>
</values>
</criterion_type>
<criterion_type name="ForceUseForSystemType" type="exclusive" >
<values>
- <value literal="NONE" numerical="0"/>
- <value literal="SYSTEM_ENFORCED" numerical="11"/>
+ <value literal="NONE"/>
+ <value literal="SYSTEM_ENFORCED"/>
</values>
</criterion_type>
<criterion_type name="ForceUseForHdmiSystemAudioType" type="exclusive">
<values>
- <value literal="NONE" numerical="0"/>
- <value literal="HDMI_SYSTEM_AUDIO_ENFORCED" numerical="12"/>
+ <value literal="NONE"/>
+ <value literal="HDMI_SYSTEM_AUDIO_ENFORCED"/>
</values>
</criterion_type>
<criterion_type name="ForceUseForEncodedSurroundType" type="exclusive">
<values>
- <value literal="NONE" numerical="0"/>
- <value literal="ENCODED_SURROUND_NEVER" numerical="13"/>
- <value literal="ENCODED_SURROUND_ALWAYS" numerical="14"/>
- <value literal="ENCODED_SURROUND_MANUAL" numerical="15"/>
+ <value literal="UNSPECIFIED"/>
+ <value literal="NEVER"/>
+ <value literal="ALWAYS"/>
+ <value literal="MANUAL"/>
</values>
</criterion_type>
<criterion_type name="ForceUseForVibrateRingingType" type="exclusive">
<values>
- <value literal="NONE" numerical="0"/>
- <value literal="BT_SCO" numerical="3"/>
+ <value literal="NONE"/>
+ <value literal="BT_SCO"/>
+ <value literal="BT_BLE"/>
</values>
</criterion_type>
</criterion_types>
diff --git a/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp b/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
index b72e517..b89fba0 100644
--- a/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
+++ b/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
@@ -41,21 +41,23 @@
namespace android {
-using utilities::convertTo;
+using base::unexpected;
using media::audio::common::AudioDeviceAddress;
using media::audio::common::AudioDeviceDescription;
using media::audio::common::AudioHalCapCriterion;
+using media::audio::common::AudioHalCapCriterionV2;
using media::audio::common::AudioHalCapParameter;
using media::audio::common::AudioHalCapRule;
+using media::audio::common::AudioPolicyForceUse;
using media::audio::common::AudioSource;
using media::audio::common::AudioStreamType;
-using media::audio::common::AudioHalCapCriterionV2;
-using ::android::base::unexpected;
+using utilities::convertTo;
namespace capEngineConfig {
static constexpr const char *gLegacyOutputDevicePrefix = "AUDIO_DEVICE_OUT_";
static constexpr const char *gLegacyInputDevicePrefix = "AUDIO_DEVICE_IN_";
+static constexpr const char *gLegacyForcePrefix = "AUDIO_POLICY_FORCE_";
static constexpr const char *gLegacyStreamPrefix = "AUDIO_STREAM_";
static constexpr const char *gLegacySourcePrefix = "AUDIO_SOURCE_";
static constexpr const char *gPolicyParamPrefix = "/Policy/policy/";
@@ -83,6 +85,134 @@
return capName;
}
+ConversionResult<audio_policy_forced_cfg_t>
+ aidl2legacy_AudioPolicyForceUseCommunicationDeviceCategory_audio_policy_forced_cfg_t(
+ const AudioPolicyForceUse::CommunicationDeviceCategory aidl) {
+ switch (aidl) {
+ case AudioPolicyForceUse::CommunicationDeviceCategory::NONE:
+ return AUDIO_POLICY_FORCE_NONE;
+ case AudioPolicyForceUse::CommunicationDeviceCategory::SPEAKER:
+ return AUDIO_POLICY_FORCE_SPEAKER;
+ case AudioPolicyForceUse::CommunicationDeviceCategory::BT_SCO:
+ return AUDIO_POLICY_FORCE_BT_SCO;
+ case AudioPolicyForceUse::CommunicationDeviceCategory::BT_BLE:
+ return AUDIO_POLICY_FORCE_BT_BLE;
+ case AudioPolicyForceUse::CommunicationDeviceCategory::WIRED_ACCESSORY:
+ return AUDIO_POLICY_FORCE_WIRED_ACCESSORY;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_policy_forced_cfg_t>
+ aidl2legacy_AudioPolicyForceUseMediaDeviceCategory_audio_policy_forced_cfg_t(
+ const AudioPolicyForceUse::MediaDeviceCategory aidl) {
+ switch (aidl) {
+ case AudioPolicyForceUse::MediaDeviceCategory::NONE:
+ return AUDIO_POLICY_FORCE_NONE;
+ case AudioPolicyForceUse::MediaDeviceCategory::SPEAKER:
+ return AUDIO_POLICY_FORCE_SPEAKER;
+ case AudioPolicyForceUse::MediaDeviceCategory::HEADPHONES:
+ return AUDIO_POLICY_FORCE_HEADPHONES;
+ case AudioPolicyForceUse::MediaDeviceCategory::BT_A2DP:
+ return AUDIO_POLICY_FORCE_BT_A2DP;
+ case AudioPolicyForceUse::MediaDeviceCategory::ANALOG_DOCK:
+ return AUDIO_POLICY_FORCE_ANALOG_DOCK;
+ case AudioPolicyForceUse::MediaDeviceCategory::DIGITAL_DOCK:
+ return AUDIO_POLICY_FORCE_DIGITAL_DOCK;
+ case AudioPolicyForceUse::MediaDeviceCategory::WIRED_ACCESSORY:
+ return AUDIO_POLICY_FORCE_WIRED_ACCESSORY;
+ case AudioPolicyForceUse::MediaDeviceCategory::NO_BT_A2DP:
+ return AUDIO_POLICY_FORCE_NO_BT_A2DP;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_policy_forced_cfg_t>
+ aidl2legacy_AudioPolicyForceUseDockType_audio_policy_forced_cfg_t(
+ const AudioPolicyForceUse::DockType aidl) {
+ switch (aidl) {
+ case AudioPolicyForceUse::DockType::NONE:
+ return AUDIO_POLICY_FORCE_NONE;
+ case AudioPolicyForceUse::DockType::BT_CAR_DOCK:
+ return AUDIO_POLICY_FORCE_BT_CAR_DOCK;
+ case AudioPolicyForceUse::DockType::BT_DESK_DOCK:
+ return AUDIO_POLICY_FORCE_BT_DESK_DOCK;
+ case AudioPolicyForceUse::DockType::ANALOG_DOCK:
+ return AUDIO_POLICY_FORCE_ANALOG_DOCK;
+ case AudioPolicyForceUse::DockType::DIGITAL_DOCK:
+ return AUDIO_POLICY_FORCE_DIGITAL_DOCK;
+ case AudioPolicyForceUse::DockType::WIRED_ACCESSORY:
+ return AUDIO_POLICY_FORCE_WIRED_ACCESSORY;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_policy_forced_cfg_t>
+ aidl2legacy_AudioPolicyForceUseEncodedSurroundConfig_audio_policy_forced_cfg_t(
+ const AudioPolicyForceUse::EncodedSurroundConfig aidl) {
+ switch (aidl) {
+ case AudioPolicyForceUse::EncodedSurroundConfig::UNSPECIFIED:
+ return AUDIO_POLICY_FORCE_NONE;
+ case AudioPolicyForceUse::EncodedSurroundConfig::NEVER:
+ return AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER;
+ case AudioPolicyForceUse::EncodedSurroundConfig::ALWAYS:
+ return AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS;
+ case AudioPolicyForceUse::EncodedSurroundConfig::MANUAL:
+ return AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<std::pair<audio_policy_force_use_t, audio_policy_forced_cfg_t>>
+ aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t_audio_policy_forced_cfg_t(
+ const AudioPolicyForceUse& aidl) {
+ switch (aidl.getTag()) {
+ case AudioPolicyForceUse::forCommunication:
+ return std::make_pair(
+ AUDIO_POLICY_FORCE_FOR_COMMUNICATION,
+ VALUE_OR_RETURN(
+ aidl2legacy_AudioPolicyForceUseCommunicationDeviceCategory_audio_policy_forced_cfg_t(
+ aidl.get<AudioPolicyForceUse::forCommunication>())));
+ case AudioPolicyForceUse::forMedia:
+ return std::make_pair(
+ AUDIO_POLICY_FORCE_FOR_MEDIA,
+ VALUE_OR_RETURN(
+ aidl2legacy_AudioPolicyForceUseMediaDeviceCategory_audio_policy_forced_cfg_t(
+ aidl.get<AudioPolicyForceUse::forMedia>())));
+ case AudioPolicyForceUse::forRecord:
+ return std::make_pair(
+ AUDIO_POLICY_FORCE_FOR_RECORD,
+ VALUE_OR_RETURN(
+ aidl2legacy_AudioPolicyForceUseCommunicationDeviceCategory_audio_policy_forced_cfg_t(
+ aidl.get<AudioPolicyForceUse::forRecord>())));
+ case AudioPolicyForceUse::dock:
+ return std::make_pair(AUDIO_POLICY_FORCE_FOR_DOCK,
+ VALUE_OR_RETURN(
+ aidl2legacy_AudioPolicyForceUseDockType_audio_policy_forced_cfg_t(
+ aidl.get<AudioPolicyForceUse::dock>())));
+ case AudioPolicyForceUse::systemSounds:
+ return std::make_pair(AUDIO_POLICY_FORCE_FOR_SYSTEM,
+ aidl.get<AudioPolicyForceUse::systemSounds>() ?
+ AUDIO_POLICY_FORCE_SYSTEM_ENFORCED : AUDIO_POLICY_FORCE_NONE);
+ case AudioPolicyForceUse::hdmiSystemAudio:
+ return std::make_pair(
+ AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO,
+ aidl.get<AudioPolicyForceUse::hdmiSystemAudio>() ?
+ AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED : AUDIO_POLICY_FORCE_NONE);
+ case AudioPolicyForceUse::encodedSurround:
+ return std::make_pair(AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, VALUE_OR_RETURN(
+ aidl2legacy_AudioPolicyForceUseEncodedSurroundConfig_audio_policy_forced_cfg_t(
+ aidl.get<AudioPolicyForceUse::encodedSurround>())));
+ case AudioPolicyForceUse::forVibrateRinging:
+ return std::make_pair(
+ AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING,
+ VALUE_OR_RETURN(
+ aidl2legacy_AudioPolicyForceUseCommunicationDeviceCategory_audio_policy_forced_cfg_t(
+ aidl.get<AudioPolicyForceUse::forVibrateRinging>())));
+ }
+ return unexpected(BAD_VALUE);
+}
+
ConversionResult<std::string> aidl2legacy_AudioHalCapCriterionV2_CapName(
const AudioHalCapCriterionV2& aidl) {
switch (aidl.getTag()) {
@@ -97,14 +227,14 @@
case AudioHalCapCriterionV2::telephonyMode:
return gPhoneStateCriterionName;
case AudioHalCapCriterionV2::forceConfigForUse: {
- auto aidlCriterion = aidl.get<AudioHalCapCriterionV2::forceConfigForUse>();
- return gForceUseCriterionTag[VALUE_OR_RETURN(
- aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(
- aidlCriterion.forceUse))];
+ auto aidlCriterion = aidl.get<AudioHalCapCriterionV2::forceConfigForUse>().values[0];
+ const auto [forceUse, _] = VALUE_OR_RETURN(
+ aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t_audio_policy_forced_cfg_t(
+ aidlCriterion));
+ return gForceUseCriterionTag[forceUse];
}
- default:
- return unexpected(BAD_VALUE);
}
+ return unexpected(BAD_VALUE);
}
ConversionResult<std::string> aidl2legacy_AudioHalCapCriterionV2TypeDevice_CapCriterionValue(
@@ -121,6 +251,32 @@
isOut ? gLegacyOutputDevicePrefix : gLegacyInputDevicePrefix);
}
+ConversionResult<audio_policy_forced_cfg_t>
+ aidl2legacy_AudioHalCapCriterionV2ForceUse_audio_policy_forced_cfg_t(
+ const AudioPolicyForceUse& aidl) {
+ const auto [_, legacyForcedCfg] = VALUE_OR_RETURN(
+ aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t_audio_policy_forced_cfg_t(
+ aidl));
+ return legacyForcedCfg;
+}
+
+ConversionResult<std::string> audio_policy_forced_cfg_t_CapCriterionValue(
+ audio_policy_forced_cfg_t legacyForcedCfg) {
+ std::string legacyForcedCfgLiteral = audio_policy_forced_cfg_to_string(legacyForcedCfg);
+ if (legacyForcedCfgLiteral.empty()) {
+ ALOGE("%s Invalid forced config value %d", __func__, legacyForcedCfg);
+ return unexpected(BAD_VALUE);
+ }
+ return truncatePrefix(legacyForcedCfgLiteral, gLegacyForcePrefix);
+}
+
+ConversionResult<std::string> aidl2legacy_AudioHalCapCriterionV2ForceUse_CapCriterionValue(
+ const AudioPolicyForceUse& aidl) {
+ const audio_policy_forced_cfg_t legacyForcedCfg = VALUE_OR_RETURN(
+ aidl2legacy_AudioHalCapCriterionV2ForceUse_audio_policy_forced_cfg_t(aidl));
+ return audio_policy_forced_cfg_t_CapCriterionValue(legacyForcedCfg);
+}
+
ConversionResult<std::string> aidl2legacy_AudioHalCapCriterionV2Type_CapCriterionValue(
const AudioHalCapCriterionV2& aidl) {
switch (aidl.getTag()) {
@@ -139,10 +295,10 @@
case AudioHalCapCriterionV2::telephonyMode:
return toString(aidl.get<AudioHalCapCriterionV2::telephonyMode>().values[0]);
case AudioHalCapCriterionV2::forceConfigForUse:
- return toString(aidl.get<AudioHalCapCriterionV2::forceConfigForUse>().values[0]);
- default:
- return unexpected(BAD_VALUE);
+ return aidl2legacy_AudioHalCapCriterionV2ForceUse_CapCriterionValue(
+ aidl.get<AudioHalCapCriterionV2::forceConfigForUse>().values[0]);
}
+ return unexpected(BAD_VALUE);
}
ConversionResult<std::string> aidl2legacy_AudioHalCapRule_CapRule(
@@ -331,24 +487,28 @@
engineConfig::Criterion& criterion = capCriterion.criterion;
engineConfig::CriterionType& criterionType = capCriterion.criterionType;
- auto loadForceUseCriterion = [](const auto &aidlCriterion, auto &criterion,
- auto &criterionType) -> status_t {
- uint32_t legacyForceUse = VALUE_OR_RETURN_STATUS(
- aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(
- aidlCriterion.forceUse));
+ auto loadForceUseCriterion = [](const auto& aidlCriterion, auto& criterion,
+ auto& criterionType) -> status_t {
+ if (aidlCriterion.values.empty()) {
+ return BAD_VALUE;
+ }
+ const auto [legacyForceUse, _] = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t_audio_policy_forced_cfg_t(
+ aidlCriterion.values[0]));
criterion.typeName = criterionType.name;
criterionType.name = criterion.typeName + gCriterionTypeSuffix;
criterionType.isInclusive =
(aidlCriterion.logic == AudioHalCapCriterionV2::LogicalDisjunction::INCLUSIVE);
criterion.name = gForceUseCriterionTag[legacyForceUse];
- criterion.defaultLiteralValue = toString(aidlCriterion.defaultValue);
- if (aidlCriterion.values.empty()) {
- return BAD_VALUE;
- }
+ criterion.defaultLiteralValue = toString(
+ aidlCriterion.defaultValue.template get<AudioPolicyForceUse::forMedia>());
for (auto &value : aidlCriterion.values) {
- uint32_t legacyForcedConfig = VALUE_OR_RETURN_STATUS(
- aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(value));
- criterionType.valuePairs.push_back({legacyForcedConfig, 0, toString(value)});
+ const audio_policy_forced_cfg_t legacyForcedCfg = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioHalCapCriterionV2ForceUse_audio_policy_forced_cfg_t(value));
+ const std::string legacyForcedCfgLiteral = VALUE_OR_RETURN_STATUS(
+ audio_policy_forced_cfg_t_CapCriterionValue(legacyForcedCfg));
+ criterionType.valuePairs.push_back(
+ {legacyForcedCfg, 0, legacyForcedCfgLiteral});
}
return NO_ERROR;
};
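Illustrative only: how the new combined conversion might be exercised. This is written as if it sat inside android::capEngineConfig (the same file scope as the converters above); constructing the AIDL union via make<>() follows the standard AIDL C++ backend, and only the aidl2legacy_* helper and the enum values come from this change.

// Hypothetical example: convert a "force media routing to speaker" request into
// the legacy forced configuration.
ConversionResult<audio_policy_forced_cfg_t> exampleSpeakerForcedCfg() {
    const AudioPolicyForceUse aidl = AudioPolicyForceUse::make<AudioPolicyForceUse::forMedia>(
            AudioPolicyForceUse::MediaDeviceCategory::SPEAKER);
    const auto [forceUse, forcedCfg] = VALUE_OR_RETURN(
            aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t_audio_policy_forced_cfg_t(
                    aidl));
    if (forceUse != AUDIO_POLICY_FORCE_FOR_MEDIA) {
        return unexpected(BAD_VALUE);
    }
    // Expected result: AUDIO_POLICY_FORCE_SPEAKER.
    return forcedCfg;
}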
diff --git a/services/audiopolicy/engineconfigurable/tools/capBuildPolicyCriterionTypes.py b/services/audiopolicy/engineconfigurable/tools/capBuildPolicyCriterionTypes.py
index b873830..1adc602 100755
--- a/services/audiopolicy/engineconfigurable/tools/capBuildPolicyCriterionTypes.py
+++ b/services/audiopolicy/engineconfigurable/tools/capBuildPolicyCriterionTypes.py
@@ -102,7 +102,6 @@
ordered_values = OrderedDict(sorted(values_dict.items(), key=lambda x: x[1]))
for key, value in ordered_values.items():
value_node = ET.SubElement(values_node, "value")
- value_node.set('numerical', str(value))
value_node.set('literal', key)
if criterion_type.get('name') == "OutputDevicesMaskType":
@@ -114,20 +113,14 @@
for criterion_name, values_list in addressCriteria.items():
for criterion_type in criterion_types_root.findall('criterion_type'):
if criterion_type.get('name') == criterion_name:
- index = 0
existing_values_node = criterion_type.find("values")
if existing_values_node is not None:
- for existing_value in existing_values_node.findall('value'):
- if existing_value.get('numerical') == str(1 << index):
- index += 1
values_node = existing_values_node
else:
values_node = ET.SubElement(criterion_type, "values")
for value in values_list:
value_node = ET.SubElement(values_node, "value", literal=value)
- value_node.set('numerical', str(1 << index))
- index += 1
xmlstr = ET.tostring(criterion_types_root, encoding='utf8', method='xml')
reparsed = MINIDOM.parseString(xmlstr)
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 354c59c..74e77e8 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1492,7 +1492,8 @@
for (auto &secondaryMix : secondaryMixes) {
sp<SwAudioOutputDescriptor> outputDesc = secondaryMix->getOutput();
if (outputDesc != nullptr &&
- outputDesc->mIoHandle != AUDIO_IO_HANDLE_NONE) {
+ outputDesc->mIoHandle != AUDIO_IO_HANDLE_NONE &&
+ outputDesc->mIoHandle != *output) {
secondaryOutputs->push_back(outputDesc->mIoHandle);
weakSecondaryOutputDescs.push_back(outputDesc);
}
@@ -7432,7 +7433,8 @@
for (auto &secondaryMix : secondaryMixes) {
sp<SwAudioOutputDescriptor> outputDesc = secondaryMix->getOutput();
if (outputDesc != nullptr &&
- outputDesc->mIoHandle != AUDIO_IO_HANDLE_NONE) {
+ outputDesc->mIoHandle != AUDIO_IO_HANDLE_NONE &&
+ outputDesc != outputDescriptor) {
secondaryDescs.push_back(outputDesc);
}
}
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index b2f0c22..768cd07 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -60,8 +60,6 @@
using media::audio::common::AudioFormatDescription;
using media::audio::common::AudioMode;
using media::audio::common::AudioOffloadInfo;
-using media::audio::common::AudioPolicyForceUse;
-using media::audio::common::AudioPolicyForcedConfig;
using media::audio::common::AudioSource;
using media::audio::common::AudioStreamType;
using media::audio::common::AudioUsage;
@@ -255,8 +253,8 @@
return Status::ok();
}
-Status AudioPolicyService::setForceUse(AudioPolicyForceUse usageAidl,
- AudioPolicyForcedConfig configAidl)
+Status AudioPolicyService::setForceUse(media::AudioPolicyForceUse usageAidl,
+ media::AudioPolicyForcedConfig configAidl)
{
audio_policy_force_use_t usage = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(usageAidl));
@@ -285,8 +283,8 @@
return Status::ok();
}
-Status AudioPolicyService::getForceUse(AudioPolicyForceUse usageAidl,
- AudioPolicyForcedConfig* _aidl_return) {
+Status AudioPolicyService::getForceUse(media::AudioPolicyForceUse usageAidl,
+ media::AudioPolicyForcedConfig* _aidl_return) {
audio_policy_force_use_t usage = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(usageAidl));
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index eccefa7..428e560 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -105,10 +105,10 @@
const std::string& deviceName,
const AudioFormatDescription& encodedFormat) override;
binder::Status setPhoneState(AudioMode state, int32_t uid) override;
- binder::Status setForceUse(android::media::audio::common::AudioPolicyForceUse usage,
- android::media::audio::common::AudioPolicyForcedConfig config) override;
- binder::Status getForceUse(android::media::audio::common::AudioPolicyForceUse usage,
- android::media::audio::common::AudioPolicyForcedConfig* _aidl_return) override;
+ binder::Status setForceUse(media::AudioPolicyForceUse usage,
+ media::AudioPolicyForcedConfig config) override;
+ binder::Status getForceUse(media::AudioPolicyForceUse usage,
+ media::AudioPolicyForcedConfig* _aidl_return) override;
binder::Status getOutput(AudioStreamType stream, int32_t* _aidl_return) override;
binder::Status getOutputForAttr(const media::audio::common::AudioAttributes& attr,
int32_t session,