Merge "usecase validator: narrow down the remapping to game use case" into udc-dev
diff --git a/media/audioaidlconversion/AidlConversionNdkCpp.cpp b/media/audioaidlconversion/AidlConversionNdkCpp.cpp
new file mode 100644
index 0000000..36f6128
--- /dev/null
+++ b/media/audioaidlconversion/AidlConversionNdkCpp.cpp
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <type_traits>
+
+#define LOG_TAG "AidlConversionNdkCpp"
+#include <utils/Log.h>
+
+#include <android-base/expected.h>
+#include <android/binder_auto_utils.h>
+#include <android/binder_enums.h>
+#include <android/binder_parcel.h>
+#include <binder/Enums.h>
+#include <media/AidlConversionNdkCpp.h>
+#include <media/AidlConversionUtil.h>
+
+using aidl::android::aidl_utils::statusTFromBinderStatusT;
+
+namespace android {
+
+namespace {
+
+// cpp2ndk and ndk2cpp are universal converters which work for any type;
+// however, they are not the most efficient way to convert due to the extra
+// marshaling / unmarshaling step.
+
+template<typename NdkType, typename CppType>
+ConversionResult<NdkType> cpp2ndk(const CppType& cpp) {
+ Parcel cppParcel;
+ RETURN_IF_ERROR(cpp.writeToParcel(&cppParcel));
+ ::ndk::ScopedAParcel ndkParcel(AParcel_create());
+ const int32_t ndkParcelBegin = AParcel_getDataPosition(ndkParcel.get());
+ RETURN_IF_ERROR(statusTFromBinderStatusT(AParcel_unmarshal(
+ ndkParcel.get(), cppParcel.data(), cppParcel.dataSize())));
+ RETURN_IF_ERROR(statusTFromBinderStatusT(AParcel_setDataPosition(
+ ndkParcel.get(), ndkParcelBegin)));
+ NdkType ndk;
+ RETURN_IF_ERROR(statusTFromBinderStatusT(ndk.readFromParcel(ndkParcel.get())));
+ return ndk;
+}
+
+template<typename CppType, typename NdkType>
+ConversionResult<CppType> ndk2cpp(const NdkType& ndk) {
+ ::ndk::ScopedAParcel ndkParcel(AParcel_create());
+ RETURN_IF_ERROR(statusTFromBinderStatusT(ndk.writeToParcel(ndkParcel.get())));
+ const int32_t ndkParcelDataSize = AParcel_getDataSize(ndkParcel.get());
+ if (ndkParcelDataSize < 0) {
+ return base::unexpected(BAD_VALUE);
+ }
+    // Parcel does not expose its data in a mutable form,
+    // so we have to use an intermediate buffer.
+ std::vector<uint8_t> parcelData(static_cast<size_t>(ndkParcelDataSize));
+ RETURN_IF_ERROR(statusTFromBinderStatusT(AParcel_marshal(
+ ndkParcel.get(), parcelData.data(), 0, ndkParcelDataSize)));
+ Parcel cppParcel;
+ RETURN_IF_ERROR(cppParcel.setData(parcelData.data(), parcelData.size()));
+ CppType cpp;
+ RETURN_IF_ERROR(cpp.readFromParcel(&cppParcel));
+ return cpp;
+}
+
+// cpp2ndk_Enum and ndk2cpp_Enum are more efficient implementations specifically for enums.
+
+template<typename OutEnum, typename OutEnumRange, typename InEnum>
+ConversionResult<OutEnum> convertEnum(const OutEnumRange& range, InEnum e) {
+ using InIntType = std::underlying_type_t<InEnum>;
+ static_assert(std::is_same_v<InIntType, std::underlying_type_t<OutEnum>>);
+
+ InIntType inEnumIndex = static_cast<InIntType>(e);
+ OutEnum outEnum = static_cast<OutEnum>(inEnumIndex);
+ if (std::find(range.begin(), range.end(), outEnum) == range.end()) {
+ return base::unexpected(BAD_VALUE);
+ }
+ return outEnum;
+}
+
+template<typename NdkEnum, typename CppEnum>
+ConversionResult<NdkEnum> cpp2ndk_Enum(CppEnum cpp) {
+ return convertEnum<NdkEnum>(::ndk::enum_range<NdkEnum>(), cpp);
+}
+
+template<typename CppEnum, typename NdkEnum>
+ConversionResult<CppEnum> ndk2cpp_Enum(NdkEnum ndk) {
+ return convertEnum<CppEnum>(enum_range<CppEnum>(), ndk);
+}
+
+} // namespace
+
+#define GENERATE_CONVERTERS(packageName, className) \
+ ConversionResult<::aidl::packageName::className> cpp2ndk_##className( \
+ const ::packageName::className& cpp) { \
+ return cpp2ndk<::aidl::packageName::className>(cpp); \
+ } \
+ ConversionResult<::packageName::className> ndk2cpp_##className( \
+ const ::aidl::packageName::className& ndk) { \
+ return ndk2cpp<::packageName::className>(ndk); \
+ }
+
+#define GENERATE_ENUM_CONVERTERS(packageName, className) \
+ ConversionResult<::aidl::packageName::className> cpp2ndk_##className( \
+ const ::packageName::className& cpp) { \
+ return cpp2ndk_Enum<::aidl::packageName::className>(cpp); \
+ } \
+ ConversionResult<::packageName::className> ndk2cpp_##className( \
+ const ::aidl::packageName::className& ndk) { \
+ return ndk2cpp_Enum<::packageName::className>(ndk); \
+    }
+
+GENERATE_CONVERTERS(android::media::audio::common, AudioFormatDescription);
+GENERATE_CONVERTERS(android::media::audio::common, AudioHalEngineConfig);
+GENERATE_CONVERTERS(android::media::audio::common, AudioMMapPolicyInfo);
+GENERATE_ENUM_CONVERTERS(android::media::audio::common, AudioMMapPolicyType);
+GENERATE_CONVERTERS(android::media::audio::common, AudioPort);
+
+} // namespace android
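A rough usage sketch for the converters generated above (the wrapper function and its error
handling are hypothetical, not part of this change):

    #include <utility>

    #include <media/AidlConversionNdkCpp.h>
    #include <utils/Errors.h>

    namespace android {

    // Convert an NDK-backend AudioPort received from the AIDL HAL into the CPP-backend
    // type used by framework-internal interfaces; the conversion round-trips the data
    // through an intermediate parcel, as implemented above.
    status_t exampleConvertPort(
            const ::aidl::android::media::audio::common::AudioPort& ndkPort,
            media::audio::common::AudioPort* cppPort) {
        auto result = ndk2cpp_AudioPort(ndkPort);
        if (!result.ok()) return BAD_VALUE;
        *cppPort = std::move(result.value());
        return OK;
    }

    }  // namespace android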
diff --git a/media/audioaidlconversion/Android.bp b/media/audioaidlconversion/Android.bp
index 1ec4849..d3a5755 100644
--- a/media/audioaidlconversion/Android.bp
+++ b/media/audioaidlconversion/Android.bp
@@ -212,3 +212,27 @@
],
min_sdk_version: "31", //AParcelableHolder has been introduced in 31
}
+
+/**
+ * Conversions between the NDK and CPP backends for common types.
+ */
+cc_library {
+ name: "libaudio_aidl_conversion_common_ndk_cpp",
+ srcs: [
+ "AidlConversionNdkCpp.cpp",
+ ],
+ defaults: [
+ "audio_aidl_conversion_common_default",
+ "audio_aidl_conversion_common_util_default",
+ "latest_android_media_audio_common_types_cpp_shared",
+ "latest_android_media_audio_common_types_ndk_shared",
+ ],
+ shared_libs: [
+ "libbinder_ndk",
+ "libbase",
+ ],
+ cflags: [
+ "-DBACKEND_CPP_NDK",
+ ],
+ min_sdk_version: "33", //AParcel_unmarshal has been introduced in 33
+}
diff --git a/media/audioaidlconversion/TEST_MAPPING b/media/audioaidlconversion/TEST_MAPPING
index a0c9759..903b88a 100644
--- a/media/audioaidlconversion/TEST_MAPPING
+++ b/media/audioaidlconversion/TEST_MAPPING
@@ -1,7 +1,10 @@
{
"presubmit": [
{
"name": "audio_aidl_ndk_conversion_tests"
+    },
+    {
+      "name": "audio_aidl_ndk_cpp_conversion_tests"
}
]
}
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdkCpp.h b/media/audioaidlconversion/include/media/AidlConversionNdkCpp.h
new file mode 100644
index 0000000..09f1c22
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionNdkCpp.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+/**
+ * Conversions between the NDK and CPP backends for common types.
+ */
+#include <aidl/android/media/audio/common/AudioFormatDescription.h>
+#include <aidl/android/media/audio/common/AudioHalEngineConfig.h>
+#include <aidl/android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <aidl/android/media/audio/common/AudioMMapPolicyType.h>
+#include <aidl/android/media/audio/common/AudioPort.h>
+#include <android/media/audio/common/AudioFormatDescription.h>
+#include <android/media/audio/common/AudioHalEngineConfig.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <android/media/audio/common/AudioPort.h>
+#include <media/AidlConversionUtil.h>
+
+namespace android {
+
+#define DECLARE_CONVERTERS(packageName, className) \
+ ConversionResult<::aidl::packageName::className> \
+ cpp2ndk_##className(const ::packageName::className& cpp); \
+ ConversionResult<::packageName::className> \
+ ndk2cpp_##className(const ::aidl::packageName::className& ndk);
+
+DECLARE_CONVERTERS(android::media::audio::common, AudioFormatDescription);
+DECLARE_CONVERTERS(android::media::audio::common, AudioHalEngineConfig);
+DECLARE_CONVERTERS(android::media::audio::common, AudioMMapPolicyInfo);
+DECLARE_CONVERTERS(android::media::audio::common, AudioMMapPolicyType);
+DECLARE_CONVERTERS(android::media::audio::common, AudioPort);
+
+#undef DECLARE_CONVERTERS
+
+} // namespace android
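For reference, DECLARE_CONVERTERS(android::media::audio::common, AudioPort) expands to the
following pair of declarations (modulo whitespace):

    ConversionResult<::aidl::android::media::audio::common::AudioPort>
    cpp2ndk_AudioPort(const ::android::media::audio::common::AudioPort& cpp);
    ConversionResult<::android::media::audio::common::AudioPort>
    ndk2cpp_AudioPort(const ::aidl::android::media::audio::common::AudioPort& ndk);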
diff --git a/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h b/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
index b179cbb..f49f681 100644
--- a/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
+++ b/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
@@ -389,6 +389,10 @@
?: statusTFromExceptionCode(status.getExceptionCode()); // a service-side error with a
// standard Java exception (fromExceptionCode)
}
+
+static inline ::android::status_t statusTFromBinderStatusT(binder_status_t status) {
+ return statusTFromBinderStatus(::ndk::ScopedAStatus::fromStatus(status));
+}
#endif
/**
diff --git a/media/audioaidlconversion/tests/Android.bp b/media/audioaidlconversion/tests/Android.bp
index de7c8a2..88b2cc9 100644
--- a/media/audioaidlconversion/tests/Android.bp
+++ b/media/audioaidlconversion/tests/Android.bp
@@ -44,3 +44,27 @@
"-DBACKEND_NDK",
],
}
+
+cc_test {
+ name: "audio_aidl_ndk_cpp_conversion_tests",
+
+ defaults: [
+ "latest_android_media_audio_common_types_cpp_static",
+ "latest_android_media_audio_common_types_ndk_static",
+ "libaudio_aidl_conversion_tests_defaults",
+ ],
+ srcs: ["audio_aidl_ndk_cpp_conversion_tests.cpp"],
+ shared_libs: [
+ "libbinder",
+ "libbinder_ndk",
+ "libcutils",
+ "liblog",
+ "libutils",
+ ],
+ static_libs: [
+ "libaudio_aidl_conversion_common_ndk_cpp",
+ ],
+ cflags: [
+ "-DBACKEND_CPP_NDK",
+ ],
+}
diff --git a/media/audioaidlconversion/tests/audio_aidl_ndk_cpp_conversion_tests.cpp b/media/audioaidlconversion/tests/audio_aidl_ndk_cpp_conversion_tests.cpp
new file mode 100644
index 0000000..735a14b
--- /dev/null
+++ b/media/audioaidlconversion/tests/audio_aidl_ndk_cpp_conversion_tests.cpp
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+#include <type_traits>
+
+#include <gtest/gtest.h>
+
+#include <media/AidlConversionNdkCpp.h>
+
+namespace {
+template<typename> struct mf_traits {};
+template<class T, class U> struct mf_traits<U T::*> {
+ using member_type = U;
+};
+} // namespace
+
+// Provide value printers for types generated from AIDL
+// They need to be in the same namespace as the types we intend to print
+#define DEFINE_PRINTING_TEMPLATES() \
+ template <typename P> \
+ std::enable_if_t<std::is_function_v<typename mf_traits<decltype(&P::toString)>::member_type>, \
+ std::ostream&> operator<<(std::ostream& os, const P& p) { \
+ return os << p.toString(); \
+ } \
+ template <typename E> \
+ std::enable_if_t<std::is_enum_v<E>, std::ostream&> operator<<(std::ostream& os, const E& e) { \
+ return os << toString(e); \
+ }
+
+namespace aidl::android::media::audio::common {
+DEFINE_PRINTING_TEMPLATES();
+} // namespace aidl::android::media::audio::common
+namespace android::hardware::audio::common {
+DEFINE_PRINTING_TEMPLATES();
+} // namespace android::hardware::audio::common
+#undef DEFINE_PRINTING_TEMPLATES
+
+using namespace android;
+
+namespace {
+
+using namespace ::aidl::android::media::audio::common;
+
+AudioFormatDescription make_AudioFormatDescription(AudioFormatType type) {
+ AudioFormatDescription result;
+ result.type = type;
+ return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(PcmType pcm) {
+ auto result = make_AudioFormatDescription(AudioFormatType::PCM);
+ result.pcm = pcm;
+ return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(const std::string& encoding) {
+ AudioFormatDescription result;
+ result.encoding = encoding;
+ return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(PcmType transport, const std::string& encoding) {
+ auto result = make_AudioFormatDescription(encoding);
+ result.pcm = transport;
+ return result;
+}
+
+AudioFormatDescription make_AFD_Default() {
+ return AudioFormatDescription{};
+}
+
+AudioFormatDescription make_AFD_Invalid() {
+ return make_AudioFormatDescription(AudioFormatType::SYS_RESERVED_INVALID);
+}
+
+AudioFormatDescription make_AFD_Pcm16Bit() {
+ return make_AudioFormatDescription(PcmType::INT_16_BIT);
+}
+
+AudioFormatDescription make_AFD_Bitstream() {
+ return make_AudioFormatDescription("example");
+}
+
+AudioFormatDescription make_AFD_Encap() {
+ return make_AudioFormatDescription(PcmType::INT_16_BIT, "example.encap");
+}
+
+AudioFormatDescription make_AFD_Encap_with_Enc() {
+ auto afd = make_AFD_Encap();
+ afd.encoding += "+example";
+ return afd;
+}
+
+} // namespace
+
+// There is no reason to write a test for every type which gets converted via a parcelable
+// since the conversion code is all the same.
+
+class AudioFormatDescriptionRoundTripTest :
+ public testing::TestWithParam<::aidl::android::media::audio::common::AudioFormatDescription>
+{
+};
+TEST_P(AudioFormatDescriptionRoundTripTest, Ndk2Cpp2Ndk) {
+ const auto& initial = GetParam();
+ auto conv = ndk2cpp_AudioFormatDescription(initial);
+ ASSERT_TRUE(conv.ok());
+ auto convBack = cpp2ndk_AudioFormatDescription(conv.value());
+ ASSERT_TRUE(convBack.ok());
+ EXPECT_EQ(initial, convBack.value());
+}
+INSTANTIATE_TEST_SUITE_P(AudioFormatDescriptionRoundTrip, AudioFormatDescriptionRoundTripTest,
+ testing::Values(make_AFD_Invalid(), make_AFD_Default(), make_AFD_Pcm16Bit(),
+ make_AFD_Bitstream(), make_AFD_Encap(), make_AFD_Encap_with_Enc()));
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index 9c054f0..e424860 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -334,7 +334,10 @@
// By default needsUpdate = false in case the supplied level does meet
// the requirements. For Level 1b, we want to update the level anyway,
// so we set it to true in that case.
- bool needsUpdate = (me.v.level == LEVEL_AVC_1B);
+ bool needsUpdate = false;
+ if (me.v.level == LEVEL_AVC_1B || !me.F(me.v.level).supportsAtAll(me.v.level)) {
+ needsUpdate = true;
+ }
for (const LevelLimits &limit : kLimits) {
if (mbs <= limit.mbs && mbsPerSec <= limit.mbsPerSec &&
bitrate.v.value <= limit.bitrate) {
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index 56e6e8a..ec1dd14 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -341,6 +341,9 @@
// By default needsUpdate = false in case the supplied level does meet
// the requirements.
bool needsUpdate = false;
+ if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+ needsUpdate = true;
+ }
for (const LevelLimits &limit : kLimits) {
if (samples <= limit.samples && samplesPerSec <= limit.samplesPerSec &&
bitrate.v.value <= limit.bitrate) {
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index 703033b..acc42e9 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -243,6 +243,9 @@
needsUpdate = true;
}
#endif
+ if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+ needsUpdate = true;
+ }
for (const LevelLimits &limit : kLimits) {
if (sampleRate <= limit.sampleRate && size.v.width <= limit.width &&
vbvSize <= limit.vbvSize && size.v.height <= limit.height &&
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 5700e5d..e903069 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -263,6 +263,9 @@
// By default needsUpdate = false in case the supplied level does meet
// the requirements.
bool needsUpdate = false;
+ if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+ needsUpdate = true;
+ }
for (const LevelLimits& limit : kLimits) {
if (samples <= limit.samples && samplesPerSec <= limit.samplesPerSec &&
bitrate.v.value <= limit.bitrate && dimension <= limit.dimension) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 3b29c57..da33b0d 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -2096,7 +2096,10 @@
// csd cannot be re-ordered and will always arrive first.
if (initData != nullptr) {
Mutexed<Output>::Locked output(mOutput);
- if (output->buffers && outputFormat) {
+ if (!output->buffers) {
+ return false;
+ }
+ if (outputFormat) {
output->buffers->updateSkipCutBuffer(outputFormat);
output->buffers->setFormat(outputFormat);
}
@@ -2105,7 +2108,7 @@
}
size_t index;
sp<MediaCodecBuffer> outBuffer;
- if (output->buffers && output->buffers->registerCsd(initData, &index, &outBuffer) == OK) {
+ if (output->buffers->registerCsd(initData, &index, &outBuffer) == OK) {
outBuffer->meta()->setInt64("timeUs", timestamp.peek());
outBuffer->meta()->setInt32("flags", BUFFER_FLAG_CODEC_CONFIG);
ALOGV("[%s] onWorkDone: csd index = %zu [%p]", mName, index, outBuffer.get());
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index c606d6f..9297520 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -119,8 +119,8 @@
{ C2Color::PRIMARIES_BT601_525, ColorAspects::PrimariesBT601_6_525 },
{ C2Color::PRIMARIES_GENERIC_FILM, ColorAspects::PrimariesGenericFilm },
{ C2Color::PRIMARIES_BT2020, ColorAspects::PrimariesBT2020 },
-// { C2Color::PRIMARIES_RP431, ColorAspects::Primaries... },
-// { C2Color::PRIMARIES_EG432, ColorAspects::Primaries... },
+ { C2Color::PRIMARIES_RP431, ColorAspects::PrimariesRP431 },
+ { C2Color::PRIMARIES_EG432, ColorAspects::PrimariesEG432 },
// { C2Color::PRIMARIES_EBU3213, ColorAspects::Primaries... },
{ C2Color::PRIMARIES_OTHER, ColorAspects::PrimariesOther },
};
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 2bbfb76..eb54f75 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -300,11 +300,13 @@
"aidl/android/media/AudioPortRole.aidl",
"aidl/android/media/AudioPortType.aidl",
"aidl/android/media/AudioProfileSys.aidl",
+ "aidl/android/media/AudioRoute.aidl",
"aidl/android/media/AudioTimestampInternal.aidl",
"aidl/android/media/AudioUniqueIdUse.aidl",
"aidl/android/media/AudioVibratorInfo.aidl",
"aidl/android/media/EffectDescriptor.aidl",
"aidl/android/media/TrackSecondaryOutputInfo.aidl",
+ "aidl/android/media/SurroundSoundConfig.aidl",
],
imports: [
"android.media.audio.common.types-V2",
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index d8219a8..f01b653 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1616,13 +1616,16 @@
}
AutoMutex lock(mLock);
+ // IAudioTrack::stop() isn't synchronous; we don't know when presentation completes
+ if (mState == STATE_STOPPED || mState == STATE_FLUSHED) {
+ *position = 0;
+ return NO_ERROR;
+ }
// FIXME: offloaded and direct tracks call into the HAL for render positions
// for compressed/synced data; however, we use proxy position for pure linear pcm data
// as we do not know the capability of the HAL for pcm position support and standby.
// There may be some latency differences between the HAL position and the proxy position.
if (isOffloadedOrDirect_l() && !isPurePcmData_l()) {
- uint32_t dspFrames = 0;
-
if (isOffloaded_l() && ((mState == STATE_PAUSED) || (mState == STATE_PAUSED_STOPPING))) {
ALOGV("%s(%d): called in paused state, return cached position %u",
__func__, mPortId, mPausedPosition);
@@ -1630,13 +1633,15 @@
return NO_ERROR;
}
+ uint32_t dspFrames = 0;
if (mOutput != AUDIO_IO_HANDLE_NONE) {
uint32_t halFrames; // actually unused
- (void) AudioSystem::getRenderPosition(mOutput, &halFrames, &dspFrames);
// FIXME: on getRenderPosition() error, we return OK with frame position 0.
+ if (AudioSystem::getRenderPosition(mOutput, &halFrames, &dspFrames) != NO_ERROR) {
+ *position = 0;
+ return NO_ERROR;
+ }
}
- // FIXME: dspFrames may not be zero in (mState == STATE_STOPPED || mState == STATE_FLUSHED)
- // due to hardware latency. We leave this behavior for now.
*position = dspFrames;
} else {
if (mCblk->mFlags & CBLK_INVALID) {
@@ -1644,11 +1649,9 @@
// FIXME: for compatibility with the Java API we ignore the restoreTrack_l()
// error here (e.g. DEAD_OBJECT) and return OK with the last recorded server position.
}
-
- // IAudioTrack::stop() isn't synchronous; we don't know when presentation completes
- *position = (mState == STATE_STOPPED || mState == STATE_FLUSHED) ?
- 0 : updateAndGetPosition_l().value();
+ *position = updateAndGetPosition_l().value();
}
+
return NO_ERROR;
}
diff --git a/media/libaudioclient/aidl/android/media/AudioRoute.aidl b/media/libaudioclient/aidl/android/media/AudioRoute.aidl
new file mode 100644
index 0000000..5ee2161
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioRoute.aidl
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * TODO(b/280077672): This is a temporary copy of the stable
+ * android.hardware.audio.core.AudioRoute. Interfaces from the Core API do not
+ * support the CPP backend. This copy will be removed either by moving the
+ * AudioRoute from core to a.m.a.common or by switching the framework internal
+ * interfaces to the NDK backend.
+ * {@hide}
+ */
+parcelable AudioRoute {
+ /**
+ * The list of IDs of source audio ports ('AudioPort.id').
+ * There must be at least one source in a valid route and all IDs must be
+ * unique.
+ */
+ int[] sourcePortIds;
+ /** The ID of the sink audio port ('AudioPort.id'). */
+ int sinkPortId;
+ /** If set, only one source can be active, mixing is not supported. */
+ boolean isExclusive;
+}
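For illustration, a populated route in the CPP-backend type generated from this parcelable
could look as follows (the port IDs are hypothetical, chosen only for the example):

    #include <android/media/AudioRoute.h>

    // Two source ports (IDs 1 and 2) that can each be routed to sink port 10,
    // but not mixed together, because the route is exclusive.
    android::media::AudioRoute makeExampleRoute() {
        android::media::AudioRoute route;
        route.sourcePortIds = {1, 2};
        route.sinkPortId = 10;
        route.isExclusive = true;
        return route;
    }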
diff --git a/media/libaudioclient/aidl/android/media/ISoundDose.aidl b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
index 0e2a5ab..6cb22ef 100644
--- a/media/libaudioclient/aidl/android/media/ISoundDose.aidl
+++ b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
@@ -49,13 +49,11 @@
oneway void updateAttenuation(float attenuationDB, int device);
/**
- * Disable the calculation of sound dose. This has the effect that no MEL
- * values will be computed on the framework side. The MEL returned from
- * the IHalSoundDoseCallbacks will be ignored.
- * Should only be called once at startup if the AudioService does not
- * support CSD.
+ * Enables/disables the calculation of sound dose. When disabled, no MEL
+ * values will be computed on the framework side, and the MEL values returned
+ * from the IHalSoundDoseCallbacks will be ignored.
*/
- oneway void disableCsd();
+ oneway void setCsdEnabled(boolean enabled);
/* -------------------------- Test API methods --------------------------
/** Get the currently used RS2 upper bound. */
diff --git a/media/libaudioclient/aidl/android/media/SurroundSoundConfig.aidl b/media/libaudioclient/aidl/android/media/SurroundSoundConfig.aidl
new file mode 100644
index 0000000..f83fdef
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/SurroundSoundConfig.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.audio.common.AudioFormatDescription;
+
+/**
+ * TODO(b/280077672): This is a temporary copy of the stable
+ * android.hardware.audio.core.SurroundSoundConfig parcelable.
+ * Interfaces from the Core API do not support the CPP backend. This copy will
+ * be removed either by moving the SurroundSoundConfig from core to a.m.a.common
+ * or by switching the framework internal interfaces to the NDK backend.
+ * {@hide}
+ */
+parcelable SurroundSoundConfig {
+ parcelable SurroundFormatFamily {
+ /**
+ * A primaryFormat shall get an entry in the Surround Settings dialog on TV
+ * devices. There must be a corresponding Java ENCODING_... constant
+ * defined in AudioFormat.java, and a display name defined in
+ * AudioFormat.toDisplayName.
+ */
+ AudioFormatDescription primaryFormat;
+ /**
+ * List of formats that shall be equivalent to the primaryFormat from the
+ * users' point of view and don't need a dedicated Surround Settings
+ * dialog entry.
+ */
+ AudioFormatDescription[] subFormats;
+ }
+ SurroundFormatFamily[] formatFamilies;
+}
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index 1dbcb86..3c05b0b 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -74,6 +74,12 @@
cc_library_headers {
name: "libaudiohal_headers",
+ header_libs: [
+ "libeffectsconfig_headers",
+ ],
+
+ export_header_lib_headers: ["libeffectsconfig_headers"],
+
export_include_dirs: ["include"],
}
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index 9dbb591..09e70eb 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -35,6 +35,7 @@
"android.hidl.allocator@1.0",
"android.hidl.memory@1.0",
"libaudiohal_deathhandler",
+ "libeffectsconfig",
"libhidlbase",
"libhidlmemory",
],
@@ -287,15 +288,17 @@
"android.hardware.common.fmq-V1-ndk",
],
shared_libs: [
- "libbinder_ndk",
"libaudio_aidl_conversion_common_cpp",
"libaudio_aidl_conversion_common_ndk",
+ "libaudio_aidl_conversion_common_ndk_cpp",
"libaudio_aidl_conversion_core_ndk",
"libaudio_aidl_conversion_effect_ndk",
"libaudioaidlcommon",
+ "libbinder_ndk",
],
header_libs: [
"libaudio_system_headers",
+ "libeffectsconfig_headers",
],
cflags: [
"-Wall",
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index aafe6e0..2a8cbe1 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -23,10 +23,9 @@
#include <aidl/android/hardware/audio/core/BnStreamCallback.h>
#include <aidl/android/hardware/audio/core/BnStreamOutEventCallback.h>
#include <aidl/android/hardware/audio/core/StreamDescriptor.h>
-#include <android/binder_enums.h>
-#include <binder/Enums.h>
#include <error/expected_utils.h>
#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdkCpp.h>
#include <media/AidlConversionUtil.h>
#include <mediautils/TimeCheck.h>
#include <Utils.h>
@@ -96,77 +95,35 @@
portConfig->format = config.base.format;
}
-template<typename OutEnum, typename OutEnumRange, typename InEnum>
-ConversionResult<OutEnum> convertEnum(const OutEnumRange& range, InEnum e) {
- using InIntType = std::underlying_type_t<InEnum>;
- static_assert(std::is_same_v<InIntType, std::underlying_type_t<OutEnum>>);
-
- InIntType inEnumIndex = static_cast<InIntType>(e);
- OutEnum outEnum = static_cast<OutEnum>(inEnumIndex);
- if (std::find(range.begin(), range.end(), outEnum) == range.end()) {
- return ::android::base::unexpected(BAD_VALUE);
- }
- return outEnum;
-}
-
-template<typename NdkEnum, typename CppEnum>
-ConversionResult<NdkEnum> cpp2ndk_Enum(CppEnum e) {
- return convertEnum<NdkEnum>(::ndk::enum_range<NdkEnum>(), e);
-}
-
-template<typename CppEnum, typename NdkEnum>
-ConversionResult<CppEnum> ndk2cpp_Enum(NdkEnum e) {
- return convertEnum<CppEnum>(::android::enum_range<CppEnum>(), e);
-}
-
-ConversionResult<android::media::audio::common::AudioDeviceAddress>
-ndk2cpp_AudioDeviceAddress(const AudioDeviceAddress& ndk) {
- using CppTag = android::media::audio::common::AudioDeviceAddress::Tag;
- using NdkTag = AudioDeviceAddress::Tag;
-
- CppTag cppTag = VALUE_OR_RETURN(ndk2cpp_Enum<CppTag>(ndk.getTag()));
-
- switch (cppTag) {
- case CppTag::id:
- return android::media::audio::common::AudioDeviceAddress::make<CppTag::id>(
- ndk.get<NdkTag::id>());
- case CppTag::mac:
- return android::media::audio::common::AudioDeviceAddress::make<CppTag::mac>(
- ndk.get<NdkTag::mac>());
- case CppTag::ipv4:
- return android::media::audio::common::AudioDeviceAddress::make<CppTag::ipv4>(
- ndk.get<NdkTag::ipv4>());
- case CppTag::ipv6:
- return android::media::audio::common::AudioDeviceAddress::make<CppTag::ipv6>(
- ndk.get<NdkTag::ipv6>());
- case CppTag::alsa:
- return android::media::audio::common::AudioDeviceAddress::make<CppTag::alsa>(
- ndk.get<NdkTag::alsa>());
- }
-
- return ::android::base::unexpected(BAD_VALUE);
-}
-
-ConversionResult<media::audio::common::AudioDevice> ndk2cpp_AudioDevice(const AudioDevice& ndk) {
- media::audio::common::AudioDevice cpp;
- cpp.type.type = VALUE_OR_RETURN(
- ndk2cpp_Enum<media::audio::common::AudioDeviceType>(ndk.type.type));
- cpp.type.connection = ndk.type.connection;
- cpp.address = VALUE_OR_RETURN(ndk2cpp_AudioDeviceAddress(ndk.address));
- return cpp;
-}
-
-ConversionResult<media::audio::common::AudioMMapPolicyInfo>
-ndk2cpp_AudioMMapPolicyInfo(const AudioMMapPolicyInfo& ndk) {
- media::audio::common::AudioMMapPolicyInfo cpp;
- cpp.device = VALUE_OR_RETURN(ndk2cpp_AudioDevice(ndk.device));
- cpp.mmapPolicy = VALUE_OR_RETURN(
- ndk2cpp_Enum<media::audio::common::AudioMMapPolicy>(ndk.mmapPolicy));
+// Note: these converters are for types defined in different AIDL files. Although these
+// AIDL files are copies of each other, formally these are different types, thus we
+// don't use a conversion via a parcelable.
+ConversionResult<media::AudioRoute> ndk2cpp_AudioRoute(const AudioRoute& ndk) {
+ media::AudioRoute cpp;
+ cpp.sourcePortIds.insert(
+ cpp.sourcePortIds.end(), ndk.sourcePortIds.begin(), ndk.sourcePortIds.end());
+ cpp.sinkPortId = ndk.sinkPortId;
+ cpp.isExclusive = ndk.isExclusive;
return cpp;
}
} // namespace
+status_t DeviceHalAidl::getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) {
+ auto convertAudioPortFromMap = [](const Ports::value_type& pair) {
+ return ndk2cpp_AudioPort(pair.second);
+ };
+ return ::aidl::android::convertRange(mPorts.begin(), mPorts.end(), ports->begin(),
+ convertAudioPortFromMap);
+}
+
+status_t DeviceHalAidl::getAudioRoutes(std::vector<media::AudioRoute> *routes) {
+ *routes = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::convertContainer<std::vector<media::AudioRoute>>(
+ mRoutes, ndk2cpp_AudioRoute));
+ return OK;
+}
+
status_t DeviceHalAidl::getSupportedDevices(uint32_t*) {
// Obsolete.
return INVALID_OPERATION;
@@ -176,8 +133,7 @@
TIME_CHECK();
if (mModule == nullptr) return NO_INIT;
std::vector<AudioPort> ports;
- RETURN_STATUS_IF_ERROR(
- statusTFromBinderStatus(mModule->getAudioPorts(&ports)));
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->getAudioPorts(&ports)));
ALOGW_IF(ports.empty(), "%s: module %s returned an empty list of audio ports",
__func__, mInstance.c_str());
std::transform(ports.begin(), ports.end(), std::inserter(mPorts, mPorts.end()),
@@ -204,6 +160,9 @@
std::transform(portConfigs.begin(), portConfigs.end(),
std::inserter(mPortConfigs, mPortConfigs.end()),
[](const auto& p) { return std::make_pair(p.id, p); });
+ std::transform(mPortConfigs.begin(), mPortConfigs.end(),
+ std::inserter(mInitialPortConfigIds, mInitialPortConfigIds.end()),
+ [](const auto& pcPair) { return pcPair.first; });
std::vector<AudioPatch> patches;
RETURN_STATUS_IF_ERROR(
statusTFromBinderStatus(mModule->getAudioPatches(&patches))); // OK if empty
@@ -357,12 +316,14 @@
this, getClassName().c_str(), __func__, aidlHandle, aidlDevice.toString().c_str(),
aidlFlags.toString().c_str(), toString(aidlSource).c_str(),
aidlConfig->toString().c_str(), mixPortConfig->toString().c_str());
+ resetUnusedPatchesAndPortConfigs();
const bool isInput = aidlFlags.getTag() == AudioIoFlags::Tag::input;
// Find / create AudioPortConfigs for the device port and the mix port,
// then find / create a patch between them, and open a stream on the mix port.
AudioPortConfig devicePortConfig;
bool created = false;
- RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(aidlDevice, &devicePortConfig, &created));
+ RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(aidlDevice, aidlConfig,
+ &devicePortConfig, &created));
if (created) {
cleanups->emplace_front(this, &DeviceHalAidl::resetPortConfig, devicePortConfig.id);
}
@@ -888,8 +849,8 @@
media::audio::common::AudioMMapPolicyType policyType,
std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos) {
TIME_CHECK();
- AudioMMapPolicyType mmapPolicyType =
- VALUE_OR_RETURN_STATUS(cpp2ndk_Enum<AudioMMapPolicyType>(policyType));
+ AudioMMapPolicyType mmapPolicyType = VALUE_OR_RETURN_STATUS(
+ cpp2ndk_AudioMMapPolicyType(policyType));
std::vector<AudioMMapPolicyInfo> mmapPolicyInfos;
@@ -1109,7 +1070,7 @@
return OK;
}
-status_t DeviceHalAidl::findOrCreatePortConfig(const AudioDevice& device,
+status_t DeviceHalAidl::findOrCreatePortConfig(const AudioDevice& device, const AudioConfig* config,
AudioPortConfig* portConfig, bool* created) {
auto portConfigIt = findPortConfig(device);
if (portConfigIt == mPortConfigs.end()) {
@@ -1121,6 +1082,9 @@
}
AudioPortConfig requestedPortConfig;
requestedPortConfig.portId = portsIt->first;
+ if (config != nullptr) {
+ setPortConfigFromConfig(&requestedPortConfig, *config);
+ }
RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig, &portConfigIt,
created));
} else {
@@ -1223,7 +1187,8 @@
portConfig, created);
} else if (requestedPortConfig.ext.getTag() == Tag::device) {
return findOrCreatePortConfig(
- requestedPortConfig.ext.get<Tag::device>().device, portConfig, created);
+ requestedPortConfig.ext.get<Tag::device>().device, nullptr /*config*/,
+ portConfig, created);
}
ALOGW("%s: unsupported audio port config: %s",
__func__, requestedPortConfig.toString().c_str());
@@ -1252,7 +1217,6 @@
[&](const auto& pair) { return audioDeviceMatches(device, pair.second); });
}
-
DeviceHalAidl::Ports::iterator DeviceHalAidl::findPort(
const AudioConfig& config, const AudioIoFlags& flags,
const std::set<int32_t>& destinationPortIds) {
@@ -1265,10 +1229,20 @@
std::find(prof.sampleRates.begin(), prof.sampleRates.end(),
config.base.sampleRate) != prof.sampleRates.end());
};
+ static const std::vector<AudioOutputFlags> kOptionalOutputFlags{AudioOutputFlags::BIT_PERFECT};
+ int optionalFlags = 0;
+ auto flagMatches = [&flags, &optionalFlags](const AudioIoFlags& portFlags) {
+ // Ports should be able to match if the optional flags are not requested.
+ return portFlags == flags ||
+ (portFlags.getTag() == AudioIoFlags::Tag::output &&
+ AudioIoFlags::make<AudioIoFlags::Tag::output>(
+ portFlags.get<AudioIoFlags::Tag::output>() &
+ ~optionalFlags) == flags);
+ };
auto matcher = [&](const auto& pair) {
const auto& p = pair.second;
return p.ext.getTag() == AudioPortExt::Tag::mix &&
- p.flags == flags &&
+ flagMatches(p.flags) &&
(destinationPortIds.empty() ||
std::any_of(destinationPortIds.begin(), destinationPortIds.end(),
[&](const int32_t destId) { return mRoutingMatrix.count(
@@ -1276,7 +1250,24 @@
(p.profiles.empty() ||
std::find_if(p.profiles.begin(), p.profiles.end(), belongsToProfile) !=
p.profiles.end()); };
- return std::find_if(mPorts.begin(), mPorts.end(), matcher);
+ auto result = std::find_if(mPorts.begin(), mPorts.end(), matcher);
+ if (result == mPorts.end() && flags.getTag() == AudioIoFlags::Tag::output) {
+ auto optionalOutputFlagsIt = kOptionalOutputFlags.begin();
+ while (result == mPorts.end() && optionalOutputFlagsIt != kOptionalOutputFlags.end()) {
+ if (isBitPositionFlagSet(
+ flags.get<AudioIoFlags::Tag::output>(), *optionalOutputFlagsIt)) {
+ // If the flag is set by the request, it must be matched.
+ ++optionalOutputFlagsIt;
+ continue;
+ }
+ optionalFlags |= makeBitPositionFlagMask(*optionalOutputFlagsIt++);
+ result = std::find_if(mPorts.begin(), mPorts.end(), matcher);
+ ALOGI("%s: port for config %s, flags %s was not found in the module %s, "
+ "retried with excluding optional flags %#x", __func__, config.toString().c_str(),
+ flags.toString().c_str(), mInstance.c_str(), optionalFlags);
+ }
+ }
+ return result;
}
DeviceHalAidl::PortConfigs::iterator DeviceHalAidl::findPortConfig(const AudioDevice& device) {
@@ -1358,18 +1349,20 @@
for (int32_t id : p.second.sourcePortConfigIds) portConfigIds.erase(id);
for (int32_t id : p.second.sinkPortConfigIds) portConfigIds.erase(id);
}
+ for (int32_t id : mInitialPortConfigIds) {
+ portConfigIds.erase(id);
+ }
for (int32_t id : portConfigIds) resetPortConfig(id);
}
status_t DeviceHalAidl::updateRoutes() {
TIME_CHECK();
- std::vector<AudioRoute> routes;
RETURN_STATUS_IF_ERROR(
- statusTFromBinderStatus(mModule->getAudioRoutes(&routes)));
- ALOGW_IF(routes.empty(), "%s: module %s returned an empty list of audio routes",
+ statusTFromBinderStatus(mModule->getAudioRoutes(&mRoutes)));
+ ALOGW_IF(mRoutes.empty(), "%s: module %s returned an empty list of audio routes",
__func__, mInstance.c_str());
mRoutingMatrix.clear();
- for (const auto& r : routes) {
+ for (const auto& r : mRoutes) {
for (auto portId : r.sourcePortIds) {
mRoutingMatrix.emplace(r.sinkPortId, portId);
mRoutingMatrix.emplace(portId, r.sinkPortId);
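The optional-output-flag fallback in findPort() above can be pictured with plain bitmasks: a
port is accepted either on an exact flag match, or when it differs only by optional bits
(currently just BIT_PERFECT) that the client did not request. A self-contained toy model of
that check, with the framework types and the isBitPositionFlagSet / makeBitPositionFlagMask
helpers replaced by plain integers, might look like this:

    #include <cstdint>

    constexpr uint32_t bit(int pos) { return 1u << pos; }

    // A port with flags 'portFlags' satisfies the request if the masks are identical,
    // or if clearing the optional bits on the port side makes them identical.
    bool flagsMatch(uint32_t portFlags, uint32_t requested, uint32_t optionalMask) {
        return portFlags == requested || (portFlags & ~optionalMask) == requested;
    }

    int main() {
        const uint32_t kBitPerfect = bit(7);  // hypothetical bit position
        const uint32_t requested = bit(1);    // the client did not ask for BIT_PERFECT
        const uint32_t portFlags = bit(1) | kBitPerfect;
        // The first pass (no optional mask) fails; the retry with the optional mask succeeds.
        return flagsMatch(portFlags, requested, 0) ? 1
                : flagsMatch(portFlags, requested, kBitPerfect) ? 0 : 2;
    }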
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index e4d5ec6..b2bba1f 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -69,6 +69,10 @@
class DeviceHalAidl : public DeviceHalInterface, public ConversionHelperAidl,
public CallbackBroker, public MicrophoneInfoProvider {
public:
+ status_t getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) override;
+
+ status_t getAudioRoutes(std::vector<media::AudioRoute> *routes) override;
+
// Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
status_t getSupportedDevices(uint32_t *devices) override;
@@ -185,6 +189,7 @@
using PortConfigs = std::map<int32_t /*port config ID*/,
::aidl::android::media::audio::common::AudioPortConfig>;
using Ports = std::map<int32_t /*port ID*/, ::aidl::android::media::audio::common::AudioPort>;
+ using Routes = std::vector<::aidl::android::hardware::audio::core::AudioRoute>;
// Answers the question "whether portID 'first' is reachable from portID 'second'?"
// It's not a map because both portIDs are known. The matrix is symmetric.
using RoutingMatrix = std::set<std::pair<int32_t, int32_t>>;
@@ -215,6 +220,7 @@
::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
status_t findOrCreatePortConfig(
const ::aidl::android::media::audio::common::AudioDevice& device,
+ const ::aidl::android::media::audio::common::AudioConfig* config,
::aidl::android::media::audio::common::AudioPortConfig* portConfig,
bool* created);
status_t findOrCreatePortConfig(
@@ -284,7 +290,9 @@
int32_t mDefaultInputPortId = -1;
int32_t mDefaultOutputPortId = -1;
PortConfigs mPortConfigs;
+ std::set<int32_t> mInitialPortConfigIds;
Patches mPatches;
+ Routes mRoutes;
RoutingMatrix mRoutingMatrix;
Streams mStreams;
Microphones mMicrophones;
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index e0b1afb..22eb850 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -104,6 +104,15 @@
}
}
+status_t DeviceHalHidl::getAudioPorts(
+ std::vector<media::audio::common::AudioPort> *ports __unused) {
+ return INVALID_OPERATION;
+}
+
+status_t DeviceHalHidl::getAudioRoutes(std::vector<media::AudioRoute> *routes __unused) {
+ return INVALID_OPERATION;
+}
+
status_t DeviceHalHidl::getSupportedDevices(uint32_t*) {
// Obsolete.
return INVALID_OPERATION;
@@ -430,6 +439,7 @@
template <typename HalPort>
status_t DeviceHalHidl::getAudioPortImpl(HalPort *port) {
+ using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::AudioPort;
if (mDevice == 0) return NO_INIT;
AudioPort hidlPort;
HidlUtils::audioPortFromHal(*port, &hidlPort);
@@ -472,6 +482,7 @@
}
status_t DeviceHalHidl::setAudioPortConfig(const struct audio_port_config *config) {
+ using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::AudioPortConfig;
TIME_CHECK();
if (mDevice == 0) return NO_INIT;
AudioPortConfig hidlConfig;
@@ -536,6 +547,7 @@
#endif
status_t DeviceHalHidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+ using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::AudioPort;
TIME_CHECK();
if (mDevice == 0) return NO_INIT;
#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index afaad51..ddeb7de 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -29,6 +29,10 @@
class DeviceHalHidl : public DeviceHalInterface, public CoreConversionHelperHidl
{
public:
+ status_t getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) override;
+
+ status_t getAudioRoutes(std::vector<media::AudioRoute> *routes) override;
+
// Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
status_t getSupportedDevices(uint32_t *devices) override;
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
index 2eaaf5d..8345cd2 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
@@ -14,26 +14,67 @@
* limitations under the License.
*/
+#include <memory>
+
#define LOG_TAG "DevicesFactoryHalAidl"
//#define LOG_NDEBUG 0
#include <aidl/android/hardware/audio/core/IModule.h>
#include <android/binder_manager.h>
#include <binder/IServiceManager.h>
-#include <memory>
+#include <media/AidlConversionNdkCpp.h>
+#include <media/AidlConversionUtil.h>
#include <utils/Log.h>
#include "DeviceHalAidl.h"
#include "DevicesFactoryHalAidl.h"
-using namespace ::aidl::android::hardware::audio::core;
+using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::hardware::audio::core::IConfig;
+using aidl::android::hardware::audio::core::IModule;
+using aidl::android::hardware::audio::core::SurroundSoundConfig;
+using aidl::android::media::audio::common::AudioHalEngineConfig;
using ::android::detail::AudioHalVersionInfo;
namespace android {
-DevicesFactoryHalAidl::DevicesFactoryHalAidl(std::shared_ptr<IConfig> iconfig)
- : mIConfig(std::move(iconfig)) {
- ALOG_ASSERT(iconfig != nullptr, "Provided default IConfig service is NULL");
+namespace {
+
+ConversionResult<media::SurroundSoundConfig::SurroundFormatFamily>
+ndk2cpp_SurroundSoundConfigFormatFamily(const SurroundSoundConfig::SurroundFormatFamily& ndk) {
+ media::SurroundSoundConfig::SurroundFormatFamily cpp;
+ cpp.primaryFormat = VALUE_OR_RETURN(ndk2cpp_AudioFormatDescription(ndk.primaryFormat));
+ cpp.subFormats = VALUE_OR_RETURN(::aidl::android::convertContainer<std::vector<
+ media::audio::common::AudioFormatDescription>>(ndk.subFormats,
+ ndk2cpp_AudioFormatDescription));
+ return cpp;
+}
+
+ConversionResult<media::SurroundSoundConfig>
+ndk2cpp_SurroundSoundConfig(const SurroundSoundConfig& ndk) {
+ media::SurroundSoundConfig cpp;
+ cpp.formatFamilies = VALUE_OR_RETURN(::aidl::android::convertContainer<std::vector<
+ media::SurroundSoundConfig::SurroundFormatFamily>>(ndk.formatFamilies,
+ ndk2cpp_SurroundSoundConfigFormatFamily));
+ return cpp;
+}
+
+} // namespace
+
+DevicesFactoryHalAidl::DevicesFactoryHalAidl(std::shared_ptr<IConfig> config)
+ : mConfig(std::move(config)) {
+}
+
+status_t DevicesFactoryHalAidl::getDeviceNames(std::vector<std::string> *names) {
+ if (names == nullptr) {
+ return BAD_VALUE;
+ }
+ AServiceManager_forEachDeclaredInstance(IModule::descriptor, static_cast<void*>(names),
+ [](const char* instance, void* context) {
+ if (strcmp(instance, "default") == 0) instance = "primary";
+ static_cast<decltype(names)>(context)->push_back(instance);
+ });
+ return OK;
}
// Opens a device with the specified name. To close the device, it is
@@ -43,19 +84,22 @@
return BAD_VALUE;
}
+ // FIXME: Remove this call and the check for the supported module names
+ // after implementing retrieval of module names on the framework side.
+ // Currently it is still using the legacy XML config.
+ std::vector<std::string> deviceNames;
+ if (status_t status = getDeviceNames(&deviceNames); status != OK) {
+ return status;
+ }
std::shared_ptr<IModule> service;
- // FIXME: Normally we will list available HAL modules and connect to them,
- // however currently we still get the list of module names from the config.
- // Since the example service does not have all modules, the SM will wait
- // for the missing ones forever.
- if (strcmp(name, "primary") == 0 || strcmp(name, "r_submix") == 0 || strcmp(name, "usb") == 0) {
+ if (std::find(deviceNames.begin(), deviceNames.end(), name) != deviceNames.end()) {
if (strcmp(name, "primary") == 0) name = "default";
auto serviceName = std::string(IModule::descriptor) + "/" + name;
service = IModule::fromBinder(
ndk::SpAIBinder(AServiceManager_waitForService(serviceName.c_str())));
ALOGE_IF(service == nullptr, "%s fromBinder %s failed", __func__, serviceName.c_str());
}
- // If the service is a nullptr, the device will not be really functional,
+ // If the service is a nullptr, the device object will not be really functional,
// but will not crash either.
*device = sp<DeviceHalAidl>::make(name, service);
return OK;
@@ -97,18 +141,28 @@
AudioHalVersionInfo DevicesFactoryHalAidl::getHalVersion() const {
int32_t versionNumber = 0;
- if (mIConfig != 0) {
- if (ndk::ScopedAStatus status = mIConfig->getInterfaceVersion(&versionNumber);
- !status.isOk()) {
- ALOGE("%s getInterfaceVersion failed: %s", __func__, status.getDescription().c_str());
- }
- } else {
- ALOGW("%s no IConfig instance", __func__);
+ if (ndk::ScopedAStatus status = mConfig->getInterfaceVersion(&versionNumber); !status.isOk()) {
+ ALOGE("%s getInterfaceVersion failed: %s", __func__, status.getDescription().c_str());
}
// AIDL does not have minor version, fill 0 for all versions
return AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, versionNumber);
}
+status_t DevicesFactoryHalAidl::getSurroundSoundConfig(media::SurroundSoundConfig *config) {
+ SurroundSoundConfig ndkConfig;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mConfig->getSurroundSoundConfig(&ndkConfig)));
+ *config = VALUE_OR_RETURN_STATUS(ndk2cpp_SurroundSoundConfig(ndkConfig));
+ return OK;
+}
+
+status_t DevicesFactoryHalAidl::getEngineConfig(
+ media::audio::common::AudioHalEngineConfig *config) {
+ AudioHalEngineConfig ndkConfig;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mConfig->getEngineConfig(&ndkConfig)));
+ *config = VALUE_OR_RETURN_STATUS(ndk2cpp_AudioHalEngineConfig(ndkConfig));
+ return OK;
+}
+
// Main entry-point to the shared library.
extern "C" __attribute__((visibility("default"))) void* createIDevicesFactoryImpl() {
auto serviceName = std::string(IConfig::descriptor) + "/default";
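A rough sketch of how a client of the factory interface might combine the new getDeviceNames()
with openDevice() (the surrounding function is hypothetical and error handling is minimal):

    #include <media/audiohal/DeviceHalInterface.h>
    #include <media/audiohal/DevicesFactoryHalInterface.h>

    using namespace android;

    // Enumerate the HAL modules declared with the service manager and open each of them.
    void openAllDeclaredModules(const sp<DevicesFactoryHalInterface>& factory) {
        std::vector<std::string> names;
        if (factory->getDeviceNames(&names) != OK) return;
        for (const auto& name : names) {
            sp<DeviceHalInterface> device;
            if (factory->openDevice(name.c_str(), &device) == OK && device != nullptr) {
                // 'device' can be used even if the underlying AIDL service was not found,
                // although it will not be really functional in that case (see above).
            }
        }
    }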
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.h b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
index cb627bc..21957bc 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
@@ -26,7 +26,9 @@
{
public:
explicit DevicesFactoryHalAidl(
- std::shared_ptr<::aidl::android::hardware::audio::core::IConfig> iConfig);
+ std::shared_ptr<::aidl::android::hardware::audio::core::IConfig> config);
+
+ status_t getDeviceNames(std::vector<std::string> *names) override;
// Opens a device with the specified name. To close the device, it is
// necessary to release references to the returned object.
@@ -38,8 +40,12 @@
android::detail::AudioHalVersionInfo getHalVersion() const override;
+ status_t getSurroundSoundConfig(media::SurroundSoundConfig *config) override;
+
+ status_t getEngineConfig(media::audio::common::AudioHalEngineConfig *config) override;
+
private:
- const std::shared_ptr<::aidl::android::hardware::audio::core::IConfig> mIConfig;
+ const std::shared_ptr<::aidl::android::hardware::audio::core::IConfig> mConfig;
~DevicesFactoryHalAidl() = default;
};
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
index 9f06f83..eef60b5 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
@@ -106,6 +106,10 @@
}
#endif
+status_t DevicesFactoryHalHidl::getDeviceNames(std::vector<std::string> *names __unused) {
+ return INVALID_OPERATION;
+}
+
status_t DevicesFactoryHalHidl::openDevice(const char *name, sp<DeviceHalInterface> *device) {
auto factories = copyDeviceFactories();
if (factories.empty()) return NO_INIT;
@@ -232,6 +236,16 @@
return AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, MAJOR_VERSION, MINOR_VERSION);
}
+status_t DevicesFactoryHalHidl::getSurroundSoundConfig(
+ media::SurroundSoundConfig *config __unused) {
+ return INVALID_OPERATION;
+}
+
+status_t DevicesFactoryHalHidl::getEngineConfig(
+ media::audio::common::AudioHalEngineConfig *config __unused) {
+ return INVALID_OPERATION;
+}
+
// Main entry-point to the shared library.
extern "C" __attribute__((visibility("default"))) void* createIDevicesFactoryImpl() {
auto service = hardware::audio::CPP_VERSION::IDevicesFactory::getService();
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.h b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
index 5294728..3285af7 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
@@ -37,6 +37,8 @@
explicit DevicesFactoryHalHidl(sp<IDevicesFactory> devicesFactory);
void onFirstRef() override;
+ status_t getDeviceNames(std::vector<std::string> *names) override;
+
// Opens a device with the specified name. To close the device, it is
// necessary to release references to the returned object.
status_t openDevice(const char *name, sp<DeviceHalInterface> *device) override;
@@ -47,6 +49,10 @@
android::detail::AudioHalVersionInfo getHalVersion() const override;
+ status_t getSurroundSoundConfig(media::SurroundSoundConfig *config) override;
+
+ status_t getEngineConfig(media::audio::common::AudioHalEngineConfig *config) override;
+
private:
friend class ServiceNotificationListener;
void addDeviceFactory(sp<IDevicesFactory> factory, bool needToNotify);
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index bc05aa0..0dcb8ee 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -23,6 +23,7 @@
//#define LOG_NDEBUG 0
#include <error/expected_utils.h>
+#include <aidl/android/media/audio/common/AudioStreamType.h>
#include <android/binder_manager.h>
#include <media/AidlConversionCppNdk.h>
#include <media/AidlConversionEffect.h>
@@ -35,11 +36,13 @@
#include "EffectsFactoryHalAidl.h"
using ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid;
-using aidl::android::aidl_utils::statusTFromBinderStatus;
-using aidl::android::hardware::audio::effect::Descriptor;
-using aidl::android::hardware::audio::effect::IFactory;
-using aidl::android::media::audio::common::AudioUuid;
-using android::detail::AudioHalVersionInfo;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::Processing;
+using ::aidl::android::media::audio::common::AudioUuid;
+using ::android::base::unexpected;
+using ::android::detail::AudioHalVersionInfo;
namespace android {
namespace effect {
@@ -92,7 +95,8 @@
[](const Descriptor& desc) { return !desc.common.id.proxy.has_value(); });
return list;
}()),
- mEffectCount(mNonProxyDescList.size() + mProxyDescList.size()) {
+ mEffectCount(mNonProxyDescList.size() + mProxyDescList.size()),
+ mEffectProcessings(nullptr /* TODO: add AIDL implementation */) {
ALOG_ASSERT(mFactory != nullptr, "Provided IEffectsFactory service is NULL");
ALOGI("%s with %zu nonProxyEffects and %zu proxyEffects", __func__, mNonProxyDescList.size(),
mProxyDescList.size());
@@ -269,6 +273,19 @@
return 0 != mUuidProxyMap.count(uuid);
}
+std::shared_ptr<const effectsConfig::Processings> EffectsFactoryHalAidl::getProcessings() const {
+ return mEffectProcessings;
+}
+
+::android::error::Result<size_t> EffectsFactoryHalAidl::getSkippedElements() const {
+ if (!mEffectProcessings) {
+ return ::android::base::unexpected(BAD_VALUE);
+ }
+
+    // Always return 0 for AIDL because the AIDL interface isn't aware of the configuration file.
+ return 0;
+}
+
} // namespace effect
// When a shared library is built from a static library, even explicit
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.h b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
index debfacf..70a7012 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
@@ -62,6 +62,10 @@
detail::AudioHalVersionInfo getHalVersion() const override;
+ std::shared_ptr<const effectsConfig::Processings> getProcessings() const override;
+
+ ::android::error::Result<size_t> getSkippedElements() const override;
+
private:
const std::shared_ptr<IFactory> mFactory;
const detail::AudioHalVersionInfo mHalVersion;
@@ -77,6 +81,8 @@
const std::vector<Descriptor> mNonProxyDescList;
// total number of effects including proxy effects
const size_t mEffectCount;
+ // Query result of pre and post processing from effect factory
+ const std::shared_ptr<const effectsConfig::Processings> mEffectProcessings;
std::mutex mLock;
uint64_t mEffectIdCounter GUARDED_BY(mLock) = 0; // Align with HIDL (0 is INVALID_ID)
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
index 172ebdf..210c4b5 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
@@ -33,10 +33,11 @@
#include "android/media/AudioHalVersion.h"
+using ::android::base::unexpected;
using ::android::detail::AudioHalVersionInfo;
+using ::android::hardware::Return;
using ::android::hardware::audio::common::CPP_VERSION::implementation::UuidUtils;
using ::android::hardware::audio::effect::CPP_VERSION::implementation::EffectUtils;
-using ::android::hardware::Return;
namespace android {
namespace effect {
@@ -78,9 +79,11 @@
}
EffectsFactoryHalHidl::EffectsFactoryHalHidl(sp<IEffectsFactory> effectsFactory)
- : EffectConversionHelperHidl("EffectsFactory"), mCache(new EffectDescriptorCache) {
- ALOG_ASSERT(effectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
- mEffectsFactory = std::move(effectsFactory);
+ : EffectConversionHelperHidl("EffectsFactory"),
+ mEffectsFactory(std::move(effectsFactory)),
+ mCache(new EffectDescriptorCache),
+ mParsingResult(effectsConfig::parse()) {
+ ALOG_ASSERT(mEffectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
}
status_t EffectsFactoryHalHidl::queryNumberEffects(uint32_t *pNumEffects) {
@@ -228,6 +231,17 @@
return AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, MAJOR_VERSION, MINOR_VERSION);
}
+std::shared_ptr<const effectsConfig::Processings> EffectsFactoryHalHidl::getProcessings() const {
+ return mParsingResult.parsedConfig;
+}
+
+::android::error::Result<size_t> EffectsFactoryHalHidl::getSkippedElements() const {
+ if (!mParsingResult.parsedConfig) {
+ return ::android::base::unexpected(BAD_VALUE);
+ }
+ return mParsingResult.nbSkippedElement;
+}
+
} // namespace effect
// When a shared library is built from a static library, even explicit
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.h b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
index 9875154..4110ba3 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
@@ -17,6 +17,7 @@
#pragma once
#include <memory>
+#include <vector>
#include PATH(android/hardware/audio/effect/FILE_VERSION/IEffectsFactory.h)
#include <media/audiohal/EffectsFactoryHalInterface.h>
@@ -62,9 +63,19 @@
android::detail::AudioHalVersionInfo getHalVersion() const override;
+ std::shared_ptr<const effectsConfig::Processings> getProcessings() const override;
+
+ ::android::error::Result<size_t> getSkippedElements() const override;
+
private:
- sp<IEffectsFactory> mEffectsFactory;
- std::unique_ptr<EffectDescriptorCache> mCache;
+ const sp<IEffectsFactory> mEffectsFactory;
+ const std::unique_ptr<EffectDescriptorCache> mCache;
+ /**
+ * Configuration file parser result, used by getProcessings() and getSkippedElements().
+ * This struct holds the result of parsing a configuration file: the parsed configuration
+ * data, the number of elements skipped because of parsing errors, and the path of the file.
+ */
+ const effectsConfig::ParsingResult mParsingResult;
};
} // namespace effect
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 6c43591..eccdfe8 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -436,8 +436,7 @@
ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
TIME_CHECK();
if (!mStream) return NO_INIT;
- ALOGE("%s not implemented yet", __func__);
- return OK;
+ return statusTFromBinderStatus(mStream->prepareToClose());
}
status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 192790c..2b0af49 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -979,9 +979,10 @@
}
status_t StreamOutHalHidl::exit() {
- // FIXME this is using hard-coded strings but in the future, this functionality will be
- // converted to use audio HAL extensions required to support tunneling
- return setParameters(String8("exiting=1"));
+ // Signal exiting to remote_submix HAL.
+ AudioParameter param;
+ param.addInt(String8(AudioParameter::keyExiting), 1);
+ return setParameters(param.toString());
}
StreamInHalHidl::StreamInHalHidl(
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
index 2d5af59..b4440ee 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
@@ -52,6 +52,7 @@
Parameter aidlParam;
switch (type) {
case VISUALIZER_PARAM_CAPTURE_SIZE: {
+ mCaptureSize = value;
aidlParam = MAKE_SPECIFIC_PARAMETER(Visualizer, visualizer, captureSamples, value);
break;
}
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index e8d8998..c7d8319 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -19,6 +19,8 @@
#include <android/media/audio/common/AudioMMapPolicyInfo.h>
#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <android/media/audio/common/AudioPort.h>
+#include <android/media/AudioRoute.h>
#include <error/Result.h>
#include <media/audiohal/EffectHalInterface.h>
#include <system/audio.h>
@@ -38,6 +40,10 @@
class DeviceHalInterface : public virtual RefBase
{
public:
+ virtual status_t getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) = 0;
+
+ virtual status_t getAudioRoutes(std::vector<media::AudioRoute> *routes) = 0;
+
// Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
virtual status_t getSupportedDevices(uint32_t *devices) = 0;
diff --git a/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h b/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
index be3a723..8397e9b 100644
--- a/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
@@ -16,6 +16,8 @@
#pragma once
+#include <android/media/audio/common/AudioHalEngineConfig.h>
+#include <android/media/SurroundSoundConfig.h>
#include <media/audiohal/DeviceHalInterface.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
@@ -34,6 +36,8 @@
class DevicesFactoryHalInterface : public RefBase
{
public:
+ virtual status_t getDeviceNames(std::vector<std::string> *names) = 0;
+
// Opens a device with the specified name. To close the device, it is
// necessary to release references to the returned object.
virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device) = 0;
@@ -46,6 +50,10 @@
virtual android::detail::AudioHalVersionInfo getHalVersion() const = 0;
+ virtual status_t getSurroundSoundConfig(media::SurroundSoundConfig *config) = 0;
+
+ virtual status_t getEngineConfig(media::audio::common::AudioHalEngineConfig *config) = 0;
+
static sp<DevicesFactoryHalInterface> create();
protected:
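
For illustration only (not part of this change), a minimal sketch of how the extended factory and device interfaces above are expected to be consumed; it only combines calls declared in this header and in DeviceHalInterface.h, with error handling reduced to the essentials:

    // Sketch: enumerate HAL devices and query their ports and routes.
    sp<DevicesFactoryHalInterface> devicesFactory = DevicesFactoryHalInterface::create();
    std::vector<std::string> names;
    if (devicesFactory != nullptr && devicesFactory->getDeviceNames(&names) == NO_ERROR) {
        for (const auto& name : names) {
            sp<DeviceHalInterface> device;
            if (devicesFactory->openDevice(name.c_str(), &device) != NO_ERROR) continue;
            std::vector<media::audio::common::AudioPort> ports;
            std::vector<media::AudioRoute> routes;
            device->getAudioPorts(&ports);   // mix and device ports exposed by the HAL module
            device->getAudioRoutes(&routes); // possible connections between those ports
        }
    }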
diff --git a/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h b/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
index d740fe9..832df18 100644
--- a/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
@@ -15,8 +15,10 @@
*/
#pragma once
+#include <vector>
#include <media/audiohal/EffectHalInterface.h>
+#include <media/EffectsConfig.h>
#include <system/audio_effect.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
@@ -33,21 +35,24 @@
virtual status_t queryNumberEffects(uint32_t *pNumEffects) = 0;
// Returns a descriptor of the next available effect.
- virtual status_t getDescriptor(uint32_t index,
- effect_descriptor_t *pDescriptor) = 0;
+ virtual status_t getDescriptor(uint32_t index, effect_descriptor_t* pDescriptor) = 0;
- virtual status_t getDescriptor(const effect_uuid_t *pEffectUuid,
- effect_descriptor_t *pDescriptor) = 0;
+ virtual status_t getDescriptor(const effect_uuid_t* pEffectUuid,
+ effect_descriptor_t* pDescriptor) = 0;
virtual status_t getDescriptors(const effect_uuid_t *pEffectType,
std::vector<effect_descriptor_t> *descriptors) = 0;
+ virtual std::shared_ptr<const effectsConfig::Processings> getProcessings() const = 0;
+
+ // Returns the number of skipped elements when parsing succeeded (always 0 for AIDL), or a status_t error if the parser failed
+ virtual error::Result<size_t> getSkippedElements() const = 0;
+
// Creates an effect engine of the specified type.
// To release the effect engine, it is necessary to release references
// to the returned effect object.
- virtual status_t createEffect(const effect_uuid_t *pEffectUuid,
- int32_t sessionId, int32_t ioId, int32_t deviceId,
- sp<EffectHalInterface> *effect) = 0;
+ virtual status_t createEffect(const effect_uuid_t* pEffectUuid, int32_t sessionId, int32_t ioId,
+ int32_t deviceId, sp<EffectHalInterface>* effect) = 0;
virtual status_t dumpEffects(int fd) = 0;
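
For illustration only (not part of this change), a sketch of the intended caller-side pattern for the two new methods; it assumes the same static create() entry point used by the other libaudiohal factory interfaces:

    // Sketch: query the effect processing configuration through the HAL factory.
    sp<EffectsFactoryHalInterface> factory = EffectsFactoryHalInterface::create();  // assumed entry point
    if (factory != nullptr) {
        const auto skipped = factory->getSkippedElements();   // error::Result<size_t>
        const auto processings = factory->getProcessings();   // nullptr if no configuration is available
        if (skipped.has_value() && processings != nullptr) {
            ALOGI("effects config: %zu preprocess, %zu postprocess chains, %zu skipped elements",
                    processings->preprocess.size(), processings->postprocess.size(), skipped.value());
        }
    }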
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 6a39108..57b860d 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -193,7 +193,7 @@
// See if we should use our built-in non-effect downmixer.
if (mMixerInFormat == AUDIO_FORMAT_PCM_FLOAT
- && mMixerChannelMask == AUDIO_CHANNEL_OUT_STEREO
+ && ChannelMixBufferProvider::isOutputChannelMaskSupported(mMixerChannelMask)
&& audio_channel_mask_get_representation(channelMask)
== AUDIO_CHANNEL_REPRESENTATION_POSITION) {
mDownmixerBufferProvider.reset(new ChannelMixBufferProvider(channelMask,
diff --git a/media/libaudioprocessing/BufferProviders.cpp b/media/libaudioprocessing/BufferProviders.cpp
index a9944fb..9f19f7b 100644
--- a/media/libaudioprocessing/BufferProviders.cpp
+++ b/media/libaudioprocessing/BufferProviders.cpp
@@ -373,18 +373,23 @@
audio_bytes_per_sample(format)
* audio_channel_count_from_out_mask(outputChannelMask),
bufferFrameCount)
+ , mChannelMix{format == AUDIO_FORMAT_PCM_FLOAT
+ ? audio_utils::channels::IChannelMix::create(outputChannelMask) : nullptr}
+ , mIsValid{mChannelMix && mChannelMix->setInputChannelMask(inputChannelMask)}
{
ALOGV("ChannelMixBufferProvider(%p)(%#x, %#x, %#x)",
this, format, inputChannelMask, outputChannelMask);
- if (outputChannelMask == AUDIO_CHANNEL_OUT_STEREO && format == AUDIO_FORMAT_PCM_FLOAT) {
- mIsValid = mChannelMix.setInputChannelMask(inputChannelMask);
- }
}
void ChannelMixBufferProvider::copyFrames(void *dst, const void *src, size_t frames)
{
- mChannelMix.process(static_cast<const float *>(src), static_cast<float *>(dst),
- frames, false /* accumulate */);
+ if (mIsValid) {
+ mChannelMix->process(static_cast<const float *>(src), static_cast<float *>(dst),
+ frames, false /* accumulate */);
+ } else {
+ // Should fall back to a different BufferProvider if not valid.
+ ALOGE("%s: Use without being valid!", __func__);
+ }
}
ReformatBufferProvider::ReformatBufferProvider(int32_t channelCount,
diff --git a/media/libaudioprocessing/include/media/BufferProviders.h b/media/libaudioprocessing/include/media/BufferProviders.h
index a0b025f..8d18010 100644
--- a/media/libaudioprocessing/include/media/BufferProviders.h
+++ b/media/libaudioprocessing/include/media/BufferProviders.h
@@ -142,9 +142,14 @@
bool isValid() const { return mIsValid; }
+ static bool isOutputChannelMaskSupported(audio_channel_mask_t outputChannelMask) {
+ return audio_utils::channels::IChannelMix::isOutputChannelMaskSupported(
+ outputChannelMask);
+ }
+
protected:
- audio_utils::channels::ChannelMix<AUDIO_CHANNEL_OUT_STEREO> mChannelMix;
- bool mIsValid = false;
+ const std::shared_ptr<audio_utils::channels::IChannelMix> mChannelMix;
+ const bool mIsValid;
};
// RemixBufferProvider derives from CopyBufferProvider to perform an
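
For illustration only (not part of this change), a sketch of driving the generalized IChannelMix the same way the provider above does; the channel masks and frame count are arbitrary examples:

    // Sketch: downmix 5.1 float PCM to stereo through IChannelMix.
    auto mix = audio_utils::channels::IChannelMix::create(AUDIO_CHANNEL_OUT_STEREO);
    if (mix != nullptr && mix->setInputChannelMask(AUDIO_CHANNEL_OUT_5POINT1)) {
        constexpr size_t kFrames = 256;
        std::vector<float> in(6 * kFrames);   // interleaved 5.1 input
        std::vector<float> out(2 * kFrames);  // interleaved stereo output
        mix->process(in.data(), out.data(), kFrames, false /* accumulate */);
    }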
diff --git a/media/libeffects/config/Android.bp b/media/libeffects/config/Android.bp
index b02dcb6..293a9c2 100644
--- a/media/libeffects/config/Android.bp
+++ b/media/libeffects/config/Android.bp
@@ -27,8 +27,21 @@
"libcutils",
],
- header_libs: ["libaudio_system_headers"],
- export_header_lib_headers: ["libaudio_system_headers"],
+ header_libs: [
+ "libaudio_system_headers",
+ "liberror_headers",
+ ],
+
+ export_header_lib_headers: [
+ "libaudio_system_headers",
+ "liberror_headers",
+ ],
+
+ export_include_dirs: ["include"],
+}
+
+cc_library_headers {
+ name: "libeffectsconfig_headers",
export_include_dirs: ["include"],
}
diff --git a/media/libeffects/config/include/media/EffectsConfig.h b/media/libeffects/config/include/media/EffectsConfig.h
index 57d4dd7..a9730e5 100644
--- a/media/libeffects/config/include/media/EffectsConfig.h
+++ b/media/libeffects/config/include/media/EffectsConfig.h
@@ -22,8 +22,10 @@
* @see audio_effects_conf_V2_0.xsd for documentation on each structure
*/
+#include <error/Result.h>
#include <system/audio_effect.h>
+#include <cstddef>
#include <map>
#include <memory>
#include <string>
@@ -75,6 +77,12 @@
std::string address;
};
+struct Processings {
+ std::vector<InputStream> preprocess;
+ std::vector<OutputStream> postprocess;
+ std::vector<DeviceEffects> deviceprocess;
+};
+
/** Parsed configuration.
* Intended to be a transient structure only used for deserialization.
* Note: Everything is copied in the configuration from the xml dom.
@@ -82,19 +90,16 @@
* consider keeping a private handle on the xml dom and replace all strings by dom pointers.
* Or even better, use SAX parsing to avoid the allocations all together.
*/
-struct Config {
+struct Config : public Processings {
float version;
Libraries libraries;
Effects effects;
- std::vector<OutputStream> postprocess;
- std::vector<InputStream> preprocess;
- std::vector<DeviceEffects> deviceprocess;
};
/** Result of `parse(const char*)` */
struct ParsingResult {
/** Parsed config, nullptr if the xml lib could not load the file */
- std::unique_ptr<Config> parsedConfig;
+ std::shared_ptr<const Config> parsedConfig;
size_t nbSkippedElement; //< Number of skipped invalid library, effect or processing chain
const std::string configPath; //< Path to the loaded configuration
};
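
For illustration only (not part of this change): because Config now derives from Processings and ParsingResult holds a shared_ptr, the parsed configuration can be handed out directly through the narrower Processings view that the HAL interface returns. A sketch:

    // Sketch: slice the parse result into the Processings view.
    effectsConfig::ParsingResult result = effectsConfig::parse();
    std::shared_ptr<const effectsConfig::Processings> processings = result.parsedConfig;  // implicit upcast
    if (processings != nullptr) {
        ALOGI("pre=%zu post=%zu device=%zu skipped=%zu", processings->preprocess.size(),
                processings->postprocess.size(), processings->deviceprocess.size(),
                result.nbSkippedElement);
    }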
diff --git a/media/libeffects/config/src/EffectsConfig.cpp b/media/libeffects/config/src/EffectsConfig.cpp
index 1696233..3096659 100644
--- a/media/libeffects/config/src/EffectsConfig.cpp
+++ b/media/libeffects/config/src/EffectsConfig.cpp
@@ -286,7 +286,7 @@
return {nullptr, 0, std::move(path)};
}
- auto config = std::make_unique<Config>();
+ auto config = std::make_shared<Config>();
size_t nbSkippedElements = 0;
auto registerFailure = [&nbSkippedElements](bool result) {
nbSkippedElements += result ? 0 : 1;
diff --git a/media/libeffects/downmix/EffectDownmix.cpp b/media/libeffects/downmix/EffectDownmix.cpp
index 7f8455a..b921537 100644
--- a/media/libeffects/downmix/EffectDownmix.cpp
+++ b/media/libeffects/downmix/EffectDownmix.cpp
@@ -259,7 +259,7 @@
ret = Downmix_Init(module);
if (ret < 0) {
ALOGW("DownmixLib_Create() init failed");
- free(module);
+ delete module;
return ret;
}
@@ -582,7 +582,7 @@
ALOGV("Downmix_Init module %p", pDwmModule);
int ret = 0;
- memset(&pDwmModule->context, 0, sizeof(downmix_object_t));
+ pDwmModule->context = downmix_object_t{}; // zero initialize (contains class with vtable).
pDwmModule->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
pDwmModule->config.inputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
diff --git a/media/libeffects/factory/Android.bp b/media/libeffects/factory/Android.bp
index 22838a3..d94093e 100644
--- a/media/libeffects/factory/Android.bp
+++ b/media/libeffects/factory/Android.bp
@@ -39,6 +39,7 @@
header_libs: [
"libaudioeffects",
"libeffects_headers",
+ "liberror_headers",
],
export_header_lib_headers: ["libeffects_headers"],
}
@@ -56,7 +57,6 @@
"-Werror",
],
-
shared_libs: [
"libeffectsconfig",
"libeffects",
diff --git a/media/libmediahelper/AudioParameter.cpp b/media/libmediahelper/AudioParameter.cpp
index 9a8156e..e25f9b7 100644
--- a/media/libmediahelper/AudioParameter.cpp
+++ b/media/libmediahelper/AudioParameter.cpp
@@ -32,6 +32,8 @@
const char * const AudioParameter::keyFrameCount = AUDIO_PARAMETER_STREAM_FRAME_COUNT;
const char * const AudioParameter::keyInputSource = AUDIO_PARAMETER_STREAM_INPUT_SOURCE;
const char * const AudioParameter::keyScreenState = AUDIO_PARAMETER_KEY_SCREEN_STATE;
+const char * const AudioParameter::keyClosing = AUDIO_PARAMETER_KEY_CLOSING;
+const char * const AudioParameter::keyExiting = AUDIO_PARAMETER_KEY_EXITING;
const char * const AudioParameter::keyBtNrec = AUDIO_PARAMETER_KEY_BT_NREC;
const char * const AudioParameter::keyHwAvSync = AUDIO_PARAMETER_HW_AV_SYNC;
const char * const AudioParameter::keyPresentationId = AUDIO_PARAMETER_STREAM_PRESENTATION_ID;
diff --git a/media/libmediahelper/include/media/AudioParameter.h b/media/libmediahelper/include/media/AudioParameter.h
index 41aff7c..6c34a4f 100644
--- a/media/libmediahelper/include/media/AudioParameter.h
+++ b/media/libmediahelper/include/media/AudioParameter.h
@@ -49,6 +49,12 @@
static const char * const keyInputSource;
static const char * const keyScreenState;
+ // TODO(b/73175392) consider improvement to AIDL StreamOut interface.
+ // keyClosing: "true" when AudioOutputDescriptor is closing. Used by A2DP HAL.
+ // keyExiting: "1" on AudioFlinger Thread preExit. Used by remote_submix and A2DP HAL.
+ static const char * const keyClosing;
+ static const char * const keyExiting;
+
// keyBtNrec: BT SCO Noise Reduction + Echo Cancellation parameters
// keyHwAvSync: get HW synchronization source identifier from a device
// keyMonoOutput: Enable mono audio playback
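
For illustration only (not part of this change), how the two new keys are meant to be used in place of the previously hard-coded strings (see the StreamOutHalHidl and AudioOutputDescriptor hunks in this patch):

    // Sketch: build the key/value pairs with AudioParameter instead of raw strings.
    AudioParameter exiting;
    exiting.addInt(String8(AudioParameter::keyExiting), 1);             // produces "exiting=1"
    AudioParameter closing;
    closing.add(String8(AudioParameter::keyClosing), String8("true"));  // produces "closing=true"
    // Either string is then delivered to the HAL through the existing setParameters(param.toString()) path.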
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index ec6a00a..569a25f 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -79,7 +79,6 @@
},
}
-// this library gets wrapped into libstagefright as well as modules
cc_library_static {
name: "libstagefright_mpeg2extractor",
apex_available: [
@@ -242,6 +241,7 @@
"CodecErrorLog.cpp",
"CryptoAsync.cpp",
"FrameDecoder.cpp",
+ "HevcUtils.cpp",
"InterfaceUtils.cpp",
"JPEGSource.cpp",
"MPEG2TSWriter.cpp",
@@ -255,6 +255,7 @@
"MediaCodecSource.cpp",
"MediaExtractor.cpp",
"MediaExtractorFactory.cpp",
+ "MediaSource.cpp",
"MediaSync.cpp",
"MediaTrack.cpp",
"MediaMuxer.cpp",
@@ -268,6 +269,7 @@
"StagefrightMediaScanner.cpp",
"SurfaceUtils.cpp",
"ThrottledSource.cpp",
+ "Utils.cpp",
"VideoFrameSchedulerBase.cpp",
"VideoFrameScheduler.cpp",
],
@@ -326,11 +328,6 @@
"libmedia_ndkformatpriv",
],
- // to get Utils, MediaSource, HevcUtils
- whole_static_libs: [
- "libstagefright_mpeg2extractor",
- ],
-
header_libs:[
"libmediadrm_headers",
"libnativeloader-headers",
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 09c5d64..080c3d0 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -118,15 +118,6 @@
static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
static const char *kCodecPriority = "android.media.mediacodec.priority";
-static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
-static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
-static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
-static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
-static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
-static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
-static const char *kCodecHDRStaticInfo = "android.media.mediacodec.hdr-static-info";
-static const char *kCodecHDR10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
-static const char *kCodecHDRFormat = "android.media.mediacodec.hdr-format";
// Min/Max QP before shaping
static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
@@ -175,6 +166,29 @@
static const char *kCodecVideoInputBytes = "android.media.mediacodec.video.input.bytes";
static const char *kCodecVideoInputFrames = "android.media.mediacodec.video.input.frames";
static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";
+// HDR metrics
+static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
+static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
+static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
+static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
+static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
+static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
+static const char *kCodecHDRStaticInfo = "android.media.mediacodec.hdr-static-info";
+static const char *kCodecHDR10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
+static const char *kCodecHDRFormat = "android.media.mediacodec.hdr-format";
+// array/sync/async/block modes
+static const char *kCodecArrayMode = "android.media.mediacodec.array-mode";
+static const char *kCodecOperationMode = "android.media.mediacodec.operation-mode";
+static const char *kCodecOutputSurface = "android.media.mediacodec.output-surface";
+// max size configured by the app
+static const char *kCodecAppMaxInputSize = "android.media.mediacodec.app-max-input-size";
+// max size actually used
+static const char *kCodecUsedMaxInputSize = "android.media.mediacodec.used-max-input-size";
+// max size suggested by the codec
+static const char *kCodecCodecMaxInputSize = "android.media.mediacodec.codec-max-input-size";
+static const char *kCodecFlushCount = "android.media.mediacodec.flush-count";
+static const char *kCodecSetSurfaceCount = "android.media.mediacodec.set-surface-count";
+static const char *kCodecResolutionChangeCount = "android.media.mediacodec.resolution-change-count";
// the kCodecRecent* fields appear only in getMetrics() results
static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max"; /* in us */
@@ -935,7 +949,6 @@
mWidth(0),
mHeight(0),
mRotationDegrees(0),
- mHdrInfoFlags(0),
mDequeueInputTimeoutGeneration(0),
mDequeueInputReplyID(0),
mDequeueOutputTimeoutGeneration(0),
@@ -1043,6 +1056,14 @@
}
mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
+ resetMetricsFields();
+}
+
+void MediaCodec::resetMetricsFields() {
+ mHdrInfoFlags = 0;
+
+ mApiUsageMetrics = ApiUsageMetrics();
+ mReliabilityContextMetrics = ReliabilityContextMetrics();
}
void MediaCodec::updateMediametrics() {
@@ -1053,6 +1074,28 @@
Mutex::Autolock _lock(mMetricsLock);
+ mediametrics_setInt32(mMetricsHandle, kCodecArrayMode, mApiUsageMetrics.isArrayMode ? 1 : 0);
+ mApiUsageMetrics.operationMode = (mFlags & kFlagIsAsync) ?
+ ((mFlags & kFlagUseBlockModel) ? ApiUsageMetrics::kBlockMode
+ : ApiUsageMetrics::kAsynchronousMode)
+ : ApiUsageMetrics::kSynchronousMode;
+ mediametrics_setInt32(mMetricsHandle, kCodecOperationMode, mApiUsageMetrics.operationMode);
+ mediametrics_setInt32(mMetricsHandle, kCodecOutputSurface,
+ mApiUsageMetrics.isUsingOutputSurface ? 1 : 0);
+
+ mediametrics_setInt32(mMetricsHandle, kCodecAppMaxInputSize,
+ mApiUsageMetrics.inputBufferSize.appMax);
+ mediametrics_setInt32(mMetricsHandle, kCodecUsedMaxInputSize,
+ mApiUsageMetrics.inputBufferSize.usedMax);
+ mediametrics_setInt32(mMetricsHandle, kCodecCodecMaxInputSize,
+ mApiUsageMetrics.inputBufferSize.codecMax);
+
+ mediametrics_setInt32(mMetricsHandle, kCodecFlushCount, mReliabilityContextMetrics.flushCount);
+ mediametrics_setInt32(mMetricsHandle, kCodecSetSurfaceCount,
+ mReliabilityContextMetrics.setOutputSurfaceCount);
+ mediametrics_setInt32(mMetricsHandle, kCodecResolutionChangeCount,
+ mReliabilityContextMetrics.resolutionChangeCount);
+
if (mLatencyHist.getCount() != 0 ) {
mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
@@ -1295,7 +1338,7 @@
// update does its own mutex locking
updateMediametrics();
- mHdrInfoFlags = 0;
+ resetMetricsFields();
// ensure mutex while we do our own work
Mutex::Autolock _lock(mMetricsLock);
@@ -1967,6 +2010,10 @@
if (format->findInt32("color-format", &colorFormat)) {
mediametrics_setInt32(nextMetricsHandle, kCodecColorFormat, colorFormat);
}
+ int32_t appMaxInputSize = -1;
+ if (format->findInt32(KEY_MAX_INPUT_SIZE, &appMaxInputSize)) {
+ mApiUsageMetrics.inputBufferSize.appMax = appMaxInputSize;
+ }
if (mDomain == DOMAIN_VIDEO) {
float frameRate = -1.0;
if (format->findFloat("frame-rate", &frameRate)) {
@@ -3768,6 +3815,10 @@
mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
}
updateHdrMetrics(true /* isConfig */);
+ int32_t codecMaxInputSize = -1;
+ if (mInputFormat->findInt32(KEY_MAX_INPUT_SIZE, &codecMaxInputSize)) {
+ mApiUsageMetrics.inputBufferSize.codecMax = codecMaxInputSize;
+ }
// bitrate and bitrate mode, encoder only
if (mFlags & kFlagIsEncoder) {
// encoder specific values
@@ -4168,6 +4219,7 @@
setState(STARTED);
mCodec->signalResume();
}
+ mReliabilityContextMetrics.flushCount++;
postPendingRepliesAndDeferredMessages("kWhatFlushCompleted");
break;
@@ -4328,6 +4380,8 @@
handleSetSurface(NULL);
}
+ mApiUsageMetrics.isUsingOutputSurface = true;
+
uint32_t flags;
CHECK(msg->findInt32("flags", (int32_t *)&flags));
if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL ||
@@ -4468,6 +4522,7 @@
(void)disconnectFromSurface();
mSurface = surface;
}
+ mReliabilityContextMetrics.setOutputSurfaceCount++;
}
}
break;
@@ -5005,6 +5060,8 @@
}
}
+ mApiUsageMetrics.isArrayMode = true;
+
(new AMessage)->postReply(replyID);
break;
}
@@ -5273,6 +5330,7 @@
ClientConfigParcel clientConfig;
initClientConfigParcel(clientConfig);
mResourceManagerProxy->notifyClientConfigChanged(clientConfig);
+ mReliabilityContextMetrics.resolutionChangeCount++;
}
updateHdrMetrics(false /* isConfig */);
@@ -5708,6 +5766,10 @@
if (err != OK) {
return -EINVAL;
}
+
+ int32_t usedMaxInputSize = mApiUsageMetrics.inputBufferSize.usedMax;
+ mApiUsageMetrics.inputBufferSize.usedMax = size > usedMaxInputSize ? size : usedMaxInputSize;
+
if (hasCryptoOrDescrambler() && !c2Buffer && !memory) {
AString *errorDetailMsg;
CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 9d2568e..f91a8b2 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -733,32 +733,36 @@
const bool is10Bit = (mSrcFormat == COLOR_FormatYUVP010
|| mSrcFormat == OMX_COLOR_FormatYUV420Planar16);
- switch (mSrcColorSpace.mStandard) {
- case ColorUtils::kColorStandardBT601_525:
- case ColorUtils::kColorStandardBT601_625:
+ ColorAspects::Primaries primaries;
+ ColorAspects::MatrixCoeffs matrix;
+ if (ColorUtils::unwrapColorAspectsFromColorStandard(
+ mSrcColorSpace.mStandard, &primaries, &matrix) != OK) {
+ matrix = ColorAspects::MatrixUnspecified;
+ }
+
+ switch (matrix) {
+ case ColorAspects::MatrixBT601_6:
+ case ColorAspects::MatrixBT470_6M: // use 601 matrix as that is the closest for now
+ case ColorAspects::MatrixSMPTE240M: // use 601 matrix as that is the closest for now
return (isFullRange ? &BT601_FULL :
is10Bit ? &BT601_LTD_10BIT : &BT601_LIMITED);
- case ColorUtils::kColorStandardBT709:
+ case ColorAspects::MatrixBT709_5:
return (isFullRange ? &BT709_FULL :
is10Bit ? &BT709_LTD_10BIT : &BT709_LIMITED);
- case ColorUtils::kColorStandardBT2020:
+ case ColorAspects::MatrixBT2020:
+ case ColorAspects::MatrixBT2020Constant: // use 2020 matrix as that is the closest for now
return (isFullRange ? &BT2020_FULL :
is10Bit ? &BT2020_LTD_10BIT : &BT2020_LIMITED);
default:
- // for now use the default matrices for unhandled color spaces
- // TODO: fail?
- // return nullptr;
- [[fallthrough]];
-
- case ColorUtils::kColorStandardUnspecified:
- if (isFullRange) {
- return is10Bit ? &BT2020_FULL : &BT601_FULL;
+ // use BT.2020 for 10-bit and 601 for 8-bit by default
+ if (is10Bit) {
+ return isFullRange ? &BT2020_FULL : &BT2020_LTD_10BIT;
+ } else {
+ return isFullRange ? &BT601_FULL : &BT601_LIMITED;
}
- return is10Bit ? &BT2020_LTD_10BIT : &BT601_LIMITED;
-
}
}
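
In summary (illustrative comment only, not part of this change), the fallback for an unspecified or unrecognized standard now selects the matrix from bit depth and range:

    // Sketch of the new default matrix selection when the color standard cannot be unwrapped:
    //   8-bit,  full range    -> BT601_FULL
    //   8-bit,  limited range -> BT601_LIMITED
    //   10-bit, full range    -> BT2020_FULL
    //   10-bit, limited range -> BT2020_LTD_10BIT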
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 1cc281b..3d4b6f8 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -453,6 +453,7 @@
void initMediametrics();
void updateMediametrics();
void flushMediametrics();
+ void resetMetricsFields();
void updateEphemeralMediametrics(mediametrics_handle_t item);
void updateLowLatency(const sp<AMessage> &msg);
void onGetMetrics(const sp<AMessage>& msg);
@@ -492,6 +493,28 @@
const int32_t colorTransfer);
bool profileSupport10Bits(const AString &mime, const int32_t profile);
+ struct ApiUsageMetrics {
+ bool isArrayMode;
+ enum OperationMode {
+ kUnknownMode = 0,
+ kSynchronousMode = 1,
+ kAsynchronousMode = 2,
+ kBlockMode = 3,
+ };
+ OperationMode operationMode;
+ bool isUsingOutputSurface;
+ struct InputBufferSize {
+ int32_t appMax; // max size configured by the app
+ int32_t usedMax; // max size actually used
+ int32_t codecMax; // max size suggested by the codec
+ } inputBufferSize;
+ } mApiUsageMetrics;
+ struct ReliabilityContextMetrics {
+ int32_t flushCount;
+ int32_t setOutputSurfaceCount;
+ int32_t resolutionChangeCount;
+ } mReliabilityContextMetrics;
+
// initial create parameters
AString mInitName;
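
For reference (illustrative comment only, not part of this change), the three input-buffer-size fields are populated from different points in the codec lifecycle, as the MediaCodec.cpp hunks above show:

    // Sketch: provenance of the input-buffer-size metrics.
    //   appMax   <- KEY_MAX_INPUT_SIZE found in the format passed to configure()
    //   codecMax <- KEY_MAX_INPUT_SIZE reported back in the codec's input format after configure()
    //   usedMax  <- running maximum of the buffer sizes actually queued via queueInputBuffer()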
diff --git a/media/module/foundation/ColorUtils.cpp b/media/module/foundation/ColorUtils.cpp
index 6dc8157..12bffca 100644
--- a/media/module/foundation/ColorUtils.cpp
+++ b/media/module/foundation/ColorUtils.cpp
@@ -60,6 +60,10 @@
{ CU::kColorStandardBT470M, { CA::PrimariesBT470_6M, CA::MatrixBT470_6M } },
// NOTE: there is no close match to the matrix used by standard film, chose closest
{ CU::kColorStandardFilm, { CA::PrimariesGenericFilm, CA::MatrixBT2020 } },
+ // DCI-P3 (in DataSpace that drives this standard) is actually Display P3
+ // ITU does not specify a matrix suitable for P3. The theoretical KR/KB numbers are
+ // 0.229 and 0.079. Assume BT.601 matrix as P3 is commonly used for JPEG with BT.601.
+ { CU::kColorStandardDisplay_P3, { CA::PrimariesEG432, CA::MatrixBT601_6 } },
}
};
@@ -264,6 +268,8 @@
{ 8, ColorAspects::PrimariesGenericFilm },
{ 9, ColorAspects::PrimariesBT2020 },
{ 10, ColorAspects::PrimariesOther /* XYZ */ },
+ { 11, ColorAspects::PrimariesRP431 },
+ { 12, ColorAspects::PrimariesEG432 },
}
};
@@ -438,6 +444,9 @@
{ CU::kColorStandardBT2020, CA::PrimariesBT2020 },
{ CU::kColorStandardBT601_525_Unadjusted, CA::PrimariesBT601_6_525 },
{ CU::kColorStandardBT601_625_Unadjusted, CA::PrimariesBT601_6_625 },
+ { CU::kColorStandardDisplay_P3, CA::PrimariesEG432 },
+ // fall back DCI P3 primaries to Display P3
+ { CU::kColorStandardDisplay_P3, CA::PrimariesRP431 },
}
};
@@ -469,7 +478,8 @@
{ CU::kColorStandardBT2020Constant, GET_HAL_BITFIELD(STANDARD, BT2020_CONSTANT_LUMINANCE) },
{ CU::kColorStandardBT470M, GET_HAL_BITFIELD(STANDARD, BT470M) },
{ CU::kColorStandardFilm, GET_HAL_BITFIELD(STANDARD, FILM) },
- { CU::kColorStandardDCI_P3, GET_HAL_BITFIELD(STANDARD, DCI_P3) },
+ // DCI-P3 (in DataSpace that drives this standard) is actually Display P3
+ { CU::kColorStandardDisplay_P3, GET_HAL_BITFIELD(STANDARD, DCI_P3) },
}
};
diff --git a/media/module/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/module/foundation/include/media/stagefright/foundation/ColorUtils.h
index 72c8074..f4e89bb 100644
--- a/media/module/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/module/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -57,7 +57,8 @@
kColorStandardBT2020Constant = 7, // not in SDK
kColorStandardBT470M = 8, // not in SDK
kColorStandardFilm = 9, // not in SDK
- kColorStandardDCI_P3 = 10, // not in SDK, new in Android 8.0
+ kColorStandardDisplay_P3 = 10, // not in SDK, new in Android 8.0
+ kColorStandardDCI_P3 = kColorStandardDisplay_P3, // legacy (incorrect) name for Display P3
/* This marks a section of color-standard values that are not supported by graphics HAL,
but track defined color primaries-matrix coefficient combinations in media.
@@ -211,7 +212,7 @@
case ColorUtils::kColorStandardBT2020Constant: return "BT2020Constant";
case ColorUtils::kColorStandardBT470M: return "BT470M";
case ColorUtils::kColorStandardFilm: return "Film";
- case ColorUtils::kColorStandardDCI_P3: return "DCI_P3";
+ case ColorUtils::kColorStandardDisplay_P3: return "Display_P3";
default: return def;
}
}
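
For illustration only (not part of this change): the rename keeps the legacy constant as an alias, so existing callers continue to compile, which can be checked at compile time:

    // Sketch: the legacy name remains equal to the corrected one.
    static_assert(ColorUtils::kColorStandardDCI_P3 == ColorUtils::kColorStandardDisplay_P3,
            "legacy DCI_P3 constant must alias Display_P3");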
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 3c0f8f3..387c669 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1236,21 +1236,20 @@
output.portId = portId;
if (lStatus == NO_ERROR) {
+ // no risk of deadlock because AudioFlinger::mLock is held
+ Mutex::Autolock _dl(thread->mLock);
// Connect secondary outputs. Failure on a secondary output must not impede the primary
// Any secondary output setup failure will lead to a desync between the AP and AF until
// the track is destroyed.
updateSecondaryOutputsForTrack_l(track.get(), thread, secondaryOutputs);
- }
-
- // move effect chain to this output thread if an effect on same session was waiting
- // for a track to be created
- if (lStatus == NO_ERROR && effectThread != NULL) {
- // no risk of deadlock because AudioFlinger::mLock is held
- Mutex::Autolock _dl(thread->mLock);
- Mutex::Autolock _sl(effectThread->mLock);
- if (moveEffectChain_l(sessionId, effectThread, thread) == NO_ERROR) {
- effectThreadId = thread->id();
- effectIds = thread->getEffectIds_l(sessionId);
+ // move effect chain to this output thread if an effect on same session was waiting
+ // for a track to be created
+ if (effectThread != nullptr) {
+ Mutex::Autolock _sl(effectThread->mLock);
+ if (moveEffectChain_l(sessionId, effectThread, thread) == NO_ERROR) {
+ effectThreadId = thread->id();
+ effectIds = thread->getEffectIds_l(sessionId);
+ }
}
}
@@ -1861,6 +1860,8 @@
String8(AudioParameter::keyStreamSupportedFormats),
String8(AudioParameter::keyStreamSupportedChannels),
String8(AudioParameter::keyStreamSupportedSamplingRates),
+ String8(AudioParameter::keyClosing),
+ String8(AudioParameter::keyExiting),
};
if (isAudioServerUid(callingUid)) {
@@ -3950,7 +3951,7 @@
patchTrack->setPeerProxy(patchRecord, true /* holdReference */);
patchRecord->setPeerProxy(patchTrack, false /* holdReference */);
}
- track->setTeePatches(std::move(teePatches));
+ track->setTeePatchesToUpdate(std::move(teePatches));
}
sp<AudioFlinger::SyncEvent> AudioFlinger::createSyncEvent(AudioSystem::sync_event_t type,
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
index 496aedc..e024319 100644
--- a/services/audioflinger/MelReporter.cpp
+++ b/services/audioflinger/MelReporter.cpp
@@ -81,7 +81,7 @@
}
bool AudioFlinger::MelReporter::shouldComputeMelForDeviceType(audio_devices_t device) {
- if (mSoundDoseManager->isCsdDisabled()) {
+ if (!mSoundDoseManager->isCsdEnabled()) {
ALOGV("%s csd is disabled", __func__);
return false;
}
@@ -107,7 +107,7 @@
void AudioFlinger::MelReporter::updateMetadataForCsd(audio_io_handle_t streamHandle,
const std::vector<playback_track_metadata_v7_t>& metadataVec) {
- if (mSoundDoseManager->isCsdDisabled()) {
+ if (!mSoundDoseManager->isCsdEnabled()) {
ALOGV("%s csd is disabled", __func__);
return;
}
@@ -143,14 +143,10 @@
void AudioFlinger::MelReporter::onCreateAudioPatch(audio_patch_handle_t handle,
const PatchPanel::Patch& patch) {
- if (mSoundDoseManager->isCsdDisabled()) {
+ if (!mSoundDoseManager->isCsdEnabled()) {
ALOGV("%s csd is disabled", __func__);
return;
}
- if (useHalSoundDoseInterface()) {
- ALOGV("%s using HAL sound dose, ignore new patch", __func__);
- return;
- }
ALOGV("%s: handle %d mHalHandle %d device sink %08x",
__func__, handle, patch.mHalHandle,
@@ -199,7 +195,7 @@
ALOGI("%s add stream %d that uses device %d for CSD, nr of streams: %d", __func__,
patch.streamHandle, deviceHandle, mActiveDevices[deviceHandle]);
- if (outputThread != nullptr) {
+ if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
outputThread->startMelComputation_l(mSoundDoseManager->getOrCreateProcessorForDevice(
deviceHandle,
patch.streamHandle,
@@ -211,7 +207,7 @@
}
void AudioFlinger::MelReporter::onReleaseAudioPatch(audio_patch_handle_t handle) {
- if (mSoundDoseManager->isCsdDisabled()) {
+ if (!mSoundDoseManager->isCsdEnabled()) {
ALOGV("%s csd is disabled", __func__);
return;
}
@@ -271,7 +267,7 @@
}
}
- if (outputThread != nullptr) {
+ if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
outputThread->stopMelComputation_l();
}
}
@@ -287,13 +283,8 @@
return std::nullopt;
}
-bool AudioFlinger::MelReporter::useHalSoundDoseInterface() {
- bool useHalSoundDoseInterface = !mSoundDoseManager->forceUseFrameworkMel();
- {
- std::lock_guard _l(mLock);
- useHalSoundDoseInterface &= mUseHalSoundDoseInterface;
- }
- return useHalSoundDoseInterface;
+bool AudioFlinger::MelReporter::useHalSoundDoseInterface_l() {
+ return !mSoundDoseManager->forceUseFrameworkMel() && mUseHalSoundDoseInterface;
}
std::string AudioFlinger::MelReporter::dump() {
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
index 81a307a..2bc33f2 100644
--- a/services/audioflinger/MelReporter.h
+++ b/services/audioflinger/MelReporter.h
@@ -98,7 +98,7 @@
std::optional<audio_patch_handle_t>
activePatchStreamHandle_l(audio_io_handle_t streamHandle) REQUIRES(mLock);
- bool useHalSoundDoseInterface();
+ bool useHalSoundDoseInterface_l() REQUIRES(mLock);
AudioFlinger& mAudioFlinger; // does not own the object
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index d25d46f..d0feba5 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -735,7 +735,11 @@
/* Disconnect a patch */
status_t AudioFlinger::PatchPanel::releaseAudioPatch(audio_patch_handle_t handle)
-{
 // unlocks AudioFlinger::mLock when calling ThreadBase::sendReleaseAudioPatchConfigEvent
 // to avoid deadlocks if the thread loop needs to acquire AudioFlinger::mLock
 // before processing the release patch request.
+ NO_THREAD_SAFETY_ANALYSIS
+ {
ALOGV("%s handle %d", __func__, handle);
status_t status = NO_ERROR;
@@ -772,7 +776,9 @@
break;
}
}
+ mAudioFlinger.unlock();
status = thread->sendReleaseAudioPatchConfigEvent(removedPatch.mHalHandle);
+ mAudioFlinger.lock();
} else {
status = hwDevice->releaseAudioPatch(removedPatch.mHalHandle);
}
@@ -793,7 +799,9 @@
break;
}
}
+ mAudioFlinger.unlock();
status = thread->sendReleaseAudioPatchConfigEvent(removedPatch.mHalHandle);
+ mAudioFlinger.lock();
} break;
default:
status = BAD_VALUE;
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 9560609..0e1a3c9 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -186,7 +186,9 @@
}
sp<os::ExternalVibration> getExternalVibration() const { return mExternalVibration; }
- void setTeePatches(TeePatches teePatches);
+ // This function must be called with the thread lock held.
+ void updateTeePatches();
+ void setTeePatchesToUpdate(TeePatches teePatchesToUpdate);
void tallyUnderrunFrames(size_t frames) override {
if (isOut()) { // we expect this from output tracks only
@@ -369,6 +371,7 @@
bool mPauseHwPending = false; // direct/offload track request for thread pause
audio_output_flags_t mFlags;
TeePatches mTeePatches;
+ std::optional<TeePatches> mTeePatchesToUpdate;
const float mSpeed;
const bool mIsSpatialized;
const bool mIsBitPerfect;
@@ -426,6 +429,7 @@
private:
status_t obtainBuffer(AudioBufferProvider::Buffer* buffer,
uint32_t waitTimeMs);
+ void queueBuffer(Buffer& inBuffer);
void clearBufferQueue();
void restartIfDisabled();
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index de0abf0..8d0c648 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -4023,7 +4023,7 @@
LOG_AUDIO_STATE();
mThreadMetrics.logEndInterval();
mThreadSnapshot.onEnd();
- mStandby = true;
+ setStandby_l();
}
sendStatistics(false /* force */);
}
@@ -4103,6 +4103,18 @@
activeTracks.insert(activeTracks.end(), mActiveTracks.begin(), mActiveTracks.end());
setHalLatencyMode_l();
+
+ for (const auto &track : mActiveTracks ) {
+ track->updateTeePatches();
+ }
+
+ // signal actual start of output stream when the render position reported by the kernel
+ // starts moving.
+ if (!mStandby && !mHalStarted && mKernelPositionOnStandby !=
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]) {
+ mHalStarted = true;
+ mWaitHalStartCV.broadcast();
+ }
} // mLock scope ends
if (mBytesRemaining == 0) {
@@ -4488,7 +4500,7 @@
if (!mStandby) {
threadLoop_standby();
- mStandby = true;
+ setStandby();
}
releaseWakeLock();
@@ -6235,12 +6247,12 @@
if (status == NO_ERROR) {
status = mOutput->stream->setParameters(keyValuePair);
if (!mStandby && status == INVALID_OPERATION) {
+ ALOGW("%s: setParameters failed with keyValuePair %s, entering standby",
+ __func__, keyValuePair.c_str());
mOutput->standby();
- if (!mStandby) {
- mThreadMetrics.logEndInterval();
- mThreadSnapshot.onEnd();
- mStandby = true;
- }
+ mThreadMetrics.logEndInterval();
+ mThreadSnapshot.onEnd();
+ setStandby_l();
mBytesWritten = 0;
status = mOutput->stream->setParameters(keyValuePair);
}
@@ -6547,7 +6559,8 @@
mFlushPending = true;
}
} else /* mType == OFFLOAD */ {
- if (previousTrack->sessionId() != latestTrack->sessionId()) {
+ if (previousTrack->sessionId() != latestTrack->sessionId() ||
+ previousTrack->isFlushPending()) {
mFlushPending = true;
}
}
@@ -6889,7 +6902,7 @@
if (!mStandby) {
mThreadMetrics.logEndInterval();
mThreadSnapshot.onEnd();
- mStandby = true;
+ setStandby_l();
}
mBytesWritten = 0;
status = mOutput->stream->setParameters(keyValuePair);
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 7b4c150..e88134b 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -1116,6 +1116,32 @@
void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) override;
void stopMelComputation_l() override;
+ void setStandby() {
+ Mutex::Autolock _l(mLock);
+ setStandby_l();
+ }
+
+ void setStandby_l() {
+ mStandby = true;
+ mHalStarted = false;
+ mKernelPositionOnStandby =
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+ }
+
+ bool waitForHalStart() {
+ Mutex::Autolock _l(mLock);
+ static const nsecs_t kWaitHalTimeoutNs = seconds(2);
+ nsecs_t endWaitTimeNs = systemTime() + kWaitHalTimeoutNs;
+ while (!mHalStarted) {
+ nsecs_t timeNs = systemTime();
+ if (timeNs >= endWaitTimeNs) {
+ break;
+ }
+ nsecs_t waitTimeLeftNs = endWaitTimeNs - timeNs;
+ mWaitHalStartCV.waitRelative(mLock, waitTimeLeftNs);
+ }
+ return mHalStarted;
+ }
protected:
// updated by readOutputParameters_l()
size_t mNormalFrameCount; // normal mixer and effects
@@ -1415,6 +1441,14 @@
// Downstream patch latency, available if mDownstreamLatencyStatMs.getN() > 0.
audio_utils::Statistics<double> mDownstreamLatencyStatMs{0.999};
+ // output stream start detection based on render position returned by the kernel
+ // condition signalled when the output stream has started
+ Condition mWaitHalStartCV;
+ // true when the output stream render position has moved, reset to false in standby
+ bool mHalStarted = false;
+ // last kernel render position saved when entering standby
+ int64_t mKernelPositionOnStandby = 0;
+
public:
virtual bool hasFastMixer() const = 0;
virtual FastTrackUnderruns getFastTrackUnderruns(size_t fastIndex __unused) const
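
For illustration only (not part of this change), the intended interplay between the new wait/broadcast pair and the OutputTrack changes later in this patch, reduced to its essentials:

    // Sketch: OutputTrack::write()              PlaybackThread::threadLoop()
    //   start();                                  // leaves standby, starts the HAL stream
    //   waitForHalStart();        // blocks until // once the LOCATION_KERNEL position moves:
    //                             // broadcast -> //   mHalStarted = true; mWaitHalStartCV.broadcast();
    //   queueBuffer(firstBuffer);                 // real audio is queued only after the HAL started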
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 8faaffe..7d2c4db 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1491,15 +1491,24 @@
*backInserter++ = metadata;
}
-void AudioFlinger::PlaybackThread::Track::setTeePatches(TeePatches teePatches) {
- forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
- mTeePatches = std::move(teePatches);
- if (mState == TrackBase::ACTIVE || mState == TrackBase::RESUMING ||
- mState == TrackBase::STOPPING_1) {
- forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
+void AudioFlinger::PlaybackThread::Track::updateTeePatches() {
+ if (mTeePatchesToUpdate.has_value()) {
+ forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
+ mTeePatches = mTeePatchesToUpdate.value();
+ if (mState == TrackBase::ACTIVE || mState == TrackBase::RESUMING ||
+ mState == TrackBase::STOPPING_1) {
+ forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
+ }
+ mTeePatchesToUpdate.reset();
}
}
+void AudioFlinger::PlaybackThread::Track::setTeePatchesToUpdate(TeePatches teePatchesToUpdate) {
+ ALOGW_IF(mTeePatchesToUpdate.has_value(),
+ "%s, existing tee patches to update will be ignored", __func__);
+ mTeePatchesToUpdate = std::move(teePatchesToUpdate);
+}
+
// must be called with player thread lock held
void AudioFlinger::PlaybackThread::Track::processMuteEvent_l(const sp<
IAudioManager>& audioManager, mute_state_t muteState)
@@ -2056,17 +2065,50 @@
ssize_t AudioFlinger::PlaybackThread::OutputTrack::write(void* data, uint32_t frames)
{
+ if (!mActive && frames != 0) {
+ sp<ThreadBase> thread = mThread.promote();
+ if (thread != nullptr && thread->standby()) {
+ // preload one silent buffer to trigger mixer on start()
+ ClientProxy::Buffer buf { .mFrameCount = mClientProxy->getStartThresholdInFrames() };
+ status_t status = mClientProxy->obtainBuffer(&buf);
+ if (status != NO_ERROR && status != NOT_ENOUGH_DATA && status != WOULD_BLOCK) {
+ ALOGE("%s(%d): could not obtain buffer on start", __func__, mId);
+ return 0;
+ }
+ memset(buf.mRaw, 0, buf.mFrameCount * mFrameSize);
+ mClientProxy->releaseBuffer(&buf);
+
+ (void) start();
+
+ // Wait for the HAL stream to start before sending actual audio. Doing this on each
+ // OutputTrack ensures that playback start is synchronized across all output streams.
+ // If another OutputTrack has already started it can underrun but this is OK
+ // as only silence has been played so far and the retry count is very high on
+ // OutputTrack.
+ auto pt = static_cast<PlaybackThread *>(thread.get());
+ if (!pt->waitForHalStart()) {
+ ALOGW("%s(%d): timeout waiting for thread to exit standby", __func__, mId);
+ stop();
+ return 0;
+ }
+
+ // enqueue the first buffer and exit so that other OutputTracks can also start before
+ // write() is called again and this buffer is actually consumed.
+ Buffer firstBuffer;
+ firstBuffer.frameCount = frames;
+ firstBuffer.raw = data;
+ queueBuffer(firstBuffer);
+ return frames;
+ } else {
+ (void) start();
+ }
+ }
+
Buffer *pInBuffer;
Buffer inBuffer;
inBuffer.frameCount = frames;
inBuffer.raw = data;
-
uint32_t waitTimeLeftMs = mSourceThread->waitTimeMs();
-
- if (!mActive && frames != 0) {
- (void) start();
- }
-
while (waitTimeLeftMs) {
// First write pending buffers, then new data
if (mBufferQueue.size()) {
@@ -2134,25 +2176,7 @@
if (inBuffer.frameCount) {
sp<ThreadBase> thread = mThread.promote();
if (thread != 0 && !thread->standby()) {
- if (mBufferQueue.size() < kMaxOverFlowBuffers) {
- pInBuffer = new Buffer;
- const size_t bufferSize = inBuffer.frameCount * mFrameSize;
- pInBuffer->mBuffer = malloc(bufferSize);
- LOG_ALWAYS_FATAL_IF(pInBuffer->mBuffer == nullptr,
- "%s: Unable to malloc size %zu", __func__, bufferSize);
- pInBuffer->frameCount = inBuffer.frameCount;
- pInBuffer->raw = pInBuffer->mBuffer;
- memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * mFrameSize);
- mBufferQueue.add(pInBuffer);
- ALOGV("%s(%d): thread %d adding overflow buffer %zu", __func__, mId,
- (int)mThreadIoHandle, mBufferQueue.size());
- // audio data is consumed (stored locally); set frameCount to 0.
- inBuffer.frameCount = 0;
- } else {
- ALOGW("%s(%d): thread %d no more overflow buffers",
- __func__, mId, (int)mThreadIoHandle);
- // TODO: return error for this.
- }
+ queueBuffer(inBuffer);
}
}
@@ -2165,6 +2189,29 @@
return frames - inBuffer.frameCount; // number of frames consumed.
}
+void AudioFlinger::PlaybackThread::OutputTrack::queueBuffer(Buffer& inBuffer) {
+
+ if (mBufferQueue.size() < kMaxOverFlowBuffers) {
+ Buffer *pInBuffer = new Buffer;
+ const size_t bufferSize = inBuffer.frameCount * mFrameSize;
+ pInBuffer->mBuffer = malloc(bufferSize);
+ LOG_ALWAYS_FATAL_IF(pInBuffer->mBuffer == nullptr,
+ "%s: Unable to malloc size %zu", __func__, bufferSize);
+ pInBuffer->frameCount = inBuffer.frameCount;
+ pInBuffer->raw = pInBuffer->mBuffer;
+ memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * mFrameSize);
+ mBufferQueue.add(pInBuffer);
+ ALOGV("%s(%d): thread %d adding overflow buffer %zu", __func__, mId,
+ (int)mThreadIoHandle, mBufferQueue.size());
+ // audio data is consumed (stored locally); set frameCount to 0.
+ inBuffer.frameCount = 0;
+ } else {
+ ALOGW("%s(%d): thread %d no more overflow buffers",
+ __func__, mId, (int)mThreadIoHandle);
+ // TODO: return error for this.
+ }
+}
+
void AudioFlinger::PlaybackThread::OutputTrack::copyMetadataTo(MetadataInserter& backInserter) const
{
std::lock_guard<std::mutex> lock(mTrackMetadatasMutex);
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 827f7d4..a114a38 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -50,7 +50,7 @@
size_t channelCount, audio_format_t format) {
std::lock_guard _l(mLock);
- if (mHalSoundDose != nullptr && !mDisableCsd) {
+ if (mHalSoundDose != nullptr && mEnabledCsd) {
ALOGD("%s: using HAL MEL computation, no MelProcessor needed.", __func__);
return nullptr;
}
@@ -216,7 +216,7 @@
if (id == AUDIO_PORT_HANDLE_NONE) {
ALOGI("%s: no mapped id for audio device with type %d and address %s",
__func__, in_audioDevice.type.type,
- in_audioDevice.address.get<AudioDeviceAddress::id>().c_str());
+ in_audioDevice.address.toString().c_str());
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
}
soundDoseManager->onMomentaryExposure(in_currentDbA, id);
@@ -243,7 +243,7 @@
if (id == AUDIO_PORT_HANDLE_NONE) {
ALOGI("%s: no mapped id for audio device with type %d and address %s",
__func__, in_audioDevice.type.type,
- in_audioDevice.address.get<AudioDeviceAddress::id>().c_str());
+ in_audioDevice.address.toString().c_str());
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
}
// TODO: introduce timestamp in onNewMelValues callback
@@ -290,11 +290,11 @@
return binder::Status::ok();
}
-binder::Status SoundDoseManager::SoundDose::disableCsd() {
+binder::Status SoundDoseManager::SoundDose::setCsdEnabled(bool enabled) {
ALOGV("%s", __func__);
auto soundDoseManager = mSoundDoseManager.promote();
if (soundDoseManager != nullptr) {
- soundDoseManager->disableCsd();
+ soundDoseManager->setCsdEnabled(enabled);
}
return binder::Status::ok();
}
@@ -365,26 +365,27 @@
}
}
-void SoundDoseManager::disableCsd() {
+void SoundDoseManager::setCsdEnabled(bool enabled) {
ALOGV("%s", __func__);
std::lock_guard _l(mLock);
- mDisableCsd = true;
+ mEnabledCsd = enabled;
- // Normally, there should be no active MelProcessors when this method is called
- // We pause however every cached MelProcessor as a defensive mechanism to not
- // have unnecessary processing
for (auto& activeEntry : mActiveProcessors) {
auto melProcessor = activeEntry.second.promote();
if (melProcessor != nullptr) {
- melProcessor->pause();
+ if (enabled) {
+ melProcessor->resume();
+ } else {
+ melProcessor->pause();
+ }
}
}
}
-bool SoundDoseManager::isCsdDisabled() {
+bool SoundDoseManager::isCsdEnabled() {
std::lock_guard _l(mLock);
- return mDisableCsd;
+ return mEnabledCsd;
}
void SoundDoseManager::setUseFrameworkMel(bool useFrameworkMel) {
@@ -411,7 +412,7 @@
}
bool SoundDoseManager::isSoundDoseHalSupported() const {
- if (mDisableCsd) {
+ if (!mEnabledCsd) {
return false;
}
@@ -455,7 +456,7 @@
float currentCsd;
{
std::lock_guard _l(mLock);
- if (mDisableCsd) {
+ if (!mEnabledCsd) {
return;
}
@@ -496,7 +497,7 @@
{
std::lock_guard _l(mLock);
- if (mDisableCsd) {
+ if (!mEnabledCsd) {
return;
}
}
@@ -522,7 +523,7 @@
std::string output;
{
std::lock_guard _l(mLock);
- if (mDisableCsd) {
+ if (!mEnabledCsd) {
base::StringAppendF(&output, "CSD is disabled");
return output;
}
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
index 5081ce4..6c02afb 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.h
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -101,8 +101,8 @@
/** Clear all map entries with passed audio_port_handle_t. */
void clearMapDeviceIdEntries(audio_port_handle_t deviceId);
- /** Returns true if CSD is disabled. */
- bool isCsdDisabled();
+ /** Returns true if CSD is enabled. */
+ bool isCsdEnabled();
std::string dump() const;
@@ -137,7 +137,7 @@
const std::vector<media::SoundDoseRecord>& records) override;
binder::Status updateAttenuation(float attenuationDB, int device) override;
binder::Status getOutputRs2UpperBound(float* value) override;
- binder::Status disableCsd() override;
+ binder::Status setCsdEnabled(bool enabled) override;
binder::Status getCsd(float* value) override;
binder::Status forceUseFrameworkMel(bool useFrameworkMel) override;
@@ -170,7 +170,7 @@
sp<media::ISoundDoseCallback> getSoundDoseCallback() const;
void updateAttenuation(float attenuationDB, audio_devices_t deviceType);
- void disableCsd();
+ void setCsdEnabled(bool enabled);
void setUseFrameworkMel(bool useFrameworkMel);
void setComputeCsdOnAllDevices(bool computeCsdOnAllDevices);
bool isSoundDoseHalSupported() const;
@@ -202,7 +202,7 @@
bool mUseFrameworkMel GUARDED_BY(mLock) = true;
bool mComputeCsdOnAllDevices GUARDED_BY(mLock) = false;
- bool mDisableCsd GUARDED_BY(mLock) = false;
+ bool mEnabledCsd GUARDED_BY(mLock) = true;
};
} // namespace android
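
For illustration only (not part of this change), the behavioral difference introduced by replacing the one-way disableCsd() with setCsdEnabled(): CSD can now be switched off and back on at runtime, pausing and resuming the cached MEL processors accordingly. A sketch:

    // Sketch: toggling CSD at runtime on a SoundDoseManager instance.
    soundDoseManager->setCsdEnabled(false);  // pauses every cached MelProcessor
    ALOGD_IF(!soundDoseManager->isCsdEnabled(), "CSD computation disabled");
    soundDoseManager->setCsdEnabled(true);   // resumes the cached MelProcessors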
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 7ee6566..09ca989 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -701,8 +701,10 @@
}
}
+ // TODO(b/73175392) consider improving the AIDL interface.
+ // Signal closing to A2DP HAL.
AudioParameter param;
- param.add(String8("closing"), String8("true"));
+ param.add(String8(AudioParameter::keyClosing), String8("true"));
mClientInterface->setParameters(mIoHandle, param.toString());
mClientInterface->closeOutput(mIoHandle);
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index c7a60c2..b111865 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -41,26 +41,25 @@
// AudioPolicyEffects Implementation
// ----------------------------------------------------------------------------
-AudioPolicyEffects::AudioPolicyEffects()
-{
- status_t loadResult = loadAudioEffectXmlConfig();
+AudioPolicyEffects::AudioPolicyEffects(const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
+ // load xml config with effectsFactoryHal
+ status_t loadResult = loadAudioEffectConfig(effectsFactoryHal);
if (loadResult == NO_ERROR) {
- mDefaultDeviceEffectFuture = std::async(
- std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
+ mDefaultDeviceEffectFuture =
+ std::async(std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
} else if (loadResult < 0) {
- ALOGW("Failed to load XML effect configuration, fallback to .conf");
+ ALOGW("Failed to query effect configuration, fallback to load .conf");
// load automatic audio effect modules
if (access(AUDIO_EFFECT_VENDOR_CONFIG_FILE, R_OK) == 0) {
- loadAudioEffectConfig(AUDIO_EFFECT_VENDOR_CONFIG_FILE);
+ loadAudioEffectConfigLegacy(AUDIO_EFFECT_VENDOR_CONFIG_FILE);
} else if (access(AUDIO_EFFECT_DEFAULT_CONFIG_FILE, R_OK) == 0) {
- loadAudioEffectConfig(AUDIO_EFFECT_DEFAULT_CONFIG_FILE);
+ loadAudioEffectConfigLegacy(AUDIO_EFFECT_DEFAULT_CONFIG_FILE);
}
} else if (loadResult > 0) {
ALOGE("Effect config is partially invalid, skipped %d elements", loadResult);
}
}
-
AudioPolicyEffects::~AudioPolicyEffects()
{
size_t i = 0;
@@ -907,10 +906,18 @@
return NO_ERROR;
}
-status_t AudioPolicyEffects::loadAudioEffectXmlConfig() {
- auto result = effectsConfig::parse();
- if (result.parsedConfig == nullptr) {
- return -ENOENT;
+status_t AudioPolicyEffects::loadAudioEffectConfig(
+ const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
+ if (!effectsFactoryHal) {
+ ALOGE("%s Null EffectsFactoryHalInterface", __func__);
+ return UNEXPECTED_NULL;
+ }
+
+ const auto skippedElements = VALUE_OR_RETURN_STATUS(effectsFactoryHal->getSkippedElements());
+ const auto processings = effectsFactoryHal->getProcessings();
+ if (!processings) {
+ ALOGE("%s Null processings with %zu skipped elements", __func__, skippedElements);
+ return UNEXPECTED_NULL;
}
auto loadProcessingChain = [](auto& processingChain, auto& streams) {
@@ -924,9 +931,8 @@
}
};
- auto loadDeviceProcessingChain = [](auto &processingChain, auto& devicesEffects) {
+ auto loadDeviceProcessingChain = [](auto& processingChain, auto& devicesEffects) {
for (auto& deviceProcess : processingChain) {
-
auto effectDescs = std::make_unique<EffectDescVector>();
for (auto& effect : deviceProcess.effects) {
effectDescs->mEffects.add(
@@ -938,17 +944,18 @@
}
};
- loadProcessingChain(result.parsedConfig->preprocess, mInputSources);
- loadProcessingChain(result.parsedConfig->postprocess, mOutputStreams);
+ loadProcessingChain(processings->preprocess, mInputSources);
+ loadProcessingChain(processings->postprocess, mOutputStreams);
+
{
Mutex::Autolock _l(mLock);
- loadDeviceProcessingChain(result.parsedConfig->deviceprocess, mDeviceEffects);
+ loadDeviceProcessingChain(processings->deviceprocess, mDeviceEffects);
}
- // Casting from ssize_t to status_t is probably safe, there should not be more than 2^31 errors
- return result.nbSkippedElement;
+
+ return skippedElements;
}
-status_t AudioPolicyEffects::loadAudioEffectConfig(const char *path)
+status_t AudioPolicyEffects::loadAudioEffectConfigLegacy(const char *path)
{
cnode *root;
char *data;
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index 13d5d0c..9f65a96 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -20,22 +20,33 @@
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
+#include <future>
+
+#include <android-base/thread_annotations.h>
#include <cutils/misc.h>
#include <media/AudioEffect.h>
+#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <system/audio.h>
#include <utils/Vector.h>
#include <utils/SortedVector.h>
-#include <android-base/thread_annotations.h>
-
-#include <future>
namespace android {
// ----------------------------------------------------------------------------
-// AudioPolicyEffects class
-// This class will manage all effects attached to input and output streams in
-// AudioPolicyService as configured in audio_effects.conf.
+/**
+ * AudioPolicyEffects class.
+ *
+ * This class manages all effects attached to input and output streams in AudioPolicyService.
+ * The effect configurations can be queried in several ways:
+ *
+ * With the HIDL HAL, the configuration file `audio_effects.xml` is loaded by libAudioHal. If this
+ * file does not exist, the AudioPolicyEffects class falls back to loading the configuration from
+ * `/vendor/etc/audio_effects.conf` (AUDIO_EFFECT_VENDOR_CONFIG_FILE). If that file does not exist
+ * either, the configuration is loaded from `/system/etc/audio_effects.conf`.
+ *
+ * With the AIDL HAL, the configuration is queried via `IFactory::queryProcessing()`.
+ */
class AudioPolicyEffects : public RefBase
{
@@ -44,7 +55,7 @@
// The constructor will parse audio_effects.conf
// First it will look whether vendor specific file exists,
// otherwise it will parse the system default file.
- AudioPolicyEffects();
+ explicit AudioPolicyEffects(const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
virtual ~AudioPolicyEffects();
// NOTE: methods on AudioPolicyEffects should never be called with the AudioPolicyService
@@ -218,7 +229,6 @@
};
-
static const char * const kInputSourceNames[AUDIO_SOURCE_CNT -1];
static audio_source_t inputSourceNameToEnum(const char *name);
@@ -226,8 +236,8 @@
audio_stream_type_t streamNameToEnum(const char *name);
// Parse audio_effects.conf
- status_t loadAudioEffectConfig(const char *path); // TODO: add legacy in the name
- status_t loadAudioEffectXmlConfig(); // TODO: remove "Xml" in the name
+ status_t loadAudioEffectConfigLegacy(const char *path);
+ status_t loadAudioEffectConfig(const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
// Load all effects descriptors in configuration file
status_t loadEffects(cnode *root, Vector <EffectDesc *>& effects);
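For reviewers, a minimal caller-side sketch of the new constructor contract; this is not part of the patch and simply mirrors the AudioPolicyService.cpp hunk further below (it assumes the includes already added to this header):

    // Sketch only: wiring the new explicit constructor. The factory may be null;
    // in that case loadAudioEffectConfig() returns UNEXPECTED_NULL (negative), and
    // the constructor falls back to the legacy .conf parsing path.
    sp<EffectsFactoryHalInterface> effectsFactoryHal = EffectsFactoryHalInterface::create();
    sp<AudioPolicyEffects> audioPolicyEffects = new AudioPolicyEffects(effectsFactoryHal);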
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index af7be52..0d12060 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -476,14 +476,14 @@
}
ALOGV("startOutput()");
sp<AudioPlaybackClient> client;
- sp<AudioPolicyEffects>audioPolicyEffects;
+ sp<AudioPolicyEffects> audioPolicyEffects;
getPlaybackClientAndEffects(portId, client, audioPolicyEffects, __func__);
if (audioPolicyEffects != 0) {
// create audio processors according to stream
- status_t status = audioPolicyEffects->addOutputSessionEffects(
- client->io, client->stream, client->session);
+ status_t status = audioPolicyEffects->addOutputSessionEffects(client->io, client->stream,
+ client->session);
if (status != NO_ERROR && status != ALREADY_EXISTS) {
ALOGW("Failed to add effects on session %d", client->session);
}
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 974ae38..193e68a 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -259,7 +259,8 @@
}
// load audio processing modules
- sp<AudioPolicyEffects> audioPolicyEffects = new AudioPolicyEffects();
+ const sp<EffectsFactoryHalInterface> effectsFactoryHal = EffectsFactoryHalInterface::create();
+ sp<AudioPolicyEffects> audioPolicyEffects = new AudioPolicyEffects(effectsFactoryHal);
sp<UidPolicy> uidPolicy = new UidPolicy(this);
sp<SensorPrivacyPolicy> sensorPrivacyPolicy = new SensorPrivacyPolicy(this);
{
@@ -278,7 +279,7 @@
AudioDeviceTypeAddrVector devices;
bool hasSpatializer = mAudioPolicyManager->canBeSpatialized(&attr, nullptr, devices);
if (hasSpatializer) {
- mSpatializer = Spatializer::create(this);
+ mSpatializer = Spatializer::create(this, effectsFactoryHal);
}
if (mSpatializer == nullptr) {
// No spatializer created, signal the reason: NO_INIT a failure, OK means intended.
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 95b8e7c..ee2e0eb 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -30,7 +30,6 @@
#include <audio_utils/fixedfft.h>
#include <cutils/bitops.h>
#include <hardware/sensors.h>
-#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/MediaMetricsItem.h>
@@ -215,18 +214,17 @@
};
// ---------------------------------------------------------------------------
-sp<Spatializer> Spatializer::create(SpatializerPolicyCallback *callback) {
+sp<Spatializer> Spatializer::create(SpatializerPolicyCallback* callback,
+ const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
sp<Spatializer> spatializer;
- sp<EffectsFactoryHalInterface> effectsFactoryHal = EffectsFactoryHalInterface::create();
if (effectsFactoryHal == nullptr) {
ALOGW("%s failed to create effect factory interface", __func__);
return spatializer;
}
std::vector<effect_descriptor_t> descriptors;
- status_t status =
- effectsFactoryHal->getDescriptors(FX_IID_SPATIALIZER, &descriptors);
+ status_t status = effectsFactoryHal->getDescriptors(FX_IID_SPATIALIZER, &descriptors);
if (status != NO_ERROR) {
ALOGW("%s failed to get spatializer descriptor, error %d", __func__, status);
return spatializer;
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 23de0c0..0d4d3f6 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -27,6 +27,7 @@
#include <audio_utils/SimpleLog.h>
#include <math.h>
#include <media/AudioEffect.h>
+#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <media/VectorRecorder.h>
#include <media/audiohal/EffectHalInterface.h>
#include <media/stagefright/foundation/ALooper.h>
@@ -95,7 +96,8 @@
private SpatializerPoseController::Listener,
public virtual AudioSystem::SupportedLatencyModesCallback {
public:
- static sp<Spatializer> create(SpatializerPolicyCallback *callback);
+ static sp<Spatializer> create(SpatializerPolicyCallback* callback,
+ const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
~Spatializer() override;
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 1564ff3..a2aafdf 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -2159,6 +2159,7 @@
client->setImageDumpMask(mImageDumpMask);
client->setStreamUseCaseOverrides(mStreamUseCaseOverrides);
+ client->setZoomOverride(mZoomOverrideValue);
} // lock is destroyed, allow further connect calls
// Important: release the mutex here so the client can call back into the service from its
@@ -5123,6 +5124,8 @@
} else if (args.size() >= 1 && args[0] == String16("clear-stream-use-case-override")) {
handleClearStreamUseCaseOverrides();
return OK;
+ } else if (args.size() >= 2 && args[0] == String16("set-zoom-override")) {
+ return handleSetZoomOverride(args);
} else if (args.size() >= 2 && args[0] == String16("watch")) {
return handleWatchCommand(args, in, out);
} else if (args.size() >= 2 && args[0] == String16("set-watchdog")) {
@@ -5366,6 +5369,34 @@
mStreamUseCaseOverrides.clear();
}
+status_t CameraService::handleSetZoomOverride(const Vector<String16>& args) {
+ char* end;
+ int zoomOverrideValue = strtol(String8(args[1]), &end, /*base=*/10);
+ if ((*end != '\0') ||
+ (zoomOverrideValue != -1 &&
+ zoomOverrideValue != ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF &&
+ zoomOverrideValue != ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM)) {
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock lock(mServiceLock);
+ mZoomOverrideValue = zoomOverrideValue;
+
+ const auto clients = mActiveClientManager.getAll();
+ for (auto& current : clients) {
+ if (current != nullptr) {
+ const auto basicClient = current->getValue();
+ if (basicClient.get() != nullptr) {
+ if (basicClient->supportsZoomOverride()) {
+ basicClient->setZoomOverride(mZoomOverrideValue);
+ }
+ }
+ }
+ }
+
+ return OK;
+}
+
status_t CameraService::handleWatchCommand(const Vector<String16>& args, int inFd, int outFd) {
if (args.size() >= 3 && args[1] == String16("start")) {
return startWatchingTags(args, outFd);
@@ -5732,6 +5763,8 @@
" Valid values are (case sensitive): DEFAULT, PREVIEW, STILL_CAPTURE, VIDEO_RECORD,\n"
" PREVIEW_VIDEO_STILL, VIDEO_CALL, CROPPED_RAW\n"
" clear-stream-use-case-override clear the stream use case override\n"
+ " set-zoom-override <-1/0/1> enable or disable zoom override\n"
+ " Valid values -1: do not override, 0: override to OFF, 1: override to ZOOM\n"
" watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
" help print this message\n");
}
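A small illustrative helper, not part of the patch, making the accepted value contract of the new shell argument explicit. The enum names come from the patch; the helper name, the header path, and the `cmd media.camera` invocation are assumptions:

    // Illustrative only: the values handleSetZoomOverride() accepts, e.g. from
    // `adb shell cmd media.camera set-zoom-override 1` (service name assumed).
    #include <system/camera_metadata_tags.h>  // assumed header for the SETTINGS_OVERRIDE enums

    static bool isValidZoomOverride(int32_t value) {
        return value == -1 ||                                     // stop overriding, restore app value
               value == ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF ||  // force override OFF (0)
               value == ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM;   // force zoom override (1)
    }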
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index d8b14d7..ecec15f 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -371,6 +371,12 @@
// Clear stream use case overrides
virtual void clearStreamUseCaseOverrides() = 0;
+ // Whether the client supports camera zoom override
+ virtual bool supportsZoomOverride() = 0;
+
+ // Set/reset zoom override
+ virtual status_t setZoomOverride(int32_t zoomOverride) = 0;
+
// The injection camera session to replace the internal camera
// session.
virtual status_t injectCamera(const String8& injectedCamId,
@@ -1306,6 +1312,9 @@
// Clear the stream use case overrides
void handleClearStreamUseCaseOverrides();
+ // Set or clear the zoom override flag
+ status_t handleSetZoomOverride(const Vector<String16>& args);
+
// Handle 'watch' command as passed through 'cmd'
status_t handleWatchCommand(const Vector<String16> &args, int inFd, int outFd);
@@ -1408,6 +1417,9 @@
// Current stream use case overrides
std::vector<int64_t> mStreamUseCaseOverrides;
+ // Current zoom override value
+ int32_t mZoomOverrideValue = -1;
+
/**
* A listener class that implements the IBinder::DeathRecipient interface
* for use to call back the error state injected by the external camera, and
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index d71462f..06ef88a 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -2391,6 +2391,14 @@
mDevice->clearStreamUseCaseOverrides();
}
+bool Camera2Client::supportsZoomOverride() {
+ return mDevice->supportsZoomOverride();
+}
+
+status_t Camera2Client::setZoomOverride(int32_t zoomOverride) {
+ return mDevice->setZoomOverride(zoomOverride);
+}
+
status_t Camera2Client::waitUntilCurrentRequestIdLocked() {
int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
if (activeRequestId != 0) {
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 6d7651f..6bdb644 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -97,6 +97,9 @@
const std::vector<int64_t>& useCaseOverrides);
virtual void clearStreamUseCaseOverrides();
+ virtual bool supportsZoomOverride();
+ virtual status_t setZoomOverride(int32_t zoomOverride);
+
/**
* Interface used by CameraService
*/
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 55b0f03..fda6af3 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -1789,6 +1789,14 @@
mDevice->clearStreamUseCaseOverrides();
}
+bool CameraDeviceClient::supportsZoomOverride() {
+ return mDevice->supportsZoomOverride();
+}
+
+status_t CameraDeviceClient::setZoomOverride(int32_t zoomOverride) {
+ return mDevice->setZoomOverride(zoomOverride);
+}
+
binder::Status CameraDeviceClient::switchToOffline(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
const std::vector<int>& offlineOutputIds,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index c6688a5..1533cf5 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -204,6 +204,9 @@
virtual bool supportsCameraMute();
virtual status_t setCameraMute(bool enabled);
+ virtual bool supportsZoomOverride() override;
+ virtual status_t setZoomOverride(int32_t zoomOverride) override;
+
virtual status_t dump(int fd, const Vector<String16>& args);
virtual status_t dumpClient(int fd, const Vector<String16>& args);
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 29d7e6f..66077c5 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -106,6 +106,14 @@
void CameraOfflineSessionClient::clearStreamUseCaseOverrides() {
}
+bool CameraOfflineSessionClient::supportsZoomOverride() {
+ return false;
+}
+
+status_t CameraOfflineSessionClient::setZoomOverride(int32_t /*zoomOverride*/) {
+ return INVALID_OPERATION;
+}
+
status_t CameraOfflineSessionClient::dump(int fd, const Vector<String16>& args) {
return BasicClient::dump(fd, args);
}
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 906d454..ad763f9 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -94,6 +94,10 @@
void clearStreamUseCaseOverrides() override;
+ bool supportsZoomOverride() override;
+
+ status_t setZoomOverride(int32_t zoomOverride) override;
+
// permissions management
status_t startCameraOps() override;
status_t finishCameraOps() override;
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index fd80cc5..530b03e 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -128,6 +128,9 @@
int32_t mOriginalTestPatternMode = 0;
int32_t mOriginalTestPatternData[4] = {};
+ // Original value of SETTINGS_OVERRIDE so that it can be restored if the
+ // camera service isn't overwriting the app's value.
+ int32_t mOriginalSettingsOverride = ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
};
typedef List<PhysicalCameraSettings> PhysicalCameraSettingsList;
@@ -467,6 +470,14 @@
virtual status_t setCameraMute(bool enabled) = 0;
/**
+ * Whether the camera device supports zoom override.
+ */
+ virtual bool supportsZoomOverride() = 0;
+
+ // Set/reset zoom override
+ virtual status_t setZoomOverride(int32_t zoomOverride) = 0;
+
+ /**
* Enable/disable camera service watchdog
*/
virtual status_t setCameraServiceWatchdog(bool enabled) = 0;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 2ebb98a..77abeeb 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -1110,7 +1110,7 @@
for (size_t i = 0; i < entry.count; i += 3) {
if (entry.data.i64[i] == profile) {
- if (entry.data.i64[i+1] & concurrentProfile) {
+ if ((entry.data.i64[i+1] == 0) || (entry.data.i64[i+1] & concurrentProfile)) {
return true;
}
}
@@ -2526,6 +2526,10 @@
(mDeviceStateOrientationMap.find(newState) != mDeviceStateOrientationMap.end())) {
mCameraCharacteristics.update(ANDROID_SENSOR_ORIENTATION,
&mDeviceStateOrientationMap[newState], 1);
+ if (mCameraCharNoPCOverride.get() != nullptr) {
+ mCameraCharNoPCOverride->update(ANDROID_SENSOR_ORIENTATION,
+ &mDeviceStateOrientationMap[newState], 1);
+ }
}
}
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index a13b937..8ff5c3f 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -697,6 +697,11 @@
mTorchStrengthLevel = 0;
+ if (!kEnableLazyHal) {
+ // Save HAL reference indefinitely
+ mSavedInterface = interface;
+ }
+
queryPhysicalCameraIds();
// Get physical camera characteristics if applicable
@@ -757,13 +762,6 @@
}
}
}
-
- if (!kEnableLazyHal) {
- // Save HAL reference indefinitely
- mSavedInterface = interface;
- }
-
-
}
status_t HidlProviderInfo::HidlDeviceInfo3::setTorchMode(bool enabled) {
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 153e999..dcafd74 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -103,6 +103,7 @@
mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
mComposerOutput(false),
mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
+ mSettingsOverride(-1),
mActivePhysicalId("")
{
ATRACE_CALL();
@@ -172,10 +173,21 @@
}
}
+ camera_metadata_entry_t availableSettingsOverrides = mDeviceInfo.find(
+ ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES);
+ for (size_t i = 0; i < availableSettingsOverrides.count; i++) {
+ if (availableSettingsOverrides.data.i32[i] ==
+ ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM) {
+ mSupportZoomOverride = true;
+ break;
+ }
+ }
+
/** Start up request queue thread */
mRequestThread = createNewRequestThread(
this, mStatusTracker, mInterface, sessionParamKeys,
- mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait);
+ mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait,
+ mSupportZoomOverride);
res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
if (res != OK) {
SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -2232,6 +2244,16 @@
}
}
+ if (mSupportZoomOverride) {
+ for (auto& settings : newRequest->mSettingsList) {
+ auto settingsOverrideEntry =
+ settings.metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
+ settings.mOriginalSettingsOverride = settingsOverrideEntry.count > 0 ?
+ settingsOverrideEntry.data.i32[0] :
+ ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
+ }
+ }
+
return newRequest;
}
@@ -2959,7 +2981,8 @@
sp<HalInterface> interface, const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) :
+ bool overrideToPortrait,
+ bool supportSettingsOverride) :
Thread(/*canCallJava*/false),
mParent(parent),
mStatusTracker(statusTracker),
@@ -2981,6 +3004,7 @@
mComposerOutput(false),
mCameraMute(ANDROID_SENSOR_TEST_PATTERN_MODE_OFF),
mCameraMuteChanged(false),
+ mSettingsOverride(ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF),
mRepeatingLastFrameNumber(
hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES),
mPrepareVideoStream(false),
@@ -2990,7 +3014,8 @@
mLatestSessionParams(sessionParamKeys.size()),
mUseHalBufManager(useHalBufManager),
mSupportCameraMute(supportCameraMute),
- mOverrideToPortrait(overrideToPortrait) {
+ mOverrideToPortrait(overrideToPortrait),
+ mSupportSettingsOverride(supportSettingsOverride) {
mStatusId = statusTracker->addComponent("RequestThread");
mVndkVersion = property_get_int32("ro.vndk.version", __ANDROID_API_FUTURE__);
}
@@ -3671,6 +3696,7 @@
mPrevTriggers = triggerCount;
bool testPatternChanged = overrideTestPattern(captureRequest);
+ bool settingsOverrideChanged = overrideSettingsOverride(captureRequest);
// If the request is the same as last, or we had triggers now or last time or
// changing overrides this time
@@ -3678,7 +3704,7 @@
(mPrevRequest != captureRequest || triggersMixedIn ||
captureRequest->mRotateAndCropChanged ||
captureRequest->mAutoframingChanged ||
- testPatternChanged) &&
+ testPatternChanged || settingsOverrideChanged) &&
// Request settings are all the same within one batch, so only treat the first
// request in a batch as new
!(batchedRequest && i > 0);
@@ -4182,6 +4208,13 @@
return OK;
}
+status_t Camera3Device::RequestThread::setZoomOverride(int32_t zoomOverride) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mTriggerMutex);
+ mSettingsOverride = zoomOverride;
+ return OK;
+}
+
nsecs_t Camera3Device::getExpectedInFlightDuration() {
ATRACE_CALL();
std::lock_guard<std::mutex> l(mInFlightLock);
@@ -4883,6 +4916,33 @@
return changed;
}
+bool Camera3Device::RequestThread::overrideSettingsOverride(
+ const sp<CaptureRequest> &request) {
+ ATRACE_CALL();
+
+ if (!mSupportSettingsOverride) return false;
+
+ Mutex::Autolock l(mTriggerMutex);
+
+ // For a multi-camera, only override the logical camera's metadata.
+ CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+ camera_metadata_entry entry = metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
+ int32_t originalValue = request->mSettingsList.begin()->mOriginalSettingsOverride;
+ if (mSettingsOverride != -1 &&
+ (entry.count == 0 || entry.data.i32[0] != mSettingsOverride)) {
+ metadata.update(ANDROID_CONTROL_SETTINGS_OVERRIDE,
+ &mSettingsOverride, 1);
+ return true;
+ } else if (mSettingsOverride == -1 &&
+ (entry.count == 0 || entry.data.i32[0] != originalValue)) {
+ metadata.update(ANDROID_CONTROL_SETTINGS_OVERRIDE,
+ &originalValue, 1);
+ return true;
+ }
+
+ return false;
+}
+
status_t Camera3Device::RequestThread::setHalInterface(
sp<HalInterface> newHalInterface) {
if (newHalInterface.get() == nullptr) {
@@ -5349,6 +5409,25 @@
return mRequestThread->setCameraMute(muteMode);
}
+bool Camera3Device::supportsZoomOverride() {
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
+
+ return mSupportZoomOverride;
+}
+
+status_t Camera3Device::setZoomOverride(int32_t zoomOverride) {
+ ATRACE_CALL();
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
+
+ if (mRequestThread == nullptr || !mSupportZoomOverride) {
+ return INVALID_OPERATION;
+ }
+
+ return mRequestThread->setZoomOverride(zoomOverride);
+}
+
status_t Camera3Device::injectCamera(const String8& injectedCamId,
sp<CameraProviderManager> manager) {
ALOGI("%s Injection camera: injectedCamId = %s", __FUNCTION__, injectedCamId.string());
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index e045b98..abfd9aa 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -312,6 +312,14 @@
// Clear stream use case overrides
void clearStreamUseCaseOverrides();
+ /**
+ * Whether the camera device supports zoom override.
+ */
+ bool supportsZoomOverride();
+
+ // Set/reset zoom override
+ status_t setZoomOverride(int32_t zoomOverride);
+
// Get the status tracker for the camera device
wp<camera3::StatusTracker> getStatusTracker() { return mStatusTracker; }
@@ -862,7 +870,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait);
+ bool overrideToPortrait,
+ bool supportSettingsOverride);
~RequestThread();
void setNotificationListener(wp<NotificationListener> listener);
@@ -969,6 +978,8 @@
status_t setCameraMute(int32_t muteMode);
+ status_t setZoomOverride(int32_t zoomOverride);
+
status_t setHalInterface(sp<HalInterface> newHalInterface);
protected:
@@ -998,6 +1009,10 @@
// if the current value was changed
bool overrideTestPattern(const sp<CaptureRequest> &request);
+ // Override settings override if needed for lower zoom latency; return
+ // true if the current value was changed
+ bool overrideSettingsOverride(const sp<CaptureRequest> &request);
+
static const nsecs_t kRequestTimeout = 50e6; // 50 ms
// TODO: does this need to be adjusted for long exposure requests?
@@ -1132,6 +1147,8 @@
bool mComposerOutput;
int32_t mCameraMute; // 0 = no mute, otherwise the TEST_PATTERN_MODE to use
bool mCameraMuteChanged;
+ int32_t mSettingsOverride; // -1 = use original, otherwise
+ // the settings override to use.
int64_t mRepeatingLastFrameNumber;
@@ -1151,6 +1168,7 @@
const bool mUseHalBufManager;
const bool mSupportCameraMute;
const bool mOverrideToPortrait;
+ const bool mSupportSettingsOverride;
int32_t mVndkVersion = -1;
};
@@ -1160,7 +1178,8 @@
const Vector<int32_t>& /*sessionParamKeys*/,
bool /*useHalBufManager*/,
bool /*supportCameraMute*/,
- bool /*overrideToPortrait*/) = 0;
+ bool /*overrideToPortrait*/,
+ bool /*supportSettingsOverride*/) = 0;
sp<RequestThread> mRequestThread;
@@ -1425,6 +1444,8 @@
bool mSupportCameraMute = false;
// Whether the HAL supports SOLID_COLOR or BLACK if mSupportCameraMute is true
bool mSupportTestPatternSolidColor = false;
+ // Whether the HAL supports zoom settings override
+ bool mSupportZoomOverride = false;
// Whether the camera framework overrides the device characteristics for
// performance class.
@@ -1439,6 +1460,10 @@
// Auto framing override value
camera_metadata_enum_android_control_autoframing mAutoframingOverride;
+ // Settings override value
+ int32_t mSettingsOverride; // -1 = use original, otherwise
+ // the settings override to use.
+
// Current active physical id of the logical multi-camera, if any
std::string mActivePhysicalId;
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 3b1eba3..e186f13 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -1419,9 +1419,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) :
+ bool overrideToPortrait,
+ bool supportSettingsOverride) :
RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
- supportCameraMute, overrideToPortrait) {}
+ supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
const std::vector<int32_t>& streamsToKeep,
@@ -1591,9 +1592,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) {
+ bool overrideToPortrait,
+ bool supportSettingsOverride) {
return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
- useHalBufManager, supportCameraMute, overrideToPortrait);
+ useHalBufManager, supportCameraMute, overrideToPortrait, supportSettingsOverride);
};
sp<Camera3Device::Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index 8ee5c63..99a308b 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -179,7 +179,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait);
+ bool overrideToPortrait,
+ bool supportSettingsOverride);
status_t switchToOffline(
const std::vector<int32_t>& streamsToKeep,
@@ -265,7 +266,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) override;
+ bool overrideToPortrait,
+ bool supportSettingsOverride) override;
virtual sp<Camera3DeviceInjectionMethods>
createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index b367019..0d44dd5 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -704,9 +704,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) {
+ bool overrideToPortrait,
+ bool supportSettingsOverride) {
return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
- useHalBufManager, supportCameraMute, overrideToPortrait);
+ useHalBufManager, supportCameraMute, overrideToPortrait, supportSettingsOverride);
};
sp<Camera3Device::Camera3DeviceInjectionMethods>
@@ -1701,9 +1702,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) :
+ bool overrideToPortrait,
+ bool supportSettingsOverride) :
RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
- supportCameraMute, overrideToPortrait) {}
+ supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
status_t HidlCamera3Device::HidlRequestThread::switchToOffline(
const std::vector<int32_t>& streamsToKeep,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index 7b216b2..1e50844 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -177,7 +177,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait);
+ bool overrideToPortrait,
+ bool supportSettingsOverride);
status_t switchToOffline(
const std::vector<int32_t>& streamsToKeep,
@@ -225,7 +226,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) override;
+ bool overrideToPortrait,
+ bool supportSettingsOverride) override;
virtual sp<Camera3DeviceInjectionMethods>
createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index cb5e783..158914a 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -438,7 +438,7 @@
}
AStatsEvent_writeInt32(event, hdr10PlusInfo);
- int32_t hdrFormat= -1;
+ int32_t hdrFormat = -1;
if (item->getInt32("android.media.mediacodec.hdr-format", &hdrFormat)) {
metrics_proto.set_hdr_format(hdrFormat);
}
@@ -450,6 +450,61 @@
}
AStatsEvent_writeInt64(event, codecId);
+ int32_t arrayMode = -1;
+ if (item->getInt32("android.media.mediacodec.array-mode", &arrayMode)) {
+ metrics_proto.set_array_mode(arrayMode);
+ }
+ AStatsEvent_writeInt32(event, arrayMode);
+
+ int32_t operationMode = -1;
+ if (item->getInt32("android.media.mediacodec.operation-mode", &operationMode)) {
+ metrics_proto.set_operation_mode(operationMode);
+ }
+ AStatsEvent_writeInt32(event, operationMode);
+
+ int32_t outputSurface = -1;
+ if (item->getInt32("android.media.mediacodec.output-surface", &outputSurface)) {
+ metrics_proto.set_output_surface(outputSurface);
+ }
+ AStatsEvent_writeInt32(event, outputSurface);
+
+ int32_t appMaxInputSize = -1;
+ if (item->getInt32("android.media.mediacodec.app-max-input-size", &appMaxInputSize)) {
+ metrics_proto.set_app_max_input_size(appMaxInputSize);
+ }
+ AStatsEvent_writeInt32(event, appMaxInputSize);
+
+ int32_t usedMaxInputSize = -1;
+ if (item->getInt32("android.media.mediacodec.used-max-input-size", &usedMaxInputSize)) {
+ metrics_proto.set_used_max_input_size(usedMaxInputSize);
+ }
+ AStatsEvent_writeInt32(event, usedMaxInputSize);
+
+ int32_t codecMaxInputSize = -1;
+ if (item->getInt32("android.media.mediacodec.codec-max-input-size", &codecMaxInputSize)) {
+ metrics_proto.set_codec_max_input_size(codecMaxInputSize);
+ }
+ AStatsEvent_writeInt32(event, codecMaxInputSize);
+
+ int32_t flushCount = -1;
+ if (item->getInt32("android.media.mediacodec.flush-count", &flushCount)) {
+ metrics_proto.set_flush_count(flushCount);
+ }
+ AStatsEvent_writeInt32(event, flushCount);
+
+ int32_t setSurfaceCount = -1;
+ if (item->getInt32("android.media.mediacodec.set-surface-count", &setSurfaceCount)) {
+ metrics_proto.set_set_surface_count(setSurfaceCount);
+ }
+ AStatsEvent_writeInt32(event, setSurfaceCount);
+
+ int32_t resolutionChangeCount = -1;
+ if (item->getInt32("android.media.mediacodec.resolution-change-count",
+ &resolutionChangeCount)) {
+ metrics_proto.set_resolution_change_count(resolutionChangeCount);
+ }
+ AStatsEvent_writeInt32(event, resolutionChangeCount);
+
int err = AStatsEvent_write(event);
if (err < 0) {
ALOGE("Failed to write codec metrics to statsd (%d)", err);