Merge "Virtualizer Effect : Add support to force virtualization mode" into main
diff --git a/drm/libmediadrmrkp/Android.bp b/drm/libmediadrmrkp/Android.bp
index f13eb62..b1a01e4 100644
--- a/drm/libmediadrmrkp/Android.bp
+++ b/drm/libmediadrmrkp/Android.bp
@@ -5,7 +5,7 @@
"src/**/*.cpp",
],
export_include_dirs: [
- "include"
+ "include",
],
shared_libs: [
"libbinder_ndk",
@@ -17,7 +17,7 @@
"android.hardware.drm-V1-ndk",
"android.hardware.security.rkp-V3-ndk",
"libbase",
- "libcppbor_external",
+ "libcppbor",
],
defaults: [
"keymint_use_latest_hal_aidl_ndk_shared",
@@ -42,7 +42,7 @@
"android.hardware.drm-V1-ndk",
"android.hardware.security.rkp-V3-ndk",
"libbase",
- "libcppbor_external",
+ "libcppbor",
"libmediadrmrkp",
],
vendor: true,
@@ -50,4 +50,4 @@
"-Wall",
"-Werror",
],
-}
\ No newline at end of file
+}
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
index d1f0fb5..4c2ef9c 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
@@ -83,6 +83,7 @@
}
}
+// @VsrTest = 3.2-001.003
TEST_P(Codec2MasterHalTest, MustUseAidlBeyond202404) {
static int sVendorApiLevel = android::base::GetIntProperty("ro.vendor.api_level", 0);
if (sVendorApiLevel < 202404) {
diff --git a/media/codec2/sfplugin/C2AidlNode.cpp b/media/codec2/sfplugin/C2AidlNode.cpp
index c20f50d..93c9d8b 100644
--- a/media/codec2/sfplugin/C2AidlNode.cpp
+++ b/media/codec2/sfplugin/C2AidlNode.cpp
@@ -29,9 +29,6 @@
using ::aidl::android::media::IAidlBufferSource;
using ::aidl::android::media::IAidlNode;
-using ::android::media::CommandStateSet;
-using ::android::media::NodeStatusLoaded;
-
// Conversion
using ::android::media::aidl_conversion::toAidlStatus;
@@ -43,13 +40,6 @@
return toAidlStatus(mImpl->freeNode());
}
-::ndk::ScopedAStatus C2AidlNode::sendCommand(int32_t cmd, int32_t param) {
- if (cmd == CommandStateSet && param == NodeStatusLoaded) {
- mImpl->onFirstInputFrame();
- }
- return toAidlStatus(ERROR_UNSUPPORTED);
-}
-
::ndk::ScopedAStatus C2AidlNode::getConsumerUsage(int64_t* _aidl_return) {
uint64_t usage;
mImpl->getConsumerUsageBits(&usage);
diff --git a/media/codec2/sfplugin/C2AidlNode.h b/media/codec2/sfplugin/C2AidlNode.h
index a7c328e..365a41d 100644
--- a/media/codec2/sfplugin/C2AidlNode.h
+++ b/media/codec2/sfplugin/C2AidlNode.h
@@ -34,8 +34,6 @@
// IAidlNode
::ndk::ScopedAStatus freeNode() override;
- ::ndk::ScopedAStatus sendCommand(int32_t cmd, int32_t param) override;
-
::ndk::ScopedAStatus getConsumerUsage(int64_t *_aidl_return) override;
::ndk::ScopedAStatus getInputBufferParams(
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 37a7a4f..692f700 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -687,6 +687,11 @@
const MediaCodecsXmlParser::AttributeMap &attrMap = typeIt->second;
std::unique_ptr<MediaCodecInfo::CapabilitiesWriter> caps =
codecInfo->addMediaType(mediaType.c_str());
+
+ // we could detect tunneled playback via the playback interface, but we never did
+ // that for the advertised feature, so for now use only the advertised feature.
+ bool canDoTunneledPlayback = false;
+
for (const auto &v : attrMap) {
std::string key = v.first;
std::string value = v.second;
@@ -707,6 +712,11 @@
// Ignore trailing bad characters and default to 0.
(void)sscanf(value.c_str(), "%d", &intValue);
caps->addDetail(key.c_str(), intValue);
+
+ if (key.compare(
+ MediaCodecInfo::Capabilities::FEATURE_TUNNELED_PLAYBACK) == 0) {
+ canDoTunneledPlayback = true;
+ }
} else {
caps->addDetail(key.c_str(), value.c_str());
}
@@ -774,6 +784,17 @@
}
}
}
+
+ if (android::media::codec::provider_->null_output_surface_support() &&
+ android::media::codec::provider_->null_output_surface()) {
+ // all non-tunneled video decoders support detached surface mode
+ if (trait.kind == C2Component::KIND_DECODER &&
+ trait.domain == C2Component::DOMAIN_VIDEO &&
+ !canDoTunneledPlayback) {
+ caps->addDetail(
+ MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE, 0);
+ }
+ }
}
}
}
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index f865603..2447b18 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -478,15 +478,21 @@
__func__, ret.desc.toString().c_str());
return NO_INIT;
}
- *outStream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
+ auto stream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
std::move(ret.stream), mVendorExt, this /*callbackBroker*/);
- void* cbCookie = (*outStream).get();
+ *outStream = stream;
+ /* StreamOutHalInterface* */ void* cbCookie = (*outStream).get();
{
std::lock_guard l(mLock);
mCallbacks.emplace(cbCookie, Callbacks{});
mMapper.addStream(*outStream, mixPortConfig.id, aidlPatch.id);
}
- if (streamCb) streamCb->setCookie(cbCookie);
+ if (streamCb) {
+ streamCb->setCookie(cbCookie);
+ // Although StreamOutHalAidl implements StreamOutHalInterfaceCallback,
+ // we always go via the CallbackBroker for consistency.
+ setStreamOutCallback(cbCookie, stream);
+ }
eventCb->setCookie(cbCookie);
cleanups.disarmAll();
return OK;
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index ce56d87..3fd0911 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -82,7 +82,12 @@
mConfig(configToBase(config)),
mContext(std::move(context)),
mStream(stream),
- mVendorExt(vext) {
+ mVendorExt(vext),
+ mLastReplyLifeTimeNs(
+ std::min(static_cast<size_t>(100),
+ 2 * mContext.getBufferDurationMs(mConfig.sample_rate))
+ * NANOS_PER_MILLISECOND)
+{
ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
{
std::lock_guard l(mLock);
@@ -285,8 +290,7 @@
ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
if (!mStream) return NO_INIT;
StreamDescriptor::Reply reply;
- // TODO: switch to updateCountersIfNeeded once we sort out mWorkerTid initialization
- RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), &reply, true));
+ RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
*frames = std::max<int64_t>(0, reply.hardware.frames);
*timestamp = std::max<int64_t>(0, reply.hardware.timeNs);
return OK;
@@ -409,6 +413,37 @@
return statusTFromBinderStatus(mStream->prepareToClose());
}
+void StreamHalAidl::onAsyncTransferReady() {
+ if (auto state = getState(); state == StreamDescriptor::State::TRANSFERRING) {
+ // Retrieve the current state together with position counters.
+ updateCountersIfNeeded();
+ } else {
+ ALOGW("%s: unexpected onTransferReady in the state %s", __func__, toString(state).c_str());
+ }
+}
+
+void StreamHalAidl::onAsyncDrainReady() {
+ if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
+ // Retrieve the current state together with position counters.
+ updateCountersIfNeeded();
+ } else {
+ ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
+ }
+}
+
+void StreamHalAidl::onAsyncError() {
+ std::lock_guard l(mLock);
+ if (mLastReply.state == StreamDescriptor::State::IDLE ||
+ mLastReply.state == StreamDescriptor::State::DRAINING ||
+ mLastReply.state == StreamDescriptor::State::TRANSFERRING) {
+ mLastReply.state = StreamDescriptor::State::ERROR;
+ ALOGW("%s: onError received", __func__);
+ } else {
+ ALOGW("%s: unexpected onError in the state %s", __func__,
+ toString(mLastReply.state).c_str());
+ }
+}
+
status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
struct audio_mmap_buffer_info *info) {
ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
@@ -487,6 +522,7 @@
reply->latencyMs = mLastReply.latencyMs;
}
mLastReply = *reply;
+ mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
}
switch (reply->status) {
case STATUS_OK: return OK;
@@ -502,14 +538,17 @@
status_t StreamHalAidl::updateCountersIfNeeded(
::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply) {
- if (mWorkerTid.load(std::memory_order_acquire) == gettid()) {
- if (const auto state = getState(); state != StreamDescriptor::State::ACTIVE &&
- state != StreamDescriptor::State::DRAINING &&
- state != StreamDescriptor::State::TRANSFERRING) {
- return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), reply);
- }
+ bool doUpdate = false;
+ {
+ std::lock_guard l(mLock);
+ doUpdate = uptimeNanos() > mLastReplyExpirationNs;
}
- if (reply != nullptr) {
+ if (doUpdate) {
+ // Since updates are paced, it is OK to perform them from any thread, they should
+ // not interfere with I/O operations of the worker.
+ return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
+ reply, true /*safeFromNonWorkerThread */);
+ } else if (reply != nullptr) { // provide cached reply
std::lock_guard l(mLock);
*reply = mLastReply;
}
@@ -545,7 +584,7 @@
StreamOutHalAidl::~StreamOutHalAidl() {
if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
- broker->clearCallbacks(this);
+ broker->clearCallbacks(static_cast<StreamOutHalInterface*>(this));
}
}
@@ -614,21 +653,14 @@
}
status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
+ ALOGD("%p %s", this, __func__);
TIME_CHECK();
if (!mStream) return NO_INIT;
if (!mContext.isAsynchronous()) {
ALOGE("%s: the callback is intended for asynchronous streams only", __func__);
return INVALID_OPERATION;
}
- if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
- if (auto cb = callback.promote(); cb != nullptr) {
- broker->setStreamOutCallback(this, cb);
- } else {
- // It is expected that the framework never passes a null pointer.
- // In the AIDL model callbacks can't be "unregistered".
- LOG_ALWAYS_FATAL("%s: received an expired or null callback pointer", __func__);
- }
- }
+ mClientCallback = callback;
return OK;
}
@@ -751,7 +783,7 @@
TIME_CHECK();
if (!mStream) return NO_INIT;
if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
- broker->setStreamOutEventCallback(this, callback);
+ broker->setStreamOutEventCallback(static_cast<StreamOutHalInterface*>(this), callback);
}
return OK;
}
@@ -785,7 +817,8 @@
TIME_CHECK();
if (!mStream) return NO_INIT;
if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
- broker->setStreamOutLatencyModeCallback(this, callback);
+ broker->setStreamOutLatencyModeCallback(
+ static_cast<StreamOutHalInterface*>(this), callback);
}
return OK;
};
@@ -794,6 +827,27 @@
return StreamHalAidl::exit();
}
+void StreamOutHalAidl::onWriteReady() {
+ onAsyncTransferReady();
+ if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
+ clientCb->onWriteReady();
+ }
+}
+
+void StreamOutHalAidl::onDrainReady() {
+ onAsyncDrainReady();
+ if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
+ clientCb->onDrainReady();
+ }
+}
+
+void StreamOutHalAidl::onError() {
+ onAsyncError();
+ if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
+ clientCb->onError();
+ }
+}
+
status_t StreamOutHalAidl::filterAndUpdateOffloadMetadata(AudioParameter ¶meters) {
TIME_CHECK();
bool updateMetadata = false;
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index 4acc6ac..53d46e5 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -32,6 +32,7 @@
#include <media/audiohal/StreamHalInterface.h>
#include <media/AidlConversionUtil.h>
#include <media/AudioParameter.h>
+#include <mediautils/Synchronization.h>
#include "ConversionHelperAidl.h"
#include "StreamPowerLog.h"
@@ -93,6 +94,9 @@
}
size_t getBufferSizeBytes() const { return mFrameSizeBytes * mBufferSizeFrames; }
size_t getBufferSizeFrames() const { return mBufferSizeFrames; }
+ size_t getBufferDurationMs(int32_t sampleRate) const {
+ return sampleRate != 0 ? mBufferSizeFrames * MILLIS_PER_SECOND / sampleRate : 0;
+ }
CommandMQ* getCommandMQ() const { return mCommandMQ.get(); }
DataMQ* getDataMQ() const { return mDataMQ.get(); }
size_t getFrameSizeBytes() const { return mFrameSizeBytes; }
@@ -232,6 +236,10 @@
status_t exit();
+ void onAsyncTransferReady();
+ void onAsyncDrainReady();
+ void onAsyncError();
+
const bool mIsInput;
const audio_config_base_t mConfig;
const StreamContextAidl mContext;
@@ -257,8 +265,10 @@
const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> mStream;
const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
+ const int64_t mLastReplyLifeTimeNs;
std::mutex mLock;
::aidl::android::hardware::audio::core::StreamDescriptor::Reply mLastReply GUARDED_BY(mLock);
+ int64_t mLastReplyExpirationNs GUARDED_BY(mLock) = 0;
// mStreamPowerLog is used for audio signal power logging.
StreamPowerLog mStreamPowerLog;
std::atomic<pid_t> mWorkerTid = -1;
@@ -266,7 +276,9 @@
class CallbackBroker;
-class StreamOutHalAidl : public StreamOutHalInterface, public StreamHalAidl {
+class StreamOutHalAidl : public virtual StreamOutHalInterface,
+ public virtual StreamOutHalInterfaceCallback,
+ public StreamHalAidl {
public:
// Extract the output stream parameters and set by AIDL APIs.
status_t setParameters(const String8& kvPairs) override;
@@ -344,6 +356,11 @@
status_t exit() override;
+ // StreamOutHalInterfaceCallback
+ void onWriteReady() override;
+ void onDrainReady() override;
+ void onError() override;
+
private:
friend class sp<StreamOutHalAidl>;
@@ -352,6 +369,7 @@
const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut> mStream;
const wp<CallbackBroker> mCallbackBroker;
+ mediautils::atomic_wp<StreamOutHalInterfaceCallback> mClientCallback;
AudioOffloadMetadata mOffloadMetadata;
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
index da28204..df1f9bd 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
@@ -175,17 +175,19 @@
if (!range) {
return BAD_VALUE;
}
+ std::vector<Spatialization::Level> levels;
for (const auto level : ::ndk::enum_range<Spatialization::Level>()) {
const auto spatializer =
Spatializer::make<Spatializer::spatializationLevel>(level);
if (spatializer >= range->min && spatializer <= range->max) {
- if (status_t status = param.writeToValue(&level); status != OK) {
- ALOGW("%s %d: write level %s to value failed %d", __func__, __LINE__,
- toString(level).c_str(), status);
- return status;
- }
+ levels.emplace_back(level);
}
}
+ const uint8_t num = levels.size();
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+ for (const auto level : levels) {
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&level));
+ }
return OK;
}
case SPATIALIZER_PARAM_LEVEL: {
@@ -234,15 +236,14 @@
const auto& supportedLayouts = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
aidlParam, Spatializer, spatializer, Spatializer::supportedChannelLayout,
std::vector<AudioChannelLayout>));
+ // audio_channel_mask_t is uint32_t enum, write number in 32bit
+ const uint32_t num = supportedLayouts.size();
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
for (const auto& layout : supportedLayouts) {
audio_channel_mask_t mask = VALUE_OR_RETURN_STATUS(
::aidl::android::aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
layout, false /* isInput */));
- if (status_t status = param.writeToValue(&mask); status != OK) {
- ALOGW("%s %d: write mask %s to value failed %d", __func__, __LINE__,
- layout.toString().c_str(), status);
- return status;
- }
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&mask));
}
return OK;
}
@@ -252,17 +253,19 @@
if (!range) {
return BAD_VALUE;
}
+ std::vector<Spatialization::Mode> modes;
for (const auto mode : ::ndk::enum_range<Spatialization::Mode>()) {
if (const auto spatializer =
Spatializer::make<Spatializer::spatializationMode>(mode);
spatializer >= range->min && spatializer <= range->max) {
- if (status_t status = param.writeToValue(&mode); status != OK) {
- ALOGW("%s %d: write mode %s to value failed %d", __func__, __LINE__,
- toString(mode).c_str(), status);
- return status;
- }
+ modes.emplace_back(mode);
}
}
+ const uint8_t num = modes.size();
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+ for (const auto mode : modes) {
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&mode));
+ }
return OK;
}
case SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION: {
@@ -271,17 +274,18 @@
if (!range) {
return BAD_VALUE;
}
+ std::vector<HeadTracking::ConnectionMode> modes;
for (const auto mode : ::ndk::enum_range<HeadTracking::ConnectionMode>()) {
if (const auto spatializer =
Spatializer::make<Spatializer::headTrackingConnectionMode>(mode);
- spatializer < range->min || spatializer > range->max) {
- continue;
+ spatializer >= range->min && spatializer <= range->max) {
+ modes.emplace_back(mode);
}
- if (status_t status = param.writeToValue(&mode); status != OK) {
- ALOGW("%s %d: write mode %s to value failed %d", __func__, __LINE__,
- toString(mode).c_str(), status);
- return status;
- }
+ }
+ const uint8_t num = modes.size();
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+ for (const auto mode : modes) {
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&mode));
}
return OK;
}
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 590a7b7..840897f 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -223,7 +223,6 @@
"com.android.media",
],
-
srcs: ["MidiIoWrapper.cpp"],
static_libs: [
@@ -278,6 +277,10 @@
"libutils",
],
+ static_libs: [
+ "android.media.codec-aconfig-cc",
+ ],
+
include_dirs: [
"system/libhidl/transport/token/1.0/utils/include",
],
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
index 86ad997..c45c5c3 100644
--- a/media/libmedia/MediaCodecInfo.cpp
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -36,6 +36,7 @@
constexpr char MediaCodecInfo::Capabilities::FEATURE_MULTIPLE_FRAMES[];
constexpr char MediaCodecInfo::Capabilities::FEATURE_SECURE_PLAYBACK[];
constexpr char MediaCodecInfo::Capabilities::FEATURE_TUNNELED_PLAYBACK[];
+constexpr char MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE[];
void MediaCodecInfo::Capabilities::getSupportedProfileLevels(
Vector<ProfileLevel> *profileLevels) const {
diff --git a/media/libmedia/include/media/MediaCodecInfo.h b/media/libmedia/include/media/MediaCodecInfo.h
index 54f565a..88a2dc4 100644
--- a/media/libmedia/include/media/MediaCodecInfo.h
+++ b/media/libmedia/include/media/MediaCodecInfo.h
@@ -69,6 +69,7 @@
constexpr static char FEATURE_MULTIPLE_FRAMES[] = "feature-multiple-frames";
constexpr static char FEATURE_SECURE_PLAYBACK[] = "feature-secure-playback";
constexpr static char FEATURE_TUNNELED_PLAYBACK[] = "feature-tunneled-playback";
+ constexpr static char FEATURE_DETACHED_SURFACE[] = "feature-detached-surface";
/**
* Returns the supported levels for each supported profile in a target array.
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index f9ceef2..1593aa0 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -21,6 +21,8 @@
#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
#endif
+#include <android_media_codec.h>
+
#include <inttypes.h>
#include <utils/Trace.h>
@@ -9314,6 +9316,12 @@
// adaptive playback is not supported
caps->removeDetail(MediaCodecInfo::Capabilities::FEATURE_ADAPTIVE_PLAYBACK);
}
+
+ // all non-tunneled video decoders support detached surface mode
+ if (android::media::codec::provider_->null_output_surface_support() &&
+ android::media::codec::provider_->null_output_surface()) {
+ caps->addDetail(MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE, 0);
+ }
}
}
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 896e021..886285e 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -319,6 +319,7 @@
],
static_libs: [
+ "android.media.codec-aconfig-cc",
"libstagefright_esds",
"libstagefright_color_conversion",
"libyuv_static",
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e4f3b83..cc6f5e8 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -30,6 +30,8 @@
#include "include/SoftwareRenderer.h"
+#include <android_media_codec.h>
+
#include <android/api-level.h>
#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
@@ -3017,6 +3019,13 @@
return PostAndAwaitResponse(msg, &response);
}
+status_t MediaCodec::detachOutputSurface() {
+ sp<AMessage> msg = new AMessage(kWhatDetachSurface, this);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
status_t MediaCodec::setSurface(const sp<Surface> &surface) {
sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
msg->setObject("surface", surface);
@@ -4676,7 +4685,7 @@
}
mResourceManagerProxy->removeClient();
- mReleaseSurface.reset();
+ mDetachedSurface.reset();
if (mReplyID != nullptr) {
postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
@@ -4849,6 +4858,24 @@
mFlags |= kFlagPushBlankBuffersOnShutdown;
}
+ uint32_t flags;
+ CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+ if (android::media::codec::provider_->null_output_surface_support()) {
+ if (obj == nullptr
+ && (flags & CONFIGURE_FLAG_DETACHED_SURFACE)
+ && !(flags & CONFIGURE_FLAG_ENCODE)) {
+ sp<Surface> surface = getOrCreateDetachedSurface();
+ if (surface == nullptr) {
+ mErrorLog.log(
+ LOG_TAG, "Detached surface mode is not supported by this codec");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ }
+ obj = surface;
+ }
+ }
+
if (obj != NULL) {
if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
// allow frame dropping by surface by default
@@ -4872,8 +4898,6 @@
mApiUsageMetrics.isUsingOutputSurface = true;
- uint32_t flags;
- CHECK(msg->findInt32("flags", (int32_t *)&flags));
if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL ||
flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
if (!(mFlags & kFlagIsAsync)) {
@@ -4994,6 +5018,23 @@
break;
}
+ case kWhatDetachSurface:
+ {
+ // detach surface is equivalent to setSurface(mDetachedSurface)
+ sp<Surface> surface = getOrCreateDetachedSurface();
+
+ if (surface == nullptr) {
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ mErrorLog.log(LOG_TAG, "Detaching surface is not supported by the codec.");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ }
+
+ msg->setObject("surface", surface);
+ }
+ [[fallthrough]];
+
case kWhatSetSurface:
{
sp<AReplyToken> replyID;
@@ -5011,14 +5052,17 @@
sp<Surface> surface = static_cast<Surface *>(obj.get());
if (mSurface == NULL) {
// do not support setting surface if it was not set
- mErrorLog.log(LOG_TAG,
- "Cannot set surface if the codec is not configured with "
- "a surface already");
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Cannot %s surface if the codec is not configured with "
+ "a surface already",
+ msg->what() == kWhatDetachSurface ? "detach" : "set"));
err = INVALID_OPERATION;
} else if (obj == NULL) {
// do not support unsetting surface
mErrorLog.log(LOG_TAG, "Unsetting surface is not supported");
err = BAD_VALUE;
+ } else if (android::media::codec::provider_->null_output_surface_support()) {
+ err = handleSetSurface(surface, true /* callCodec */);
} else {
uint32_t generation;
err = connectToSurface(surface, &generation);
@@ -5052,7 +5096,8 @@
default:
mErrorLog.log(LOG_TAG, base::StringPrintf(
- "setSurface() is valid only at Executing states; currently %s",
+ "%sSurface() is valid only at Executing states; currently %s",
+ msg->what() == kWhatDetachSurface ? "detach" : "set",
apiStateString().c_str()));
err = INVALID_OPERATION;
break;
@@ -5273,30 +5318,40 @@
bool forceSync = false;
if (asyncNotify != nullptr && mSurface != NULL) {
- if (!mReleaseSurface) {
- uint64_t usage = 0;
- if (mSurface->getConsumerUsage(&usage) != OK) {
- usage = 0;
- }
- mReleaseSurface.reset(new ReleaseSurface(usage));
- }
- if (mSurface != mReleaseSurface->getSurface()) {
- uint32_t generation;
- status_t err = connectToSurface(mReleaseSurface->getSurface(), &generation);
- ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
- if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
- err = mCodec->setSurface(mReleaseSurface->getSurface(), generation);
- ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
- }
- if (err == OK) {
- (void)disconnectFromSurface();
- mSurface = mReleaseSurface->getSurface();
- mSurfaceGeneration = generation;
- } else {
- // We were not able to switch the surface, so force
+ if (android::media::codec::provider_->null_output_surface_support()) {
+ if (handleSetSurface(getOrCreateDetachedSurface(), true /* callCodec */,
+ true /* onShutDown */) != OK) {
+ // We were not able to detach the surface, so force
// synchronous release.
forceSync = true;
}
+ } else {
+ if (!mDetachedSurface) {
+ uint64_t usage = 0;
+ if (mSurface->getConsumerUsage(&usage) != OK) {
+ usage = 0;
+ }
+ mDetachedSurface.reset(new ReleaseSurface(usage));
+ }
+ if (mSurface != mDetachedSurface->getSurface()) {
+ uint32_t generation;
+ status_t err =
+ connectToSurface(mDetachedSurface->getSurface(), &generation);
+ ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
+ if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
+ err = mCodec->setSurface(mDetachedSurface->getSurface(), generation);
+ ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
+ }
+ if (err == OK) {
+ (void)disconnectFromSurface();
+ mSurface = mDetachedSurface->getSurface();
+ mSurfaceGeneration = generation;
+ } else {
+ // We were not able to switch the surface, so force
+ // synchronous release.
+ forceSync = true;
+ }
+ }
}
}
@@ -6612,6 +6667,23 @@
return index;
}
+sp<Surface> MediaCodec::getOrCreateDetachedSurface() {
+ if (mDomain != DOMAIN_VIDEO || (mFlags & kFlagIsEncoder)) {
+ return nullptr;
+ }
+
+ if (!mDetachedSurface) {
+ uint64_t usage = 0;
+ if (!mSurface || mSurface->getConsumerUsage(&usage) != OK) {
+ // TODO: should we use a/the default consumer usage?
+ usage = 0;
+ }
+ mDetachedSurface.reset(new ReleaseSurface(usage));
+ }
+
+ return mDetachedSurface->getSurface();
+}
+
status_t MediaCodec::connectToSurface(const sp<Surface> &surface, uint32_t *generation) {
status_t err = OK;
if (surface != NULL) {
@@ -6685,7 +6757,56 @@
return err;
}
+status_t MediaCodec::handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutDown) {
+ uint32_t generation;
+ status_t err = OK;
+ if (surface != nullptr) {
+ err = connectToSurface(surface, &generation);
+ if (err == ALREADY_EXISTS) {
+ // reconnecting to same surface
+ return OK;
+ }
+
+ if (err == OK && callCodec) {
+ if (mFlags & kFlagUsesSoftwareRenderer) {
+ if (mSoftRenderer != NULL
+ && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
+ pushBlankBuffersToNativeWindow(mSurface.get());
+ }
+ // do not create a new software renderer on shutdown (release)
+ // as it will not be used anyway
+ if (!onShutDown) {
+ surface->setDequeueTimeout(-1);
+ mSoftRenderer = new SoftwareRenderer(surface);
+ // TODO: check if this was successful
+ }
+ } else {
+ err = mCodec->setSurface(surface, generation);
+ }
+
+ mReliabilityContextMetrics.setOutputSurfaceCount++;
+ }
+ }
+
+ if (err == OK) {
+ if (mSurface != NULL) {
+ (void)disconnectFromSurface();
+ }
+
+ if (surface != NULL) {
+ mSurface = surface;
+ mSurfaceGeneration = generation;
+ }
+ }
+
+ return err;
+}
+
status_t MediaCodec::handleSetSurface(const sp<Surface> &surface) {
+ if (android::media::codec::provider_->null_output_surface_support()) {
+ return handleSetSurface(surface, false /* callCodec */);
+ }
+
status_t err = OK;
if (mSurface != NULL) {
(void)disconnectFromSurface();
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 9ecb12e..7169b1e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -96,6 +96,7 @@
CONFIGURE_FLAG_ENCODE = 1,
CONFIGURE_FLAG_USE_BLOCK_MODEL = 2,
CONFIGURE_FLAG_USE_CRYPTO_ASYNC = 4,
+ CONFIGURE_FLAG_DETACHED_SURFACE = 8,
};
enum BufferFlags {
@@ -274,6 +275,8 @@
status_t setSurface(const sp<Surface> &nativeWindow);
+ status_t detachOutputSurface();
+
status_t requestIDRFrame();
// Notification will be posted once there "is something to do", i.e.
@@ -368,6 +371,7 @@
kWhatInit = 'init',
kWhatConfigure = 'conf',
kWhatSetSurface = 'sSur',
+ kWhatDetachSurface = 'dSur',
kWhatCreateInputSurface = 'cisf',
kWhatSetInputSurface = 'sisf',
kWhatStart = 'strt',
@@ -474,6 +478,10 @@
uint32_t mSurfaceGeneration = 0;
SoftwareRenderer *mSoftRenderer;
+ // Get the detached BufferQueue surface for a video decoder, and create it
+ // if it did not yet exist.
+ sp<Surface> getOrCreateDetachedSurface();
+
Mutex mMetricsLock;
mediametrics_handle_t mMetricsHandle = 0;
bool mMetricsToUpload = false;
@@ -642,6 +650,13 @@
status_t queueCSDInputBuffer(size_t bufferIndex);
status_t handleSetSurface(const sp<Surface> &surface);
+
+ // Common reimplementation of changing the output surface.
+ // Handles setting null surface, which is used during configure and init.
+ // Set |callCodec| to true if the codec needs to be notified (e.g. during executing state).
+ // Setting |onShutdown| to true will avoid extra work, if this is used for detaching on
+ // delayed release.
+ status_t handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutdown = false);
status_t connectToSurface(const sp<Surface> &surface, uint32_t *generation);
status_t disconnectFromSurface();
@@ -714,7 +729,7 @@
sp<AMessage> mMsgPollForRenderedBuffers;
class ReleaseSurface;
- std::unique_ptr<ReleaseSurface> mReleaseSurface;
+ std::unique_ptr<ReleaseSurface> mDetachedSurface;
std::list<sp<AMessage>> mLeftover;
status_t handleLeftover(size_t index);
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl
index 0679e41..d428e99 100644
--- a/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl
@@ -52,7 +52,7 @@
void onInputBufferAdded(int bufferID);
/**
- * Called from OnEmptyBufferDone. If we have a BQ buffer available,
+ * If we have a BQ buffer available,
* fill it with a new frame of data; otherwise, just mark it as available.
*
* fence contains the fence's fd that the callee should wait on before
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
index 6cac19e..cf880c2 100644
--- a/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
@@ -37,7 +37,6 @@
}
void freeNode();
- void sendCommand(int cmd, int param);
long getConsumerUsage();
InputBufferParams getInputBufferParams();
void setConsumerUsage(long usage);
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h
index 4b38294..85de688 100644
--- a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h
@@ -28,7 +28,7 @@
namespace android::media {
/*
- * This class is used to emulate feed OMX codecs from a Surface via BufferQueue or
+ * This class is used to feed codec encoders from a Surface via BufferQueue or
* HW producer using AIDL binder interfaces.
*
* See media/stagefright/bqhelper/GraphicBufferSource.h for documentation.
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h
index 64de7ae..364efe2 100644
--- a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h
@@ -29,12 +29,4 @@
BUFFERFLAG_ENDOFFRAME = (1 << 4)
};
-enum C2NodeCommand : int32_t {
- CommandStateSet = 1
-};
-
-enum C2NodeStatus : int32_t {
- NodeStatusLoaded = 1
-};
-
} // namespace android::media
diff --git a/media/module/bufferpool/1.0/vts/multi.cpp b/media/module/bufferpool/1.0/vts/multi.cpp
index d8cc285..21f47d3 100644
--- a/media/module/bufferpool/1.0/vts/multi.cpp
+++ b/media/module/bufferpool/1.0/vts/multi.cpp
@@ -24,6 +24,7 @@
#include <hidl/HidlSupport.h>
#include <hidl/HidlTransportSupport.h>
#include <hidl/LegacySupport.h>
+#include <hidl/ServiceManagement.h>
#include <hidl/Status.h>
#include <signal.h>
#include <sys/types.h>
@@ -36,6 +37,7 @@
using android::hardware::configureRpcThreadpool;
using android::hardware::hidl_handle;
+using android::hardware::isHidlSupported;
using android::hardware::media::bufferpool::V1_0::IClientManager;
using android::hardware::media::bufferpool::V1_0::ResultStatus;
using android::hardware::media::bufferpool::V1_0::implementation::BufferId;
@@ -178,6 +180,7 @@
ResultStatus status;
PipeMessage message;
+ if (!isHidlSupported()) GTEST_SKIP() << "HIDL is not supported on this device";
ASSERT_TRUE(receiveMessage(mResultPipeFds, &message));
android::sp<IClientManager> receiver = IClientManager::getService();
diff --git a/media/tests/benchmark/src/native/decoder/C2Decoder.cpp b/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
index 6539f24..f9a6b1c 100644
--- a/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
+++ b/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
@@ -106,7 +106,7 @@
work->input.ordinal.frameIndex = mNumInputFrame;
work->input.buffers.clear();
int size = frameInfo[mNumInputFrame].size;
- int alignedSize = ALIGN(size, PAGE_SIZE);
+ int alignedSize = ALIGN(size, getpagesize());
if (size) {
std::shared_ptr<C2LinearBlock> block;
status = mLinearPool->fetchLinearBlock(
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index cc191ca..b6b9720 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -49,12 +49,12 @@
using aidl_utils::statusTFromBinderStatus;
using android::content::AttributionSourceState;
using binder::Status;
+using internal::ToString;
using media::HeadTrackingMode;
using media::Pose3f;
using media::SensorPoseProvider;
using media::audio::common::HeadTracking;
using media::audio::common::Spatialization;
-using ::android::internal::ToString;
using namespace std::chrono_literals;
@@ -348,7 +348,8 @@
bool activeLevelFound = false;
for (const auto spatializationLevel : spatializationLevels) {
if (!aidl_utils::isValidEnum(spatializationLevel)) {
- ALOGW("%s: ignoring spatializationLevel:%d", __func__, (int)spatializationLevel);
+ ALOGW("%s: ignoring spatializationLevel:%s", __func__,
+ ToString(spatializationLevel).c_str());
continue;
}
if (spatializationLevel == Spatialization::Level::NONE) {
@@ -375,7 +376,8 @@
for (const auto spatializationMode : spatializationModes) {
if (!aidl_utils::isValidEnum(spatializationMode)) {
- ALOGW("%s: ignoring spatializationMode:%d", __func__, (int)spatializationMode);
+ ALOGW("%s: ignoring spatializationMode:%s", __func__,
+ ToString(spatializationMode).c_str());
continue;
}
// we don't detect duplicates.
@@ -406,27 +408,26 @@
return BAD_VALUE;
}
- //TODO b/273373363: use AIDL enum when available
if (com::android::media::audio::dsa_over_bt_le_audio()
&& mSupportsHeadTracking) {
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED;
- std::vector<uint8_t> headtrackingConnectionModes;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
+ std::vector<HeadTracking::ConnectionMode> headtrackingConnectionModes;
status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION,
&headtrackingConnectionModes);
if (status == NO_ERROR) {
for (const auto htConnectionMode : headtrackingConnectionModes) {
- if (htConnectionMode < HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED ||
- htConnectionMode > HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL) {
- ALOGW("%s: ignoring HT connection mode:%d", __func__, (int)htConnectionMode);
+ if (htConnectionMode < HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED ||
+ htConnectionMode > HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL) {
+ ALOGW("%s: ignoring HT connection mode:%s", __func__,
+ ToString(htConnectionMode).c_str());
continue;
}
- mSupportedHeadtrackingConnectionModes.insert(
- static_cast<headtracking_connection_t> (htConnectionMode));
+ mSupportedHeadtrackingConnectionModes.insert(htConnectionMode);
}
ALOGW_IF(mSupportedHeadtrackingConnectionModes.find(
- HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED)
- == mSupportedHeadtrackingConnectionModes.end(),
- "%s: HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED not reported", __func__);
+ HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED) ==
+ mSupportedHeadtrackingConnectionModes.end(),
+ "%s: Headtracking FRAMEWORK_PROCESSED not reported", __func__);
}
}
@@ -553,12 +554,12 @@
}
audio_utils::lock_guard lock(mMutex);
*level = mLevel;
- ALOGV("%s level %d", __func__, (int)*level);
+ ALOGV("%s level %s", __func__, ToString(*level).c_str());
return Status::ok();
}
Status Spatializer::isHeadTrackingSupported(bool *supports) {
- ALOGV("%s mSupportsHeadTracking %d", __func__, mSupportsHeadTracking);
+ ALOGV("%s mSupportsHeadTracking %s", __func__, ToString(mSupportsHeadTracking).c_str());
if (supports == nullptr) {
return binderStatusFromStatusT(BAD_VALUE);
}
@@ -853,7 +854,7 @@
}
void Spatializer::onActualModeChange(HeadTrackingMode mode) {
- std::string modeStr = media::toString(mode);
+ std::string modeStr = ToString(mode);
ALOGV("%s(%s)", __func__, modeStr.c_str());
sp<AMessage> msg = new AMessage(EngineCallbackHandler::kWhatOnActualModeChange, mHandler);
msg->setInt32(EngineCallbackHandler::kModeKey, static_cast<int>(mode));
@@ -861,7 +862,7 @@
}
void Spatializer::onActualModeChangeMsg(HeadTrackingMode mode) {
- ALOGV("%s(%d)", __func__, (int) mode);
+ ALOGV("%s(%s)", __func__, ToString(mode).c_str());
sp<media::ISpatializerHeadTrackingCallback> callback;
HeadTracking::Mode spatializerMode;
{
@@ -880,7 +881,7 @@
spatializerMode = HeadTracking::Mode::RELATIVE_SCREEN;
break;
default:
- LOG_ALWAYS_FATAL("Unknown mode: %d", static_cast<int>(mode));
+ LOG_ALWAYS_FATAL("Unknown mode: %s", ToString(mode).c_str());
}
}
mActualHeadTrackingMode = spatializerMode;
@@ -894,7 +895,7 @@
}
}
callback = mHeadTrackingCallback;
- mLocalLog.log("%s: updating mode to %s", __func__, media::toString(mode).c_str());
+ mLocalLog.log("%s: updating mode to %s", __func__, ToString(mode).c_str());
}
if (callback != nullptr) {
callback->onHeadTrackingModeChanged(spatializerMode);
@@ -1052,24 +1053,23 @@
}
}
-//TODO b/273373363: use AIDL enum when available
audio_latency_mode_t Spatializer::selectHeadtrackingConnectionMode_l() {
if (!com::android::media::audio::dsa_over_bt_le_audio()) {
return AUDIO_LATENCY_MODE_LOW;
}
// mSupportedLatencyModes is ordered according to system preferences loaded in
// mOrderedLowLatencyModes
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
audio_latency_mode_t requestedLatencyMode = mSupportedLatencyModes[0];
if (requestedLatencyMode == AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE) {
if (mSupportedHeadtrackingConnectionModes.find(
- HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL)
+ HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL)
!= mSupportedHeadtrackingConnectionModes.end()) {
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL;
} else if (mSupportedHeadtrackingConnectionModes.find(
- HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_SW)
+ HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW)
!= mSupportedHeadtrackingConnectionModes.end()) {
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_SW;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW;
} else {
// if the engine does not support direct reading of IMU data, do not allow
// DYNAMIC_SPATIAL_AUDIO_HARDWARE mode and fallback to next mode
@@ -1213,7 +1213,7 @@
base::StringAppendF(&ss, " %s", ToString(mode).c_str());
}
base::StringAppendF(&ss, "], Desired: %s, Actual %s\n",
- media::toString(mDesiredHeadTrackingMode).c_str(),
+ ToString(mDesiredHeadTrackingMode).c_str(),
ToString(mActualHeadTrackingMode).c_str());
base::StringAppendF(&ss, "%smSpatializationModes: [", prefixSpace.c_str());
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 24788dc..355df18 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -486,11 +486,13 @@
bool mSupportsHeadTracking;
/** List of supported headtracking connection modes reported by the spatializer.
* If the list is empty, the spatializer does not support any optional connection
- * mode and mode HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED is assumed.
+ * mode and mode HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED is assumed.
*/
- std::unordered_set<headtracking_connection_t> mSupportedHeadtrackingConnectionModes;
+ std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
+ mSupportedHeadtrackingConnectionModes;
/** Selected HT connection mode when several modes are supported by the spatializer */
- headtracking_connection_t mHeadtrackingConnectionMode;
+ media::audio::common::HeadTracking::ConnectionMode mHeadtrackingConnectionMode =
+ media::audio::common::HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
// Looper thread for mEngine callbacks
class EngineCallbackHandler;