Merge changes I415a7be5,Id3ec0934 into udc-dev
* changes:
Replace std::reference_wrapper with std::shared_ptr in effectsConfig
Deprecate support for legacy effect config file audio_effects.conf
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index cd4932d..f53fc0a 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -122,7 +122,7 @@
static uint32_t gBitRate = 20000000; // 20Mbps
static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
static uint32_t gBframes = 0;
-static PhysicalDisplayId gPhysicalDisplayId;
+static std::optional<PhysicalDisplayId> gPhysicalDisplayId;
// Set by signal handler to stop recording.
static volatile bool gStopRequested = false;
@@ -336,6 +336,24 @@
}
/*
+ * Gets the physical id of the display to record. If the user specified a physical
+ * display id, that id is used. Otherwise, the id of the default display is returned.
+ */
+static status_t getPhysicalDisplayId(PhysicalDisplayId& outDisplayId) {
+ if (gPhysicalDisplayId) {
+ outDisplayId = *gPhysicalDisplayId;
+ return NO_ERROR;
+ }
+
+ const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
+ if (ids.empty()) {
+ return INVALID_OPERATION;
+ }
+ outDisplayId = ids.front();
+ return NO_ERROR;
+}
+
+/*
* Configures the virtual display. When this completes, virtual display
* frames will start arriving from the buffer producer.
*/
@@ -350,7 +368,12 @@
setDisplayProjection(t, dpy, displayState);
ui::LayerStack layerStack = ui::LayerStack::fromValue(std::rand());
t.setDisplayLayerStack(dpy, layerStack);
- *mirrorRoot = SurfaceComposerClient::getDefault()->mirrorDisplay(gPhysicalDisplayId);
+ PhysicalDisplayId displayId;
+ status_t err = getPhysicalDisplayId(displayId);
+ if (err != NO_ERROR) {
+ return err;
+ }
+ *mirrorRoot = SurfaceComposerClient::getDefault()->mirrorDisplay(displayId);
if (*mirrorRoot == nullptr) {
ALOGE("Failed to create a mirror for screenrecord");
return UNKNOWN_ERROR;
@@ -486,6 +509,43 @@
}
/*
+ * Update the display projection if the size or orientation has changed.
+ */
+void updateDisplayProjection(const sp<IBinder>& virtualDpy, ui::DisplayState& displayState) {
+ ATRACE_NAME("updateDisplayProjection");
+
+ PhysicalDisplayId displayId;
+ if (getPhysicalDisplayId(displayId) != NO_ERROR) {
+ fprintf(stderr, "ERROR: Failed to get display id\n");
+ return;
+ }
+
+ sp<IBinder> displayToken = SurfaceComposerClient::getPhysicalDisplayToken(displayId);
+ if (!displayToken) {
+ fprintf(stderr, "ERROR: failed to get display token\n");
+ return;
+ }
+
+ ui::DisplayState currentDisplayState;
+ if (SurfaceComposerClient::getDisplayState(displayToken, &currentDisplayState) != NO_ERROR) {
+ ALOGW("ERROR: failed to get display state\n");
+ return;
+ }
+
+ if (currentDisplayState.orientation != displayState.orientation ||
+ currentDisplayState.layerStackSpaceRect != displayState.layerStackSpaceRect) {
+ displayState = currentDisplayState;
+ ALOGD("display state changed, now has orientation %s, size (%d, %d)",
+ toCString(displayState.orientation), displayState.layerStackSpaceRect.getWidth(),
+ displayState.layerStackSpaceRect.getHeight());
+
+ SurfaceComposerClient::Transaction t;
+ setDisplayProjection(t, virtualDpy, currentDisplayState);
+ t.apply();
+ }
+}
+
+/*
* Runs the MediaCodec encoder, sending the output to the MediaMuxer. The
* input frames are coming from the virtual display as fast as SurfaceFlinger
* wants to send them.
@@ -494,9 +554,8 @@
*
* The muxer must *not* have been started before calling.
*/
-static status_t runEncoder(const sp<MediaCodec>& encoder,
- AMediaMuxer *muxer, FILE* rawFp, const sp<IBinder>& display,
- const sp<IBinder>& virtualDpy, ui::Rotation orientation) {
+static status_t runEncoder(const sp<MediaCodec>& encoder, AMediaMuxer* muxer, FILE* rawFp,
+ const sp<IBinder>& virtualDpy, ui::DisplayState displayState) {
static int kTimeout = 250000; // be responsive on signal
status_t err;
ssize_t trackIdx = -1;
@@ -555,24 +614,7 @@
ALOGV("Got data in buffer %zu, size=%zu, pts=%" PRId64,
bufIndex, size, ptsUsec);
- { // scope
- ATRACE_NAME("orientation");
- // Check orientation, update if it has changed.
- //
- // Polling for changes is inefficient and wrong, but the
- // useful stuff is hard to get at without a Dalvik VM.
- ui::DisplayState displayState;
- err = SurfaceComposerClient::getDisplayState(display, &displayState);
- if (err != NO_ERROR) {
- ALOGW("getDisplayState() failed: %d", err);
- } else if (orientation != displayState.orientation) {
- ALOGD("orientation changed, now %s", toCString(displayState.orientation));
- SurfaceComposerClient::Transaction t;
- setDisplayProjection(t, virtualDpy, displayState);
- t.apply();
- orientation = displayState.orientation;
- }
- }
+ updateDisplayProjection(virtualDpy, displayState);
// If the virtual display isn't providing us with timestamps,
// use the current time. This isn't great -- we could get
@@ -764,6 +806,38 @@
};
/*
+ * Computes the maximum width and height across all physical displays.
+ */
+static ui::Size getMaxDisplaySize() {
+ const std::vector<PhysicalDisplayId> physicalDisplayIds =
+ SurfaceComposerClient::getPhysicalDisplayIds();
+ if (physicalDisplayIds.empty()) {
+ fprintf(stderr, "ERROR: Failed to get physical display ids\n");
+ return {};
+ }
+
+ ui::Size result;
+ for (auto& displayId : physicalDisplayIds) {
+ sp<IBinder> displayToken = SurfaceComposerClient::getPhysicalDisplayToken(displayId);
+ if (!displayToken) {
+ fprintf(stderr, "ERROR: failed to get display token\n");
+ continue;
+ }
+
+ ui::DisplayState displayState;
+ status_t err = SurfaceComposerClient::getDisplayState(displayToken, &displayState);
+ if (err != NO_ERROR) {
+ fprintf(stderr, "ERROR: failed to get display state\n");
+ continue;
+ }
+
+ result.height = std::max(result.height, displayState.layerStackSpaceRect.getHeight());
+ result.width = std::max(result.width, displayState.layerStackSpaceRect.getWidth());
+ }
+ return result;
+}
+
+/*
* Main "do work" start point.
*
* Configures codec, muxer, and virtual display, then starts moving bits
@@ -781,9 +855,15 @@
sp<ProcessState> self = ProcessState::self();
self->startThreadPool();
+ PhysicalDisplayId displayId;
+ err = getPhysicalDisplayId(displayId);
+ if (err != NO_ERROR) {
+ fprintf(stderr, "ERROR: Failed to get display id\n");
+ return err;
+ }
+
// Get main display parameters.
- sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(
- gPhysicalDisplayId);
+ sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(displayId);
if (display == nullptr) {
fprintf(stderr, "ERROR: no display\n");
return NAME_NOT_FOUND;
@@ -808,7 +888,8 @@
return INVALID_OPERATION;
}
- const ui::Size& layerStackSpaceRect = displayState.layerStackSpaceRect;
+ const ui::Size layerStackSpaceRect =
+ gPhysicalDisplayId ? displayState.layerStackSpaceRect : getMaxDisplaySize();
if (gVerbose) {
printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
layerStackSpaceRect.getWidth(), layerStackSpaceRect.getHeight(),
@@ -973,8 +1054,7 @@
}
} else {
// Main encoder loop.
- err = runEncoder(recordingData.encoder, muxer, rawFp, display, recordingData.dpy,
- displayState.orientation);
+ err = runEncoder(recordingData.encoder, muxer, rawFp, recordingData.dpy, displayState);
if (err != NO_ERROR) {
fprintf(stderr, "Encoder failed (err=%d)\n", err);
// fall through to cleanup
@@ -1175,14 +1255,6 @@
{ NULL, 0, NULL, 0 }
};
- const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
- if (ids.empty()) {
- fprintf(stderr, "Failed to get ID for any displays\n");
- return 1;
- }
-
- gPhysicalDisplayId = ids.front();
-
while (true) {
int optionIndex = 0;
int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
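A minimal standalone sketch of the display-selection fallback that the new getPhysicalDisplayId() helper above implements: a user-supplied id wins, otherwise the first physical display reported by the compositor is used. All names below (DisplayId, listDisplays, pickDisplay) are illustrative stand-ins, not the screenrecord code.

    #include <cstdint>
    #include <optional>
    #include <vector>

    using DisplayId = uint64_t;                        // stand-in for PhysicalDisplayId

    static std::optional<DisplayId> gRequestedDisplay; // set only when the user picks a display

    static std::vector<DisplayId> listDisplays() {     // stand-in for getPhysicalDisplayIds()
        return {1, 2};
    }

    // Prefer the user's choice, otherwise fall back to the first (default)
    // physical display; fail only when no display exists at all.
    static bool pickDisplay(DisplayId& out) {
        if (gRequestedDisplay) {
            out = *gRequestedDisplay;
            return true;
        }
        const std::vector<DisplayId> ids = listDisplays();
        if (ids.empty()) {
            return false;
        }
        out = ids.front();
        return true;
    }

    int main() {
        DisplayId id;
        return pickDisplay(id) ? 0 : 1;
    }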
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index 59cad9d..e8b0506 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -542,15 +542,15 @@
mCodecConfiguration->kf_max_dist = 3000;
// Encoder determines optimal key frame placement automatically.
mCodecConfiguration->kf_mode = AOM_KF_AUTO;
- // Initial value of the buffer level in ms.
- mCodecConfiguration->rc_buf_initial_sz = 500;
- // Amount of data that the encoder should try to maintain in ms.
- mCodecConfiguration->rc_buf_optimal_sz = 600;
// The amount of data that may be buffered by the decoding
// application in ms.
mCodecConfiguration->rc_buf_sz = 1000;
if (mBitrateControlMode == AOM_CBR) {
+ // Initial value of the buffer level in ms.
+ mCodecConfiguration->rc_buf_initial_sz = 500;
+ // Amount of data that the encoder should try to maintain in ms.
+ mCodecConfiguration->rc_buf_optimal_sz = 600;
// Maximum amount of bits that can be subtracted from the target
// bitrate - expressed as percentage of the target bitrate.
mCodecConfiguration->rc_undershoot_pct = 100;
@@ -563,7 +563,7 @@
mCodecConfiguration->rc_undershoot_pct = 100;
// Maximum amount of bits that can be added to the target
// bitrate - expressed as percentage of the target bitrate.
- mCodecConfiguration->rc_overshoot_pct = 25;
+ mCodecConfiguration->rc_overshoot_pct = 100;
}
if (mIntf->getSyncFramePeriod() >= 0) {
@@ -576,6 +576,12 @@
}
if (mMaxQuantizer > 0) {
mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
+ } else {
+ if (mBitrateControlMode == AOM_VBR) {
+ // For VBR we limit the max QP to 52 (down 11 steps) to maintain quality.
+ // The value 52 comes from experiments with the standalone libaom app.
+ mCodecConfiguration->rc_max_quantizer = 52;
+ }
}
mCodecContext = new aom_codec_ctx_t;
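A standalone sketch of the rate-control split introduced above, assuming the public libaom encoder API; the numeric values are the ones from the hunk, while the function name and usage mode are illustrative.

    #include <aom/aom_encoder.h>
    #include <aom/aomcx.h>

    // Buffer levels are configured only for CBR, and the VBR max quantizer is
    // capped at 52 when the caller did not set one.
    static bool configureRateControl(aom_codec_enc_cfg_t* cfg, bool useCbr,
                                     unsigned int maxQuantizer /* 0 = unset */) {
        if (aom_codec_enc_config_default(aom_codec_av1_cx(), cfg,
                                         AOM_USAGE_REALTIME) != AOM_CODEC_OK) {
            return false;
        }
        cfg->kf_mode = AOM_KF_AUTO;     // encoder picks key frame placement
        cfg->kf_max_dist = 3000;
        cfg->rc_buf_sz = 1000;          // decoder-side buffering, in ms
        if (useCbr) {
            cfg->rc_end_usage = AOM_CBR;
            cfg->rc_buf_initial_sz = 500;   // buffer levels only matter for CBR
            cfg->rc_buf_optimal_sz = 600;
            cfg->rc_undershoot_pct = 100;
        } else {
            cfg->rc_end_usage = AOM_VBR;
            cfg->rc_undershoot_pct = 100;
            cfg->rc_overshoot_pct = 100;    // raised from 25 by this change
            cfg->rc_max_quantizer = maxQuantizer > 0 ? maxQuantizer : 52;
        }
        return true;
    }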
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 9359e29..829a179 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -1523,8 +1523,8 @@
uint64_t consumerUsage = kDefaultConsumerUsage;
{
if (surface) {
- int usage = 0;
- status_t err = surface->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &usage);
+ uint64_t usage = 0;
+ status_t err = surface->getConsumerUsage(&usage);
if (err != NO_ERROR) {
ALOGD("setOutputSurface -- failed to get consumer usage bits (%d/%s). ignoring",
err, asString(err));
@@ -1537,8 +1537,7 @@
// they do not exist inside of C2 scope. Any buffer usage shall be communicated
// through the sideband channel.
- // do an unsigned conversion as bit-31 may be 1
- consumerUsage = (uint32_t)usage | kDefaultConsumerUsage;
+ consumerUsage = usage | kDefaultConsumerUsage;
}
}
@@ -1562,6 +1561,8 @@
static_cast<uint64_t>(blockPoolId),
bqId == 0 ? nullHgbp : igbp);
+ mOutputBufferQueue->expireOldWaiters();
+
if (!transStatus.isOk()) {
LOG(ERROR) << "setOutputSurface -- transaction failed.";
return C2_TRANSACTION_FAILED;
@@ -1607,6 +1608,7 @@
<< status << ".";
}
}
+ mOutputBufferQueue->expireOldWaiters();
}
c2_status_t Codec2Client::Component::connectToInputSurface(
diff --git a/media/codec2/hal/client/include/codec2/hidl/output.h b/media/codec2/hal/client/include/codec2/hidl/output.h
index 35a0224..2e89c3b 100644
--- a/media/codec2/hal/client/include/codec2/hidl/output.h
+++ b/media/codec2/hal/client/include/codec2/hidl/output.h
@@ -51,6 +51,10 @@
int maxDequeueBufferCount,
std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj);
+ // If there are waiters to allocate from the old surface, wake up and expire
+ // them.
+ void expireOldWaiters();
+
// Stop using the current output surface. Pending buffer operations will no
// longer be performed.
void stop();
@@ -90,6 +94,8 @@
std::weak_ptr<_C2BlockPoolData> mPoolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
bool mStopped;
+ std::mutex mOldMutex;
+ std::shared_ptr<C2SurfaceSyncMemory> mOldMem;
bool registerBuffer(const C2ConstGraphicBlock& block);
};
diff --git a/media/codec2/hal/client/output.cpp b/media/codec2/hal/client/output.cpp
index ce706cc..4eebd1c 100644
--- a/media/codec2/hal/client/output.cpp
+++ b/media/codec2/hal/client/output.cpp
@@ -217,6 +217,7 @@
sp<GraphicBuffer> buffers[BufferQueueDefs::NUM_BUFFER_SLOTS];
std::weak_ptr<_C2BlockPoolData>
poolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
+ std::shared_ptr<C2SurfaceSyncMemory> oldMem;
{
std::scoped_lock<std::mutex> l(mMutex);
bool stopped = mStopped;
@@ -238,7 +239,7 @@
}
return false;
}
- std::shared_ptr<C2SurfaceSyncMemory> oldMem = mSyncMem;
+ oldMem = mSyncMem;
C2SyncVariables *oldSync = mSyncMem ? mSyncMem->mem() : nullptr;
if (oldSync) {
oldSync->lock();
@@ -314,11 +315,26 @@
newSync->unlock();
}
}
+ {
+ std::scoped_lock<std::mutex> l(mOldMutex);
+ mOldMem = oldMem;
+ }
ALOGD("remote graphic buffer migration %zu/%zu",
success, tryNum);
return true;
}
+void OutputBufferQueue::expireOldWaiters() {
+ std::scoped_lock<std::mutex> l(mOldMutex);
+ if (mOldMem) {
+ C2SyncVariables *oldSync = mOldMem->mem();
+ if (oldSync) {
+ oldSync->notifyAll();
+ }
+ mOldMem.reset();
+ }
+}
+
void OutputBufferQueue::stop() {
std::scoped_lock<std::mutex> l(mMutex);
mStopped = true;
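The expireOldWaiters() addition exists so that clients still blocked on the previous surface's sync memory are woken once the surface switch completes. A generic sketch of that pattern, with std::condition_variable standing in for C2SyncVariables; the class and member names are illustrative, not the Codec2 HAL API.

    #include <condition_variable>
    #include <memory>
    #include <mutex>
    #include <utility>

    struct SyncBlock {                   // stand-in for C2SurfaceSyncMemory
        std::mutex m;
        std::condition_variable cv;
        bool expired = false;
    };

    class Queue {
    public:
        // Surface switch: install the new sync block and stash the old one so
        // its waiters can be expired after the remote transaction completes.
        void configure(std::shared_ptr<SyncBlock> next) {
            std::shared_ptr<SyncBlock> old;
            {
                std::scoped_lock l(mMutex);
                old = std::exchange(mCurrent, std::move(next));
            }
            std::scoped_lock l(mOldMutex);
            mOld = std::move(old);
        }

        // Wake anyone still blocked on the old block so they bail out instead
        // of waiting on a surface that will never produce buffers again.
        void expireOldWaiters() {
            std::scoped_lock l(mOldMutex);
            if (mOld) {
                {
                    std::scoped_lock blockLock(mOld->m);
                    mOld->expired = true;
                }
                mOld->cv.notify_all();
                mOld.reset();
            }
        }

    private:
        std::mutex mMutex;               // guards mCurrent
        std::shared_ptr<SyncBlock> mCurrent;
        std::mutex mOldMutex;            // guards mOld, like mOldMutex above
        std::shared_ptr<SyncBlock> mOld;
    };

    int main() {
        Queue q;
        q.configure(std::make_shared<SyncBlock>());
        q.configure(std::make_shared<SyncBlock>()); // stashes the first block
        q.expireOldWaiters();
        return 0;
    }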
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index eb1b4b5..2db6f2f 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -1828,6 +1828,7 @@
mCallback->onError(err2, ACTION_CODE_FATAL);
return;
}
+
err2 = mChannel->start(inputFormat, outputFormat, buffersBoundToCodec);
if (err2 != OK) {
mCallback->onError(err2, ACTION_CODE_FATAL);
@@ -2131,6 +2132,25 @@
RevertOutputFormatIfNeeded(outputFormat, config->mOutputFormat);
}
+ std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
+ status_t err = mChannel->prepareInitialInputBuffers(&clientInputBuffers);
+ if (err != OK) {
+ if (err == NO_MEMORY) {
+ // NO_MEMORY happens here when all the buffers are still
+ // with the codec. That is not an error, as it is momentary
+ // and the buffers are sent to the client as soon as the codec
+ // releases them.
+ ALOGI("Resuming with all input buffers still with codec");
+ } else {
+ ALOGE("Resume request for Input Buffers failed");
+ mCallback->onError(err, ACTION_CODE_FATAL);
+ return;
+ }
+ }
+
+ // The channel must be started after prepareInitialInputBuffers.
+ // Starting it earlier can cause a failure during preparation, because
+ // onWorkDone may send buffers to the client before preparation completes.
(void)mChannel->start(nullptr, nullptr, [&]{
Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
const std::unique_ptr<Config> &config = *configLocked;
@@ -2148,14 +2168,6 @@
state->set(RUNNING);
}
- std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
- status_t err = mChannel->prepareInitialInputBuffers(&clientInputBuffers);
- // FIXME(b/237656746)
- if (err != OK && err != NO_MEMORY) {
- ALOGE("Resume request for Input Buffers failed");
- mCallback->onError(err, ACTION_CODE_FATAL);
- return;
- }
mChannel->requestInitialInputBuffers(std::move(clientInputBuffers));
}
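The comment added in CCodec::resume() above is the heart of this change: prepare the initial client input buffers first, treating NO_MEMORY as benign, and only then start the channel. A simplified, runnable model of that ordering; the types here are stand-ins, not the CCodec/CCodecBufferChannel API.

    #include <map>
    #include <utility>

    enum class Err { Ok, NoMemory, Fatal };

    struct FakeChannel {                 // stand-in for CCodecBufferChannel
        bool codecOwnsAllBuffers = true;

        Err prepareInitialInputBuffers(std::map<int, int>* out) {
            if (codecOwnsAllBuffers) return Err::NoMemory;  // benign case
            (*out)[0] = 0;
            return Err::Ok;
        }
        Err start() { return Err::Ok; }
        void requestInitialInputBuffers(std::map<int, int>) {}
    };

    Err resume(FakeChannel& channel) {
        std::map<int, int> clientInputBuffers;
        Err err = channel.prepareInitialInputBuffers(&clientInputBuffers);
        if (err != Err::Ok && err != Err::NoMemory) {
            return err;                  // genuine failure: report upward
        }
        // NoMemory only means the codec still owns every input buffer; they
        // reach the client as the codec releases them, so resume proceeds.
        if (channel.start() != Err::Ok) {
            return Err::Fatal;
        }
        channel.requestInitialInputBuffers(std::move(clientInputBuffers));
        return Err::Ok;
    }

    int main() {
        FakeChannel channel;
        return resume(channel) == Err::Ok ? 0 : 1;
    }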
diff --git a/media/libaudiohal/impl/ConversionHelperAidl.h b/media/libaudiohal/impl/ConversionHelperAidl.h
index db6b6cf..5534d13 100644
--- a/media/libaudiohal/impl/ConversionHelperAidl.h
+++ b/media/libaudiohal/impl/ConversionHelperAidl.h
@@ -20,6 +20,9 @@
#include <string_view>
#include <vector>
+#include <android-base/expected.h>
+#include <error/Result.h>
+#include <media/AudioParameter.h>
#include <utils/String16.h>
#include <utils/Vector.h>
@@ -51,4 +54,24 @@
const std::string mClassName;
};
+// 'action' must accept a value of type 'T' and return 'status_t'.
+// The function returns 'true' if the parameter was found and the action succeeded.
+// The function returns 'false' if the parameter was not found.
+// Any errors are propagated; an error implies the parameter was found.
+template<typename T, typename F>
+error::Result<bool> filterOutAndProcessParameter(
+ AudioParameter& parameters, const String8& key, const F& action) {
+ if (parameters.containsKey(key)) {
+ T value;
+ status_t status = parameters.get(key, value);
+ if (status == OK) {
+ parameters.remove(key);
+ status = action(value);
+ if (status == OK) return true;
+ }
+ return base::unexpected(status);
+ }
+ return false;
+}
+
} // namespace android
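A usage sketch for the helper above, spelling out the three outcomes its error::Result<bool> encodes (found and handled, not found, error). The key name and the action lambda are illustrative; the function is assumed to live in namespace android alongside the helper.

    #include <media/AudioParameter.h>
    #include <utils/Errors.h>
    #include <utils/String8.h>

    namespace android {

    status_t consumeExampleKey(AudioParameter& parameters) {
        auto result = filterOutAndProcessParameter<String8>(
                parameters, String8("example_key"),
                [](const String8& value) {
                    return value == AudioParameter::valueOn ? OK : BAD_VALUE;
                });
        if (!result.has_value()) {
            // Key was present, but retrieving it or running the action failed.
            return result.error();
        }
        if (*result) {
            // Key was present; it has been removed from 'parameters' and handled.
        } else {
            // Key was absent; 'parameters' is left untouched.
        }
        return OK;
    }

    }  // namespace android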
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 865d1d6..3125e311 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -35,6 +35,7 @@
#include "StreamHalAidl.h"
using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::media::audio::common::Boolean;
using aidl::android::media::audio::common::AudioChannelLayout;
using aidl::android::media::audio::common::AudioConfig;
using aidl::android::media::audio::common::AudioDevice;
@@ -68,6 +69,9 @@
using aidl::android::hardware::audio::common::RecordTrackMetadata;
using aidl::android::hardware::audio::core::AudioPatch;
using aidl::android::hardware::audio::core::AudioRoute;
+using aidl::android::hardware::audio::core::IBluetooth;
+using aidl::android::hardware::audio::core::IBluetoothA2dp;
+using aidl::android::hardware::audio::core::IBluetoothLe;
using aidl::android::hardware::audio::core::IModule;
using aidl::android::hardware::audio::core::ITelephony;
using aidl::android::hardware::audio::core::ModuleDebug;
@@ -124,7 +128,10 @@
DeviceHalAidl::DeviceHalAidl(const std::string& instance, const std::shared_ptr<IModule>& module)
: ConversionHelperAidl("DeviceHalAidl"),
mInstance(instance), mModule(module),
- mTelephony(retrieveSubInterface<ITelephony>(module, &IModule::getTelephony)) {
+ mTelephony(retrieveSubInterface<ITelephony>(module, &IModule::getTelephony)),
+ mBluetooth(retrieveSubInterface<IBluetooth>(module, &IModule::getBluetooth)),
+ mBluetoothA2dp(retrieveSubInterface<IBluetoothA2dp>(module, &IModule::getBluetoothA2dp)),
+ mBluetoothLe(retrieveSubInterface<IBluetoothLe>(module, &IModule::getBluetoothLe)) {
}
status_t DeviceHalAidl::getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) {
@@ -265,15 +272,32 @@
return statusTFromBinderStatus(mModule->getMasterMute(state));
}
-status_t DeviceHalAidl::setParameters(const String8& kvPairs __unused) {
- TIME_CHECK();
+status_t DeviceHalAidl::setParameters(const String8& kvPairs) {
if (!mModule) return NO_INIT;
- ALOGE("%s not implemented yet", __func__);
+ AudioParameter parameters(kvPairs);
+ ALOGD("%s: parameters: \"%s\"", __func__, parameters.toString().c_str());
+
+ if (status_t status = filterAndUpdateBtA2dpParameters(parameters); status != OK) {
+ ALOGW("%s: filtering or updating BT A2DP parameters failed: %d", __func__, status);
+ }
+ if (status_t status = filterAndUpdateBtHfpParameters(parameters); status != OK) {
+ ALOGW("%s: filtering or updating BT HFP parameters failed: %d", __func__, status);
+ }
+ if (status_t status = filterAndUpdateBtLeParameters(parameters); status != OK) {
+ ALOGW("%s: filtering or updating BT LE parameters failed: %d", __func__, status);
+ }
+ if (status_t status = filterAndUpdateBtScoParameters(parameters); status != OK) {
+ ALOGW("%s: filtering or updating BT SCO parameters failed: %d", __func__, status);
+ }
+
+ ALOGW_IF(parameters.size() != 0, "%s: unknown parameters, ignored: \"%s\"",
+ __func__, parameters.toString().c_str());
return OK;
}
status_t DeviceHalAidl::getParameters(const String8& keys __unused, String8 *values) {
TIME_CHECK();
+ // FIXME(b/278976019): Support keyReconfigA2dpSupported via vendor plugin
values->clear();
if (!mModule) return NO_INIT;
ALOGE("%s not implemented yet", __func__);
@@ -1088,6 +1112,150 @@
return OK;
}
+status_t DeviceHalAidl::filterAndUpdateBtA2dpParameters(AudioParameter &parameters) {
+ TIME_CHECK();
+ std::optional<bool> a2dpEnabled;
+ (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+ parameters, String8(AudioParameter::keyBtA2dpSuspended),
+ [&a2dpEnabled](const String8& trueOrFalse) {
+ if (trueOrFalse == AudioParameter::valueTrue) {
+ a2dpEnabled = false; // 'suspended' == true
+ return OK;
+ } else if (trueOrFalse == AudioParameter::valueFalse) {
+ a2dpEnabled = true; // 'suspended' == false
+ return OK;
+ }
+ ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyBtA2dpSuspended, trueOrFalse.c_str());
+ return BAD_VALUE;
+ }));
+ // FIXME(b/278976019): Support keyReconfigA2dp via vendor plugin
+ if (mBluetoothA2dp != nullptr && a2dpEnabled.has_value()) {
+ return statusTFromBinderStatus(mBluetoothA2dp->setEnabled(a2dpEnabled.value()));
+ }
+ return OK;
+}
+
+status_t DeviceHalAidl::filterAndUpdateBtHfpParameters(AudioParameter &parameters) {
+ TIME_CHECK();
+ IBluetooth::HfpConfig hfpConfig;
+ (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+ parameters, String8(AudioParameter::keyBtHfpEnable),
+ [&hfpConfig](const String8& trueOrFalse) {
+ if (trueOrFalse == AudioParameter::valueTrue) {
+ hfpConfig.isEnabled = Boolean{ .value = true };
+ return OK;
+ } else if (trueOrFalse == AudioParameter::valueFalse) {
+ hfpConfig.isEnabled = Boolean{ .value = false };
+ return OK;
+ }
+ ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyBtHfpEnable, trueOrFalse.c_str());
+ return BAD_VALUE;
+ }));
+ (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+ parameters, String8(AudioParameter::keyBtHfpSamplingRate),
+ [&hfpConfig](int sampleRate) {
+ return sampleRate > 0 ?
+ hfpConfig.sampleRate = Int{ .value = sampleRate }, OK : BAD_VALUE;
+ }));
+ (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+ parameters, String8(AudioParameter::keyBtHfpVolume),
+ [&hfpConfig](int volume0to15) {
+ if (volume0to15 >= 0 && volume0to15 <= 15) {
+ hfpConfig.volume = Float{ .value = volume0to15 / 15.0f };
+ return OK;
+ }
+ return BAD_VALUE;
+ }));
+ if (mBluetooth != nullptr && hfpConfig != IBluetooth::HfpConfig{}) {
+ IBluetooth::HfpConfig newHfpConfig;
+ return statusTFromBinderStatus(mBluetooth->setHfpConfig(hfpConfig, &newHfpConfig));
+ }
+ return OK;
+}
+
+status_t DeviceHalAidl::filterAndUpdateBtLeParameters(AudioParameter &parameters) {
+ TIME_CHECK();
+ std::optional<bool> leEnabled;
+ (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+ parameters, String8(AudioParameter::keyBtLeSuspended),
+ [&leEnabled](const String8& trueOrFalse) {
+ if (trueOrFalse == AudioParameter::valueTrue) {
+ leEnabled = false; // 'suspended' == true
+ return OK;
+ } else if (trueOrFalse == AudioParameter::valueFalse) {
+ leEnabled = true; // 'suspended' == false
+ return OK;
+ }
+ ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyBtLeSuspended, trueOrFalse.c_str());
+ return BAD_VALUE;
+ }));
+ if (mBluetoothLe != nullptr && leEnabled.has_value()) {
+ return statusTFromBinderStatus(mBluetoothLe->setEnabled(leEnabled.value()));
+ }
+ return OK;
+}
+
+status_t DeviceHalAidl::filterAndUpdateBtScoParameters(AudioParameter &parameters) {
+ TIME_CHECK();
+ IBluetooth::ScoConfig scoConfig;
+ (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+ parameters, String8(AudioParameter::keyBtSco),
+ [&scoConfig](const String8& onOrOff) {
+ if (onOrOff == AudioParameter::valueOn) {
+ scoConfig.isEnabled = Boolean{ .value = true };
+ return OK;
+ } else if (onOrOff == AudioParameter::valueOff) {
+ scoConfig.isEnabled = Boolean{ .value = false };
+ return OK;
+ }
+ ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyBtSco, onOrOff.c_str());
+ return BAD_VALUE;
+ }));
+ (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+ parameters, String8(AudioParameter::keyBtScoHeadsetName),
+ [&scoConfig](const String8& name) {
+ scoConfig.debugName = name;
+ return OK;
+ }));
+ (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+ parameters, String8(AudioParameter::keyBtNrec),
+ [&scoConfig](const String8& onOrOff) {
+ if (onOrOff == AudioParameter::valueOn) {
+ scoConfig.isNrecEnabled = Boolean{ .value = true };
+ return OK;
+ } else if (onOrOff == AudioParameter::valueOff) {
+ scoConfig.isNrecEnabled = Boolean{ .value = false };
+ return OK;
+ }
+ ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyBtNrec, onOrOff.c_str());
+ return BAD_VALUE;
+ }));
+ (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+ parameters, String8(AudioParameter::keyBtScoWb),
+ [&scoConfig](const String8& onOrOff) {
+ if (onOrOff == AudioParameter::valueOn) {
+ scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO_WB;
+ return OK;
+ } else if (onOrOff == AudioParameter::valueOff) {
+ scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO;
+ return OK;
+ }
+ ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyBtScoWb, onOrOff.c_str());
+ return BAD_VALUE;
+ }));
+ if (mBluetooth != nullptr && scoConfig != IBluetooth::ScoConfig{}) {
+ IBluetooth::ScoConfig newScoConfig;
+ return statusTFromBinderStatus(mBluetooth->setScoConfig(scoConfig, &newScoConfig));
+ }
+ return OK;
+}
+
status_t DeviceHalAidl::findOrCreatePatch(
const AudioPatch& requestedPatch, AudioPatch* patch, bool* created) {
std::set<int32_t> sourcePortConfigIds(requestedPatch.sourcePortConfigIds.begin(),
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index 5c9950b..37d800b 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -214,6 +214,10 @@
status_t createOrUpdatePortConfig(
const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
PortConfigs::iterator* result, bool *created);
+ status_t filterAndUpdateBtA2dpParameters(AudioParameter &parameters);
+ status_t filterAndUpdateBtHfpParameters(AudioParameter &parameters);
+ status_t filterAndUpdateBtLeParameters(AudioParameter &parameters);
+ status_t filterAndUpdateBtScoParameters(AudioParameter &parameters);
status_t findOrCreatePatch(
const std::set<int32_t>& sourcePortConfigIds,
const std::set<int32_t>& sinkPortConfigIds,
@@ -288,6 +292,9 @@
const std::string mInstance;
const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
const std::shared_ptr<::aidl::android::hardware::audio::core::ITelephony> mTelephony;
+ const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetooth> mBluetooth;
+ const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothA2dp> mBluetoothA2dp;
+ const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothLe> mBluetoothLe;
std::shared_ptr<::aidl::android::hardware::audio::core::sounddose::ISoundDose>
mSoundDose = nullptr;
Ports mPorts;
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index eccdfe8..d1044dc 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -122,30 +122,6 @@
return OK;
}
-namespace {
-
-// 'action' must accept a value of type 'T' and return 'status_t'.
-// The function returns 'true' if the parameter was found, and the action has succeeded.
-// The function returns 'false' if the parameter was not found.
-// Any errors get propagated, if there are errors it means the parameter was found.
-template<typename T, typename F>
-error::Result<bool> filterOutAndProcessParameter(
- AudioParameter& parameters, const String8& key, const F& action) {
- if (parameters.containsKey(key)) {
- T value;
- status_t status = parameters.get(key, value);
- if (status == OK) {
- parameters.remove(key);
- status = action(value);
- if (status == OK) return true;
- }
- return base::unexpected(status);
- }
- return false;
-}
-
-} // namespace
-
status_t StreamHalAidl::setParameters(const String8& kvPairs) {
TIME_CHECK();
if (!mStream) return NO_INIT;
@@ -579,10 +555,10 @@
if (!mStream) return NO_INIT;
AudioParameter parameters(kvPairs);
- ALOGD("%s parameters: %s", __func__, parameters.toString().c_str());
+ ALOGD("%s: parameters: \"%s\"", __func__, parameters.toString().c_str());
if (status_t status = filterAndUpdateOffloadMetadata(parameters); status != OK) {
- ALOGW("%s filtering or updating offload metadata failed: %d", __func__, status);
+ ALOGW("%s: filtering or updating offload metadata failed: %d", __func__, status);
}
return StreamHalAidl::setParameters(parameters.toString());
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index c66861b..649f813 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -49,8 +49,9 @@
"liblog",
],
header_libs: [
- "libmedia_helper_headers",
"libaudio_system_headers",
+ "libhardware_headers",
+ "libmedia_helper_headers",
],
export_header_lib_headers: [
"libmedia_helper_headers",
diff --git a/media/libmediahelper/AudioParameter.cpp b/media/libmediahelper/AudioParameter.cpp
index e25f9b7..a61a1bc 100644
--- a/media/libmediahelper/AudioParameter.cpp
+++ b/media/libmediahelper/AudioParameter.cpp
@@ -20,6 +20,7 @@
#include <utils/Log.h>
#include <media/AudioParameter.h>
+#include <hardware/audio.h>
#include <system/audio.h>
namespace android {
@@ -34,7 +35,13 @@
const char * const AudioParameter::keyScreenState = AUDIO_PARAMETER_KEY_SCREEN_STATE;
const char * const AudioParameter::keyClosing = AUDIO_PARAMETER_KEY_CLOSING;
const char * const AudioParameter::keyExiting = AUDIO_PARAMETER_KEY_EXITING;
+const char * const AudioParameter::keyBtSco = AUDIO_PARAMETER_KEY_BT_SCO;
+const char * const AudioParameter::keyBtScoHeadsetName = AUDIO_PARAMETER_KEY_BT_SCO_HEADSET_NAME;
const char * const AudioParameter::keyBtNrec = AUDIO_PARAMETER_KEY_BT_NREC;
+const char * const AudioParameter::keyBtScoWb = AUDIO_PARAMETER_KEY_BT_SCO_WB;
+const char * const AudioParameter::keyBtHfpEnable = AUDIO_PARAMETER_KEY_HFP_ENABLE;
+const char * const AudioParameter::keyBtHfpSamplingRate = AUDIO_PARAMETER_KEY_HFP_SET_SAMPLING_RATE;
+const char * const AudioParameter::keyBtHfpVolume = AUDIO_PARAMETER_KEY_HFP_VOLUME;
const char * const AudioParameter::keyHwAvSync = AUDIO_PARAMETER_HW_AV_SYNC;
const char * const AudioParameter::keyPresentationId = AUDIO_PARAMETER_STREAM_PRESENTATION_ID;
const char * const AudioParameter::keyProgramId = AUDIO_PARAMETER_STREAM_PROGRAM_ID;
@@ -52,9 +59,13 @@
AUDIO_PARAMETER_STREAM_SUP_SAMPLING_RATES;
const char * const AudioParameter::valueOn = AUDIO_PARAMETER_VALUE_ON;
const char * const AudioParameter::valueOff = AUDIO_PARAMETER_VALUE_OFF;
+const char * const AudioParameter::valueTrue = AUDIO_PARAMETER_VALUE_TRUE;
+const char * const AudioParameter::valueFalse = AUDIO_PARAMETER_VALUE_FALSE;
const char * const AudioParameter::valueListSeparator = AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
+const char * const AudioParameter::keyBtA2dpSuspended = AUDIO_PARAMETER_KEY_BT_A2DP_SUSPENDED;
const char * const AudioParameter::keyReconfigA2dp = AUDIO_PARAMETER_RECONFIG_A2DP;
const char * const AudioParameter::keyReconfigA2dpSupported = AUDIO_PARAMETER_A2DP_RECONFIG_SUPPORTED;
+const char * const AudioParameter::keyBtLeSuspended = AUDIO_PARAMETER_KEY_BT_LE_SUSPENDED;
// const char * const AudioParameter::keyDeviceSupportedEncapsulationModes =
// AUDIO_PARAMETER_DEVICE_SUP_ENCAPSULATION_MODES;
// const char * const AudioParameter::keyDeviceSupportedEncapsulationMetadataTypes =
diff --git a/media/libmediahelper/include/media/AudioParameter.h b/media/libmediahelper/include/media/AudioParameter.h
index 6c34a4f..70f8af3 100644
--- a/media/libmediahelper/include/media/AudioParameter.h
+++ b/media/libmediahelper/include/media/AudioParameter.h
@@ -55,11 +55,22 @@
static const char * const keyClosing;
static const char * const keyExiting;
+ // keyBtSco: Whether BT SCO is 'on' or 'off'
+ // keyBtScoHeadsetName: BT SCO headset name (for debugging)
// keyBtNrec: BT SCO Noise Reduction + Echo Cancellation parameters
+ // keyBtScoWb: BT SCO NR wideband mode
+ // keyHfp...: Parameters of the Hands-Free Profile
+ static const char * const keyBtSco;
+ static const char * const keyBtScoHeadsetName;
+ static const char * const keyBtNrec;
+ static const char * const keyBtScoWb;
+ static const char * const keyBtHfpEnable;
+ static const char * const keyBtHfpSamplingRate;
+ static const char * const keyBtHfpVolume;
+
// keyHwAvSync: get HW synchronization source identifier from a device
// keyMonoOutput: Enable mono audio playback
// keyStreamHwAvSync: set HW synchronization source identifier on a stream
- static const char * const keyBtNrec;
static const char * const keyHwAvSync;
static const char * const keyMonoOutput;
static const char * const keyStreamHwAvSync;
@@ -90,13 +101,19 @@
static const char * const valueOn;
static const char * const valueOff;
+ static const char * const valueTrue;
+ static const char * const valueFalse;
static const char * const valueListSeparator;
+ // keyBtA2dpSuspended: 'true' or 'false'
// keyReconfigA2dp: Ask HwModule to reconfigure A2DP offloaded codec
// keyReconfigA2dpSupported: Query if HwModule supports A2DP offload codec config
+ // keyBtLeSuspended: 'true' or 'false'
+ static const char * const keyBtA2dpSuspended;
static const char * const keyReconfigA2dp;
static const char * const keyReconfigA2dpSupported;
+ static const char * const keyBtLeSuspended;
// For querying device supported encapsulation capabilities. All returned values are integer,
// which are bit fields composed from using encapsulation capability values as position bits.
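For reference, the new keys are used like any other AudioParameter entry: a client packs key/value pairs into a single string and the HAL wrapper parses and filters them. A small illustrative round trip; the two helper functions are made up, while the key and value constants are the ones declared above.

    #include <media/AudioParameter.h>
    #include <utils/String8.h>

    using android::AudioParameter;
    using android::String8;

    // Build the kvPairs string a client could pass to setParameters() to turn
    // BT SCO on in wideband mode with an HFP volume step of 10.
    static String8 buildBtScoParameters() {
        AudioParameter param;
        param.add(String8(AudioParameter::keyBtSco), String8(AudioParameter::valueOn));
        param.add(String8(AudioParameter::keyBtScoWb), String8(AudioParameter::valueOn));
        param.addInt(String8(AudioParameter::keyBtHfpVolume), 10);
        return param.toString();
    }

    // Parse it back the way DeviceHalAidl::setParameters() does before filtering.
    static bool hasScoRequest(const String8& kvPairs) {
        AudioParameter parsed(kvPairs);
        return parsed.containsKey(String8(AudioParameter::keyBtSco));
    }

    int main() {
        return hasScoRequest(buildBtScoParameters()) ? 0 : 1;
    }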
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index f324408..ce2d8f4 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -3375,8 +3375,18 @@
ALOGV("%s type %d device type %d address %s device ID %d patch.isSoftware() %d",
__func__, port->type, port->ext.device.type,
port->ext.device.address, port->id, patch.isSoftware());
- if (port->type != AUDIO_PORT_TYPE_DEVICE || port->ext.device.type != mDevice.mType
- || port->ext.device.address != mDevice.address()) {
+ if (port->type != AUDIO_PORT_TYPE_DEVICE || port->ext.device.type != mDevice.mType ||
+ port->ext.device.address != mDevice.address()) {
+ return NAME_NOT_FOUND;
+ }
+ if (((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) &&
+ (audio_port_config_has_input_direction(port))) {
+ ALOGI("%s don't create postprocessing effect on record port", __func__);
+ return NAME_NOT_FOUND;
+ }
+ if (((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC) &&
+ (!audio_port_config_has_input_direction(port))) {
+ ALOGI("%s don't create preprocessing effect on playback port", __func__);
return NAME_NOT_FOUND;
}
status_t status = NAME_NOT_FOUND;
@@ -3407,20 +3417,12 @@
} else if (patch.isSoftware() || patch.thread().promote() != nullptr) {
sp <ThreadBase> thread;
if (audio_port_config_has_input_direction(port)) {
- if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
- ALOGI("%s don't create postprocessing effect on record thread", __func__);
- return NAME_NOT_FOUND;
- }
if (patch.isSoftware()) {
thread = patch.mRecord.thread();
} else {
thread = patch.thread().promote();
}
} else {
- if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC) {
- ALOGI("%s don't create preprocessing effect on playback thread", __func__);
- return NAME_NOT_FOUND;
- }
if (patch.isSoftware()) {
thread = patch.mPlayback.thread();
} else {
@@ -3436,6 +3438,7 @@
} else {
status = BAD_VALUE;
}
+
if (status == NO_ERROR || status == ALREADY_EXISTS) {
Status bs;
if (isEnabled()) {
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index dad663e..2eb0177 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -92,7 +92,7 @@
template <typename Key>
Element<Key> *Engine::getFromCollection(const Key &key) const
{
- const Collection<Key> collection = getCollection<Key>();
+ const Collection<Key> &collection = getCollection<Key>();
return collection.get(key);
}
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index cecced4..eb824de 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -3910,18 +3910,21 @@
void CameraService::UidPolicy::registerWithActivityManager() {
Mutex::Autolock _l(mUidLock);
+ int32_t emptyUidArray[] = { };
if (mRegistered) return;
status_t res = mAm.linkToDeath(this);
- mAm.registerUidObserver(this, ActivityManager::UID_OBSERVER_GONE
+ mAm.registerUidObserverForUids(this, ActivityManager::UID_OBSERVER_GONE
| ActivityManager::UID_OBSERVER_IDLE
| ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE
| ActivityManager::UID_OBSERVER_PROC_OOM_ADJ,
ActivityManager::PROCESS_STATE_UNKNOWN,
- String16("cameraserver"));
+ String16("cameraserver"), emptyUidArray, 0, mObserverToken);
if (res == OK) {
mRegistered = true;
ALOGV("UidPolicy: Registered with ActivityManager");
+ } else {
+ ALOGE("UidPolicy: Failed to register with ActivityManager: 0x%08x", res);
}
}
@@ -4019,13 +4022,16 @@
if (it->second.hasCamera) {
for (auto &monitoredUid : mMonitoredUids) {
if (monitoredUid.first != uid && adj > monitoredUid.second.procAdj) {
+ ALOGV("%s: notify uid %d", __FUNCTION__, monitoredUid.first);
notifyUidSet.emplace(monitoredUid.first);
}
}
+ ALOGV("%s: notify uid %d", __FUNCTION__, uid);
notifyUidSet.emplace(uid);
} else {
for (auto &monitoredUid : mMonitoredUids) {
if (monitoredUid.second.hasCamera && adj < monitoredUid.second.procAdj) {
+ ALOGV("%s: notify uid %d", __FUNCTION__, uid);
notifyUidSet.emplace(uid);
}
}
@@ -4056,6 +4062,10 @@
monitoredUid.procAdj = resource_policy::UNKNOWN_ADJ;
monitoredUid.refCount = 1;
it = mMonitoredUids.emplace(std::pair<uid_t, MonitoredUid>(uid, monitoredUid)).first;
+ status_t res = mAm.addUidToObserver(mObserverToken, String16("cameraserver"), uid);
+ if (res != OK) {
+ ALOGE("UidPolicy: Failed to add uid to observer: 0x%08x", res);
+ }
}
if (openCamera) {
@@ -4073,6 +4083,10 @@
it->second.refCount--;
if (it->second.refCount == 0) {
mMonitoredUids.erase(it);
+ status_t res = mAm.removeUidFromObserver(mObserverToken, String16("cameraserver"), uid);
+ if (res != OK) {
+ ALOGE("UidPolicy: Failed to remove uid from observer: 0x%08x", res);
+ }
} else if (closeCamera) {
it->second.hasCamera = false;
}
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 8c57d26..d84cb00 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -798,6 +798,7 @@
// Monitored uid map
std::unordered_map<uid_t, MonitoredUid> mMonitoredUids;
std::unordered_map<uid_t, bool> mOverrideUids;
+ sp<IBinder> mObserverToken;
}; // class UidPolicy
// If sensor privacy is enabled then all apps, including those that are active, should be
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.cpp b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
index 28dff7d..1c1bd24 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.cpp
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
@@ -43,8 +43,7 @@
mTidMap[currentThreadId].cycles++;
if (mTidMap[currentThreadId].cycles >= mMaxCycles) {
- std::string abortMessage = getAbortMessage(getpid(), currentThreadId,
- mTidMap[currentThreadId].functionName);
+ std::string abortMessage = getAbortMessage(mTidMap[currentThreadId].functionName);
android_set_abort_message(abortMessage.c_str());
ALOGW("CameraServiceWatchdog triggering abort for pid: %d tid: %d", getpid(),
currentThreadId);
@@ -60,10 +59,9 @@
return true;
}
-std::string CameraServiceWatchdog::getAbortMessage(int pid, int tid, std::string functionName) {
+std::string CameraServiceWatchdog::getAbortMessage(const std::string& functionName) {
std::string res = "CameraServiceWatchdog triggering abort during "
- + functionName + " | pid: " + std::to_string(pid)
- + " tid: " + std::to_string(tid);
+ + functionName;
return res;
}
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
index 6f8858a..de6ac9e 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.h
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -144,7 +144,7 @@
*/
void stop(uint32_t tid);
- std::string getAbortMessage(int pid, int tid, std::string functionName);
+ std::string getAbortMessage(const std::string& functionName);
virtual bool threadLoop();
diff --git a/services/tuner/hidl/TunerHidlFilter.cpp b/services/tuner/hidl/TunerHidlFilter.cpp
index 1789028..617622d 100644
--- a/services/tuner/hidl/TunerHidlFilter.cpp
+++ b/services/tuner/hidl/TunerHidlFilter.cpp
@@ -368,7 +368,7 @@
}
hidl_handle handle;
- handle.setTo(makeFromAidl(in_handle), true);
+ handle.setTo(makeFromAidl(in_handle));
HidlResult res = mFilter->releaseAvHandle(handle, in_avDataId);
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));