Merge changes Ifee8731e,Idf67d9ce into main
* changes:
Checking API_SAME_INPUT_BUFFER support for large audio frame
Changing interface creation for Large audio frame
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index d662585..362e98e 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -133,3 +133,10 @@
description: "Feature flag to track teamfood population"
bug: "328770262"
}
+
+flag {
+ name: "thumbnail_block_model"
+ namespace: "codec_fwk"
+ description: "Feature flag for using block model decoder in thumbnail generation"
+ bug: "329521645"
+}
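Note on usage (a sketch, not part of this change): codec_fwk aconfig flags are read through the generated flag provider, as the input_surface_throttle() check later in this merge does. Assuming the generator derives the accessor from the flag name, the new flag would be queried roughly as follows; the include and the thumbnail_block_model() accessor name are assumptions.

    // Sketch only: querying the new codec_fwk flag via the generated provider.
    // Header name and accessor are assumed, mirroring input_surface_throttle().
    #include <android_media_codec.h>  // assumed generated flag header

    static bool useBlockModelForThumbnail() {
        return android::media::codec::provider_->thumbnail_block_model();
    }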
diff --git a/media/audioaidlconversion/AidlConversionNdk.cpp b/media/audioaidlconversion/AidlConversionNdk.cpp
index 9b14a5e..5f7260d 100644
--- a/media/audioaidlconversion/AidlConversionNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionNdk.cpp
@@ -35,6 +35,7 @@
using hardware::audio::common::PlaybackTrackMetadata;
using hardware::audio::common::RecordTrackMetadata;
+using hardware::audio::common::SourceMetadata;
using ::android::BAD_VALUE;
using ::android::OK;
@@ -194,5 +195,17 @@
return aidl;
}
+// static
+ConversionResult<SourceMetadata>
+legacy2aidl_playback_track_metadata_v7_SourceMetadata(
+ const std::vector<playback_track_metadata_v7_t>& legacy) {
+ SourceMetadata aidl;
+ aidl.tracks = VALUE_OR_RETURN(
+ convertContainer<std::vector<PlaybackTrackMetadata>>(
+ legacy,
+ legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata));
+ return aidl;
+}
+
} // namespace android
} // aidl
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdk.h b/media/audioaidlconversion/include/media/AidlConversionNdk.h
index 813a728..b8a3110 100644
--- a/media/audioaidlconversion/include/media/AidlConversionNdk.h
+++ b/media/audioaidlconversion/include/media/AidlConversionNdk.h
@@ -28,6 +28,7 @@
#include <aidl/android/hardware/audio/common/PlaybackTrackMetadata.h>
#include <aidl/android/hardware/audio/common/RecordTrackMetadata.h>
+#include <aidl/android/hardware/audio/common/SourceMetadata.h>
#include <aidl/android/media/audio/common/AudioConfig.h>
#include <media/AidlConversionUtil.h>
@@ -56,5 +57,9 @@
ConversionResult<hardware::audio::common::RecordTrackMetadata>
legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(const record_track_metadata_v7& legacy);
+ConversionResult<hardware::audio::common::SourceMetadata>
+legacy2aidl_playback_track_metadata_v7_SourceMetadata(
+ const std::vector<playback_track_metadata_v7_t>& legacy);
+
} // namespace android
} // namespace aidl
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 8d9e76e..bdfc409 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -34,7 +34,7 @@
c2_status_t retrieveAHardwareBufferId(const C2ConstGraphicBlock &blk, uint64_t *bid) {
std::shared_ptr<const _C2BlockPoolData> bpData = _C2BlockFactory::GetGraphicBlockPoolData(blk);
- if (bpData->getType() != _C2BlockPoolData::TYPE_AHWBUFFER) {
+ if (!bpData || bpData->getType() != _C2BlockPoolData::TYPE_AHWBUFFER) {
return C2_BAD_VALUE;
}
if (__builtin_available(android __ANDROID_API_T__, *)) {
@@ -824,6 +824,10 @@
std::shared_ptr<BufferCache> cache;
int slotId;
sp<Fence> rFence;
+ if (mStopped.load() == true) {
+ ALOGE("cannot deallocate due to being stopped");
+ return C2_BAD_STATE;
+ }
c2_status_t res = requestDeallocate(bid, fence, &completed, &updateDequeue,
&cache, &slotId, &rFence);
if (res != C2_OK) {
@@ -900,7 +904,10 @@
cache->unblockSlot(buffer->mSlot);
if (oldBuffer) {
// migrated, register the new buffer to the cache.
- cache->mBuffers.emplace(buffer->mSlot, buffer);
+ auto ret = cache->mBuffers.emplace(buffer->mSlot, buffer);
+ if (!ret.second) {
+ ret.first->second = buffer;
+ }
}
}
mDeallocating.erase(origBid);
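The cache migration above depends on std::map::emplace leaving an existing entry untouched, which is why the returned pair is checked. A minimal, self-contained sketch of that insert-or-overwrite pattern (plain int payloads as stand-ins, not the real buffer cache):

    #include <map>
    #include <memory>

    // emplace() does not replace an existing mapped value; when the slot key is
    // already present, overwrite it through the returned iterator instead.
    void insertOrReplace(std::map<int, std::shared_ptr<int>>& buffers,
                         int slot, const std::shared_ptr<int>& buffer) {
        auto ret = buffers.emplace(slot, buffer);
        if (!ret.second) {
            ret.first->second = buffer;
        }
    }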
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index a137dbb..80735cb 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -2391,7 +2391,12 @@
"GraphicBufferAllocator was not created.";
return C2_CORRUPTED;
}
+ // Note: Consumer usage is set ahead of the HAL allocator (gba) being set.
+ // This is the same as in HIDL.
+ uint64_t consumerUsage = configConsumerUsage(surface);
bool ret = gba->configure(surface, generation, maxDequeueCount);
+ ALOGD("setOutputSurface -- generation=%u consumer usage=%#llx",
+ generation, (long long)consumerUsage);
return ret ? C2_OK : C2_CORRUPTED;
}
uint64_t bqId = 0;
@@ -2419,41 +2424,9 @@
mHidlBase1_2 ? &syncObj : nullptr);
}
- // set consumer bits
- // TODO: should this get incorporated into setOutputSurface method so that consumer bits
- // can be set atomically?
- uint64_t consumerUsage = kDefaultConsumerUsage;
- {
- if (surface) {
- uint64_t usage = 0;
- status_t err = surface->getConsumerUsage(&usage);
- if (err != NO_ERROR) {
- ALOGD("setOutputSurface -- failed to get consumer usage bits (%d/%s). ignoring",
- err, asString(err));
- } else {
- // Note: we are adding the default usage because components must support
- // producing output frames that can be displayed an all output surfaces.
-
- // TODO: do not set usage for tunneled scenario. It is unclear if consumer usage
- // is meaningful in a tunneled scenario; on one hand output buffers exist, but
- // they do not exist inside of C2 scope. Any buffer usage shall be communicated
- // through the sideband channel.
-
- consumerUsage = usage | kDefaultConsumerUsage;
- }
- }
-
- C2StreamUsageTuning::output outputUsage{
- 0u, C2AndroidMemoryUsage::FromGrallocUsage(consumerUsage).expected};
- std::vector<std::unique_ptr<C2SettingResult>> failures;
- c2_status_t err = config({&outputUsage}, C2_MAY_BLOCK, &failures);
- if (err != C2_OK) {
- ALOGD("setOutputSurface -- failed to set consumer usage (%d/%s)",
- err, asString(err));
- }
- }
+ uint64_t consumerUsage = configConsumerUsage(surface);
ALOGD("setOutputSurface -- generation=%u consumer usage=%#llx%s",
- generation, (long long)consumerUsage, syncObj ? " sync" : "");
+ generation, (long long)consumerUsage, syncObj ? " sync" : "");
Return<c2_hidl::Status> transStatus = syncObj ?
mHidlBase1_2->setOutputSurfaceWithSyncObj(
@@ -2495,6 +2468,44 @@
return mOutputBufferQueue->outputBuffer(block, input, output);
}
+uint64_t Codec2Client::Component::configConsumerUsage(
+ const sp<IGraphicBufferProducer>& surface) {
+ // set consumer bits
+ // TODO: should this get incorporated into setOutputSurface method so that consumer bits
+ // can be set atomically?
+ uint64_t consumerUsage = kDefaultConsumerUsage;
+ {
+ if (surface) {
+ uint64_t usage = 0;
+ status_t err = surface->getConsumerUsage(&usage);
+ if (err != NO_ERROR) {
+ ALOGD("setOutputSurface -- failed to get consumer usage bits (%d/%s). ignoring",
+ err, asString(err));
+ } else {
+ // Note: we are adding the default usage because components must support
+ // producing output frames that can be displayed on all output surfaces.
+
+ // TODO: do not set usage for tunneled scenario. It is unclear if consumer usage
+ // is meaningful in a tunneled scenario; on one hand output buffers exist, but
+ // they do not exist inside of C2 scope. Any buffer usage shall be communicated
+ // through the sideband channel.
+
+ consumerUsage = usage | kDefaultConsumerUsage;
+ }
+ }
+
+ C2StreamUsageTuning::output outputUsage{
+ 0u, C2AndroidMemoryUsage::FromGrallocUsage(consumerUsage).expected};
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = config({&outputUsage}, C2_MAY_BLOCK, &failures);
+ if (err != C2_OK) {
+ ALOGD("setOutputSurface -- failed to set consumer usage (%d/%s)",
+ err, asString(err));
+ }
+ }
+ return consumerUsage;
+}
+
void Codec2Client::Component::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
if (mAidlBase) {
// TODO b/311348680
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index 413e92e..7923f04 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -467,6 +467,9 @@
const QueueBufferInput& input,
QueueBufferOutput* output);
+ // configure consumer usage.
+ uint64_t configConsumerUsage(const sp<IGraphicBufferProducer>& surface);
+
// Retrieve frame event history from the output surface.
void pollForRenderedFrames(FrameEventHistoryDelta* delta);
diff --git a/media/codec2/hal/client/include/codec2/hidl/output.h b/media/codec2/hal/client/include/codec2/hidl/output.h
index ddb9855..108f0a6 100644
--- a/media/codec2/hal/client/include/codec2/hidl/output.h
+++ b/media/codec2/hal/client/include/codec2/hidl/output.h
@@ -96,6 +96,7 @@
uint64_t mBqId;
int32_t mMaxDequeueBufferCount;
std::shared_ptr<int> mOwner;
+ std::shared_ptr<int> mConsumerAttachCount;
// To migrate existing buffers
sp<GraphicBuffer> mBuffers[BufferQueueDefs::NUM_BUFFER_SLOTS]; // find a better way
std::weak_ptr<_C2BlockPoolData> mPoolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
diff --git a/media/codec2/hal/client/output.cpp b/media/codec2/hal/client/output.cpp
index 54d78a0..2eb381b 100644
--- a/media/codec2/hal/client/output.cpp
+++ b/media/codec2/hal/client/output.cpp
@@ -261,6 +261,7 @@
mGeneration = generation;
mBqId = bqId;
mOwner = std::make_shared<int>(0);
+ mConsumerAttachCount = std::make_shared<int>(0);
mMaxDequeueBufferCount = maxDequeueBufferCount;
if (igbp == nullptr) {
return false;
@@ -522,6 +523,7 @@
std::shared_ptr<C2SurfaceSyncMemory> syncMem;
sp<IGraphicBufferProducer> outputIgbp;
uint32_t outputGeneration = 0;
+ std::shared_ptr<int> consumerAttachCount;
{
std::unique_lock<std::mutex> l(mMutex);
if (mStopped) {
@@ -529,6 +531,7 @@
}
outputIgbp = mIgbp;
outputGeneration = mGeneration;
+ consumerAttachCount = mConsumerAttachCount;
syncMem = mSyncMem;
}
@@ -536,15 +539,42 @@
auto syncVar = syncMem ? syncMem->mem() : nullptr;
if (syncVar) {
syncVar->lock();
- syncVar->notifyQueuedLocked();
+ if (consumerAttachCount && *consumerAttachCount > 0) {
+ (*consumerAttachCount)--;
+ } else {
+ syncVar->notifyQueuedLocked();
+ }
syncVar->unlock();
}
}
}
void OutputBufferQueue::onBufferAttached(uint32_t generation) {
- // TODO
- (void) generation;
+ std::shared_ptr<C2SurfaceSyncMemory> syncMem;
+ sp<IGraphicBufferProducer> outputIgbp;
+ uint32_t outputGeneration = 0;
+ std::shared_ptr<int> consumerAttachCount;
+ {
+ std::unique_lock<std::mutex> l(mMutex);
+ if (mStopped) {
+ return;
+ }
+ outputIgbp = mIgbp;
+ outputGeneration = mGeneration;
+ consumerAttachCount = mConsumerAttachCount;
+ syncMem = mSyncMem;
+ }
+
+ if (outputIgbp && generation == outputGeneration) {
+ auto syncVar = syncMem ? syncMem->mem() : nullptr;
+ if (syncVar) {
+ syncVar->lock();
+ if (consumerAttachCount) {
+ (*consumerAttachCount)++;
+ }
+ syncVar->unlock();
+ }
+ }
}
void OutputBufferQueue::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
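The two hunks above balance consumer-side attaches against producer-side releases: onBufferAttached() bumps a shared counter, and the release path consumes one count instead of signalling a newly queued buffer. A minimal sketch of that counting scheme in isolation (shared counter only; the real code also holds the C2SurfaceSyncMemory lock and checks the buffer generation):

    #include <memory>

    // Consumer attached a buffer directly: remember it.
    void noteConsumerAttach(const std::shared_ptr<int>& attachCount) {
        if (attachCount) {
            ++(*attachCount);
        }
    }

    // On release: swallow one pending attach instead of notifying a queued buffer.
    bool shouldNotifyQueued(const std::shared_ptr<int>& attachCount) {
        if (attachCount && *attachCount > 0) {
            --(*attachCount);
            return false;
        }
        return true;
    }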
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 0aae23c..68f1dda 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -96,11 +96,14 @@
public:
static sp<CCodecWatchdog> getInstance() {
- static sp<CCodecWatchdog> instance(new CCodecWatchdog);
- static std::once_flag flag;
- // Call Init() only once.
- std::call_once(flag, Init, instance);
- return instance;
+ static sp<CCodecWatchdog> sInstance = [] {
+ sp<CCodecWatchdog> instance = new CCodecWatchdog;
+ // the instance should never get destructed
+ instance->incStrong((void *)CCodecWatchdog::getInstance);
+ instance->init();
+ return instance;
+ }();
+ return sInstance;
}
~CCodecWatchdog() = default;
@@ -146,11 +149,11 @@
private:
CCodecWatchdog() : mLooper(new ALooper) {}
- static void Init(const sp<CCodecWatchdog> &thiz) {
- ALOGV("Init");
- thiz->mLooper->setName("CCodecWatchdog");
- thiz->mLooper->registerHandler(thiz);
- thiz->mLooper->start();
+ void init() {
+ ALOGV("init");
+ mLooper->setName("CCodecWatchdog");
+ mLooper->registerHandler(this);
+ mLooper->start();
}
sp<ALooper> mLooper;
@@ -222,19 +225,20 @@
~HGraphicBufferSourceWrapper() override = default;
status_t connect(const std::shared_ptr<Codec2Client::Component> &comp) override {
- mNode = new C2OMXNode(comp);
- mOmxNode = new hardware::media::omx::V1_0::utils::TWOmxNode(mNode);
- mNode->setFrameSize(mWidth, mHeight);
+ Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+ *node = new C2OMXNode(comp);
+ mOmxNode = new hardware::media::omx::V1_0::utils::TWOmxNode(*node);
+ (*node)->setFrameSize(mWidth, mHeight);
// Usage is queried during configure(), so setting it beforehand.
// 64 bit set parameter is existing only in C2OMXNode.
OMX_U64 usage64 = mConfig.mUsage;
- status_t res = mNode->setParameter(
+ status_t res = (*node)->setParameter(
(OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits64,
&usage64, sizeof(usage64));
if (res != OK) {
OMX_U32 usage = mConfig.mUsage & 0xFFFFFFFF;
- (void)mNode->setParameter(
+ (void)(*node)->setParameter(
(OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
&usage, sizeof(usage));
}
@@ -244,17 +248,18 @@
}
void disconnect() override {
- if (mNode == nullptr) {
+ Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+ if ((*node) == nullptr) {
return;
}
- sp<IOMXBufferSource> source = mNode->getSource();
+ sp<IOMXBufferSource> source = (*node)->getSource();
if (source == nullptr) {
ALOGD("GBSWrapper::disconnect: node is not configured with OMXBufferSource.");
return;
}
source->onOmxIdle();
source->onOmxLoaded();
- mNode.clear();
+ node->clear();
mOmxNode.clear();
}
@@ -268,7 +273,11 @@
}
status_t start() override {
- sp<IOMXBufferSource> source = mNode->getSource();
+ Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+ if ((*node) == nullptr) {
+ return NO_INIT;
+ }
+ sp<IOMXBufferSource> source = (*node)->getSource();
if (source == nullptr) {
return NO_INIT;
}
@@ -278,7 +287,7 @@
OMX_PARAM_PORTDEFINITIONTYPE param;
param.nPortIndex = kPortIndexInput;
- status_t err = mNode->getParameter(OMX_IndexParamPortDefinition,
+ status_t err = (*node)->getParameter(OMX_IndexParamPortDefinition,
&param, sizeof(param));
if (err == OK) {
numSlots = param.nBufferCountActual;
@@ -297,6 +306,7 @@
}
status_t configure(Config &config) {
+ Mutexed<sp<C2OMXNode>>::Locked node(mNode);
std::stringstream status;
status_t err = OK;
@@ -317,7 +327,7 @@
// pts gap
if (config.mMinAdjustedFps > 0 || config.mFixedAdjustedFps > 0) {
- if (mNode != nullptr) {
+ if ((*node) != nullptr) {
OMX_PARAM_U32TYPE ptrGapParam = {};
ptrGapParam.nSize = sizeof(OMX_PARAM_U32TYPE);
float gap = (config.mMinAdjustedFps > 0)
@@ -326,7 +336,7 @@
// float -> uint32_t is undefined if the value is negative.
// First convert to int32_t to ensure the expected behavior.
ptrGapParam.nU32 = int32_t(gap);
- (void)mNode->setParameter(
+ (void)(*node)->setParameter(
(OMX_INDEXTYPE)OMX_IndexParamMaxFrameDurationForBitrateControl,
&ptrGapParam, sizeof(ptrGapParam));
}
@@ -426,8 +436,8 @@
// priority
if (mConfig.mPriority != config.mPriority) {
- if (config.mPriority != INT_MAX) {
- mNode->setPriority(config.mPriority);
+ if (config.mPriority != INT_MAX && (*node) != nullptr) {
+ (*node)->setPriority(config.mPriority);
}
mConfig.mPriority = config.mPriority;
}
@@ -441,24 +451,40 @@
}
void onInputBufferDone(c2_cntr64_t index) override {
- mNode->onInputBufferDone(index);
+ Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+ if ((*node) == nullptr) {
+ return;
+ }
+ (*node)->onInputBufferDone(index);
}
void onInputBufferEmptied() override {
- mNode->onInputBufferEmptied();
+ Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+ if ((*node) == nullptr) {
+ return;
+ }
+ (*node)->onInputBufferEmptied();
}
android_dataspace getDataspace() override {
- return mNode->getDataspace();
+ Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+ if ((*node) == nullptr) {
+ return HAL_DATASPACE_UNKNOWN;
+ }
+ return (*node)->getDataspace();
}
uint32_t getPixelFormat() override {
- return mNode->getPixelFormat();
+ Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+ if ((*node) == nullptr) {
+ return PIXEL_FORMAT_UNKNOWN;
+ }
+ return (*node)->getPixelFormat();
}
private:
sp<HGraphicBufferSource> mSource;
- sp<C2OMXNode> mNode;
+ Mutexed<sp<C2OMXNode>> mNode;
sp<hardware::media::omx::V1_0::IOmxNode> mOmxNode;
uint32_t mWidth;
uint32_t mHeight;
@@ -479,33 +505,39 @@
~AGraphicBufferSourceWrapper() override = default;
status_t connect(const std::shared_ptr<Codec2Client::Component> &comp) override {
- mNode = ::ndk::SharedRefBase::make<C2AidlNode>(comp);
- mNode->setFrameSize(mWidth, mHeight);
+ Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+ *node = ::ndk::SharedRefBase::make<C2AidlNode>(comp);
+ (*node)->setFrameSize(mWidth, mHeight);
// Usage is queried during configure(), so setting it beforehand.
uint64_t usage = mConfig.mUsage;
- (void)mNode->setConsumerUsage((int64_t)usage);
+ (void)(*node)->setConsumerUsage((int64_t)usage);
return fromAidlStatus(mSource->configure(
- mNode, static_cast<::aidl::android::hardware::graphics::common::Dataspace>(
+ (*node), static_cast<::aidl::android::hardware::graphics::common::Dataspace>(
mDataSpace)));
}
void disconnect() override {
- if (mNode == nullptr) {
+ Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+ if ((*node) == nullptr) {
return;
}
- std::shared_ptr<IAidlBufferSource> source = mNode->getSource();
+ std::shared_ptr<IAidlBufferSource> source = (*node)->getSource();
if (source == nullptr) {
ALOGD("GBSWrapper::disconnect: node is not configured with OMXBufferSource.");
return;
}
(void)source->onStop();
(void)source->onRelease();
- mNode.reset();
+ node->reset();
}
status_t start() override {
- std::shared_ptr<IAidlBufferSource> source = mNode->getSource();
+ Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+ if ((*node) == nullptr) {
+ return NO_INIT;
+ }
+ std::shared_ptr<IAidlBufferSource> source = (*node)->getSource();
if (source == nullptr) {
return NO_INIT;
}
@@ -513,7 +545,7 @@
size_t numSlots = 16;
IAidlNode::InputBufferParams param;
- status_t err = fromAidlStatus(mNode->getInputBufferParams(&param));
+ status_t err = fromAidlStatus((*node)->getInputBufferParams(&param));
if (err == OK) {
numSlots = param.bufferCountActual;
}
@@ -531,6 +563,7 @@
}
status_t configure(Config &config) {
+ Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
std::stringstream status;
status_t err = OK;
@@ -551,14 +584,14 @@
// pts gap
if (config.mMinAdjustedFps > 0 || config.mFixedAdjustedFps > 0) {
- if (mNode != nullptr) {
+ if ((*node) != nullptr) {
float gap = (config.mMinAdjustedFps > 0)
? c2_min(INT32_MAX + 0., 1e6 / config.mMinAdjustedFps + 0.5)
: c2_max(0. - INT32_MAX, -1e6 / config.mFixedAdjustedFps - 0.5);
// float -> uint32_t is undefined if the value is negative.
// First convert to int32_t to ensure the expected behavior.
int32_t gapUs = int32_t(gap);
- (void)mNode->setAdjustTimestampGapUs(gapUs);
+ (void)(*node)->setAdjustTimestampGapUs(gapUs);
}
}
@@ -650,7 +683,7 @@
// priority
if (mConfig.mPriority != config.mPriority) {
if (config.mPriority != INT_MAX) {
- mNode->setPriority(config.mPriority);
+ (*node)->setPriority(config.mPriority);
}
mConfig.mPriority = config.mPriority;
}
@@ -664,24 +697,40 @@
}
void onInputBufferDone(c2_cntr64_t index) override {
- mNode->onInputBufferDone(index);
+ Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+ if ((*node) == nullptr) {
+ return;
+ }
+ (*node)->onInputBufferDone(index);
}
void onInputBufferEmptied() override {
- mNode->onInputBufferEmptied();
+ Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+ if ((*node) == nullptr) {
+ return;
+ }
+ (*node)->onInputBufferEmptied();
}
android_dataspace getDataspace() override {
- return mNode->getDataspace();
+ Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+ if ((*node) == nullptr) {
+ return HAL_DATASPACE_UNKNOWN;
+ }
+ return (*node)->getDataspace();
}
uint32_t getPixelFormat() override {
- return mNode->getPixelFormat();
+ Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+ if ((*node) == nullptr) {
+ return PIXEL_FORMAT_UNKNOWN;
+ }
+ return (*node)->getPixelFormat();
}
private:
std::shared_ptr<AGraphicBufferSource> mSource;
- std::shared_ptr<C2AidlNode> mNode;
+ Mutexed<std::shared_ptr<C2AidlNode>> mNode;
uint32_t mWidth;
uint32_t mHeight;
Config mConfig;
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 7583e6f..d829523 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -228,15 +228,17 @@
status_t CCodecBufferChannel::setInputSurface(
const std::shared_ptr<InputSurfaceWrapper> &surface) {
ALOGV("[%s] setInputSurface", mName);
- mInputSurface = surface;
- return mInputSurface->connect(mComponent);
+ Mutexed<std::shared_ptr<InputSurfaceWrapper>>::Locked inputSurface(mInputSurface);
+ *inputSurface = surface;
+ return (*inputSurface)->connect(mComponent);
}
status_t CCodecBufferChannel::signalEndOfInputStream() {
- if (mInputSurface == nullptr) {
+ Mutexed<std::shared_ptr<InputSurfaceWrapper>>::Locked inputSurface(mInputSurface);
+ if ((*inputSurface) == nullptr) {
return INVALID_OPERATION;
}
- return mInputSurface->signalEndOfInputStream();
+ return (*inputSurface)->signalEndOfInputStream();
}
status_t CCodecBufferChannel::queueInputBufferInternal(
@@ -1069,9 +1071,11 @@
return;
}
}
- if (android::media::codec::provider_->input_surface_throttle()
- && mInputSurface != nullptr) {
- mInputSurface->onInputBufferEmptied();
+ if (android::media::codec::provider_->input_surface_throttle()) {
+ Mutexed<std::shared_ptr<InputSurfaceWrapper>>::Locked inputSurface(mInputSurface);
+ if ((*inputSurface) != nullptr) {
+ (*inputSurface)->onInputBufferEmptied();
+ }
}
size_t numActiveSlots = 0;
while (!mPipelineWatcher.lock()->pipelineFull()) {
@@ -1700,7 +1704,7 @@
&& (hasCryptoOrDescrambler() || conforming)) {
input->buffers.reset(new SlotInputBuffers(mName));
} else if (graphic) {
- if (mInputSurface) {
+ if (mInputSurface.lock()->get()) {
input->buffers.reset(new DummyInputBuffers(mName));
} else if (mMetaMode == MODE_ANW) {
input->buffers.reset(new GraphicMetadataInputBuffers(mName));
@@ -1983,7 +1987,7 @@
status_t CCodecBufferChannel::prepareInitialInputBuffers(
std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers, bool retry) {
- if (mInputSurface) {
+ if (mInputSurface.lock()->get()) {
return OK;
}
@@ -2109,9 +2113,7 @@
void CCodecBufferChannel::reset() {
stop();
- if (mInputSurface != nullptr) {
- mInputSurface.reset();
- }
+ mInputSurface.lock()->reset();
mPipelineWatcher.lock()->flush();
{
Mutexed<Input>::Locked input(mInput);
@@ -2206,7 +2208,7 @@
void CCodecBufferChannel::onInputBufferDone(
uint64_t frameIndex, size_t arrayIndex) {
- if (mInputSurface) {
+ if (mInputSurface.lock()->get()) {
return;
}
std::shared_ptr<C2Buffer> buffer =
@@ -2263,7 +2265,8 @@
notifyClient = false;
}
- if (mInputSurface == nullptr && (work->worklets.size() != 1u
+ bool hasInputSurface = (mInputSurface.lock()->get() != nullptr);
+ if (!hasInputSurface && (work->worklets.size() != 1u
|| !work->worklets.front()
|| !(work->worklets.front()->output.flags &
C2FrameData::FLAG_INCOMPLETE))) {
@@ -2472,7 +2475,7 @@
c2_cntr64_t timestamp =
worklet->output.ordinal.timestamp + work->input.ordinal.customOrdinal
- work->input.ordinal.timestamp;
- if (mInputSurface != nullptr) {
+ if (hasInputSurface) {
// When using input surface we need to restore the original input timestamp.
timestamp = work->input.ordinal.customOrdinal;
}
@@ -2799,7 +2802,7 @@
}
void CCodecBufferChannel::setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer) {
- if (mInputSurface == nullptr) {
+ if (mInputSurface.lock()->get() == nullptr) {
mInfoBuffers.push_back(buffer);
} else {
std::list<std::unique_ptr<C2Work>> items;
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index e62742b..f5e268a 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -391,7 +391,7 @@
};
Mutexed<BlockPools> mBlockPools;
- std::shared_ptr<InputSurfaceWrapper> mInputSurface;
+ Mutexed<std::shared_ptr<InputSurfaceWrapper>> mInputSurface;
MetaMode mMetaMode;
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 36725ec..a943626 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -1896,7 +1896,8 @@
std::vector<C2QpOffsetRectStruct> c2QpOffsetRects;
char mutableStrQpOffsetRects[strlen(qpOffsetRects.c_str()) + 1];
strcpy(mutableStrQpOffsetRects, qpOffsetRects.c_str());
- char* box = strtok(mutableStrQpOffsetRects, ";");
+ char* savePtr;
+ char* box = strtok_r(mutableStrQpOffsetRects, ";", &savePtr);
while (box != nullptr) {
int top, left, bottom, right, offset;
if (sscanf(box, "%d,%d-%d,%d=%d", &top, &left, &bottom, &right, &offset) == 5) {
@@ -1914,7 +1915,7 @@
} else {
ALOGE("Rects configuration %s doesn't follow the string pattern.", box);
}
- box = strtok(nullptr, ";");
+ box = strtok_r(nullptr, ";", &savePtr);
}
if (c2QpOffsetRects.size() != 0) {
const std::unique_ptr<C2StreamQpOffsetRects::output> regions =
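The strtok -> strtok_r change above replaces the hidden static tokenizer state with a caller-owned save pointer, so the rectangle parsing cannot be corrupted by other code that tokenizes strings concurrently. A small self-contained sketch of the same reentrant loop over "top,left-bottom,right=offset" entries (a hypothetical helper, not the CCodecConfig code):

    #include <cstdio>
    #include <cstring>

    // strtok_r() keeps its position in savePtr rather than in global state.
    void parseQpOffsetRects(char* mutableRects) {
        char* savePtr = nullptr;
        for (char* box = strtok_r(mutableRects, ";", &savePtr); box != nullptr;
                box = strtok_r(nullptr, ";", &savePtr)) {
            int top, left, bottom, right, offset;
            if (std::sscanf(box, "%d,%d-%d,%d=%d",
                            &top, &left, &bottom, &right, &offset) == 5) {
                std::printf("rect %d,%d-%d,%d offset=%d\n",
                            top, left, bottom, right, offset);
            }
        }
    }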
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 2550dcf..164a1e0 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -43,6 +43,7 @@
#include <C2Debug.h>
#include "Codec2Buffer.h"
+#include "Codec2BufferUtils.h"
namespace android {
@@ -215,482 +216,6 @@
mBufferRef.reset();
}
-// GraphicView2MediaImageConverter
-
-namespace {
-
-class GraphicView2MediaImageConverter {
-public:
- /**
- * Creates a C2GraphicView <=> MediaImage converter
- *
- * \param view C2GraphicView object
- * \param format buffer format
- * \param copy whether the converter is used for copy or not
- */
- GraphicView2MediaImageConverter(
- const C2GraphicView &view, const sp<AMessage> &format, bool copy)
- : mInitCheck(NO_INIT),
- mView(view),
- mWidth(view.width()),
- mHeight(view.height()),
- mAllocatedDepth(0),
- mBackBufferSize(0),
- mMediaImage(new ABuffer(sizeof(MediaImage2))) {
- ATRACE_CALL();
- if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
- mClientColorFormat = COLOR_FormatYUV420Flexible;
- }
- if (!format->findInt32("android._color-format", &mComponentColorFormat)) {
- mComponentColorFormat = COLOR_FormatYUV420Flexible;
- }
- if (view.error() != C2_OK) {
- ALOGD("Converter: view.error() = %d", view.error());
- mInitCheck = BAD_VALUE;
- return;
- }
- MediaImage2 *mediaImage = (MediaImage2 *)mMediaImage->base();
- const C2PlanarLayout &layout = view.layout();
- if (layout.numPlanes == 0) {
- ALOGD("Converter: 0 planes");
- mInitCheck = BAD_VALUE;
- return;
- }
- memset(mediaImage, 0, sizeof(*mediaImage));
- mAllocatedDepth = layout.planes[0].allocatedDepth;
- uint32_t bitDepth = layout.planes[0].bitDepth;
-
- // align width and height to support subsampling cleanly
- uint32_t stride = align(view.crop().width, 2) * divUp(layout.planes[0].allocatedDepth, 8u);
- uint32_t vStride = align(view.crop().height, 2);
-
- bool tryWrapping = !copy;
-
- switch (layout.type) {
- case C2PlanarLayout::TYPE_YUV: {
- mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
- if (layout.numPlanes != 3) {
- ALOGD("Converter: %d planes for YUV layout", layout.numPlanes);
- mInitCheck = BAD_VALUE;
- return;
- }
- std::optional<int> clientBitDepth = {};
- switch (mClientColorFormat) {
- case COLOR_FormatYUVP010:
- clientBitDepth = 10;
- break;
- case COLOR_FormatYUV411PackedPlanar:
- case COLOR_FormatYUV411Planar:
- case COLOR_FormatYUV420Flexible:
- case COLOR_FormatYUV420PackedPlanar:
- case COLOR_FormatYUV420PackedSemiPlanar:
- case COLOR_FormatYUV420Planar:
- case COLOR_FormatYUV420SemiPlanar:
- case COLOR_FormatYUV422Flexible:
- case COLOR_FormatYUV422PackedPlanar:
- case COLOR_FormatYUV422PackedSemiPlanar:
- case COLOR_FormatYUV422Planar:
- case COLOR_FormatYUV422SemiPlanar:
- case COLOR_FormatYUV444Flexible:
- case COLOR_FormatYUV444Interleaved:
- clientBitDepth = 8;
- break;
- default:
- // no-op; used with optional
- break;
-
- }
- // conversion fails if client bit-depth and the component bit-depth differs
- if ((clientBitDepth) && (bitDepth != clientBitDepth.value())) {
- ALOGD("Bit depth of client: %d and component: %d differs",
- *clientBitDepth, bitDepth);
- mInitCheck = BAD_VALUE;
- return;
- }
- C2PlaneInfo yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
- C2PlaneInfo uPlane = layout.planes[C2PlanarLayout::PLANE_U];
- C2PlaneInfo vPlane = layout.planes[C2PlanarLayout::PLANE_V];
- if (yPlane.channel != C2PlaneInfo::CHANNEL_Y
- || uPlane.channel != C2PlaneInfo::CHANNEL_CB
- || vPlane.channel != C2PlaneInfo::CHANNEL_CR) {
- ALOGD("Converter: not YUV layout");
- mInitCheck = BAD_VALUE;
- return;
- }
- bool yuv420888 = yPlane.rowSampling == 1 && yPlane.colSampling == 1
- && uPlane.rowSampling == 2 && uPlane.colSampling == 2
- && vPlane.rowSampling == 2 && vPlane.colSampling == 2;
- if (yuv420888) {
- for (uint32_t i = 0; i < 3; ++i) {
- const C2PlaneInfo &plane = layout.planes[i];
- if (plane.allocatedDepth != 8 || plane.bitDepth != 8) {
- yuv420888 = false;
- break;
- }
- }
- yuv420888 = yuv420888 && yPlane.colInc == 1 && uPlane.rowInc == vPlane.rowInc;
- }
- int32_t copyFormat = mClientColorFormat;
- if (yuv420888 && mClientColorFormat == COLOR_FormatYUV420Flexible) {
- if (uPlane.colInc == 2 && vPlane.colInc == 2
- && yPlane.rowInc == uPlane.rowInc) {
- copyFormat = COLOR_FormatYUV420PackedSemiPlanar;
- } else if (uPlane.colInc == 1 && vPlane.colInc == 1
- && yPlane.rowInc == uPlane.rowInc * 2) {
- copyFormat = COLOR_FormatYUV420PackedPlanar;
- }
- }
- ALOGV("client_fmt=0x%x y:{colInc=%d rowInc=%d} u:{colInc=%d rowInc=%d} "
- "v:{colInc=%d rowInc=%d}",
- mClientColorFormat,
- yPlane.colInc, yPlane.rowInc,
- uPlane.colInc, uPlane.rowInc,
- vPlane.colInc, vPlane.rowInc);
- switch (copyFormat) {
- case COLOR_FormatYUV420Flexible:
- case COLOR_FormatYUV420Planar:
- case COLOR_FormatYUV420PackedPlanar:
- mediaImage->mPlane[mediaImage->Y].mOffset = 0;
- mediaImage->mPlane[mediaImage->Y].mColInc = 1;
- mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
- mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
- mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
-
- mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
- mediaImage->mPlane[mediaImage->U].mColInc = 1;
- mediaImage->mPlane[mediaImage->U].mRowInc = stride / 2;
- mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
- mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
-
- mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 5 / 4;
- mediaImage->mPlane[mediaImage->V].mColInc = 1;
- mediaImage->mPlane[mediaImage->V].mRowInc = stride / 2;
- mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
- mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
-
- if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
- tryWrapping = yuv420888 && uPlane.colInc == 1 && vPlane.colInc == 1
- && yPlane.rowInc == uPlane.rowInc * 2
- && view.data()[0] < view.data()[1]
- && view.data()[1] < view.data()[2];
- }
- break;
-
- case COLOR_FormatYUV420SemiPlanar:
- case COLOR_FormatYUV420PackedSemiPlanar:
- mediaImage->mPlane[mediaImage->Y].mOffset = 0;
- mediaImage->mPlane[mediaImage->Y].mColInc = 1;
- mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
- mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
- mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
-
- mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
- mediaImage->mPlane[mediaImage->U].mColInc = 2;
- mediaImage->mPlane[mediaImage->U].mRowInc = stride;
- mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
- mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
-
- mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 1;
- mediaImage->mPlane[mediaImage->V].mColInc = 2;
- mediaImage->mPlane[mediaImage->V].mRowInc = stride;
- mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
- mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
-
- if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
- tryWrapping = yuv420888 && uPlane.colInc == 2 && vPlane.colInc == 2
- && yPlane.rowInc == uPlane.rowInc
- && view.data()[0] < view.data()[1]
- && view.data()[1] < view.data()[2];
- }
- break;
-
- case COLOR_FormatYUVP010:
- // stride is in bytes
- mediaImage->mPlane[mediaImage->Y].mOffset = 0;
- mediaImage->mPlane[mediaImage->Y].mColInc = 2;
- mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
- mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
- mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
-
- mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
- mediaImage->mPlane[mediaImage->U].mColInc = 4;
- mediaImage->mPlane[mediaImage->U].mRowInc = stride;
- mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
- mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
-
- mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 2;
- mediaImage->mPlane[mediaImage->V].mColInc = 4;
- mediaImage->mPlane[mediaImage->V].mRowInc = stride;
- mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
- mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
- if (tryWrapping) {
- tryWrapping = yPlane.allocatedDepth == 16
- && uPlane.allocatedDepth == 16
- && vPlane.allocatedDepth == 16
- && yPlane.bitDepth == 10
- && uPlane.bitDepth == 10
- && vPlane.bitDepth == 10
- && yPlane.rightShift == 6
- && uPlane.rightShift == 6
- && vPlane.rightShift == 6
- && yPlane.rowSampling == 1 && yPlane.colSampling == 1
- && uPlane.rowSampling == 2 && uPlane.colSampling == 2
- && vPlane.rowSampling == 2 && vPlane.colSampling == 2
- && yPlane.colInc == 2
- && uPlane.colInc == 4
- && vPlane.colInc == 4
- && yPlane.rowInc == uPlane.rowInc
- && yPlane.rowInc == vPlane.rowInc;
- }
- break;
-
- default: {
- // default to fully planar format --- this will be overridden if wrapping
- // TODO: keep interleaved format
- int32_t colInc = divUp(mAllocatedDepth, 8u);
- int32_t rowInc = stride * colInc / yPlane.colSampling;
- mediaImage->mPlane[mediaImage->Y].mOffset = 0;
- mediaImage->mPlane[mediaImage->Y].mColInc = colInc;
- mediaImage->mPlane[mediaImage->Y].mRowInc = rowInc;
- mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = yPlane.colSampling;
- mediaImage->mPlane[mediaImage->Y].mVertSubsampling = yPlane.rowSampling;
- int32_t offset = rowInc * vStride / yPlane.rowSampling;
-
- rowInc = stride * colInc / uPlane.colSampling;
- mediaImage->mPlane[mediaImage->U].mOffset = offset;
- mediaImage->mPlane[mediaImage->U].mColInc = colInc;
- mediaImage->mPlane[mediaImage->U].mRowInc = rowInc;
- mediaImage->mPlane[mediaImage->U].mHorizSubsampling = uPlane.colSampling;
- mediaImage->mPlane[mediaImage->U].mVertSubsampling = uPlane.rowSampling;
- offset += rowInc * vStride / uPlane.rowSampling;
-
- rowInc = stride * colInc / vPlane.colSampling;
- mediaImage->mPlane[mediaImage->V].mOffset = offset;
- mediaImage->mPlane[mediaImage->V].mColInc = colInc;
- mediaImage->mPlane[mediaImage->V].mRowInc = rowInc;
- mediaImage->mPlane[mediaImage->V].mHorizSubsampling = vPlane.colSampling;
- mediaImage->mPlane[mediaImage->V].mVertSubsampling = vPlane.rowSampling;
- break;
- }
- }
- break;
- }
-
- case C2PlanarLayout::TYPE_YUVA:
- ALOGD("Converter: unrecognized color format "
- "(client %d component %d) for YUVA layout",
- mClientColorFormat, mComponentColorFormat);
- mInitCheck = NO_INIT;
- return;
- case C2PlanarLayout::TYPE_RGB:
- mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGB;
- // TODO: support MediaImage layout
- switch (mClientColorFormat) {
- case COLOR_FormatSurface:
- case COLOR_FormatRGBFlexible:
- case COLOR_Format24bitBGR888:
- case COLOR_Format24bitRGB888:
- ALOGD("Converter: accept color format "
- "(client %d component %d) for RGB layout",
- mClientColorFormat, mComponentColorFormat);
- break;
- default:
- ALOGD("Converter: unrecognized color format "
- "(client %d component %d) for RGB layout",
- mClientColorFormat, mComponentColorFormat);
- mInitCheck = BAD_VALUE;
- return;
- }
- if (layout.numPlanes != 3) {
- ALOGD("Converter: %d planes for RGB layout", layout.numPlanes);
- mInitCheck = BAD_VALUE;
- return;
- }
- break;
- case C2PlanarLayout::TYPE_RGBA:
- mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGBA;
- // TODO: support MediaImage layout
- switch (mClientColorFormat) {
- case COLOR_FormatSurface:
- case COLOR_FormatRGBAFlexible:
- case COLOR_Format32bitABGR8888:
- case COLOR_Format32bitARGB8888:
- case COLOR_Format32bitBGRA8888:
- ALOGD("Converter: accept color format "
- "(client %d component %d) for RGBA layout",
- mClientColorFormat, mComponentColorFormat);
- break;
- default:
- ALOGD("Converter: unrecognized color format "
- "(client %d component %d) for RGBA layout",
- mClientColorFormat, mComponentColorFormat);
- mInitCheck = BAD_VALUE;
- return;
- }
- if (layout.numPlanes != 4) {
- ALOGD("Converter: %d planes for RGBA layout", layout.numPlanes);
- mInitCheck = BAD_VALUE;
- return;
- }
- break;
- default:
- mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
- if (layout.numPlanes == 1) {
- const C2PlaneInfo &plane = layout.planes[0];
- if (plane.colInc < 0 || plane.rowInc < 0) {
- // Copy-only if we have negative colInc/rowInc
- tryWrapping = false;
- }
- mediaImage->mPlane[0].mOffset = 0;
- mediaImage->mPlane[0].mColInc = std::abs(plane.colInc);
- mediaImage->mPlane[0].mRowInc = std::abs(plane.rowInc);
- mediaImage->mPlane[0].mHorizSubsampling = plane.colSampling;
- mediaImage->mPlane[0].mVertSubsampling = plane.rowSampling;
- } else {
- ALOGD("Converter: unrecognized layout: color format (client %d component %d)",
- mClientColorFormat, mComponentColorFormat);
- mInitCheck = NO_INIT;
- return;
- }
- break;
- }
- if (tryWrapping) {
- // try to map directly. check if the planes are near one another
- const uint8_t *minPtr = mView.data()[0];
- const uint8_t *maxPtr = mView.data()[0];
- int32_t planeSize = 0;
- for (uint32_t i = 0; i < layout.numPlanes; ++i) {
- const C2PlaneInfo &plane = layout.planes[i];
- int64_t planeStride = std::abs(plane.rowInc / plane.colInc);
- ssize_t minOffset = plane.minOffset(
- mWidth / plane.colSampling, mHeight / plane.rowSampling);
- ssize_t maxOffset = plane.maxOffset(
- mWidth / plane.colSampling, mHeight / plane.rowSampling);
- if (minPtr > mView.data()[i] + minOffset) {
- minPtr = mView.data()[i] + minOffset;
- }
- if (maxPtr < mView.data()[i] + maxOffset) {
- maxPtr = mView.data()[i] + maxOffset;
- }
- planeSize += planeStride * divUp(mAllocatedDepth, 8u)
- * align(mHeight, 64) / plane.rowSampling;
- }
-
- if (minPtr == mView.data()[0] && (maxPtr - minPtr) <= planeSize) {
- // FIXME: this is risky as reading/writing data out of bound results
- // in an undefined behavior, but gralloc does assume a
- // contiguous mapping
- for (uint32_t i = 0; i < layout.numPlanes; ++i) {
- const C2PlaneInfo &plane = layout.planes[i];
- mediaImage->mPlane[i].mOffset = mView.data()[i] - minPtr;
- mediaImage->mPlane[i].mColInc = plane.colInc;
- mediaImage->mPlane[i].mRowInc = plane.rowInc;
- mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
- mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
- }
- mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr), maxPtr - minPtr);
- ALOGV("Converter: wrapped (capacity=%zu)", mWrapped->capacity());
- }
- }
- mediaImage->mNumPlanes = layout.numPlanes;
- mediaImage->mWidth = view.crop().width;
- mediaImage->mHeight = view.crop().height;
- mediaImage->mBitDepth = bitDepth;
- mediaImage->mBitDepthAllocated = mAllocatedDepth;
-
- uint32_t bufferSize = 0;
- for (uint32_t i = 0; i < layout.numPlanes; ++i) {
- const C2PlaneInfo &plane = layout.planes[i];
- if (plane.allocatedDepth < plane.bitDepth
- || plane.rightShift != plane.allocatedDepth - plane.bitDepth) {
- ALOGD("rightShift value of %u unsupported", plane.rightShift);
- mInitCheck = BAD_VALUE;
- return;
- }
- if (plane.allocatedDepth > 8 && plane.endianness != C2PlaneInfo::NATIVE) {
- ALOGD("endianness value of %u unsupported", plane.endianness);
- mInitCheck = BAD_VALUE;
- return;
- }
- if (plane.allocatedDepth != mAllocatedDepth || plane.bitDepth != bitDepth) {
- ALOGD("different allocatedDepth/bitDepth per plane unsupported");
- mInitCheck = BAD_VALUE;
- return;
- }
- // stride is in bytes
- bufferSize += stride * vStride / plane.rowSampling / plane.colSampling;
- }
-
- mBackBufferSize = bufferSize;
- mInitCheck = OK;
- }
-
- status_t initCheck() const { return mInitCheck; }
-
- uint32_t backBufferSize() const { return mBackBufferSize; }
-
- /**
- * Wrap C2GraphicView using a MediaImage2. Note that if not wrapped, the content is not mapped
- * in this function --- the caller should use CopyGraphicView2MediaImage() function to copy the
- * data into a backing buffer explicitly.
- *
- * \return media buffer. This is null if wrapping failed.
- */
- sp<ABuffer> wrap() const {
- if (mBackBuffer == nullptr) {
- return mWrapped;
- }
- return nullptr;
- }
-
- bool setBackBuffer(const sp<ABuffer> &backBuffer) {
- if (backBuffer == nullptr) {
- return false;
- }
- if (backBuffer->capacity() < mBackBufferSize) {
- return false;
- }
- backBuffer->setRange(0, mBackBufferSize);
- mBackBuffer = backBuffer;
- return true;
- }
-
- /**
- * Copy C2GraphicView to MediaImage2.
- */
- status_t copyToMediaImage() {
- ATRACE_CALL();
- if (mInitCheck != OK) {
- return mInitCheck;
- }
- return ImageCopy(mBackBuffer->base(), getMediaImage(), mView);
- }
-
- const sp<ABuffer> &imageData() const { return mMediaImage; }
-
-private:
- status_t mInitCheck;
-
- const C2GraphicView mView;
- uint32_t mWidth;
- uint32_t mHeight;
- int32_t mClientColorFormat; ///< SDK color format for MediaImage
- int32_t mComponentColorFormat; ///< SDK color format from component
- sp<ABuffer> mWrapped; ///< wrapped buffer (if we can map C2Buffer to an ABuffer)
- uint32_t mAllocatedDepth;
- uint32_t mBackBufferSize;
- sp<ABuffer> mMediaImage;
- std::function<sp<ABuffer>(size_t)> mAlloc;
-
- sp<ABuffer> mBackBuffer; ///< backing buffer if we have to copy C2Buffer <=> ABuffer
-
- MediaImage2 *getMediaImage() {
- return (MediaImage2 *)mMediaImage->base();
- }
-};
-
-} // namespace
-
// GraphicBlockBuffer
// static
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index 5e96921..8c5e909 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -44,28 +44,6 @@
} // namespace drm
} // namespace hardware
-/**
- * Copies a graphic view into a media image.
- *
- * \param imgBase base of MediaImage
- * \param img MediaImage data
- * \param view graphic view
- *
- * \return OK on success
- */
-status_t ImageCopy(uint8_t *imgBase, const MediaImage2 *img, const C2GraphicView &view);
-
-/**
- * Copies a media image into a graphic view.
- *
- * \param view graphic view
- * \param imgBase base of MediaImage
- * \param img MediaImage data
- *
- * \return OK on success
- */
-status_t ImageCopy(C2GraphicView &view, const uint8_t *imgBase, const MediaImage2 *img);
-
class Codec2Buffer : public MediaCodecBuffer {
public:
using MediaCodecBuffer::MediaCodecBuffer;
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 75e9bbc..574f1b9 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -27,7 +27,10 @@
#include <android/hardware_buffer.h>
#include <media/hardware/HardwareAPI.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/MediaCodecConstants.h>
#include <C2Debug.h>
@@ -787,4 +790,438 @@
return MemoryBlockPool().fetch(size);
}
+GraphicView2MediaImageConverter::GraphicView2MediaImageConverter(
+ const C2GraphicView &view, const sp<AMessage> &format, bool copy)
+ : mInitCheck(NO_INIT),
+ mView(view),
+ mWidth(view.width()),
+ mHeight(view.height()),
+ mAllocatedDepth(0),
+ mBackBufferSize(0),
+ mMediaImage(new ABuffer(sizeof(MediaImage2))) {
+ ATRACE_CALL();
+ if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
+ mClientColorFormat = COLOR_FormatYUV420Flexible;
+ }
+ if (!format->findInt32("android._color-format", &mComponentColorFormat)) {
+ mComponentColorFormat = COLOR_FormatYUV420Flexible;
+ }
+ if (view.error() != C2_OK) {
+ ALOGD("Converter: view.error() = %d", view.error());
+ mInitCheck = BAD_VALUE;
+ return;
+ }
+ MediaImage2 *mediaImage = (MediaImage2 *)mMediaImage->base();
+ const C2PlanarLayout &layout = view.layout();
+ if (layout.numPlanes == 0) {
+ ALOGD("Converter: 0 planes");
+ mInitCheck = BAD_VALUE;
+ return;
+ }
+ memset(mediaImage, 0, sizeof(*mediaImage));
+ mAllocatedDepth = layout.planes[0].allocatedDepth;
+ uint32_t bitDepth = layout.planes[0].bitDepth;
+
+ // align width and height to support subsampling cleanly
+ uint32_t stride = align(view.crop().width, 2) * divUp(layout.planes[0].allocatedDepth, 8u);
+ uint32_t vStride = align(view.crop().height, 2);
+
+ bool tryWrapping = !copy;
+
+ switch (layout.type) {
+ case C2PlanarLayout::TYPE_YUV: {
+ mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
+ if (layout.numPlanes != 3) {
+ ALOGD("Converter: %d planes for YUV layout", layout.numPlanes);
+ mInitCheck = BAD_VALUE;
+ return;
+ }
+ std::optional<int> clientBitDepth = {};
+ switch (mClientColorFormat) {
+ case COLOR_FormatYUVP010:
+ clientBitDepth = 10;
+ break;
+ case COLOR_FormatYUV411PackedPlanar:
+ case COLOR_FormatYUV411Planar:
+ case COLOR_FormatYUV420Flexible:
+ case COLOR_FormatYUV420PackedPlanar:
+ case COLOR_FormatYUV420PackedSemiPlanar:
+ case COLOR_FormatYUV420Planar:
+ case COLOR_FormatYUV420SemiPlanar:
+ case COLOR_FormatYUV422Flexible:
+ case COLOR_FormatYUV422PackedPlanar:
+ case COLOR_FormatYUV422PackedSemiPlanar:
+ case COLOR_FormatYUV422Planar:
+ case COLOR_FormatYUV422SemiPlanar:
+ case COLOR_FormatYUV444Flexible:
+ case COLOR_FormatYUV444Interleaved:
+ clientBitDepth = 8;
+ break;
+ default:
+ // no-op; used with optional
+ break;
+
+ }
+ // conversion fails if client bit-depth and the component bit-depth differs
+ if ((clientBitDepth) && (bitDepth != clientBitDepth.value())) {
+ ALOGD("Bit depth of client: %d and component: %d differs",
+ *clientBitDepth, bitDepth);
+ mInitCheck = BAD_VALUE;
+ return;
+ }
+ C2PlaneInfo yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
+ C2PlaneInfo uPlane = layout.planes[C2PlanarLayout::PLANE_U];
+ C2PlaneInfo vPlane = layout.planes[C2PlanarLayout::PLANE_V];
+ if (yPlane.channel != C2PlaneInfo::CHANNEL_Y
+ || uPlane.channel != C2PlaneInfo::CHANNEL_CB
+ || vPlane.channel != C2PlaneInfo::CHANNEL_CR) {
+ ALOGD("Converter: not YUV layout");
+ mInitCheck = BAD_VALUE;
+ return;
+ }
+ bool yuv420888 = yPlane.rowSampling == 1 && yPlane.colSampling == 1
+ && uPlane.rowSampling == 2 && uPlane.colSampling == 2
+ && vPlane.rowSampling == 2 && vPlane.colSampling == 2;
+ if (yuv420888) {
+ for (uint32_t i = 0; i < 3; ++i) {
+ const C2PlaneInfo &plane = layout.planes[i];
+ if (plane.allocatedDepth != 8 || plane.bitDepth != 8) {
+ yuv420888 = false;
+ break;
+ }
+ }
+ yuv420888 = yuv420888 && yPlane.colInc == 1 && uPlane.rowInc == vPlane.rowInc;
+ }
+ int32_t copyFormat = mClientColorFormat;
+ if (yuv420888 && mClientColorFormat == COLOR_FormatYUV420Flexible) {
+ if (uPlane.colInc == 2 && vPlane.colInc == 2
+ && yPlane.rowInc == uPlane.rowInc) {
+ copyFormat = COLOR_FormatYUV420PackedSemiPlanar;
+ } else if (uPlane.colInc == 1 && vPlane.colInc == 1
+ && yPlane.rowInc == uPlane.rowInc * 2) {
+ copyFormat = COLOR_FormatYUV420PackedPlanar;
+ }
+ }
+ ALOGV("client_fmt=0x%x y:{colInc=%d rowInc=%d} u:{colInc=%d rowInc=%d} "
+ "v:{colInc=%d rowInc=%d}",
+ mClientColorFormat,
+ yPlane.colInc, yPlane.rowInc,
+ uPlane.colInc, uPlane.rowInc,
+ vPlane.colInc, vPlane.rowInc);
+ switch (copyFormat) {
+ case COLOR_FormatYUV420Flexible:
+ case COLOR_FormatYUV420Planar:
+ case COLOR_FormatYUV420PackedPlanar:
+ mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+ mediaImage->mPlane[mediaImage->Y].mColInc = 1;
+ mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
+ mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
+ mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
+
+ mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
+ mediaImage->mPlane[mediaImage->U].mColInc = 1;
+ mediaImage->mPlane[mediaImage->U].mRowInc = stride / 2;
+ mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
+ mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
+
+ mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 5 / 4;
+ mediaImage->mPlane[mediaImage->V].mColInc = 1;
+ mediaImage->mPlane[mediaImage->V].mRowInc = stride / 2;
+ mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
+ mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+
+ if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
+ tryWrapping = yuv420888 && uPlane.colInc == 1 && vPlane.colInc == 1
+ && yPlane.rowInc == uPlane.rowInc * 2
+ && view.data()[0] < view.data()[1]
+ && view.data()[1] < view.data()[2];
+ }
+ break;
+
+ case COLOR_FormatYUV420SemiPlanar:
+ case COLOR_FormatYUV420PackedSemiPlanar:
+ mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+ mediaImage->mPlane[mediaImage->Y].mColInc = 1;
+ mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
+ mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
+ mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
+
+ mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
+ mediaImage->mPlane[mediaImage->U].mColInc = 2;
+ mediaImage->mPlane[mediaImage->U].mRowInc = stride;
+ mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
+ mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
+
+ mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 1;
+ mediaImage->mPlane[mediaImage->V].mColInc = 2;
+ mediaImage->mPlane[mediaImage->V].mRowInc = stride;
+ mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
+ mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+
+ if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
+ tryWrapping = yuv420888 && uPlane.colInc == 2 && vPlane.colInc == 2
+ && yPlane.rowInc == uPlane.rowInc
+ && view.data()[0] < view.data()[1]
+ && view.data()[1] < view.data()[2];
+ }
+ break;
+
+ case COLOR_FormatYUVP010:
+ // stride is in bytes
+ mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+ mediaImage->mPlane[mediaImage->Y].mColInc = 2;
+ mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
+ mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
+ mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
+
+ mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
+ mediaImage->mPlane[mediaImage->U].mColInc = 4;
+ mediaImage->mPlane[mediaImage->U].mRowInc = stride;
+ mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
+ mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
+
+ mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 2;
+ mediaImage->mPlane[mediaImage->V].mColInc = 4;
+ mediaImage->mPlane[mediaImage->V].mRowInc = stride;
+ mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
+ mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+ if (tryWrapping) {
+ tryWrapping = yPlane.allocatedDepth == 16
+ && uPlane.allocatedDepth == 16
+ && vPlane.allocatedDepth == 16
+ && yPlane.bitDepth == 10
+ && uPlane.bitDepth == 10
+ && vPlane.bitDepth == 10
+ && yPlane.rightShift == 6
+ && uPlane.rightShift == 6
+ && vPlane.rightShift == 6
+ && yPlane.rowSampling == 1 && yPlane.colSampling == 1
+ && uPlane.rowSampling == 2 && uPlane.colSampling == 2
+ && vPlane.rowSampling == 2 && vPlane.colSampling == 2
+ && yPlane.colInc == 2
+ && uPlane.colInc == 4
+ && vPlane.colInc == 4
+ && yPlane.rowInc == uPlane.rowInc
+ && yPlane.rowInc == vPlane.rowInc;
+ }
+ break;
+
+ default: {
+ // default to fully planar format --- this will be overridden if wrapping
+ // TODO: keep interleaved format
+ int32_t colInc = divUp(mAllocatedDepth, 8u);
+ int32_t rowInc = stride * colInc / yPlane.colSampling;
+ mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+ mediaImage->mPlane[mediaImage->Y].mColInc = colInc;
+ mediaImage->mPlane[mediaImage->Y].mRowInc = rowInc;
+ mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = yPlane.colSampling;
+ mediaImage->mPlane[mediaImage->Y].mVertSubsampling = yPlane.rowSampling;
+ int32_t offset = rowInc * vStride / yPlane.rowSampling;
+
+ rowInc = stride * colInc / uPlane.colSampling;
+ mediaImage->mPlane[mediaImage->U].mOffset = offset;
+ mediaImage->mPlane[mediaImage->U].mColInc = colInc;
+ mediaImage->mPlane[mediaImage->U].mRowInc = rowInc;
+ mediaImage->mPlane[mediaImage->U].mHorizSubsampling = uPlane.colSampling;
+ mediaImage->mPlane[mediaImage->U].mVertSubsampling = uPlane.rowSampling;
+ offset += rowInc * vStride / uPlane.rowSampling;
+
+ rowInc = stride * colInc / vPlane.colSampling;
+ mediaImage->mPlane[mediaImage->V].mOffset = offset;
+ mediaImage->mPlane[mediaImage->V].mColInc = colInc;
+ mediaImage->mPlane[mediaImage->V].mRowInc = rowInc;
+ mediaImage->mPlane[mediaImage->V].mHorizSubsampling = vPlane.colSampling;
+ mediaImage->mPlane[mediaImage->V].mVertSubsampling = vPlane.rowSampling;
+ break;
+ }
+ }
+ break;
+ }
+
+ case C2PlanarLayout::TYPE_YUVA:
+ ALOGD("Converter: unrecognized color format "
+ "(client %d component %d) for YUVA layout",
+ mClientColorFormat, mComponentColorFormat);
+ mInitCheck = NO_INIT;
+ return;
+ case C2PlanarLayout::TYPE_RGB:
+ mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGB;
+ // TODO: support MediaImage layout
+ switch (mClientColorFormat) {
+ case COLOR_FormatSurface:
+ case COLOR_FormatRGBFlexible:
+ case COLOR_Format24bitBGR888:
+ case COLOR_Format24bitRGB888:
+ ALOGD("Converter: accept color format "
+ "(client %d component %d) for RGB layout",
+ mClientColorFormat, mComponentColorFormat);
+ break;
+ default:
+ ALOGD("Converter: unrecognized color format "
+ "(client %d component %d) for RGB layout",
+ mClientColorFormat, mComponentColorFormat);
+ mInitCheck = BAD_VALUE;
+ return;
+ }
+ if (layout.numPlanes != 3) {
+ ALOGD("Converter: %d planes for RGB layout", layout.numPlanes);
+ mInitCheck = BAD_VALUE;
+ return;
+ }
+ break;
+ case C2PlanarLayout::TYPE_RGBA:
+ mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGBA;
+ // TODO: support MediaImage layout
+ switch (mClientColorFormat) {
+ case COLOR_FormatSurface:
+ case COLOR_FormatRGBAFlexible:
+ case COLOR_Format32bitABGR8888:
+ case COLOR_Format32bitARGB8888:
+ case COLOR_Format32bitBGRA8888:
+ ALOGD("Converter: accept color format "
+ "(client %d component %d) for RGBA layout",
+ mClientColorFormat, mComponentColorFormat);
+ break;
+ default:
+ ALOGD("Converter: unrecognized color format "
+ "(client %d component %d) for RGBA layout",
+ mClientColorFormat, mComponentColorFormat);
+ mInitCheck = BAD_VALUE;
+ return;
+ }
+ if (layout.numPlanes != 4) {
+ ALOGD("Converter: %d planes for RGBA layout", layout.numPlanes);
+ mInitCheck = BAD_VALUE;
+ return;
+ }
+ break;
+ default:
+ mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
+ if (layout.numPlanes == 1) {
+ const C2PlaneInfo &plane = layout.planes[0];
+ if (plane.colInc < 0 || plane.rowInc < 0) {
+ // Copy-only if we have negative colInc/rowInc
+ tryWrapping = false;
+ }
+ mediaImage->mPlane[0].mOffset = 0;
+ mediaImage->mPlane[0].mColInc = std::abs(plane.colInc);
+ mediaImage->mPlane[0].mRowInc = std::abs(plane.rowInc);
+ mediaImage->mPlane[0].mHorizSubsampling = plane.colSampling;
+ mediaImage->mPlane[0].mVertSubsampling = plane.rowSampling;
+ } else {
+ ALOGD("Converter: unrecognized layout: color format (client %d component %d)",
+ mClientColorFormat, mComponentColorFormat);
+ mInitCheck = NO_INIT;
+ return;
+ }
+ break;
+ }
+ if (tryWrapping) {
+ // try to map directly. check if the planes are near one another
+ const uint8_t *minPtr = mView.data()[0];
+ const uint8_t *maxPtr = mView.data()[0];
+ int32_t planeSize = 0;
+ for (uint32_t i = 0; i < layout.numPlanes; ++i) {
+ const C2PlaneInfo &plane = layout.planes[i];
+ int64_t planeStride = std::abs(plane.rowInc / plane.colInc);
+ ssize_t minOffset = plane.minOffset(
+ mWidth / plane.colSampling, mHeight / plane.rowSampling);
+ ssize_t maxOffset = plane.maxOffset(
+ mWidth / plane.colSampling, mHeight / plane.rowSampling);
+ if (minPtr > mView.data()[i] + minOffset) {
+ minPtr = mView.data()[i] + minOffset;
+ }
+ if (maxPtr < mView.data()[i] + maxOffset) {
+ maxPtr = mView.data()[i] + maxOffset;
+ }
+ planeSize += planeStride * divUp(mAllocatedDepth, 8u)
+ * align(mHeight, 64) / plane.rowSampling;
+ }
+
+ if (minPtr == mView.data()[0] && (maxPtr - minPtr) <= planeSize) {
+ // FIXME: this is risky as reading/writing data out of bound results
+ // in an undefined behavior, but gralloc does assume a
+ // contiguous mapping
+ for (uint32_t i = 0; i < layout.numPlanes; ++i) {
+ const C2PlaneInfo &plane = layout.planes[i];
+ mediaImage->mPlane[i].mOffset = mView.data()[i] - minPtr;
+ mediaImage->mPlane[i].mColInc = plane.colInc;
+ mediaImage->mPlane[i].mRowInc = plane.rowInc;
+ mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
+ mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
+ }
+ mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr), maxPtr - minPtr);
+ ALOGV("Converter: wrapped (capacity=%zu)", mWrapped->capacity());
+ }
+ }
+ mediaImage->mNumPlanes = layout.numPlanes;
+ mediaImage->mWidth = view.crop().width;
+ mediaImage->mHeight = view.crop().height;
+ mediaImage->mBitDepth = bitDepth;
+ mediaImage->mBitDepthAllocated = mAllocatedDepth;
+
+ uint32_t bufferSize = 0;
+ for (uint32_t i = 0; i < layout.numPlanes; ++i) {
+ const C2PlaneInfo &plane = layout.planes[i];
+ if (plane.allocatedDepth < plane.bitDepth
+ || plane.rightShift != plane.allocatedDepth - plane.bitDepth) {
+ ALOGD("rightShift value of %u unsupported", plane.rightShift);
+ mInitCheck = BAD_VALUE;
+ return;
+ }
+ if (plane.allocatedDepth > 8 && plane.endianness != C2PlaneInfo::NATIVE) {
+ ALOGD("endianness value of %u unsupported", plane.endianness);
+ mInitCheck = BAD_VALUE;
+ return;
+ }
+ if (plane.allocatedDepth != mAllocatedDepth || plane.bitDepth != bitDepth) {
+ ALOGD("different allocatedDepth/bitDepth per plane unsupported");
+ mInitCheck = BAD_VALUE;
+ return;
+ }
+ // stride is in bytes
+ bufferSize += stride * vStride / plane.rowSampling / plane.colSampling;
+ }
+
+ mBackBufferSize = bufferSize;
+ mInitCheck = OK;
+}
+
+status_t GraphicView2MediaImageConverter::initCheck() const { return mInitCheck; }
+
+uint32_t GraphicView2MediaImageConverter::backBufferSize() const { return mBackBufferSize; }
+
+sp<ABuffer> GraphicView2MediaImageConverter::wrap() const {
+ if (mBackBuffer == nullptr) {
+ return mWrapped;
+ }
+ return nullptr;
+}
+
+bool GraphicView2MediaImageConverter::setBackBuffer(const sp<ABuffer> &backBuffer) {
+ if (backBuffer == nullptr) {
+ return false;
+ }
+ if (backBuffer->capacity() < mBackBufferSize) {
+ return false;
+ }
+ backBuffer->setRange(0, mBackBufferSize);
+ mBackBuffer = backBuffer;
+ return true;
+}
+
+status_t GraphicView2MediaImageConverter::copyToMediaImage() {
+ ATRACE_CALL();
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+ return ImageCopy(mBackBuffer->base(), getMediaImage(), mView);
+}
+
+const sp<ABuffer> &GraphicView2MediaImageConverter::imageData() const { return mMediaImage; }
+
+MediaImage2 *GraphicView2MediaImageConverter::getMediaImage() {
+ return (MediaImage2 *)mMediaImage->base();
+}
+
} // namespace android
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index 6b0ba7f..8daf3d8 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -22,6 +22,7 @@
#include <C2ParamDef.h>
#include <media/hardware/VideoAPI.h>
+#include <utils/StrongPointer.h>
#include <utils/Errors.h>
namespace android {
@@ -194,6 +195,61 @@
std::shared_ptr<Impl> mImpl;
};
+struct ABuffer;
+struct AMessage;
+
+class GraphicView2MediaImageConverter {
+public:
+ /**
+ * Creates a C2GraphicView <=> MediaImage converter
+ *
+ * \param view C2GraphicView object
+ * \param format buffer format
+ * \param copy whether the converter is used for copying or not
+ */
+ GraphicView2MediaImageConverter(
+ const C2GraphicView &view, const sp<AMessage> &format, bool copy);
+
+ status_t initCheck() const;
+
+ uint32_t backBufferSize() const;
+
+ /**
+ * Wraps the C2GraphicView using a MediaImage2. Note that if not wrapped, the content is not
+ * mapped in this function --- the caller should use the CopyGraphicView2MediaImage() function
+ * to copy the data into a backing buffer explicitly.
+ *
+ * \return media buffer. This is null if wrapping failed.
+ */
+ sp<ABuffer> wrap() const;
+
+ bool setBackBuffer(const sp<ABuffer> &backBuffer);
+
+ /**
+ * Copy C2GraphicView to MediaImage2.
+ */
+ status_t copyToMediaImage();
+
+ const sp<ABuffer> &imageData() const;
+
+private:
+ status_t mInitCheck;
+
+ const C2GraphicView mView;
+ uint32_t mWidth;
+ uint32_t mHeight;
+ int32_t mClientColorFormat; ///< SDK color format for MediaImage
+ int32_t mComponentColorFormat; ///< SDK color format from component
+ sp<ABuffer> mWrapped; ///< wrapped buffer (if we can map C2Buffer to an ABuffer)
+ uint32_t mAllocatedDepth;
+ uint32_t mBackBufferSize;
+ sp<ABuffer> mMediaImage;
+
+ sp<ABuffer> mBackBuffer; ///< backing buffer if we have to copy C2Buffer <=> ABuffer
+
+ MediaImage2 *getMediaImage();
+};
+
} // namespace android
#endif // CODEC2_BUFFER_UTILS_H_
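Editor's note: a minimal sketch of how a caller might drive the converter declared above, based only on the methods in this header; the `view`, `format`, and status constants used are placeholders for illustration, not part of this change:

    // Hypothetical caller: prefer the zero-copy wrap() path, fall back to a copy.
    GraphicView2MediaImageConverter converter(view, format, false /* copy */);
    if (converter.initCheck() != OK) {
        return converter.initCheck();
    }
    sp<ABuffer> buffer = converter.wrap();  // non-null if the view could be wrapped in place
    if (buffer == nullptr) {
        // Slow path: allocate a backing buffer of the advertised size and copy the planes.
        sp<ABuffer> backing = new ABuffer(converter.backBufferSize());
        if (!converter.setBackBuffer(backing) || converter.copyToMediaImage() != OK) {
            return UNKNOWN_ERROR;
        }
        buffer = backing;
    }
    // converter.imageData() now holds the MediaImage2 describing the plane layout of `buffer`.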
diff --git a/media/codec2/vndk/C2Buffer.cpp b/media/codec2/vndk/C2Buffer.cpp
index 7b9b80d..bff953d 100644
--- a/media/codec2/vndk/C2Buffer.cpp
+++ b/media/codec2/vndk/C2Buffer.cpp
@@ -1311,8 +1311,7 @@
for (size_t planeIx = 0; planeIx < mLayout.numPlanes; ++planeIx) {
const uint32_t colSampling = mLayout.planes[planeIx].colSampling;
const uint32_t rowSampling = mLayout.planes[planeIx].rowSampling;
- if (crop.left % colSampling || crop.right() % colSampling
- || crop.top % rowSampling || crop.bottom() % rowSampling) {
+ if (crop.left % colSampling || crop.top % rowSampling) {
// cannot calculate data pointer
mImpl->getAllocation()->unmap(mData, crop, nullptr);
memset(&mLayout, 0, sizeof(mLayout));
diff --git a/media/libaaudio/Android.bp b/media/libaaudio/Android.bp
index 4b417a7..add28e0 100644
--- a/media/libaaudio/Android.bp
+++ b/media/libaaudio/Android.bp
@@ -36,9 +36,6 @@
symbol_file: "src/libaaudio.map.txt",
first_version: "26",
unversioned_until: "current",
- export_header_libs: [
- "libAAudio_headers",
- ],
}
cc_library_headers {
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 7af6eb1..ef24d8f 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -79,6 +79,17 @@
return NO_ERROR;
}
+status_t AudioRecord::logIfErrorAndReturnStatus(status_t status, const std::string& errorMessage,
+ const std::string& func) {
+ if (status != NO_ERROR) {
+ if (!func.empty()) mMediaMetrics.markError(status, func.c_str());
+ ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
+ reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
+ }
+ mStatus = status;
+ return mStatus;
+}
+
// ---------------------------------------------------------------------------
void AudioRecord::MediaMetrics::gather(const AudioRecord *record)
@@ -246,13 +257,28 @@
if (pid == -1 || (callingPid != myPid)) {
adjPid = callingPid;
}
- mClientAttributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(adjPid));
-
+ auto clientAttributionSourcePid = legacy2aidl_pid_t_int32_t(adjPid);
+ if (!clientAttributionSourcePid.ok()) {
+ return logIfErrorAndReturnStatus(BAD_VALUE,
+ StringPrintf("%s: received invalid client attribution "
+ "source pid, pid: %d, sessionId: %d",
+ __func__, pid, sessionId),
+ __func__);
+ }
+ mClientAttributionSource.pid = clientAttributionSourcePid.value();
uid_t adjUid = uid;
if (uid == -1 || (callingPid != myPid)) {
adjUid = IPCThreadState::self()->getCallingUid();
}
- mClientAttributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(adjUid));
+ auto clientAttributionSourceUid = legacy2aidl_uid_t_int32_t(adjUid);
+ if (!clientAttributionSourceUid.ok()) {
+ return logIfErrorAndReturnStatus(BAD_VALUE,
+ StringPrintf("%s: received invalid client attribution "
+ "source uid, pid: %d, session id: %d",
+ __func__, pid, sessionId),
+ __func__);
+ }
+ mClientAttributionSource.uid = clientAttributionSourceUid.value();
mTracker.reset(new RecordingActivityTracker());
@@ -261,7 +287,6 @@
mSelectedMicFieldDimension = microphoneFieldDimension;
mMaxSharedAudioHistoryMs = maxSharedAudioHistoryMs;
- std::string errorMessage;
// Copy the state variables early so they are available for error reporting.
if (pAttributes == nullptr) {
mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
@@ -304,38 +329,48 @@
break;
case TRANSFER_CALLBACK:
if (callback == nullptr) {
- errorMessage = StringPrintf(
- "%s: Transfer type TRANSFER_CALLBACK but callback == nullptr", __func__);
- status = BAD_VALUE;
- goto error;
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s: Transfer type TRANSFER_CALLBACK but callback == nullptr, "
+ "pid: %d, session id: %d",
+ __func__, pid, sessionId),
+ __func__);
}
break;
case TRANSFER_OBTAIN:
case TRANSFER_SYNC:
break;
default:
- errorMessage = StringPrintf("%s: Invalid transfer type %d", __func__, mTransfer);
- status = BAD_VALUE;
- goto error;
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s: Invalid transfer type %d, pid: %d, session id: %d", __func__,
+ mTransfer, pid, sessionId),
+ __func__);
}
// invariant that mAudioRecord != 0 is true only after set() returns successfully
if (mAudioRecord != 0) {
- errorMessage = StringPrintf("%s: Track already in use", __func__);
- status = INVALID_OPERATION;
- goto error;
+ return logIfErrorAndReturnStatus(
+ INVALID_OPERATION,
+ StringPrintf("%s: Track already in use, pid: %d, session id: %d", __func__, pid,
+ sessionId),
+ __func__);
}
if (!audio_is_valid_format(mFormat)) {
- errorMessage = StringPrintf("%s: Format %#x is not valid", __func__, mFormat);
- status = BAD_VALUE;
- goto error;
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s: Format %#x is not valid, pid: %d, session id: %d", __func__,
+ mFormat, pid, sessionId),
+ __func__);
}
if (!audio_is_input_channel(mChannelMask)) {
- errorMessage = StringPrintf("%s: Invalid channel mask %#x", __func__, mChannelMask);
- status = BAD_VALUE;
- goto error;
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s: Invalid channel mask %#x, pid: %d, session id: %d", __func__,
+ mChannelMask, pid, sessionId),
+ __func__);
}
mChannelCount = audio_channel_count_from_in_mask(mChannelMask);
@@ -369,7 +404,8 @@
mAudioRecordThread.clear();
}
// bypass error message to avoid logging twice (createRecord_l logs the error).
- goto exit;
+ mStatus = status;
+ return mStatus;
}
// TODO: add audio hardware input latency here
@@ -385,15 +421,7 @@
mFramesRead = 0;
mFramesReadServerOffset = 0;
-error:
- if (status != NO_ERROR) {
- mMediaMetrics.markError(status, __FUNCTION__);
- ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
- reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
- }
-exit:
- mStatus = status;
- return status;
+ return logIfErrorAndReturnStatus(status, "", __func__);
}
// -------------------------------------------------------------------------
@@ -772,12 +800,10 @@
status_t status;
static const int32_t kMaxCreateAttempts = 3;
int32_t remainingAttempts = kMaxCreateAttempts;
- std::string errorMessage;
if (audioFlinger == 0) {
- errorMessage = StringPrintf("%s(%d): Could not get audioflinger", __func__, mPortId);
- status = NO_INIT;
- goto exit;
+ return logIfErrorAndReturnStatus(
+ NO_INIT, StringPrintf("%s(%d): Could not get audioflinger", __func__, mPortId), "");
}
// mFlags (not mOrigFlags) is modified depending on whether fast request is accepted.
@@ -835,16 +861,34 @@
do {
media::CreateRecordResponse response;
- status = audioFlinger->createRecord(VALUE_OR_FATAL(input.toAidl()), response);
- output = VALUE_OR_FATAL(IAudioFlinger::CreateRecordOutput::fromAidl(response));
+ auto aidlInput = input.toAidl();
+ if (!aidlInput.ok()) {
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s(%d): Could not create record due to invalid input", __func__,
+ mPortId),
+ "");
+ }
+ status = audioFlinger->createRecord(aidlInput.value(), response);
+
+ auto recordOutput = IAudioFlinger::CreateRecordOutput::fromAidl(response);
+ if (!recordOutput.ok()) {
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s(%d): Could not create record output due to invalid response",
+ __func__, mPortId),
+ "");
+ }
+ output = recordOutput.value();
if (status == NO_ERROR) {
break;
}
if (status != FAILED_TRANSACTION || --remainingAttempts <= 0) {
- errorMessage = StringPrintf(
- "%s(%d): AudioFlinger could not create record track, status: %d",
- __func__, mPortId, status);
- goto exit;
+ return logIfErrorAndReturnStatus(
+ status,
+ StringPrintf("%s(%d): AudioFlinger could not create record track, status: %d",
+ __func__, mPortId, status),
+ "");
}
// FAILED_TRANSACTION happens under very specific conditions causing a state mismatch
// between audio policy manager and audio flinger during the input stream open sequence
@@ -879,9 +923,9 @@
mHalFormat = output.halConfig.format;
if (output.cblk == 0) {
- errorMessage = StringPrintf("%s(%d): Could not get control block", __func__, mPortId);
- status = NO_INIT;
- goto exit;
+ return logIfErrorAndReturnStatus(
+ NO_INIT, StringPrintf("%s(%d): Could not get control block", __func__, mPortId),
+ "");
}
// TODO: Using unsecurePointer() has some associated security pitfalls
// (see declaration for details).
@@ -889,10 +933,9 @@
// issue (e.g. by copying).
iMemPointer = output.cblk ->unsecurePointer();
if (iMemPointer == NULL) {
- errorMessage = StringPrintf(
- "%s(%d): Could not get control block pointer", __func__, mPortId);
- status = NO_INIT;
- goto exit;
+ return logIfErrorAndReturnStatus(
+ NO_INIT,
+ StringPrintf("%s(%d): Could not get control block pointer", __func__, mPortId), "");
}
cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
@@ -909,10 +952,9 @@
// issue (e.g. by copying).
buffers = output.buffers->unsecurePointer();
if (buffers == NULL) {
- errorMessage = StringPrintf(
- "%s(%d): Could not get buffer pointer", __func__, mPortId);
- status = NO_INIT;
- goto exit;
+ return logIfErrorAndReturnStatus(
+ NO_INIT,
+ StringPrintf("%s(%d): Could not get buffer pointer", __func__, mPortId), "");
}
}
@@ -1004,15 +1046,8 @@
.set(AMEDIAMETRICS_PROP_SELECTEDMICFIELDDIRECTION, (double)mSelectedMicFieldDimension)
.record();
-exit:
- if (status != NO_ERROR) {
- ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
- reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
- }
-
- mStatus = status;
// sp<IAudioTrack> track destructor will cause releaseOutput() to be called by AudioFlinger
- return status;
+ return logIfErrorAndReturnStatus(status, "", "");
}
// Report error associated with the event and some configuration details.
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 1a575a7..e85bda7 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -188,6 +188,14 @@
return result.value_or(false);
}
+status_t AudioTrack::logIfErrorAndReturnStatus(status_t status, const std::string& errorMessage) {
+ if (status != NO_ERROR) {
+ ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
+ reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
+ }
+ mStatus = status;
+ return mStatus;
+}
// ---------------------------------------------------------------------------
void AudioTrack::MediaMetrics::gather(const AudioTrack *track)
@@ -365,6 +373,10 @@
mSessionId, IPCThreadState::self()->getCallingPid(), clientPid);
AudioSystem::releaseAudioSessionId(mSessionId, clientPid);
}
+
+ if (mOutput != AUDIO_IO_HANDLE_NONE) {
+ AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
+ }
}
void AudioTrack::stopAndJoinCallbacks() {
@@ -413,9 +425,16 @@
uint32_t channelCount;
pid_t callingPid;
pid_t myPid;
- uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
- pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
- std::string errorMessage;
+ auto uid = aidl2legacy_int32_t_uid_t(attributionSource.uid);
+ auto pid = aidl2legacy_int32_t_pid_t(attributionSource.pid);
+ if (!uid.ok()) {
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE, StringPrintf("%s: received invalid attribution source uid", __func__));
+ }
+ if (!pid.ok()) {
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE, StringPrintf("%s: received invalid attribution source pid", __func__));
+ }
// Note mPortId is not valid until the track is created, so omit mPortId in ALOG for set.
ALOGV("%s(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
"flags %#x, notificationFrames %d, sessionId %d, transferType %d, uid %d, pid %d",
@@ -486,34 +505,33 @@
case TRANSFER_CALLBACK:
case TRANSFER_SYNC_NOTIF_CALLBACK:
if (callback == nullptr || sharedBuffer != 0) {
- errorMessage = StringPrintf(
- "%s: Transfer type %s but callback == nullptr || sharedBuffer != 0",
- convertTransferToText(transferType), __func__);
- status = BAD_VALUE;
- goto error;
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf(
+ "%s: Transfer type %s but callback == nullptr || sharedBuffer != 0",
+ __func__, convertTransferToText(transferType)));
}
break;
case TRANSFER_OBTAIN:
case TRANSFER_SYNC:
if (sharedBuffer != 0) {
- errorMessage = StringPrintf(
- "%s: Transfer type TRANSFER_OBTAIN but sharedBuffer != 0", __func__);
- status = BAD_VALUE;
- goto error;
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s: Transfer type TRANSFER_OBTAIN but sharedBuffer != 0",
+ __func__));
}
break;
case TRANSFER_SHARED:
if (sharedBuffer == 0) {
- errorMessage = StringPrintf(
- "%s: Transfer type TRANSFER_SHARED but sharedBuffer == 0", __func__);
- status = BAD_VALUE;
- goto error;
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s: Transfer type TRANSFER_SHARED but sharedBuffer == 0",
+ __func__));
}
break;
default:
- errorMessage = StringPrintf("%s: Invalid transfer type %d", __func__, transferType);
- status = BAD_VALUE;
- goto error;
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE, StringPrintf("%s: Invalid transfer type %d", __func__, transferType));
}
mSharedBuffer = sharedBuffer;
mTransfer = transferType;
@@ -524,9 +542,8 @@
// invariant that mAudioTrack != 0 is true only after set() returns successfully
if (mAudioTrack != 0) {
- errorMessage = StringPrintf("%s: Track already in use", __func__);
- status = INVALID_OPERATION;
- goto error;
+ return logIfErrorAndReturnStatus(INVALID_OPERATION,
+ StringPrintf("%s: Track already in use", __func__));
}
// handle default values first.
@@ -535,9 +552,8 @@
}
if (pAttributes == NULL) {
if (uint32_t(streamType) >= AUDIO_STREAM_PUBLIC_CNT) {
- errorMessage = StringPrintf("%s: Invalid stream type %d", __func__, streamType);
- status = BAD_VALUE;
- goto error;
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE, StringPrintf("%s: Invalid stream type %d", __func__, streamType));
}
mOriginalStreamType = streamType;
} else {
@@ -546,15 +562,13 @@
// validate parameters
if (!audio_is_valid_format(format)) {
- errorMessage = StringPrintf("%s: Invalid format %#x", __func__, format);
- status = BAD_VALUE;
- goto error;
+ return logIfErrorAndReturnStatus(BAD_VALUE,
+ StringPrintf("%s: Invalid format %#x", __func__, format));
}
if (!audio_is_output_channel(channelMask)) {
- errorMessage = StringPrintf("%s: Invalid channel mask %#x", __func__, channelMask);
- status = BAD_VALUE;
- goto error;
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE, StringPrintf("%s: Invalid channel mask %#x", __func__, channelMask));
}
channelCount = audio_channel_count_from_out_mask(channelMask);
mChannelCount = channelCount;
@@ -569,10 +583,9 @@
// sampling rate must be specified for direct outputs
if (sampleRate == 0 && (mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) {
- errorMessage = StringPrintf(
- "%s: sample rate must be specified for direct outputs", __func__);
- status = BAD_VALUE;
- goto error;
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s: sample rate must be specified for direct outputs", __func__));
}
// 1.0 <= mMaxRequiredSpeed <= AUDIO_TIMESTRETCH_SPEED_MAX
mMaxRequiredSpeed = min(max(maxRequiredSpeed, 1.0f), AUDIO_TIMESTRETCH_SPEED_MAX);
@@ -600,17 +613,16 @@
mNotificationsPerBufferReq = 0;
} else {
if (!(mFlags & AUDIO_OUTPUT_FLAG_FAST)) {
- errorMessage = StringPrintf(
- "%s: notificationFrames=%d not permitted for non-fast track",
- __func__, notificationFrames);
- status = BAD_VALUE;
- goto error;
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s: notificationFrames=%d not permitted for non-fast track",
+ __func__, notificationFrames));
}
if (frameCount > 0) {
- ALOGE("%s(): notificationFrames=%d not permitted with non-zero frameCount=%zu",
- __func__, notificationFrames, frameCount);
- status = BAD_VALUE;
- goto error;
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE, StringPrintf("%s(): notificationFrames=%d not permitted "
+ "with non-zero frameCount=%zu",
+ __func__, notificationFrames, frameCount));
}
mNotificationFramesReq = 0;
const uint32_t minNotificationsPerBuffer = 1;
@@ -627,12 +639,24 @@
mClientAttributionSource = AttributionSourceState(attributionSource);
callingPid = IPCThreadState::self()->getCallingPid();
myPid = getpid();
- if (uid == -1 || (callingPid != myPid)) {
- mClientAttributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(
- IPCThreadState::self()->getCallingUid()));
+ if (uid.value() == -1 || (callingPid != myPid)) {
+ auto clientAttributionSourceUid =
+ legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid());
+ if (!clientAttributionSourceUid.ok()) {
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s: received invalid client attribution source uid", __func__));
+ }
+ mClientAttributionSource.uid = clientAttributionSourceUid.value();
}
- if (pid == (pid_t)-1 || (callingPid != myPid)) {
- mClientAttributionSource.pid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(callingPid));
+ if (pid.value() == (pid_t)-1 || (callingPid != myPid)) {
+ auto clientAttributionSourcePid = legacy2aidl_uid_t_int32_t(callingPid);
+ if (!clientAttributionSourcePid.ok()) {
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s: received invalid client attribution source pid", __func__));
+ }
+ mClientAttributionSource.pid = clientAttributionSourcePid.value();
}
mAuxEffectId = 0;
mCallback = callback;
@@ -655,7 +679,8 @@
mAudioTrackThread.clear();
}
// We do not goto error to prevent double-logging errors.
- goto exit;
+ mStatus = status;
+ return mStatus;
}
mLoopCount = 0;
@@ -670,7 +695,7 @@
mReleased = 0;
mStartNs = 0;
mStartFromZeroUs = 0;
- AudioSystem::acquireAudioSessionId(mSessionId, pid, uid);
+ AudioSystem::acquireAudioSessionId(mSessionId, pid.value(), uid.value());
mSequence = 1;
mObservedSequence = mSequence;
mInUnderrun = false;
@@ -688,15 +713,7 @@
mFramesWrittenAtRestore = -1; // -1 is a unique initializer.
mVolumeHandler = new media::VolumeHandler();
-error:
- if (status != NO_ERROR) {
- ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
- reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
- }
- // fall through
-exit:
- mStatus = status;
- return status;
+ return logIfErrorAndReturnStatus(status, "");
}
@@ -723,8 +740,22 @@
audio_port_handle_t selectedDeviceId)
{
AttributionSourceState attributionSource;
- attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(uid));
- attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(pid));
+ auto attributionSourceUid = legacy2aidl_uid_t_int32_t(uid);
+ if (!attributionSourceUid.ok()) {
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s: received invalid attribution source uid, uid: %d, session id: %d",
+ __func__, uid, sessionId));
+ }
+ attributionSource.uid = attributionSourceUid.value();
+ auto attributionSourcePid = legacy2aidl_pid_t_int32_t(pid);
+ if (!attributionSourcePid.ok()) {
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s: received invalid attribution source pid, pid: %d, sessionId: %d",
+ __func__, pid, sessionId));
+ }
+ attributionSource.pid = attributionSourcePid.value();
attributionSource.token = sp<BBinder>::make();
if (callback) {
mLegacyCallbackWrapper = sp<LegacyCallbackWrapper>::make(callback, user);
@@ -1785,15 +1816,11 @@
status_t AudioTrack::createTrack_l()
{
status_t status;
- bool callbackAdded = false;
- std::string errorMessage;
const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
if (audioFlinger == 0) {
- errorMessage = StringPrintf("%s(%d): Could not get audioflinger",
- __func__, mPortId);
- status = DEAD_OBJECT;
- goto exit;
+ return logIfErrorAndReturnStatus(
+ DEAD_OBJECT, StringPrintf("%s(%d): Could not get audioflinger", __func__, mPortId));
}
{
@@ -1861,21 +1888,31 @@
input.audioTrackCallback = mAudioTrackCallback;
media::CreateTrackResponse response;
- status = audioFlinger->createTrack(VALUE_OR_FATAL(input.toAidl()), response);
+ auto aidlInput = input.toAidl();
+ if (!aidlInput.ok()) {
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE, StringPrintf("%s(%d): Could not create track due to invalid input",
+ __func__, mPortId));
+ }
+ status = audioFlinger->createTrack(aidlInput.value(), response);
IAudioFlinger::CreateTrackOutput output{};
if (status == NO_ERROR) {
- output = VALUE_OR_FATAL(IAudioFlinger::CreateTrackOutput::fromAidl(response));
+ auto trackOutput = IAudioFlinger::CreateTrackOutput::fromAidl(response);
+ if (!trackOutput.ok()) {
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE,
+ StringPrintf("%s(%d): Could not create track output due to invalid response",
+ __func__, mPortId));
+ }
+ output = trackOutput.value();
}
if (status != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
- errorMessage = StringPrintf(
- "%s(%d): AudioFlinger could not create track, status: %d output %d",
- __func__, mPortId, status, output.outputId);
- if (status == NO_ERROR) {
- status = INVALID_OPERATION; // device not ready
- }
- goto exit;
+ return logIfErrorAndReturnStatus(
+ status == NO_ERROR ? INVALID_OPERATION : status, // device not ready
+ StringPrintf("%s(%d): AudioFlinger could not create track, status: %d output %d",
+ __func__, mPortId, status, output.outputId));
}
ALOG_ASSERT(output.audioTrack != 0);
@@ -1905,22 +1942,22 @@
// FIXME compare to AudioRecord
std::optional<media::SharedFileRegion> sfr;
output.audioTrack->getCblk(&sfr);
- sp<IMemory> iMem = VALUE_OR_FATAL(aidl2legacy_NullableSharedFileRegion_IMemory(sfr));
- if (iMem == 0) {
- errorMessage = StringPrintf("%s(%d): Could not get control block", __func__, mPortId);
- status = FAILED_TRANSACTION;
- goto exit;
+ auto iMemory = aidl2legacy_NullableSharedFileRegion_IMemory(sfr);
+ if (!iMemory.ok() || iMemory.value() == 0) {
+ return logIfErrorAndReturnStatus(
+ FAILED_TRANSACTION,
+ StringPrintf("%s(%d): Could not get control block", __func__, mPortId));
}
+ sp<IMemory> iMem = iMemory.value();
// TODO: Using unsecurePointer() has some associated security pitfalls
// (see declaration for details).
// Either document why it is safe in this case or address the
// issue (e.g. by copying).
void *iMemPointer = iMem->unsecurePointer();
if (iMemPointer == NULL) {
- errorMessage = StringPrintf(
- "%s(%d): Could not get control block pointer", __func__, mPortId);
- status = FAILED_TRANSACTION;
- goto exit;
+ return logIfErrorAndReturnStatus(
+ FAILED_TRANSACTION,
+ StringPrintf("%s(%d): Could not get control block pointer", __func__, mPortId));
}
// invariant that mAudioTrack != 0 is true only after set() returns successfully
if (mAudioTrack != 0) {
@@ -1955,7 +1992,6 @@
AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
}
AudioSystem::addAudioDeviceCallback(this, output.outputId, output.portId);
- callbackAdded = true;
}
mPortId = output.portId;
@@ -1980,11 +2016,9 @@
// issue (e.g. by copying).
buffers = mSharedBuffer->unsecurePointer();
if (buffers == NULL) {
- errorMessage = StringPrintf(
- "%s(%d): Could not get buffer pointer", __func__, mPortId);
- ALOGE("%s", errorMessage.c_str());
- status = FAILED_TRANSACTION;
- goto exit;
+ return logIfErrorAndReturnStatus(
+ FAILED_TRANSACTION,
+ StringPrintf("%s(%d): Could not get buffer pointer", __func__, mPortId));
}
}
@@ -2081,19 +2115,8 @@
}
-exit:
- if (status != NO_ERROR) {
- if (callbackAdded) {
- // note: mOutput is always valid is callbackAdded is true
- AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
- }
- ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
- reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
- }
- mStatus = status;
-
// sp<IAudioTrack> track destructor will cause releaseOutput() to be called by AudioFlinger
- return status;
+ return logIfErrorAndReturnStatus(status, "");
}
void AudioTrack::reportError(status_t status, const char *event, const char *message) const
@@ -3169,7 +3192,12 @@
media::AudioTimestampInternal ts;
mAudioTrack->getTimestamp(&ts, &status);
if (status == OK) {
- timestamp = VALUE_OR_FATAL(aidl2legacy_AudioTimestampInternal_AudioTimestamp(ts));
+ auto legacyTs = aidl2legacy_AudioTimestampInternal_AudioTimestamp(ts);
+ if (!legacyTs.ok()) {
+ return logIfErrorAndReturnStatus(
+ BAD_VALUE, StringPrintf("%s: received invalid audio timestamp", __func__));
+ }
+ timestamp = legacyTs.value();
}
} else {
// read timestamp from shared memory
diff --git a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
index ddda8bb..73610a8 100644
--- a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
@@ -19,6 +19,7 @@
import android.media.AudioPortFw;
import android.media.audio.common.AudioConfig;
import android.media.audio.common.AudioConfigBase;
+import android.media.audio.common.AudioAttributes;
/**
* {@hide}
@@ -32,4 +33,5 @@
AudioPortFw device;
/** Bitmask, indexed by AudioOutputFlag. */
int flags;
+ AudioAttributes attributes;
}
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-0 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..c1e1de5
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-1 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..8e49acd
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-2 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..a8ffcae
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-3 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..7c25f6e
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index d4479ef..25d91d3 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -138,6 +138,12 @@
audio_format_t format,
audio_channel_mask_t channelMask);
+ /* Checks for erroneous status, marks error in MediaMetrics, logs the error message.
+ * Updates and returns mStatus.
+ */
+ status_t logIfErrorAndReturnStatus(status_t status, const std::string& errorMessage,
+ const std::string& func);
+
/* How data is transferred from AudioRecord
*/
enum transfer_type {
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 3a001a4..de97863 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -233,8 +233,7 @@
* FIXME This API assumes a route, and so should be deprecated.
*/
- static status_t getMinFrameCount(size_t* frameCount,
- audio_stream_type_t streamType,
+ static status_t getMinFrameCount(size_t* frameCount, audio_stream_type_t streamType,
uint32_t sampleRate);
/* Check if direct playback is possible for the given audio configuration and attributes.
@@ -243,6 +242,11 @@
static bool isDirectOutputSupported(const audio_config_base_t& config,
const audio_attributes_t& attributes);
+ /* Checks for erroneous status, logs the error message.
+ * Updates and returns mStatus.
+ */
+ status_t logIfErrorAndReturnStatus(status_t status, const std::string& errorMessage);
+
/* How data is transferred to AudioTrack
*/
enum transfer_type {
diff --git a/media/libaudiohal/impl/ConversionHelperAidl.h b/media/libaudiohal/impl/ConversionHelperAidl.h
index 0fadd9c..fe00fb2 100644
--- a/media/libaudiohal/impl/ConversionHelperAidl.h
+++ b/media/libaudiohal/impl/ConversionHelperAidl.h
@@ -32,6 +32,28 @@
namespace android {
+/*
+ * Helper macros to add the instance name and function name to logs.
+ * Classes should provide a getInstanceName() API to use these macros,
+ * which print the function name along with the instance name.
+ *
+ * Usage:
+ * AUGMENT_LOG(D);
+ * AUGMENT_LOG(I, "hello!");
+ * AUGMENT_LOG(W, "value: %d", value);
+ *
+ * AUGMENT_LOG_IF(D, value < 0, "negative");
+ * AUGMENT_LOG_IF(E, value < 0, "bad value: %d", value);
+ */
+
+#define AUGMENT_LOG(level, ...) \
+ ALOG##level("[%s] %s" __VA_OPT__(": " __android_second(0, __VA_ARGS__, "")), \
+ getInstanceName().c_str(), __func__ __VA_OPT__(__android_rest(__VA_ARGS__)))
+
+#define AUGMENT_LOG_IF(level, cond, ...) \
+ ALOG##level##_IF(cond, "[%s] %s" __VA_OPT__(": " __android_second(0, __VA_ARGS__, "")), \
+ getInstanceName().c_str(), __func__ __VA_OPT__(__android_rest(__VA_ARGS__)))
+
class Args {
public:
explicit Args(const Vector<String16>& args)
@@ -49,13 +71,15 @@
class ConversionHelperAidl {
protected:
- ConversionHelperAidl(std::string_view className) : mClassName(className) {}
+ ConversionHelperAidl(std::string_view className, std::string_view instanceName)
+ : mClassName(className), mInstanceName(instanceName) {}
- const std::string& getClassName() const {
- return mClassName;
- }
+ const std::string& getClassName() const { return mClassName; }
+
+ const std::string& getInstanceName() const { return mInstanceName; }
const std::string mClassName;
+ const std::string mInstanceName;
};
// 'action' must accept a value of type 'T' and return 'status_t'.
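Editor's note: to illustrate the macros above, a hypothetical subclass adopting them; the class name and method are invented for illustration and rely only on ConversionHelperAidl providing getInstanceName():

    // Hypothetical example class, not part of this patch.
    class MyHalWrapper : public ConversionHelperAidl {
      public:
        MyHalWrapper() : ConversionHelperAidl("MyHalWrapper", "primary") {}

        status_t doWork(int value) {
            AUGMENT_LOG(D);                                    // logs "[primary] doWork"
            AUGMENT_LOG_IF(W, value < 0, "bad value: %d", value);
            return value < 0 ? BAD_VALUE : OK;
        }
    };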
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 86dd663..36f4f70 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -24,6 +24,7 @@
#include <aidl/android/hardware/audio/core/StreamDescriptor.h>
#include <error/expected_utils.h>
#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdk.h>
#include <media/AidlConversionNdkCpp.h>
#include <media/AidlConversionUtil.h>
#include <mediautils/TimeCheck.h>
@@ -31,6 +32,7 @@
#include <Utils.h>
#include <utils/Log.h>
+#include "AidlUtils.h"
#include "DeviceHalAidl.h"
#include "EffectHalAidl.h"
#include "StreamHalAidl.h"
@@ -59,6 +61,8 @@
using aidl::android::hardware::audio::common::getFrameSizeInBytes;
using aidl::android::hardware::audio::common::isBitPositionFlagSet;
using aidl::android::hardware::audio::common::RecordTrackMetadata;
+using aidl::android::hardware::audio::common::PlaybackTrackMetadata;
+using aidl::android::hardware::audio::common::SourceMetadata;
using aidl::android::hardware::audio::core::sounddose::ISoundDose;
using aidl::android::hardware::audio::core::AudioPatch;
using aidl::android::hardware::audio::core::AudioRoute;
@@ -70,6 +74,18 @@
using aidl::android::hardware::audio::core::ModuleDebug;
using aidl::android::hardware::audio::core::VendorParameter;
+#define RETURN_IF_MODULE_NOT_INIT(retVal) \
+ if (mModule == nullptr) { \
+ AUGMENT_LOG(E, "module not initialized"); \
+ return retVal; \
+ }
+
+#define RETURN_IF_TELEPHONY_NOT_INIT(retVal) \
+ if (mTelephony == nullptr) { \
+ AUGMENT_LOG(E, "telephony not initialized"); \
+ return retVal; \
+ }
+
namespace android {
namespace {
@@ -102,15 +118,16 @@
DeviceHalAidl::DeviceHalAidl(const std::string& instance, const std::shared_ptr<IModule>& module,
const std::shared_ptr<IHalAdapterVendorExtension>& vext)
- : ConversionHelperAidl("DeviceHalAidl"),
- mInstance(instance), mModule(module), mVendorExt(vext),
- mTelephony(retrieveSubInterface<ITelephony>(module, &IModule::getTelephony)),
- mBluetooth(retrieveSubInterface<IBluetooth>(module, &IModule::getBluetooth)),
- mBluetoothA2dp(retrieveSubInterface<IBluetoothA2dp>(module, &IModule::getBluetoothA2dp)),
- mBluetoothLe(retrieveSubInterface<IBluetoothLe>(module, &IModule::getBluetoothLe)),
- mSoundDose(retrieveSubInterface<ISoundDose>(module, &IModule::getSoundDose)),
- mMapper(instance, module), mMapperAccessor(mMapper, mLock) {
-}
+ : ConversionHelperAidl("DeviceHalAidl", instance),
+ mModule(module),
+ mVendorExt(vext),
+ mTelephony(retrieveSubInterface<ITelephony>(module, &IModule::getTelephony)),
+ mBluetooth(retrieveSubInterface<IBluetooth>(module, &IModule::getBluetooth)),
+ mBluetoothA2dp(retrieveSubInterface<IBluetoothA2dp>(module, &IModule::getBluetoothA2dp)),
+ mBluetoothLe(retrieveSubInterface<IBluetoothLe>(module, &IModule::getBluetoothLe)),
+ mSoundDose(retrieveSubInterface<ISoundDose>(module, &IModule::getSoundDose)),
+ mMapper(instance, module),
+ mMapperAccessor(mMapper, mLock) {}
status_t DeviceHalAidl::getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) {
std::lock_guard l(mLock);
@@ -123,11 +140,13 @@
}
status_t DeviceHalAidl::getSupportedModes(std::vector<media::audio::common::AudioMode> *modes) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
- if (mTelephony == nullptr) return INVALID_OPERATION;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+ RETURN_IF_TELEPHONY_NOT_INIT(INVALID_OPERATION);
+
if (modes == nullptr) {
+ AUGMENT_LOG(E, "uninitialized modes");
return BAD_VALUE;
}
std::vector<AudioMode> aidlModes;
@@ -145,48 +164,53 @@
}
status_t DeviceHalAidl::initCheck() {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
std::lock_guard l(mLock);
return mMapper.initialize();
}
status_t DeviceHalAidl::setVoiceVolume(float volume) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D, "volume %f", volume);
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
- if (mTelephony == nullptr) return INVALID_OPERATION;
- ITelephony::TelecomConfig inConfig{ .voiceVolume = Float{volume} }, outConfig;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+ RETURN_IF_TELEPHONY_NOT_INIT(INVALID_OPERATION);
+
+ ITelephony::TelecomConfig inConfig{.voiceVolume = Float{volume}}, outConfig;
RETURN_STATUS_IF_ERROR(
statusTFromBinderStatus(mTelephony->setTelecomConfig(inConfig, &outConfig)));
- ALOGW_IF(outConfig.voiceVolume.has_value() && volume != outConfig.voiceVolume.value().value,
- "%s: the resulting voice volume %f is not the same as requested %f",
- __func__, outConfig.voiceVolume.value().value, volume);
+ AUGMENT_LOG_IF(
+ W, outConfig.voiceVolume.has_value() && volume != outConfig.voiceVolume.value().value,
+ "the resulting voice volume %f is not the same as requested %f",
+ outConfig.voiceVolume.value().value, volume);
return OK;
}
status_t DeviceHalAidl::setMasterVolume(float volume) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D, "volume %f", volume);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
return statusTFromBinderStatus(mModule->setMasterVolume(volume));
}
status_t DeviceHalAidl::getMasterVolume(float *volume) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (volume == nullptr) {
+ AUGMENT_LOG(E, "uninitialized volumes");
return BAD_VALUE;
}
return statusTFromBinderStatus(mModule->getMasterVolume(volume));
}
status_t DeviceHalAidl::setMode(audio_mode_t mode) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D, "mode %d", mode);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
AudioMode audioMode = VALUE_OR_FATAL(::aidl::android::legacy2aidl_audio_mode_t_AudioMode(mode));
if (mTelephony != nullptr) {
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mTelephony->switchAudioMode(audioMode)));
@@ -195,90 +219,99 @@
}
status_t DeviceHalAidl::setMicMute(bool state) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D, "mute %d", state);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
return statusTFromBinderStatus(mModule->setMicMute(state));
}
status_t DeviceHalAidl::getMicMute(bool *state) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (state == nullptr) {
+ AUGMENT_LOG(E, "uninitialized mute state");
return BAD_VALUE;
}
return statusTFromBinderStatus(mModule->getMicMute(state));
}
status_t DeviceHalAidl::setMasterMute(bool state) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D, "mute %d", state);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
return statusTFromBinderStatus(mModule->setMasterMute(state));
}
status_t DeviceHalAidl::getMasterMute(bool *state) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (state == nullptr) {
+ AUGMENT_LOG(E, "uninitialized mute state");
return BAD_VALUE;
}
return statusTFromBinderStatus(mModule->getMasterMute(state));
}
status_t DeviceHalAidl::setParameters(const String8& kvPairs) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
AudioParameter parameters(kvPairs);
- ALOGD("%s: parameters: \"%s\"", __func__, parameters.toString().c_str());
+ AUGMENT_LOG(D, "parameters: \"%s\"", parameters.toString().c_str());
if (status_t status = filterAndUpdateBtA2dpParameters(parameters); status != OK) {
- ALOGW("%s: filtering or updating BT A2DP parameters failed: %d", __func__, status);
+ AUGMENT_LOG(W, "filterAndUpdateBtA2dpParameters failed: %d", status);
}
if (status_t status = filterAndUpdateBtHfpParameters(parameters); status != OK) {
- ALOGW("%s: filtering or updating BT HFP parameters failed: %d", __func__, status);
+ AUGMENT_LOG(W, "filterAndUpdateBtHfpParameters failed: %d", status);
}
if (status_t status = filterAndUpdateBtLeParameters(parameters); status != OK) {
- ALOGW("%s: filtering or updating BT LE parameters failed: %d", __func__, status);
+ AUGMENT_LOG(W, "filterAndUpdateBtLeParameters failed: %d", status);
}
if (status_t status = filterAndUpdateBtScoParameters(parameters); status != OK) {
- ALOGW("%s: filtering or updating BT SCO parameters failed: %d", __func__, status);
+ AUGMENT_LOG(W, "filterAndUpdateBtScoParameters failed: %d", status);
}
if (status_t status = filterAndUpdateScreenParameters(parameters); status != OK) {
- ALOGW("%s: filtering or updating screen parameters failed: %d", __func__, status);
+ AUGMENT_LOG(W, "filterAndUpdateScreenParameters failed: %d", status);
}
if (status_t status = filterAndUpdateTelephonyParameters(parameters); status != OK) {
- ALOGW("%s: filtering or updating telephony parameters failed: %d", __func__, status);
+ AUGMENT_LOG(W, "filterAndUpdateTelephonyParameters failed: %d", status);
}
return parseAndSetVendorParameters(mVendorExt, mModule, parameters);
}
status_t DeviceHalAidl::getParameters(const String8& keys, String8 *values) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D, "keys: \"%s\"", keys.c_str());
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (values == nullptr) {
+ AUGMENT_LOG(E, "invalid values");
return BAD_VALUE;
}
AudioParameter parameterKeys(keys), result;
if (status_t status = filterAndRetrieveBtA2dpParameters(parameterKeys, &result); status != OK) {
- ALOGW("%s: filtering or retrieving BT A2DP parameters failed: %d", __func__, status);
+ AUGMENT_LOG(W, "filterAndRetrieveBtA2dpParameters failed: %d", status);
}
if (status_t status = filterAndRetrieveBtLeParameters(parameterKeys, &result); status != OK) {
- ALOGW("%s: filtering or retrieving BT LE parameters failed: %d", __func__, status);
+ AUGMENT_LOG(W, "filterAndRetrieveBtLeParameters failed: %d", status);
}
*values = result.toString();
return parseAndGetVendorParameters(mVendorExt, mModule, parameterKeys, values);
}
status_t DeviceHalAidl::getInputBufferSize(struct audio_config* config, size_t* size) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (config == nullptr || size == nullptr) {
+ AUGMENT_LOG(E, "invalid config or size");
return BAD_VALUE;
}
constexpr bool isInput = true;
@@ -426,11 +459,14 @@
audio_io_handle_t handle, audio_devices_t devices,
audio_output_flags_t flags, struct audio_config* config,
const char* address,
- sp<StreamOutHalInterface>* outStream) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ sp<StreamOutHalInterface>* outStream,
+ const std::vector<playback_track_metadata_v7_t>& sourceMetadata) {
+ AUGMENT_LOG(D, "handle: %d devices %0x flags %0x", handle, devices, flags);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (outStream == nullptr || config == nullptr) {
+ AUGMENT_LOG(E, "invalid outStream or config");
return BAD_VALUE;
}
constexpr bool isInput = false;
@@ -442,9 +478,12 @@
::aidl::android::legacy2aidl_audio_device_AudioDevice(devices, address));
int32_t aidlOutputFlags = VALUE_OR_RETURN_STATUS(
::aidl::android::legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+ SourceMetadata aidlMetadata = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::legacy2aidl_playback_track_metadata_v7_SourceMetadata(sourceMetadata));
AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::output>(aidlOutputFlags);
AudioPortConfig mixPortConfig;
AudioPatch aidlPatch;
+
Hal2AidlMapper::Cleanups cleanups(mMapperAccessor);
{
std::lock_guard l(mLock);
@@ -474,12 +513,13 @@
}
args.bufferSizeFrames = aidlConfig.frameCount;
args.eventCallback = eventCb;
+ args.sourceMetadata = aidlMetadata;
::aidl::android::hardware::audio::core::IModule::OpenOutputStreamReturn ret;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openOutputStream(args, &ret)));
- StreamContextAidl context(ret.desc, isOffload);
+ StreamContextAidl context(ret.desc, isOffload, aidlHandle);
if (!context.isValid()) {
- ALOGE("%s: Failed to created a valid stream context from the descriptor: %s",
- __func__, ret.desc.toString().c_str());
+ AUGMENT_LOG(E, "Failed to created a valid stream context from the descriptor: %s",
+ ret.desc.toString().c_str());
return NO_INIT;
}
auto stream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
@@ -487,8 +527,11 @@
*outStream = stream;
/* StreamOutHalInterface* */ void* cbCookie = (*outStream).get();
{
- std::lock_guard l(mLock);
+ std::lock_guard l(mCallbacksLock);
mCallbacks.emplace(cbCookie, Callbacks{});
+ }
+ {
+ std::lock_guard l(mLock);
mMapper.addStream(*outStream, mixPortConfig.id, aidlPatch.id);
}
if (streamCb) {
@@ -508,10 +551,11 @@
const char* address, audio_source_t source,
audio_devices_t outputDevice, const char* outputDeviceAddress,
sp<StreamInHalInterface>* inStream) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D, "handle: %d devices %0x flags %0x", handle, devices, flags);
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (inStream == nullptr || config == nullptr) {
+ AUGMENT_LOG(E, "invalid inStream or config");
return BAD_VALUE;
}
constexpr bool isInput = true;
@@ -551,10 +595,10 @@
args.bufferSizeFrames = aidlConfig.frameCount;
::aidl::android::hardware::audio::core::IModule::OpenInputStreamReturn ret;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openInputStream(args, &ret)));
- StreamContextAidl context(ret.desc, false /*isAsynchronous*/);
+ StreamContextAidl context(ret.desc, false /*isAsynchronous*/, aidlHandle);
if (!context.isValid()) {
- ALOGE("%s: Failed to created a valid stream context from the descriptor: %s",
- __func__, ret.desc.toString().c_str());
+ AUGMENT_LOG(E, "Failed to created a valid stream context from the descriptor: %s",
+ ret.desc.toString().c_str());
return NO_INIT;
}
*inStream = sp<StreamInHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
@@ -568,7 +612,10 @@
}
status_t DeviceHalAidl::supportsAudioPatches(bool* supportsPatches) {
+ AUGMENT_LOG(V);
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (supportsPatches == nullptr) {
+ AUGMENT_LOG(E, "uninitialized supportsPatches");
return BAD_VALUE;
}
*supportsPatches = true;
@@ -580,13 +627,20 @@
unsigned int num_sinks,
const struct audio_port_config* sinks,
audio_patch_handle_t* patch) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D, "sources: %d sinks %d", num_sources, num_sinks);
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
- if (num_sinks > AUDIO_PATCH_PORTS_MAX || num_sources > AUDIO_PATCH_PORTS_MAX ||
- sources == nullptr || sinks == nullptr || patch == nullptr) {
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+ if (num_sinks > AUDIO_PATCH_PORTS_MAX || num_sources > AUDIO_PATCH_PORTS_MAX) {
+ AUGMENT_LOG(E, "invalid sources %d or sinks %d ", num_sources, num_sinks);
return BAD_VALUE;
}
+
+ if (sources == nullptr || sinks == nullptr || patch == nullptr) {
+ AUGMENT_LOG(E, "uninitialized sources %d or sinks %d or patches %d", (sources == nullptr),
+ (sinks == nullptr), (patch == nullptr));
+ return BAD_VALUE;
+ }
+
// When the patch handle (*patch) is AUDIO_PATCH_HANDLE_NONE, it means
// the framework wants to create a new patch. The handle has to be generated
// by the HAL. Since handles generated this way can only be unique within
@@ -648,9 +702,10 @@
}
status_t DeviceHalAidl::releaseAudioPatch(audio_patch_handle_t patch) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D, "patch: %d", patch);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
static_assert(AUDIO_PATCH_HANDLE_NONE == 0);
if (patch == AUDIO_PATCH_HANDLE_NONE) {
return BAD_VALUE;
@@ -673,7 +728,10 @@
}
status_t DeviceHalAidl::getAudioPort(struct audio_port* port) {
+ AUGMENT_LOG(V);
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (port == nullptr) {
+ AUGMENT_LOG(E, "port not initialized");
return BAD_VALUE;
}
audio_port_v7 portV7;
@@ -683,10 +741,12 @@
}
status_t DeviceHalAidl::getAudioPort(struct audio_port_v7 *port) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (port == nullptr) {
+ AUGMENT_LOG(E, "port not initialized");
return BAD_VALUE;
}
bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(port->role, port->type)) ==
@@ -694,8 +754,7 @@
auto aidlPort = VALUE_OR_RETURN_STATUS(
::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
if (aidlPort.ext.getTag() != AudioPortExt::device) {
- ALOGE("%s: provided port is not a device port (module %s): %s",
- __func__, mInstance.c_str(), aidlPort.toString().c_str());
+ AUGMENT_LOG(E, "provided port is not a device port %s", aidlPort.toString().c_str());
return BAD_VALUE;
}
const auto& matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
@@ -714,11 +773,13 @@
status_t DeviceHalAidl::getAudioMixPort(const struct audio_port_v7 *devicePort,
struct audio_port_v7 *mixPort) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
- if (devicePort == nullptr || mixPort == nullptr ||
- devicePort->type != AUDIO_PORT_TYPE_DEVICE || mixPort->type != AUDIO_PORT_TYPE_MIX) {
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+
+ if (devicePort == nullptr || mixPort == nullptr || devicePort->type != AUDIO_PORT_TYPE_DEVICE ||
+ mixPort->type != AUDIO_PORT_TYPE_MIX) {
+ AUGMENT_LOG(E, "invalid device or mix port");
return BAD_VALUE;
}
const int32_t aidlHandle = VALUE_OR_RETURN_STATUS(
@@ -736,10 +797,12 @@
}
status_t DeviceHalAidl::setAudioPortConfig(const struct audio_port_config* config) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (config == nullptr) {
+ AUGMENT_LOG(E, "config not initialized");
return BAD_VALUE;
}
bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
@@ -753,9 +816,10 @@
}
MicrophoneInfoProvider::Info const* DeviceHalAidl::getMicrophoneInfo() {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
+
TIME_CHECK();
- if (!mModule) return {};
+ RETURN_IF_MODULE_NOT_INIT({});
std::lock_guard l(mLock);
if (mMicrophones.status == Microphones::Status::UNKNOWN) {
TIME_CHECK();
@@ -767,7 +831,7 @@
} else if (status == INVALID_OPERATION) {
mMicrophones.status = Microphones::Status::NOT_SUPPORTED;
} else {
- ALOGE("%s: Unexpected status from 'IModule.getMicrophones': %d", __func__, status);
+ AUGMENT_LOG(E, "Unexpected status from HAL: %d", status);
return {};
}
}
@@ -779,10 +843,12 @@
status_t DeviceHalAidl::getMicrophones(
std::vector<audio_microphone_characteristic_t>* microphones) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (microphones == nullptr) {
+ AUGMENT_LOG(E, "microphones not initialized");
return BAD_VALUE;
}
auto staticInfo = getMicrophoneInfo();
@@ -801,10 +867,12 @@
status_t DeviceHalAidl::addDeviceEffect(
const struct audio_port_config *device, sp<EffectHalInterface> effect) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (device == nullptr || effect == nullptr) {
+ AUGMENT_LOG(E, "device or effect not initialized");
return BAD_VALUE;
}
bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
@@ -813,8 +881,8 @@
::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
*device, isInput, 0));
if (requestedPortConfig.ext.getTag() != AudioPortExt::Tag::device) {
- ALOGE("%s: provided port config is not a device port config: %s",
- __func__, requestedPortConfig.toString().c_str());
+ AUGMENT_LOG(E, "provided port config is not a device port config: %s",
+ requestedPortConfig.toString().c_str());
return BAD_VALUE;
}
AudioPortConfig devicePortConfig;
@@ -832,10 +900,11 @@
}
status_t DeviceHalAidl::removeDeviceEffect(
const struct audio_port_config *device, sp<EffectHalInterface> effect) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (device == nullptr || effect == nullptr) {
+ AUGMENT_LOG(E, "device or effect not initialized");
return BAD_VALUE;
}
bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
@@ -844,8 +913,8 @@
::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
*device, isInput, 0));
if (requestedPortConfig.ext.getTag() != AudioPortExt::Tag::device) {
- ALOGE("%s: provided port config is not a device port config: %s",
- __func__, requestedPortConfig.toString().c_str());
+ AUGMENT_LOG(E, "provided port config is not a device port config: %s",
+ requestedPortConfig.toString().c_str());
return BAD_VALUE;
}
AudioPortConfig devicePortConfig;
@@ -863,11 +932,13 @@
status_t DeviceHalAidl::getMmapPolicyInfos(
media::audio::common::AudioMMapPolicyType policyType,
std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
- AudioMMapPolicyType mmapPolicyType = VALUE_OR_RETURN_STATUS(
- cpp2ndk_AudioMMapPolicyType(policyType));
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+
+ AudioMMapPolicyType mmapPolicyType =
+ VALUE_OR_RETURN_STATUS(cpp2ndk_AudioMMapPolicyType(policyType));
std::vector<AudioMMapPolicyInfo> mmapPolicyInfos;
@@ -883,9 +954,10 @@
}
int32_t DeviceHalAidl::getAAudioMixerBurstCount() {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
int32_t mixerBurstCount = 0;
if (mModule->getAAudioMixerBurstCount(&mixerBurstCount).isOk()) {
return mixerBurstCount;
@@ -894,9 +966,10 @@
}
int32_t DeviceHalAidl::getAAudioHardwareBurstMinUsec() {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
int32_t hardwareBurstMinUsec = 0;
if (mModule->getAAudioHardwareBurstMinUsec(&hardwareBurstMinUsec).isOk()) {
return hardwareBurstMinUsec;
@@ -905,9 +978,10 @@
}
error::Result<audio_hw_sync_t> DeviceHalAidl::getHwAvSync() {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
int32_t aidlHwAvSync;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->generateHwAvSyncId(&aidlHwAvSync)));
return VALUE_OR_RETURN_STATUS(
@@ -921,55 +995,59 @@
}
status_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
+
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (supports == nullptr) {
return BAD_VALUE;
}
return statusTFromBinderStatus(mModule->supportsVariableLatency(supports));
}
-status_t DeviceHalAidl::getSoundDoseInterface(const std::string& module,
- ::ndk::SpAIBinder* soundDoseBinder) {
+status_t DeviceHalAidl::getSoundDoseInterface([[maybe_unused]] const std::string& module,
+ ::ndk::SpAIBinder* soundDoseBinder) {
+ AUGMENT_LOG(V);
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+
if (soundDoseBinder == nullptr) {
return BAD_VALUE;
}
if (mSoundDose == nullptr) {
- ALOGE("%s failed to retrieve the sound dose interface for module %s",
- __func__, module.c_str());
+ AUGMENT_LOG(E, "failed to retrieve the sound dose interface");
return BAD_VALUE;
}
if (mSoundDose == nullptr) {
- ALOGE("%s failed to return the sound dose interface for module %s: not implemented",
- __func__,
- module.c_str());
+ AUGMENT_LOG(E, "failed to return the sound dose interface not implemented");
return NO_INIT;
}
*soundDoseBinder = mSoundDose->asBinder();
- ALOGI("%s using audio AIDL HAL sound dose interface", __func__);
+ AUGMENT_LOG(I, "using audio AIDL HAL sound dose interface");
return OK;
}
status_t DeviceHalAidl::prepareToDisconnectExternalDevice(const struct audio_port_v7* port) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(V);
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (port == nullptr) {
+ AUGMENT_LOG(E, "port not initialized");
return BAD_VALUE;
}
- const bool isInput = VALUE_OR_RETURN_STATUS(
- ::aidl::android::portDirection(port->role, port->type)) ==
- ::aidl::android::AudioPortDirection::INPUT;
+ const bool isInput =
+ VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(port->role, port->type)) ==
+ ::aidl::android::AudioPortDirection::INPUT;
AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
if (aidlPort.ext.getTag() != AudioPortExt::device) {
- ALOGE("%s: provided port is not a device port (module %s): %s",
- __func__, mInstance.c_str(), aidlPort.toString().c_str());
+ AUGMENT_LOG(E, "provided port is not a device port: %s", aidlPort.toString().c_str());
return BAD_VALUE;
}
+
+ AUGMENT_LOG(D, "device %s", aidlPort.toString().c_str());
+
status_t status = NO_ERROR;
{
std::lock_guard l(mLock);
@@ -990,10 +1068,11 @@
}
status_t DeviceHalAidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(V);
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
if (port == nullptr) {
+ AUGMENT_LOG(E, "port not initialized");
return BAD_VALUE;
}
if (!connected) {
@@ -1012,17 +1091,18 @@
AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
if (aidlPort.ext.getTag() != AudioPortExt::device) {
- ALOGE("%s: provided port is not a device port (module %s): %s",
- __func__, mInstance.c_str(), aidlPort.toString().c_str());
+ AUGMENT_LOG(E, "provided port is not a device port: %s", aidlPort.toString().c_str());
return BAD_VALUE;
}
+ AUGMENT_LOG(D, "connected %d port: %s", connected, aidlPort.toString().c_str());
std::lock_guard l(mLock);
return mMapper.setDevicePortConnectedState(aidlPort, connected);
}
status_t DeviceHalAidl::setSimulateDeviceConnections(bool enabled) {
+ AUGMENT_LOG(V);
TIME_CHECK();
- if (mModule == nullptr) return NO_INIT;
+ RETURN_IF_MODULE_NOT_INIT(NO_INIT);
{
std::lock_guard l(mLock);
mMapper.resetUnusedPatchesAndPortConfigs();
@@ -1031,9 +1111,9 @@
status_t status = statusTFromBinderStatus(mModule->setModuleDebug(debug));
// This is important to log as it affects HAL behavior.
if (status == OK) {
- ALOGI("%s: set enabled: %d", __func__, enabled);
+ AUGMENT_LOG(I, "set enabled: %d", enabled);
} else {
- ALOGW("%s: set enabled to %d failed: %d", __func__, enabled, status);
+ AUGMENT_LOG(W, "set enabled to %d failed: %d", enabled, status);
}
return status;
}
@@ -1048,7 +1128,7 @@
mBluetoothA2dp->supportsOffloadReconfiguration(&supports)));
result->addInt(key, supports ? 1 : 0);
} else {
- ALOGI("%s: no IBluetoothA2dp on %s", __func__, mInstance.c_str());
+ AUGMENT_LOG(I, "no IBluetoothA2dp");
result->addInt(key, 0);
}
}
@@ -1065,7 +1145,7 @@
mBluetoothLe->supportsOffloadReconfiguration(&supports)));
result->addInt(key, supports ? 1 : 0);
} else {
- ALOGI("%s: no mBluetoothLe on %s", __func__, mInstance.c_str());
+ AUGMENT_LOG(I, "no mBluetoothLe");
result->addInt(key, 0);
}
}
@@ -1076,29 +1156,29 @@
std::optional<bool> a2dpEnabled;
std::optional<std::vector<VendorParameter>> reconfigureOffload;
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
- parameters, String8(AudioParameter::keyBtA2dpSuspended),
- [&a2dpEnabled](const String8& trueOrFalse) {
- if (trueOrFalse == AudioParameter::valueTrue) {
- a2dpEnabled = false; // 'suspended' == true
- return OK;
- } else if (trueOrFalse == AudioParameter::valueFalse) {
- a2dpEnabled = true; // 'suspended' == false
- return OK;
- }
- ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
- AudioParameter::keyBtA2dpSuspended, trueOrFalse.c_str());
- return BAD_VALUE;
- }));
+ parameters, String8(AudioParameter::keyBtA2dpSuspended),
+ [&a2dpEnabled, this](const String8& trueOrFalse) {
+ if (trueOrFalse == AudioParameter::valueTrue) {
+ a2dpEnabled = false; // 'suspended' == true
+ return OK;
+ } else if (trueOrFalse == AudioParameter::valueFalse) {
+ a2dpEnabled = true; // 'suspended' == false
+ return OK;
+ }
+ AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyBtA2dpSuspended, trueOrFalse.c_str());
+ return BAD_VALUE;
+ }));
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
- parameters, String8(AudioParameter::keyReconfigA2dp),
- [&](const String8& value) -> status_t {
- std::vector<VendorParameter> result;
- RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
- mVendorExt->parseBluetoothA2dpReconfigureOffload(
- std::string(value.c_str()), &result)));
- reconfigureOffload = std::move(result);
- return OK;
- }));
+ parameters, String8(AudioParameter::keyReconfigA2dp),
+ [&](const String8& value) -> status_t {
+ std::vector<VendorParameter> result;
+ RETURN_STATUS_IF_ERROR(
+ statusTFromBinderStatus(mVendorExt->parseBluetoothA2dpReconfigureOffload(
+ std::string(value.c_str()), &result)));
+ reconfigureOffload = std::move(result);
+ return OK;
+ }));
if (mBluetoothA2dp != nullptr && a2dpEnabled.has_value()) {
return statusTFromBinderStatus(mBluetoothA2dp->setEnabled(a2dpEnabled.value()));
}
@@ -1112,34 +1192,33 @@
status_t DeviceHalAidl::filterAndUpdateBtHfpParameters(AudioParameter &parameters) {
IBluetooth::HfpConfig hfpConfig;
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
- parameters, String8(AudioParameter::keyBtHfpEnable),
- [&hfpConfig](const String8& trueOrFalse) {
- if (trueOrFalse == AudioParameter::valueTrue) {
- hfpConfig.isEnabled = Boolean{ .value = true };
- return OK;
- } else if (trueOrFalse == AudioParameter::valueFalse) {
- hfpConfig.isEnabled = Boolean{ .value = false };
- return OK;
- }
- ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
- AudioParameter::keyBtHfpEnable, trueOrFalse.c_str());
- return BAD_VALUE;
- }));
+ parameters, String8(AudioParameter::keyBtHfpEnable),
+ [&hfpConfig, this](const String8& trueOrFalse) {
+ if (trueOrFalse == AudioParameter::valueTrue) {
+ hfpConfig.isEnabled = Boolean{.value = true};
+ return OK;
+ } else if (trueOrFalse == AudioParameter::valueFalse) {
+ hfpConfig.isEnabled = Boolean{.value = false};
+ return OK;
+ }
+ AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyBtHfpEnable, trueOrFalse.c_str());
+ return BAD_VALUE;
+ }));
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
- parameters, String8(AudioParameter::keyBtHfpSamplingRate),
- [&hfpConfig](int sampleRate) {
- return sampleRate > 0 ?
- hfpConfig.sampleRate = Int{ .value = sampleRate }, OK : BAD_VALUE;
- }));
+ parameters, String8(AudioParameter::keyBtHfpSamplingRate),
+ [&hfpConfig](int sampleRate) {
+ return sampleRate > 0 ? hfpConfig.sampleRate = Int{.value = sampleRate},
+ OK : BAD_VALUE;
+ }));
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
- parameters, String8(AudioParameter::keyBtHfpVolume),
- [&hfpConfig](int volume0to15) {
- if (volume0to15 >= 0 && volume0to15 <= 15) {
- hfpConfig.volume = Float{ .value = volume0to15 / 15.0f };
- return OK;
- }
- return BAD_VALUE;
- }));
+ parameters, String8(AudioParameter::keyBtHfpVolume), [&hfpConfig](int volume0to15) {
+ if (volume0to15 >= 0 && volume0to15 <= 15) {
+ hfpConfig.volume = Float{.value = volume0to15 / 15.0f};
+ return OK;
+ }
+ return BAD_VALUE;
+ }));
if (mBluetooth != nullptr && hfpConfig != IBluetooth::HfpConfig{}) {
IBluetooth::HfpConfig newHfpConfig;
return statusTFromBinderStatus(mBluetooth->setHfpConfig(hfpConfig, &newHfpConfig));
@@ -1151,39 +1230,39 @@
std::optional<bool> leEnabled;
std::optional<std::vector<VendorParameter>> reconfigureOffload;
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
- parameters, String8(AudioParameter::keyBtLeSuspended),
- [&leEnabled](const String8& trueOrFalse) {
- if (trueOrFalse == AudioParameter::valueTrue) {
- leEnabled = false; // 'suspended' == true
- return OK;
- } else if (trueOrFalse == AudioParameter::valueFalse) {
- leEnabled = true; // 'suspended' == false
- return OK;
- }
- ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
- AudioParameter::keyBtLeSuspended, trueOrFalse.c_str());
- return BAD_VALUE;
- }));
+ parameters, String8(AudioParameter::keyBtLeSuspended),
+ [&leEnabled, this](const String8& trueOrFalse) {
+ if (trueOrFalse == AudioParameter::valueTrue) {
+ leEnabled = false; // 'suspended' == true
+ return OK;
+ } else if (trueOrFalse == AudioParameter::valueFalse) {
+ leEnabled = true; // 'suspended' == false
+ return OK;
+ }
+ AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyBtLeSuspended, trueOrFalse.c_str());
+ return BAD_VALUE;
+ }));
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
- parameters, String8(AudioParameter::keyReconfigLe),
- [&](const String8& value) -> status_t {
- if (mVendorExt != nullptr) {
- std::vector<VendorParameter> result;
- RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
- mVendorExt->parseBluetoothLeReconfigureOffload(
- std::string(value.c_str()), &result)));
- reconfigureOffload = std::move(result);
- } else {
- reconfigureOffload = std::vector<VendorParameter>();
- }
- return OK;
- }));
+ parameters, String8(AudioParameter::keyReconfigLe),
+ [&](const String8& value) -> status_t {
+ if (mVendorExt != nullptr) {
+ std::vector<VendorParameter> result;
+ RETURN_STATUS_IF_ERROR(
+ statusTFromBinderStatus(mVendorExt->parseBluetoothLeReconfigureOffload(
+ std::string(value.c_str()), &result)));
+ reconfigureOffload = std::move(result);
+ } else {
+ reconfigureOffload = std::vector<VendorParameter>();
+ }
+ return OK;
+ }));
if (mBluetoothLe != nullptr && leEnabled.has_value()) {
return statusTFromBinderStatus(mBluetoothLe->setEnabled(leEnabled.value()));
}
if (mBluetoothLe != nullptr && reconfigureOffload.has_value()) {
- return statusTFromBinderStatus(mBluetoothLe->reconfigureOffload(
- reconfigureOffload.value()));
+ return statusTFromBinderStatus(
+ mBluetoothLe->reconfigureOffload(reconfigureOffload.value()));
}
return OK;
}
@@ -1191,53 +1270,53 @@
status_t DeviceHalAidl::filterAndUpdateBtScoParameters(AudioParameter &parameters) {
IBluetooth::ScoConfig scoConfig;
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
- parameters, String8(AudioParameter::keyBtSco),
- [&scoConfig](const String8& onOrOff) {
- if (onOrOff == AudioParameter::valueOn) {
- scoConfig.isEnabled = Boolean{ .value = true };
- return OK;
- } else if (onOrOff == AudioParameter::valueOff) {
- scoConfig.isEnabled = Boolean{ .value = false };
- return OK;
- }
- ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
- AudioParameter::keyBtSco, onOrOff.c_str());
- return BAD_VALUE;
- }));
+ parameters, String8(AudioParameter::keyBtSco),
+ [&scoConfig, this](const String8& onOrOff) {
+ if (onOrOff == AudioParameter::valueOn) {
+ scoConfig.isEnabled = Boolean{.value = true};
+ return OK;
+ } else if (onOrOff == AudioParameter::valueOff) {
+ scoConfig.isEnabled = Boolean{.value = false};
+ return OK;
+ }
+ AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyBtSco, onOrOff.c_str());
+ return BAD_VALUE;
+ }));
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
- parameters, String8(AudioParameter::keyBtScoHeadsetName),
- [&scoConfig](const String8& name) {
- scoConfig.debugName = name;
- return OK;
- }));
+ parameters, String8(AudioParameter::keyBtScoHeadsetName),
+ [&scoConfig](const String8& name) {
+ scoConfig.debugName = name;
+ return OK;
+ }));
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
- parameters, String8(AudioParameter::keyBtNrec),
- [&scoConfig](const String8& onOrOff) {
- if (onOrOff == AudioParameter::valueOn) {
- scoConfig.isNrecEnabled = Boolean{ .value = true };
- return OK;
- } else if (onOrOff == AudioParameter::valueOff) {
- scoConfig.isNrecEnabled = Boolean{ .value = false };
- return OK;
- }
- ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
- AudioParameter::keyBtNrec, onOrOff.c_str());
- return BAD_VALUE;
- }));
+ parameters, String8(AudioParameter::keyBtNrec),
+ [&scoConfig, this](const String8& onOrOff) {
+ if (onOrOff == AudioParameter::valueOn) {
+ scoConfig.isNrecEnabled = Boolean{.value = true};
+ return OK;
+ } else if (onOrOff == AudioParameter::valueOff) {
+ scoConfig.isNrecEnabled = Boolean{.value = false};
+ return OK;
+ }
+ AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyBtNrec, onOrOff.c_str());
+ return BAD_VALUE;
+ }));
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
- parameters, String8(AudioParameter::keyBtScoWb),
- [&scoConfig](const String8& onOrOff) {
- if (onOrOff == AudioParameter::valueOn) {
- scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO_WB;
- return OK;
- } else if (onOrOff == AudioParameter::valueOff) {
- scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO;
- return OK;
- }
- ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
- AudioParameter::keyBtScoWb, onOrOff.c_str());
- return BAD_VALUE;
- }));
+ parameters, String8(AudioParameter::keyBtScoWb),
+ [&scoConfig, this](const String8& onOrOff) {
+ if (onOrOff == AudioParameter::valueOn) {
+ scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO_WB;
+ return OK;
+ } else if (onOrOff == AudioParameter::valueOff) {
+ scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO;
+ return OK;
+ }
+ AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyBtScoWb, onOrOff.c_str());
+ return BAD_VALUE;
+ }));
if (mBluetooth != nullptr && scoConfig != IBluetooth::ScoConfig{}) {
IBluetooth::ScoConfig newScoConfig;
return statusTFromBinderStatus(mBluetooth->setScoConfig(scoConfig, &newScoConfig));
@@ -1247,34 +1326,41 @@
status_t DeviceHalAidl::filterAndUpdateScreenParameters(AudioParameter &parameters) {
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
- parameters, String8(AudioParameter::keyScreenState),
- [&](const String8& onOrOff) -> status_t {
- std::optional<bool> isTurnedOn;
- if (onOrOff == AudioParameter::valueOn) {
- isTurnedOn = true;
- } else if (onOrOff == AudioParameter::valueOff) {
- isTurnedOn = false;
- }
- if (!isTurnedOn.has_value()) {
- ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
- AudioParameter::keyScreenState, onOrOff.c_str());
- return BAD_VALUE;
- }
- return statusTFromBinderStatus(
- mModule->updateScreenState(isTurnedOn.value()));
- }));
+ parameters, String8(AudioParameter::keyScreenState),
+ [&, this](const String8& onOrOff) -> status_t {
+ std::optional<bool> isTurnedOn;
+ if (onOrOff == AudioParameter::valueOn) {
+ isTurnedOn = true;
+ } else if (onOrOff == AudioParameter::valueOff) {
+ isTurnedOn = false;
+ }
+ if (!isTurnedOn.has_value()) {
+ AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyScreenState, onOrOff.c_str());
+ return BAD_VALUE;
+ }
+ return statusTFromBinderStatus(mModule->updateScreenState(isTurnedOn.value()));
+ }));
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
- parameters, String8(AudioParameter::keyScreenRotation),
- [&](int rotationDegrees) -> status_t {
+ parameters, String8(AudioParameter::keyScreenRotation),
+ [&, this](int rotationDegrees) -> status_t {
IModule::ScreenRotation rotation;
switch (rotationDegrees) {
- case 0: rotation = IModule::ScreenRotation::DEG_0; break;
- case 90: rotation = IModule::ScreenRotation::DEG_90; break;
- case 180: rotation = IModule::ScreenRotation::DEG_180; break;
- case 270: rotation = IModule::ScreenRotation::DEG_270; break;
+ case 0:
+ rotation = IModule::ScreenRotation::DEG_0;
+ break;
+ case 90:
+ rotation = IModule::ScreenRotation::DEG_90;
+ break;
+ case 180:
+ rotation = IModule::ScreenRotation::DEG_180;
+ break;
+ case 270:
+ rotation = IModule::ScreenRotation::DEG_270;
+ break;
default:
- ALOGE("setParameters: parameter key \"%s\" has invalid value %d",
- AudioParameter::keyScreenRotation, rotationDegrees);
+ AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value %d",
+ AudioParameter::keyScreenRotation, rotationDegrees);
return BAD_VALUE;
}
return statusTFromBinderStatus(mModule->updateScreenRotation(rotation));
@@ -1286,49 +1372,48 @@
using TtyMode = ITelephony::TelecomConfig::TtyMode;
ITelephony::TelecomConfig telConfig;
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
- parameters, String8(AudioParameter::keyTtyMode),
- [&telConfig](const String8& mode) {
- if (mode == AudioParameter::valueTtyModeOff) {
- telConfig.ttyMode = TtyMode::OFF;
- return OK;
- } else if (mode == AudioParameter::valueTtyModeFull) {
- telConfig.ttyMode = TtyMode::FULL;
- return OK;
- } else if (mode == AudioParameter::valueTtyModeHco) {
- telConfig.ttyMode = TtyMode::HCO;
- return OK;
- } else if (mode == AudioParameter::valueTtyModeVco) {
- telConfig.ttyMode = TtyMode::VCO;
- return OK;
- }
- ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
- AudioParameter::keyTtyMode, mode.c_str());
- return BAD_VALUE;
- }));
+ parameters, String8(AudioParameter::keyTtyMode),
+ [&telConfig, this](const String8& mode) {
+ if (mode == AudioParameter::valueTtyModeOff) {
+ telConfig.ttyMode = TtyMode::OFF;
+ return OK;
+ } else if (mode == AudioParameter::valueTtyModeFull) {
+ telConfig.ttyMode = TtyMode::FULL;
+ return OK;
+ } else if (mode == AudioParameter::valueTtyModeHco) {
+ telConfig.ttyMode = TtyMode::HCO;
+ return OK;
+ } else if (mode == AudioParameter::valueTtyModeVco) {
+ telConfig.ttyMode = TtyMode::VCO;
+ return OK;
+ }
+ AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyTtyMode, mode.c_str());
+ return BAD_VALUE;
+ }));
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
- parameters, String8(AudioParameter::keyHacSetting),
- [&telConfig](const String8& onOrOff) {
- if (onOrOff == AudioParameter::valueHacOn) {
- telConfig.isHacEnabled = Boolean{ .value = true };
- return OK;
- } else if (onOrOff == AudioParameter::valueHacOff) {
- telConfig.isHacEnabled = Boolean{ .value = false };
- return OK;
- }
- ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
- AudioParameter::keyHacSetting, onOrOff.c_str());
- return BAD_VALUE;
- }));
+ parameters, String8(AudioParameter::keyHacSetting),
+ [&telConfig, this](const String8& onOrOff) {
+ if (onOrOff == AudioParameter::valueHacOn) {
+ telConfig.isHacEnabled = Boolean{.value = true};
+ return OK;
+ } else if (onOrOff == AudioParameter::valueHacOff) {
+ telConfig.isHacEnabled = Boolean{.value = false};
+ return OK;
+ }
+ AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+ AudioParameter::keyHacSetting, onOrOff.c_str());
+ return BAD_VALUE;
+ }));
if (mTelephony != nullptr && telConfig != ITelephony::TelecomConfig{}) {
ITelephony::TelecomConfig newTelConfig;
- return statusTFromBinderStatus(
- mTelephony->setTelecomConfig(telConfig, &newTelConfig));
+ return statusTFromBinderStatus(mTelephony->setTelecomConfig(telConfig, &newTelConfig));
}
return OK;
}
void DeviceHalAidl::clearCallbacks(void* cookie) {
- std::lock_guard l(mLock);
+ std::lock_guard l(mCallbacksLock);
mCallbacks.erase(cookie);
}
@@ -1361,18 +1446,21 @@
setCallbackImpl(cookie, &Callbacks::latency, cb);
}
-template<class C>
+template <class C>
sp<C> DeviceHalAidl::getCallbackImpl(void* cookie, wp<C> DeviceHalAidl::Callbacks::* field) {
- std::lock_guard l(mLock);
- if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
- return ((it->second).*field).promote();
+ wp<C> result;
+ {
+ std::lock_guard l(mCallbacksLock);
+ if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
+ result = (it->second).*field;
+ }
}
- return nullptr;
+ return result.promote();
}
template<class C>
void DeviceHalAidl::setCallbackImpl(
void* cookie, wp<C> DeviceHalAidl::Callbacks::* field, const sp<C>& cb) {
- std::lock_guard l(mLock);
+ std::lock_guard l(mCallbacksLock);
if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
(it->second).*field = cb;
}
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index d925b46..b41dbe0 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -119,7 +119,9 @@
// by releasing all references to the returned object.
status_t openOutputStream(audio_io_handle_t handle, audio_devices_t devices,
audio_output_flags_t flags, struct audio_config* config,
- const char* address, sp<StreamOutHalInterface>* outStream) override;
+ const char* address, sp<StreamOutHalInterface>* outStream,
+ const std::vector<playback_track_metadata_v7_t>&
+ sourceMetadata = {}) override;
// Creates and opens the audio hardware input stream. The stream is closed
// by releasing all references to the returned object.
@@ -233,7 +235,6 @@
// MicrophoneInfoProvider implementation
MicrophoneInfoProvider::Info const* getMicrophoneInfo() override;
- const std::string mInstance;
const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
const std::shared_ptr<::aidl::android::hardware::audio::core::ITelephony> mTelephony;
@@ -242,8 +243,11 @@
const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothLe> mBluetoothLe;
const std::shared_ptr<::aidl::android::hardware::audio::core::sounddose::ISoundDose> mSoundDose;
+ std::mutex mCallbacksLock;
+ // Use 'mCallbacksLock' only to implement exclusive access to 'mCallbacks'. Never hold it
+ // while making any calls.
+ std::map<void*, Callbacks> mCallbacks GUARDED_BY(mCallbacksLock);
std::mutex mLock;
- std::map<void*, Callbacks> mCallbacks GUARDED_BY(mLock);
std::set<audio_port_handle_t> mDeviceDisconnectionNotified GUARDED_BY(mLock);
Hal2AidlMapper mMapper GUARDED_BY(mLock);
LockedAccessor<Hal2AidlMapper> mMapperAccessor;
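The new mCallbacksLock comes with the rule stated in the comment above: hold it only while touching the map, never across calls. The updated getCallbackImpl in DeviceHalAidl.cpp follows that rule by copying the weak reference under the lock and promoting it after the lock is released. Below is a minimal standalone sketch of the same pattern, using std::weak_ptr/std::shared_ptr in place of Android's wp<>/sp<>; all names are illustrative only.

// Standalone sketch (not Android code): promote the weak reference only after the
// registry lock is dropped, so the callback object's destructor can never run
// while the lock is held.
#include <map>
#include <memory>
#include <mutex>

struct Callback { void onEvent() {} };

class CallbackRegistry {
  public:
    void set(void* cookie, const std::shared_ptr<Callback>& cb) {
        std::lock_guard l(mLock);
        mCallbacks[cookie] = cb;           // store only a weak reference
    }
    std::shared_ptr<Callback> get(void* cookie) {
        std::weak_ptr<Callback> result;
        {
            std::lock_guard l(mLock);      // held only for the map lookup
            if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
                result = it->second;
            }
        }
        return result.lock();              // promotion happens outside the lock
    }
  private:
    std::mutex mLock;
    std::map<void*, std::weak_ptr<Callback>> mCallbacks;
};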
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index ea4258c..0a262e4 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -259,7 +259,8 @@
audio_output_flags_t flags,
struct audio_config *config,
const char *address,
- sp<StreamOutHalInterface> *outStream) {
+ sp<StreamOutHalInterface> *outStream,
+ const std::vector<playback_track_metadata_v7_t>& sourceMetadata) {
TIME_CHECK();
if (mDevice == 0) return NO_INIT;
DeviceAddress hidlDevice;
@@ -273,6 +274,16 @@
return status;
}
+#if MAJOR_VERSION == 4
+ ::android::hardware::audio::CORE_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#else
+ ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#endif
+
+ RETURN_STATUS_IF_ERROR(CoreUtils::sourceMetadataFromHalV7(
+ sourceMetadata, true /*ignoreNonVendorTags*/, &hidlMetadata
+ ));
+
#if !(MAJOR_VERSION == 7 && MINOR_VERSION == 1)
//TODO: b/193496180 use spatializer flag at audio HAL when available
if ((flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0) {
@@ -294,7 +305,7 @@
#endif
handle, hidlDevice, hidlConfig, hidlFlags,
#if MAJOR_VERSION >= 4
- {} /* metadata */,
+ hidlMetadata /* metadata */,
#endif
[&](Result r, const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& result,
const AudioConfig& suggestedConfig) {
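The new sourceMetadata argument defaults to an empty vector, so existing callers keep compiling. A caller that wants the HAL to see track metadata at stream-open time could fill the vector roughly as below; the field names follow the legacy playback_track_metadata_v7 layout in system/audio.h and should be treated as an assumption if that header differs on your branch.

// Hedged sketch of building the sourceMetadata vector passed to openOutputStream().
#include <vector>
#include <system/audio.h>

std::vector<playback_track_metadata_v7_t> makeMediaSourceMetadata() {
    playback_track_metadata_v7_t track{};              // zero-init, tags left empty
    track.base.usage = AUDIO_USAGE_MEDIA;
    track.base.content_type = AUDIO_CONTENT_TYPE_MUSIC;
    track.base.gain = 1.0f;
    track.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
    return {track};                                     // one playback track
}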
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 1362dab..5f3e08c 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -73,7 +73,9 @@
// by releasing all references to the returned object.
status_t openOutputStream(audio_io_handle_t handle, audio_devices_t devices,
audio_output_flags_t flags, struct audio_config* config,
- const char* address, sp<StreamOutHalInterface>* outStream) override;
+ const char* address, sp<StreamOutHalInterface>* outStream,
+ const std::vector<playback_track_metadata_v7_t>&
+ sourceMetadata = {}) override;
// Creates and opens the audio hardware input stream. The stream is closed
// by releasing all references to the returned object.
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index a01ac4b..f352849 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -25,6 +25,7 @@
#include <Utils.h>
#include <utils/Log.h>
+#include "AidlUtils.h"
#include "Hal2AidlMapper.h"
using aidl::android::aidl_utils::statusTFromBinderStatus;
@@ -99,8 +100,7 @@
} // namespace
Hal2AidlMapper::Hal2AidlMapper(const std::string& instance, const std::shared_ptr<IModule>& module)
- : mInstance(instance), mModule(module) {
-}
+ : ConversionHelperAidl("Hal2AidlMapper", instance), mModule(module) {}
void Hal2AidlMapper::addStream(
const sp<StreamHalInterface>& stream, int32_t mixPortConfigId, int32_t patchId) {
@@ -137,9 +137,9 @@
// 'sinks' will not be updated because 'setAudioPatch' only needs IDs. Here we log
// the source arguments, where only the audio configuration and device specifications
// are relevant.
- ALOGD("%s: patch ID: %d, [disregard IDs] sources: %s, sinks: %s",
- __func__, *patchId, ::android::internal::ToString(sources).c_str(),
- ::android::internal::ToString(sinks).c_str());
+ AUGMENT_LOG(D, "patch ID: %d, [disregard IDs] sources: %s, sinks: %s", *patchId,
+ ::android::internal::ToString(sources).c_str(),
+ ::android::internal::ToString(sinks).c_str());
auto fillPortConfigs = [&](
const std::vector<AudioPortConfig>& configs,
const std::set<int32_t>& destinationPortIds,
@@ -152,18 +152,20 @@
// See b/315528763. Despite that the framework knows the actual format of
// the mix port, it still uses the original format. Luckily, there is
// the I/O handle which can be used to find the mix port.
- ALOGI("fillPortConfigs: retrying to find a mix port config with default "
- "configuration");
+ AUGMENT_LOG(I,
+ "fillPortConfigs: retrying to find a mix port config with"
+ " default configuration");
if (auto it = findPortConfig(std::nullopt, s.flags,
s.ext.get<AudioPortExt::mix>().handle);
it != mPortConfigs.end()) {
portConfig = it->second;
} else {
- const std::string flags = s.flags.has_value() ?
- s.flags->toString() : "<unspecified>";
- ALOGE("fillPortConfigs: existing port config for flags %s, handle %d "
- "not found in module %s", flags.c_str(),
- s.ext.get<AudioPortExt::mix>().handle, mInstance.c_str());
+ const std::string flags =
+ s.flags.has_value() ? s.flags->toString() : "<unspecified>";
+ AUGMENT_LOG(E,
+ "fillPortConfigs: existing port config for flags %s, "
+ " handle %d not found",
+ flags.c_str(), s.ext.get<AudioPortExt::mix>().handle);
return BAD_VALUE;
}
} else {
@@ -171,8 +173,8 @@
}
}
LOG_ALWAYS_FATAL_IF(portConfig.id == 0,
- "fillPortConfigs: initial config: %s, port config: %s",
- s.toString().c_str(), portConfig.toString().c_str());
+ "fillPortConfigs: initial config: %s, port config: %s",
+ s.toString().c_str(), portConfig.toString().c_str());
ids->push_back(portConfig.id);
if (portIds != nullptr) {
portIds->insert(portConfig.portId);
@@ -218,8 +220,8 @@
if (!created) {
requestedPatch.id = patch.id;
if (patch != requestedPatch) {
- ALOGI("%s: Updating transient patch. Current: %s, new: %s",
- __func__, patch.toString().c_str(), requestedPatch.toString().c_str());
+ AUGMENT_LOG(I, "Updating transient patch. Current: %s, new: %s",
+ patch.toString().c_str(), requestedPatch.toString().c_str());
// Since matching may be done by mix port only, update the patch if the device port
// config has changed.
patch = requestedPatch;
@@ -252,7 +254,7 @@
int32_t id = result->id;
if (requestedPortConfig.id != 0 && requestedPortConfig.id != id) {
LOG_ALWAYS_FATAL("%s: requested port config id %d changed to %d", __func__,
- requestedPortConfig.id, id);
+ requestedPortConfig.id, id);
}
auto [_, inserted] = mPortConfigs.insert_or_assign(id, *result);
@@ -272,8 +274,8 @@
RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(suggestedOrAppliedPortConfig,
&appliedPortConfig, created));
if (appliedPortConfig.id == 0) {
- ALOGE("%s: module %s did not apply suggested config %s", __func__,
- mInstance.c_str(), suggestedOrAppliedPortConfig.toString().c_str());
+ AUGMENT_LOG(E, "did not apply suggested config %s",
+ suggestedOrAppliedPortConfig.toString().c_str());
return NO_INIT;
}
*result = appliedPortConfig;
@@ -289,7 +291,7 @@
if (mDisconnectedPortReplacement.first == portId) {
const auto& port = mDisconnectedPortReplacement.second;
mPorts.insert(std::make_pair(port.id, port));
- ALOGD("%s: disconnected port replacement: %s", __func__, port.toString().c_str());
+ AUGMENT_LOG(D, "disconnected port replacement: %s", port.toString().c_str());
mDisconnectedPortReplacement = std::pair<int32_t, AudioPort>();
}
updateDynamicMixPorts();
@@ -331,8 +333,7 @@
if (auto portConfigIt = findPortConfig(device); portConfigIt == mPortConfigs.end()) {
auto portsIt = findPort(device);
if (portsIt == mPorts.end()) {
- ALOGE("%s: device port for device %s is not found in the module %s",
- __func__, device.toString().c_str(), mInstance.c_str());
+ AUGMENT_LOG(E, "device port for device %s is not found", device.toString().c_str());
return BAD_VALUE;
}
AudioPortConfig requestedPortConfig;
@@ -385,15 +386,15 @@
matchFlags.set<AudioIoFlags::Tag::input>(matchFlags.get<AudioIoFlags::Tag::input>() &
~makeBitPositionFlagMask(*optionalInputFlagsIt++));
portsIt = findPort(config, matchFlags, destinationPortIds);
- ALOGI("%s: mix port for config %s, flags %s was not found in the module %s, "
- "retried with flags %s", __func__, config.toString().c_str(),
- flags.value().toString().c_str(), mInstance.c_str(),
- matchFlags.toString().c_str());
+ AUGMENT_LOG(I,
+ "mix port for config %s, flags %s was not found"
+ "retried with flags %s",
+ config.toString().c_str(), flags.value().toString().c_str(),
+ matchFlags.toString().c_str());
}
if (portsIt == mPorts.end()) {
- ALOGE("%s: mix port for config %s, flags %s is not found in the module %s",
- __func__, config.toString().c_str(), matchFlags.toString().c_str(),
- mInstance.c_str());
+ AUGMENT_LOG(E, "mix port for config %s, flags %s is not found",
+ config.toString().c_str(), matchFlags.toString().c_str());
return BAD_VALUE;
}
AudioPortConfig requestedPortConfig;
@@ -408,9 +409,10 @@
}
return createOrUpdatePortConfig(requestedPortConfig, portConfig, created);
} else if (portConfigIt == mPortConfigs.end() && !flags.has_value()) {
- ALOGW("%s: mix port config for %s, handle %d not found in the module %s, "
- "and was not created as flags are not specified",
- __func__, config.toString().c_str(), ioHandle, mInstance.c_str());
+ AUGMENT_LOG(W,
+ "mix port config for %s, handle %d not found "
+ "and was not created as flags are not specified",
+ config.toString().c_str(), ioHandle);
return BAD_VALUE;
} else {
AudioPortConfig requestedPortConfig = portConfigIt->second;
@@ -440,8 +442,8 @@
if (const auto& p = requestedPortConfig;
!p.sampleRate.has_value() || !p.channelMask.has_value() ||
!p.format.has_value()) {
- ALOGW("%s: provided mix port config is not fully specified: %s",
- __func__, p.toString().c_str());
+ AUGMENT_LOG(W, "provided mix port config is not fully specified: %s",
+ p.toString().c_str());
return BAD_VALUE;
}
AudioConfig config;
@@ -470,14 +472,13 @@
requestedPortConfig.ext.get<Tag::device>().device, configPtr, gainConfigPtr,
portConfig, created);
} else {
- ALOGD("%s: device port config does not have audio or gain config specified", __func__);
+ AUGMENT_LOG(D, "device port config does not have audio or gain config specified");
return findOrCreateDevicePortConfig(
requestedPortConfig.ext.get<Tag::device>().device, nullptr /*config*/,
nullptr /*gainConfig*/, portConfig, created);
}
}
- ALOGW("%s: unsupported audio port config: %s",
- __func__, requestedPortConfig.toString().c_str());
+ AUGMENT_LOG(W, "unsupported audio port config: %s", requestedPortConfig.toString().c_str());
return BAD_VALUE;
}
@@ -486,8 +487,7 @@
*portConfig = it->second;
return OK;
}
- ALOGE("%s: could not find a device port config for device %s",
- __func__, device.toString().c_str());
+ AUGMENT_LOG(E, "could not find a device port config for device %s", device.toString().c_str());
return BAD_VALUE;
}
@@ -593,9 +593,10 @@
}
optionalFlags |= makeBitPositionFlagMask(*optionalOutputFlagsIt++);
result = std::find_if(mPorts.begin(), mPorts.end(), matcher);
- ALOGI("%s: port for config %s, flags %s was not found in the module %s, "
- "retried with excluding optional flags %#x", __func__, config.toString().c_str(),
- flags.toString().c_str(), mInstance.c_str(), optionalFlags);
+ AUGMENT_LOG(I,
+ "port for config %s, flags %s was not found "
+ "retried with excluding optional flags %#x",
+ config.toString().c_str(), flags.toString().c_str(), optionalFlags);
}
}
return result;
@@ -629,7 +630,7 @@
status_t Hal2AidlMapper::getAudioMixPort(int32_t ioHandle, AudioPort* port) {
auto it = findPortConfig(std::nullopt /*config*/, std::nullopt /*flags*/, ioHandle);
if (it == mPortConfigs.end()) {
- ALOGE("%s, cannot find mix port config for handle %u", __func__, ioHandle);
+ AUGMENT_LOG(E, "cannot find mix port config for handle %u", ioHandle);
return BAD_VALUE;
}
return updateAudioPort(it->second.portId, port);
@@ -638,21 +639,18 @@
status_t Hal2AidlMapper::getAudioPortCached(
const ::aidl::android::media::audio::common::AudioDevice& device,
::aidl::android::media::audio::common::AudioPort* port) {
-
if (auto portsIt = findPort(device); portsIt != mPorts.end()) {
*port = portsIt->second;
return OK;
}
- ALOGE("%s: device port for device %s is not found in the module %s",
- __func__, device.toString().c_str(), mInstance.c_str());
+ AUGMENT_LOG(E, "device port for device %s is not found", device.toString().c_str());
return BAD_VALUE;
}
status_t Hal2AidlMapper::initialize() {
std::vector<AudioPort> ports;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->getAudioPorts(&ports)));
- ALOGW_IF(ports.empty(), "%s: module %s returned an empty list of audio ports",
- __func__, mInstance.c_str());
+ AUGMENT_LOG_IF(W, ports.empty(), "returned an empty list of audio ports");
mDefaultInputPortId = mDefaultOutputPortId = -1;
const int defaultDeviceFlag = 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE;
for (auto it = ports.begin(); it != ports.end(); ) {
@@ -685,8 +683,9 @@
}
}
if (mRemoteSubmixIn.has_value() != mRemoteSubmixOut.has_value()) {
- ALOGE("%s: The configuration only has input or output remote submix device, must have both",
- __func__);
+ AUGMENT_LOG(E,
+ "The configuration only has input or output remote submix device, "
+ "must have both");
mRemoteSubmixIn.reset();
mRemoteSubmixOut.reset();
}
@@ -694,7 +693,7 @@
AudioPort connectedRSubmixIn = *mRemoteSubmixIn;
connectedRSubmixIn.ext.get<AudioPortExt::Tag::device>().device.address =
AUDIO_REMOTE_SUBMIX_DEVICE_ADDRESS;
- ALOGD("%s: connecting remote submix input", __func__);
+ AUGMENT_LOG(D, "connecting remote submix input");
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
connectedRSubmixIn, &connectedRSubmixIn)));
// The template port for the remote submix input couldn't be "default" because it is not
@@ -711,7 +710,7 @@
AudioPort tempConnectedRSubmixOut = *mRemoteSubmixOut;
tempConnectedRSubmixOut.ext.get<AudioPortExt::Tag::device>().device.address =
AUDIO_REMOTE_SUBMIX_DEVICE_ADDRESS;
- ALOGD("%s: temporarily connecting and disconnecting remote submix output", __func__);
+ AUGMENT_LOG(D, "temporarily connecting and disconnecting remote submix output");
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
tempConnectedRSubmixOut, &tempConnectedRSubmixOut)));
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->disconnectExternalDevice(
@@ -720,8 +719,8 @@
ports.push_back(std::move(tempConnectedRSubmixOut));
}
- ALOGI("%s: module %s default port ids: input %d, output %d",
- __func__, mInstance.c_str(), mDefaultInputPortId, mDefaultOutputPortId);
+ AUGMENT_LOG(I, "default port ids: input %d, output %d", mDefaultInputPortId,
+ mDefaultOutputPortId);
std::transform(ports.begin(), ports.end(), std::inserter(mPorts, mPorts.end()),
[](const auto& p) { return std::make_pair(p.id, p); });
RETURN_STATUS_IF_ERROR(updateRoutes());
@@ -774,10 +773,10 @@
int32_t ioHandle, const AudioDevice& device, const AudioIoFlags& flags,
AudioSource source, Cleanups* cleanups, AudioConfig* config,
AudioPortConfig* mixPortConfig, AudioPatch* patch) {
- ALOGD("%p %s: handle %d, device %s, flags %s, source %s, config %s, mix port config %s",
- this, __func__, ioHandle, device.toString().c_str(),
- flags.toString().c_str(), toString(source).c_str(),
- config->toString().c_str(), mixPortConfig->toString().c_str());
+ AUGMENT_LOG(D, "handle %d, device %s, flags %s, source %s, config %s, mixport config %s",
+ ioHandle, device.toString().c_str(), flags.toString().c_str(),
+ toString(source).c_str(), config->toString().c_str(),
+ mixPortConfig->toString().c_str());
resetUnusedPatchesAndPortConfigs();
const AudioConfig initialConfig = *config;
// Find / create AudioPortConfigs for the device port and the mix port,
@@ -800,8 +799,8 @@
// module can't perform audio stream conversions.
AudioConfig deviceConfig = initialConfig;
if (setConfigFromPortConfig(&deviceConfig, devicePortConfig)->base != initialConfig.base) {
- ALOGD("%s: retrying with device port config: %s", __func__,
- devicePortConfig.toString().c_str());
+ AUGMENT_LOG(D, "retrying with device port config: %s",
+ devicePortConfig.toString().c_str());
status = prepareToOpenStreamHelper(ioHandle, devicePortConfig.portId,
devicePortConfig.id, flags, source, initialConfig, cleanups,
&deviceConfig, mixPortConfig, patch);
@@ -845,8 +844,8 @@
retryWithSuggestedConfig = true;
}
if (mixPortConfig->id == 0 && retryWithSuggestedConfig) {
- ALOGD("%s: retrying to find/create a mix port config using config %s", __func__,
- config->toString().c_str());
+ AUGMENT_LOG(D, "retrying to find/create a mix port config using config %s",
+ config->toString().c_str());
RETURN_STATUS_IF_ERROR(findOrCreateMixPortConfig(*config, flags, ioHandle, source,
std::set<int32_t>{devicePortId}, mixPortConfig, &created));
if (created) {
@@ -855,8 +854,8 @@
setConfigFromPortConfig(config, *mixPortConfig);
}
if (mixPortConfig->id == 0) {
- ALOGD("%p %s: returning suggested config for the stream: %s", this, __func__,
- config->toString().c_str());
+ AUGMENT_LOG(D, "returning suggested config for the stream: %s",
+ config->toString().c_str());
return OK;
}
if (isInput) {
@@ -894,9 +893,10 @@
// Note: does not reset port configs.
status_t Hal2AidlMapper::releaseAudioPatch(Patches::iterator it) {
const int32_t patchId = it->first;
+ AUGMENT_LOG(D, "patchId %d", patchId);
if (ndk::ScopedAStatus status = mModule->resetAudioPatch(patchId); !status.isOk()) {
- ALOGE("%s: error while resetting patch %d: %s",
- __func__, patchId, status.getDescription().c_str());
+ AUGMENT_LOG(E, "error while resetting patch %d: %s", patchId,
+ status.getDescription().c_str());
return statusTFromBinderStatus(status);
}
mPatches.erase(it);
@@ -915,7 +915,7 @@
if (auto it = mPatches.find(patchId); it != mPatches.end()) {
releaseAudioPatch(it);
} else {
- ALOGE("%s: patch id %d not found", __func__, patchId);
+ AUGMENT_LOG(E, "patch id %d not found", patchId);
result = BAD_VALUE;
}
}
@@ -925,16 +925,17 @@
void Hal2AidlMapper::resetPortConfig(int32_t portConfigId) {
if (auto it = mPortConfigs.find(portConfigId); it != mPortConfigs.end()) {
+ AUGMENT_LOG(D, "%s", it->second.toString().c_str());
if (ndk::ScopedAStatus status = mModule->resetAudioPortConfig(portConfigId);
!status.isOk()) {
- ALOGE("%s: error while resetting port config %d: %s",
- __func__, portConfigId, status.getDescription().c_str());
+ AUGMENT_LOG(E, "error while resetting port config %d: %s", portConfigId,
+ status.getDescription().c_str());
return;
}
mPortConfigs.erase(it);
return;
}
- ALOGE("%s: port config id %d not found", __func__, portConfigId);
+ AUGMENT_LOG(E, "port config id %d not found", portConfigId);
}
void Hal2AidlMapper::resetUnusedPatchesAndPortConfigs() {
@@ -979,6 +980,8 @@
}
status_t Hal2AidlMapper::setDevicePortConnectedState(const AudioPort& devicePort, bool connected) {
+ AUGMENT_LOG(D, "state %s, device %s", (connected ? "connected" : "disconnected"),
+ devicePort.toString().c_str());
resetUnusedPatchesAndPortConfigs();
if (connected) {
AudioDevice matchDevice = devicePort.ext.get<AudioPortExt::device>().device;
@@ -1009,8 +1012,7 @@
// port not found in every one of them.
return BAD_VALUE;
} else {
- ALOGD("%s: device port for device %s found in the module %s",
- __func__, matchDevice.toString().c_str(), mInstance.c_str());
+ AUGMENT_LOG(D, "device port for device %s found", matchDevice.toString().c_str());
}
templatePort = portsIt->second;
}
@@ -1021,10 +1023,9 @@
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
connectedPort, &connectedPort)));
const auto [it, inserted] = mPorts.insert(std::make_pair(connectedPort.id, connectedPort));
- LOG_ALWAYS_FATAL_IF(!inserted,
- "%s: module %s, duplicate port ID received from HAL: %s, existing port: %s",
- __func__, mInstance.c_str(), connectedPort.toString().c_str(),
- it->second.toString().c_str());
+ LOG_ALWAYS_FATAL_IF(
+ !inserted, "%s duplicate port ID received from HAL: %s, existing port: %s",
+ __func__, connectedPort.toString().c_str(), it->second.toString().c_str());
mConnectedPorts.insert(connectedPort.id);
if (erasePortAfterConnectionIt != mPorts.end()) {
mPorts.erase(erasePortAfterConnectionIt);
@@ -1037,8 +1038,7 @@
// port not found in every one of them.
return BAD_VALUE;
} else {
- ALOGD("%s: device port for device %s found in the module %s",
- __func__, matchDevice.toString().c_str(), mInstance.c_str());
+ AUGMENT_LOG(D, "device port for device %s found", matchDevice.toString().c_str());
}
// Disconnection of remote submix out with address "0" is a special case. We need to replace
@@ -1094,8 +1094,8 @@
}
portIt->second = *port;
} else {
- ALOGW("%s, port(%d) returned successfully from the HAL but not it is not cached",
- __func__, portId);
+ AUGMENT_LOG(W, "port(%d) returned successfully from the HAL but not it is not cached",
+ portId);
}
}
return status;
@@ -1104,8 +1104,7 @@
status_t Hal2AidlMapper::updateRoutes() {
RETURN_STATUS_IF_ERROR(
statusTFromBinderStatus(mModule->getAudioRoutes(&mRoutes)));
- ALOGW_IF(mRoutes.empty(), "%s: module %s returned an empty list of audio routes",
- __func__, mInstance.c_str());
+ AUGMENT_LOG_IF(W, mRoutes.empty(), "returned an empty list of audio routes");
if (mRemoteSubmixIn.has_value()) {
// Remove mentions of the template remote submix input from routes.
int32_t rSubmixInId = mRemoteSubmixIn->id;
@@ -1146,7 +1145,7 @@
updateAudioPort(portId, &it->second);
} else {
// This must not happen
- ALOGE("%s, cannot find port for id=%d", __func__, portId);
+ AUGMENT_LOG(E, "cannot find port for id=%d", portId);
}
}
}
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.h b/media/libaudiohal/impl/Hal2AidlMapper.h
index 710b43e..2548752 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.h
+++ b/media/libaudiohal/impl/Hal2AidlMapper.h
@@ -26,6 +26,7 @@
#include <media/AidlConversionUtil.h>
#include "Cleanups.h"
+#include "ConversionHelperAidl.h"
namespace android {
@@ -41,7 +42,7 @@
// but still consider some of the outputs to be valid (for example, in 'open{Input|Output}Stream'),
// 'Hal2AidlMapper' follows the Binder convention. It means that if a method returns an error,
// the outputs may not be initialized at all and should not be considered by the caller.
-class Hal2AidlMapper {
+class Hal2AidlMapper : public ConversionHelperAidl {
public:
using Cleanups = Cleanups<Hal2AidlMapper>;
@@ -135,7 +136,6 @@
enum PatchMatch { MATCH_SOURCES, MATCH_SINKS, MATCH_BOTH };
- const std::string mInstance;
const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
bool audioDeviceMatches(const ::aidl::android::media::audio::common::AudioDevice& device,
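The comment above the class describes the Binder-style convention Hal2AidlMapper keeps after this change: when a method returns an error, its output parameters may be left untouched and must be ignored by the caller. A tiny self-contained illustration of that caller-side discipline follows; all names in it are made up for the example and are not part of this change.

// Illustration only: the out-parameter is written exclusively on the OK path,
// so the caller must never read it after a non-OK return.
#include <cstdint>

using status_t = int32_t;
constexpr status_t OK = 0;
constexpr status_t BAD_VALUE = -22;

status_t parsePositive(int32_t input, int32_t* out) {
    if (input <= 0) return BAD_VALUE;   // 'out' deliberately left untouched
    *out = input;
    return OK;
}

int32_t consume(int32_t input) {
    int32_t value;                       // never read unless the call returns OK
    if (parsePositive(input, &value) != OK) {
        return -1;                       // error path must not look at 'value'
    }
    return value;
}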
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 7a80a6a..01ebe1b 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -31,6 +31,7 @@
#include <system/audio.h>
#include <utils/Log.h>
+#include "AidlUtils.h"
#include "DeviceHalAidl.h"
#include "EffectHalAidl.h"
#include "StreamHalAidl.h"
@@ -72,12 +73,12 @@
return streamCommon;
}
-StreamHalAidl::StreamHalAidl(
- std::string_view className, bool isInput, const audio_config& config,
- int32_t nominalLatency, StreamContextAidl&& context,
- const std::shared_ptr<IStreamCommon>& stream,
- const std::shared_ptr<IHalAdapterVendorExtension>& vext)
- : ConversionHelperAidl(className),
+StreamHalAidl::StreamHalAidl(std::string_view className, bool isInput, const audio_config& config,
+ int32_t nominalLatency, StreamContextAidl&& context,
+ const std::shared_ptr<IStreamCommon>& stream,
+ const std::shared_ptr<IHalAdapterVendorExtension>& vext)
+ : ConversionHelperAidl(className, std::string(isInput ? "in" : "out") + "|ioHandle:" +
+ std::to_string(context.getIoHandle())),
mIsInput(isInput),
mConfig(configToBase(config)),
mContext(std::move(context)),
@@ -88,7 +89,7 @@
mContext.getBufferDurationMs(mConfig.sample_rate))
* NANOS_PER_MILLISECOND)
{
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
{
std::lock_guard l(mLock);
mLastReply.latencyMs = nominalLatency;
@@ -103,15 +104,15 @@
}
StreamHalAidl::~StreamHalAidl() {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
if (mStream != nullptr) {
ndk::ScopedAStatus status = mStream->close();
- ALOGE_IF(!status.isOk(), "%s: status %s", __func__, status.getDescription().c_str());
+ AUGMENT_LOG_IF(E, !status.isOk(), "status %s", status.getDescription().c_str());
}
}
status_t StreamHalAidl::getBufferSize(size_t *size) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
if (size == nullptr) {
return BAD_VALUE;
}
@@ -120,11 +121,12 @@
return NO_INIT;
}
*size = mContext.getBufferSizeBytes();
+ AUGMENT_LOG(I, "size: %zu", *size);
return OK;
}
status_t StreamHalAidl::getAudioProperties(audio_config_base_t *configBase) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
if (configBase == nullptr) {
return BAD_VALUE;
}
@@ -134,10 +136,11 @@
}
status_t StreamHalAidl::setParameters(const String8& kvPairs) {
+ AUGMENT_LOG(V);
TIME_CHECK();
if (!mStream) return NO_INIT;
AudioParameter parameters(kvPairs);
- ALOGD("%s: parameters: %s", __func__, parameters.toString().c_str());
+ AUGMENT_LOG(D, "parameters: %s", parameters.toString().c_str());
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
parameters, String8(AudioParameter::keyStreamHwAvSync),
@@ -148,6 +151,7 @@
}
status_t StreamHalAidl::getParameters(const String8& keys __unused, String8 *values) {
+ AUGMENT_LOG(V);
TIME_CHECK();
if (!mStream) return NO_INIT;
if (values == nullptr) {
@@ -159,7 +163,7 @@
}
status_t StreamHalAidl::getFrameSize(size_t *size) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
if (size == nullptr) {
return BAD_VALUE;
}
@@ -171,7 +175,7 @@
}
status_t StreamHalAidl::addEffect(sp<EffectHalInterface> effect) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
if (!mStream) return NO_INIT;
if (effect == nullptr) {
@@ -182,7 +186,7 @@
}
status_t StreamHalAidl::removeEffect(sp<EffectHalInterface> effect) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
if (!mStream) return NO_INIT;
if (effect == nullptr) {
@@ -193,7 +197,7 @@
}
status_t StreamHalAidl::standby() {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
if (!mStream) return NO_INIT;
const auto state = getState();
@@ -206,8 +210,8 @@
if (reply.state != StreamDescriptor::State::PAUSED &&
reply.state != StreamDescriptor::State::DRAIN_PAUSED &&
reply.state != StreamDescriptor::State::TRANSFER_PAUSED) {
- ALOGE("%s: unexpected stream state: %s (expected PAUSED)",
- __func__, toString(reply.state).c_str());
+ AUGMENT_LOG(E, "unexpected stream state: %s (expected PAUSED)",
+ toString(reply.state).c_str());
return INVALID_OPERATION;
}
FALLTHROUGH_INTENDED;
@@ -217,8 +221,8 @@
if (mIsInput) return flush();
RETURN_STATUS_IF_ERROR(flush(&reply));
if (reply.state != StreamDescriptor::State::IDLE) {
- ALOGE("%s: unexpected stream state: %s (expected IDLE)",
- __func__, toString(reply.state).c_str());
+ AUGMENT_LOG(E, "unexpected stream state: %s (expected IDLE)",
+ toString(reply.state).c_str());
return INVALID_OPERATION;
}
FALLTHROUGH_INTENDED;
@@ -226,22 +230,22 @@
RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::standby>(),
&reply, true /*safeFromNonWorkerThread*/));
if (reply.state != StreamDescriptor::State::STANDBY) {
- ALOGE("%s: unexpected stream state: %s (expected STANDBY)",
- __func__, toString(reply.state).c_str());
+ AUGMENT_LOG(E, "unexpected stream state: %s (expected STANDBY)",
+ toString(reply.state).c_str());
return INVALID_OPERATION;
}
FALLTHROUGH_INTENDED;
case StreamDescriptor::State::STANDBY:
return OK;
default:
- ALOGE("%s: not supported from %s stream state %s",
- __func__, mIsInput ? "input" : "output", toString(state).c_str());
+ AUGMENT_LOG(E, "not supported from %s stream state %s", mIsInput ? "input" : "output",
+ toString(state).c_str());
return INVALID_OPERATION;
}
}
status_t StreamHalAidl::dump(int fd, const Vector<String16>& args) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
if (!mStream) return NO_INIT;
status_t status = mStream->dump(fd, Args(args).args(), args.size());
@@ -250,7 +254,7 @@
}
status_t StreamHalAidl::start() {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
if (!mStream) return NO_INIT;
if (!mContext.isMmapped()) {
@@ -263,8 +267,8 @@
RETURN_STATUS_IF_ERROR(
sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
if (reply.state != StreamDescriptor::State::IDLE) {
- ALOGE("%s: unexpected stream state: %s (expected IDLE)",
- __func__, toString(reply.state).c_str());
+ AUGMENT_LOG(E, "unexpected stream state: %s (expected IDLE)",
+ toString(reply.state).c_str());
return INVALID_OPERATION;
}
FALLTHROUGH_INTENDED;
@@ -272,8 +276,8 @@
RETURN_STATUS_IF_ERROR(
sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true));
if (reply.state != StreamDescriptor::State::ACTIVE) {
- ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
- __func__, toString(reply.state).c_str());
+ AUGMENT_LOG(E, "unexpected stream state: %s (expected ACTIVE)",
+ toString(reply.state).c_str());
return INVALID_OPERATION;
}
FALLTHROUGH_INTENDED;
@@ -283,20 +287,20 @@
RETURN_STATUS_IF_ERROR(
sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
if (reply.state != StreamDescriptor::State::ACTIVE) {
- ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
- __func__, toString(reply.state).c_str());
+ AUGMENT_LOG(E, "unexpected stream state: %s (expected ACTIVE)",
+ toString(reply.state).c_str());
return INVALID_OPERATION;
}
return OK;
default:
- ALOGE("%s: not supported from %s stream state %s",
- __func__, mIsInput ? "input" : "output", toString(reply.state).c_str());
+ AUGMENT_LOG(E, "not supported from %s stream state %s", mIsInput ? "input" : "output",
+ toString(reply.state).c_str());
return INVALID_OPERATION;
}
}
status_t StreamHalAidl::stop() {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
if (!mStream) return NO_INIT;
if (!mContext.isMmapped()) {
@@ -313,28 +317,28 @@
return flush();
} else if (state != StreamDescriptor::State::IDLE &&
state != StreamDescriptor::State::STANDBY) {
- ALOGE("%s: not supported from %s stream state %s",
- __func__, mIsInput ? "input" : "output", toString(state).c_str());
+ AUGMENT_LOG(E, "not supported from %s stream state %s", mIsInput ? "input" : "output",
+ toString(state).c_str());
return INVALID_OPERATION;
}
return OK;
}
status_t StreamHalAidl::getLatency(uint32_t *latency) {
- ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(V);
if (!mStream) return NO_INIT;
StreamDescriptor::Reply reply;
RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
*latency = std::clamp(std::max<int32_t>(0, reply.latencyMs), 1, 3000);
- ALOGW_IF(reply.latencyMs != static_cast<int32_t>(*latency),
- "Suspicious latency value reported by HAL: %d, clamped to %u", reply.latencyMs,
- *latency);
+ AUGMENT_LOG_IF(W, reply.latencyMs != static_cast<int32_t>(*latency),
+ "Suspicious latency value reported by HAL: %d, clamped to %u", reply.latencyMs,
+ *latency);
return OK;
}
status_t StreamHalAidl::getObservablePosition(int64_t* frames, int64_t* timestamp,
StatePositions* statePositions) {
- ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(V);
if (!mStream) return NO_INIT;
StreamDescriptor::Reply reply;
RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions));
@@ -344,7 +348,7 @@
}
status_t StreamHalAidl::getHardwarePosition(int64_t *frames, int64_t *timestamp) {
- ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(V);
if (!mStream) return NO_INIT;
StreamDescriptor::Reply reply;
RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
@@ -354,7 +358,7 @@
}
status_t StreamHalAidl::getXruns(int32_t *frames) {
- ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(V);
if (!mStream) return NO_INIT;
StreamDescriptor::Reply reply;
RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
@@ -363,7 +367,7 @@
}
status_t StreamHalAidl::transfer(void *buffer, size_t bytes, size_t *transferred) {
- ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(V);
// TIME_CHECK(); // TODO(b/243839867) reenable only when optimized.
if (!mStream || mContext.getDataMQ() == nullptr) return NO_INIT;
mWorkerTid.store(gettid(), std::memory_order_release);
@@ -375,8 +379,8 @@
StreamDescriptor::Reply reply;
RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply));
if (reply.state != StreamDescriptor::State::IDLE) {
- ALOGE("%s: failed to get the stream out of standby, actual state: %s",
- __func__, toString(reply.state).c_str());
+ AUGMENT_LOG(E, "failed to get the stream out of standby, actual state: %s",
+ toString(reply.state).c_str());
return INVALID_OPERATION;
}
}
@@ -390,7 +394,7 @@
StreamDescriptor::Command::make<StreamDescriptor::Command::Tag::burst>(bytes);
if (!mIsInput) {
if (!mContext.getDataMQ()->write(static_cast<const int8_t*>(buffer), bytes)) {
- ALOGE("%s: failed to write %zu bytes to data MQ", __func__, bytes);
+ AUGMENT_LOG(E, "failed to write %zu bytes to data MQ", bytes);
return NOT_ENOUGH_DATA;
}
}
@@ -403,7 +407,7 @@
__func__, *transferred, bytes);
if (auto toRead = mContext.getDataMQ()->availableToRead(&fmqError, &fmqErrorMsg);
toRead != 0 && !mContext.getDataMQ()->read(static_cast<int8_t*>(buffer), toRead)) {
- ALOGE("%s: failed to read %zu bytes to data MQ", __func__, toRead);
+ AUGMENT_LOG(E, "failed to read %zu bytes to data MQ", toRead);
return NOT_ENOUGH_DATA;
}
}
@@ -414,7 +418,7 @@
}
status_t StreamHalAidl::pause(StreamDescriptor::Reply* reply) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
if (!mStream) return NO_INIT;
@@ -423,14 +427,14 @@
makeHalCommand<HalCommand::Tag::pause>(), reply,
true /*safeFromNonWorkerThread*/); // The workers stops its I/O activity first.
} else {
- ALOGD("%s: already stream in one of the PAUSED kind of states, current state: %s", __func__,
- toString(state).c_str());
+ AUGMENT_LOG(D, "already stream in one of the PAUSED kind of states, current state: %s",
+ toString(state).c_str());
return OK;
}
}
status_t StreamHalAidl::resume(StreamDescriptor::Reply* reply) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
if (!mStream) return NO_INIT;
if (mIsInput) {
@@ -444,8 +448,8 @@
RETURN_STATUS_IF_ERROR(
sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply));
if (innerReply->state != StreamDescriptor::State::ACTIVE) {
- ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
- __func__, toString(innerReply->state).c_str());
+ AUGMENT_LOG(E, "unexpected stream state: %s (expected ACTIVE)",
+ toString(innerReply->state).c_str());
return INVALID_OPERATION;
}
return OK;
@@ -456,18 +460,18 @@
} else if (state == StreamDescriptor::State::ACTIVE ||
state == StreamDescriptor::State::TRANSFERRING ||
state == StreamDescriptor::State::DRAINING) {
- ALOGD("%s: already in stream state: %s", __func__, toString(state).c_str());
+ AUGMENT_LOG(D, "already in stream state: %s", toString(state).c_str());
return OK;
} else {
- ALOGE("%s: unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
- __func__, toString(state).c_str());
+ AUGMENT_LOG(E, "unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
+ toString(state).c_str());
return INVALID_OPERATION;
}
}
}
status_t StreamHalAidl::drain(bool earlyNotify, StreamDescriptor::Reply* reply) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
if (!mStream) return NO_INIT;
return sendCommand(makeHalCommand<HalCommand::Tag::drain>(
@@ -478,7 +482,7 @@
}
status_t StreamHalAidl::flush(StreamDescriptor::Reply* reply) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
if (!mStream) return NO_INIT;
@@ -487,17 +491,17 @@
makeHalCommand<HalCommand::Tag::flush>(), reply,
true /*safeFromNonWorkerThread*/); // The workers stops its I/O activity first.
} else if (isInPlayOrRecordState(state)) {
- ALOGE("%s: found stream in non-flushable state: %s", __func__, toString(state).c_str());
+ AUGMENT_LOG(E, "found stream in non-flushable state: %s", toString(state).c_str());
return INVALID_OPERATION;
} else {
- ALOGD("%s: already stream in one of the flushable state: current state: %s", __func__,
- toString(state).c_str());
+ AUGMENT_LOG(D, "already stream in one of the flushable state: current state: %s",
+ toString(state).c_str());
return OK;
}
}
status_t StreamHalAidl::exit() {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
if (!mStream) return NO_INIT;
return statusTFromBinderStatus(mStream->prepareToClose());
@@ -510,7 +514,7 @@
sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
nullptr, true /*safeFromNonWorkerThread */);
} else {
- ALOGW("%s: unexpected onTransferReady in the state %s", __func__, toString(state).c_str());
+ AUGMENT_LOG(W, "unexpected onTransferReady in the state %s", toString(state).c_str());
}
}
@@ -525,19 +529,19 @@
std::lock_guard l(mLock);
mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
} else {
- ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
+ AUGMENT_LOG(W, "unexpected onDrainReady in the state %s", toString(state).c_str());
}
}
void StreamHalAidl::onAsyncError() {
std::lock_guard l(mLock);
- ALOGW("%s: received in the state %s", __func__, toString(mLastReply.state).c_str());
+ AUGMENT_LOG(W, "received in the state %s", toString(mLastReply.state).c_str());
mLastReply.state = StreamDescriptor::State::ERROR;
}
status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
struct audio_mmap_buffer_info *info) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
if (!mStream) return NO_INIT;
if (!mContext.isMmapped()) {
@@ -597,15 +601,14 @@
{
std::lock_guard l(mCommandReplyLock);
if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
- ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
+ AUGMENT_LOG(E, "failed to write command %s to MQ", command.toString().c_str());
return NOT_ENOUGH_DATA;
}
if (reply == nullptr) {
reply = &localReply;
}
if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
- ALOGE("%s: failed to read from reply MQ, command %s",
- __func__, command.toString().c_str());
+ AUGMENT_LOG(E, "failed to read from reply MQ, command %s", command.toString().c_str());
return NOT_ENOUGH_DATA;
}
{
@@ -642,8 +645,8 @@
case STATUS_INVALID_OPERATION: return INVALID_OPERATION;
case STATUS_NOT_ENOUGH_DATA: return NOT_ENOUGH_DATA;
default:
- ALOGE("%s: unexpected status %d returned for command %s",
- __func__, reply->status, command.toString().c_str());
+ AUGMENT_LOG(E, "unexpected status %d returned for command %s", reply->status,
+ command.toString().c_str());
return INVALID_OPERATION;
}
}
@@ -708,10 +711,10 @@
if (!mStream) return NO_INIT;
AudioParameter parameters(kvPairs);
- ALOGD("%s: parameters: \"%s\"", __func__, parameters.toString().c_str());
+ AUGMENT_LOG(D, "parameters: \"%s\"", parameters.toString().c_str());
if (status_t status = filterAndUpdateOffloadMetadata(parameters); status != OK) {
- ALOGW("%s: filtering or updating offload metadata failed: %d", __func__, status);
+ AUGMENT_LOG(W, "filtering or updating offload metadata failed: %d", status);
}
return StreamHalAidl::setParameters(parameters.toString());
@@ -722,6 +725,7 @@
}
status_t StreamOutHalAidl::setVolume(float left, float right) {
+ AUGMENT_LOG(V, "left %f right %f", left, right);
TIME_CHECK();
if (!mStream) return NO_INIT;
size_t channelCount = audio_channel_count_from_out_mask(mConfig.channel_mask);
@@ -775,11 +779,11 @@
}
status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
- ALOGD("%p %s", this, __func__);
+ AUGMENT_LOG(D);
TIME_CHECK();
if (!mStream) return NO_INIT;
if (!mContext.isAsynchronous()) {
- ALOGE("%s: the callback is intended for asynchronous streams only", __func__);
+ AUGMENT_LOG(E, "the callback is intended for asynchronous streams only");
return INVALID_OPERATION;
}
mClientCallback = callback;
@@ -818,7 +822,7 @@
if (!mStream) return NO_INIT;
if (const auto state = getState(); isInDrainedState(state)) {
- ALOGD("%p %s stream already in %s", this, __func__, toString(state).c_str());
+ AUGMENT_LOG(D, "stream already in %s state", toString(state).c_str());
if (mContext.isAsynchronous()) onDrainReady();
return OK;
}
@@ -851,7 +855,7 @@
}
status_t StreamOutHalAidl::presentationComplete() {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ AUGMENT_LOG(D);
return OK;
}
@@ -1042,10 +1046,10 @@
updateMetadata = true;
}
if (updateMetadata) {
- ALOGD("%s set offload metadata %s", __func__, mOffloadMetadata.toString().c_str());
+ AUGMENT_LOG(D, "set offload metadata %s", mOffloadMetadata.toString().c_str());
if (status_t status = statusTFromBinderStatus(
mStream->updateOffloadMetadata(mOffloadMetadata)); status != OK) {
- ALOGE("%s: updateOffloadMetadata failed %d", __func__, status);
+ AUGMENT_LOG(E, "updateOffloadMetadata failed %d", status);
return status;
}
}
@@ -1132,7 +1136,7 @@
// Note: info.portId is not filled because it's a bit of framework info.
result.push_back(std::move(info));
} else {
- ALOGE("%s: no static info for active microphone with id '%s'", __func__, d.id.c_str());
+ AUGMENT_LOG(E, "no static info for active microphone with id '%s'", d.id.c_str());
}
}
*microphones = std::move(result);
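Editor note on the logging change above: every ALOGx call that used to hand-roll a "%p %s::%s" prefix from `this`, getClassName() and __func__ now goes through AUGMENT_LOG / AUGMENT_LOG_IF. The real macro definitions live elsewhere in libaudiohal and are not part of this excerpt; the sketch below is only a hypothetical reconstruction that satisfies the call shapes seen here (AUGMENT_LOG(D), AUGMENT_LOG(E, fmt, args...), AUGMENT_LOG_IF(W, cond, fmt, args...)).

    // Hypothetical reconstruction, NOT the actual libaudiohal definition.
    // The zero-argument form AUGMENT_LOG(D) relies on C++20 variadic-macro rules
    // (or the GNU extension); the first variadic argument must be a string literal.
    #include <android-base/stringprintf.h>
    #include <log/log.h>

    #define AUGMENT_LOG(LEVEL, ...)                                           \
        ALOG##LEVEL("%s: %s", __func__,                                       \
                    ::android::base::StringPrintf("" __VA_ARGS__).c_str())

    #define AUGMENT_LOG_IF(LEVEL, COND, ...)                                  \
        ALOG##LEVEL##_IF((COND), "%s: %s", __func__,                          \
                         ::android::base::StringPrintf("" __VA_ARGS__).c_str())

The benefit at the call sites is mechanical: the prefix is emitted once by the macro, so the messages shrink and cannot drift out of sync with the function name.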
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index baf4ac0..a1cdac4 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -53,7 +53,7 @@
StreamContextAidl(
::aidl::android::hardware::audio::core::StreamDescriptor& descriptor,
- bool isAsynchronous)
+ bool isAsynchronous, int ioHandle)
: mFrameSizeBytes(descriptor.frameSizeBytes),
mCommandMQ(new CommandMQ(descriptor.command)),
mReplyMQ(new ReplyMQ(descriptor.reply)),
@@ -61,7 +61,8 @@
mDataMQ(maybeCreateDataMQ(descriptor)),
mIsAsynchronous(isAsynchronous),
mIsMmapped(isMmapped(descriptor)),
- mMmapBufferDescriptor(maybeGetMmapBuffer(descriptor)) {}
+ mMmapBufferDescriptor(maybeGetMmapBuffer(descriptor)),
+ mIoHandle(ioHandle) {}
StreamContextAidl(StreamContextAidl&& other) :
mFrameSizeBytes(other.mFrameSizeBytes),
mCommandMQ(std::move(other.mCommandMQ)),
@@ -70,7 +71,8 @@
mDataMQ(std::move(other.mDataMQ)),
mIsAsynchronous(other.mIsAsynchronous),
mIsMmapped(other.mIsMmapped),
- mMmapBufferDescriptor(std::move(other.mMmapBufferDescriptor)) {}
+ mMmapBufferDescriptor(std::move(other.mMmapBufferDescriptor)),
+ mIoHandle(other.mIoHandle) {}
StreamContextAidl& operator=(StreamContextAidl&& other) {
mFrameSizeBytes = other.mFrameSizeBytes;
mCommandMQ = std::move(other.mCommandMQ);
@@ -80,6 +82,7 @@
mIsAsynchronous = other.mIsAsynchronous;
mIsMmapped = other.mIsMmapped;
mMmapBufferDescriptor = std::move(other.mMmapBufferDescriptor);
+ mIoHandle = other.mIoHandle;
return *this;
}
bool isValid() const {
@@ -105,7 +108,9 @@
bool isAsynchronous() const { return mIsAsynchronous; }
bool isMmapped() const { return mIsMmapped; }
const MmapBufferDescriptor& getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
- size_t getMmapBurstSize() const { return mMmapBufferDescriptor.burstSizeFrames;}
+ size_t getMmapBurstSize() const { return mMmapBufferDescriptor.burstSizeFrames; }
+ int getIoHandle() const { return mIoHandle; }
+
private:
static std::unique_ptr<DataMQ> maybeCreateDataMQ(
const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
@@ -137,6 +142,7 @@
bool mIsAsynchronous;
bool mIsMmapped;
MmapBufferDescriptor mMmapBufferDescriptor;
+ int mIoHandle;
};
class StreamHalAidl : public virtual StreamHalInterface, public ConversionHelperAidl {
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 7f6c1fb..3f16526 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -90,7 +90,8 @@
audio_output_flags_t flags,
struct audio_config *config,
const char *address,
- sp<StreamOutHalInterface> *outStream) = 0;
+ sp<StreamOutHalInterface> *outStream,
+ const std::vector<playback_track_metadata_v7_t>& sourceMetadata = {}) = 0;
// Creates and opens the audio hardware input stream. The stream is closed
// by releasing all references to the returned object.
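The new trailing parameter above is defaulted to an empty vector, which is what keeps every existing openOutputStream caller compiling unchanged; since the default lives on the interface declaration, implementations do not need to repeat it. A standalone illustration of the technique, with hypothetical names rather than the HAL types:

    // Hypothetical names; only the default-argument mechanics mirror the change.
    #include <cstddef>
    #include <string>
    #include <vector>

    struct TrackMetadata { int usage = 0; };

    size_t openStream(const std::string& address,
                      const std::vector<TrackMetadata>& sourceMetadata = {}) {
        (void)address;
        return sourceMetadata.size();  // a real HAL would forward this downstream
    }

    // openStream("speaker");                       // legacy call site, still valid
    // openStream("speaker", {TrackMetadata{1}});   // new call site supplies metadata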
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 50b748e..0f5334f 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -18,6 +18,7 @@
#include <memory>
#include <mutex>
#include <string>
+#include <thread>
#include <vector>
#define LOG_TAG "CoreAudioHalAidlTest"
@@ -28,6 +29,7 @@
#include <StreamHalAidl.h>
#include <aidl/android/hardware/audio/core/BnModule.h>
#include <aidl/android/hardware/audio/core/BnStreamCommon.h>
+#include <aidl/android/hardware/audio/core/BnStreamOut.h>
#include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>
#include <aidl/android/media/audio/common/AudioGainMode.h>
#include <aidl/android/media/audio/common/Int.h>
@@ -64,13 +66,13 @@
const std::vector<VendorParameter>& getSyncParameters() const { return mSyncParameters; }
protected:
- ndk::ScopedAStatus getVendorParametersImpl(const std::vector<std::string>& in_parameterIds) {
+ ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds) {
mGetParameterIds.insert(mGetParameterIds.end(), in_parameterIds.begin(),
in_parameterIds.end());
return ndk::ScopedAStatus::ok();
}
- ndk::ScopedAStatus setVendorParametersImpl(const std::vector<VendorParameter>& in_parameters,
- bool async) {
+ ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
+ bool async) {
if (async) {
mAsyncParameters.insert(mAsyncParameters.end(), in_parameters.begin(),
in_parameters.end());
@@ -187,6 +189,11 @@
speakerOutDevice.profiles = standardPcmAudioProfiles;
c.ports.push_back(speakerOutDevice);
+ AudioPort primaryOutMix =
+ createPort(c.nextPortId++, "primary output", 0, false, createPortMixExt(1, 1));
+ primaryOutMix.profiles = standardPcmAudioProfiles;
+ c.ports.push_back(primaryOutMix);
+
AudioPort btOutDevice =
createPort(c.nextPortId++, "BT A2DP Out", 0, false,
createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
@@ -200,11 +207,141 @@
c.ports.push_back(btOutMix);
c.routes.push_back(createRoute({micInDevice, micInBackDevice}, primaryInMix));
+ c.routes.push_back(createRoute({primaryOutMix}, speakerOutDevice));
c.routes.push_back(createRoute({btOutMix}, btOutDevice));
return c;
}
+class StreamCommonMock : public ::aidl::android::hardware::audio::core::BnStreamCommon,
+ public VendorParameterMock {
+ ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus prepareToClose() override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus updateHwAvSyncId(int32_t) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds,
+ std::vector<VendorParameter>*) override {
+ return VendorParameterMock::getVendorParameters(in_parameterIds);
+ }
+ ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
+ bool async) override {
+ return VendorParameterMock::setVendorParameters(in_parameters, async);
+ }
+ ndk::ScopedAStatus addEffect(
+ const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus removeEffect(
+ const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+class StreamContext {
+ public:
+ using Descriptor = ::aidl::android::hardware::audio::core::StreamDescriptor;
+ typedef ::android::AidlMessageQueue<
+ Descriptor::Command, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
+ CommandMQ;
+ typedef ::android::AidlMessageQueue<
+ Descriptor::Reply, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
+ ReplyMQ;
+ typedef ::android::AidlMessageQueue<
+ int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
+ DataMQ;
+
+ StreamContext() = default;
+ StreamContext(std::unique_ptr<CommandMQ> commandMQ, std::unique_ptr<ReplyMQ> replyMQ,
+ std::unique_ptr<DataMQ> dataMQ)
+ : mCommandMQ(std::move(commandMQ)),
+ mReplyMQ(std::move(replyMQ)),
+ mDataMQ(std::move(dataMQ)) {}
+ void fillDescriptor(Descriptor* desc) {
+ if (mCommandMQ) {
+ desc->command = mCommandMQ->dupeDesc();
+ }
+ if (mReplyMQ) {
+ desc->reply = mReplyMQ->dupeDesc();
+ }
+ if (mDataMQ) {
+ desc->frameSizeBytes = 2;
+ desc->bufferSizeFrames = 48;
+ desc->audio.set<Descriptor::AudioBuffer::Tag::fmq>(mDataMQ->dupeDesc());
+ }
+ }
+
+ private:
+ std::unique_ptr<CommandMQ> mCommandMQ =
+ std::make_unique<CommandMQ>(1, true /*configureEventFlagWord*/);
+ std::unique_ptr<ReplyMQ> mReplyMQ =
+ std::make_unique<ReplyMQ>(1, true /*configureEventFlagWord*/);
+ std::unique_ptr<DataMQ> mDataMQ = std::make_unique<DataMQ>(96);
+};
+
+class StreamOutMock : public ::aidl::android::hardware::audio::core::BnStreamOut {
+ public:
+ explicit StreamOutMock(StreamContext&& ctx) : mContext(std::move(ctx)) {}
+
+ private:
+ ndk::ScopedAStatus getStreamCommon(
+ std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon>* _aidl_return)
+ override {
+ if (!mCommon) {
+ mCommon = ndk::SharedRefBase::make<StreamCommonMock>();
+ }
+ *_aidl_return = mCommon;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus updateMetadata(
+ const ::aidl::android::hardware::audio::common::SourceMetadata&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus updateOffloadMetadata(
+ const ::aidl::android::hardware::audio::common::AudioOffloadMetadata&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getHwVolume(std::vector<float>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setHwVolume(const std::vector<float>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioDescriptionMixLevel(float*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setAudioDescriptionMixLevel(float) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getDualMonoMode(
+ ::aidl::android::media::audio::common::AudioDualMonoMode*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setDualMonoMode(
+ ::aidl::android::media::audio::common::AudioDualMonoMode) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getRecommendedLatencyModes(
+ std::vector<::aidl::android::media::audio::common::AudioLatencyMode>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setLatencyMode(
+ ::aidl::android::media::audio::common::AudioLatencyMode) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getPlaybackRateParameters(
+ ::aidl::android::media::audio::common::AudioPlaybackRate*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setPlaybackRateParameters(
+ const ::aidl::android::media::audio::common::AudioPlaybackRate&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus selectPresentation(int32_t, int32_t) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ StreamContext mContext;
+ std::shared_ptr<StreamCommonMock> mCommon;
+};
+
class ModuleMock : public ::aidl::android::hardware::audio::core::BnModule,
public VendorParameterMock {
public:
@@ -339,7 +476,10 @@
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus openOutputStream(const OpenOutputStreamArguments&,
- OpenOutputStreamReturn*) override {
+ OpenOutputStreamReturn* _aidl_return) override {
+ StreamContext context;
+ context.fillDescriptor(&_aidl_return->desc);
+ _aidl_return->stream = ndk::SharedRefBase::make<StreamOutMock>(std::move(context));
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus getSupportedPlaybackRateFactors(SupportedPlaybackRateFactors*) override {
@@ -351,6 +491,7 @@
if (requested.id == 0) {
*patch = requested;
patch->id = mConfig.nextPatchId++;
+ patch->latenciesMs.push_back(100);
mConfig.patches.push_back(*patch);
ALOGD("%s: returning %s", __func__, patch->toString().c_str());
} else {
@@ -437,11 +578,11 @@
ndk::ScopedAStatus generateHwAvSyncId(int32_t*) override { return ndk::ScopedAStatus::ok(); }
ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds,
std::vector<VendorParameter>*) override {
- return getVendorParametersImpl(in_parameterIds);
+ return VendorParameterMock::getVendorParameters(in_parameterIds);
}
ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
bool async) override {
- return setVendorParametersImpl(in_parameters, async);
+ return VendorParameterMock::setVendorParameters(in_parameters, async);
}
ndk::ScopedAStatus addDeviceEffect(
int32_t,
@@ -474,29 +615,6 @@
ScreenRotation mScreenRotation = ScreenRotation::DEG_0;
};
-class StreamCommonMock : public ::aidl::android::hardware::audio::core::BnStreamCommon,
- public VendorParameterMock {
- ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
- ndk::ScopedAStatus prepareToClose() override { return ndk::ScopedAStatus::ok(); }
- ndk::ScopedAStatus updateHwAvSyncId(int32_t) override { return ndk::ScopedAStatus::ok(); }
- ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds,
- std::vector<VendorParameter>*) override {
- return getVendorParametersImpl(in_parameterIds);
- }
- ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
- bool async) override {
- return setVendorParametersImpl(in_parameters, async);
- }
- ndk::ScopedAStatus addEffect(
- const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
- return ndk::ScopedAStatus::ok();
- }
- ndk::ScopedAStatus removeEffect(
- const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
- return ndk::ScopedAStatus::ok();
- }
-};
-
VendorParameter makeVendorParameter(const std::string& id, int value) {
VendorParameter result{.id = id};
// Note: in real life, a parcelable type defined by vendor must be used,
@@ -708,7 +826,7 @@
class DeviceHalAidlTest : public testing::Test {
public:
void SetUp() override {
- mModule = ndk::SharedRefBase::make<ModuleMock>();
+ mModule = ndk::SharedRefBase::make<ModuleMock>(getTestConfiguration());
mDevice = sp<DeviceHalAidl>::make("test", mModule, nullptr /*vext*/);
}
void TearDown() override {
@@ -750,6 +868,46 @@
EXPECT_EQ(ScreenRotation::DEG_0, mModule->getScreenRotation());
}
+// See http://b/357487484#comment6
+TEST_F(DeviceHalAidlTest, StreamReleaseOnMapperCleanup) {
+ ASSERT_EQ(OK, mDevice->initCheck());
+ // Since the test is in effect probabilistic, try multiple times.
+ for (int i = 0; i < 100; ++i) {
+ sp<StreamOutHalInterface> stream1;
+ struct audio_config config = AUDIO_CONFIG_INITIALIZER;
+ config.sample_rate = 48000;
+ config.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ config.format = AUDIO_FORMAT_PCM_16_BIT;
+ ASSERT_EQ(OK, mDevice->openOutputStream(42 /*handle*/, AUDIO_DEVICE_OUT_SPEAKER,
+ AUDIO_OUTPUT_FLAG_NONE, &config, "" /*address*/,
+ &stream1));
+ ASSERT_EQ(1, stream1->getStrongCount());
+ std::atomic<bool> stopReleaser = false;
+ // Try to catch the moment when Hal2AidlMapper promotes its wp<StreamHalInterface> to sp<>
+ // in Hal2AidlMapper::resetUnusedPatchesAndPortConfigs, and release our reference right
+ // then, so that Hal2AidlMapper's temporary sp becomes the sole owner and the stream is
+ // destroyed while DeviceHalAidl::mLock is held.
+ std::thread releaser([&stream1, &stopReleaser]() {
+ while (!stopReleaser) {
+ if (stream1->getStrongCount() > 1) {
+ stream1.clear();
+ break;
+ }
+ std::this_thread::yield();
+ }
+ });
+ sp<StreamOutHalInterface> stream2;
+ // Opening another stream triggers a call to
+ // Hal2AidlMapper::resetUnusedPatchesAndPortConfigs. It must not cause a deadlock of the
+ // test (main) thread.
+ ASSERT_EQ(OK, mDevice->openOutputStream(43 /*handle*/, AUDIO_DEVICE_OUT_SPEAKER,
+ AUDIO_OUTPUT_FLAG_NONE, &config, "" /*address*/,
+ &stream2));
+ stopReleaser = true;
+ releaser.join();
+ }
+}
+
class DeviceHalAidlVendorParametersTest : public testing::Test {
public:
void SetUp() override {
@@ -830,9 +988,9 @@
mVendorExt = ndk::SharedRefBase::make<TestHalAdapterVendorExtension>();
struct audio_config config = AUDIO_CONFIG_INITIALIZER;
::aidl::android::hardware::audio::core::StreamDescriptor descriptor;
+ StreamContextAidl context(descriptor, false /*isAsynchronous*/, 0);
mStream = sp<StreamHalAidl>::make("test", false /*isInput*/, config, 0 /*nominalLatency*/,
- StreamContextAidl(descriptor, false /*isAsynchronous*/),
- mStreamCommon, mVendorExt);
+ std::move(context), mStreamCommon, mVendorExt);
}
void TearDown() override {
mStream.clear();
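The StreamReleaseOnMapperCleanup test above probes a lock-ordering hazard rather than a functional path, so a compressed illustration may help. The sketch below is not the libaudiohal code; it only shows why dropping the last strong reference to an object while holding a lock that the object's destructor needs makes the thread re-lock the same mutex and deadlock on itself, and how releasing the temporary reference outside the lock avoids it.

    // Minimal illustration (hypothetical types, not the HAL code).
    #include <memory>
    #include <mutex>

    struct Registry;

    struct Stream {
        explicit Stream(Registry& r) : registry(r) {}
        ~Stream();               // unregisters itself under Registry::lock
        Registry& registry;
    };

    struct Registry {
        std::mutex lock;
        std::weak_ptr<Stream> cached;

        void cleanupUnsafe() {
            std::lock_guard<std::mutex> guard(lock);
            // If this promoted pointer turns out to be the last owner, ~Stream()
            // runs when it goes out of scope, re-enters lock() and deadlocks.
            std::shared_ptr<Stream> promoted = cached.lock();
        }

        void cleanupSafe() {
            std::shared_ptr<Stream> promoted;   // destroyed after the guard below
            {
                std::lock_guard<std::mutex> guard(lock);
                promoted = cached.lock();
            }
            // The last reference (if any) is released here, outside the lock.
        }
    };

    Stream::~Stream() { std::lock_guard<std::mutex> guard(registry.lock); }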
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index b1e6db2..b1f9d18 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -64,6 +64,7 @@
"mediarecorder_fuzzer.cpp",
],
defaults: [
+ "libaudioflinger_dependencies",
"libmediaplayerserviceFuzzer_defaults",
],
static_libs: [
@@ -76,12 +77,10 @@
],
shared_libs: [
"android.hardware.media.omx@1.0",
- "av-types-aidl-cpp",
"media_permission-aidl-cpp",
- "libaudioclient_aidl_conversion",
"libactivitymanager_aidl",
"libandroid_net",
- "libaudioclient",
+ "libaudioflinger",
"libcamera_client",
"libcodec2_client",
"libcrypto",
@@ -89,24 +88,13 @@
"libdrmframework",
"libgui",
"libhidlbase",
- "liblog",
"libmedia_codeclist",
"libmedia_omx",
"libmediadrm",
- "libmediametrics",
- "libmediautils",
- "libmemunreachable",
"libnetd_client",
- "libpowermanager",
"libstagefright_httplive",
"packagemanager_aidl-cpp",
"libfakeservicemanager",
- "libvibrator",
- "libnbaio",
- "libnblog",
- "libpowermanager",
- "libaudioprocessing",
- "libaudioflinger",
"libresourcemanagerservice",
"libmediametricsservice",
"mediametricsservice-aidl-cpp",
@@ -122,10 +110,6 @@
"android.hardware.camera.device@3.4",
"libaudiohal@7.0",
],
- header_libs: [
- "libaudiohal_headers",
- "libaudioflinger_headers",
- ],
}
cc_fuzz {
diff --git a/media/libmediaplayerservice/fuzzer/corpus/seed-2024-08-29-0 b/media/libmediaplayerservice/fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..aae78ae
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 46703bb..893b442 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -18,19 +18,14 @@
#define LOG_TAG "FrameDecoder"
#define ATRACE_TAG ATRACE_TAG_VIDEO
#include "include/FrameDecoder.h"
-#include "include/FrameCaptureLayer.h"
-#include "include/HevcUtils.h"
+#include <android_media_codec.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <gui/Surface.h>
#include <inttypes.h>
-#include <mediadrm/ICrypto.h>
#include <media/IMediaSource.h>
#include <media/MediaCodecBuffer.h>
-#include <media/stagefright/foundation/avc_utils.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/CodecBase.h>
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/FrameCaptureProcessor.h>
#include <media/stagefright/MediaBuffer.h>
@@ -39,13 +34,24 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/foundation/avc_utils.h>
+#include <mediadrm/ICrypto.h>
#include <private/media/VideoFrame.h>
#include <utils/Log.h>
#include <utils/Trace.h>
+#include "include/FrameCaptureLayer.h"
+#include "include/HevcUtils.h"
+
+#include <C2Buffer.h>
+#include <Codec2BufferUtils.h>
namespace android {
static const int64_t kBufferTimeOutUs = 10000LL; // 10 msec
+static const int64_t kAsyncBufferTimeOutUs = 2000000LL; // 2000 msec
static const size_t kRetryCount = 100; // must be >0
static const int64_t kDefaultSampleDurationUs = 33333LL; // 33ms
// For codec, 0 is the highest importance; higher the number lesser important.
@@ -232,6 +238,104 @@
return false;
}
+AsyncCodecHandler::AsyncCodecHandler(const wp<FrameDecoder>& frameDecoder) {
+ mFrameDecoder = frameDecoder;
+}
+
+void AsyncCodecHandler::onMessageReceived(const sp<AMessage>& msg) {
+ switch (msg->what()) {
+ case FrameDecoder::kWhatCallbackNotify:
+ int32_t callbackId;
+ if (!msg->findInt32("callbackID", &callbackId)) {
+ ALOGE("kWhatCallbackNotify: callbackID is expected.");
+ break;
+ }
+ switch (callbackId) {
+ case MediaCodec::CB_INPUT_AVAILABLE: {
+ int32_t index;
+ if (!msg->findInt32("index", &index)) {
+ ALOGE("CB_INPUT_AVAILABLE: index is expected.");
+ break;
+ }
+ ALOGD("CB_INPUT_AVAILABLE received, index is %d", index);
+ sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
+ if (frameDecoder != nullptr) {
+ frameDecoder->handleInputBufferAsync(index);
+ }
+ break;
+ }
+ case MediaCodec::CB_OUTPUT_AVAILABLE: {
+ int32_t index;
+ int64_t timeUs;
+ CHECK(msg->findInt32("index", &index));
+ CHECK(msg->findInt64("timeUs", &timeUs));
+ ALOGD("CB_OUTPUT_AVAILABLE received, index is %d", index);
+ sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
+ if (frameDecoder != nullptr) {
+ frameDecoder->handleOutputBufferAsync(index, timeUs);
+ }
+ break;
+ }
+ case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
+ ALOGD("CB_OUTPUT_FORMAT_CHANGED received");
+ sp<AMessage> format;
+ if (!msg->findMessage("format", &format) || format == nullptr) {
+ ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
+ break;
+ }
+ sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
+ if (frameDecoder != nullptr) {
+ frameDecoder->handleOutputFormatChangeAsync(format);
+ }
+ break;
+ }
+ case MediaCodec::CB_ERROR: {
+ status_t err;
+ int32_t actionCode;
+ AString detail;
+ if (!msg->findInt32("err", &err)) {
+ ALOGE("CB_ERROR: err is expected.");
+ break;
+ }
+ if (!msg->findInt32("actionCode", &actionCode)) {
+ ALOGE("CB_ERROR: actionCode is expected.");
+ break;
+ }
+ msg->findString("detail", &detail);
+ ALOGE("Codec reported error(0x%x/%s), actionCode(%d), detail(%s)", err,
+ StrMediaError(err).c_str(), actionCode, detail.c_str());
+ break;
+ }
+ default:
+ ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", callbackId);
+ break;
+ }
+ break;
+ default:
+ ALOGE("unexpected message received: %s", msg->debugString().c_str());
+ break;
+ }
+}
+
+void InputBufferIndexQueue::enqueue(int32_t index) {
+ std::scoped_lock<std::mutex> lock(mMutex);
+ mQueue.push(index);
+ mCondition.notify_one();
+}
+
+bool InputBufferIndexQueue::dequeue(int32_t* index, int32_t timeOutUs) {
+ std::unique_lock<std::mutex> lock(mMutex);
+ bool hasAvailableIndex = mCondition.wait_for(lock, std::chrono::microseconds(timeOutUs),
+ [this] { return !mQueue.empty(); });
+ if (hasAvailableIndex) {
+ *index = mQueue.front();
+ mQueue.pop();
+ return true;
+ } else {
+ return false;
+ }
+}
+
//static
sp<IMemory> FrameDecoder::getMetadataOnly(
const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail, uint32_t bitDepth) {
@@ -281,6 +385,7 @@
const sp<MetaData> &trackMeta,
const sp<IMediaSource> &source)
: mComponentName(componentName),
+ mUseBlockModel(false),
mTrackMeta(trackMeta),
mSource(source),
mDstFormat(OMX_COLOR_Format16bitRGB565),
@@ -290,6 +395,10 @@
}
FrameDecoder::~FrameDecoder() {
+ if (mHandler != NULL) {
+ mAsyncLooper->stop();
+ mAsyncLooper->unregisterHandler(mHandler->id());
+ }
if (mDecoder != NULL) {
mDecoder->release();
mSource->stop();
@@ -333,8 +442,18 @@
return (decoder.get() == NULL) ? NO_MEMORY : err;
}
+ if (mUseBlockModel) {
+ mAsyncLooper = new ALooper;
+ mAsyncLooper->start();
+ mHandler = new AsyncCodecHandler(wp<FrameDecoder>(this));
+ mAsyncLooper->registerHandler(mHandler);
+ sp<AMessage> callbackMsg = new AMessage(kWhatCallbackNotify, mHandler);
+ decoder->setCallback(callbackMsg);
+ }
+
err = decoder->configure(
- videoFormat, mSurface, NULL /* crypto */, 0 /* flags */);
+ videoFormat, mSurface, NULL /* crypto */,
+ mUseBlockModel ? MediaCodec::CONFIGURE_FLAG_USE_BLOCK_MODEL : 0 /* flags */);
if (err != OK) {
ALOGW("configure returned error %d (%s)", err, asString(err));
decoder->release();
@@ -362,10 +481,18 @@
sp<IMemory> FrameDecoder::extractFrame(FrameRect *rect) {
ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ExtractFrame");
status_t err = onExtractRect(rect);
- if (err == OK) {
+ if (err != OK) {
+ ALOGE("onExtractRect error %d", err);
+ return NULL;
+ }
+
+ if (!mUseBlockModel) {
err = extractInternal();
+ } else {
+ err = extractInternalUsingBlockModel();
}
if (err != OK) {
+ ALOGE("extractInternal error %d", err);
return NULL;
}
@@ -380,6 +507,7 @@
ALOGE("decoder is not initialized");
return NO_INIT;
}
+
do {
size_t index;
int64_t ptsUs = 0LL;
@@ -433,7 +561,8 @@
(const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
mediaBuffer->range_length());
- onInputReceived(codecBuffer, mediaBuffer->meta_data(), mFirstSample, &flags);
+ onInputReceived(codecBuffer->data(), codecBuffer->size(), mediaBuffer->meta_data(),
+ mFirstSample, &flags);
mFirstSample = false;
}
@@ -487,11 +616,14 @@
ALOGE("failed to get output buffer %zu", index);
break;
}
+ uint8_t* frameData = videoFrameBuffer->data();
+ sp<ABuffer> imageData;
+ videoFrameBuffer->meta()->findBuffer("image-data", &imageData);
if (mSurface != nullptr) {
mDecoder->renderOutputBufferAndRelease(index);
- err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
+ err = onOutputReceived(frameData, imageData, mOutputFormat, ptsUs, &done);
} else {
- err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
+ err = onOutputReceived(frameData, imageData, mOutputFormat, ptsUs, &done);
mDecoder->releaseOutputBuffer(index);
}
} else {
@@ -510,6 +642,73 @@
return err;
}
+status_t FrameDecoder::extractInternalUsingBlockModel() {
+ status_t err = OK;
+ MediaBufferBase* mediaBuffer = NULL;
+ int64_t ptsUs = 0LL;
+ uint32_t flags = 0;
+ int32_t index;
+ mHandleOutputBufferAsyncDone = false;
+
+ err = mSource->read(&mediaBuffer, &mReadOptions);
+ mReadOptions.clearSeekTo();
+ if (err != OK) {
+ ALOGW("Input Error: err=%d", err);
+ if (mediaBuffer != NULL) {
+ mediaBuffer->release();
+ }
+ return err;
+ }
+
+ size_t inputSize = mediaBuffer->range_length();
+ std::shared_ptr<C2LinearBlock> block =
+ MediaCodec::FetchLinearBlock(inputSize, {std::string{mComponentName.c_str()}});
+ C2WriteView view{block->map().get()};
+ if (view.error() != C2_OK) {
+ ALOGE("Fatal error: failed to allocate and map a block");
+ mediaBuffer->release();
+ return NO_MEMORY;
+ }
+ if (inputSize > view.capacity()) {
+ ALOGE("Fatal error: allocated block is too small "
+ "(input size %zu; block cap %u)",
+ inputSize, view.capacity());
+ mediaBuffer->release();
+ return BAD_VALUE;
+ }
+ CHECK(mediaBuffer->meta_data().findInt64(kKeyTime, &ptsUs));
+ memcpy(view.base(), (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
+ inputSize);
+ std::shared_ptr<C2Buffer> c2Buffer =
+ C2Buffer::CreateLinearBuffer(block->share(0, inputSize, C2Fence{}));
+ onInputReceived(view.base(), inputSize, mediaBuffer->meta_data(), true /* firstSample */,
+ &flags);
+ flags |= MediaCodec::BUFFER_FLAG_EOS;
+ mediaBuffer->release();
+
+ std::vector<AccessUnitInfo> infoVec;
+ infoVec.emplace_back(flags, inputSize, ptsUs);
+ sp<BufferInfosWrapper> infos = new BufferInfosWrapper{std::move(infoVec)};
+
+ if (!mInputBufferIndexQueue.dequeue(&index, kAsyncBufferTimeOutUs)) {
+ ALOGE("No available input buffer index for async mode.");
+ return TIMED_OUT;
+ }
+
+ AString errorDetailMsg;
+ ALOGD("QueueLinearBlock: index=%d size=%zu ts=%" PRId64 " us flags=%x",
+ index, inputSize, ptsUs, flags);
+ err = mDecoder->queueBuffer(index, c2Buffer, infos, nullptr, &errorDetailMsg);
+ if (err != OK) {
+ ALOGE("failed to queueBuffer (err %d): %s", err, errorDetailMsg.c_str());
+ return err;
+ }
+
+ // wait for handleOutputBufferAsync() to finish
+ std::unique_lock _lk(mMutex);
+ mOutputFramePending.wait_for(_lk, std::chrono::microseconds(kAsyncBufferTimeOutUs),
+ [this] { return mHandleOutputBufferAsyncDone; });
+ return mHandleOutputBufferAsyncDone ? OK : TIMED_OUT;
+}
+
//////////////////////////////////////////////////////////////////////
VideoFrameDecoder::VideoFrameDecoder(
@@ -525,6 +724,81 @@
mDefaultSampleDurationUs(0) {
}
+status_t FrameDecoder::handleOutputFormatChangeAsync(sp<AMessage> format) {
+ // Here format is MediaCodec's internal copy of output format.
+ // Make a copy since the client might modify it.
+ mOutputFormat = format->dup();
+ ALOGD("receive output format in async mode: %s", mOutputFormat->debugString().c_str());
+ return OK;
+}
+
+status_t FrameDecoder::handleInputBufferAsync(int32_t index) {
+ mInputBufferIndexQueue.enqueue(index);
+ return OK;
+}
+
+status_t FrameDecoder::handleOutputBufferAsync(int32_t index, int64_t timeUs) {
+ if (mHandleOutputBufferAsyncDone) {
+ // we have already processed an output buffer, skip others
+ return OK;
+ }
+
+ status_t err = OK;
+ sp<MediaCodecBuffer> videoFrameBuffer;
+ err = mDecoder->getOutputBuffer(index, &videoFrameBuffer);
+ if (err != OK || videoFrameBuffer == nullptr) {
+ ALOGE("failed to get output buffer %d", index);
+ return err;
+ }
+
+ bool onOutputReceivedDone = false;
+ if (mSurface != nullptr) {
+ mDecoder->renderOutputBufferAndRelease(index);
+ // frameData and imgObj will be fetched by captureSurface() inside onOutputReceived()
+ // explicitly pass null here
+ err = onOutputReceived(nullptr, nullptr, mOutputFormat, timeUs, &onOutputReceivedDone);
+ } else {
+ // get stride and frame data for block model buffer
+ std::shared_ptr<C2Buffer> c2buffer = videoFrameBuffer->asC2Buffer();
+ if (!c2buffer
+ || c2buffer->data().type() != C2BufferData::GRAPHIC
+ || c2buffer->data().graphicBlocks().size() == 0u) {
+ ALOGE("C2Buffer precond fail");
+ return ERROR_MALFORMED;
+ }
+
+ std::unique_ptr<const C2GraphicView> view(std::make_unique<const C2GraphicView>(
+ c2buffer->data().graphicBlocks()[0].map().get()));
+ GraphicView2MediaImageConverter converter(*view, mOutputFormat, false /* copy */);
+ if (converter.initCheck() != OK) {
+ ALOGE("Converter init failed: %d", converter.initCheck());
+ return NO_INIT;
+ }
+
+ uint8_t* frameData = converter.wrap()->data();
+ sp<ABuffer> imageData = converter.imageData();
+ if (imageData != nullptr) {
+ mOutputFormat->setBuffer("image-data", imageData);
+ MediaImage2 *img = (MediaImage2*) imageData->data();
+ if (img->mNumPlanes > 0 && img->mType != img->MEDIA_IMAGE_TYPE_UNKNOWN) {
+ int32_t stride = img->mPlane[0].mRowInc;
+ mOutputFormat->setInt32(KEY_STRIDE, stride);
+ ALOGD("updating stride = %d", stride);
+ }
+ }
+
+ err = onOutputReceived(frameData, imageData, mOutputFormat, timeUs, &onOutputReceivedDone);
+ mDecoder->releaseOutputBuffer(index);
+ }
+
+ if (err == OK && onOutputReceivedDone) {
+ std::lock_guard _lm(mMutex);
+ mHandleOutputBufferAsyncDone = true;
+ mOutputFramePending.notify_one();
+ }
+ return err;
+}
+
sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
int64_t frameTimeUs, int seekMode,
MediaSource::ReadOptions *options,
@@ -575,8 +849,13 @@
bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
|| (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
if (!isSeekingClosest) {
- videoFormat->setInt32("android._num-input-buffers", 1);
- videoFormat->setInt32("android._num-output-buffers", 1);
+ if (mComponentName.startsWithIgnoreCase("c2.")) {
+ mUseBlockModel = android::media::codec::provider_->thumbnail_block_model();
+ } else {
+ // OMX Codec
+ videoFormat->setInt32("android._num-input-buffers", 1);
+ videoFormat->setInt32("android._num-output-buffers", 1);
+ }
}
if (isHDR(videoFormat)) {
@@ -601,9 +880,8 @@
return videoFormat;
}
-status_t VideoFrameDecoder::onInputReceived(
- const sp<MediaCodecBuffer> &codecBuffer,
- MetaDataBase &sampleMeta, bool firstSample, uint32_t *flags) {
+status_t VideoFrameDecoder::onInputReceived(uint8_t* data, size_t size, MetaDataBase& sampleMeta,
+ bool firstSample, uint32_t* flags) {
bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
|| (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
@@ -612,10 +890,7 @@
ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
}
- if (!isSeekingClosest
- && ((mIsAvc && IsIDR(codecBuffer->data(), codecBuffer->size()))
- || (mIsHevc && IsIDR(
- codecBuffer->data(), codecBuffer->size())))) {
+ if (!isSeekingClosest && ((mIsAvc && IsIDR(data, size)) || (mIsHevc && IsIDR(data, size)))) {
// Only need to decode one IDR frame, unless we're seeking with CLOSEST
// option, in which case we need to actually decode to targetTimeUs.
*flags |= MediaCodec::BUFFER_FLAG_EOS;
@@ -630,7 +905,8 @@
}
status_t VideoFrameDecoder::onOutputReceived(
- const sp<MediaCodecBuffer> &videoFrameBuffer,
+ uint8_t* frameData,
+ sp<ABuffer> imgObj,
const sp<AMessage> &outputFormat,
int64_t timeUs, bool *done) {
int64_t durationUs = mDefaultSampleDurationUs;
@@ -703,7 +979,6 @@
}
mFrame = static_cast<VideoFrame*>(frameMem->unsecurePointer());
-
setFrame(frameMem);
}
@@ -712,7 +987,7 @@
if (mCaptureLayer != nullptr) {
return captureSurface();
}
- ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
+ ColorConverter colorConverter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
uint32_t standard, range, transfer;
if (!outputFormat->findInt32("color-standard", (int32_t*)&standard)) {
@@ -724,22 +999,25 @@
if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
transfer = 0;
}
- sp<ABuffer> imgObj;
- if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+
+ if (imgObj != nullptr) {
MediaImage2 *imageData = nullptr;
imageData = (MediaImage2 *)(imgObj.get()->data());
if (imageData != nullptr) {
- converter.setSrcMediaImage2(*imageData);
+ colorConverter.setSrcMediaImage2(*imageData);
}
}
if (srcFormat == COLOR_FormatYUV420Flexible && imgObj.get() == nullptr) {
return ERROR_UNSUPPORTED;
}
- converter.setSrcColorSpace(standard, range, transfer);
- if (converter.isValid()) {
+ colorConverter.setSrcColorSpace(standard, range, transfer);
+ if (colorConverter.isValid()) {
ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ColorConverter");
- converter.convert(
- (const uint8_t *)videoFrameBuffer->data(),
+ if (frameData == nullptr) {
+ ALOGD("frameData is null for ColorConverter");
+ }
+ colorConverter.convert(
+ (const uint8_t *)frameData,
width, height, stride,
crop_left, crop_top, crop_right, crop_bottom,
mFrame->getFlattenedData(),
@@ -955,7 +1233,8 @@
}
status_t MediaImageDecoder::onOutputReceived(
- const sp<MediaCodecBuffer> &videoFrameBuffer,
+ uint8_t* frameData,
+ sp<ABuffer> imgObj,
const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
if (outputFormat == NULL) {
return ERROR_MALFORMED;
@@ -1008,8 +1287,8 @@
if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
transfer = 0;
}
- sp<ABuffer> imgObj;
- if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+
+ if (imgObj != nullptr) {
MediaImage2 *imageData = nullptr;
imageData = (MediaImage2 *)(imgObj.get()->data());
if (imageData != nullptr) {
@@ -1058,7 +1337,7 @@
if (converter.isValid()) {
converter.convert(
- (const uint8_t *)videoFrameBuffer->data(),
+ (const uint8_t *)frameData,
width, height, stride,
crop_left, crop_top, crop_right, crop_bottom,
mFrame->getFlattenedData(),
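The block-model path added above splits frame extraction across two threads: the codec's callback thread drives handleInputBufferAsync/handleOutputBufferAsync, while extractInternalUsingBlockModel blocks on mOutputFramePending with a timeout. The standalone sketch below shows only that handshake (a mutex-guarded flag plus condition_variable::wait_for with a predicate, so spurious wakeups and an already-signalled completion are both handled); the names are illustrative, not the FrameDecoder members.

    #include <chrono>
    #include <condition_variable>
    #include <mutex>
    #include <thread>

    struct FrameWaiter {
        std::mutex mutex;
        std::condition_variable frameReady;
        bool done = false;

        // Called on the codec callback thread once one output frame is handled.
        void signalDone() {
            std::lock_guard<std::mutex> lock(mutex);
            done = true;
            frameReady.notify_one();
        }

        // Called on the extraction thread; returns false on timeout.
        bool waitForFrame(std::chrono::microseconds timeout) {
            std::unique_lock<std::mutex> lock(mutex);
            return frameReady.wait_for(lock, timeout, [this] { return done; });
        }
    };

    int main() {
        FrameWaiter waiter;
        std::thread callbackThread([&] { waiter.signalDone(); });
        bool ok = waiter.waitForFrame(std::chrono::microseconds(2'000'000));
        callbackThread.join();
        return ok ? 0 : 1;
    }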
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 15188b0..76b6aa6 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -3776,6 +3776,12 @@
if (mStszTableEntries->count() == 0) {
mFirstSampleTimeRealUs = systemTime() / 1000;
if (timestampUs < 0 && mFirstSampleStartOffsetUs == 0) {
+ if (WARN_UNLESS(timestampUs != INT64_MIN, "for %s track", trackName)) {
+ copy->release();
+ mSource->stop();
+ mIsMalformed = true;
+ break;
+ }
mFirstSampleStartOffsetUs = -timestampUs;
timestampUs = 0;
}
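The new WARN_UNLESS guard above exists because the next statement negates timestampUs, and -INT64_MIN cannot be represented in int64_t (signed overflow, undefined behaviour), so such a sample must be rejected as malformed instead of being normalised into a start offset. A minimal check expressing the same precondition:

    // Illustration only; the writer expresses the same test via its WARN_UNLESS macro.
    #include <cstdint>
    #include <limits>

    inline bool negatableTimestamp(int64_t timestampUs) {
        // -INT64_MIN overflows int64_t; every other value is safe to negate.
        return timestampUs != std::numeric_limits<int64_t>::min();
    }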
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 717106b..4a02985 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -6696,8 +6696,8 @@
if (!mDetachedSurface) {
uint64_t usage = 0;
if (!mSurface || mSurface->getConsumerUsage(&usage) != OK) {
- // TODO: should we use a/the default consumer usage?
- usage = 0;
+ // By default prepare buffer to be displayed on any of the common surfaces
+ usage = (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER);
}
mDetachedSurface.reset(new ReleaseSurface(usage));
}
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index d50bc1e..c2ff41f 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -332,7 +332,7 @@
<!-- profiles and levels: ProfileBaseline : Level3 -->
<Limit name="block-count" range="1-1620" />
<Limit name="blocks-per-second" range="1-40500" />
- <Limit name="bitrate" range="1-2000000" />
+ <Limit name="bitrate" range="1-10000000" />
</Variant>
<Feature name="intra-refresh" />
<!-- Video Quality control -->
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index e417324..94c201f 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -18,12 +18,15 @@
#define FRAME_DECODER_H_
#include <memory>
+#include <mutex>
+#include <queue>
#include <vector>
-#include <media/stagefright/foundation/AString.h>
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaSource.h>
#include <media/openmax/OMX_Video.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/AString.h>
#include <ui/GraphicTypes.h>
namespace android {
@@ -34,11 +37,23 @@
class MediaCodecBuffer;
class Surface;
class VideoFrame;
+struct AsyncCodecHandler;
struct FrameRect {
int32_t left, top, right, bottom;
};
+struct InputBufferIndexQueue {
+public:
+ void enqueue(int32_t index);
+ bool dequeue(int32_t* index, int32_t timeOutUs);
+
+private:
+ std::queue<int32_t> mQueue;
+ std::mutex mMutex;
+ std::condition_variable mCondition;
+};
+
struct FrameDecoder : public RefBase {
FrameDecoder(
const AString &componentName,
@@ -53,7 +68,19 @@
const sp<MetaData> &trackMeta, int colorFormat,
bool thumbnail = false, uint32_t bitDepth = 0);
+ status_t handleInputBufferAsync(int32_t index);
+ status_t handleOutputBufferAsync(int32_t index, int64_t timeUs);
+ status_t handleOutputFormatChangeAsync(sp<AMessage> format);
+
+ enum {
+ kWhatCallbackNotify,
+ };
+
protected:
+ AString mComponentName;
+ sp<AMessage> mOutputFormat;
+ bool mUseBlockModel;
+
virtual ~FrameDecoder();
virtual sp<AMessage> onGetFormatAndSeekOptions(
@@ -64,14 +91,12 @@
virtual status_t onExtractRect(FrameRect *rect) = 0;
- virtual status_t onInputReceived(
- const sp<MediaCodecBuffer> &codecBuffer,
- MetaDataBase &sampleMeta,
- bool firstSample,
- uint32_t *flags) = 0;
+ virtual status_t onInputReceived(uint8_t* data, size_t size, MetaDataBase& sampleMeta,
+ bool firstSample, uint32_t* flags) = 0;
virtual status_t onOutputReceived(
- const sp<MediaCodecBuffer> &videoFrameBuffer,
+ uint8_t* data,
+ sp<ABuffer> imgObj,
const sp<AMessage> &outputFormat,
int64_t timeUs,
bool *done) = 0;
@@ -83,7 +108,6 @@
void setFrame(const sp<IMemory> &frameMem) { mFrameMemory = frameMem; }
private:
- AString mComponentName;
sp<MetaData> mTrackMeta;
sp<IMediaSource> mSource;
OMX_COLOR_FORMATTYPE mDstFormat;
@@ -92,17 +116,32 @@
sp<IMemory> mFrameMemory;
MediaSource::ReadOptions mReadOptions;
sp<MediaCodec> mDecoder;
- sp<AMessage> mOutputFormat;
+ sp<AsyncCodecHandler> mHandler;
+ sp<ALooper> mAsyncLooper;
bool mHaveMoreInputs;
bool mFirstSample;
+ bool mHandleOutputBufferAsyncDone;
sp<Surface> mSurface;
+ std::mutex mMutex;
+ std::condition_variable mOutputFramePending;
+ InputBufferIndexQueue mInputBufferIndexQueue;
status_t extractInternal();
+ status_t extractInternalUsingBlockModel();
DISALLOW_EVIL_CONSTRUCTORS(FrameDecoder);
};
struct FrameCaptureLayer;
+struct AsyncCodecHandler : public AHandler {
+public:
+ explicit AsyncCodecHandler(const wp<FrameDecoder>& frameDecoder);
+ virtual void onMessageReceived(const sp<AMessage>& msg);
+
+private:
+ wp<FrameDecoder> mFrameDecoder;
+};
+
struct VideoFrameDecoder : public FrameDecoder {
VideoFrameDecoder(
const AString &componentName,
@@ -121,14 +160,12 @@
return (rect == NULL) ? OK : ERROR_UNSUPPORTED;
}
- virtual status_t onInputReceived(
- const sp<MediaCodecBuffer> &codecBuffer,
- MetaDataBase &sampleMeta,
- bool firstSample,
- uint32_t *flags) override;
+ virtual status_t onInputReceived(uint8_t* data, size_t size, MetaDataBase& sampleMeta,
+ bool firstSample, uint32_t* flags) override;
virtual status_t onOutputReceived(
- const sp<MediaCodecBuffer> &videoFrameBuffer,
+ uint8_t* data,
+ sp<ABuffer> imgObj,
const sp<AMessage> &outputFormat,
int64_t timeUs,
bool *done) override;
@@ -162,14 +199,13 @@
virtual status_t onExtractRect(FrameRect *rect) override;
- virtual status_t onInputReceived(
- const sp<MediaCodecBuffer> &codecBuffer __unused,
- MetaDataBase &sampleMeta __unused,
- bool firstSample __unused,
- uint32_t *flags __unused) override { return OK; }
+ virtual status_t onInputReceived(uint8_t* __unused, size_t __unused,
+ MetaDataBase& sampleMeta __unused, bool firstSample __unused,
+ uint32_t* flags __unused) override { return OK; }
virtual status_t onOutputReceived(
- const sp<MediaCodecBuffer> &videoFrameBuffer,
+ uint8_t* data,
+ sp<ABuffer> imgObj,
const sp<AMessage> &outputFormat,
int64_t timeUs,
bool *done) override;
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 2341af1..d0f1c1b 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -51,6 +51,11 @@
],
}
+vintf_fragment {
+ name: "manifest_media_c2_software_aidl.xml",
+ src: "manifest_media_c2_software_aidl.xml",
+}
+
mediaserver_cc_binary {
name: "mediaserver",
@@ -92,7 +97,7 @@
vintf_fragments: ["manifest_media_c2_software_hidl.xml"],
product_variables: {
release_aidl_use_unfrozen: {
- vintf_fragments: ["manifest_media_c2_software_aidl.xml"],
+ vintf_fragment_modules: ["manifest_media_c2_software_aidl.xml"],
},
},
diff --git a/media/module/bufferpool/2.0/BufferPoolClient.cpp b/media/module/bufferpool/2.0/BufferPoolClient.cpp
index cda23ff..66d11fa 100644
--- a/media/module/bufferpool/2.0/BufferPoolClient.cpp
+++ b/media/module/bufferpool/2.0/BufferPoolClient.cpp
@@ -762,6 +762,10 @@
} else {
connection = mRemoteConnection;
}
+ if (!connection) {
+ ALOGE("connection null: fetchBufferHandle()");
+ return ResultStatus::CRITICAL_ERROR;
+ }
ResultStatus status;
Return<void> transResult = connection->fetch(
transactionId, bufferId,
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 3d873df..b250a03 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -52,9 +52,6 @@
symbol_file: "libmediandk.map.txt",
first_version: "21",
unversioned_until: "current",
- export_header_libs: [
- "libmediandk_headers",
- ],
}
ndk_headers {
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 3b71758..ca4c294 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -2923,7 +2923,8 @@
audio_config_base_t *mixerConfig,
audio_devices_t deviceType,
const String8& address,
- audio_output_flags_t flags)
+ audio_output_flags_t flags,
+ const audio_attributes_t attributes)
{
AudioHwDevice *outHwDev = findSuitableHwDev_l(module, deviceType);
if (outHwDev == NULL) {
@@ -2941,13 +2942,18 @@
mHardwareStatus = AUDIO_HW_OUTPUT_OPEN;
AudioStreamOut *outputStream = NULL;
+
+ playback_track_metadata_v7_t trackMetadata;
+ trackMetadata.base.usage = attributes.usage;
+
status_t status = outHwDev->openOutputStream(
&outputStream,
*output,
deviceType,
flags,
halConfig,
- address.c_str());
+ address.c_str(),
+ {trackMetadata});
mHardwareStatus = AUDIO_HW_IDLE;
@@ -3016,6 +3022,8 @@
aidl2legacy_DeviceDescriptorBase(request.device));
audio_output_flags_t flags = VALUE_OR_RETURN_STATUS(
aidl2legacy_int32_t_audio_output_flags_t_mask(request.flags));
+ audio_attributes_t attributes = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioAttributes_audio_attributes_t(request.attributes));
audio_io_handle_t output;
@@ -3038,7 +3046,7 @@
audio_utils::lock_guard _l(mutex());
const sp<IAfThreadBase> thread = openOutput_l(module, &output, &halConfig,
- &mixerConfig, deviceType, address, flags);
+ &mixerConfig, deviceType, address, flags, attributes);
if (thread != 0) {
uint32_t latencyMs = 0;
if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) == 0) {
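The hunk above seeds the HAL output stream with one playback_track_metadata_v7_t derived from the client's attributes, but it only assigns base.usage, so the remaining fields of the stack variable stay indeterminate. A fuller, zero-initialised variant might look like the sketch below; it assumes the playback_track_metadata_v7 layout from system/audio.h, and the extra field choices are illustrative rather than part of this change.

    #include <cstring>
    #include <system/audio.h>

    // Sketch: build the initial source metadata passed to openOutputStream().
    inline playback_track_metadata_v7_t makeInitialTrackMetadata(
            const audio_attributes_t& attributes) {
        playback_track_metadata_v7_t trackMetadata = {};  // no indeterminate fields
        trackMetadata.base.usage = attributes.usage;
        trackMetadata.base.content_type = attributes.content_type;
        trackMetadata.base.gain = 1.0f;                   // unity until tracks attach
        trackMetadata.channel_mask = AUDIO_CHANNEL_NONE;
        std::strncpy(trackMetadata.tags, attributes.tags, sizeof(trackMetadata.tags) - 1);
        return trackMetadata;
    }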
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index f61fdce..f5ec5f8 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -332,7 +332,8 @@
audio_config_base_t* mixerConfig,
audio_devices_t deviceType,
const String8& address,
- audio_output_flags_t flags) final REQUIRES(mutex());
+ audio_output_flags_t flags,
+ audio_attributes_t attributes) final REQUIRES(mutex());
const DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*>&
getAudioHwDevs_l() const final REQUIRES(mutex(), hardwareMutex()) {
return mAudioHwDevs;
diff --git a/services/audioflinger/IAfPatchPanel.h b/services/audioflinger/IAfPatchPanel.h
index 6110e4c..37dce3a 100644
--- a/services/audioflinger/IAfPatchPanel.h
+++ b/services/audioflinger/IAfPatchPanel.h
@@ -82,7 +82,8 @@
audio_config_base_t* mixerConfig,
audio_devices_t deviceType,
const String8& address,
- audio_output_flags_t flags) REQUIRES(mutex()) = 0;
+ audio_output_flags_t flags,
+ audio_attributes_t attributes) REQUIRES(mutex()) = 0;
virtual audio_utils::mutex& mutex() const
RETURN_CAPABILITY(audio_utils::AudioFlinger_Mutex) = 0;
virtual const DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*>&
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index f57470f..35f17c1 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -260,6 +260,7 @@
if (patch->sinks[0].config_mask & AUDIO_PORT_CONFIG_FLAGS) {
flags = patch->sinks[0].flags.output;
}
+ audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
const sp<IAfThreadBase> thread = mAfPatchPanelCallback->openOutput_l(
patch->sinks[0].ext.device.hw_module,
&output,
@@ -267,7 +268,8 @@
&mixerConfig,
outputDevice,
outputDeviceAddress,
- flags);
+ flags,
+ attributes);
ALOGV("mAfPatchPanelCallback->openOutput_l() returned %p", thread.get());
if (thread == 0) {
status = NO_MEMORY;
diff --git a/services/audioflinger/datapath/AudioHwDevice.cpp b/services/audioflinger/datapath/AudioHwDevice.cpp
index 95e9ecc..5314e9e 100644
--- a/services/audioflinger/datapath/AudioHwDevice.cpp
+++ b/services/audioflinger/datapath/AudioHwDevice.cpp
@@ -43,7 +43,8 @@
audio_devices_t deviceType,
audio_output_flags_t flags,
struct audio_config *config,
- const char *address)
+ const char *address,
+ const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
{
struct audio_config originalConfig = *config;
@@ -52,7 +53,7 @@
// Try to open the HAL first using the current format.
ALOGV("openOutputStream(), try sampleRate %d, format %#x, channelMask %#x", config->sample_rate,
config->format, config->channel_mask);
- status_t status = outputStream->open(handle, deviceType, config, address);
+ status_t status = outputStream->open(handle, deviceType, config, address, sourceMetadata);
if (status != NO_ERROR) {
delete outputStream;
@@ -72,7 +73,8 @@
if (wrapperNeeded) {
if (SPDIFEncoder::isFormatSupported(originalConfig.format)) {
outputStream = new SpdifStreamOut(this, flags, originalConfig.format);
- status = outputStream->open(handle, deviceType, &originalConfig, address);
+ status = outputStream->open(handle, deviceType, &originalConfig, address,
+ sourceMetadata);
if (status != NO_ERROR) {
ALOGE("ERROR - openOutputStream(), SPDIF open returned %d",
status);
diff --git a/services/audioflinger/datapath/AudioHwDevice.h b/services/audioflinger/datapath/AudioHwDevice.h
index 80c1473..e1a9018 100644
--- a/services/audioflinger/datapath/AudioHwDevice.h
+++ b/services/audioflinger/datapath/AudioHwDevice.h
@@ -87,7 +87,8 @@
audio_devices_t deviceType,
audio_output_flags_t flags,
struct audio_config *config,
- const char *address);
+ const char *address,
+ const std::vector<playback_track_metadata_v7_t>& sourceMetadata);
status_t openInputStream(
AudioStreamIn **ppStreamIn,
diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
index a686ff6..c65373e 100644
--- a/services/audioflinger/datapath/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -93,7 +93,8 @@
audio_io_handle_t handle,
audio_devices_t deviceType,
struct audio_config *config,
- const char *address)
+ const char *address,
+ const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
{
sp<StreamOutHalInterface> outStream;
@@ -107,7 +108,8 @@
customFlags,
config,
address,
- &outStream);
+ &outStream,
+ sourceMetadata);
ALOGV("AudioStreamOut::open(), HAL returned stream %p, sampleRate %d, format %#x,"
" channelMask %#x, status %d", outStream.get(), config->sample_rate, config->format,
config->channel_mask, status);
@@ -124,7 +126,8 @@
customFlags,
&customConfig,
address,
- &outStream);
+ &outStream,
+ sourceMetadata);
ALOGV("AudioStreamOut::open(), treat IEC61937 as PCM, status = %d", status);
}
diff --git a/services/audioflinger/datapath/AudioStreamOut.h b/services/audioflinger/datapath/AudioStreamOut.h
index 2c9fb3e..2bf94a1 100644
--- a/services/audioflinger/datapath/AudioStreamOut.h
+++ b/services/audioflinger/datapath/AudioStreamOut.h
@@ -47,7 +47,8 @@
audio_io_handle_t handle,
audio_devices_t deviceType,
struct audio_config *config,
- const char *address);
+ const char *address,
+ const std::vector<playback_track_metadata_v7_t>& sourceMetadata);
virtual ~AudioStreamOut();
diff --git a/services/audioflinger/datapath/SpdifStreamOut.cpp b/services/audioflinger/datapath/SpdifStreamOut.cpp
index 65a4eec..d3983b0 100644
--- a/services/audioflinger/datapath/SpdifStreamOut.cpp
+++ b/services/audioflinger/datapath/SpdifStreamOut.cpp
@@ -45,7 +45,8 @@
audio_io_handle_t handle,
audio_devices_t devices,
struct audio_config *config,
- const char *address)
+ const char *address,
+ const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
{
struct audio_config customConfig = *config;
@@ -75,7 +76,8 @@
handle,
devices,
&customConfig,
- address);
+ address,
+ sourceMetadata);
ALOGI("SpdifStreamOut::open() status = %d", status);
diff --git a/services/audioflinger/datapath/SpdifStreamOut.h b/services/audioflinger/datapath/SpdifStreamOut.h
index c6d27ba..1cd8f65 100644
--- a/services/audioflinger/datapath/SpdifStreamOut.h
+++ b/services/audioflinger/datapath/SpdifStreamOut.h
@@ -43,7 +43,8 @@
audio_io_handle_t handle,
audio_devices_t devices,
struct audio_config *config,
- const char *address) override;
+ const char *address,
+ const std::vector<playback_track_metadata_v7_t>& sourceMetadata) override;
/**
* Write audio buffer to driver. Returns number of bytes written, or a
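
Note: the AudioHwDevice/AudioStreamOut/SpdifStreamOut hunks above thread a std::vector<playback_track_metadata_v7_t> (the stream's initial source metadata) from openOutputStream() down to the HAL open call, with the SPDIF wrapper forwarding it unchanged. A minimal sketch of how such a vector could be filled before the call (illustration only; the metadata values are assumptions, the struct is declared in system/audio.h):

    #include <vector>
    #include <system/audio.h>

    std::vector<playback_track_metadata_v7_t> sourceMetadata;
    playback_track_metadata_v7_t track{};
    track.base.usage = AUDIO_USAGE_MEDIA;               // assumption: media playback
    track.base.content_type = AUDIO_CONTENT_TYPE_MUSIC; // assumption: music content
    track.base.gain = 1.0f;                             // unity gain
    track.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
    sourceMetadata.push_back(track);
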
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index bfc3132..3dfda03 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -470,7 +470,8 @@
audio_config_base_t *mixerConfig,
const sp<DeviceDescriptorBase>& device,
uint32_t *latencyMs,
- audio_output_flags_t flags) = 0;
+ audio_output_flags_t flags,
+ audio_attributes_t audioAttributes) = 0;
// creates a special output that is duplicated to the two outputs passed as arguments.
// The duplication is performed by a special mixer thread in the AudioFlinger.
virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 00958aa..ab2a350 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -412,7 +412,8 @@
const DeviceVector &devices,
audio_stream_type_t stream,
audio_output_flags_t flags,
- audio_io_handle_t *output);
+ audio_io_handle_t *output,
+ audio_attributes_t attributes);
// Called when a stream is about to be started
// Note: called before setClientActive(true);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index acd0ff5..9286268 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -588,7 +588,8 @@
const DeviceVector &devices,
audio_stream_type_t stream,
audio_output_flags_t flags,
- audio_io_handle_t *output)
+ audio_io_handle_t *output,
+ audio_attributes_t attributes)
{
mDevices = devices;
sp<DeviceDescriptor> device = devices.getDeviceForOpening();
@@ -652,7 +653,8 @@
&lMixerConfig,
device,
&mLatency,
- mFlags);
+ mFlags,
+ attributes);
if (status == NO_ERROR) {
LOG_ALWAYS_FATAL_IF(*output == AUDIO_IO_HANDLE_NONE,
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-0 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..4d539b7
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-1 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..8af7d2f
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-2 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..b89b77e
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-3 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..6e966e9
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-4 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-4
new file mode 100644
index 0000000..8ccf24d
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-4
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-5 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-5
new file mode 100644
index 0000000..223d1df
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-5
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-6 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-6
new file mode 100644
index 0000000..ad54b83
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-6
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-7 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-7
new file mode 100644
index 0000000..f4eabf4
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-7
Binary files differ
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 811ca12..5a51a91 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1258,7 +1258,7 @@
status = openDirectOutput(
*stream, session, config,
(audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_DIRECT),
- DeviceVector(policyMixDevice), &newOutput);
+ DeviceVector(policyMixDevice), &newOutput, *resultAttr);
if (status == NO_ERROR) {
policyDesc = mOutputs.valueFor(newOutput);
primaryMix->setOutput(policyDesc);
@@ -1479,7 +1479,8 @@
const audio_config_t *config,
audio_output_flags_t flags,
const DeviceVector &devices,
- audio_io_handle_t *output) {
+ audio_io_handle_t *output,
+ audio_attributes_t attributes) {
*output = AUDIO_IO_HANDLE_NONE;
@@ -1567,7 +1568,8 @@
releaseMsdOutputPatches(devices);
status_t status =
- outputDesc->open(config, nullptr /* mixerConfig */, devices, stream, flags, output);
+ outputDesc->open(config, nullptr /* mixerConfig */, devices, stream, flags, output,
+ attributes);
// only accept an output with the requested parameters
if (status != NO_ERROR ||
@@ -1668,7 +1670,9 @@
audio_config_t directConfig = *config;
directConfig.channel_mask = channelMask;
- status_t status = openDirectOutput(stream, session, &directConfig, *flags, devices, &output);
+
+ status_t status = openDirectOutput(stream, session, &directConfig, *flags, devices, &output,
+ *attr);
if (status != NAME_NOT_FOUND) {
return output;
}
@@ -6447,10 +6451,11 @@
sp<SwAudioOutputDescriptor> outputDesc = new SwAudioOutputDescriptor(outProfile,
mpClientInterface);
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+ audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
status_t status = outputDesc->open(nullptr /* halConfig */, nullptr /* mixerConfig */,
DeviceVector(supportedDevice),
AUDIO_STREAM_DEFAULT,
- AUDIO_OUTPUT_FLAG_NONE, &output);
+ AUDIO_OUTPUT_FLAG_NONE, &output, attributes);
if (status != NO_ERROR) {
ALOGW("Cannot open output stream for devices %s on hw module %s",
supportedDevice->toString().c_str(), hwModule->getName());
@@ -7110,28 +7115,35 @@
std::vector<sp<SwAudioOutputDescriptor>> invalidatedOutputs;
// take into account dynamic audio policies related changes: if a client is now associated
// to a different policy mix than at creation time, invalidate corresponding stream
+ // invalidate clients on outputs that do not support all the newly selected devices for the
+ // strategy
for (size_t i = 0; i < mPreviousOutputs.size(); i++) {
const sp<SwAudioOutputDescriptor>& desc = mPreviousOutputs.valueAt(i);
- if (desc->isDuplicated()) {
+ if (desc->isDuplicated() || desc->getClientCount() == 0) {
continue;
}
+
for (const sp<TrackClientDescriptor>& client : desc->getClientIterable()) {
if (mEngine->getProductStrategyForAttributes(client->attributes()) != psId) {
continue;
}
+ if (!desc->supportsAllDevices(newDevices)) {
+ invalidatedOutputs.push_back(desc);
+ break;
+ }
sp<AudioPolicyMix> primaryMix;
status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->config(),
client->uid(), client->session(), client->flags(), mAvailableOutputDevices,
nullptr /* requestedDevice */, primaryMix, nullptr /* secondaryMixes */,
unneededUsePrimaryOutputFromPolicyMixes);
- if (status != OK) {
- continue;
- }
- if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
- if (desc->isStrategyActive(psId) && maxLatency < desc->latency()) {
- maxLatency = desc->latency();
+ if (status == OK) {
+ if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
+ if (desc->isStrategyActive(psId) && maxLatency < desc->latency()) {
+ maxLatency = desc->latency();
+ }
+ invalidatedOutputs.push_back(desc);
+ break;
}
- invalidatedOutputs.push_back(desc);
}
}
}
@@ -8598,8 +8610,9 @@
}
sp<SwAudioOutputDescriptor> desc = new SwAudioOutputDescriptor(profile, mpClientInterface);
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+ audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
status_t status = desc->open(halConfig, mixerConfig, devices,
- AUDIO_STREAM_DEFAULT, flags, &output);
+ AUDIO_STREAM_DEFAULT, flags, &output, attributes);
if (status != NO_ERROR) {
ALOGE("%s failed to open output %d", __func__, status);
return nullptr;
@@ -8637,7 +8650,8 @@
config.offload_info.channel_mask = config.channel_mask;
config.offload_info.format = config.format;
- status = desc->open(&config, mixerConfig, devices, AUDIO_STREAM_DEFAULT, flags, &output);
+ status = desc->open(&config, mixerConfig, devices, AUDIO_STREAM_DEFAULT, flags, &output,
+ attributes);
if (status != NO_ERROR) {
return nullptr;
}
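
Note: the @@ -7110 hunk above reworks the client-invalidation loop: outputs with no clients are skipped early, outputs that cannot reach every newly selected device for the strategy are invalidated outright, and the existing dynamic-policy-mix check now runs only when getOutputForAttr() succeeds. A standalone sketch of that flow, using simplified stand-in types rather than the real policy classes (maxLatency bookkeeping omitted):

    #include <vector>

    // Simplified stand-ins for SwAudioOutputDescriptor and its clients.
    struct ClientSketch { int strategy; bool lostOrChangedPrimaryMix; };
    struct OutputSketch {
        bool duplicated;
        bool supportsAllNewDevices;
        std::vector<ClientSketch> clients;
    };

    // Collect outputs whose clients must be invalidated for strategy psId.
    std::vector<const OutputSketch*> collectInvalidated(
            const std::vector<OutputSketch>& previousOutputs, int psId) {
        std::vector<const OutputSketch*> invalidated;
        for (const OutputSketch& out : previousOutputs) {
            if (out.duplicated || out.clients.empty()) continue;  // new early-out
            for (const ClientSketch& client : out.clients) {
                if (client.strategy != psId) continue;
                if (!out.supportsAllNewDevices) {                 // new device check
                    invalidated.push_back(&out);
                    break;
                }
                // Original policy-mix check, kept only for the successful
                // getOutputForAttr() case (status == OK) in the real code.
                if (client.lostOrChangedPrimaryMix) {
                    invalidated.push_back(&out);
                    break;
                }
            }
        }
        return invalidated;
    }
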
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index c8cced6..5801846 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -1124,7 +1124,8 @@
const audio_config_t *config,
audio_output_flags_t flags,
const DeviceVector &devices,
- audio_io_handle_t *output);
+ audio_io_handle_t *output,
+ audio_attributes_t attributes);
/**
* @brief Queries if some kind of spatialization will be performed if the audio playback
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 6de71a3..f1d7af8 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -56,7 +56,8 @@
audio_config_base_t *mixerConfig,
const sp<DeviceDescriptorBase>& device,
uint32_t *latencyMs,
- audio_output_flags_t flags)
+ audio_output_flags_t flags,
+ audio_attributes_t attributes)
{
sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
if (af == 0) {
@@ -74,6 +75,8 @@
legacy2aidl_audio_config_base_t_AudioConfigBase(*mixerConfig, false /*isInput*/));
request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_DeviceDescriptorBase(device));
request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+ request.attributes = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_attributes_t_AudioAttributes(attributes));
status_t status = af->openOutput(request, &response);
if (status == OK) {
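
Note: the AudioPolicyClientImpl hunk above converts the legacy audio_attributes_t into its AIDL counterpart before adding it to the OpenOutputRequest, using the same VALUE_OR_RETURN_STATUS / legacy2aidl idiom as the other request fields. Roughly, the macro evaluates a ConversionResult and, on failure, returns the error as the enclosing function's status. A simplified restatement (sketch only, not the real macro from media/AidlConversionUtil.h; member names follow the expected-like interface and are an assumption):

    // Sketch of what
    //   request.attributes = VALUE_OR_RETURN_STATUS(
    //           legacy2aidl_audio_attributes_t_AudioAttributes(attributes));
    // amounts to inside a function returning status_t.
    auto result = legacy2aidl_audio_attributes_t_AudioAttributes(attributes);
    if (!result.has_value()) {
        return result.error();   // propagate the conversion failure as a status_t
    }
    request.attributes = result.value();
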
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 7aa80cf..da09e5f 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -778,7 +778,8 @@
audio_config_base_t *mixerConfig,
const sp<DeviceDescriptorBase>& device,
uint32_t *latencyMs,
- audio_output_flags_t flags);
+ audio_output_flags_t flags,
+ audio_attributes_t attributes);
// creates a special output that is duplicated to the two outputs passed as arguments. The duplication is performed by
// a special mixer thread in the AudioFlinger.
virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1, audio_io_handle_t output2);
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index df0c3a1..b5d3188 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -106,6 +106,6 @@
"-Werror",
],
- test_suites: ["device-tests"],
+ test_suites: ["general-tests"],
}
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 072d9c0..6382c71 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -41,7 +41,8 @@
audio_config_base_t * /*mixerConfig*/,
const sp<DeviceDescriptorBase>& /*device*/,
uint32_t * /*latencyMs*/,
- audio_output_flags_t /*flags*/) override {
+ audio_output_flags_t /*flags*/,
+ audio_attributes_t /*attributes*/) override {
if (module >= mNextModuleHandle) {
ALOGE("%s: Module handle %d has not been allocated yet (next is %d)",
__func__, module, mNextModuleHandle);
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index e55e935..6aa347e 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -37,7 +37,8 @@
audio_config_base_t* /*mixerConfig*/,
const sp<DeviceDescriptorBase>& /*device*/,
uint32_t* /*latencyMs*/,
- audio_output_flags_t /*flags*/) override { return NO_INIT; }
+ audio_output_flags_t /*flags*/,
+ audio_attributes_t /*attributes*/) override { return NO_INIT; }
audio_io_handle_t openDuplicateOutput(audio_io_handle_t /*output1*/,
audio_io_handle_t /*output2*/) override {
return AUDIO_IO_HANDLE_NONE;
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index b93f7e2..3765a8f 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -2628,10 +2628,12 @@
audio_config_base_t * mixerConfig,
const sp<DeviceDescriptorBase>& device,
uint32_t * latencyMs,
- audio_output_flags_t flags) override {
+ audio_output_flags_t flags,
+ audio_attributes_t attributes) override {
return mSimulateFailure ? BAD_VALUE :
AudioPolicyManagerTestClient::openOutput(
- module, output, halConfig, mixerConfig, device, latencyMs, flags);
+ module, output, halConfig, mixerConfig, device, latencyMs, flags,
+ attributes);
}
status_t openInput(audio_module_handle_t module,
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-0 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..4c56959
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-1 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..fc0e371
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-10 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-10
new file mode 100644
index 0000000..1266b3e
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-10
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-11 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-11
new file mode 100644
index 0000000..cb1c0e4
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-11
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-12 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-12
new file mode 100644
index 0000000..ab820a4
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-12
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-13 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-13
new file mode 100644
index 0000000..6051e9a
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-13
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-14 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-14
new file mode 100644
index 0000000..596e55b
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-14
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-15 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-15
new file mode 100644
index 0000000..20d7dcb
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-15
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-2 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..5bbfa56
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-3 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..cd148f6
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-4 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-4
new file mode 100644
index 0000000..e4ddb50
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-4
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-5 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-5
new file mode 100644
index 0000000..3be3ce1
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-5
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-6 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-6
new file mode 100644
index 0000000..3b51e41
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-6
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-7 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-7
new file mode 100644
index 0000000..3b929df
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-7
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-8 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-8
new file mode 100644
index 0000000..f92337b
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-8
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-9 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-9
new file mode 100644
index 0000000..0fe0f06
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-9
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-0 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..802c2b5
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-1 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..9ee6a15
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-10 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-10
new file mode 100644
index 0000000..95006c8
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-10
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-11 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-11
new file mode 100644
index 0000000..853be96
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-11
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-12 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-12
new file mode 100644
index 0000000..c3e9848
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-12
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-13 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-13
new file mode 100644
index 0000000..08b7f0d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-13
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-14 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-14
new file mode 100644
index 0000000..20e5e80
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-14
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-15 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-15
new file mode 100644
index 0000000..4e54f0b
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-15
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-2 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..2b2495d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-3 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..753594d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-4 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-4
new file mode 100644
index 0000000..0ed2010
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-4
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-5 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-5
new file mode 100644
index 0000000..f6141d1
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-5
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-6 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-6
new file mode 100644
index 0000000..b93f618
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-6
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-7 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-7
new file mode 100644
index 0000000..f8f296d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-7
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-8 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-8
new file mode 100644
index 0000000..29bdbc1
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-8
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-9 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-9
new file mode 100644
index 0000000..315f25e
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-9
Binary files differ
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 9c2fb7c..f12a5d6 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -310,6 +310,7 @@
mServiceLog->add(log);
std::scoped_lock lock{mLock};
+ ClientInfoParcel updatedClientInfo = clientInfo;
if (!mProcessInfo->isPidUidTrusted(pid, uid)) {
pid_t callingPid = IPCThreadState::self()->getCallingPid();
uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -317,6 +318,8 @@
__FUNCTION__, pid, uid, callingPid, callingUid);
pid = callingPid;
uid = callingUid;
+ updatedClientInfo.pid = callingPid;
+ updatedClientInfo.uid = callingUid;
}
ResourceInfos& infos = getResourceInfosForEdit(pid, mMap);
ResourceInfo& info = getResourceInfoForEdit(clientInfo, client, infos);
@@ -342,7 +345,7 @@
}
if (info.deathNotifier == nullptr && client != nullptr) {
info.deathNotifier = DeathNotifier::Create(
- client, ref<ResourceManagerService>(), clientInfo);
+ client, ref<ResourceManagerService>(), updatedClientInfo);
}
if (mObserverService != nullptr && !resourceAdded.empty()) {
mObserverService->onResourceAdded(uid, pid, resourceAdded);
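
Note: the ResourceManagerService hunk above keeps the ClientInfoParcel consistent with the identity actually used for bookkeeping: when the caller is not trusted to act on behalf of another pid/uid, both the local pid/uid and the parcel handed to DeathNotifier::Create() are rewritten to the calling pid/uid, so the death notifier tracks the effective client rather than the claimed one. Consolidated view of the fix-up (a sketch restating the hunk above, with logging omitted):

    // Untrusted callers may not impersonate other processes, so the parcel used
    // to register the death notifier mirrors the corrected identity.
    ClientInfoParcel updatedClientInfo = clientInfo;
    if (!mProcessInfo->isPidUidTrusted(pid, uid)) {
        pid = IPCThreadState::self()->getCallingPid();
        uid = IPCThreadState::self()->getCallingUid();
        updatedClientInfo.pid = pid;
        updatedClientInfo.uid = uid;
    }
    // ... later, when the client is first seen:
    // info.deathNotifier = DeathNotifier::Create(
    //         client, ref<ResourceManagerService>(), updatedClientInfo);
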