Merge "Enable low-latency feature in av1 decoders" into main
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index 1cf63b0..e9b757b 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -4,9 +4,30 @@
hidden_api_txt_checksorted_hook = ${REPO_ROOT}/tools/platform-compat/hiddenapi/checksorted_sha.sh ${PREUPLOAD_COMMIT} ${REPO_ROOT}
[Builtin Hooks]
+bpfmt = true
clang_format = true
[Builtin Hooks Options]
+# Enable sorting and limit checks to the following subfolders
+bpfmt = -s
+ media/audio/
+ media/audioserver/
+ media/libaaudio/
+ media/libaudioclient/
+ media/libaudiofoundation/
+ media/libaudiohal/
+ media/libaudioprocessing/
+ media/libaudiousecasevalidation/
+ media/libeffects/
+ media/libmediametrics/
+ media/libnbaio/
+ media/libnblog/
+ services/audioflinger/
+ services/audioparameterparser/
+ services/audiopolicy/
+ services/medialog/
+ services/oboeservice/
+
# Only turn on clang-format check for the following subfolders.
clang_format = --commit ${PREUPLOAD_COMMIT} --style file --extensions c,h,cc,cpp
media/libaudioclient/tests/
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index a2b6a82..da422b5 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -13,6 +13,16 @@
}
flag {
+ name: "set_state_early"
+ namespace: "codec_fwk"
+ description: "Bugfix flag for setting state early to avoid a race condition"
+ bug: "298613712"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
name: "dynamic_color_aspects"
is_exported: true
namespace: "codec_fwk"
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index 256bcb8..722b13a 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -29,6 +29,12 @@
#include "C2SoftAomEnc.h"
+/* Quantization param values defined by the spec */
+#define AOM_QP_MIN 0
+#define AOM_QP_MAX 63
+#define AOM_QP_DEFAULT_MIN AOM_QP_MIN
+#define AOM_QP_DEFAULT_MAX AOM_QP_MAX
+
namespace android {
constexpr char COMPONENT_NAME[] = "c2.android.av1.encoder";
@@ -175,6 +181,19 @@
.inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
.withSetter(CodedColorAspectsSetter, mColorAspects)
.build());
+
+ addParameter(
+ DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+ .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+ 0 /* flexCount */, 0u /* stream */))
+ .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+ {C2Config::I_FRAME, C2Config::P_FRAME}),
+ C2F(mPictureQuantization, m.values[0].min).inRange(
+ AOM_QP_DEFAULT_MIN, AOM_QP_DEFAULT_MAX),
+ C2F(mPictureQuantization, m.values[0].max).inRange(
+ AOM_QP_DEFAULT_MIN, AOM_QP_DEFAULT_MAX)})
+ .withSetter(PictureQuantizationSetter)
+ .build());
}
C2R C2SoftAomEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me) {
@@ -307,6 +326,54 @@
return C2R::Ok();
}
+C2R C2SoftAomEnc::IntfImpl::PictureQuantizationSetter(
+ bool mayBlock, C2P<C2StreamPictureQuantizationTuning::output>& me) {
+ (void)mayBlock;
+ int32_t iMin = AOM_QP_DEFAULT_MIN, pMin = AOM_QP_DEFAULT_MIN;
+ int32_t iMax = AOM_QP_DEFAULT_MAX, pMax = AOM_QP_DEFAULT_MAX;
+ for (size_t i = 0; i < me.v.flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+ // layerMin is clamped to [AOM_QP_MIN, layerMax] to avoid error
+ // cases where layer.min > layer.max
+ int32_t layerMax = std::clamp(layer.max, AOM_QP_MIN, AOM_QP_MAX);
+ int32_t layerMin = std::clamp(layer.min, AOM_QP_MIN, layerMax);
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ iMax = layerMax;
+ iMin = layerMin;
+ ALOGV("iMin %d iMax %d", iMin, iMax);
+ } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+ pMax = layerMax;
+ pMin = layerMin;
+ ALOGV("pMin %d pMax %d", pMin, pMax);
+ }
+ }
+ ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d",
+ iMin, iMax, pMin, pMax);
+
+    // the aom library applies a single QP range to both I and P frames; use the intersection
+ int32_t maxFrameQP = std::min(iMax, pMax);
+ int32_t minFrameQP = std::max(iMin, pMin);
+ if (minFrameQP > maxFrameQP) {
+ minFrameQP = maxFrameQP;
+ }
+ // put them back into the structure
+ for (size_t i = 0; i < me.v.flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ me.set().m.values[i].max = maxFrameQP;
+ me.set().m.values[i].min = minFrameQP;
+        } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+ me.set().m.values[i].max = maxFrameQP;
+ me.set().m.values[i].min = minFrameQP;
+ }
+ }
+ ALOGV("PictureQuantizationSetter(exit): minFrameQP = %d maxFrameQP = %d",
+ minFrameQP, maxFrameQP);
+ return C2R::Ok();
+}
+
uint32_t C2SoftAomEnc::IntfImpl::getLevel_l() const {
return mProfileLevel->level - LEVEL_AV1_2;
}
@@ -558,6 +625,7 @@
mQuality = mIntf->getQuality_l();
mComplexity = mIntf->getComplexity_l();
mAV1EncLevel = mIntf->getLevel_l();
+ mQpBounds = mIntf->getPictureQuantization_l();
}
@@ -575,6 +643,18 @@
break;
}
+ if (mQpBounds->flexCount() > 0) {
+        // read the min/max QP for the sequence; the I-frame bounds are applied to all frames
+ for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ mMaxQuantizer = layer.max;
+ mMinQuantizer = layer.min;
+ break;
+ }
+ }
+ }
+
mCodecInterface = aom_codec_av1_cx();
if (!mCodecInterface) goto CleanUp;
diff --git a/media/codec2/components/aom/C2SoftAomEnc.h b/media/codec2/components/aom/C2SoftAomEnc.h
index 7e5ea63..067b04f 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.h
+++ b/media/codec2/components/aom/C2SoftAomEnc.h
@@ -109,6 +109,7 @@
std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> mQpBounds;
aom_codec_err_t setupCodecParameters();
};
@@ -126,6 +127,8 @@
const C2P<C2StreamPictureSizeInfo::input>& size,
const C2P<C2StreamFrameRateInfo::output>& frameRate,
const C2P<C2StreamBitrateInfo::output>& bitrate);
+ static C2R PictureQuantizationSetter(bool mayBlock,
+ C2P<C2StreamPictureQuantizationTuning::output> &me);
// unsafe getters
std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
@@ -150,6 +153,9 @@
std::shared_ptr<C2StreamPixelFormatInfo::input> getPixelFormat_l() const {
return mPixelFormat;
}
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+ return mPictureQuantization;
+ }
uint32_t getSyncFramePeriod() const;
static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me);
static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
@@ -171,6 +177,7 @@
std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
std::shared_ptr<C2StreamPixelFormatInfo::input> mPixelFormat;
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
};
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 591d56d..7b63e75 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -21,6 +21,7 @@
#include <audio_utils/primitives.h>
#include <media/stagefright/foundation/MediaDefs.h>
+#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <SimpleC2Interface.h>
@@ -81,10 +82,6 @@
FLAC_COMPRESSION_LEVEL_MIN, FLAC_COMPRESSION_LEVEL_MAX)})
.withSetter(Setter<decltype(*mComplexity)>::NonStrictValueWithNoDeps)
.build());
- addParameter(
- DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
- .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 4608))
- .build());
addParameter(
DefineParam(mPcmEncodingInfo, C2_PARAMKEY_PCM_ENCODING)
@@ -96,6 +93,26 @@
})
.withSetter((Setter<decltype(*mPcmEncodingInfo)>::StrictValueWithNoDeps))
.build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMaxBlockSize))
+ .withFields({
+ C2F(mInputMaxBufSize, value).any(),
+ })
+ .withSetter(MaxInputSizeSetter, mChannelCount, mPcmEncodingInfo)
+ .build());
+ }
+
+ static C2R MaxInputSizeSetter(bool mayBlock,
+ C2P<C2StreamMaxBufferSizeInfo::input> &me,
+ const C2P<C2StreamChannelCountInfo::input> &channelCount,
+ const C2P<C2StreamPcmEncodingInfo::input> &pcmEncoding) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ int bytesPerSample = pcmEncoding.v.value == C2Config::PCM_FLOAT ? 4 : 2;
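+        // worst case input: one maximum-size FLAC block of interleaved samples,
+        // i.e. kMaxBlockSize frames x bytes per sample x channel count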
+ me.set().value = kMaxBlockSize * bytesPerSample * channelCount.v.value;
+ return res;
}
uint32_t getSampleRate() const { return mSampleRate->value; }
@@ -446,6 +463,9 @@
mBlockSize = FLAC__stream_encoder_get_blocksize(mFlacStreamEncoder);
+    // Ensure the block size used by the encoder does not exceed kMaxBlockSize
+ CHECK(mBlockSize <= kMaxBlockSize);
+
ALOGV("encoder successfully configured");
return OK;
}
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
index a971ab5..1f3be3c 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.h
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -63,7 +63,8 @@
std::shared_ptr<IntfImpl> mIntf;
const unsigned int kInBlockSize = 1152;
- const unsigned int kMaxNumChannels = 2;
+ static constexpr unsigned int kMaxNumChannels = 2;
+ static constexpr unsigned int kMaxBlockSize = 4608;
FLAC__StreamEncoder* mFlacStreamEncoder;
FLAC__int32* mInputBufferPcm32;
std::shared_ptr<C2LinearBlock> mOutputBlock;
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 2137964..fd9488b 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -469,11 +469,12 @@
mInitialized = false;
}
+ bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
+
if (!mInitialized) {
uint8_t *vol_data[1]{};
int32_t vol_size = 0;
- bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
if (codecConfig || volHeader) {
vol_data[0] = bitstream;
vol_size = inSize;
@@ -512,10 +513,11 @@
return;
}
}
- if (codecConfig) {
- fillEmptyWork(work);
- return;
- }
+ }
+
+ if (codecConfig) {
+ fillEmptyWork(work);
+ return;
}
size_t inPos = 0;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index cdc3be0..40bb26e 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -29,7 +29,6 @@
#include <opus_multistream.h>
}
-#define DEFAULT_FRAME_DURATION_MS 20
namespace android {
namespace {
@@ -38,7 +37,6 @@
} // namespace
-static const int kMaxNumChannelsSupported = 2;
class C2SoftOpusEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
public:
@@ -248,10 +246,11 @@
mAnchorTimeStamp = 0;
mProcessedSamples = 0;
mFilledLen = 0;
- mFrameDurationMs = DEFAULT_FRAME_DURATION_MS;
+ mFrameDurationMs = kDefaultFrameDurationMs;
if (!mInputBufferPcm16) {
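+        // number of samples per channel in one frame at the maximum supported sample rate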
+ size_t frameSize = (mFrameDurationMs * kMaxSampleRateSupported) / 1000;
mInputBufferPcm16 =
- (int16_t*)malloc(kFrameSize * kMaxNumChannels * sizeof(int16_t));
+ (int16_t*)malloc(frameSize * kMaxNumChannelsSupported * sizeof(int16_t));
}
if (!mInputBufferPcm16) return C2_NO_MEMORY;
@@ -368,7 +367,9 @@
}
C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
- err = pool->fetchLinearBlock(kMaxPayload, usage, &mOutputBlock);
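+    // worst case output: kMaxPayload bytes for every frame that this input, together
+    // with any previously buffered samples, can produce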
+ int outCapacity =
+ kMaxPayload * ((inSize + mNumPcmBytesPerInputFrame) / mNumPcmBytesPerInputFrame);
+ err = pool->fetchLinearBlock(outCapacity, usage, &mOutputBlock);
if (err != C2_OK) {
ALOGE("fetchLinearBlock for Output failed with status %d", err);
work->result = C2_NO_MEMORY;
@@ -497,11 +498,11 @@
uint8_t* outPtr = wView.data() + mBytesEncoded;
int encodedBytes =
opus_multistream_encode(mEncoder, mInputBufferPcm16,
- mNumSamplesPerFrame, outPtr, kMaxPayload - mBytesEncoded);
+ mNumSamplesPerFrame, outPtr, outCapacity - mBytesEncoded);
ALOGV("encoded %i Opus bytes from %zu PCM bytes", encodedBytes,
processSize);
- if (encodedBytes < 0 || encodedBytes > (kMaxPayload - mBytesEncoded)) {
+ if (encodedBytes < 0 || encodedBytes > (outCapacity - mBytesEncoded)) {
ALOGE("opus_encode failed, encodedBytes : %d", encodedBytes);
mSignalledError = true;
work->result = C2_CORRUPTED;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.h b/media/codec2/components/opus/C2SoftOpusEnc.h
index 733a6bc..2c9f5e5 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.h
+++ b/media/codec2/components/opus/C2SoftOpusEnc.h
@@ -45,12 +45,13 @@
uint32_t drainMode,
const std::shared_ptr<C2BlockPool> &pool) override;
private:
- /* OPUS_FRAMESIZE_20_MS */
- const int kFrameSize = 960;
- const int kMaxSampleRate = 48000;
- const int kMinSampleRate = 8000;
- const int kMaxPayload = (4000 * kMaxSampleRate) / kMinSampleRate;
- const int kMaxNumChannels = 8;
+ static const int kMaxNumChannelsSupported = 2;
+ static const int kMaxSampleRateSupported = 48000;
+ static const int kDefaultFrameDurationMs = 20;
+    // For a 20 ms frame duration, the recommended payload size is 1276 bytes, as per
+    // https://www.opus-codec.org/docs/html_api/group__opusencoder.html.
+    // For 40 ms, 60 ms, etc. the payload size scales proportionally (1276 x 2, 1276 x 3, ...).
+ static const int kMaxPayload = 1500; // from tests/test_opus_encode.c
std::shared_ptr<IntfImpl> mIntf;
std::shared_ptr<C2LinearBlock> mOutputBlock;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 66ce5ea..1c5772f 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -354,12 +354,9 @@
return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
}
-C2R C2SoftVpxEnc::IntfImpl::PictureQuantizationSetter(bool mayBlock,
- C2P<C2StreamPictureQuantizationTuning::output>
- &me) {
+C2R C2SoftVpxEnc::IntfImpl::PictureQuantizationSetter(
+ bool mayBlock, C2P<C2StreamPictureQuantizationTuning::output>& me) {
(void)mayBlock;
- // these are the ones we're going to set, so want them to default
- // to the DEFAULT values for the codec
int32_t iMin = VPX_QP_DEFAULT_MIN, pMin = VPX_QP_DEFAULT_MIN;
int32_t iMax = VPX_QP_DEFAULT_MAX, pMax = VPX_QP_DEFAULT_MAX;
for (size_t i = 0; i < me.v.flexCount(); ++i) {
@@ -382,8 +379,8 @@
iMin, iMax, pMin, pMax);
// vpx library takes same range for I/P picture type
- int32_t maxFrameQP = std::min({iMax, pMax});
- int32_t minFrameQP = std::max({iMin, pMin});
+ int32_t maxFrameQP = std::min(iMax, pMax);
+ int32_t minFrameQP = std::max(iMin, pMin);
if (minFrameQP > maxFrameQP) {
minFrameQP = maxFrameQP;
}
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 01b0678..1c2a0fb 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -563,6 +563,8 @@
auto mapRet = mDequeued.emplace(bid, *pBuffer);
CHECK(mapRet.second);
} else {
+ ALOGD("allocate error(%d): Dequeued(%zu), Dequeuable(%d)",
+ (int)res, mDequeued.size(), mDequeueable + 1);
if (adjustDequeueConfLocked(updateDequeue)) {
return;
}
@@ -629,7 +631,11 @@
::android::status_t status = igbp->dequeueBuffer(
&slotId, &fence, width, height, format, usage, &outBufferAge, &outTimestamps);
if (status < ::android::OK) {
- ALOGE("dequeueBuffer() error %d", (int)status);
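+        // TIMED_OUT / WOULD_BLOCK are transient: the BufferQueue may simply not have a
+        // free buffer yet, so report C2_BLOCKING instead of treating it as corruption.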
+ if (status == ::android::TIMED_OUT || status == ::android::WOULD_BLOCK) {
+ ALOGW("BQ might not be ready for dequeueBuffer()");
+ return C2_BLOCKING;
+ }
+ ALOGE("BQ in inconsistent status. dequeueBuffer() error %d", (int)status);
return C2_CORRUPTED;
}
cache->waitOnSlot(slotId);
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
index d1f0fb5..4c2ef9c 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
@@ -83,6 +83,7 @@
}
}
+// @VsrTest = 3.2-001.003
TEST_P(Codec2MasterHalTest, MustUseAidlBeyond202404) {
static int sVendorApiLevel = android::base::GetIntProperty("ro.vendor.api_level", 0);
if (sVendorApiLevel < 202404) {
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 2fa89e7..3eec0f3 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -1550,19 +1550,23 @@
sp<Codec2Buffer> LinearOutputBuffers::wrap(const std::shared_ptr<C2Buffer> &buffer) {
if (buffer == nullptr) {
- ALOGV("[%s] using a dummy buffer", mName);
+ ALOGD("[%s] received null buffer", mName);
return new LocalLinearBuffer(mFormat, new ABuffer(0));
}
if (buffer->data().type() != C2BufferData::LINEAR) {
- ALOGV("[%s] non-linear buffer %d", mName, buffer->data().type());
+ ALOGW("[%s] non-linear buffer %d", mName, buffer->data().type());
// We expect linear output buffers from the component.
return nullptr;
}
if (buffer->data().linearBlocks().size() != 1u) {
- ALOGV("[%s] no linear buffers", mName);
+ ALOGW("[%s] no linear buffers", mName);
// We expect one and only one linear block from the component.
return nullptr;
}
+ if (buffer->data().linearBlocks().front().size() == 0) {
+ ALOGD("[%s] received 0-sized buffer", mName);
+ return new LocalLinearBuffer(mFormat, new ABuffer(0));
+ }
sp<Codec2Buffer> clientBuffer = ConstLinearBlockBuffer::Allocate(mFormat, buffer);
if (clientBuffer == nullptr) {
ALOGD("[%s] ConstLinearBlockBuffer::Allocate failed", mName);
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 9c514f2..2550dcf 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -179,10 +179,17 @@
if (!buffer
|| buffer->data().type() != C2BufferData::LINEAR
|| buffer->data().linearBlocks().size() != 1u) {
+ if (!buffer) {
+ ALOGD("ConstLinearBlockBuffer::Allocate: null buffer");
+ } else {
+ ALOGW("ConstLinearBlockBuffer::Allocate: type=%d # linear blocks=%zu",
+ buffer->data().type(), buffer->data().linearBlocks().size());
+ }
return nullptr;
}
C2ReadView readView(buffer->data().linearBlocks()[0].map().get());
if (readView.error() != C2_OK) {
+ ALOGW("ConstLinearBlockBuffer::Allocate: readView.error()=%d", readView.error());
return nullptr;
}
return new ConstLinearBlockBuffer(format, std::move(readView), buffer);
@@ -1137,7 +1144,7 @@
std::optional<Smpte2086> smpte2086;
status_t status = mapper.getSmpte2086(buffer.get(), &smpte2086);
- if (status != OK) {
+ if (status != OK || !smpte2086) {
err = C2_CORRUPTED;
} else {
if (smpte2086) {
@@ -1157,7 +1164,7 @@
std::optional<Cta861_3> cta861_3;
status = mapper.getCta861_3(buffer.get(), &cta861_3);
- if (status != OK) {
+ if (status != OK || !cta861_3) {
err = C2_CORRUPTED;
} else {
if (cta861_3) {
@@ -1176,7 +1183,7 @@
dynamicInfo->reset();
std::optional<std::vector<uint8_t>> vec;
status_t status = mapper.getSmpte2094_40(buffer.get(), &vec);
- if (status != OK) {
+ if (status != OK || !vec) {
dynamicInfo->reset();
err = C2_CORRUPTED;
} else {
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 37a7a4f..692f700 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -687,6 +687,11 @@
const MediaCodecsXmlParser::AttributeMap &attrMap = typeIt->second;
std::unique_ptr<MediaCodecInfo::CapabilitiesWriter> caps =
codecInfo->addMediaType(mediaType.c_str());
+
+    // Tunneled playback could be detected via the component interface, but only the
+    // advertised feature has ever been used for this, so keep relying on it for now.
+ bool canDoTunneledPlayback = false;
+
for (const auto &v : attrMap) {
std::string key = v.first;
std::string value = v.second;
@@ -707,6 +712,11 @@
// Ignore trailing bad characters and default to 0.
(void)sscanf(value.c_str(), "%d", &intValue);
caps->addDetail(key.c_str(), intValue);
+
+ if (key.compare(
+ MediaCodecInfo::Capabilities::FEATURE_TUNNELED_PLAYBACK) == 0) {
+ canDoTunneledPlayback = true;
+ }
} else {
caps->addDetail(key.c_str(), value.c_str());
}
@@ -774,6 +784,17 @@
}
}
}
+
+ if (android::media::codec::provider_->null_output_surface_support() &&
+ android::media::codec::provider_->null_output_surface()) {
+ // all non-tunneled video decoders support detached surface mode
+ if (trait.kind == C2Component::KIND_DECODER &&
+ trait.domain == C2Component::DOMAIN_VIDEO &&
+ !canDoTunneledPlayback) {
+ caps->addDetail(
+ MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE, 0);
+ }
+ }
}
}
}
diff --git a/media/libaaudio/examples/input_monitor/Android.bp b/media/libaaudio/examples/input_monitor/Android.bp
index 72adfd7..fc55290 100644
--- a/media/libaaudio/examples/input_monitor/Android.bp
+++ b/media/libaaudio/examples/input_monitor/Android.bp
@@ -11,7 +11,10 @@
name: "input_monitor",
gtest: false,
srcs: ["src/input_monitor.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
shared_libs: ["libaaudio"],
header_libs: ["libaaudio_example_utils"],
}
@@ -20,7 +23,10 @@
name: "input_monitor_callback",
gtest: false,
srcs: ["src/input_monitor_callback.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
shared_libs: ["libaaudio"],
header_libs: ["libaaudio_example_utils"],
}
diff --git a/media/libaaudio/examples/loopback/Android.bp b/media/libaaudio/examples/loopback/Android.bp
index b18aeec..bde1024 100644
--- a/media/libaaudio/examples/loopback/Android.bp
+++ b/media/libaaudio/examples/loopback/Android.bp
@@ -11,13 +11,16 @@
name: "aaudio_loopback",
gtest: false,
srcs: ["src/loopback.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
static_libs: ["libsndfile"],
include_dirs: ["external/oboe/apps/OboeTester/app/src/main/cpp"],
shared_libs: [
"libaaudio",
"libaudioutils",
- "liblog"
- ],
+ "liblog",
+ ],
header_libs: ["libaaudio_example_utils"],
}
diff --git a/media/libaaudio/examples/write_sine/Android.bp b/media/libaaudio/examples/write_sine/Android.bp
index 1c7e0f1..70b1764 100644
--- a/media/libaaudio/examples/write_sine/Android.bp
+++ b/media/libaaudio/examples/write_sine/Android.bp
@@ -10,7 +10,10 @@
cc_test {
name: "write_sine",
srcs: ["src/write_sine.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
shared_libs: ["libaaudio"],
header_libs: ["libaaudio_example_utils"],
}
@@ -18,7 +21,10 @@
cc_test {
name: "write_sine_callback",
srcs: ["src/write_sine_callback.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
shared_libs: ["libaaudio"],
header_libs: ["libaaudio_example_utils"],
}
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index 1f6f950..8aaa4a0 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -91,13 +91,10 @@
cflags: [
"-Wall",
"-Werror",
- // By default, all symbols are hidden.
-
- // "-fvisibility=hidden",
- // AAUDIO_API is used to explicitly export a function or a variable as a visible symbol.
"-Wno-unused-parameter",
"-Wthread-safety",
+ // AAUDIO_API is used to explicitly export a function or a variable as a visible symbol.
"-DAAUDIO_API=__attribute__((visibility(\"default\")))",
],
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index b7a30dc..a25d7ff 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -83,7 +83,7 @@
typename ServiceTraits>
class ServiceHandler {
public:
- sp<ServiceInterface> getService(bool canStartThreadPool = true)
+ sp<ServiceInterface> getService()
EXCLUDES(mMutex) NO_THREAD_SAFETY_ANALYSIS { // std::unique_ptr
sp<ServiceInterface> service;
sp<Client> client;
@@ -140,7 +140,7 @@
client = mClient;
service = mService;
// Make sure callbacks can be received by the client
- if (canStartThreadPool) {
+ if (mCanStartThreadPool) {
ProcessState::self()->startThreadPool();
}
ul.unlock();
@@ -183,6 +183,10 @@
if (mClient) ServiceTraits::onClearService(mClient);
}
+ void disableThreadPool() {
+ mCanStartThreadPool = false;
+ }
+
private:
std::mutex mSingleGetter;
std::mutex mMutex;
@@ -191,6 +195,7 @@
sp<ServiceInterface> mLocalService GUARDED_BY(mMutex);
sp<ServiceInterface> mService GUARDED_BY(mMutex);
sp<Client> mClient GUARDED_BY(mMutex);
+ std::atomic<bool> mCanStartThreadPool = true;
};
struct AudioFlingerTraits {
@@ -221,10 +226,6 @@
return gAudioFlingerServiceHandler.getService();
}
-sp<IAudioFlinger> AudioSystem::get_audio_flinger_for_fuzzer() {
- return gAudioFlingerServiceHandler.getService(false /* canStartThreadPool */);
-}
-
sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
return gAudioFlingerServiceHandler.getClient();
}
@@ -954,6 +955,11 @@
gAudioPolicyServiceHandler.clearService();
}
+void AudioSystem::disableThreadPool() {
+ gAudioFlingerServiceHandler.disableThreadPool();
+ gAudioPolicyServiceHandler.disableThreadPool();
+}
+
// ---------------------------------------------------------------------------
void AudioSystem::onNewAudioModulesAvailable() {
diff --git a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
index 5e4f9a1..c7a04da 100644
--- a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
+++ b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
@@ -36,6 +36,7 @@
[[clang::no_destroy]] static std::once_flag gSmOnce;
sp<FakeServiceManager> gFakeServiceManager;
+sp<AudioFlingerServerAdapter> gAudioFlingerServerAdapter;
bool addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
FuzzedDataProvider& fdp) {
@@ -47,22 +48,10 @@
return true;
}
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
- FuzzedDataProvider fdp(data, size);
-
- std::call_once(gSmOnce, [&] {
- /* Create a FakeServiceManager instance and add required services */
- gFakeServiceManager = sp<FakeServiceManager>::make();
- setDefaultServiceManager(gFakeServiceManager);
- });
- gFakeServiceManager->clear();
-
- for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
- "batterystats", "media.metrics"}) {
- if (!addService(String16(service), gFakeServiceManager, fdp)) {
- return 0;
- }
- }
+extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
+ /* Create a FakeServiceManager instance and add required services */
+ gFakeServiceManager = sp<FakeServiceManager>::make();
+ setDefaultServiceManager(gFakeServiceManager);
auto configService = ndk::SharedRefBase::make<ConfigMock>();
CHECK_EQ(NO_ERROR, AServiceManager_addService(configService.get()->asBinder().get(),
@@ -77,23 +66,40 @@
CHECK_EQ(NO_ERROR, AServiceManager_addService(moduleService.get()->asBinder().get(),
"android.hardware.audio.core.IModule/default"));
- const auto audioFlinger = sp<AudioFlinger>::make();
- const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+ // Disable creating thread pool for fuzzer instance of audio flinger and audio policy services
+ AudioSystem::disableThreadPool();
- CHECK_EQ(NO_ERROR,
- gFakeServiceManager->addService(
- String16(IAudioFlinger::DEFAULT_SERVICE_NAME), IInterface::asBinder(afAdapter),
- false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+ return 0;
+}
- AudioSystem::get_audio_flinger_for_fuzzer();
- const auto audioPolicyService = sp<AudioPolicyService>::make();
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider fdp(data, size);
- CHECK_EQ(NO_ERROR,
- gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
- false /* allowIsolated */,
- IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+ for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
+ "batterystats", "media.metrics"}) {
+ if (!addService(String16(service), gFakeServiceManager, fdp)) {
+ return 0;
+ }
+ }
- fuzzService(media::IAudioFlingerService::asBinder(afAdapter), std::move(fdp));
+ // TODO(330882064) : Initialise Audio Flinger and Audio Policy services every time
+ std::call_once(gSmOnce, [&] {
+ const auto audioFlinger = sp<AudioFlinger>::make();
+ gAudioFlingerServerAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+ CHECK_EQ(NO_ERROR,
+ gFakeServiceManager->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+ IInterface::asBinder(gAudioFlingerServerAdapter),
+ false /* allowIsolated */,
+ IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+
+ const auto audioPolicyService = sp<AudioPolicyService>::make();
+ CHECK_EQ(NO_ERROR,
+ gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
+ false /* allowIsolated */,
+ IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+ });
+
+ fuzzService(media::IAudioFlingerService::asBinder(gAudioFlingerServerAdapter), std::move(fdp));
return 0;
}
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 77d686a..c238158 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -187,7 +187,10 @@
// helper function to obtain AudioFlinger service handle
static sp<IAudioFlinger> get_audio_flinger();
- static sp<IAudioFlinger> get_audio_flinger_for_fuzzer();
+
+    // Disables creation of the thread pool (used for testing).
+    // Must be called before get_audio_flinger() or get_audio_policy_service().
+ static void disableThreadPool();
static float linearToLog(int volume);
static int logToLinear(float volume);
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 3fd0911..0af164f 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -415,8 +415,10 @@
void StreamHalAidl::onAsyncTransferReady() {
if (auto state = getState(); state == StreamDescriptor::State::TRANSFERRING) {
- // Retrieve the current state together with position counters.
- updateCountersIfNeeded();
+ // Retrieve the current state together with position counters unconditionally
+ // to ensure that the state on our side gets updated.
+ sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
+ nullptr, true /*safeFromNonWorkerThread */);
} else {
ALOGW("%s: unexpected onTransferReady in the state %s", __func__, toString(state).c_str());
}
@@ -424,8 +426,10 @@
void StreamHalAidl::onAsyncDrainReady() {
if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
- // Retrieve the current state together with position counters.
- updateCountersIfNeeded();
+ // Retrieve the current state together with position counters unconditionally
+ // to ensure that the state on our side gets updated.
+ sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
+ nullptr, true /*safeFromNonWorkerThread */);
} else {
ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
}
@@ -502,27 +506,31 @@
"%s %s: must be invoked from the worker thread (%d)",
__func__, command.toString().c_str(), workerTid);
}
- if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
- ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
- return NOT_ENOUGH_DATA;
- }
StreamDescriptor::Reply localReply{};
- if (reply == nullptr) {
- reply = &localReply;
- }
- if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
- ALOGE("%s: failed to read from reply MQ, command %s", __func__, command.toString().c_str());
- return NOT_ENOUGH_DATA;
- }
{
- std::lock_guard l(mLock);
- // Not every command replies with 'latencyMs' field filled out, substitute the last
- // returned value in that case.
- if (reply->latencyMs <= 0) {
- reply->latencyMs = mLastReply.latencyMs;
+ std::lock_guard l(mCommandReplyLock);
+ if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
+ ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
+ return NOT_ENOUGH_DATA;
}
- mLastReply = *reply;
- mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
+ if (reply == nullptr) {
+ reply = &localReply;
+ }
+ if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
+ ALOGE("%s: failed to read from reply MQ, command %s",
+ __func__, command.toString().c_str());
+ return NOT_ENOUGH_DATA;
+ }
+ {
+ std::lock_guard l(mLock);
+ // Not every command replies with 'latencyMs' field filled out, substitute the last
+ // returned value in that case.
+ if (reply->latencyMs <= 0) {
+ reply->latencyMs = mLastReply.latencyMs;
+ }
+ mLastReply = *reply;
+ mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
+ }
}
switch (reply->status) {
case STATUS_OK: return OK;
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index 53d46e5..b20eb00 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -243,6 +243,15 @@
const bool mIsInput;
const audio_config_base_t mConfig;
const StreamContextAidl mContext;
+ // This lock is used to make sending of a command and receiving a reply an atomic
+ // operation. Otherwise, when two threads are trying to send a command, they may both advance to
+ // reading of the reply once the HAL has consumed the command from the MQ, and that creates a
+ // race condition between them.
+ //
+ // Note that only access to command and reply MQs needs to be protected because the data MQ is
+ // only accessed by the I/O thread. Also, there is no need to protect lookup operations on the
+    // queues as they are thread-safe; only the send/receive operations must be protected.
+ std::mutex mCommandReplyLock;
private:
static audio_config_base_t configToBase(const audio_config& config) {
@@ -256,6 +265,8 @@
std::lock_guard l(mLock);
return mLastReply.state;
}
+ // Note: Since `sendCommand` takes mLock while holding mCommandReplyLock, never call
+ // it with `mLock` being held.
status_t sendCommand(
const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
index df1f9bd..c2aa278 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
@@ -147,8 +147,12 @@
return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
}
default: {
- ALOGE("%s %d invalid command %u", __func__, __LINE__, command);
- return BAD_VALUE;
+                // for vendor extensions, copy the data area into the DefaultExtension; the parameter is ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_VendorExtension(param));
+ aidlParam =
+ MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer, vendor, ext);
+ break;
}
}
} else {
@@ -321,8 +325,7 @@
return OK;
}
default: {
- ALOGE("%s %d invalid command %u", __func__, __LINE__, command);
- return BAD_VALUE;
+ VENDOR_EXTENSION_GET_AND_RETURN(Spatializer, spatializer, param);
}
}
} else {
diff --git a/media/libaudioprocessing/tests/fuzzer/Android.bp b/media/libaudioprocessing/tests/fuzzer/Android.bp
index 8fb6fff..e4780cf 100644
--- a/media/libaudioprocessing/tests/fuzzer/Android.bp
+++ b/media/libaudioprocessing/tests/fuzzer/Android.bp
@@ -8,23 +8,23 @@
}
cc_fuzz {
- name: "libaudioprocessing_resampler_fuzzer",
- srcs: [
- "libaudioprocessing_resampler_fuzzer.cpp",
- ],
- defaults: ["libaudioprocessing_test_defaults"],
- static_libs: [
- "libsndfile",
- ],
+ name: "libaudioprocessing_resampler_fuzzer",
+ srcs: [
+ "libaudioprocessing_resampler_fuzzer.cpp",
+ ],
+ defaults: ["libaudioprocessing_test_defaults"],
+ static_libs: [
+ "libsndfile",
+ ],
}
cc_fuzz {
- name: "libaudioprocessing_record_buffer_converter_fuzzer",
- srcs: [
- "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
- ],
- defaults: ["libaudioprocessing_test_defaults"],
- static_libs: [
- "libsndfile",
- ],
+ name: "libaudioprocessing_record_buffer_converter_fuzzer",
+ srcs: [
+ "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
+ ],
+ defaults: ["libaudioprocessing_test_defaults"],
+ static_libs: [
+ "libsndfile",
+ ],
}
diff --git a/media/libeffects/downmix/tests/Android.bp b/media/libeffects/downmix/tests/Android.bp
index 392a6fa..8cecbe2 100644
--- a/media/libeffects/downmix/tests/Android.bp
+++ b/media/libeffects/downmix/tests/Android.bp
@@ -14,7 +14,7 @@
//
// Use "atest downmix_tests" to run.
cc_test {
- name:"downmix_tests",
+ name: "downmix_tests",
gtest: true,
host_supported: true,
vendor: true,
@@ -45,7 +45,7 @@
// test application and outputs then compares files in a local directory
// on device (/data/local/tmp/downmixtest/).
cc_test {
- name:"downmixtest",
+ name: "downmixtest",
host_supported: false,
proprietary: true,
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 0568fbd..9bb3264 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -12,7 +12,7 @@
cc_test {
name: "EffectReverbTest",
defaults: [
- "libeffects-test-defaults",
+ "libeffects-test-defaults",
],
srcs: [
"EffectReverbTest.cpp",
@@ -29,7 +29,7 @@
cc_test {
name: "EffectBundleTest",
defaults: [
- "libeffects-test-defaults",
+ "libeffects-test-defaults",
],
srcs: [
"EffectBundleTest.cpp",
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index 90406e9..fff2feb 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -564,10 +564,40 @@
RetCode BundleContext::setForcedDevice(
const ::aidl::android::media::audio::common::AudioDeviceDescription& device) {
- RETURN_VALUE_IF(true != isDeviceSupportedVirtualizer({device}), RetCode::ERROR_EFFECT_LIB_ERROR,
- " deviceNotSupportVirtualizer");
- mForceDevice = device;
- return RetCode::SUCCESS;
+ RetCode ret = RetCode::SUCCESS;
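+    // virtualization is only a candidate when this context is the virtualizer effect
+    // and the effect is currently enabled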
+ bool enableVirtualizer = mType == lvm::BundleEffectType::VIRTUALIZER && mEnabled;
+
+ if (isDeviceSupportedVirtualizer({device})) {
+ mVirtualizerForcedDevice = device;
+ } else {
+ // disabling forced virtualization mode
+ AudioDeviceDescription noneDevice;
+ if (device != noneDevice) {
+            // the device is not supported: behave as a reset of the forced mode, but return an error
+ ret = RetCode::ERROR_ILLEGAL_PARAMETER;
+ }
+ // verify whether the virtualization should be enabled or disabled
+ if (!isDeviceSupportedVirtualizer(mOutputDevice)) {
+ enableVirtualizer = false;
+ }
+ mVirtualizerForcedDevice = noneDevice;
+ }
+
+ if (enableVirtualizer) {
+ if (mVirtualizerTempDisabled) {
+ LOG(VERBOSE) << __func__ << " re-enable virtualizer";
+ enableOperatingMode();
+ mVirtualizerTempDisabled = false;
+ }
+ } else {
+ if (!mVirtualizerTempDisabled) {
+ LOG(VERBOSE) << __func__ << " disable virtualizer";
+ disableOperatingMode();
+ mVirtualizerTempDisabled = true;
+ }
+ }
+
+ return ret;
}
RetCode BundleContext::initControlParameter(LVM_ControlParams_t& params) const {
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index ba3997a..044c8dd 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -76,7 +76,7 @@
RetCode setForcedDevice(
const ::aidl::android::media::audio::common::AudioDeviceDescription& device);
aidl::android::media::audio::common::AudioDeviceDescription getForcedDevice() const {
- return mForceDevice;
+ return mVirtualizerForcedDevice;
}
std::vector<Virtualizer::ChannelAngle> getSpeakerAngles(
const Virtualizer::SpeakerAnglesPayload payload);
@@ -93,8 +93,6 @@
bool mEnabled = false;
LVM_Handle_t mInstance;
- aidl::android::media::audio::common::AudioDeviceDescription mVirtualizerForcedDevice;
-
int mSamplesPerSecond = 0;
int mSamplesToExitCountEq = 0;
int mSamplesToExitCountBb = 0;
@@ -120,7 +118,7 @@
// Virtualizer
int mVirtStrengthSaved = 0; /* Conversion between Get/Set */
bool mVirtualizerTempDisabled = false;
- ::aidl::android::media::audio::common::AudioDeviceDescription mForceDevice;
+ ::aidl::android::media::audio::common::AudioDeviceDescription mVirtualizerForcedDevice;
// Volume
float mLevelSaveddB = 0; /* for when mute is set, level must be saved */
float mVolumedB = 0;
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 67518af..44ea2a4 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -353,45 +353,62 @@
return status;
}
- std::vector<float> inFrames(samples);
- std::vector<float> outFrames(frameCount * FCC_2);
+ std::vector<float> inputSamples;
+ std::vector<float> outputSamples(frameCount * FCC_2);
if (isPreset() && mNextPreset != mPreset) {
loadPreset();
}
if (isAuxiliary()) {
- inFrames.assign(in, in + samples);
+ inputSamples.resize(samples);
+ inputSamples.assign(in, in + samples);
} else {
- // mono input is duplicated
+ // Resizing to stereo is required to duplicate mono input
+ inputSamples.resize(frameCount * FCC_2);
if (channels >= FCC_2) {
for (int i = 0; i < frameCount; i++) {
- inFrames[FCC_2 * i] = in[channels * i] * kSendLevel;
- inFrames[FCC_2 * i + 1] = in[channels * i + 1] * kSendLevel;
+ inputSamples[FCC_2 * i] = in[channels * i] * kSendLevel;
+ inputSamples[FCC_2 * i + 1] = in[channels * i + 1] * kSendLevel;
}
} else {
for (int i = 0; i < frameCount; i++) {
- inFrames[FCC_2 * i] = inFrames[FCC_2 * i + 1] = in[i] * kSendLevel;
+ inputSamples[FCC_2 * i] = inputSamples[FCC_2 * i + 1] = in[i] * kSendLevel;
}
}
}
if (isPreset() && mPreset == PresetReverb::Presets::NONE) {
- std::fill(outFrames.begin(), outFrames.end(), 0); // always stereo here
+ std::fill(outputSamples.begin(), outputSamples.end(), 0); // always stereo here
} else {
if (!mEnabled && mSamplesToExitCount > 0) {
- std::fill(outFrames.begin(), outFrames.end(), 0);
+ std::fill(outputSamples.begin(), outputSamples.end(), 0);
}
+ int inputBufferIndex = 0;
+ int outputBufferIndex = 0;
+
+        // The LVREV library processes at most INT16_MAX frames per call
+ constexpr int kMaxBlockFrames = std::numeric_limits<int16_t>::max();
+ const auto inputFrameSize = getInputFrameSize();
+ const auto outputFrameSize = getOutputFrameSize();
/* Process the samples, producing a stereo output */
- LVREV_ReturnStatus_en lvrevStatus =
- LVREV_Process(mInstance, /* Instance handle */
- inFrames.data(), /* Input buffer */
- outFrames.data(), /* Output buffer */
- frameCount); /* Number of samples to read */
- if (lvrevStatus != LVREV_SUCCESS) {
- LOG(ERROR) << __func__ << " LVREV_Process error: " << lvrevStatus;
- return {EX_UNSUPPORTED_OPERATION, 0, 0};
+ for (int fc = frameCount; fc > 0;) {
+ int processFrames = std::min(fc, kMaxBlockFrames);
+ LVREV_ReturnStatus_en lvrevStatus =
+ LVREV_Process(mInstance, /* Instance handle */
+ inputSamples.data() + inputBufferIndex, /* Input buffer */
+ outputSamples.data() + outputBufferIndex, /* Output buffer */
+ processFrames); /* Number of samples to process */
+ if (lvrevStatus != LVREV_SUCCESS) {
+ LOG(ERROR) << __func__ << " LVREV_Process error: " << lvrevStatus;
+ return {EX_UNSUPPORTED_OPERATION, 0, 0};
+ }
+
+ fc -= processFrames;
+
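+            // advance by the number of float samples consumed/produced in this block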
+ inputBufferIndex += processFrames * inputFrameSize / sizeof(float);
+ outputBufferIndex += processFrames * outputFrameSize / sizeof(float);
}
}
// Convert to 16 bits
@@ -401,14 +418,14 @@
if (channels >= FCC_2) {
for (int i = 0; i < frameCount; i++) {
// Mix with dry input
- outFrames[FCC_2 * i] += in[channels * i];
- outFrames[FCC_2 * i + 1] += in[channels * i + 1];
+ outputSamples[FCC_2 * i] += in[channels * i];
+ outputSamples[FCC_2 * i + 1] += in[channels * i + 1];
}
} else {
for (int i = 0; i < frameCount; i++) {
// Mix with dry input
- outFrames[FCC_2 * i] += in[i];
- outFrames[FCC_2 * i + 1] += in[i];
+ outputSamples[FCC_2 * i] += in[i];
+ outputSamples[FCC_2 * i + 1] += in[i];
}
}
@@ -420,8 +437,8 @@
float incr = (mVolume.right - vr) / frameCount;
for (int i = 0; i < frameCount; i++) {
- outFrames[FCC_2 * i] *= vl;
- outFrames[FCC_2 * i + 1] *= vr;
+ outputSamples[FCC_2 * i] *= vl;
+ outputSamples[FCC_2 * i + 1] *= vr;
vl += incl;
vr += incr;
@@ -430,8 +447,8 @@
} else if (volumeMode != VOLUME_OFF) {
if (mVolume.left != kUnitVolume || mVolume.right != kUnitVolume) {
for (int i = 0; i < frameCount; i++) {
- outFrames[FCC_2 * i] *= mVolume.left;
- outFrames[FCC_2 * i + 1] *= mVolume.right;
+ outputSamples[FCC_2 * i] *= mVolume.left;
+ outputSamples[FCC_2 * i + 1] *= mVolume.right;
}
}
mPrevVolume = mVolume;
@@ -441,8 +458,8 @@
if (outChannels > 2) {
for (int i = 0; i < frameCount; i++) {
- out[outChannels * i] = outFrames[FCC_2 * i];
- out[outChannels * i + 1] = outFrames[FCC_2 * i + 1];
+ out[outChannels * i] = outputSamples[FCC_2 * i];
+ out[outChannels * i + 1] = outputSamples[FCC_2 * i + 1];
}
if (!isAuxiliary()) {
for (int i = 0; i < frameCount; i++) {
@@ -454,10 +471,10 @@
}
} else {
if (outChannels == FCC_1) {
- From2iToMono_Float(outFrames.data(), out, frameCount);
+ From2iToMono_Float(outputSamples.data(), out, frameCount);
} else {
for (int i = 0; i < frameCount * FCC_2; i++) {
- out[i] = outFrames[i];
+ out[i] = outputSamples[i];
}
}
}
diff --git a/media/libeffects/spatializer/tests/Android.bp b/media/libeffects/spatializer/tests/Android.bp
index 704e873..818e094 100644
--- a/media/libeffects/spatializer/tests/Android.bp
+++ b/media/libeffects/spatializer/tests/Android.bp
@@ -12,7 +12,7 @@
cc_test {
name: "SpatializerTest",
defaults: [
- "libeffects-test-defaults",
+ "libeffects-test-defaults",
],
host_supported: false,
srcs: [
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 590a7b7..840897f 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -223,7 +223,6 @@
"com.android.media",
],
-
srcs: ["MidiIoWrapper.cpp"],
static_libs: [
@@ -278,6 +277,10 @@
"libutils",
],
+ static_libs: [
+ "android.media.codec-aconfig-cc",
+ ],
+
include_dirs: [
"system/libhidl/transport/token/1.0/utils/include",
],
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
index 86ad997..c45c5c3 100644
--- a/media/libmedia/MediaCodecInfo.cpp
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -36,6 +36,7 @@
constexpr char MediaCodecInfo::Capabilities::FEATURE_MULTIPLE_FRAMES[];
constexpr char MediaCodecInfo::Capabilities::FEATURE_SECURE_PLAYBACK[];
constexpr char MediaCodecInfo::Capabilities::FEATURE_TUNNELED_PLAYBACK[];
+constexpr char MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE[];
void MediaCodecInfo::Capabilities::getSupportedProfileLevels(
Vector<ProfileLevel> *profileLevels) const {
diff --git a/media/libmedia/include/media/MediaCodecInfo.h b/media/libmedia/include/media/MediaCodecInfo.h
index 54f565a..88a2dc4 100644
--- a/media/libmedia/include/media/MediaCodecInfo.h
+++ b/media/libmedia/include/media/MediaCodecInfo.h
@@ -69,6 +69,7 @@
constexpr static char FEATURE_MULTIPLE_FRAMES[] = "feature-multiple-frames";
constexpr static char FEATURE_SECURE_PLAYBACK[] = "feature-secure-playback";
constexpr static char FEATURE_TUNNELED_PLAYBACK[] = "feature-tunneled-playback";
+ constexpr static char FEATURE_DETACHED_SURFACE[] = "feature-detached-surface";
/**
* Returns the supported levels for each supported profile in a target array.
diff --git a/media/libmediametrics/Android.bp b/media/libmediametrics/Android.bp
index 8a38dd7..5214dfe 100644
--- a/media/libmediametrics/Android.bp
+++ b/media/libmediametrics/Android.bp
@@ -16,8 +16,8 @@
name: "libmediametrics",
srcs: [
- "MediaMetricsItem.cpp",
"MediaMetrics.cpp",
+ "MediaMetricsItem.cpp",
],
shared_libs: [
@@ -40,8 +40,8 @@
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
cfi: true,
},
@@ -50,8 +50,8 @@
stubs: {
symbol_file: "libmediametrics.map.txt",
versions: [
- "1" ,
- ]
+ "1",
+ ],
},
header_abi_checker: {
@@ -65,7 +65,7 @@
"//frameworks/base/apex/media/framework",
"//frameworks/base/core/jni",
"//frameworks/base/media/jni",
- "//packages/modules/Media/apex/framework",
+ "//packages/modules/Media/apex/framework",
],
}
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index f9ceef2..1593aa0 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -21,6 +21,8 @@
#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
#endif
+#include <android_media_codec.h>
+
#include <inttypes.h>
#include <utils/Trace.h>
@@ -9314,6 +9316,12 @@
// adaptive playback is not supported
caps->removeDetail(MediaCodecInfo::Capabilities::FEATURE_ADAPTIVE_PLAYBACK);
}
+
+ // all non-tunneled video decoders support detached surface mode
+ if (android::media::codec::provider_->null_output_surface_support() &&
+ android::media::codec::provider_->null_output_surface()) {
+ caps->addDetail(MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE, 0);
+ }
}
}
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 896e021..886285e 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -319,6 +319,7 @@
],
static_libs: [
+ "android.media.codec-aconfig-cc",
"libstagefright_esds",
"libstagefright_color_conversion",
"libyuv_static",
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index a18dbfe..e918b5e 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -2436,47 +2436,6 @@
return OK;
}
-bool MPEG4Writer::isSampleMetadataValid(size_t trackIndex, int64_t timeUs) {
- // Track Index starts from zero, so it should be at least 1 less than size.
- if (trackIndex >= mTracks.size()) {
- ALOGE("Incorrect trackIndex %zu, mTracks->size() %zu", trackIndex, mTracks.size());
- return false;
- }
-
- List<Track *>::iterator it = mTracks.begin();
-
- // (*it) is already pointing to trackIndex 0.
- for (int i = 1; i <= trackIndex; i++) {
- it++;
- }
-
- return (*it)->isTimestampValid(timeUs);
-}
-
-bool MPEG4Writer::Track::isTimestampValid(int64_t timeUs) {
- // No timescale if HEIF
- if (mIsHeif) {
- return true;
- }
-
- // Make sure abs(timeUs) does not overflow
- if (timeUs == INT64_MIN) {
- return false;
- }
-
- // Ensure that the timeUs value does not have extremely low or high values
- // that would cause an underflow or overflow, like in the calculation -
- // mdhdDuration = (trakDurationUs * mTimeScale + 5E5) / 1E6
- if (abs(timeUs) >= (INT64_MAX - 5E5) / mTimeScale) {
- return false;
- }
- // Limit check for calculations in ctts box
- if (abs(timeUs) + kMaxCttsOffsetTimeUs >= INT64_MAX / mTimeScale) {
- return false;
- }
- return true;
-}
-
bool MPEG4Writer::Track::isExifData(
MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const {
if (!mIsHeif) {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e4f3b83..4e378d4 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -30,6 +30,8 @@
#include "include/SoftwareRenderer.h"
+#include <android_media_codec.h>
+
#include <android/api-level.h>
#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
@@ -3017,6 +3019,13 @@
return PostAndAwaitResponse(msg, &response);
}
+status_t MediaCodec::detachOutputSurface() {
+ sp<AMessage> msg = new AMessage(kWhatDetachSurface, this);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
status_t MediaCodec::setSurface(const sp<Surface> &surface) {
sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
msg->setObject("surface", surface);
@@ -4676,7 +4685,7 @@
}
mResourceManagerProxy->removeClient();
- mReleaseSurface.reset();
+ mDetachedSurface.reset();
if (mReplyID != nullptr) {
postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
@@ -4849,6 +4858,23 @@
mFlags |= kFlagPushBlankBuffersOnShutdown;
}
+ uint32_t flags;
+ CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
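+    // When configured without a surface but with CONFIGURE_FLAG_DETACHED_SURFACE,
+    // a (non-encoder) video codec starts on the internal placeholder surface.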
+ if (android::media::codec::provider_->null_output_surface_support()) {
+ if (obj == nullptr
+ && (flags & CONFIGURE_FLAG_DETACHED_SURFACE)
+ && !(flags & CONFIGURE_FLAG_ENCODE)) {
+ sp<Surface> surface = getOrCreateDetachedSurface();
+ if (surface == nullptr) {
+ mErrorLog.log(
+ LOG_TAG, "Detached surface mode is not supported by this codec");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ }
+ obj = surface;
+ }
+ }
+
if (obj != NULL) {
if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
// allow frame dropping by surface by default
@@ -4872,8 +4898,6 @@
mApiUsageMetrics.isUsingOutputSurface = true;
- uint32_t flags;
- CHECK(msg->findInt32("flags", (int32_t *)&flags));
if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL ||
flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
if (!(mFlags & kFlagIsAsync)) {
@@ -4994,6 +5018,23 @@
break;
}
+ case kWhatDetachSurface:
+ {
+ // detach surface is equivalent to setSurface(mDetachedSurface)
+ sp<Surface> surface = getOrCreateDetachedSurface();
+
+ if (surface == nullptr) {
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ mErrorLog.log(LOG_TAG, "Detaching surface is not supported by the codec.");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ }
+
+ msg->setObject("surface", surface);
+ }
+ [[fallthrough]];
+
case kWhatSetSurface:
{
sp<AReplyToken> replyID;
@@ -5011,14 +5052,17 @@
sp<Surface> surface = static_cast<Surface *>(obj.get());
if (mSurface == NULL) {
// do not support setting surface if it was not set
- mErrorLog.log(LOG_TAG,
- "Cannot set surface if the codec is not configured with "
- "a surface already");
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Cannot %s surface if the codec is not configured with "
+ "a surface already",
+ msg->what() == kWhatDetachSurface ? "detach" : "set"));
err = INVALID_OPERATION;
} else if (obj == NULL) {
// do not support unsetting surface
mErrorLog.log(LOG_TAG, "Unsetting surface is not supported");
err = BAD_VALUE;
+ } else if (android::media::codec::provider_->null_output_surface_support()) {
+ err = handleSetSurface(surface, true /* callCodec */);
} else {
uint32_t generation;
err = connectToSurface(surface, &generation);
@@ -5052,7 +5096,8 @@
default:
mErrorLog.log(LOG_TAG, base::StringPrintf(
- "setSurface() is valid only at Executing states; currently %s",
+ "%sSurface() is valid only at Executing states; currently %s",
+ msg->what() == kWhatDetachSurface ? "detach" : "set",
apiStateString().c_str()));
err = INVALID_OPERATION;
break;
@@ -5273,30 +5318,40 @@
bool forceSync = false;
if (asyncNotify != nullptr && mSurface != NULL) {
- if (!mReleaseSurface) {
- uint64_t usage = 0;
- if (mSurface->getConsumerUsage(&usage) != OK) {
- usage = 0;
- }
- mReleaseSurface.reset(new ReleaseSurface(usage));
- }
- if (mSurface != mReleaseSurface->getSurface()) {
- uint32_t generation;
- status_t err = connectToSurface(mReleaseSurface->getSurface(), &generation);
- ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
- if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
- err = mCodec->setSurface(mReleaseSurface->getSurface(), generation);
- ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
- }
- if (err == OK) {
- (void)disconnectFromSurface();
- mSurface = mReleaseSurface->getSurface();
- mSurfaceGeneration = generation;
- } else {
- // We were not able to switch the surface, so force
+ if (android::media::codec::provider_->null_output_surface_support()) {
+ if (handleSetSurface(getOrCreateDetachedSurface(), true /* callCodec */,
+ true /* onShutDown */) != OK) {
+ // We were not able to detach the surface, so force
// synchronous release.
forceSync = true;
}
+ } else {
+ if (!mDetachedSurface) {
+ uint64_t usage = 0;
+ if (mSurface->getConsumerUsage(&usage) != OK) {
+ usage = 0;
+ }
+ mDetachedSurface.reset(new ReleaseSurface(usage));
+ }
+ if (mSurface != mDetachedSurface->getSurface()) {
+ uint32_t generation;
+ status_t err =
+ connectToSurface(mDetachedSurface->getSurface(), &generation);
+ ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
+ if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
+ err = mCodec->setSurface(mDetachedSurface->getSurface(), generation);
+ ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
+ }
+ if (err == OK) {
+ (void)disconnectFromSurface();
+ mSurface = mDetachedSurface->getSurface();
+ mSurfaceGeneration = generation;
+ } else {
+ // We were not able to switch the surface, so force
+ // synchronous release.
+ forceSync = true;
+ }
+ }
}
}
@@ -5997,6 +6052,10 @@
mErrorLog.clear();
}
+ if (android::media::codec::provider_->set_state_early()) {
+ mState = newState;
+ }
+
if (newState == UNINITIALIZED) {
// return any straggling buffers, e.g. if we got here on an error
returnBuffersToCodec();
@@ -6007,7 +6066,9 @@
mFlags &= ~kFlagSawMediaServerDie;
}
- mState = newState;
+ if (!android::media::codec::provider_->set_state_early()) {
+ mState = newState;
+ }
if (mBatteryChecker != nullptr) {
mBatteryChecker->setExecuting(isExecuting());
@@ -6591,9 +6652,9 @@
CHECK_EQ(info, &mPortBuffers[portIndex][index]);
availBuffers->erase(availBuffers->begin());
- CHECK(!info->mOwnedByClient);
{
Mutex::Autolock al(mBufferLock);
+ CHECK(!info->mOwnedByClient);
info->mOwnedByClient = true;
// set image-data
@@ -6612,6 +6673,23 @@
return index;
}
+sp<Surface> MediaCodec::getOrCreateDetachedSurface() {
+ if (mDomain != DOMAIN_VIDEO || (mFlags & kFlagIsEncoder)) {
+ return nullptr;
+ }
+
+ if (!mDetachedSurface) {
+ uint64_t usage = 0;
+ if (!mSurface || mSurface->getConsumerUsage(&usage) != OK) {
+ // TODO: should we use a/the default consumer usage?
+ usage = 0;
+ }
+ mDetachedSurface.reset(new ReleaseSurface(usage));
+ }
+
+ return mDetachedSurface->getSurface();
+}
+
status_t MediaCodec::connectToSurface(const sp<Surface> &surface, uint32_t *generation) {
status_t err = OK;
if (surface != NULL) {
@@ -6685,7 +6763,56 @@
return err;
}
+status_t MediaCodec::handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutDown) {
+ uint32_t generation;
+ status_t err = OK;
+ if (surface != nullptr) {
+ err = connectToSurface(surface, &generation);
+ if (err == ALREADY_EXISTS) {
+ // reconnecting to same surface
+ return OK;
+ }
+
+ if (err == OK && callCodec) {
+ if (mFlags & kFlagUsesSoftwareRenderer) {
+ if (mSoftRenderer != NULL
+ && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
+ pushBlankBuffersToNativeWindow(mSurface.get());
+ }
+ // do not create a new software renderer on shutdown (release)
+ // as it will not be used anyway
+ if (!onShutDown) {
+ surface->setDequeueTimeout(-1);
+ mSoftRenderer = new SoftwareRenderer(surface);
+ // TODO: check if this was successful
+ }
+ } else {
+ err = mCodec->setSurface(surface, generation);
+ }
+
+ mReliabilityContextMetrics.setOutputSurfaceCount++;
+ }
+ }
+
+ if (err == OK) {
+ if (mSurface != NULL) {
+ (void)disconnectFromSurface();
+ }
+
+ if (surface != NULL) {
+ mSurface = surface;
+ mSurfaceGeneration = generation;
+ }
+ }
+
+ return err;
+}
+
status_t MediaCodec::handleSetSurface(const sp<Surface> &surface) {
+ if (android::media::codec::provider_->null_output_surface_support()) {
+ return handleSetSurface(surface, false /* callCodec */);
+ }
+
status_t err = OK;
if (mSurface != NULL) {
(void)disconnectFromSurface();
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 1ff8acf..054a4b8 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -77,9 +77,6 @@
virtual void setStartTimeOffsetMs(int ms) { mStartTimeOffsetMs = ms; }
virtual int32_t getStartTimeOffsetMs() const { return mStartTimeOffsetMs; }
virtual status_t setNextFd(int fd);
- // Returns true if the timestamp is valid which is compatible with the Mpeg4.
- // Note that this overloads that method in the base class.
- bool isSampleMetadataValid(size_t trackIndex, int64_t timeUs) override;
protected:
virtual ~MPEG4Writer();
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 9ecb12e..7169b1e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -96,6 +96,7 @@
CONFIGURE_FLAG_ENCODE = 1,
CONFIGURE_FLAG_USE_BLOCK_MODEL = 2,
CONFIGURE_FLAG_USE_CRYPTO_ASYNC = 4,
+ CONFIGURE_FLAG_DETACHED_SURFACE = 8,
};
enum BufferFlags {
@@ -274,6 +275,8 @@
status_t setSurface(const sp<Surface> &nativeWindow);
+ status_t detachOutputSurface();
+
status_t requestIDRFrame();
// Notification will be posted once there "is something to do", i.e.
@@ -368,6 +371,7 @@
kWhatInit = 'init',
kWhatConfigure = 'conf',
kWhatSetSurface = 'sSur',
+ kWhatDetachSurface = 'dSur',
kWhatCreateInputSurface = 'cisf',
kWhatSetInputSurface = 'sisf',
kWhatStart = 'strt',
@@ -474,6 +478,10 @@
uint32_t mSurfaceGeneration = 0;
SoftwareRenderer *mSoftRenderer;
+ // Get the detached BufferQueue surface for a video decoder, and create it
+ // if it does not yet exist.
+ sp<Surface> getOrCreateDetachedSurface();
+
Mutex mMetricsLock;
mediametrics_handle_t mMetricsHandle = 0;
bool mMetricsToUpload = false;
@@ -642,6 +650,13 @@
status_t queueCSDInputBuffer(size_t bufferIndex);
status_t handleSetSurface(const sp<Surface> &surface);
+
+ // Common implementation for changing the output surface.
+ // Handles setting a null surface, which is used during configure and init.
+ // Set |callCodec| to true if the codec needs to be notified (e.g. during the executing state).
+ // Setting |onShutdown| to true avoids unnecessary work when this is used to detach the
+ // surface during a delayed release.
+ status_t handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutdown = false);
status_t connectToSurface(const sp<Surface> &surface, uint32_t *generation);
status_t disconnectFromSurface();
@@ -714,7 +729,7 @@
sp<AMessage> mMsgPollForRenderedBuffers;
class ReleaseSurface;
- std::unique_ptr<ReleaseSurface> mReleaseSurface;
+ std::unique_ptr<ReleaseSurface> mDetachedSurface;
std::list<sp<AMessage>> mLeftover;
status_t handleLeftover(size_t index);
diff --git a/media/module/bufferpool/1.0/vts/multi.cpp b/media/module/bufferpool/1.0/vts/multi.cpp
index d8cc285..21f47d3 100644
--- a/media/module/bufferpool/1.0/vts/multi.cpp
+++ b/media/module/bufferpool/1.0/vts/multi.cpp
@@ -24,6 +24,7 @@
#include <hidl/HidlSupport.h>
#include <hidl/HidlTransportSupport.h>
#include <hidl/LegacySupport.h>
+#include <hidl/ServiceManagement.h>
#include <hidl/Status.h>
#include <signal.h>
#include <sys/types.h>
@@ -36,6 +37,7 @@
using android::hardware::configureRpcThreadpool;
using android::hardware::hidl_handle;
+using android::hardware::isHidlSupported;
using android::hardware::media::bufferpool::V1_0::IClientManager;
using android::hardware::media::bufferpool::V1_0::ResultStatus;
using android::hardware::media::bufferpool::V1_0::implementation::BufferId;
@@ -178,6 +180,7 @@
ResultStatus status;
PipeMessage message;
+ if (!isHidlSupported()) GTEST_SKIP() << "HIDL is not supported on this device";
ASSERT_TRUE(receiveMessage(mResultPipeFds, &message));
android::sp<IClientManager> receiver = IClientManager::getService();
diff --git a/media/module/codecs/amrwb/enc/Android.bp b/media/module/codecs/amrwb/enc/Android.bp
index 8780136..04f36b5 100644
--- a/media/module/codecs/amrwb/enc/Android.bp
+++ b/media/module/codecs/amrwb/enc/Android.bp
@@ -79,67 +79,31 @@
arch: {
arm: {
srcs: [
- "src/asm/ARMV5E/convolve_opt.s",
- "src/asm/ARMV5E/cor_h_vec_opt.s",
- "src/asm/ARMV5E/Deemph_32_opt.s",
- "src/asm/ARMV5E/Dot_p_opt.s",
- "src/asm/ARMV5E/Filt_6k_7k_opt.s",
- "src/asm/ARMV5E/Norm_Corr_opt.s",
- "src/asm/ARMV5E/pred_lt4_1_opt.s",
- "src/asm/ARMV5E/residu_asm_opt.s",
- "src/asm/ARMV5E/scale_sig_opt.s",
- "src/asm/ARMV5E/Syn_filt_32_opt.s",
- "src/asm/ARMV5E/syn_filt_opt.s",
+ "src/asm/ARMV7/convolve_neon.s",
+ "src/asm/ARMV7/cor_h_vec_neon.s",
+ "src/asm/ARMV7/Deemph_32_neon.s",
+ "src/asm/ARMV7/Dot_p_neon.s",
+ "src/asm/ARMV7/Filt_6k_7k_neon.s",
+ "src/asm/ARMV7/Norm_Corr_neon.s",
+ "src/asm/ARMV7/pred_lt4_1_neon.s",
+ "src/asm/ARMV7/residu_asm_neon.s",
+ "src/asm/ARMV7/scale_sig_neon.s",
+ "src/asm/ARMV7/Syn_filt_32_neon.s",
+ "src/asm/ARMV7/syn_filt_neon.s",
],
cflags: [
"-DARM",
+ "-DARMV7",
"-DASM_OPT",
+ // don't actually generate neon instructions, see bug 26932980
+ "-mfpu=vfpv3",
],
- local_include_dirs: ["src/asm/ARMV5E"],
+ local_include_dirs: [
+ "src/asm/ARMV7",
+ ],
instruction_set: "arm",
-
- neon: {
- exclude_srcs: [
- "src/asm/ARMV5E/convolve_opt.s",
- "src/asm/ARMV5E/cor_h_vec_opt.s",
- "src/asm/ARMV5E/Deemph_32_opt.s",
- "src/asm/ARMV5E/Dot_p_opt.s",
- "src/asm/ARMV5E/Filt_6k_7k_opt.s",
- "src/asm/ARMV5E/Norm_Corr_opt.s",
- "src/asm/ARMV5E/pred_lt4_1_opt.s",
- "src/asm/ARMV5E/residu_asm_opt.s",
- "src/asm/ARMV5E/scale_sig_opt.s",
- "src/asm/ARMV5E/Syn_filt_32_opt.s",
- "src/asm/ARMV5E/syn_filt_opt.s",
- ],
-
- srcs: [
- "src/asm/ARMV7/convolve_neon.s",
- "src/asm/ARMV7/cor_h_vec_neon.s",
- "src/asm/ARMV7/Deemph_32_neon.s",
- "src/asm/ARMV7/Dot_p_neon.s",
- "src/asm/ARMV7/Filt_6k_7k_neon.s",
- "src/asm/ARMV7/Norm_Corr_neon.s",
- "src/asm/ARMV7/pred_lt4_1_neon.s",
- "src/asm/ARMV7/residu_asm_neon.s",
- "src/asm/ARMV7/scale_sig_neon.s",
- "src/asm/ARMV7/Syn_filt_32_neon.s",
- "src/asm/ARMV7/syn_filt_neon.s",
- ],
-
- // don't actually generate neon instructions, see bug 26932980
- cflags: [
- "-DARMV7",
- "-mfpu=vfpv3",
- ],
- local_include_dirs: [
- "src/asm/ARMV5E",
- "src/asm/ARMV7",
- ],
- },
-
},
},
diff --git a/media/module/extractors/mkv/MatroskaExtractor.cpp b/media/module/extractors/mkv/MatroskaExtractor.cpp
index 6900341..f326db1 100644
--- a/media/module/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/module/extractors/mkv/MatroskaExtractor.cpp
@@ -1769,6 +1769,30 @@
}
+status_t MatroskaExtractor::synthesizeVP9(TrackInfo* trackInfo, size_t index) {
+ BlockIterator iter(this, trackInfo->mTrackNum, index);
+ if (iter.eos()) {
+ return ERROR_MALFORMED;
+ }
+
+ const mkvparser::Block* block = iter.block();
+ if (block->GetFrameCount() <= 0) {
+ return ERROR_MALFORMED;
+ }
+
+ const mkvparser::Block::Frame& frame = block->GetFrame(0);
+ auto tmpData = heapbuffer<unsigned char>(frame.len);
+ long n = frame.Read(mReader, tmpData.get());
+ if (n != 0) {
+ return ERROR_MALFORMED;
+ }
+
+ if (!MakeVP9CodecSpecificData(trackInfo->mMeta, tmpData.get(), frame.len)) {
+ return ERROR_MALFORMED;
+ }
+
+ return OK;
+}
static inline bool isValidInt32ColourValue(long long value) {
return value != mkvparser::Colour::kValueNotPresent
@@ -2002,6 +2026,8 @@
// specified in http://www.webmproject.org/vp9/profiles/.
AMediaFormat_setBuffer(meta,
AMEDIAFORMAT_KEY_CSD_0, codecPrivate, codecPrivateSize);
+ } else {
+ isSetCsdFrom1stFrame = true;
}
} else if (!strcmp("V_AV1", codecID)) {
AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_VIDEO_AV1);
@@ -2254,6 +2280,13 @@
mTracks.pop();
continue;
}
+ } else if ((!strcmp("V_VP9", codecID) && codecPrivateSize == 0) ||
+ (!strcmp(mimetype, MEDIA_MIMETYPE_VIDEO_VP9) && isSetCsdFrom1stFrame)) {
+ // Attempt to recover from VP9 track without codec private data
+ err = synthesizeVP9(trackInfo, n);
+ if (err != OK) {
+ ALOGW("ignoring error %d in synthesizeVP9", err);
+ }
}
// the TrackInfo owns the metadata now
meta = nullptr;
@@ -2279,6 +2312,8 @@
int64_t thumbnailTimeUs = 0;
size_t maxBlockSize = 0;
while (!iter.eos() && j < 20) {
+ int64_t blockTimeUs = iter.blockTimeUs();
+
if (iter.block()->IsKey()) {
++j;
@@ -2289,9 +2324,13 @@
if (blockSize > maxBlockSize) {
maxBlockSize = blockSize;
- thumbnailTimeUs = iter.blockTimeUs();
+ thumbnailTimeUs = blockTimeUs;
}
}
+ // Exit after 20s if we've already found at least one key frame.
+ if (blockTimeUs > 20000000 && maxBlockSize > 0) {
+ break;
+ }
iter.advance();
}
AMediaFormat_setInt64(info->mMeta,
diff --git a/media/module/extractors/mkv/include/MatroskaExtractor.h b/media/module/extractors/mkv/include/MatroskaExtractor.h
index 99fad17..2e4d955 100644
--- a/media/module/extractors/mkv/include/MatroskaExtractor.h
+++ b/media/module/extractors/mkv/include/MatroskaExtractor.h
@@ -95,6 +95,7 @@
status_t synthesizeAVCC(TrackInfo *trackInfo, size_t index);
status_t synthesizeMPEG2(TrackInfo *trackInfo, size_t index);
status_t synthesizeMPEG4(TrackInfo *trackInfo, size_t index);
+ status_t synthesizeVP9(TrackInfo* trackInfo, size_t index);
status_t initTrackInfo(
const mkvparser::Track *track,
AMediaFormat *meta,
diff --git a/media/module/metadatautils/MetaDataUtils.cpp b/media/module/metadatautils/MetaDataUtils.cpp
index db60f04..0895bb5 100644
--- a/media/module/metadatautils/MetaDataUtils.cpp
+++ b/media/module/metadatautils/MetaDataUtils.cpp
@@ -81,6 +81,177 @@
return true;
}
+// Check if the next 24 bits are VP9 SYNC_CODE
+static bool isVp9SyncCode(ABitReader &bits) {
+ if (bits.numBitsLeft() < 24) {
+ return false;
+ }
+ return bits.getBits(24) == 0x498342;
+}
+
+// Parses the bit depth and chroma subsampling from a VP9 uncompressed header
+// (see bitdepth_colorspace_sampling in section 6.2 of the VP9 bitstream spec).
+static bool getVp9BitdepthChromaSubSampling(ABitReader &bits,
+ int32_t profile,
+ int32_t *bitDepth,
+ int32_t *chromaSubsampling) {
+ if (profile >= 2) {
+ if (bits.numBitsLeft() < 1) {
+ return false;
+ }
+ *bitDepth = bits.getBits(1) ? 12 : 10;
+ } else {
+ *bitDepth = 8;
+ }
+
+ uint32_t colorspace;
+ if (!bits.getBitsGraceful(3, &colorspace)) {
+ return false;
+ }
+
+ *chromaSubsampling = -1;
+ if (colorspace != 7 /*SRGB*/) {
+ // Skip yuv_range_flag
+ if (!bits.skipBits(1)) {
+ return false;
+ }
+ // Check for subsampling only for profiles 1 and 3.
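+ // Pack subsampling_x and subsampling_y as (ss_x << 1) | ss_y to match the
+ // CodecPrivate subsampling convention used below (3 denotes 4:2:0).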
+ if (profile == 1 || profile == 3) {
+ uint32_t ss_x;
+ uint32_t ss_y;
+ if (bits.getBitsGraceful(1, &ss_x) && bits.getBitsGraceful(1, &ss_y)) {
+ *chromaSubsampling = (ss_x << 1) | ss_y;
+ } else {
+ return false;
+ }
+ } else {
+ *chromaSubsampling = 3;
+ }
+ } else {
+ if (profile == 1 || profile == 3) {
+ *chromaSubsampling = 0;
+ }
+ }
+ return true;
+}
+
+// The |data| parameter contains the first frame's data, starting with the uncompressed frame
+// header. This uncompressed header (see section 6.2 of the VP9 bitstream spec) is used to
+// parse the profile, bit depth and chroma subsampling.
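+// For reference, the syntax elements read below appear in this order in the
+// uncompressed header: frame_marker (2 bits), profile_low_bit (1), profile_high_bit (1),
+// reserved_zero (1, profile 3 only), show_existing_frame (1), frame_type (1),
+// show_frame (1), error_resilient_mode (1), followed for key frames by the
+// frame sync code and the bitdepth/colorspace/subsampling fields.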
+bool MakeVP9CodecSpecificData(AMediaFormat* meta, const uint8_t* data, size_t size) {
+ if (meta == nullptr || data == nullptr || size == 0) {
+ return false;
+ }
+
+ ABitReader bits(data, size);
+
+ // First 2 bits of the uncompressed header should be the frame_marker.
+ if (bits.getBits(2) != 0b10) {
+ return false;
+ }
+
+ int32_t profileLowBit = bits.getBits(1);
+ int32_t profileHighBit = bits.getBits(1);
+ int32_t profile = profileHighBit * 2 + profileLowBit;
+
+ // One reserved '0' bit if profile is 3.
+ if (profile == 3 && bits.getBits(1) != 0) {
+ return false;
+ }
+
+ // If show_existing_frame is set, we get no more data. Since this is
+ // expected to be the first frame, we can return false which will cascade
+ // into ERROR_MALFORMED.
+ if (bits.getBits(1)) {
+ return false;
+ }
+
+ int32_t frame_type = bits.getBits(1);
+
+ // Up to 7 bits may have been read so far; they were guaranteed to be available
+ // since size > 0. From here on, check that enough bits remain before reading.
+ if (bits.numBitsLeft() < 2) {
+ return false;
+ }
+
+ int32_t show_frame = bits.getBits(1);
+ int32_t error_resilient_mode = bits.getBits(1);
+ int32_t bitDepth = 8;
+ int32_t chromaSubsampling = -1;
+
+ if (frame_type == 0 /* KEY_FRAME */) {
+ // Check for sync code.
+ if (!isVp9SyncCode(bits)) {
+ return false;
+ }
+
+ if (!getVp9BitdepthChromaSubSampling(bits, profile, &bitDepth, &chromaSubsampling)) {
+ return false;
+ }
+ } else {
+ int32_t intra_only = 0;
+ if (!show_frame) {
+ if (bits.numBitsLeft() < 1) {
+ return false;
+ }
+ intra_only = bits.getBits(1);
+ }
+
+ if (!error_resilient_mode) {
+ if (bits.numBitsLeft() < 2) {
+ return false;
+ }
+ // ignore reset_frame_context
+ bits.skipBits(2);
+ }
+
+ if (!intra_only) {
+ // Require first frame to be either KEY_FRAME or INTER_FRAME with intra_only set to true
+ return false;
+ }
+
+ // Check for sync code.
+ if (!isVp9SyncCode(bits)) {
+ return false;
+ }
+
+ if (profile > 0) {
+ if (!getVp9BitdepthChromaSubSampling(bits, profile, &bitDepth, &chromaSubsampling)) {
+ return false;
+ }
+ } else {
+ bitDepth = 8;
+ chromaSubsampling = 3;
+ }
+ }
+ int32_t csdSize = 6;
+ if (chromaSubsampling != -1) {
+ csdSize += 3;
+ }
+
+ // Create VP9 Codec Feature Metadata (CodecPrivate) as described at
+ // https://www.webmproject.org/docs/container/#vp9-codec-feature-metadata-codecprivate
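+ // For example, a profile 0, 8-bit, 4:2:0 stream produces the 9-byte CodecPrivate
+ // 01 01 00  03 01 08  04 01 03 (three ID/length/value triplets).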
+ sp<ABuffer> csd = sp<ABuffer>::make(csdSize);
+ uint8_t* csdData = csd->data();
+
+ *csdData++ = 0x01 /* FEATURE PROFILE */;
+ *csdData++ = 0x01 /* length */;
+ *csdData++ = profile;
+
+ *csdData++ = 0x03 /* FEATURE BITDEPTH */;
+ *csdData++ = 0x01 /* length */;
+ *csdData++ = bitDepth;
+
+ // A csdSize greater than 6 means chroma subsampling data was found.
+ if (csdSize > 6) {
+ *csdData++ = 0x04 /* FEATURE SUBSAMPLING */;
+ *csdData++ = 0x01 /* length */;
+ *csdData++ = chromaSubsampling;
+ }
+
+ AMediaFormat_setBuffer(meta, AMEDIAFORMAT_KEY_CSD_0, csd->data(), csd->size());
+ return true;
+}
+
bool MakeAACCodecSpecificData(MetaDataBase &meta, const uint8_t *data, size_t size) {
if (data == nullptr || size < 7) {
return false;
diff --git a/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
index dcaf27f..69cf21a 100644
--- a/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
+++ b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
@@ -38,6 +38,8 @@
void parseVorbisComment(
AMediaFormat *fileMeta, const char *comment, size_t commentLength);
+bool MakeVP9CodecSpecificData(AMediaFormat* meta, const uint8_t* data, size_t size);
+
} // namespace android
#endif // META_DATA_UTILS_H_
diff --git a/services/audioflinger/timing/tests/Android.bp b/services/audioflinger/timing/tests/Android.bp
index d1e5563..94eaa6a 100644
--- a/services/audioflinger/timing/tests/Android.bp
+++ b/services/audioflinger/timing/tests/Android.bp
@@ -13,7 +13,7 @@
host_supported: true,
srcs: [
- "mediasyncevent_tests.cpp"
+ "mediasyncevent_tests.cpp",
],
header_libs: [
@@ -38,7 +38,7 @@
host_supported: true,
srcs: [
- "monotonicframecounter_tests.cpp"
+ "monotonicframecounter_tests.cpp",
],
static_libs: [
@@ -54,26 +54,26 @@
}
cc_test {
- name: "synchronizedrecordstate_tests",
+ name: "synchronizedrecordstate_tests",
- host_supported: true,
+ host_supported: true,
- srcs: [
- "synchronizedrecordstate_tests.cpp"
- ],
+ srcs: [
+ "synchronizedrecordstate_tests.cpp",
+ ],
- header_libs: [
- "libaudioclient_headers",
- ],
+ header_libs: [
+ "libaudioclient_headers",
+ ],
- static_libs: [
- "liblog",
- "libutils", // RefBase
- ],
+ static_libs: [
+ "liblog",
+ "libutils", // RefBase
+ ],
- cflags: [
- "-Wall",
- "-Werror",
- "-Wextra",
- ],
- }
\ No newline at end of file
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+}
diff --git a/services/audiopolicy/config/Android.bp b/services/audiopolicy/config/Android.bp
index 86600f4..321181d 100644
--- a/services/audiopolicy/config/Android.bp
+++ b/services/audiopolicy/config/Android.bp
@@ -31,41 +31,49 @@
vendor: true,
src: ":a2dp_in_audio_policy_configuration",
}
+
prebuilt_etc {
name: "a2dp_audio_policy_configuration.xml",
vendor: true,
src: ":a2dp_audio_policy_configuration",
}
+
prebuilt_etc {
name: "audio_policy_configuration.xml",
vendor: true,
src: ":audio_policy_configuration_generic",
}
+
prebuilt_etc {
name: "r_submix_audio_policy_configuration.xml",
vendor: true,
src: ":r_submix_audio_policy_configuration",
}
+
prebuilt_etc {
name: "audio_policy_volumes.xml",
vendor: true,
src: ":audio_policy_volumes",
}
+
prebuilt_etc {
name: "default_volume_tables.xml",
vendor: true,
src: ":default_volume_tables",
}
+
prebuilt_etc {
name: "surround_sound_configuration_5_0.xml",
vendor: true,
src: ":surround_sound_configuration_5_0",
}
+
prebuilt_etc {
name: "usb_audio_policy_configuration.xml",
vendor: true,
src: ":usb_audio_policy_configuration",
}
+
prebuilt_etc {
name: "primary_audio_policy_configuration.xml",
src: ":primary_audio_policy_configuration",
@@ -76,50 +84,62 @@
name: "a2dp_in_audio_policy_configuration",
srcs: ["a2dp_in_audio_policy_configuration.xml"],
}
+
filegroup {
name: "a2dp_audio_policy_configuration",
srcs: ["a2dp_audio_policy_configuration.xml"],
}
+
filegroup {
name: "primary_audio_policy_configuration",
srcs: ["primary_audio_policy_configuration.xml"],
}
+
filegroup {
name: "surround_sound_configuration_5_0",
srcs: ["surround_sound_configuration_5_0.xml"],
}
+
filegroup {
name: "default_volume_tables",
srcs: ["default_volume_tables.xml"],
}
+
filegroup {
name: "audio_policy_volumes",
srcs: ["audio_policy_volumes.xml"],
}
+
filegroup {
name: "audio_policy_configuration_generic",
srcs: ["audio_policy_configuration_generic.xml"],
}
+
filegroup {
name: "audio_policy_configuration_generic_configurable",
srcs: ["audio_policy_configuration_generic_configurable.xml"],
}
+
filegroup {
name: "usb_audio_policy_configuration",
srcs: ["usb_audio_policy_configuration.xml"],
}
+
filegroup {
name: "r_submix_audio_policy_configuration",
srcs: ["r_submix_audio_policy_configuration.xml"],
}
+
filegroup {
name: "bluetooth_audio_policy_configuration_7_0",
srcs: ["bluetooth_audio_policy_configuration_7_0.xml"],
}
+
filegroup {
name: "bluetooth_with_le_audio_policy_configuration_7_0",
srcs: ["bluetooth_with_le_audio_policy_configuration_7_0.xml"],
}
+
filegroup {
name: "hearing_aid_audio_policy_configuration_7_0",
srcs: ["hearing_aid_audio_policy_configuration_7_0.xml"],
diff --git a/services/audiopolicy/engineconfigurable/config/Android.bp b/services/audiopolicy/engineconfigurable/config/Android.bp
index b3d1f97..a733c3f 100644
--- a/services/audiopolicy/engineconfigurable/config/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/Android.bp
@@ -30,10 +30,12 @@
vendor: true,
src: ":audio_policy_engine_criteria",
}
+
filegroup {
name: "audio_policy_engine_criterion_types_template",
srcs: ["example/common/audio_policy_engine_criterion_types.xml.in"],
}
+
filegroup {
name: "audio_policy_engine_criteria",
srcs: ["example/common/audio_policy_engine_criteria.xml"],
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
index ee62d5e..98b8e78 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
@@ -31,18 +31,21 @@
src: ":PolicyClass",
sub_dir: "parameter-framework/Structure/Policy",
}
+
prebuilt_etc {
name: "PolicySubsystem.xml",
vendor: true,
src: ":PolicySubsystem",
sub_dir: "parameter-framework/Structure/Policy",
}
+
prebuilt_etc {
name: "PolicySubsystem-CommonTypes.xml",
vendor: true,
src: ":buildcommontypesstructure_gen",
sub_dir: "parameter-framework/Structure/Policy",
}
+
genrule {
name: "buildcommontypesstructure_gen",
defaults: ["buildcommontypesstructurerule"],
@@ -52,34 +55,42 @@
name: "product_strategies_structure_template",
srcs: ["examples/common/Structure/ProductStrategies.xml.in"],
}
+
filegroup {
name: "PolicySubsystem",
srcs: ["examples/common/Structure/PolicySubsystem.xml"],
}
+
filegroup {
name: "PolicySubsystem-no-strategy",
srcs: ["examples/common/Structure/PolicySubsystem-no-strategy.xml"],
}
+
filegroup {
name: "common_types_structure_template",
srcs: ["examples/common/Structure/PolicySubsystem-CommonTypes.xml.in"],
}
+
filegroup {
name: "PolicyClass",
srcs: ["examples/common/Structure/PolicyClass.xml"],
}
+
filegroup {
name: "volumes.pfw",
srcs: ["examples/Settings/volumes.pfw"],
}
+
filegroup {
name: "device_for_input_source.pfw",
srcs: ["examples/Settings/device_for_input_source.pfw"],
}
+
filegroup {
name: "ParameterFrameworkConfigurationPolicy.userdebug.xml",
srcs: ["examples/ParameterFrameworkConfigurationPolicy.userdebug.xml"],
}
+
filegroup {
name: "ParameterFrameworkConfigurationPolicy.user.xml",
srcs: ["examples/ParameterFrameworkConfigurationPolicy.user.xml"],
diff --git a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
index 3d972dc..f5e72f5 100644
--- a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
@@ -38,6 +38,7 @@
[[clang::no_destroy]] static std::once_flag gSmOnce;
sp<FakeServiceManager> gFakeServiceManager;
+sp<AudioPolicyService> gAudioPolicyService;
bool addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
FuzzedDataProvider& fdp) {
@@ -49,22 +50,10 @@
return true;
}
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
- FuzzedDataProvider fdp(data, size);
-
- std::call_once(gSmOnce, [&] {
- /* Create a FakeServiceManager instance and add required services */
- gFakeServiceManager = sp<FakeServiceManager>::make();
- setDefaultServiceManager(gFakeServiceManager);
- });
- gFakeServiceManager->clear();
-
- for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
- "batterystats", "media.metrics"}) {
- if (!addService(String16(service), gFakeServiceManager, fdp)) {
- return 0;
- }
- }
+extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
+ /* Create a FakeServiceManager instance and add required services */
+ gFakeServiceManager = sp<FakeServiceManager>::make();
+ setDefaultServiceManager(gFakeServiceManager);
auto configService = ndk::SharedRefBase::make<ConfigMock>();
CHECK_EQ(NO_ERROR, AServiceManager_addService(configService.get()->asBinder().get(),
@@ -79,23 +68,40 @@
CHECK_EQ(NO_ERROR, AServiceManager_addService(moduleService.get()->asBinder().get(),
"android.hardware.audio.core.IModule/default"));
- const auto audioFlinger = sp<AudioFlinger>::make();
- const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+ // Do not create a thread pool for the fuzzer instances of the audio flinger and audio policy services
+ AudioSystem::disableThreadPool();
- CHECK_EQ(NO_ERROR,
- gFakeServiceManager->addService(
- String16(IAudioFlinger::DEFAULT_SERVICE_NAME), IInterface::asBinder(afAdapter),
- false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+ return 0;
+}
- AudioSystem::get_audio_flinger_for_fuzzer();
- const auto audioPolicyService = sp<AudioPolicyService>::make();
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider fdp(data, size);
- CHECK_EQ(NO_ERROR,
- gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
- false /* allowIsolated */,
- IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+ for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
+ "batterystats", "media.metrics"}) {
+ if (!addService(String16(service), gFakeServiceManager, fdp)) {
+ return 0;
+ }
+ }
- fuzzService(media::IAudioPolicyService::asBinder(audioPolicyService), std::move(fdp));
+ // TODO(330882064): Initialize Audio Flinger and Audio Policy services every time
+ std::call_once(gSmOnce, [&] {
+ const auto audioFlinger = sp<AudioFlinger>::make();
+ const auto audioFlingerServerAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+ CHECK_EQ(NO_ERROR,
+ gFakeServiceManager->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+ IInterface::asBinder(audioFlingerServerAdapter),
+ false /* allowIsolated */,
+ IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+
+ gAudioPolicyService = sp<AudioPolicyService>::make();
+ CHECK_EQ(NO_ERROR,
+ gFakeServiceManager->addService(String16("media.audio_policy"),
+ gAudioPolicyService, false /* allowIsolated */,
+ IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+ });
+
+ fuzzService(media::IAudioPolicyService::asBinder(gAudioPolicyService), std::move(fdp));
return 0;
}
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index 8088ef0..fdb56e5 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -30,7 +30,7 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
}
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
index c96c37b..84a1ce6 100644
--- a/services/medialog/fuzzer/Android.bp
+++ b/services/medialog/fuzzer/Android.bp
@@ -30,12 +30,12 @@
"frameworks/av/services/medialog",
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-audio-fuzzing-reports@google.com",
],
componentid: 155276,
hotlists: [