Merge "Remove min sdk version and set target/max sdk version to 29" into qt-dev
diff --git a/apex/ld.config.txt b/apex/ld.config.txt
index 87af5a1..a5937fd 100644
--- a/apex/ld.config.txt
+++ b/apex/ld.config.txt
@@ -127,3 +127,5 @@
# namespace.sphal.link.platform.shared_libs += %SANITIZER_RUNTIME_LIBRARIES%
namespace.sphal.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libRS.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libvulkan.so
+# Add a link for libz.so which is llndk on devices where VNDK is not enforced.
+namespace.sphal.link.platform.shared_libs += libz.so
diff --git a/media/bufferpool/1.0/BufferPoolClient.cpp b/media/bufferpool/1.0/BufferPoolClient.cpp
index 41520ca..d712398 100644
--- a/media/bufferpool/1.0/BufferPoolClient.cpp
+++ b/media/bufferpool/1.0/BufferPoolClient.cpp
@@ -528,6 +528,10 @@
(void) outStatus;
(void) outBuffer;
});
+ if (!transResult.isOk()) {
+ ALOGD("sync from client %lld failed: bufferpool process died.",
+ (long long)mConnectionId);
+ }
}
mRemoteSyncLock.unlock();
}
diff --git a/media/bufferpool/2.0/AccessorImpl.cpp b/media/bufferpool/2.0/AccessorImpl.cpp
index 5260909..94cf006 100644
--- a/media/bufferpool/2.0/AccessorImpl.cpp
+++ b/media/bufferpool/2.0/AccessorImpl.cpp
@@ -261,13 +261,19 @@
mBufferPool.mInvalidation.onHandleAck(&observers, &invalidationId);
}
// Do not hold lock for send invalidations
+ size_t deadClients = 0;
for (auto it = observers.begin(); it != observers.end(); ++it) {
const sp<IObserver> observer = it->second;
if (observer) {
Return<void> transResult = observer->onMessage(it->first, invalidationId);
- (void) transResult;
+ if (!transResult.isOk()) {
+ ++deadClients;
+ }
}
}
+ if (deadClients > 0) {
+ ALOGD("During invalidation found %zu dead clients", deadClients);
+ }
}
bool Accessor::Impl::isValid() {
diff --git a/media/bufferpool/2.0/BufferPoolClient.cpp b/media/bufferpool/2.0/BufferPoolClient.cpp
index f907de5..342fef6 100644
--- a/media/bufferpool/2.0/BufferPoolClient.cpp
+++ b/media/bufferpool/2.0/BufferPoolClient.cpp
@@ -582,6 +582,10 @@
(void) outStatus;
(void) outBuffer;
});
+ if (!transResult.isOk()) {
+ ALOGD("sync from client %lld failed: bufferpool process died.",
+ (long long)mConnectionId);
+ }
}
mRemoteSyncLock.unlock();
}
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index df4dadb..769895c 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -501,15 +501,13 @@
}
}
-static void copyOutputBufferToYV12Frame(uint8_t *dst,
- const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+static void copyOutputBufferToYuvPlanarFrame(
+ uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ size_t dstYStride, size_t dstUVStride,
uint32_t width, uint32_t height) {
- size_t dstYStride = align(width, 16);
- size_t dstUVStride = align(dstYStride / 2, 16);
uint8_t* dstStart = dst;
-
for (size_t i = 0; i < height; ++i) {
memcpy(dst, srcY, width);
srcY += srcYStride;
@@ -597,11 +595,10 @@
static void convertYUV420Planar16ToYUV420Planar(uint8_t *dst,
const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
- size_t dstStride, size_t width, size_t height) {
+ size_t dstYStride, size_t dstUVStride, size_t width, size_t height) {
uint8_t *dstY = (uint8_t *)dst;
- size_t dstYSize = dstStride * height;
- size_t dstUVStride = align(dstStride / 2, 16);
+ size_t dstYSize = dstYStride * height;
size_t dstUVSize = dstUVStride * height / 2;
uint8_t *dstV = dstY + dstYSize;
uint8_t *dstU = dstV + dstUVSize;
@@ -612,7 +609,7 @@
}
srcY += srcYStride;
- dstY += dstStride;
+ dstY += dstYStride;
}
for (size_t y = 0; y < (height + 1) / 2; ++y) {
@@ -699,6 +696,9 @@
size_t srcYStride = img->stride[AOM_PLANE_Y];
size_t srcUStride = img->stride[AOM_PLANE_U];
size_t srcVStride = img->stride[AOM_PLANE_V];
+ C2PlanarLayout layout = wView.layout();
+ size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
if (img->fmt == AOM_IMG_FMT_I42016) {
const uint16_t *srcY = (const uint16_t *)img->planes[AOM_PLANE_Y];
@@ -708,20 +708,23 @@
if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
convertYUV420Planar16ToY410((uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
srcUStride / 2, srcVStride / 2,
- align(mWidth, 16),
+ dstYStride / sizeof(uint32_t),
mWidth, mHeight);
} else {
convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
srcUStride / 2, srcVStride / 2,
- align(mWidth, 16),
+ dstYStride, dstUVStride,
mWidth, mHeight);
}
} else {
const uint8_t *srcY = (const uint8_t *)img->planes[AOM_PLANE_Y];
const uint8_t *srcU = (const uint8_t *)img->planes[AOM_PLANE_U];
const uint8_t *srcV = (const uint8_t *)img->planes[AOM_PLANE_V];
- copyOutputBufferToYV12Frame(dst, srcY, srcU, srcV,
- srcYStride, srcUStride, srcVStride, mWidth, mHeight);
+ copyOutputBufferToYuvPlanarFrame(
+ dst, srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride,
+ dstYStride, dstUVStride,
+ mWidth, mHeight);
}
finishWork(*(int64_t*)img->user_priv, work, std::move(block));
block = nullptr;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index 8d9f21a..b41c271 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -41,6 +41,37 @@
constexpr char COMPONENT_NAME[] = "c2.android.avc.encoder";
+void ParseGop(
+ const C2StreamGopTuning::output &gop,
+ uint32_t *syncInterval, uint32_t *iInterval, uint32_t *maxBframes) {
+ uint32_t syncInt = 1;
+ uint32_t iInt = 1;
+ for (size_t i = 0; i < gop.flexCount(); ++i) {
+ const C2GopLayerStruct &layer = gop.m.values[i];
+ if (layer.count == UINT32_MAX) {
+ syncInt = 0;
+ } else if (syncInt <= UINT32_MAX / (layer.count + 1)) {
+ syncInt *= (layer.count + 1);
+ }
+ if ((layer.type_ & I_FRAME) == 0) {
+ if (layer.count == UINT32_MAX) {
+ iInt = 0;
+ } else if (iInt <= UINT32_MAX / (layer.count + 1)) {
+ iInt *= (layer.count + 1);
+ }
+ }
+ if (layer.type_ == C2Config::picture_type_t(P_FRAME | B_FRAME) && maxBframes) {
+ *maxBframes = layer.count;
+ }
+ }
+ if (syncInterval) {
+ *syncInterval = syncInt;
+ }
+ if (iInterval) {
+ *iInterval = iInt;
+ }
+}
+
} // namespace
class C2SoftAvcEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
@@ -81,10 +112,19 @@
.build());
addParameter(
+ DefineParam(mGop, C2_PARAMKEY_GOP)
+ .withDefault(C2StreamGopTuning::output::AllocShared(
+ 0 /* flexCount */, 0u /* stream */))
+ .withFields({C2F(mGop, m.values[0].type_).any(),
+ C2F(mGop, m.values[0].count).any()})
+ .withSetter(GopSetter)
+ .build());
+
+ addParameter(
DefineParam(mActualInputDelay, C2_PARAMKEY_INPUT_DELAY)
.withDefault(new C2PortActualDelayTuning::input(DEFAULT_B_FRAMES))
.withFields({C2F(mActualInputDelay, value).inRange(0, MAX_B_FRAMES)})
- .withSetter(Setter<decltype(*mActualInputDelay)>::StrictValueWithNoDeps)
+ .calculatedAs(InputDelaySetter, mGop)
.build());
addParameter(
@@ -160,6 +200,17 @@
.build());
}
+ static C2R InputDelaySetter(
+ bool mayBlock,
+ C2P<C2PortActualDelayTuning::input> &me,
+ const C2P<C2StreamGopTuning::output> &gop) {
+ (void)mayBlock;
+ uint32_t maxBframes = 0;
+ ParseGop(gop.v, nullptr, nullptr, &maxBframes);
+ me.set().value = maxBframes;
+ return C2R::Ok();
+ }
+
static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
(void)mayBlock;
C2R res = C2R::Ok();
@@ -273,6 +324,18 @@
return res;
}
+ static C2R GopSetter(bool mayBlock, C2P<C2StreamGopTuning::output> &me) {
+ (void)mayBlock;
+ for (size_t i = 0; i < me.v.flexCount(); ++i) {
+ const C2GopLayerStruct &layer = me.v.m.values[i];
+ if (layer.type_ == C2Config::picture_type_t(P_FRAME | B_FRAME)
+ && layer.count > MAX_B_FRAMES) {
+ me.set().m.values[i].count = MAX_B_FRAMES;
+ }
+ }
+ return C2R::Ok();
+ }
+
IV_PROFILE_T getProfile_l() const {
switch (mProfileLevel->profile) {
case PROFILE_AVC_CONSTRAINED_BASELINE: [[fallthrough]];
@@ -314,6 +377,7 @@
ALOGD("Unrecognized level: %x", mProfileLevel->level);
return 41;
}
+
uint32_t getSyncFramePeriod_l() const {
if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
return 0;
@@ -328,6 +392,7 @@
std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
+ std::shared_ptr<C2StreamGopTuning::output> getGop_l() const { return mGop; }
private:
std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -338,6 +403,7 @@
std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
+ std::shared_ptr<C2StreamGopTuning::output> mGop;
};
#define ive_api_function ih264e_api_function
@@ -850,6 +916,7 @@
c2_status_t errType = C2_OK;
+ std::shared_ptr<C2StreamGopTuning::output> gop;
{
IntfImpl::Lock lock = mIntf->lock();
mSize = mIntf->getSize_l();
@@ -859,6 +926,25 @@
mAVCEncLevel = mIntf->getLevel_l();
mIInterval = mIntf->getSyncFramePeriod_l();
mIDRInterval = mIntf->getSyncFramePeriod_l();
+ gop = mIntf->getGop_l();
+ }
+ if (gop && gop->flexCount() > 0) {
+ uint32_t syncInterval = 1;
+ uint32_t iInterval = 1;
+ uint32_t maxBframes = 0;
+ ParseGop(*gop, &syncInterval, &iInterval, &maxBframes);
+ if (syncInterval > 0) {
+ ALOGD("Updating IDR interval from GOP: old %u new %u", mIDRInterval, syncInterval);
+ mIDRInterval = syncInterval;
+ }
+ if (iInterval > 0) {
+ ALOGD("Updating I interval from GOP: old %u new %u", mIInterval, iInterval);
+ mIInterval = iInterval;
+ }
+ if (mBframes != maxBframes) {
+ ALOGD("Updating max B frames from GOP: old %u new %u", mBframes, maxBframes);
+ mBframes = maxBframes;
+ }
}
uint32_t width = mSize->width;
uint32_t height = mSize->height;
@@ -868,8 +954,8 @@
// TODO
mIvVideoColorFormat = IV_YUV_420P;
- ALOGD("Params width %d height %d level %d colorFormat %d", width,
- height, mAVCEncLevel, mIvVideoColorFormat);
+ ALOGD("Params width %d height %d level %d colorFormat %d bframes %d", width,
+ height, mAVCEncLevel, mIvVideoColorFormat, mBframes);
/* Getting Number of MemRecords */
{
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index cf34dff..408db7e 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -74,6 +74,14 @@
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
addParameter(
+ DefineParam(mComplexity, C2_PARAMKEY_COMPLEXITY)
+ .withDefault(new C2StreamComplexityTuning::output(0u,
+ FLAC_COMPRESSION_LEVEL_DEFAULT))
+ .withFields({C2F(mComplexity, value).inRange(
+ FLAC_COMPRESSION_LEVEL_MIN, FLAC_COMPRESSION_LEVEL_MAX)})
+ .withSetter(Setter<decltype(*mComplexity)>::NonStrictValueWithNoDeps)
+ .build());
+ addParameter(
DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
.withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 4608))
.build());
@@ -93,12 +101,14 @@
uint32_t getSampleRate() const { return mSampleRate->value; }
uint32_t getChannelCount() const { return mChannelCount->value; }
uint32_t getBitrate() const { return mBitrate->value; }
+ uint32_t getComplexity() const { return mComplexity->value; }
int32_t getPcmEncodingInfo() const { return mPcmEncodingInfo->value; }
private:
std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+ std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
std::shared_ptr<C2StreamPcmEncodingInfo::input> mPcmEncodingInfo;
};
@@ -127,7 +137,6 @@
mSignalledError = false;
mSignalledOutputEos = false;
- mCompressionLevel = FLAC_COMPRESSION_LEVEL_DEFAULT;
mIsFirstFrame = true;
mAnchorTimeStamp = 0ull;
mProcessedSamples = 0u;
@@ -153,7 +162,6 @@
}
void C2SoftFlacEnc::onReset() {
- mCompressionLevel = FLAC_COMPRESSION_LEVEL_DEFAULT;
(void) onStop();
}
@@ -369,7 +377,8 @@
ok = ok && FLAC__stream_encoder_set_channels(mFlacStreamEncoder, mIntf->getChannelCount());
ok = ok && FLAC__stream_encoder_set_sample_rate(mFlacStreamEncoder, mIntf->getSampleRate());
ok = ok && FLAC__stream_encoder_set_bits_per_sample(mFlacStreamEncoder, bitsPerSample);
- ok = ok && FLAC__stream_encoder_set_compression_level(mFlacStreamEncoder, mCompressionLevel);
+ ok = ok && FLAC__stream_encoder_set_compression_level(mFlacStreamEncoder,
+ mIntf->getComplexity());
ok = ok && FLAC__stream_encoder_set_verify(mFlacStreamEncoder, false);
if (!ok) {
ALOGE("unknown error when configuring encoder");
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
index cdf305e..b3f01d5 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.h
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -69,7 +69,6 @@
std::shared_ptr<C2LinearBlock> mOutputBlock;
bool mSignalledError;
bool mSignalledOutputEos;
- uint32_t mCompressionLevel;
uint32_t mBlockSize;
bool mIsFirstFrame;
uint64_t mAnchorTimeStamp;
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index efeab6c..b129b1b 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -86,8 +86,8 @@
DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
.withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
.withFields({
- C2F(mSize, width).inRange(320, 1920, 2),
- C2F(mSize, height).inRange(128, 1088, 2),
+ C2F(mSize, width).inRange(2, 1920, 2),
+ C2F(mSize, height).inRange(2, 1088, 2),
})
.withSetter(SizeSetter)
.build());
@@ -390,27 +390,23 @@
}
C2SoftHevcEnc::~C2SoftHevcEnc() {
- releaseEncoder();
+ onRelease();
}
c2_status_t C2SoftHevcEnc::onInit() {
- return initEncoder();
+ return C2_OK;
}
c2_status_t C2SoftHevcEnc::onStop() {
- if (!mStarted) {
- return C2_OK;
- }
- return releaseEncoder();
+ return C2_OK;
}
void C2SoftHevcEnc::onReset() {
- onStop();
- initEncoder();
+ releaseEncoder();
}
void C2SoftHevcEnc::onRelease() {
- onStop();
+ releaseEncoder();
}
c2_status_t C2SoftHevcEnc::onFlush_sm() {
@@ -865,6 +861,22 @@
return;
}
}
+
+ // handle dynamic config parameters
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
+ lock.unlock();
+
+ if (bitrate != mBitrate) {
+ mBitrate = bitrate;
+ mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_tgt_bitrate[0] =
+ mBitrate->value;
+ mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_peak_bitrate[0] =
+ mBitrate->value << 1;
+ }
+ }
+
ihevce_inp_buf_t s_encode_ip{};
ihevce_out_buf_t s_encode_op{};
uint64_t workIndex = work->input.ordinal.frameIndex.peekull();
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 3d4a733..7e6685e 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -466,9 +466,11 @@
/* TODO: can remove temporary copy after library supports writing to display
* buffer Y, U and V plane pointers using stride info. */
-static void copyOutputBufferToYV12Frame(uint8_t *dst, uint8_t *src, size_t dstYStride,
- size_t srcYStride, uint32_t width, uint32_t height) {
- size_t dstUVStride = align(dstYStride / 2, 16);
+static void copyOutputBufferToYuvPlanarFrame(
+ uint8_t *dst, uint8_t *src,
+ size_t dstYStride, size_t dstUVStride,
+ size_t srcYStride, uint32_t width,
+ uint32_t height) {
size_t srcUVStride = srcYStride / 2;
uint8_t *srcStart = src;
uint8_t *dstStart = dst;
@@ -673,8 +675,14 @@
}
uint8_t *outputBufferY = wView.data()[C2PlanarLayout::PLANE_Y];
- (void)copyOutputBufferToYV12Frame(outputBufferY, mOutputBuffer[mNumSamplesOutput & 1],
- wView.width(), align(mWidth, 16), mWidth, mHeight);
+ C2PlanarLayout layout = wView.layout();
+ size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+ (void)copyOutputBufferToYuvPlanarFrame(
+ outputBufferY,
+ mOutputBuffer[mNumSamplesOutput & 1],
+ dstYStride, dstUVStride,
+ align(mWidth, 16), mWidth, mHeight);
inPos += inSize - (size_t)tmpInSize;
finishWork(workIndex, work);
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index 7b58c9b..2d110f7 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -205,15 +205,6 @@
return C2_BAD_VALUE;
}
- // Get codecDelay
- int32_t lookahead;
- if (opus_multistream_encoder_ctl(mEncoder, OPUS_GET_LOOKAHEAD(&lookahead)) !=
- OPUS_OK) {
- ALOGE("failed to get lookahead");
- return C2_BAD_VALUE;
- }
- mCodecDelay = lookahead * 1000000000ll / mSampleRate;
-
// Set seek preroll to 80 ms
mSeekPreRoll = 80000000;
return C2_OK;
@@ -406,13 +397,26 @@
if (!mHeaderGenerated) {
uint8_t header[AOPUS_UNIFIED_CSD_MAXSIZE];
memset(header, 0, sizeof(header));
+
+ // Get codecDelay
+ int32_t lookahead;
+ if (opus_multistream_encoder_ctl(mEncoder, OPUS_GET_LOOKAHEAD(&lookahead)) !=
+ OPUS_OK) {
+ ALOGE("failed to get lookahead");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ mCodecDelay = lookahead * 1000000000ll / mSampleRate;
+
OpusHeader opusHeader;
+ memset(&opusHeader, 0, sizeof(opusHeader));
opusHeader.channels = mChannelCount;
opusHeader.num_streams = mChannelCount;
opusHeader.num_coupled = 0;
opusHeader.channel_mapping = ((mChannelCount > 8) ? 255 : (mChannelCount > 2));
opusHeader.gain_db = 0;
- opusHeader.skip_samples = 0;
+ opusHeader.skip_samples = lookahead;
int headerLen = WriteOpusHeaders(opusHeader, mSampleRate, header,
sizeof(header), mCodecDelay, mSeekPreRoll);
diff --git a/media/codec2/components/vpx/Android.bp b/media/codec2/components/vpx/Android.bp
index abfd379..34f5753 100644
--- a/media/codec2/components/vpx/Android.bp
+++ b/media/codec2/components/vpx/Android.bp
@@ -7,7 +7,7 @@
srcs: ["C2SoftVpxDec.cpp"],
- static_libs: ["libvpx"],
+ shared_libs: ["libvpx"],
cflags: [
"-DVP9",
@@ -23,7 +23,7 @@
srcs: ["C2SoftVpxDec.cpp"],
- static_libs: ["libvpx"],
+ shared_libs: ["libvpx"],
}
cc_library_shared {
@@ -38,7 +38,7 @@
"C2SoftVpxEnc.cpp",
],
- static_libs: ["libvpx"],
+ shared_libs: ["libvpx"],
cflags: ["-DVP9"],
}
@@ -55,6 +55,6 @@
"C2SoftVpxEnc.cpp",
],
- static_libs: ["libvpx"],
+ shared_libs: ["libvpx"],
}
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 3120f7a..a52ca15 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -18,6 +18,8 @@
#define LOG_TAG "C2SoftVpxDec"
#include <log/log.h>
+#include <algorithm>
+
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>
@@ -303,13 +305,43 @@
#endif
};
+C2SoftVpxDec::ConverterThread::ConverterThread(
+ const std::shared_ptr<Mutexed<ConversionQueue>> &queue)
+ : Thread(false), mQueue(queue) {}
+
+bool C2SoftVpxDec::ConverterThread::threadLoop() {
+ Mutexed<ConversionQueue>::Locked queue(*mQueue);
+ if (queue->entries.empty()) {
+ queue.waitForCondition(queue->cond);
+ if (queue->entries.empty()) {
+ return true;
+ }
+ }
+ std::function<void()> convert = queue->entries.front();
+ queue->entries.pop_front();
+ if (!queue->entries.empty()) {
+ queue->cond.signal();
+ }
+ queue.unlock();
+
+ convert();
+
+ queue.lock();
+ if (--queue->numPending == 0u) {
+ queue->cond.broadcast();
+ }
+ return true;
+}
+
C2SoftVpxDec::C2SoftVpxDec(
const char *name,
c2_node_id_t id,
const std::shared_ptr<IntfImpl> &intfImpl)
: SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
mIntf(intfImpl),
- mCodecCtx(nullptr) {
+ mCodecCtx(nullptr),
+ mCoreCount(1),
+ mQueue(new Mutexed<ConversionQueue>) {
}
C2SoftVpxDec::~C2SoftVpxDec() {
@@ -399,7 +431,7 @@
vpx_codec_dec_cfg_t cfg;
memset(&cfg, 0, sizeof(vpx_codec_dec_cfg_t));
- cfg.threads = GetCPUCoreCount();
+ cfg.threads = mCoreCount = GetCPUCoreCount();
vpx_codec_flags_t flags;
memset(&flags, 0, sizeof(vpx_codec_flags_t));
@@ -413,6 +445,18 @@
return UNKNOWN_ERROR;
}
+ if (mMode == MODE_VP9) {
+ using namespace std::string_literals;
+ for (int i = 0; i < mCoreCount; ++i) {
+ sp<ConverterThread> thread(new ConverterThread(mQueue));
+ mConverterThreads.push_back(thread);
+ if (thread->run(("vp9conv #"s + std::to_string(i)).c_str(),
+ ANDROID_PRIORITY_AUDIO) != OK) {
+ return UNKNOWN_ERROR;
+ }
+ }
+ }
+
return OK;
}
@@ -422,6 +466,21 @@
delete mCodecCtx;
mCodecCtx = nullptr;
}
+ bool running = true;
+ for (const sp<ConverterThread> &thread : mConverterThreads) {
+ thread->requestExit();
+ }
+ while (running) {
+ mQueue->lock()->cond.broadcast();
+ running = false;
+ for (const sp<ConverterThread> &thread : mConverterThreads) {
+ if (thread->isRunning()) {
+ running = true;
+ break;
+ }
+ }
+ }
+ mConverterThreads.clear();
return OK;
}
@@ -559,12 +618,11 @@
}
}
-static void copyOutputBufferToYV12Frame(uint8_t *dst,
- const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+static void copyOutputBufferToYuvPlanarFrame(
+ uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ size_t dstYStride, size_t dstUVStride,
uint32_t width, uint32_t height) {
- size_t dstYStride = align(width, 16);
- size_t dstUVStride = align(dstYStride / 2, 16);
uint8_t *dstStart = dst;
for (size_t i = 0; i < height; ++i) {
@@ -654,11 +712,10 @@
static void convertYUV420Planar16ToYUV420Planar(uint8_t *dst,
const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
- size_t dstStride, size_t width, size_t height) {
+ size_t dstYStride, size_t dstUVStride, size_t width, size_t height) {
uint8_t *dstY = (uint8_t *)dst;
- size_t dstYSize = dstStride * height;
- size_t dstUVStride = align(dstStride / 2, 16);
+ size_t dstYSize = dstYStride * height;
size_t dstUVSize = dstUVStride * height / 2;
uint8_t *dstV = dstY + dstYSize;
uint8_t *dstU = dstV + dstUVSize;
@@ -669,7 +726,7 @@
}
srcY += srcYStride;
- dstY += dstStride;
+ dstY += dstYStride;
}
for (size_t y = 0; y < (height + 1) / 2; ++y) {
@@ -751,6 +808,9 @@
size_t srcYStride = img->stride[VPX_PLANE_Y];
size_t srcUStride = img->stride[VPX_PLANE_U];
size_t srcVStride = img->stride[VPX_PLANE_V];
+ C2PlanarLayout layout = wView.layout();
+ size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
if (img->fmt == VPX_IMG_FMT_I42016) {
const uint16_t *srcY = (const uint16_t *)img->planes[VPX_PLANE_Y];
@@ -758,22 +818,45 @@
const uint16_t *srcV = (const uint16_t *)img->planes[VPX_PLANE_V];
if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
- convertYUV420Planar16ToY410((uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2,
- align(mWidth, 16),
- mWidth, mHeight);
+ Mutexed<ConversionQueue>::Locked queue(*mQueue);
+ size_t i = 0;
+ constexpr size_t kHeight = 64;
+ for (; i < mHeight; i += kHeight) {
+ queue->entries.push_back(
+ [dst, srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride, dstYStride,
+ width = mWidth, height = std::min(mHeight - i, kHeight)] {
+ convertYUV420Planar16ToY410(
+ (uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
+ srcUStride / 2, srcVStride / 2, dstYStride / sizeof(uint32_t),
+ width, height);
+ });
+ srcY += srcYStride / 2 * kHeight;
+ srcU += srcUStride / 2 * (kHeight / 2);
+ srcV += srcVStride / 2 * (kHeight / 2);
+ dst += dstYStride * kHeight;
+ }
+ CHECK_EQ(0u, queue->numPending);
+ queue->numPending = queue->entries.size();
+ while (queue->numPending > 0) {
+ queue->cond.signal();
+ queue.waitForCondition(queue->cond);
+ }
} else {
convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2,
- align(mWidth, 16),
- mWidth, mHeight);
+ srcUStride / 2, srcVStride / 2,
+ dstYStride, dstUVStride,
+ mWidth, mHeight);
}
} else {
const uint8_t *srcY = (const uint8_t *)img->planes[VPX_PLANE_Y];
const uint8_t *srcU = (const uint8_t *)img->planes[VPX_PLANE_U];
const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V];
- copyOutputBufferToYV12Frame(dst, srcY, srcU, srcV,
- srcYStride, srcUStride, srcVStride, mWidth, mHeight);
+ copyOutputBufferToYuvPlanarFrame(
+ dst, srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride,
+ dstYStride, dstUVStride,
+ mWidth, mHeight);
}
finishWork(*(int64_t *)img->user_priv, work, std::move(block));
return true;
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index 60c8484..e51bcee 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -50,6 +50,19 @@
MODE_VP9,
} mMode;
+ struct ConversionQueue;
+
+ class ConverterThread : public Thread {
+ public:
+ explicit ConverterThread(
+ const std::shared_ptr<Mutexed<ConversionQueue>> &queue);
+ ~ConverterThread() override = default;
+ bool threadLoop() override;
+
+ private:
+ std::shared_ptr<Mutexed<ConversionQueue>> mQueue;
+ };
+
std::shared_ptr<IntfImpl> mIntf;
vpx_codec_ctx_t *mCodecCtx;
bool mFrameParallelMode; // Frame parallel is only supported by VP9 decoder.
@@ -59,6 +72,15 @@
bool mSignalledOutputEos;
bool mSignalledError;
+ int mCoreCount;
+ struct ConversionQueue {
+ std::list<std::function<void()>> entries;
+ Condition cond;
+ size_t numPending{0u};
+ };
+ std::shared_ptr<Mutexed<ConversionQueue>> mQueue;
+ std::vector<sp<ConverterThread>> mConverterThreads;
+
status_t initDecoder();
status_t destroyDecoder();
void finishWork(uint64_t index, const std::unique_ptr<C2Work> &work,
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index c395d62..9f484a3 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -1644,6 +1644,7 @@
* frames.
*/
struct C2GopLayerStruct {
+ C2GopLayerStruct() : type_((C2Config::picture_type_t)0), count(0) {}
C2GopLayerStruct(C2Config::picture_type_t type, uint32_t count_)
: type_(type), count(count_) { }
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index 6ae1c13..f0f62f6 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -50,14 +50,8 @@
} // namespace
C2OMXNode::C2OMXNode(const std::shared_ptr<Codec2Client::Component> &comp)
- : mComp(comp), mFrameIndex(0), mWidth(0), mHeight(0),
+ : mComp(comp), mFrameIndex(0), mWidth(0), mHeight(0), mUsage(0),
mAdjustTimestampGapUs(0), mFirstInputFrame(true) {
- // TODO: read from intf()
- if (!strncmp(comp->getName().c_str(), "c2.android.", 11)) {
- mUsage = GRALLOC_USAGE_SW_READ_OFTEN;
- } else {
- mUsage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
- }
}
status_t C2OMXNode::freeNode() {
@@ -103,13 +97,25 @@
}
status_t C2OMXNode::setParameter(OMX_INDEXTYPE index, const void *params, size_t size) {
- // handle max/fixed frame duration control
- if (index == (OMX_INDEXTYPE)OMX_IndexParamMaxFrameDurationForBitrateControl
- && params != NULL
- && size == sizeof(OMX_PARAM_U32TYPE)) {
- // The incoming number is an int32_t contained in OMX_U32.
- mAdjustTimestampGapUs = (int32_t)((OMX_PARAM_U32TYPE*)params)->nU32;
- return OK;
+ if (params == NULL) {
+ return BAD_VALUE;
+ }
+ switch ((uint32_t)index) {
+ case OMX_IndexParamMaxFrameDurationForBitrateControl:
+ // handle max/fixed frame duration control
+ if (size != sizeof(OMX_PARAM_U32TYPE)) {
+ return BAD_VALUE;
+ }
+ // The incoming number is an int32_t contained in OMX_U32.
+ mAdjustTimestampGapUs = (int32_t)((OMX_PARAM_U32TYPE*)params)->nU32;
+ return OK;
+
+ case OMX_IndexParamConsumerUsageBits:
+ if (size != sizeof(OMX_U32)) {
+ return BAD_VALUE;
+ }
+ mUsage = *((OMX_U32 *)params);
+ return OK;
}
return ERROR_UNSUPPORTED;
}
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 85c783b..f5a4d94 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -183,9 +183,11 @@
GraphicBufferSourceWrapper(
const sp<BGraphicBufferSource> &source,
uint32_t width,
- uint32_t height)
+ uint32_t height,
+ uint64_t usage)
: mSource(source), mWidth(width), mHeight(height) {
mDataSpace = HAL_DATASPACE_BT709;
+ mConfig.mUsage = usage;
}
~GraphicBufferSourceWrapper() override = default;
@@ -193,6 +195,12 @@
mNode = new C2OMXNode(comp);
mNode->setFrameSize(mWidth, mHeight);
+ // Usage is queried during configure(), so setting it beforehand.
+ OMX_U32 usage = mConfig.mUsage & 0xFFFFFFFF;
+ (void)mNode->setParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
+ &usage, sizeof(usage));
+
// NOTE: we do not use/pass through color aspects from GraphicBufferSource as we
// communicate that directly to the component.
mSource->configure(mNode, mDataSpace);
@@ -364,7 +372,8 @@
// color aspects (android._color-aspects)
- // consumer usage
+ // consumer usage is queried earlier.
+
ALOGD("ISConfig%s", status.str().c_str());
return err;
}
@@ -745,11 +754,8 @@
return BAD_VALUE;
}
if ((config->mDomain & Config::IS_ENCODER) && (config->mDomain & Config::IS_VIDEO)) {
- C2Config::bitrate_mode_t mode = C2Config::BITRATE_VARIABLE;
- if (msg->findInt32(KEY_BITRATE_MODE, &i32)) {
- mode = (C2Config::bitrate_mode_t) i32;
- }
- if (mode == BITRATE_MODE_CQ) {
+ int32_t mode = BITRATE_MODE_VBR;
+ if (msg->findInt32(KEY_BITRATE_MODE, &mode) && mode == BITRATE_MODE_CQ) {
if (!msg->findInt32(KEY_QUALITY, &i32)) {
ALOGD("quality is missing, which is required for video encoders in CQ.");
return BAD_VALUE;
@@ -766,6 +772,11 @@
ALOGD("I frame interval is missing, which is required for video encoders.");
return BAD_VALUE;
}
+ if (!msg->findInt32(KEY_FRAME_RATE, &i32)
+ && !msg->findFloat(KEY_FRAME_RATE, &flt)) {
+ ALOGD("frame rate is missing, which is required for video encoders.");
+ return BAD_VALUE;
+ }
}
}
@@ -813,6 +824,7 @@
config->mISConfig->mSuspended = true;
}
}
+ config->mISConfig->mUsage = 0;
}
/*
@@ -849,6 +861,22 @@
if (err != OK) {
ALOGW("failed to convert configuration to c2 params");
}
+
+ int32_t maxBframes = 0;
+ if ((config->mDomain & Config::IS_ENCODER)
+ && (config->mDomain & Config::IS_VIDEO)
+ && sdkParams->findInt32(KEY_MAX_B_FRAMES, &maxBframes)
+ && maxBframes > 0) {
+ std::unique_ptr<C2StreamGopTuning::output> gop =
+ C2StreamGopTuning::output::AllocUnique(2 /* flexCount */, 0u /* stream */);
+ gop->m.values[0] = { P_FRAME, UINT32_MAX };
+ gop->m.values[1] = {
+ C2Config::picture_type_t(P_FRAME | B_FRAME),
+ uint32_t(maxBframes)
+ };
+ configUpdate.push_back(std::move(gop));
+ }
+
err = config->setParameters(comp, configUpdate, C2_DONT_BLOCK);
if (err != OK) {
ALOGW("failed to configure c2 params");
@@ -876,8 +904,14 @@
indices.size(), params.size());
return UNKNOWN_ERROR;
}
- if (usage && (usage.value & C2MemoryUsage::CPU_READ)) {
- config->mInputFormat->setInt32("using-sw-read-often", true);
+ if (usage) {
+ if (usage.value & C2MemoryUsage::CPU_READ) {
+ config->mInputFormat->setInt32("using-sw-read-often", true);
+ }
+ if (config->mISConfig) {
+ C2AndroidMemoryUsage androidUsage(C2MemoryUsage(usage.value));
+ config->mISConfig->mUsage = androidUsage.asGrallocUsage();
+ }
}
// NOTE: we don't blindly use client specified input size if specified as clients
@@ -1068,10 +1102,12 @@
sp<AMessage> inputFormat;
sp<AMessage> outputFormat;
+ uint64_t usage = 0;
{
Mutexed<Config>::Locked config(mConfig);
inputFormat = config->mInputFormat;
outputFormat = config->mOutputFormat;
+ usage = config->mISConfig ? config->mISConfig->mUsage : 0;
}
sp<PersistentSurface> persistentSurface = CreateCompatibleInputSurface();
@@ -1095,7 +1131,7 @@
int32_t height = 0;
(void)outputFormat->findInt32("height", &height);
err = setupInputSurface(std::make_shared<GraphicBufferSourceWrapper>(
- persistentSurface->getBufferSource(), width, height));
+ persistentSurface->getBufferSource(), width, height, usage));
bufferProducer = persistentSurface->getBufferProducer();
}
@@ -1155,10 +1191,12 @@
void CCodec::setInputSurface(const sp<PersistentSurface> &surface) {
sp<AMessage> inputFormat;
sp<AMessage> outputFormat;
+ uint64_t usage = 0;
{
Mutexed<Config>::Locked config(mConfig);
inputFormat = config->mInputFormat;
outputFormat = config->mOutputFormat;
+ usage = config->mISConfig ? config->mISConfig->mUsage : 0;
}
auto hidlTarget = surface->getHidlTarget();
if (hidlTarget) {
@@ -1182,7 +1220,7 @@
int32_t height = 0;
(void)outputFormat->findInt32("height", &height);
status_t err = setupInputSurface(std::make_shared<GraphicBufferSourceWrapper>(
- surface->getBufferSource(), width, height));
+ surface->getBufferSource(), width, height, usage));
if (err != OK) {
ALOGE("Failed to set up input surface: %d", err);
mCallback->onInputSurfaceDeclined(err);
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 715e78b..7669421 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1203,6 +1203,9 @@
void CCodecBufferChannel::onInputBufferDone(
uint64_t frameIndex, size_t arrayIndex) {
+ if (mInputSurface) {
+ return;
+ }
std::shared_ptr<C2Buffer> buffer =
mPipelineWatcher.lock()->onInputBufferReleased(frameIndex, arrayIndex);
bool newInputSlotAvailable;
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 428f032..4c3fff7 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -358,14 +358,7 @@
.limitTo(D::ENCODER & D::VIDEO & D::PARAM));
add(ConfigMapper(KEY_BITRATE_MODE, C2_PARAMKEY_BITRATE_MODE, "value")
.limitTo(D::ENCODER & D::CODED)
- .withMapper([](C2Value v) -> C2Value {
- int32_t value;
- C2Config::bitrate_mode_t mode;
- if (v.get(&value) && C2Mapper::map(value, &mode)) {
- return mode;
- }
- return C2Value();
- }));
+ .withC2Mappers<C2Config::bitrate_mode_t>());
// remove when codecs switch to PARAMKEY and new modes
deprecated(ConfigMapper(KEY_BITRATE_MODE, "coded.bitrate-mode", "value")
.limitTo(D::ENCODER));
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 0fd5731..c6cbad3 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -739,6 +739,7 @@
ALOGD("ConstGraphicBlockBuffer::copy: set back buffer failed");
return false;
}
+ setRange(0, aBuffer->size()); // align size info
converter.copyToMediaImage();
setImageData(converter.imageData());
mBufferRef = buffer;
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index 8341fd5..bb35763 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -78,6 +78,7 @@
// IN PARAMS (CODEC WRAPPER)
float mFixedAdjustedFps; // fixed fps via PTS manipulation
float mMinAdjustedFps; // minimum fps via PTS manipulation
+ uint64_t mUsage; // consumer usage
};
/**
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index 10c4dcc..f8afa7c 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -872,7 +872,6 @@
emplace("libcodec2_soft_vp8enc.so");
emplace("libcodec2_soft_vp9dec.so");
emplace("libcodec2_soft_vp9enc.so");
- emplace("libcodec2_soft_xaacdec.so");
}
c2_status_t C2PlatformComponentStore::copyBuffer(
diff --git a/media/extractors/mkv/MatroskaExtractor.cpp b/media/extractors/mkv/MatroskaExtractor.cpp
index ab76edc..b1eb301 100644
--- a/media/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/extractors/mkv/MatroskaExtractor.cpp
@@ -1993,6 +1993,12 @@
}
} else if (!strcmp("V_AV1", codecID)) {
AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_VIDEO_AV1);
+ if (codecPrivateSize > 0) {
+ // 'csd-0' for AV1 is the Blob of Codec Private data as
+ // specified in https://aomediacodec.github.io/av1-isobmff/.
+ AMediaFormat_setBuffer(
+ meta, AMEDIAFORMAT_KEY_CSD_0, codecPrivate, codecPrivateSize);
+ }
} else if (!strcmp("V_MPEG2", codecID) || !strcmp("V_MPEG1", codecID)) {
AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME,
MEDIA_MIMETYPE_VIDEO_MPEG2);
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index b4fd811..527bb77 100755
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -1754,6 +1754,15 @@
// http://wiki.xiph.org/OggOpus#ID_Header
strncpy((char *)opusInfo, "OpusHead", 8);
+ // Version shall be 0 as per mp4 Opus Specific Box
+ // (https://opus-codec.org/docs/opus_in_isobmff.html#4.3.2)
+ if (opusInfo[8]) {
+ return ERROR_MALFORMED;
+ }
+ // Force version to 1 as per OpusHead definition
+ // (http://wiki.xiph.org/OggOpus#ID_Header)
+ opusInfo[8] = 1;
+
// Read Opus Specific Box values
size_t opusOffset = 10;
uint16_t pre_skip = U16_AT(&opusInfo[opusOffset]);
@@ -1772,7 +1781,7 @@
int64_t codecDelay = pre_skip * 1000000000ll / kOpusSampleRate;
AMediaFormat_setBuffer(mLastTrack->meta,
- AMEDIAFORMAT_KEY_CSD_0, opusInfo, sizeof(opusInfo));
+ AMEDIAFORMAT_KEY_CSD_0, opusInfo, opusInfoSize);
AMediaFormat_setBuffer(mLastTrack->meta,
AMEDIAFORMAT_KEY_CSD_1, &codecDelay, sizeof(codecDelay));
AMediaFormat_setBuffer(mLastTrack->meta,
@@ -1972,6 +1981,8 @@
return err;
}
+ adjustRawDefaultFrameSize();
+
size_t max_size;
err = mLastTrack->sampleTable->getMaxSampleSize(&max_size);
@@ -4606,6 +4617,20 @@
return OK;
}
+void MPEG4Extractor::adjustRawDefaultFrameSize() {
+ int32_t chanCount = 0;
+ int32_t bitWidth = 0;
+ const char *mimeStr = NULL;
+
+ if(AMediaFormat_getString(mLastTrack->meta, AMEDIAFORMAT_KEY_MIME, &mimeStr) &&
+ !strcasecmp(mimeStr, MEDIA_MIMETYPE_AUDIO_RAW) &&
+ AMediaFormat_getInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &chanCount) &&
+ AMediaFormat_getInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_BITS_PER_SAMPLE, &bitWidth)) {
+        // samplesize in stsz may not be right, so update default samplesize
+ mLastTrack->sampleTable->setPredictSampleSize(chanCount * bitWidth / 8);
+ }
+}
+
////////////////////////////////////////////////////////////////////////////////
MPEG4Source::MPEG4Source(
diff --git a/media/extractors/mp4/MPEG4Extractor.h b/media/extractors/mp4/MPEG4Extractor.h
index 031e793..e10bf8a 100644
--- a/media/extractors/mp4/MPEG4Extractor.h
+++ b/media/extractors/mp4/MPEG4Extractor.h
@@ -179,6 +179,7 @@
status_t parseAC3SpecificBox(off64_t offset);
status_t parseEAC3SpecificBox(off64_t offset);
status_t parseAC4SpecificBox(off64_t offset);
+ void adjustRawDefaultFrameSize();
MPEG4Extractor(const MPEG4Extractor &);
MPEG4Extractor &operator=(const MPEG4Extractor &);
diff --git a/media/extractors/mp4/SampleTable.h b/media/extractors/mp4/SampleTable.h
index 57f6e62..076f4c3 100644
--- a/media/extractors/mp4/SampleTable.h
+++ b/media/extractors/mp4/SampleTable.h
@@ -89,6 +89,10 @@
status_t findThumbnailSample(uint32_t *sample_index);
+ void setPredictSampleSize(uint32_t sampleSize) {
+ mDefaultSampleSize = sampleSize;
+ }
+
protected:
~SampleTable();
diff --git a/media/libeffects/dynamicsproc/dsp/DPFrequency.cpp b/media/libeffects/dynamicsproc/dsp/DPFrequency.cpp
index 1f53978..ae5c020 100644
--- a/media/libeffects/dynamicsproc/dsp/DPFrequency.cpp
+++ b/media/libeffects/dynamicsproc/dsp/DPFrequency.cpp
@@ -62,11 +62,6 @@
cBInput.resize(mBlockSize * CIRCULAR_BUFFER_UPSAMPLE);
cBOutput.resize(mBlockSize * CIRCULAR_BUFFER_UPSAMPLE);
- //fill input with half block size...
- for (unsigned int k = 0; k < mBlockSize/2; k++) {
- cBInput.write(0);
- }
-
//temp vectors
input.resize(mBlockSize);
output.resize(mBlockSize);
@@ -170,6 +165,11 @@
fill_window(mVWindow, RDSP_WINDOW_HANNING_FLAT_TOP, mBlockSize, mOverlapSize);
+ //split window into analysis and synthesis. Both are the sqrt() of original
+ //window
+ Eigen::Map<Eigen::VectorXf> eWindow(&mVWindow[0], mVWindow.size());
+ eWindow = eWindow.array().sqrt();
+
//compute window rms for energy compensation
mWindowRms = 0;
for (size_t i = 0; i < mVWindow.size(); i++) {
@@ -666,6 +666,11 @@
//##ifft directly to output.
Eigen::Map<Eigen::VectorXf> eOutput(&cb.output[0], cb.output.size());
mFftServer.inv(eOutput, cb.complexTemp);
+
+ //apply rest of window for resynthesis
+ Eigen::Map<Eigen::VectorXf> eWindow(&mVWindow[0], mVWindow.size());
+ eOutput = eOutput.cwiseProduct(eWindow);
+
return mBlockSize;
}
diff --git a/media/libmedia/xsd/api/current.txt b/media/libmedia/xsd/api/current.txt
index 05e8a49..73b5f8d 100644
--- a/media/libmedia/xsd/api/current.txt
+++ b/media/libmedia/xsd/api/current.txt
@@ -44,20 +44,20 @@
public class CamcorderProfiles {
ctor public CamcorderProfiles();
method public int getCameraId();
- method public java.util.List<media.profiles.EncoderProfile> getEncoderProfile();
- method public java.util.List<media.profiles.CamcorderProfiles.ImageDecoding> getImageDecoding();
- method public java.util.List<media.profiles.CamcorderProfiles.ImageEncoding> getImageEncoding();
+ method public java.util.List<media.profiles.EncoderProfile> getEncoderProfile_optional();
+ method public java.util.List<media.profiles.CamcorderProfiles.ImageDecodingOptional> getImageDecoding_optional();
+ method public java.util.List<media.profiles.CamcorderProfiles.ImageEncodingOptional> getImageEncoding_optional();
method public void setCameraId(int);
}
- public static class CamcorderProfiles.ImageDecoding {
- ctor public CamcorderProfiles.ImageDecoding();
+ public static class CamcorderProfiles.ImageDecodingOptional {
+ ctor public CamcorderProfiles.ImageDecodingOptional();
method public int getMemCap();
method public void setMemCap(int);
}
- public static class CamcorderProfiles.ImageEncoding {
- ctor public CamcorderProfiles.ImageEncoding();
+ public static class CamcorderProfiles.ImageEncodingOptional {
+ ctor public CamcorderProfiles.ImageEncodingOptional();
method public int getQuality();
method public void setQuality(int);
}
diff --git a/media/libmedia/xsd/media_profiles.xsd b/media/libmedia/xsd/media_profiles.xsd
index a02252a..9664456 100644
--- a/media/libmedia/xsd/media_profiles.xsd
+++ b/media/libmedia/xsd/media_profiles.xsd
@@ -35,19 +35,19 @@
</xs:complexType>
</xs:element>
<xs:complexType name="CamcorderProfiles">
- <xs:sequence>
- <xs:element name="EncoderProfile" type="EncoderProfile" minOccurs="0" maxOccurs="unbounded"/>
- <xs:element name="ImageEncoding" minOccurs="0" maxOccurs="unbounded">
+ <xs:choice minOccurs="0" maxOccurs="unbounded">
+ <xs:element name="EncoderProfile" type="EncoderProfile"/>
+ <xs:element name="ImageEncoding">
<xs:complexType>
<xs:attribute name="quality" type="xs:int"/>
</xs:complexType>
</xs:element>
- <xs:element name="ImageDecoding" minOccurs="0" maxOccurs="unbounded">
+ <xs:element name="ImageDecoding">
<xs:complexType>
<xs:attribute name="memCap" type="xs:int"/>
</xs:complexType>
</xs:element>
- </xs:sequence>
+ </xs:choice>
<xs:attribute name="cameraId" type="xs:int"/>
</xs:complexType>
<xs:complexType name="EncoderProfile">
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 317b5ec..3d67c91 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1138,6 +1138,8 @@
return err;
}
+ static_cast<Surface *>(mNativeWindow.get())->setDequeueTimeout(-1);
+
// Exits here for tunneled video playback codecs -- i.e. skips native window
// buffer allocation step as this is managed by the tunneled OMX omponent
// itself and explicitly sets def.nBufferCountActual to 0.
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 5932518..9170805 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -212,7 +212,6 @@
"libstagefright_mediafilter",
"libstagefright_webm",
"libstagefright_timedtext",
- "libvpx",
"libogg",
"libwebm",
"libstagefright_esds",
diff --git a/media/libstagefright/StagefrightPluginLoader.cpp b/media/libstagefright/StagefrightPluginLoader.cpp
index ee28966..fb03c5e 100644
--- a/media/libstagefright/StagefrightPluginLoader.cpp
+++ b/media/libstagefright/StagefrightPluginLoader.cpp
@@ -35,7 +35,7 @@
} // unnamed namespace
StagefrightPluginLoader::StagefrightPluginLoader(const char *libPath) {
- if (android::base::GetIntProperty("ro.media.codec2.enable", 1) == 0) {
+ if (android::base::GetIntProperty("debug.stagefright.ccodec", 1) == 0) {
ALOGD("CCodec is disabled.");
return;
}
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 3de934f..135151f 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -39,6 +39,7 @@
#include <media/stagefright/foundation/ByteUtils.h>
#include <media/stagefright/foundation/OpusHeader.h>
#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaDefs.h>
#include <media/AudioSystem.h>
#include <media/MediaPlayerInterface.h>
@@ -573,6 +574,68 @@
}
}
+static void parseAV1ProfileLevelFromCsd(const sp<ABuffer> &csd, sp<AMessage> &format) {
+ // Parse CSD structure to extract profile level information
+ // https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox
+ const uint8_t *data = csd->data();
+ size_t remaining = csd->size();
+ if (remaining < 4 || data[0] != 0x81) { // configurationVersion == 1
+ return;
+ }
+ uint8_t profileData = (data[1] & 0xE0) >> 5;
+ uint8_t levelData = data[1] & 0x1F;
+ uint8_t highBitDepth = (data[2] & 0x40) >> 6;
+
+ const static ALookup<std::pair<uint8_t, uint8_t>, int32_t> profiles {
+ { { 0, 0 }, AV1ProfileMain8 },
+ { { 1, 0 }, AV1ProfileMain10 },
+ };
+
+ int32_t profile;
+ if (profiles.map(std::make_pair(highBitDepth, profileData), &profile)) {
+ // bump to HDR profile
+ if (isHdr(format) && profile == AV1ProfileMain10) {
+ if (format->contains("hdr10-plus-info")) {
+ profile = AV1ProfileMain10HDR10Plus;
+ } else {
+ profile = AV1ProfileMain10HDR10;
+ }
+ }
+ format->setInt32("profile", profile);
+ }
+ const static ALookup<uint8_t, int32_t> levels {
+ { 0, AV1Level2 },
+ { 1, AV1Level21 },
+ { 2, AV1Level22 },
+ { 3, AV1Level23 },
+ { 4, AV1Level3 },
+ { 5, AV1Level31 },
+ { 6, AV1Level32 },
+ { 7, AV1Level33 },
+ { 8, AV1Level4 },
+ { 9, AV1Level41 },
+ { 10, AV1Level42 },
+ { 11, AV1Level43 },
+ { 12, AV1Level5 },
+ { 13, AV1Level51 },
+ { 14, AV1Level52 },
+ { 15, AV1Level53 },
+ { 16, AV1Level6 },
+ { 17, AV1Level61 },
+ { 18, AV1Level62 },
+ { 19, AV1Level63 },
+ { 20, AV1Level7 },
+ { 21, AV1Level71 },
+ { 22, AV1Level72 },
+ { 23, AV1Level73 },
+ };
+
+ int32_t level;
+ if (levels.map(levelData, &level)) {
+ format->setInt32("level", level);
+ }
+}
+
static std::vector<std::pair<const char *, uint32_t>> stringMappings {
{
@@ -1234,6 +1297,7 @@
buffer->meta()->setInt32("csd", true);
buffer->meta()->setInt64("timeUs", 0);
msg->setBuffer("csd-0", buffer);
+ parseAV1ProfileLevelFromCsd(buffer, msg);
} else if (meta->findData(kKeyESDS, &type, &data, &size)) {
ESDS esds((const char *)data, size);
if (esds.InitCheck() != (status_t)OK) {
diff --git a/media/libstagefright/codecs/amrnb/common/Android.bp b/media/libstagefright/codecs/amrnb/common/Android.bp
index 5177593..772ebf9 100644
--- a/media/libstagefright/codecs/amrnb/common/Android.bp
+++ b/media/libstagefright/codecs/amrnb/common/Android.bp
@@ -1,9 +1,6 @@
cc_library_shared {
name: "libstagefright_amrnb_common",
vendor_available: true,
- vndk: {
- enabled: true,
- },
srcs: [
"src/add.cpp",
diff --git a/media/libstagefright/codecs/amrwbenc/Android.bp b/media/libstagefright/codecs/amrwbenc/Android.bp
index 3beed66..084be0a 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.bp
+++ b/media/libstagefright/codecs/amrwbenc/Android.bp
@@ -129,6 +129,7 @@
shared_libs: [
"libstagefright_enc_common",
+ "liblog",
],
cflags: ["-Werror"],
diff --git a/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp b/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp
index 95f9494..9442fc4 100644
--- a/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp
+++ b/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp
@@ -14,6 +14,7 @@
shared_libs: [
"libdl",
+ "liblog",
],
static_libs: [
diff --git a/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c b/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c
index 8cebb09..f2e28c4 100644
--- a/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c
+++ b/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c
@@ -47,6 +47,10 @@
#include "q_pulse.h"
+#undef LOG_TAG
+#define LOG_TAG "amrwbenc"
+#include "log/log.h"
+
static Word16 tipos[36] = {
0, 1, 2, 3, /* starting point &ipos[0], 1st iter */
1, 2, 3, 0, /* starting point &ipos[4], 2nd iter */
@@ -745,11 +749,16 @@
i = (Word16)((vo_L_mult(track, NPMAXPT) >> 1));
- while (ind[i] >= 0)
+ while (i < NPMAXPT * NB_TRACK && ind[i] >= 0)
{
i += 1;
}
- ind[i] = index;
+ if (i < NPMAXPT * NB_TRACK) {
+ ind[i] = index;
+ } else {
+ ALOGE("b/132647222, OOB access in ind array track=%d i=%d", track, i);
+ android_errorWriteLog(0x534e4554, "132647222");
+ }
}
k = 0;
diff --git a/media/libstagefright/codecs/common/Android.bp b/media/libstagefright/codecs/common/Android.bp
index 3726922..c5a076a 100644
--- a/media/libstagefright/codecs/common/Android.bp
+++ b/media/libstagefright/codecs/common/Android.bp
@@ -1,9 +1,6 @@
cc_library {
name: "libstagefright_enc_common",
vendor_available: true,
- vndk: {
- enabled: true,
- },
srcs: ["cmnMemory.c"],
diff --git a/media/libstagefright/codecs/on2/dec/Android.bp b/media/libstagefright/codecs/on2/dec/Android.bp
index 577231c..82bb8d1 100644
--- a/media/libstagefright/codecs/on2/dec/Android.bp
+++ b/media/libstagefright/codecs/on2/dec/Android.bp
@@ -4,7 +4,7 @@
srcs: ["SoftVPX.cpp"],
- static_libs: ["libvpx"],
+ shared_libs: ["libvpx"],
version_script: "exports.lds",
diff --git a/media/libstagefright/codecs/on2/enc/Android.bp b/media/libstagefright/codecs/on2/enc/Android.bp
index 82c215e..cd69e0d 100644
--- a/media/libstagefright/codecs/on2/enc/Android.bp
+++ b/media/libstagefright/codecs/on2/enc/Android.bp
@@ -20,5 +20,5 @@
cfi: true,
},
- static_libs: ["libvpx"],
+ shared_libs: ["libvpx"],
}
diff --git a/media/libstagefright/data/media_codecs_google_c2_audio.xml b/media/libstagefright/data/media_codecs_google_c2_audio.xml
index be2404d..509f7a9 100644
--- a/media/libstagefright/data/media_codecs_google_c2_audio.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_audio.xml
@@ -112,7 +112,7 @@
<Limit name="sample-rate" ranges="8000,12000,16000,24000,48000" />
<Limit name="bitrate" range="500-512000" />
<Limit name="complexity" range="0-10" default="5" />
- <Feature name="bitrate-modes" value="CQ" />
+ <Feature name="bitrate-modes" value="CBR" />
</MediaCodec>
</Encoders>
</Included>
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index f785bfa..04041eb 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -109,11 +109,12 @@
</MediaCodec>
<MediaCodec name="c2.android.hevc.encoder" type="video/hevc">
<!-- profiles and levels: ProfileMain : MainTierLevel51 -->
- <Limit name="size" min="320x128" max="512x512" />
+ <Limit name="size" min="2x2" max="512x512" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="8x8" />
<Limit name="block-count" range="1-4096" /> <!-- max 512x512 -->
<Limit name="blocks-per-second" range="1-122880" />
+ <Limit name="frame-rate" range="1-120" />
<Limit name="bitrate" range="1-10000000" />
<Limit name="complexity" range="0-10" default="0" />
<Limit name="quality" range="0-100" default="80" />
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index 7ee1f4d..67d3f1a 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -237,7 +237,7 @@
<Limit name="sample-rate" ranges="8000,12000,16000,24000,48000" />
<Limit name="bitrate" range="500-512000" />
<Limit name="complexity" range="0-10" default="5" />
- <Feature name="bitrate-modes" value="CQ" />
+ <Feature name="bitrate-modes" value="CBR" />
</MediaCodec>
<MediaCodec name="c2.android.h263.encoder" type="video/3gpp">
<Alias name="OMX.google.h263.encoder" />
@@ -296,11 +296,12 @@
</MediaCodec>
<MediaCodec name="c2.android.hevc.encoder" type="video/hevc" variant="!slow-cpu">
<!-- profiles and levels: ProfileMain : MainTierLevel51 -->
- <Limit name="size" min="320x128" max="512x512" />
+ <Limit name="size" min="2x2" max="512x512" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="8x8" />
<Limit name="block-count" range="1-4096" /> <!-- max 512x512 -->
<Limit name="blocks-per-second" range="1-122880" />
+ <Limit name="frame-rate" range="1-120" />
<Limit name="bitrate" range="1-10000000" />
<Limit name="complexity" range="0-10" default="0" />
<Limit name="quality" range="0-100" default="80" />
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index b270808..b494e16 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -1,9 +1,6 @@
cc_library {
name: "libstagefright_flacdec",
vendor_available: true,
- vndk: {
- enabled: true,
- },
srcs: [
"FLACDecoder.cpp",
diff --git a/media/libstagefright/foundation/OpusHeader.cpp b/media/libstagefright/foundation/OpusHeader.cpp
index acb9ccf..513e41f 100644
--- a/media/libstagefright/foundation/OpusHeader.cpp
+++ b/media/libstagefright/foundation/OpusHeader.cpp
@@ -208,7 +208,7 @@
headerLen += AOPUS_LENGTH_SIZE;
int headerSize = WriteOpusHeader(header, inputSampleRate, output + headerLen,
- outputSize);
+ outputSize - headerLen);
if (headerSize < 0) {
ALOGD("%s: WriteOpusHeader failed", __func__);
return -1;
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 5e7f90a..9cf97c7 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -1384,7 +1384,7 @@
while (index > lowestBandwidth) {
// be conservative (70%) to avoid overestimating and immediately
// switching down again.
- size_t adjustedBandwidthBps = bandwidthBps * 7 / 10;
+ size_t adjustedBandwidthBps = bandwidthBps * .7f;
const BandwidthItem &item = mBandwidthItems[index];
if (item.mBandwidth <= adjustedBandwidthBps
&& isBandwidthValid(item)) {
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index 26e0884..5eaadbd 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -124,6 +124,11 @@
return NULL;
}
+ int32_t bitsPerSample = 0;
+ if (!md->findInt32(kKeyBitsPerSample, &bitsPerSample)) {
+ ALOGV("kKeyBitsPerSample not available");
+ }
+
if (!strncasecmp(mimeType, MEDIA_MIMETYPE_AUDIO_OPUS, strlen(MEDIA_MIMETYPE_AUDIO_OPUS))) {
// Opus in WebM is a well-known, yet under-documented, format. The codec private data
// of the track is an Opus Ogg header (https://tools.ietf.org/html/rfc7845#section-5.1)
@@ -164,8 +169,8 @@
uint8_t* codecPrivateData = codecPrivateBuf->data();
memcpy(codecPrivateData + off, (uint8_t*)header_data, headerSize);
- sp<WebmElement> entry =
- WebmElement::AudioTrackEntry("A_OPUS", nChannels, samplerate, codecPrivateBuf);
+ sp<WebmElement> entry = WebmElement::AudioTrackEntry("A_OPUS", nChannels, samplerate,
+ codecPrivateBuf, bitsPerSample);
return entry;
} else if (!strncasecmp(mimeType,
MEDIA_MIMETYPE_AUDIO_VORBIS,
@@ -203,8 +208,8 @@
off += headerSize2;
memcpy(codecPrivateData + off, headerData3, headerSize3);
- sp<WebmElement> entry =
- WebmElement::AudioTrackEntry("A_VORBIS", nChannels, samplerate, codecPrivateBuf);
+ sp<WebmElement> entry = WebmElement::AudioTrackEntry("A_VORBIS", nChannels, samplerate,
+ codecPrivateBuf, bitsPerSample);
return entry;
} else {
ALOGE("Track (%s) is not a supported audio format", mimeType);
diff --git a/media/libstagefright/xmlparser/api/current.txt b/media/libstagefright/xmlparser/api/current.txt
index f7f4c36..9d7c57d 100644
--- a/media/libstagefright/xmlparser/api/current.txt
+++ b/media/libstagefright/xmlparser/api/current.txt
@@ -68,16 +68,26 @@
public class MediaCodec {
ctor public MediaCodec();
method public java.util.List<media.codecs.Alias> getAlias_optional();
+ method public java.util.List<media.codecs.Quirk> getAttribute_optional();
+ method public String getDomain();
+ method public String getEnabled();
method public java.util.List<media.codecs.Feature> getFeature_optional();
method public java.util.List<media.codecs.Limit> getLimit_optional();
method public String getName();
method public java.util.List<media.codecs.Quirk> getQuirk_optional();
+ method public String getRank();
method public String getType();
method public java.util.List<media.codecs.Type> getType_optional();
method public String getUpdate();
+ method public String getVariant();
+ method public java.util.List<media.codecs.Variant> getVariant_optional();
+ method public void setDomain(String);
+ method public void setEnabled(String);
method public void setName(String);
+ method public void setRank(String);
method public void setType(String);
method public void setUpdate(String);
+ method public void setVariant(String);
}
public class MediaCodecs {
@@ -91,14 +101,18 @@
public class Quirk {
ctor public Quirk();
method public String getName();
+ method public String getValue();
method public void setName(String);
+ method public void setValue(String);
}
public class Setting {
ctor public Setting();
+ method public String getEnabled();
method public String getName();
method public String getUpdate();
method public String getValue();
+ method public void setEnabled(String);
method public void setName(String);
method public void setUpdate(String);
method public void setValue(String);
@@ -106,7 +120,9 @@
public class Settings {
ctor public Settings();
- method public java.util.List<media.codecs.Setting> getSetting();
+ method public java.util.List<media.codecs.Setting> getDomain_optional();
+ method public java.util.List<media.codecs.Setting> getSetting_optional();
+ method public java.util.List<media.codecs.Setting> getVariant_optional();
}
public class Type {
@@ -120,6 +136,12 @@
method public void setUpdate(String);
}
+ public class Variant {
+ ctor public Variant();
+ method public String getName();
+ method public void setName(String);
+ }
+
public class XmlParser {
ctor public XmlParser();
method public static media.codecs.Included readIncluded(java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException;
diff --git a/media/libstagefright/xmlparser/media_codecs.xsd b/media/libstagefright/xmlparser/media_codecs.xsd
index 77193a2..63ec5d0 100644
--- a/media/libstagefright/xmlparser/media_codecs.xsd
+++ b/media/libstagefright/xmlparser/media_codecs.xsd
@@ -49,24 +49,33 @@
</xs:sequence>
</xs:complexType>
<xs:complexType name="Settings">
- <xs:sequence>
- <xs:element name="Setting" type="Setting" maxOccurs="unbounded"/>
- </xs:sequence>
+ <xs:choice minOccurs="0" maxOccurs="unbounded">
+ <xs:element name="Setting" type="Setting"/>
+ <xs:element name="Variant" type="Setting"/>
+ <xs:element name="Domain" type="Setting"/>
+ </xs:choice>
</xs:complexType>
<xs:complexType name="MediaCodec">
<xs:choice minOccurs="0" maxOccurs="unbounded">
<xs:element name="Quirk" type="Quirk" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element name="Attribute" type="Quirk" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="Type" type="Type" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="Alias" type="Alias" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="Limit" type="Limit" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="Feature" type="Feature" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element name="Variant" type="Variant" minOccurs="0" maxOccurs="unbounded"/>
</xs:choice>
<xs:attribute name="name" type="xs:string"/>
<xs:attribute name="type" type="xs:string"/>
<xs:attribute name="update" type="xs:string"/>
+ <xs:attribute name="rank" type="xs:string"/>
+ <xs:attribute name="domain" type="xs:string"/>
+ <xs:attribute name="variant" type="xs:string"/>
+ <xs:attribute name="enabled" type="xs:string"/>
</xs:complexType>
<xs:complexType name="Quirk">
<xs:attribute name="name" type="xs:string"/>
+ <xs:attribute name="value" type="xs:string"/>
</xs:complexType>
<xs:complexType name="Type">
<xs:sequence>
@@ -97,9 +106,13 @@
<xs:attribute name="required" type="xs:string"/>
<xs:attribute name="value" type="xs:string"/>
</xs:complexType>
+ <xs:complexType name="Variant">
+ <xs:attribute name="name" type="xs:string"/>
+ </xs:complexType>
<xs:complexType name="Setting">
<xs:attribute name="name" type="xs:string"/>
<xs:attribute name="value" type="xs:string"/>
+ <xs:attribute name="enabled" type="xs:string"/>
<xs:attribute name="update" type="xs:string"/>
</xs:complexType>
<xs:complexType name="Include">
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 7008cee..a093893 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -31,6 +31,7 @@
private:
OpPlayAudioMonitor(uid_t uid, audio_usage_t usage, int id);
void onFirstRef() override;
+ static void getPackagesForUid(uid_t uid, Vector<String16>& packages);
AppOpsManager mAppOpsManager;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 1d99b88..ce408be 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -4918,6 +4918,10 @@
// read original volumes with volume control
float typeVolume = mStreamTypes[track->streamType()].volume;
float v = masterVolume * typeVolume;
+ // Always fetch volumeshaper volume to ensure state is updated.
+ const sp<AudioTrackServerProxy> proxy = track->mAudioTrackServerProxy;
+ const float vh = track->getVolumeHandler()->getVolume(
+ track->mAudioTrackServerProxy->framesReleased()).first;
if (track->isPausing() || mStreamTypes[track->streamType()].mute
|| track->isPlaybackRestricted()) {
@@ -4927,7 +4931,6 @@
track->setPaused();
}
} else {
- sp<AudioTrackServerProxy> proxy = track->mAudioTrackServerProxy;
gain_minifloat_packed_t vlr = proxy->getVolumeLR();
vlf = float_from_gain(gain_minifloat_unpack_left(vlr));
vrf = float_from_gain(gain_minifloat_unpack_right(vlr));
@@ -4940,8 +4943,6 @@
ALOGV("Track right volume out of range: %.3g", vrf);
vrf = GAIN_FLOAT_UNITY;
}
- const float vh = track->getVolumeHandler()->getVolume(
- track->mAudioTrackServerProxy->framesReleased()).first;
// now apply the master volume and stream type volume and shaper volume
vlf *= v * vh;
vrf *= v * vh;
@@ -5021,7 +5022,7 @@
(void *)(uintptr_t)(mChannelMask | mHapticChannelMask));
// limit track sample rate to 2 x output sample rate, which changes at re-configuration
uint32_t maxSampleRate = mSampleRate * AUDIO_RESAMPLER_DOWN_RATIO_MAX;
- uint32_t reqSampleRate = track->mAudioTrackServerProxy->getSampleRate();
+ uint32_t reqSampleRate = proxy->getSampleRate();
if (reqSampleRate == 0) {
reqSampleRate = mSampleRate;
} else if (reqSampleRate > maxSampleRate) {
@@ -5033,7 +5034,7 @@
AudioMixer::SAMPLE_RATE,
(void *)(uintptr_t)reqSampleRate);
- AudioPlaybackRate playbackRate = track->mAudioTrackServerProxy->getPlaybackRate();
+ AudioPlaybackRate playbackRate = proxy->getPlaybackRate();
mAudioMixer->setParameter(
trackId,
AudioMixer::TIMESTRETCH,
@@ -5507,19 +5508,17 @@
{
float left, right;
+ // Ensure volumeshaper state always advances even when muted.
+ const sp<AudioTrackServerProxy> proxy = track->mAudioTrackServerProxy;
+ const auto [shaperVolume, shaperActive] = track->getVolumeHandler()->getVolume(
+ proxy->framesReleased());
+ mVolumeShaperActive = shaperActive;
+
if (mMasterMute || mStreamTypes[track->streamType()].mute || track->isPlaybackRestricted()) {
left = right = 0;
} else {
float typeVolume = mStreamTypes[track->streamType()].volume;
- float v = mMasterVolume * typeVolume;
- sp<AudioTrackServerProxy> proxy = track->mAudioTrackServerProxy;
-
- // Get volumeshaper scaling
- std::pair<float /* volume */, bool /* active */>
- vh = track->getVolumeHandler()->getVolume(
- track->mAudioTrackServerProxy->framesReleased());
- v *= vh.first;
- mVolumeShaperActive = vh.second;
+ const float v = mMasterVolume * typeVolume * shaperVolume;
gain_minifloat_packed_t vlr = proxy->getVolumeLR();
left = float_from_gain(gain_minifloat_unpack_left(vlr));
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index b0817ed..78db80c 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -389,9 +389,16 @@
AudioFlinger::PlaybackThread::OpPlayAudioMonitor::createIfNeeded(
uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType)
{
- if (isAudioServerOrRootUid(uid)) {
- ALOGD("OpPlayAudio: not muting track:%d usage:%d root or audioserver", id, attr.usage);
- return nullptr;
+ if (isServiceUid(uid)) {
+ Vector <String16> packages;
+ getPackagesForUid(uid, packages);
+ if (packages.isEmpty()) {
+ ALOGD("OpPlayAudio: not muting track:%d usage:%d for service UID %d",
+ id,
+ attr.usage,
+ uid);
+ return nullptr;
+ }
}
// stream type has been filtered by audio policy to indicate whether it can be muted
if (streamType == AUDIO_STREAM_ENFORCED_AUDIBLE) {
@@ -423,8 +430,7 @@
void AudioFlinger::PlaybackThread::OpPlayAudioMonitor::onFirstRef()
{
- PermissionController permissionController;
- permissionController.getPackagesForUid(mUid, mPackages);
+ getPackagesForUid(mUid, mPackages);
checkPlayAudioForUsage();
if (!mPackages.isEmpty()) {
mOpCallback = new PlayAudioOpCallback(this);
@@ -475,6 +481,14 @@
}
}
+// static
+void AudioFlinger::PlaybackThread::OpPlayAudioMonitor::getPackagesForUid(
+ uid_t uid, Vector<String16>& packages)
+{
+ PermissionController permissionController;
+ permissionController.getPackagesForUid(uid, packages);
+}
+
// ----------------------------------------------------------------------------
#undef LOG_TAG
#define LOG_TAG "AF::Track"
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 49937f0..30f29d6 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -258,7 +258,7 @@
virtual status_t getHwOffloadEncodingFormatsSupportedForA2DP(
std::vector<audio_format_t> *formats) = 0;
- virtual void setAppState(uid_t uid, app_state_t state);
+ virtual void setAppState(uid_t uid, app_state_t state) = 0;
virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies) = 0;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index 12b5e7d..094f506 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -19,7 +19,7 @@
#include "DeviceDescriptor.h"
#include <utils/RefBase.h>
#include <media/AudioPolicy.h>
-#include <utils/KeyedVector.h>
+#include <utils/Vector.h>
#include <system/audio.h>
#include <utils/String8.h>
@@ -48,14 +48,15 @@
};
-class AudioPolicyMixCollection : public DefaultKeyedVector<String8, sp<AudioPolicyMix> >
+class AudioPolicyMixCollection : public Vector<sp<AudioPolicyMix>>
{
public:
- status_t getAudioPolicyMix(const String8& address, sp<AudioPolicyMix> &policyMix) const;
+ status_t getAudioPolicyMix(audio_devices_t deviceType,
+ const String8& address, sp<AudioPolicyMix> &policyMix) const;
- status_t registerMix(const String8& address, AudioMix mix, sp<SwAudioOutputDescriptor> desc);
+ status_t registerMix(AudioMix mix, sp<SwAudioOutputDescriptor> desc);
- status_t unregisterMix(const String8& address);
+ status_t unregisterMix(const AudioMix& mix);
void closeOutput(sp<SwAudioOutputDescriptor> &desc);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 98a7800..dca84c0 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -73,16 +73,21 @@
}
}
-status_t AudioPolicyMixCollection::registerMix(const String8& address, AudioMix mix,
- sp<SwAudioOutputDescriptor> desc)
+status_t AudioPolicyMixCollection::registerMix(AudioMix mix, sp<SwAudioOutputDescriptor> desc)
{
- ssize_t index = indexOfKey(address);
- if (index >= 0) {
- ALOGE("registerPolicyMixes(): mix for address %s already registered", address.string());
- return BAD_VALUE;
+ for (size_t i = 0; i < size(); i++) {
+ const sp<AudioPolicyMix>& registeredMix = itemAt(i);
+ if (mix.mDeviceType == registeredMix->mDeviceType
+ && mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0) {
+ ALOGE("registerMix(): mix already registered for dev=0x%x addr=%s",
+ mix.mDeviceType, mix.mDeviceAddress.string());
+ return BAD_VALUE;
+ }
}
sp<AudioPolicyMix> policyMix = new AudioPolicyMix(mix);
- add(address, policyMix);
+ add(policyMix);
+ ALOGD("registerMix(): adding mix for dev=0x%x addr=%s",
+ policyMix->mDeviceType, policyMix->mDeviceAddress.string());
if (desc != 0) {
desc->mPolicyMix = policyMix;
@@ -91,34 +96,48 @@
return NO_ERROR;
}
-status_t AudioPolicyMixCollection::unregisterMix(const String8& address)
+status_t AudioPolicyMixCollection::unregisterMix(const AudioMix& mix)
{
- ssize_t index = indexOfKey(address);
- if (index < 0) {
- ALOGE("unregisterPolicyMixes(): mix for address %s not registered", address.string());
- return BAD_VALUE;
+ for (size_t i = 0; i < size(); i++) {
+ const sp<AudioPolicyMix>& registeredMix = itemAt(i);
+ if (mix.mDeviceType == registeredMix->mDeviceType
+ && mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0) {
+ ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
+ mix.mDeviceType, mix.mDeviceAddress.string());
+ removeAt(i);
+ return NO_ERROR;
+ }
}
- removeItemsAt(index);
- return NO_ERROR;
+ ALOGE("unregisterMix(): mix not registered for dev=0x%x addr=%s",
+ mix.mDeviceType, mix.mDeviceAddress.string());
+ return BAD_VALUE;
}
-status_t AudioPolicyMixCollection::getAudioPolicyMix(const String8& address,
- sp<AudioPolicyMix> &policyMix) const
+status_t AudioPolicyMixCollection::getAudioPolicyMix(audio_devices_t deviceType,
+ const String8& address, sp<AudioPolicyMix> &policyMix) const
{
- ssize_t index = indexOfKey(address);
- if (index < 0) {
- ALOGE("unregisterPolicyMixes(): mix for address %s not registered", address.string());
- return BAD_VALUE;
+
+ ALOGV("getAudioPolicyMix() for dev=0x%x addr=%s", deviceType, address.string());
+ for (size_t i = 0; i < size(); i++) {
+ if (itemAt(i)->mDeviceType == deviceType
+ && itemAt(i)->mDeviceAddress.compare(address) == 0) {
+ policyMix = itemAt(i);
+ ALOGV("getAudioPolicyMix: found matching mix %zu (devType=0x%x addr=%s)",
+ i, deviceType, address.string());
+ return NO_ERROR;
+ }
}
- policyMix = valueAt(index);
- return NO_ERROR;
+
+ ALOGE("getAudioPolicyMix(): mix not registered for dev=0x%x addr=%s",
+ deviceType, address.string());
+ return BAD_VALUE;
}
void AudioPolicyMixCollection::closeOutput(sp<SwAudioOutputDescriptor> &desc)
{
for (size_t i = 0; i < size(); i++) {
- sp<AudioPolicyMix> policyMix = valueAt(i);
+ sp<AudioPolicyMix> policyMix = itemAt(i);
if (policyMix->getOutput() == desc) {
policyMix->clearOutput();
}
@@ -134,7 +153,7 @@
ALOGV("getOutputForAttr() querying %zu mixes:", size());
primaryDesc = 0;
for (size_t i = 0; i < size(); i++) {
- sp<AudioPolicyMix> policyMix = valueAt(i);
+ sp<AudioPolicyMix> policyMix = itemAt(i);
const bool primaryOutputMix = !is_mix_loopback_render(policyMix->mRouteFlags);
if (!primaryOutputMix && (flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)) {
// AAudio does not support MMAP_NO_IRQ loopback render, and there is no way with
@@ -320,10 +339,10 @@
const DeviceVector &availableOutputDevices)
{
for (size_t i = 0; i < size(); i++) {
- if (valueAt(i)->getOutput() == output) {
+ if (itemAt(i)->getOutput() == output) {
// This Desc is involved in a Mix, which has the highest prio
- audio_devices_t deviceType = valueAt(i)->mDeviceType;
- String8 address = valueAt(i)->mDeviceAddress;
+ audio_devices_t deviceType = itemAt(i)->mDeviceType;
+ String8 address = itemAt(i)->mDeviceAddress;
ALOGV("%s: device (0x%x, addr=%s) forced by mix",
__FUNCTION__, deviceType, address.c_str());
return availableOutputDevices.getDevice(deviceType, address, AUDIO_FORMAT_DEFAULT);
@@ -338,7 +357,7 @@
sp<AudioPolicyMix> *policyMix) const
{
for (size_t i = 0; i < size(); i++) {
- AudioPolicyMix *mix = valueAt(i).get();
+ AudioPolicyMix *mix = itemAt(i).get();
if (mix->mMixType != MIX_TYPE_RECORDERS) {
continue;
}
@@ -374,19 +393,28 @@
String8 address(attr.tags + strlen("addr="));
#ifdef LOG_NDEBUG
- ALOGV("getInputMixForAttr looking for address %s\n mixes available:", address.string());
+ ALOGV("getInputMixForAttr looking for address %s for source %d\n mixes available:",
+ address.string(), attr.source);
for (size_t i = 0; i < size(); i++) {
- sp<AudioPolicyMix> audioPolicyMix = valueAt(i);
- ALOGV("\tmix %zu address=%s", i, audioPolicyMix->mDeviceAddress.string());
+ const sp<AudioPolicyMix> audioPolicyMix = itemAt(i);
+ ALOGV("\tmix %zu address=%s", i, audioPolicyMix->mDeviceAddress.string());
}
#endif
- ssize_t index = indexOfKey(address);
- if (index < 0) {
+ size_t index;
+ for (index = 0; index < size(); index++) {
+ const sp<AudioPolicyMix>& registeredMix = itemAt(index);
+ if (registeredMix->mDeviceAddress.compare(address) == 0) {
+ ALOGD("getInputMixForAttr found addr=%s dev=0x%x",
+ registeredMix->mDeviceAddress.string(), registeredMix->mDeviceType);
+ break;
+ }
+ }
+ if (index == size()) {
ALOGW("getInputMixForAttr() no policy for address %s", address.string());
return BAD_VALUE;
}
- sp<AudioPolicyMix> audioPolicyMix = valueAt(index);
+ const sp<AudioPolicyMix> audioPolicyMix = itemAt(index);
if (audioPolicyMix->mMixType != MIX_TYPE_PLAYERS) {
ALOGW("getInputMixForAttr() bad policy mix type for address %s", address.string());
@@ -404,7 +432,7 @@
// "match uid" rule for this uid, return an error
// (adding a uid-device affinity would result in contradictory rules)
for (size_t i = 0; i < size(); i++) {
- const AudioPolicyMix* mix = valueAt(i).get();
+ const AudioPolicyMix* mix = itemAt(i).get();
if (!mix->isDeviceAffinityCompatible()) {
continue;
}
@@ -421,7 +449,7 @@
// AND it doesn't have a "match uid" rule
// THEN add a rule to exclude the uid
for (size_t i = 0; i < size(); i++) {
- const AudioPolicyMix *mix = valueAt(i).get();
+ const AudioPolicyMix *mix = itemAt(i).get();
if (!mix->isDeviceAffinityCompatible()) {
continue;
}
@@ -452,7 +480,7 @@
// for each player mix: remove existing rules that match or exclude this uid
for (size_t i = 0; i < size(); i++) {
bool foundUidRule = false;
- const AudioPolicyMix *mix = valueAt(i).get();
+ const AudioPolicyMix *mix = itemAt(i).get();
if (!mix->isDeviceAffinityCompatible()) {
continue;
}
@@ -481,7 +509,7 @@
// for each player mix: find rules that don't exclude this uid, and add the device to the list
for (size_t i = 0; i < size(); i++) {
bool ruleAllowsUid = true;
- const AudioPolicyMix *mix = valueAt(i).get();
+ const AudioPolicyMix *mix = itemAt(i).get();
if (mix->mMixType != MIX_TYPE_PLAYERS) {
continue;
}
@@ -504,7 +532,7 @@
{
dst->append("\nAudio Policy Mix:\n");
for (size_t i = 0; i < size(); i++) {
- valueAt(i)->dump(dst, 2, i);
+ itemAt(i)->dump(dst, 2, i);
}
}
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 530a2e4..07a7e65 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -70,20 +70,7 @@
audio_stream_type_t EngineBase::getStreamTypeForAttributes(const audio_attributes_t &attr) const
{
- audio_stream_type_t engineStream = mProductStrategies.getStreamTypeForAttributes(attr);
- // ensure the audibility flag for sonification is honored for stream types
- // Note this is typically implemented in the product strategy configuration files, but is
- // duplicated here for safety.
- if (attr.usage == AUDIO_USAGE_ASSISTANCE_SONIFICATION
- && ((attr.flags & AUDIO_FLAG_AUDIBILITY_ENFORCED) != 0)) {
- engineStream = AUDIO_STREAM_ENFORCED_AUDIBLE;
- }
- // ensure the ENFORCED_AUDIBLE stream type reflects the "force use" setting:
- if ((getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED)
- && (engineStream == AUDIO_STREAM_ENFORCED_AUDIBLE)) {
- return AUDIO_STREAM_SYSTEM;
- }
- return engineStream;
+ return mProductStrategies.getStreamTypeForAttributes(attr);
}
audio_attributes_t EngineBase::getAttributesForStreamType(audio_stream_type_t stream) const
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index c430488..3ca7591 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -777,6 +777,12 @@
// check for device and output changes triggered by new force usage
checkForDeviceAndOutputChanges();
+ // force client reconnection to reevaluate flag AUDIO_FLAG_AUDIBILITY_ENFORCED
+ if (usage == AUDIO_POLICY_FORCE_FOR_SYSTEM) {
+ mpClientInterface->invalidateStream(AUDIO_STREAM_SYSTEM);
+ mpClientInterface->invalidateStream(AUDIO_STREAM_ENFORCED_AUDIBLE);
+ }
+
//FIXME: workaround for truncated touch sounds
// to be removed when the problem is handled by system UI
uint32_t delayMs = 0;
@@ -910,6 +916,13 @@
}
*dstAttr = mEngine->getAttributesForStreamType(srcStream);
}
+
+ // Only honor AUDIO_FLAG_AUDIBILITY_ENFORCED when the SYSTEM force usage is
+ // SYSTEM_ENFORCED; clients are invalidated/reconnected when the force usage changes.
+ if (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) {
+ dstAttr->flags &= ~AUDIO_FLAG_AUDIBILITY_ENFORCED;
+ }
+
return NO_ERROR;
}
@@ -2855,13 +2868,16 @@
}
String8 address = mix.mDeviceAddress;
+ audio_devices_t deviceTypeToMakeAvailable;
if (mix.mMixType == MIX_TYPE_PLAYERS) {
- mix.mDeviceType = AUDIO_DEVICE_IN_REMOTE_SUBMIX;
- } else {
mix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+ deviceTypeToMakeAvailable = AUDIO_DEVICE_IN_REMOTE_SUBMIX;
+ } else {
+ mix.mDeviceType = AUDIO_DEVICE_IN_REMOTE_SUBMIX;
+ deviceTypeToMakeAvailable = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
}
- if (mPolicyMixes.registerMix(address, mix, 0 /*output desc*/) != NO_ERROR) {
+ if (mPolicyMixes.registerMix(mix, 0 /*output desc*/) != NO_ERROR) {
ALOGE("Error registering mix %zu for address %s", i, address.string());
res = INVALID_OPERATION;
break;
@@ -2877,7 +2893,7 @@
rSubmixModule->addInputProfile(address, &inputConfig,
AUDIO_DEVICE_IN_REMOTE_SUBMIX, address);
- if ((res = setDeviceConnectionStateInt(mix.mDeviceType,
+ if ((res = setDeviceConnectionStateInt(deviceTypeToMakeAvailable,
AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
address.string(), "remote-submix", AUDIO_FORMAT_DEFAULT)) != NO_ERROR) {
ALOGE("Failed to set remote submix device available, type %u, address %s",
@@ -2903,7 +2919,7 @@
sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(j);
if (desc->supportedDevices().contains(device)) {
- if (mPolicyMixes.registerMix(address, mix, desc) != NO_ERROR) {
+ if (mPolicyMixes.registerMix(mix, desc) != NO_ERROR) {
ALOGE("Could not register mix RENDER, dev=0x%X addr=%s", type,
address.string());
res = INVALID_OPERATION;
@@ -2953,7 +2969,7 @@
String8 address = mix.mDeviceAddress;
- if (mPolicyMixes.unregisterMix(address) != NO_ERROR) {
+ if (mPolicyMixes.unregisterMix(mix) != NO_ERROR) {
res = INVALID_OPERATION;
continue;
}
@@ -2974,7 +2990,7 @@
rSubmixModule->removeInputProfile(address);
} else if ((mix.mRouteFlags & MIX_ROUTE_FLAG_RENDER) == MIX_ROUTE_FLAG_RENDER) {
- if (mPolicyMixes.unregisterMix(mix.mDeviceAddress) != NO_ERROR) {
+ if (mPolicyMixes.unregisterMix(mix) != NO_ERROR) {
res = INVALID_OPERATION;
continue;
}
@@ -3911,6 +3927,8 @@
if (status != NO_ERROR) {
mpClientInterface->releaseAudioPatch(sourceDesc->patchDesc()->mAfPatchHandle, 0);
+ outputDesc->removeClient(sourceDesc->portId());
+ outputDesc->stop();
return status;
}
sourceDesc->setSwOutput(outputDesc);
@@ -4175,6 +4193,7 @@
if (status == NO_ERROR) {
swOutputDesc->stop();
}
+ swOutputDesc->removeClient(sourceDesc->portId());
mpClientInterface->releaseAudioPatch(patchDesc->mAfPatchHandle, 0);
} else {
sp<HwAudioOutputDescriptor> hwOutputDesc = sourceDesc->hwOutput().promote();
@@ -4637,7 +4656,8 @@
addOutput(output, desc);
if (device_distinguishes_on_address(deviceType) && address != "0") {
sp<AudioPolicyMix> policyMix;
- if (mPolicyMixes.getAudioPolicyMix(address, policyMix) == NO_ERROR) {
+ if (mPolicyMixes.getAudioPolicyMix(deviceType, address, policyMix)
+ == NO_ERROR) {
policyMix->setOutput(desc);
desc->mPolicyMix = policyMix;
} else {
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 743c816..5a87134 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -330,9 +330,9 @@
}
bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
- bool* useHeic, bool* useGrid, int64_t* stall) {
+ bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) {
static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
- return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall);
+ return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall, hevcName);
}
bool HeicCompositeStream::isInMemoryTempFileSupported() {
@@ -1115,8 +1115,9 @@
ALOGV("%s", __FUNCTION__);
bool useGrid = false;
+ AString hevcName;
bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
- &mUseHeic, &useGrid, nullptr);
+ &mUseHeic, &useGrid, nullptr, &hevcName);
if (!isSizeSupported) {
ALOGE("%s: Encoder doesnt' support size %u x %u!",
__FUNCTION__, width, height);
@@ -1138,7 +1139,11 @@
}
// Create HEIC/HEVC codec.
- mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
+ if (mUseHeic) {
+ mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
+ } else {
+ mCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
+ }
if (mCodec == nullptr) {
ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
return NO_INIT;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 2aa3c38..260c68e 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -71,7 +71,7 @@
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
- bool* useHeic, bool* useGrid, int64_t* stall);
+ bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName = nullptr);
static bool isInMemoryTempFileSupported();
protected:
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
index ed9be6e..d7cc2bf 100644
--- a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
@@ -49,7 +49,7 @@
}
bool HeicEncoderInfoManager::isSizeSupported(int32_t width, int32_t height, bool* useHeic,
- bool* useGrid, int64_t* stall) const {
+ bool* useGrid, int64_t* stall, AString* hevcName) const {
if (useHeic == nullptr || useGrid == nullptr) {
ALOGE("%s: invalid parameters: useHeic %p, useGrid %p",
__FUNCTION__, useHeic, useGrid);
@@ -72,6 +72,9 @@
(width <= 1920 && height <= 1080))) {
enableGrid = false;
}
+ if (hevcName != nullptr) {
+ *hevcName = mHevcName;
+ }
} else {
// No encoder available for the requested size.
return false;
@@ -113,9 +116,8 @@
}
sp<AMessage> heicDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
- sp<AMessage> hevcDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_VIDEO_HEVC);
- if (hevcDetails == nullptr) {
+ if (!getHevcCodecDetails(codecsList, MEDIA_MIMETYPE_VIDEO_HEVC)) {
if (heicDetails != nullptr) {
ALOGE("%s: Device must support HEVC codec if HEIC codec is available!",
__FUNCTION__);
@@ -123,22 +125,7 @@
}
return OK;
}
-
- // Check CQ mode for HEVC codec
- {
- AString bitrateModes;
- auto hasItem = hevcDetails->findString("feature-bitrate-modes", &bitrateModes);
- if (!hasItem) {
- ALOGE("%s: Failed to query bitrate modes for HEVC codec", __FUNCTION__);
- return BAD_VALUE;
- }
- ALOGV("%s: HEVC codec's feature-bitrate-modes value is %d, %s",
- __FUNCTION__, hasItem, bitrateModes.c_str());
- std::regex pattern("(^|,)CQ($|,)", std::regex_constants::icase);
- if (!std::regex_search(bitrateModes.c_str(), pattern)) {
- return OK;
- }
- }
+ mHasHEVC = true;
// HEIC size range
if (heicDetails != nullptr) {
@@ -152,19 +139,6 @@
mHasHEIC = true;
}
- // HEVC size range
- {
- auto res = getCodecSizeRange(MEDIA_MIMETYPE_VIDEO_HEVC,
- hevcDetails, &mMinSizeHevc, &mMaxSizeHevc, &mHevcFrameRateMaps);
- if (res != OK) {
- ALOGE("%s: Failed to get HEVC codec size range: %s (%d)", __FUNCTION__,
- strerror(-res), res);
- return BAD_VALUE;
- }
-
- mHasHEVC = true;
- }
-
return OK;
}
@@ -290,5 +264,80 @@
return details;
}
+
+bool HeicEncoderInfoManager::getHevcCodecDetails(
+ sp<IMediaCodecList> codecsList, const char* mime) {
+ bool found = false;
+ ssize_t idx = 0;
+ while ((idx = codecsList->findCodecByType(mime, true /*encoder*/, idx)) >= 0) {
+ const sp<MediaCodecInfo> info = codecsList->getCodecInfo(idx++);
+ if (info == nullptr) {
+ ALOGE("%s: Failed to get codec info for %s", __FUNCTION__, mime);
+ break;
+ }
+
+ // Filter out software ones as they may be too slow
+ if (!(info->getAttributes() & MediaCodecInfo::kFlagIsHardwareAccelerated)) {
+ continue;
+ }
+
+ const sp<MediaCodecInfo::Capabilities> caps =
+ info->getCapabilitiesFor(mime);
+ if (caps == nullptr) {
+ ALOGE("%s: [%s] Failed to get capabilities", __FUNCTION__,
+ info->getCodecName());
+ break;
+ }
+ const sp<AMessage> details = caps->getDetails();
+ if (details == nullptr) {
+ ALOGE("%s: [%s] Failed to get details", __FUNCTION__,
+ info->getCodecName());
+ break;
+ }
+
+ // Check CQ mode
+ AString bitrateModes;
+ auto hasItem = details->findString("feature-bitrate-modes", &bitrateModes);
+ if (!hasItem) {
+ ALOGE("%s: [%s] Failed to query bitrate modes", __FUNCTION__,
+ info->getCodecName());
+ break;
+ }
+ ALOGV("%s: [%s] feature-bitrate-modes value is %d, %s",
+ __FUNCTION__, info->getCodecName(), hasItem, bitrateModes.c_str());
+ std::regex pattern("(^|,)CQ($|,)", std::regex_constants::icase);
+ if (!std::regex_search(bitrateModes.c_str(), pattern)) {
+ continue; // move on to next encoder
+ }
+
+ std::pair<int32_t, int32_t> minSizeHevc, maxSizeHevc;
+ FrameRateMaps hevcFrameRateMaps;
+ auto res = getCodecSizeRange(MEDIA_MIMETYPE_VIDEO_HEVC,
+ details, &minSizeHevc, &maxSizeHevc, &hevcFrameRateMaps);
+ if (res != OK) {
+ ALOGE("%s: [%s] Failed to get size range: %s (%d)", __FUNCTION__,
+ info->getCodecName(), strerror(-res), res);
+ break;
+ }
+ if (kGridWidth < minSizeHevc.first
+ || kGridWidth > maxSizeHevc.first
+ || kGridHeight < minSizeHevc.second
+ || kGridHeight > maxSizeHevc.second) {
+ continue; // move on to next encoder
+ }
+
+ // Found: save name, size, frame rate
+ mHevcName = info->getCodecName();
+ mMinSizeHevc = minSizeHevc;
+ mMaxSizeHevc = maxSizeHevc;
+ mHevcFrameRateMaps = hevcFrameRateMaps;
+
+ found = true;
+ break;
+ }
+
+ return found;
+}
+
} //namespace camera3
} // namespace android
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
index fb0b914..58edba2 100644
--- a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
@@ -36,7 +36,7 @@
}
bool isSizeSupported(int32_t width, int32_t height,
- bool* useHeic, bool* useGrid, int64_t* stall) const;
+ bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) const;
static const auto kGridWidth = 512;
static const auto kGridHeight = 512;
@@ -61,11 +61,13 @@
FrameRateMaps::const_iterator findClosestSize(const FrameRateMaps& maps,
int32_t width, int32_t height) const;
sp<AMessage> getCodecDetails(sp<IMediaCodecList> codecsList, const char* name);
+ bool getHevcCodecDetails(sp<IMediaCodecList> codecsList, const char* mime);
bool mIsInited;
std::pair<int32_t, int32_t> mMinSizeHeic, mMaxSizeHeic;
std::pair<int32_t, int32_t> mMinSizeHevc, mMaxSizeHevc;
bool mHasHEVC, mHasHEIC;
+ AString mHevcName;
FrameRateMaps mHeicFrameRateMaps, mHevcFrameRateMaps;
bool mDisableGrid;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 7573089..00f0d86 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -77,8 +77,10 @@
mTimestampOffset(0),
mNextResultFrameNumber(0),
mNextReprocessResultFrameNumber(0),
+ mNextZslStillResultFrameNumber(0),
mNextShutterFrameNumber(0),
mNextReprocessShutterFrameNumber(0),
+ mNextZslStillShutterFrameNumber(0),
mListener(NULL),
mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID),
mLastTemplateId(-1),
@@ -3464,7 +3466,7 @@
CaptureResultExtras &resultExtras,
CameraMetadata &collectedPartialResult,
uint32_t frameNumber,
- bool reprocess,
+ bool reprocess, bool zslStillCapture,
const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
ATRACE_CALL();
if (pendingMetadata.isEmpty())
@@ -3481,6 +3483,14 @@
return;
}
mNextReprocessResultFrameNumber = frameNumber + 1;
+ } else if (zslStillCapture) {
+ if (frameNumber < mNextZslStillResultFrameNumber) {
+ SET_ERR("Out-of-order ZSL still capture result metadata submitted! "
+ "(got frame number %d, expecting %d)",
+ frameNumber, mNextZslStillResultFrameNumber);
+ return;
+ }
+ mNextZslStillResultFrameNumber = frameNumber + 1;
} else {
if (frameNumber < mNextResultFrameNumber) {
SET_ERR("Out-of-order capture result metadata submitted! "
@@ -3741,7 +3751,8 @@
metadata = result->result;
sendCaptureResult(metadata, request.resultExtras,
collectedPartialResult, frameNumber,
- hasInputBufferInRequest, request.physicalMetadatas);
+ hasInputBufferInRequest, request.zslCapture && request.stillCapture,
+ request.physicalMetadatas);
}
}
@@ -3919,12 +3930,20 @@
// TODO: need to track errors for tighter bounds on expected frame number.
if (r.hasInputBuffer) {
if (msg.frame_number < mNextReprocessShutterFrameNumber) {
- SET_ERR("Shutter notification out-of-order. Expected "
+ SET_ERR("Reprocess shutter notification out-of-order. Expected "
"notification for frame %d, got frame %d",
mNextReprocessShutterFrameNumber, msg.frame_number);
return;
}
mNextReprocessShutterFrameNumber = msg.frame_number + 1;
+ } else if (r.zslCapture && r.stillCapture) {
+ if (msg.frame_number < mNextZslStillShutterFrameNumber) {
+ SET_ERR("ZSL still capture shutter notification out-of-order. Expected "
+ "notification for frame %d, got frame %d",
+ mNextZslStillShutterFrameNumber, msg.frame_number);
+ return;
+ }
+ mNextZslStillShutterFrameNumber = msg.frame_number + 1;
} else {
if (msg.frame_number < mNextShutterFrameNumber) {
SET_ERR("Shutter notification out-of-order. Expected "
@@ -3948,7 +3967,8 @@
// send pending result and buffers
sendCaptureResult(r.pendingMetadata, r.resultExtras,
r.collectedPartialResult, msg.frame_number,
- r.hasInputBuffer, r.physicalMetadatas);
+ r.hasInputBuffer, r.zslCapture && r.stillCapture,
+ r.physicalMetadatas);
}
bool timestampIncreasing = !(r.zslCapture || r.hasInputBuffer);
returnOutputBuffers(r.pendingOutputBuffers.array(),
@@ -4032,10 +4052,6 @@
mHidlSession.clear();
}
-bool Camera3Device::HalInterface::supportBatchRequest() {
- return mHidlSession != nullptr;
-}
-
status_t Camera3Device::HalInterface::constructDefaultRequestSettings(
camera3_request_template_t templateId,
/*out*/ camera_metadata_t **requestTemplate) {
@@ -4385,11 +4401,12 @@
status_t Camera3Device::HalInterface::wrapAsHidlRequest(camera3_capture_request_t* request,
/*out*/device::V3_2::CaptureRequest* captureRequest,
- /*out*/std::vector<native_handle_t*>* handlesCreated) {
+ /*out*/std::vector<native_handle_t*>* handlesCreated,
+ /*out*/std::vector<std::pair<int32_t, int32_t>>* inflightBuffers) {
ATRACE_CALL();
- if (captureRequest == nullptr || handlesCreated == nullptr) {
- ALOGE("%s: captureRequest (%p) and handlesCreated (%p) must not be null",
- __FUNCTION__, captureRequest, handlesCreated);
+ if (captureRequest == nullptr || handlesCreated == nullptr || inflightBuffers == nullptr) {
+ ALOGE("%s: captureRequest (%p), handlesCreated (%p), and inflightBuffers (%p) "
+ "must not be null", __FUNCTION__, captureRequest, handlesCreated, inflightBuffers);
return BAD_VALUE;
}
@@ -4419,8 +4436,8 @@
captureRequest->inputBuffer.releaseFence = nullptr;
pushInflightBufferLocked(captureRequest->frameNumber, streamId,
- request->input_buffer->buffer,
- request->input_buffer->acquire_fence);
+ request->input_buffer->buffer);
+ inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
} else {
captureRequest->inputBuffer.streamId = -1;
captureRequest->inputBuffer.bufferId = BUFFER_ID_NO_BUFFER;
@@ -4459,14 +4476,31 @@
// Output buffers are empty when using HAL buffer manager
if (!mUseHalBufManager) {
- pushInflightBufferLocked(captureRequest->frameNumber, streamId,
- src->buffer, src->acquire_fence);
+ pushInflightBufferLocked(captureRequest->frameNumber, streamId, src->buffer);
+ inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
}
}
}
return OK;
}
+void Camera3Device::HalInterface::cleanupNativeHandles(
+ std::vector<native_handle_t*> *handles, bool closeFd) {
+ if (handles == nullptr) {
+ return;
+ }
+ if (closeFd) {
+ for (auto& handle : *handles) {
+ native_handle_close(handle);
+ }
+ }
+ for (auto& handle : *handles) {
+ native_handle_delete(handle);
+ }
+ handles->clear();
+ return;
+}
+
status_t Camera3Device::HalInterface::processBatchCaptureRequests(
std::vector<camera3_capture_request_t*>& requests,/*out*/uint32_t* numRequestProcessed) {
ATRACE_NAME("CameraHal::processBatchCaptureRequests");
@@ -4487,17 +4521,20 @@
captureRequests.resize(batchSize);
}
std::vector<native_handle_t*> handlesCreated;
+ std::vector<std::pair<int32_t, int32_t>> inflightBuffers;
status_t res = OK;
for (size_t i = 0; i < batchSize; i++) {
if (hidlSession_3_4 != nullptr) {
res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests_3_4[i].v3_2,
- /*out*/&handlesCreated);
+ /*out*/&handlesCreated, /*out*/&inflightBuffers);
} else {
- res = wrapAsHidlRequest(requests[i],
- /*out*/&captureRequests[i], /*out*/&handlesCreated);
+ res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests[i],
+ /*out*/&handlesCreated, /*out*/&inflightBuffers);
}
if (res != OK) {
+ popInflightBuffers(inflightBuffers);
+ cleanupNativeHandles(&handlesCreated);
return res;
}
}
@@ -4594,31 +4631,29 @@
}
if (!err.isOk()) {
ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
- return DEAD_OBJECT;
+ status = common::V1_0::Status::CAMERA_DISCONNECTED;
}
+
if (status == common::V1_0::Status::OK && *numRequestProcessed != batchSize) {
ALOGE("%s: processCaptureRequest returns OK but processed %d/%zu requests",
__FUNCTION__, *numRequestProcessed, batchSize);
status = common::V1_0::Status::INTERNAL_ERROR;
}
- for (auto& handle : handlesCreated) {
- native_handle_delete(handle);
+ res = CameraProviderManager::mapToStatusT(status);
+ if (res == OK) {
+ if (mHidlSession->isRemote()) {
+ // Only close acquire fence FDs when the HIDL transaction succeeds (so the FDs have been
+ // sent to camera HAL processes)
+ cleanupNativeHandles(&handlesCreated, /*closeFd*/true);
+ } else {
+ // In passthrough mode the FDs are now owned by HAL
+ cleanupNativeHandles(&handlesCreated);
+ }
+ } else {
+ popInflightBuffers(inflightBuffers);
+ cleanupNativeHandles(&handlesCreated);
}
- return CameraProviderManager::mapToStatusT(status);
-}
-
-status_t Camera3Device::HalInterface::processCaptureRequest(
- camera3_capture_request_t *request) {
- ATRACE_NAME("CameraHal::processCaptureRequest");
- if (!valid()) return INVALID_OPERATION;
- status_t res = OK;
-
- uint32_t numRequestProcessed = 0;
- std::vector<camera3_capture_request_t*> requests(1);
- requests[0] = request;
- res = processBatchCaptureRequests(requests, &numRequestProcessed);
-
return res;
}
@@ -4701,10 +4736,9 @@
}
status_t Camera3Device::HalInterface::pushInflightBufferLocked(
- int32_t frameNumber, int32_t streamId, buffer_handle_t *buffer, int acquireFence) {
+ int32_t frameNumber, int32_t streamId, buffer_handle_t *buffer) {
uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
- auto pair = std::make_pair(buffer, acquireFence);
- mInflightBufferMap[key] = pair;
+ mInflightBufferMap[key] = buffer;
return OK;
}
@@ -4716,16 +4750,22 @@
uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
auto it = mInflightBufferMap.find(key);
if (it == mInflightBufferMap.end()) return NAME_NOT_FOUND;
- auto pair = it->second;
- *buffer = pair.first;
- int acquireFence = pair.second;
- if (acquireFence > 0) {
- ::close(acquireFence);
+ if (buffer != nullptr) {
+ *buffer = it->second;
}
mInflightBufferMap.erase(it);
return OK;
}
+void Camera3Device::HalInterface::popInflightBuffers(
+ const std::vector<std::pair<int32_t, int32_t>>& buffers) {
+ for (const auto& pair : buffers) {
+ int32_t frameNumber = pair.first;
+ int32_t streamId = pair.second;
+ popInflightBuffer(frameNumber, streamId, nullptr);
+ }
+}
+
status_t Camera3Device::HalInterface::pushInflightRequestBuffer(
uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) {
std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
@@ -5172,43 +5212,6 @@
return true;
}
-bool Camera3Device::RequestThread::sendRequestsOneByOne() {
- status_t res;
-
- for (auto& nextRequest : mNextRequests) {
- // Submit request and block until ready for next one
- ATRACE_ASYNC_BEGIN("frame capture", nextRequest.halRequest.frame_number);
- res = mInterface->processCaptureRequest(&nextRequest.halRequest);
-
- if (res != OK) {
- // Should only get a failure here for malformed requests or device-level
- // errors, so consider all errors fatal. Bad metadata failures should
- // come through notify.
- SET_ERR("RequestThread: Unable to submit capture request %d to HAL"
- " device: %s (%d)", nextRequest.halRequest.frame_number, strerror(-res),
- res);
- cleanUpFailedRequests(/*sendRequestError*/ false);
- return false;
- }
-
- // Mark that the request has be submitted successfully.
- nextRequest.submitted = true;
-
- updateNextRequest(nextRequest);
-
- // Remove any previously queued triggers (after unlock)
- res = removeTriggers(mPrevRequest);
- if (res != OK) {
- SET_ERR("RequestThread: Unable to remove triggers "
- "(capture request %d, HAL device: %s (%d)",
- nextRequest.halRequest.frame_number, strerror(-res), res);
- cleanUpFailedRequests(/*sendRequestError*/ false);
- return false;
- }
- }
- return true;
-}
-
nsecs_t Camera3Device::RequestThread::calculateMaxExpectedDuration(const camera_metadata_t *request) {
nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
@@ -5462,11 +5465,8 @@
bool submitRequestSuccess = false;
nsecs_t tRequestStart = systemTime(SYSTEM_TIME_MONOTONIC);
- if (mInterface->supportBatchRequest()) {
- submitRequestSuccess = sendRequestsBatch();
- } else {
- submitRequestSuccess = sendRequestsOneByOne();
- }
+ submitRequestSuccess = sendRequestsBatch();
+
nsecs_t tRequestEnd = systemTime(SYSTEM_TIME_MONOTONIC);
mRequestLatency.add(tRequestStart, tRequestEnd);
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 8f74611..6e8ac84 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -289,9 +289,6 @@
// Reset this HalInterface object (does not call close())
void clear();
- // Check if HalInterface support sending requests in batch
- bool supportBatchRequest();
-
// Calls into the HAL interface
// Caller takes ownership of requestTemplate
@@ -300,7 +297,11 @@
status_t configureStreams(const camera_metadata_t *sessionParams,
/*inout*/ camera3_stream_configuration *config,
const std::vector<uint32_t>& bufferSizes);
- status_t processCaptureRequest(camera3_capture_request_t *request);
+
+ // When the call succeeds, the ownership of acquire fences in requests is transferred to
+ // HalInterface. More specifically, the current implementation will send the fence to
+ // HAL process and close the FD in cameraserver process. When the call fails, the ownership
+ // of the acquire fence still belongs to the caller.
status_t processBatchCaptureRequests(
std::vector<camera3_capture_request_t*>& requests,
/*out*/uint32_t* numRequestProcessed);
@@ -357,13 +358,21 @@
// Do not free input camera3_capture_request_t before output HIDL request
status_t wrapAsHidlRequest(camera3_capture_request_t* in,
/*out*/hardware::camera::device::V3_2::CaptureRequest* out,
- /*out*/std::vector<native_handle_t*>* handlesCreated);
+ /*out*/std::vector<native_handle_t*>* handlesCreated,
+ /*out*/std::vector<std::pair<int32_t, int32_t>>* inflightBuffers);
status_t pushInflightBufferLocked(int32_t frameNumber, int32_t streamId,
- buffer_handle_t *buffer, int acquireFence);
+ buffer_handle_t *buffer);
+
+ // Pop inflight buffers based on pairs of (frameNumber,streamId)
+ void popInflightBuffers(const std::vector<std::pair<int32_t, int32_t>>& buffers);
+
// Cache of buffer handles keyed off (frameNumber << 32 | streamId)
- // value is a pair of (buffer_handle_t*, acquire_fence FD)
- std::unordered_map<uint64_t, std::pair<buffer_handle_t*, int>> mInflightBufferMap;
+ std::unordered_map<uint64_t, buffer_handle_t*> mInflightBufferMap;
+
+ // Delete and optionally close native handles and clear the input vector afterward
+ static void cleanupNativeHandles(
+ std::vector<native_handle_t*> *handles, bool closeFd = false);
struct BufferHasher {
size_t operator()(const buffer_handle_t& buf) const {
@@ -895,9 +904,6 @@
// Clear repeating requests. Must be called with mRequestLock held.
status_t clearRepeatingRequestsLocked(/*out*/ int64_t *lastFrameNumber = NULL);
- // send request in mNextRequests to HAL one by one. Return true = sucssess
- bool sendRequestsOneByOne();
-
// send request in mNextRequests to HAL in a batch. Return true = sucssess
bool sendRequestsBatch();
@@ -1186,10 +1192,14 @@
uint32_t mNextResultFrameNumber;
// the minimal frame number of the next reprocess result
uint32_t mNextReprocessResultFrameNumber;
+ // the minimal frame number of the next ZSL still capture result
+ uint32_t mNextZslStillResultFrameNumber;
// the minimal frame number of the next non-reprocess shutter
uint32_t mNextShutterFrameNumber;
// the minimal frame number of the next reprocess shutter
uint32_t mNextReprocessShutterFrameNumber;
+ // the minimal frame number of the next ZSL still capture shutter
+ uint32_t mNextZslStillShutterFrameNumber;
List<CaptureResult> mResultQueue;
Condition mResultSignal;
wp<NotificationListener> mListener;
@@ -1226,7 +1236,8 @@
void sendCaptureResult(CameraMetadata &pendingMetadata,
CaptureResultExtras &resultExtras,
CameraMetadata &collectedPartialResult, uint32_t frameNumber,
- bool reprocess, const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas);
+ bool reprocess, bool zslStillCapture,
+ const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas);
bool isLastFullResult(const InFlightRequest& inFlightRequest);
diff --git a/services/mediacodec/registrant/Android.bp b/services/mediacodec/registrant/Android.bp
index 1470de2..17c2e02 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/services/mediacodec/registrant/Android.bp
@@ -49,7 +49,6 @@
"libcodec2_soft_flacdec",
"libcodec2_soft_flacenc",
"libcodec2_soft_gsmdec",
- "libcodec2_soft_xaacdec",
],
}
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
index f75515a..9042cd7 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
@@ -42,6 +42,7 @@
lseek: 1
rt_sigprocmask: 1
openat: 1
+open: 1
fstat64: 1
write: 1
nanosleep: 1
@@ -49,6 +50,7 @@
set_tid_address: 1
getdents64: 1
readlinkat: 1
+readlink: 1
read: 1
pread64: 1
fstatfs64: 1