Merge "Fix memory leaks"
diff --git a/media/bufferpool/2.0/AccessorImpl.cpp b/media/bufferpool/2.0/AccessorImpl.cpp
index 2c734ac..5260909 100644
--- a/media/bufferpool/2.0/AccessorImpl.cpp
+++ b/media/bufferpool/2.0/AccessorImpl.cpp
@@ -253,9 +253,21 @@
}
void Accessor::Impl::handleInvalidateAck() {
- std::lock_guard<std::mutex> lock(mBufferPool.mMutex);
- mBufferPool.processStatusMessages();
- mBufferPool.mInvalidation.onHandleAck();
+ std::map<ConnectionId, const sp<IObserver>> observers;
+ uint32_t invalidationId;
+ {
+ std::lock_guard<std::mutex> lock(mBufferPool.mMutex);
+ mBufferPool.processStatusMessages();
+ mBufferPool.mInvalidation.onHandleAck(&observers, &invalidationId);
+ }
+ // Do not hold the lock while sending invalidations.
+ for (auto it = observers.begin(); it != observers.end(); ++it) {
+ const sp<IObserver> observer = it->second;
+ if (observer) {
+ Return<void> transResult = observer->onMessage(it->first, invalidationId);
+ (void) transResult;
+ }
+ }
}
bool Accessor::Impl::isValid() {
@@ -365,19 +377,21 @@
sInvalidator->addAccessor(mId, impl);
}
-void Accessor::Impl::BufferPool::Invalidation::onHandleAck() {
+void Accessor::Impl::BufferPool::Invalidation::onHandleAck(
+ std::map<ConnectionId, const sp<IObserver>> *observers,
+ uint32_t *invalidationId) {
if (mInvalidationId != 0) {
+ *invalidationId = mInvalidationId;
std::set<int> deads;
for (auto it = mAcks.begin(); it != mAcks.end(); ++it) {
if (it->second != mInvalidationId) {
const sp<IObserver> observer = mObservers[it->first];
if (observer) {
- ALOGV("connection %lld call observer (%u: %u)",
+ observers->emplace(it->first, observer);
+ ALOGV("connection %lld will call observer (%u: %u)",
(long long)it->first, it->second, mInvalidationId);
- Return<void> transResult = observer->onMessage(it->first, mInvalidationId);
- (void) transResult;
- // N.B: ignore possibility of onMessage oneway call being
- // lost.
+ // N.B: onMessage will be called later. Ignore the possibility of the
+ // onMessage oneway call being lost.
it->second = mInvalidationId;
} else {
ALOGV("bufferpool2 observer died %lld", (long long)it->first);
diff --git a/media/bufferpool/2.0/AccessorImpl.h b/media/bufferpool/2.0/AccessorImpl.h
index b3faa96..eea72b9 100644
--- a/media/bufferpool/2.0/AccessorImpl.h
+++ b/media/bufferpool/2.0/AccessorImpl.h
@@ -158,7 +158,9 @@
BufferInvalidationChannel &channel,
const std::shared_ptr<Accessor::Impl> &impl);
- void onHandleAck();
+ void onHandleAck(
+ std::map<ConnectionId, const sp<IObserver>> *observers,
+ uint32_t *invalidationId);
} mInvalidation;
/// Buffer pool statistics which tracks allocation and transfer statistics.
struct Stats {
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index ee5cf27..cf06623 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -768,7 +768,11 @@
s_profile_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_PROFILE_PARAMS;
s_profile_params_ip.e_profile = mIntf->getProfile_l();
- s_profile_params_ip.u4_entropy_coding_mode = mEntropyMode;
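+ // Baseline profile does not allow CABAC; use CAVLC (0) there and
+ // CABAC (1) for all other profiles.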
+ if (s_profile_params_ip.e_profile == IV_PROFILE_BASE) {
+ s_profile_params_ip.u4_entropy_coding_mode = 0;
+ } else {
+ s_profile_params_ip.u4_entropy_coding_mode = 1;
+ }
s_profile_params_ip.u4_timestamp_high = -1;
s_profile_params_ip.u4_timestamp_low = -1;
diff --git a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
index 280ae36..48825e4 100644
--- a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
+++ b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
@@ -405,10 +405,7 @@
int numFrames = 0;
int ret = vorbis_dsp_synthesis(mState, &pack, 1);
if (0 != ret) {
- ALOGE("vorbis_dsp_synthesis returned %d", ret);
- mSignalledError = true;
- work->result = C2_CORRUPTED;
- return;
+ ALOGD("vorbis_dsp_synthesis returned %d; ignored", ret);
} else {
numFrames = vorbis_dsp_pcmout(
mState, reinterpret_cast<int16_t *> (wView.data()),
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 8ecbf5d..9ba2362 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -188,6 +188,24 @@
.withConstValue(defaultColorInfo)
.build());
+ addParameter(
+ DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsTuning::output(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mDefaultColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mDefaultColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mDefaultColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mDefaultColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(DefaultColorAspectsSetter)
+ .build());
+
// TODO: support more formats?
addParameter(
DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
@@ -228,6 +246,22 @@
return C2R::Ok();
}
+ static C2R DefaultColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsTuning::output> &me) {
+ (void)mayBlock;
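+ // Clamp each aspect to its last defined enum value (*_OTHER).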
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
const C2P<C2StreamPictureSizeInfo::output> &size) {
@@ -236,6 +270,9 @@
(void)me; // TODO: validate
return C2R::Ok();
}
+ std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
+ return mDefaultColorAspects;
+ }
static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input> &me) {
(void)mayBlock;
@@ -256,6 +293,7 @@
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+ std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
#ifdef VP9
#if 0
std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
@@ -524,32 +562,129 @@
static void copyOutputBufferToYV12Frame(uint8_t *dst,
const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
- uint32_t width, uint32_t height, int32_t bpp) {
- size_t dstYStride = align(width, 16) * bpp ;
+ uint32_t width, uint32_t height) {
+ size_t dstYStride = align(width, 16);
size_t dstUVStride = align(dstYStride / 2, 16);
uint8_t *dstStart = dst;
for (size_t i = 0; i < height; ++i) {
- memcpy(dst, srcY, width * bpp);
+ memcpy(dst, srcY, width);
srcY += srcYStride;
dst += dstYStride;
}
dst = dstStart + dstYStride * height;
for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dst, srcV, width / 2 * bpp);
+ memcpy(dst, srcV, width / 2);
srcV += srcVStride;
dst += dstUVStride;
}
dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dst, srcU, width / 2 * bpp);
+ memcpy(dst, srcU, width / 2);
srcU += srcUStride;
dst += dstUVStride;
}
}
+static void convertYUV420Planar16ToY410(uint32_t *dst,
+ const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ size_t dstStride, size_t width, size_t height) {
+
+ // Converting two lines at a time, slightly faster
+ for (size_t y = 0; y < height; y += 2) {
+ uint32_t *dstTop = (uint32_t *) dst;
+ uint32_t *dstBot = (uint32_t *) (dst + dstStride);
+ uint16_t *ySrcTop = (uint16_t*) srcY;
+ uint16_t *ySrcBot = (uint16_t*) (srcY + srcYStride);
+ uint16_t *uSrc = (uint16_t*) srcU;
+ uint16_t *vSrc = (uint16_t*) srcV;
+
+ uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
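+ // Each 32-bit output word packs, LSB to MSB: 10-bit U, 10-bit Y,
+ // 10-bit V, and a 2-bit alpha set to opaque (3).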
+ size_t x = 0;
+ for (; x < width - 3; x += 4) {
+
+ u01 = *((uint32_t*)uSrc); uSrc += 2;
+ v01 = *((uint32_t*)vSrc); vSrc += 2;
+
+ y01 = *((uint32_t*)ySrcTop); ySrcTop += 2;
+ y23 = *((uint32_t*)ySrcTop); ySrcTop += 2;
+ y45 = *((uint32_t*)ySrcBot); ySrcBot += 2;
+ y67 = *((uint32_t*)ySrcBot); ySrcBot += 2;
+
+ uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
+ uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
+
+ *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
+ *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
+ *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
+ *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
+
+ *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
+ *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
+ *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
+ *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
+ }
+
+ // There should be at most 2 more pixels to process. Note that we don't
+ // need to consider the odd-width case, as the buffer is always aligned
+ // to an even width.
+ if (x < width) {
+ u01 = *uSrc;
+ v01 = *vSrc;
+ y01 = *((uint32_t*)ySrcTop);
+ y45 = *((uint32_t*)ySrcBot);
+ uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
+ *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
+ *dstTop++ = ((y01 >> 16) << 10) | uv0;
+ *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
+ *dstBot++ = ((y45 >> 16) << 10) | uv0;
+ }
+
+ srcY += srcYStride * 2;
+ srcU += srcUStride;
+ srcV += srcVStride;
+ dst += dstStride * 2;
+ }
+
+ return;
+}
+
+static void convertYUV420Planar16ToYUV420Planar(uint8_t *dst,
+ const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ size_t dstStride, size_t width, size_t height) {
+
+ uint8_t *dstY = (uint8_t *)dst;
+ size_t dstYSize = dstStride * height;
+ size_t dstUVStride = align(dstStride / 2, 16);
+ size_t dstUVSize = dstUVStride * height / 2;
+ uint8_t *dstV = dstY + dstYSize;
+ uint8_t *dstU = dstV + dstUVSize;
+
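+ // Reduce each 10-bit sample to 8 bits by dropping the two least
+ // significant bits.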
+ for (size_t y = 0; y < height; ++y) {
+ for (size_t x = 0; x < width; ++x) {
+ dstY[x] = (uint8_t)(srcY[x] >> 2);
+ }
+
+ srcY += srcYStride;
+ dstY += dstStride;
+ }
+
+ for (size_t y = 0; y < (height + 1) / 2; ++y) {
+ for (size_t x = 0; x < (width + 1) / 2; ++x) {
+ dstU[x] = (uint8_t)(srcU[x] >> 2);
+ dstV[x] = (uint8_t)(srcV[x] >> 2);
+ }
+
+ srcU += srcUStride;
+ srcV += srcVStride;
+ dstU += dstUVStride;
+ dstV += dstUVStride;
+ }
+ return;
+}
bool C2SoftVpxDec::outputBuffer(
const std::shared_ptr<C2BlockPool> &pool,
const std::unique_ptr<C2Work> &work)
@@ -581,15 +716,21 @@
}
CHECK(img->fmt == VPX_IMG_FMT_I420 || img->fmt == VPX_IMG_FMT_I42016);
- int32_t bpp = 1;
- if (img->fmt == VPX_IMG_FMT_I42016) {
- bpp = 2;
- }
std::shared_ptr<C2GraphicBlock> block;
uint32_t format = HAL_PIXEL_FORMAT_YV12;
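+ // 10-bit (I42016) HDR10 output cannot be carried in 8-bit YV12; switch
+ // to RGBA_1010102 when the stream signals BT.2020 / SMPTE ST 2084.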
+ if (img->fmt == VPX_IMG_FMT_I42016) {
+ IntfImpl::Lock lock = mIntf->lock();
+ std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects = mIntf->getDefaultColorAspects_l();
+
+ if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+ defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+ defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+ format = HAL_PIXEL_FORMAT_RGBA_1010102;
+ }
+ }
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
- c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16) * bpp, mHeight, format, usage, &block);
+ c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format, usage, &block);
if (err != C2_OK) {
ALOGE("fetchGraphicBlock for Output failed with status %d", err);
work->result = err;
@@ -610,12 +751,30 @@
size_t srcYStride = img->stride[VPX_PLANE_Y];
size_t srcUStride = img->stride[VPX_PLANE_U];
size_t srcVStride = img->stride[VPX_PLANE_V];
- const uint8_t *srcY = (const uint8_t *)img->planes[VPX_PLANE_Y];
- const uint8_t *srcU = (const uint8_t *)img->planes[VPX_PLANE_U];
- const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V];
- copyOutputBufferToYV12Frame(dst, srcY, srcU, srcV,
- srcYStride, srcUStride, srcVStride, mWidth, mHeight, bpp);
+ if (img->fmt == VPX_IMG_FMT_I42016) {
+ const uint16_t *srcY = (const uint16_t *)img->planes[VPX_PLANE_Y];
+ const uint16_t *srcU = (const uint16_t *)img->planes[VPX_PLANE_U];
+ const uint16_t *srcV = (const uint16_t *)img->planes[VPX_PLANE_V];
+
+ if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+ convertYUV420Planar16ToY410((uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
+ srcUStride / 2, srcVStride / 2,
+ align(mWidth, 16),
+ mWidth, mHeight);
+ } else {
+ convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
+ srcUStride / 2, srcVStride / 2,
+ align(mWidth, 16),
+ mWidth, mHeight);
+ }
+ } else {
+ const uint8_t *srcY = (const uint8_t *)img->planes[VPX_PLANE_Y];
+ const uint8_t *srcU = (const uint8_t *)img->planes[VPX_PLANE_U];
+ const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V];
+ copyOutputBufferToYV12Frame(dst, srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride, mWidth, mHeight);
+ }
finishWork(*(int64_t *)img->user_priv, work, std::move(block));
return true;
}
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index 22e8d84..4878974 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -366,16 +366,165 @@
generation, igbp_id, igbp_slot);
}
- // UGLY HACK: assume YCbCr 4:2:0 8-bit format (and lockable via lockYCbCr) if we don't
- // recognize the format
- PixelFormat format = mInfo.mapperInfo.format;
- if (format != PixelFormat::RGBA_8888 && format != PixelFormat::RGBX_8888) {
- format = PixelFormat::YCBCR_420_888;
- }
+ switch (mInfo.mapperInfo.format) {
+ case PixelFormat::RGBA_1010102: {
+ // TRICKY: this format is used by media as YUV444 when queued directly to a
+ // Surface; in all other cases it is RGBA. We don't know which case applies
+ // here, so default to YUV for now.
+ void *pointer = nullptr;
+ mMapper->lock(
+ const_cast<native_handle_t *>(mBuffer),
+ grallocUsage,
+ { (int32_t)rect.left, (int32_t)rect.top, (int32_t)rect.width, (int32_t)rect.height },
+ // TODO: fence
+ hidl_handle(),
+ [&err, &pointer](const auto &maperr, const auto &mapPointer) {
+ err = maperr2error(maperr);
+ if (err == C2_OK) {
+ pointer = mapPointer;
+ }
+ });
+ if (err != C2_OK) {
+ ALOGD("lock failed: %d", err);
+ return err;
+ }
+ // treat as 32-bit values
+ addr[C2PlanarLayout::PLANE_Y] = (uint8_t *)pointer;
+ addr[C2PlanarLayout::PLANE_U] = (uint8_t *)pointer;
+ addr[C2PlanarLayout::PLANE_V] = (uint8_t *)pointer;
+ addr[C2PlanarLayout::PLANE_A] = (uint8_t *)pointer;
+ layout->type = C2PlanarLayout::TYPE_YUVA;
+ layout->numPlanes = 4;
+ layout->rootPlanes = 1;
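+ // All four channels share one interleaved 32-bit plane rooted at
+ // PLANE_Y; each C2PlaneInfo selects its channel via bitDepth/rightShift.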
+ layout->planes[C2PlanarLayout::PLANE_Y] = {
+ C2PlaneInfo::CHANNEL_Y, // channel
+ 4, // colInc
+ 4 * (int32_t)mInfo.stride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 32, // allocatedDepth
+ 10, // bitDepth
+ 10, // rightShift
+ C2PlaneInfo::LITTLE_END, // endianness
+ C2PlanarLayout::PLANE_Y, // rootIx
+ 0, // offset
+ };
+ layout->planes[C2PlanarLayout::PLANE_U] = {
+ C2PlaneInfo::CHANNEL_CB, // channel
+ 4, // colInc
+ 4 * (int32_t)mInfo.stride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 32, // allocatedDepth
+ 10, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::LITTLE_END, // endianness
+ C2PlanarLayout::PLANE_Y, // rootIx
+ 0, // offset
+ };
+ layout->planes[C2PlanarLayout::PLANE_V] = {
+ C2PlaneInfo::CHANNEL_CR, // channel
+ 4, // colInc
+ 4 * (int32_t)mInfo.stride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 32, // allocatedDepth
+ 10, // bitDepth
+ 20, // rightShift
+ C2PlaneInfo::LITTLE_END, // endianness
+ C2PlanarLayout::PLANE_Y, // rootIx
+ 0, // offset
+ };
+ layout->planes[C2PlanarLayout::PLANE_A] = {
+ C2PlaneInfo::CHANNEL_A, // channel
+ 4, // colInc
+ 4 * (int32_t)mInfo.stride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 32, // allocatedDepth
+ 2, // bitDepth
+ 30, // rightShift
+ C2PlaneInfo::LITTLE_END, // endianness
+ C2PlanarLayout::PLANE_Y, // rootIx
+ 0, // offset
+ };
+ break;
+ }
- switch (format) {
+ case PixelFormat::RGBA_8888:
+ // TODO: alpha channel
+ // fall-through
+ case PixelFormat::RGBX_8888: {
+ void *pointer = nullptr;
+ mMapper->lock(
+ const_cast<native_handle_t *>(mBuffer),
+ grallocUsage,
+ { (int32_t)rect.left, (int32_t)rect.top, (int32_t)rect.width, (int32_t)rect.height },
+ // TODO: fence
+ hidl_handle(),
+ [&err, &pointer](const auto &maperr, const auto &mapPointer) {
+ err = maperr2error(maperr);
+ if (err == C2_OK) {
+ pointer = mapPointer;
+ }
+ });
+ if (err != C2_OK) {
+ ALOGD("lock failed: %d", err);
+ return err;
+ }
+ addr[C2PlanarLayout::PLANE_R] = (uint8_t *)pointer;
+ addr[C2PlanarLayout::PLANE_G] = (uint8_t *)pointer + 1;
+ addr[C2PlanarLayout::PLANE_B] = (uint8_t *)pointer + 2;
+ layout->type = C2PlanarLayout::TYPE_RGB;
+ layout->numPlanes = 3;
+ layout->rootPlanes = 1;
+ layout->planes[C2PlanarLayout::PLANE_R] = {
+ C2PlaneInfo::CHANNEL_R, // channel
+ 4, // colInc
+ 4 * (int32_t)mInfo.stride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ C2PlanarLayout::PLANE_R, // rootIx
+ 0, // offset
+ };
+ layout->planes[C2PlanarLayout::PLANE_G] = {
+ C2PlaneInfo::CHANNEL_G, // channel
+ 4, // colInc
+ 4 * (int32_t)mInfo.stride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ C2PlanarLayout::PLANE_R, // rootIx
+ 1, // offset
+ };
+ layout->planes[C2PlanarLayout::PLANE_B] = {
+ C2PlaneInfo::CHANNEL_B, // channel
+ 4, // colInc
+ 4 * (int32_t)mInfo.stride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ C2PlanarLayout::PLANE_R, // rootIx
+ 2, // offset
+ };
+ break;
+ }
+
case PixelFormat::YCBCR_420_888:
- case PixelFormat::YV12: {
+ // fall-through
+ case PixelFormat::YV12:
+ // fall-through
+ default: {
YCbCrLayout ycbcrLayout;
mMapper->lockYCbCr(
const_cast<native_handle_t *>(mBuffer), grallocUsage,
@@ -450,79 +599,6 @@
}
break;
}
-
- case PixelFormat::RGBA_8888:
- // TODO: alpha channel
- // fall-through
- case PixelFormat::RGBX_8888: {
- void *pointer = nullptr;
- mMapper->lock(
- const_cast<native_handle_t *>(mBuffer),
- grallocUsage,
- { (int32_t)rect.left, (int32_t)rect.top, (int32_t)rect.width, (int32_t)rect.height },
- // TODO: fence
- hidl_handle(),
- [&err, &pointer](const auto &maperr, const auto &mapPointer) {
- err = maperr2error(maperr);
- if (err == C2_OK) {
- pointer = mapPointer;
- }
- });
- if (err != C2_OK) {
- ALOGD("lock failed: %d", err);
- return err;
- }
- addr[C2PlanarLayout::PLANE_R] = (uint8_t *)pointer;
- addr[C2PlanarLayout::PLANE_G] = (uint8_t *)pointer + 1;
- addr[C2PlanarLayout::PLANE_B] = (uint8_t *)pointer + 2;
- layout->type = C2PlanarLayout::TYPE_RGB;
- layout->numPlanes = 3;
- layout->rootPlanes = 1;
- layout->planes[C2PlanarLayout::PLANE_R] = {
- C2PlaneInfo::CHANNEL_R, // channel
- 4, // colInc
- 4 * (int32_t)mInfo.stride, // rowInc
- 1, // mColSampling
- 1, // mRowSampling
- 8, // allocatedDepth
- 8, // bitDepth
- 0, // rightShift
- C2PlaneInfo::NATIVE, // endianness
- C2PlanarLayout::PLANE_R, // rootIx
- 0, // offset
- };
- layout->planes[C2PlanarLayout::PLANE_G] = {
- C2PlaneInfo::CHANNEL_G, // channel
- 4, // colInc
- 4 * (int32_t)mInfo.stride, // rowInc
- 1, // mColSampling
- 1, // mRowSampling
- 8, // allocatedDepth
- 8, // bitDepth
- 0, // rightShift
- C2PlaneInfo::NATIVE, // endianness
- C2PlanarLayout::PLANE_R, // rootIx
- 1, // offset
- };
- layout->planes[C2PlanarLayout::PLANE_B] = {
- C2PlaneInfo::CHANNEL_B, // channel
- 4, // colInc
- 4 * (int32_t)mInfo.stride, // rowInc
- 1, // mColSampling
- 1, // mRowSampling
- 8, // allocatedDepth
- 8, // bitDepth
- 0, // rightShift
- C2PlaneInfo::NATIVE, // endianness
- C2PlanarLayout::PLANE_R, // rootIx
- 2, // offset
- };
- break;
- }
- default: {
- ALOGD("unsupported pixel format: %d", mInfo.mapperInfo.format);
- return C2_OMITTED;
- }
}
mLocked = true;
diff --git a/media/libaaudio/tests/test_return_stop.cpp b/media/libaaudio/tests/test_return_stop.cpp
index f34c3c8..9a9e00c 100644
--- a/media/libaaudio/tests/test_return_stop.cpp
+++ b/media/libaaudio/tests/test_return_stop.cpp
@@ -228,8 +228,6 @@
result = AAudioStream_requestStart(engine.stream);
printf("AAudioStream_requestStart() returned %d >>>>>>>>>>>>>>>>>>>>>>\n", result);
if (result != AAUDIO_OK) {
- printf("ERROR - AAudioStream_requestStart returned %s",
- AAudio_convertResultToText(result));
errorCount++;
break;
}
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 1f6dd60..b444d2d 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -952,7 +952,8 @@
if (rate == mSampleRate) {
return NO_ERROR;
}
- if (isOffloadedOrDirect_l() || (mFlags & AUDIO_OUTPUT_FLAG_FAST)) {
+ if (isOffloadedOrDirect_l() || (mFlags & AUDIO_OUTPUT_FLAG_FAST)
+ || (mChannelMask & AUDIO_CHANNEL_HAPTIC_ALL)) {
return INVALID_OPERATION;
}
if (mOutput == AUDIO_IO_HANDLE_NONE) {
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
index 277d95c..e45d81f 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
@@ -178,13 +178,15 @@
#define CS_MIDDLE_96000_B1 (-0.993334)
#define CS_MIDDLE_96000_B2 0.000000
#define CS_MIDDLE_96000_SCALE 15
-/* bandpass filter with fc1 270 and fc2 3703, designed using 2nd order butterworth */
-#define CS_SIDE_96000_A0 0.016727
-#define CS_SIDE_96000_A1 0.000000
-#define CS_SIDE_96000_A2 (-0.016727)
-#define CS_SIDE_96000_B1 (-1.793372)
-#define CS_SIDE_96000_B2 0.797236
-#define CS_SIDE_96000_SCALE 14
+/* Coefficients calculated using tf2ss and ss2tf functions based on
+ * coefficients available for 48000Hz sampling frequency
+ */
+#define CS_SIDE_96000_A0 0.224326f
+#define CS_SIDE_96000_A1 (-0.294937f)
+#define CS_SIDE_96000_A2 0.070611f
+#define CS_SIDE_96000_B1 (-1.792166f)
+#define CS_SIDE_96000_B2 0.795830f
+#define CS_SIDE_96000_SCALE 14
/* Stereo Enhancer coefficients for 176400Hz sample rate.
* The filter coefficients are obtained by carrying out
@@ -211,13 +213,15 @@
#define CS_MIDDLE_192000_B1 (-0.996661)
#define CS_MIDDLE_192000_B2 0.000000
#define CS_MIDDLE_192000_SCALE 15
-/* bandpass filter with fc1 270 and fc2 3703, designed using 2nd order butterworth */
-#define CS_SIDE_192000_A0 0.008991
-#define CS_SIDE_192000_A1 (-0.000000)
-#define CS_SIDE_192000_A2 (-0.008991)
-#define CS_SIDE_192000_B1 (-1.892509)
-#define CS_SIDE_192000_B2 0.893524
-#define CS_SIDE_192000_SCALE 14
+/* Coefficients calculated using tf2ss and ss2tf functions based on
+ * coefficients available for 48000Hz sampling frequency
+ */
+#define CS_SIDE_192000_A0 0.196039f
+#define CS_SIDE_192000_A1 (-0.311027f)
+#define CS_SIDE_192000_A2 0.114988f
+#define CS_SIDE_192000_B1 (-1.891380f)
+#define CS_SIDE_192000_B2 0.8923460f
+#define CS_SIDE_192000_SCALE 14
#endif
/************************************************************************************/
diff --git a/media/libmediaextractor/include/media/stagefright/MetaDataBase.h b/media/libmediaextractor/include/media/stagefright/MetaDataBase.h
index 9f2deda..b99c14c 100644
--- a/media/libmediaextractor/include/media/stagefright/MetaDataBase.h
+++ b/media/libmediaextractor/include/media/stagefright/MetaDataBase.h
@@ -199,6 +199,7 @@
// HDR related
kKeyHdrStaticInfo = 'hdrS', // HDRStaticInfo
+ kKeyHdr10PlusInfo = 'hdrD', // raw data
// color aspects
kKeyColorRange = 'cRng', // int32_t, color range, value defined by ColorAspects.Range
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp b/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp
index 9d9e179..e3c9b4b 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp
@@ -28,7 +28,7 @@
#include <media/stagefright/MediaClock.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
-#include <media/stagefright/VideoFrameScheduler.h>
+#include <media/stagefright/VideoFrameScheduler2.h>
#include <media/MediaCodecBuffer.h>
#include <inttypes.h>
@@ -1436,7 +1436,7 @@
if (mHasVideo) {
if (mVideoScheduler == NULL) {
- mVideoScheduler = new VideoFrameScheduler();
+ mVideoScheduler = new VideoFrameScheduler2();
mVideoScheduler->init();
}
}
@@ -1779,7 +1779,7 @@
void NuPlayer2::Renderer::onSetVideoFrameRate(float fps) {
if (mVideoScheduler == NULL) {
- mVideoScheduler = new VideoFrameScheduler();
+ mVideoScheduler = new VideoFrameScheduler2();
}
mVideoScheduler->init(fps);
}
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.h b/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.h
index 305af68..484d9b7 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.h
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.h
@@ -28,7 +28,7 @@
class JWakeLock;
struct MediaClock;
class MediaCodecBuffer;
-struct VideoFrameScheduler;
+struct VideoFrameSchedulerBase;
struct NuPlayer2::Renderer : public AHandler {
enum Flags {
@@ -156,7 +156,7 @@
List<QueueEntry> mAudioQueue;
List<QueueEntry> mVideoQueue;
uint32_t mNumFramesWritten;
- sp<VideoFrameScheduler> mVideoScheduler;
+ sp<VideoFrameSchedulerBase> mVideoScheduler;
bool mDrainAudioQueuePending;
bool mDrainVideoQueuePending;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index a047975..a521f62 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -28,7 +28,7 @@
class AWakeLock;
struct MediaClock;
class MediaCodecBuffer;
-struct VideoFrameScheduler;
+struct VideoFrameSchedulerBase;
struct NuPlayer::Renderer : public AHandler {
enum Flags {
@@ -156,7 +156,7 @@
List<QueueEntry> mAudioQueue;
List<QueueEntry> mVideoQueue;
uint32_t mNumFramesWritten;
- sp<VideoFrameScheduler> mVideoScheduler;
+ sp<VideoFrameSchedulerBase> mVideoScheduler;
bool mDrainAudioQueuePending;
bool mDrainVideoQueuePending;
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 114f492..6ad0417 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -576,6 +576,7 @@
mTunneled(false),
mDescribeColorAspectsIndex((OMX_INDEXTYPE)0),
mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0),
+ mDescribeHDR10PlusInfoIndex((OMX_INDEXTYPE)0),
mStateGeneration(0),
mVendorExtensionsStatus(kExtensionsUnchecked) {
memset(&mLastHDRStaticInfo, 0, sizeof(mLastHDRStaticInfo));
@@ -3765,8 +3766,17 @@
"OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
if (err != OK) {
mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
+ return err;
}
- return err;
+
+ err = mOMXNode->getExtensionIndex(
+ "OMX.google.android.index.describeHDR10PlusInfo", &mDescribeHDR10PlusInfoIndex);
+ if (err != OK) {
+ mDescribeHDR10PlusInfoIndex = (OMX_INDEXTYPE)0;
+ return err;
+ }
+
+ return OK;
}
status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams &params) {
@@ -5397,6 +5407,70 @@
return getVendorParameters(portIndex, notify);
}
+DescribeHDR10PlusInfoParams* ACodec::getHDR10PlusInfo(size_t paramSizeUsed) {
+ if (mDescribeHDR10PlusInfoIndex == 0) {
+ ALOGE("getHDR10PlusInfo: does not support DescribeHDR10PlusInfoParams");
+ return nullptr;
+ }
+
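+ // DescribeHDR10PlusInfoParams carries its payload in a trailing nValue
+ // array declared with a single element, hence the -1/+1 size arithmetic.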
+ size_t newSize = sizeof(DescribeHDR10PlusInfoParams) - 1 +
+ ((paramSizeUsed > 0) ? paramSizeUsed : 512);
+ if (mHdr10PlusScratchBuffer == nullptr
+ || newSize > mHdr10PlusScratchBuffer->size()) {
+ mHdr10PlusScratchBuffer = new ABuffer(newSize);
+ }
+ DescribeHDR10PlusInfoParams *config =
+ (DescribeHDR10PlusInfoParams *)mHdr10PlusScratchBuffer->data();
+ InitOMXParams(config);
+ config->nSize = mHdr10PlusScratchBuffer->size();
+ config->nPortIndex = 1;
+ size_t paramSize = config->nSize - sizeof(DescribeHDR10PlusInfoParams) + 1;
+ config->nParamSize = paramSize;
+ config->nParamSizeUsed = 0;
+ status_t err = mOMXNode->getConfig(
+ (OMX_INDEXTYPE)mDescribeHDR10PlusInfoIndex,
+ config, config->nSize);
+ if (err != OK) {
+ ALOGE("failed to get DescribeHDR10PlusInfoParams (err %d)", err);
+ return nullptr;
+ }
+ if (config->nParamSize != paramSize) {
+ ALOGE("DescribeHDR10PlusInfoParams alters nParamSize: %u vs %zu",
+ config->nParamSize, paramSize);
+ return nullptr;
+ }
+ if (paramSizeUsed > 0 && config->nParamSizeUsed != paramSizeUsed) {
+ ALOGE("DescribeHDR10PlusInfoParams returns wrong nParamSizeUsed: %u vs %zu",
+ config->nParamSizeUsed, paramSizeUsed);
+ return nullptr;
+ }
+ return config;
+}
+
+void ACodec::onConfigUpdate(OMX_INDEXTYPE configIndex) {
+ if (mDescribeHDR10PlusInfoIndex == 0
+ || configIndex != mDescribeHDR10PlusInfoIndex) {
+ // mDescribeHDR10PlusInfoIndex is the only update we recognize now
+ return;
+ }
+
+ DescribeHDR10PlusInfoParams *config = getHDR10PlusInfo();
+ if (config == nullptr) {
+ return;
+ }
+ if (config->nParamSizeUsed > config->nParamSize) {
+ // try again with the size specified
+ config = getHDR10PlusInfo(config->nParamSizeUsed);
+ if (config == nullptr) {
+ return;
+ }
+ }
+
+ mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event
+ mOutputFormat->setBuffer("hdr10-plus-info",
+ ABuffer::CreateAsCopy(config->nValue, config->nParamSizeUsed));
+}
+
void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) {
// aspects are normally communicated in ColorAspects
int32_t range, standard, transfer;
@@ -6337,6 +6411,15 @@
}
}
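+ // Pass HDR10+ metadata to the native window, but only when it has
+ // changed since the last buffer.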
+ sp<ABuffer> hdr10PlusInfo;
+ if (buffer->format()->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
+ && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0
+ && hdr10PlusInfo != mCodec->mLastHdr10PlusBuffer) {
+ native_window_set_buffers_hdr10_plus_metadata(mCodec->mNativeWindow.get(),
+ hdr10PlusInfo->size(), hdr10PlusInfo->data());
+ mCodec->mLastHdr10PlusBuffer = hdr10PlusInfo;
+ }
+
// save buffers sent to the surface so we can get render time when they return
int64_t mediaTimeUs = -1;
buffer->meta()->findInt64("timeUs", &mediaTimeUs);
@@ -7475,12 +7558,45 @@
}
}
+ sp<ABuffer> hdr10PlusInfo;
+ if (params->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
+ && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
+ (void)setHdr10PlusInfo(hdr10PlusInfo);
+ }
+
// Ignore errors as failure is expected for codecs that aren't video encoders.
(void)configureTemporalLayers(params, false /* inConfigure */, mOutputFormat);
return setVendorParameters(params);
}
+status_t ACodec::setHdr10PlusInfo(const sp<ABuffer> &hdr10PlusInfo) {
+ if (mDescribeHDR10PlusInfoIndex == 0) {
+ ALOGE("setHdr10PlusInfo: does not support DescribeHDR10PlusInfoParams");
+ return ERROR_UNSUPPORTED;
+ }
+ size_t newSize = sizeof(DescribeHDR10PlusInfoParams) + hdr10PlusInfo->size() - 1;
+ if (mHdr10PlusScratchBuffer == nullptr ||
+ newSize > mHdr10PlusScratchBuffer->size()) {
+ mHdr10PlusScratchBuffer = new ABuffer(newSize);
+ }
+ DescribeHDR10PlusInfoParams *config =
+ (DescribeHDR10PlusInfoParams *)mHdr10PlusScratchBuffer->data();
+ InitOMXParams(config);
+ config->nPortIndex = 0;
+ config->nSize = newSize;
+ config->nParamSize = hdr10PlusInfo->size();
+ config->nParamSizeUsed = hdr10PlusInfo->size();
+ memcpy(config->nValue, hdr10PlusInfo->data(), hdr10PlusInfo->size());
+ status_t err = mOMXNode->setConfig(
+ (OMX_INDEXTYPE)mDescribeHDR10PlusInfoIndex,
+ config, config->nSize);
+ if (err != OK) {
+ ALOGE("failed to set DescribeHDR10PlusInfoParams (err %d)", err);
+ }
+ return OK;
+}
+
// Removes trailing tags matching |tag| from |key| (e.g. a settings name). |minLength| specifies
// the minimum number of characters to keep in |key| (even if it has trailing tags).
// (Used to remove trailing 'value' tags in settings names, e.g. to normalize
@@ -7902,6 +8018,15 @@
return true;
}
+ case OMX_EventConfigUpdate:
+ {
+ CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);
+
+ mCodec->onConfigUpdate((OMX_INDEXTYPE)data2);
+
+ return true;
+ }
+
case OMX_EventBufferFlag:
{
return true;
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index dec5d3b..3388ed9 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -133,6 +133,7 @@
"SurfaceUtils.cpp",
"Utils.cpp",
"ThrottledSource.cpp",
+ "VideoFrameSchedulerBase.cpp",
"VideoFrameScheduler.cpp",
],
@@ -237,7 +238,8 @@
"MediaClock.cpp",
"NdkUtils.cpp",
"Utils.cpp",
- "VideoFrameScheduler.cpp",
+ "VideoFrameSchedulerBase.cpp",
+ "VideoFrameScheduler2.cpp",
"http/ClearMediaHTTP.cpp",
],
@@ -247,10 +249,12 @@
"libnetd_client",
"libutils",
"libstagefright_foundation",
+ "libandroid",
],
static_libs: [
"libmedia_player2_util",
+ "libmedia2_jni_core",
],
export_include_dirs: [
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 70064ea..bd9e2bb 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -2197,6 +2197,13 @@
}
}
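+ // Forward any HDR10+ metadata in the output format to the newly set surface.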
+ sp<ABuffer> hdr10PlusInfo;
+ if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
+ && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
+ native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
+ hdr10PlusInfo->size(), hdr10PlusInfo->data());
+ }
+
if (mime.startsWithIgnoreCase("video/")) {
mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
}
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 8f2427e..163cd05 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -119,7 +119,8 @@
}
// if user/container supplied HDR static info without transfer set, assume true
- if (format->contains("hdr-static-info") && !format->contains("color-transfer")) {
+ if ((format->contains("hdr-static-info") || format->contains("hdr10-plus-info"))
+ && !format->contains("color-transfer")) {
return true;
}
// otherwise, verify that an HDR transfer function is set
@@ -878,6 +879,16 @@
ColorUtils::setHDRStaticInfoIntoFormat(*(HDRStaticInfo*)data, msg);
}
+ if (meta->findData(kKeyHdr10PlusInfo, &type, &data, &size)
+ && size > 0) {
+ sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
+ if (buffer.get() == NULL || buffer->base() == NULL) {
+ return NO_MEMORY;
+ }
+ memcpy(buffer->data(), data, size);
+ msg->setBuffer("hdr10-plus-info", buffer);
+ }
+
convertMetaDataToMessageColorAspects(meta, msg);
} else if (!strncasecmp("audio/", mime, 6)) {
int32_t numChannels, sampleRate;
@@ -1626,6 +1637,12 @@
}
}
+ sp<ABuffer> hdr10PlusInfo;
+ if (msg->findBuffer("hdr10-plus-info", &hdr10PlusInfo)) {
+ meta->setData(kKeyHdr10PlusInfo, 0,
+ hdr10PlusInfo->data(), hdr10PlusInfo->size());
+ }
+
convertMessageToMetaDataColorAspects(msg, meta);
AString tsSchema;
diff --git a/media/libstagefright/VideoFrameScheduler.cpp b/media/libstagefright/VideoFrameScheduler.cpp
index 9020fc1..4e5b5e2 100644
--- a/media/libstagefright/VideoFrameScheduler.cpp
+++ b/media/libstagefright/VideoFrameScheduler.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2014 The Android Open Source Project
+ * Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -19,8 +19,7 @@
#include <utils/Log.h>
#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Trace.h>
-
-#include <sys/time.h>
+#include <utils/String16.h>
#include <binder/IServiceManager.h>
#include <gui/ISurfaceComposer.h>
@@ -32,321 +31,14 @@
namespace android {
-static const nsecs_t kNanosIn1s = 1000000000;
-
-template<class T>
-static int compare(const T *lhs, const T *rhs) {
- if (*lhs < *rhs) {
- return -1;
- } else if (*lhs > *rhs) {
- return 1;
- } else {
- return 0;
- }
-}
-
-/* ======================================================================= */
-/* PLL */
-/* ======================================================================= */
-
-static const size_t kMinSamplesToStartPrime = 3;
-static const size_t kMinSamplesToStopPrime = VideoFrameScheduler::kHistorySize;
-static const size_t kMinSamplesToEstimatePeriod = 3;
-static const size_t kMaxSamplesToEstimatePeriod = VideoFrameScheduler::kHistorySize;
-
-static const size_t kPrecision = 12;
-static const int64_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
-static const int64_t kMultiplesThresholdDiv = 4; // 25%
-static const int64_t kReFitThresholdDiv = 100; // 1%
-static const nsecs_t kMaxAllowedFrameSkip = kNanosIn1s; // 1 sec
-static const nsecs_t kMinPeriod = kNanosIn1s / 120; // 120Hz
-static const nsecs_t kRefitRefreshPeriod = 10 * kNanosIn1s; // 10 sec
-
-VideoFrameScheduler::PLL::PLL()
- : mPeriod(-1),
- mPhase(0),
- mPrimed(false),
- mSamplesUsedForPriming(0),
- mLastTime(-1),
- mNumSamples(0) {
-}
-
-void VideoFrameScheduler::PLL::reset(float fps) {
- //test();
-
- mSamplesUsedForPriming = 0;
- mLastTime = -1;
-
- // set up or reset video PLL
- if (fps <= 0.f) {
- mPeriod = -1;
- mPrimed = false;
- } else {
- ALOGV("reset at %.1f fps", fps);
- mPeriod = (nsecs_t)(1e9 / fps + 0.5);
- mPrimed = true;
- }
-
- restart();
-}
-
-// reset PLL but keep previous period estimate
-void VideoFrameScheduler::PLL::restart() {
- mNumSamples = 0;
- mPhase = -1;
-}
-
-#if 0
-
-void VideoFrameScheduler::PLL::test() {
- nsecs_t period = kNanosIn1s / 60;
- mTimes[0] = 0;
- mTimes[1] = period;
- mTimes[2] = period * 3;
- mTimes[3] = period * 4;
- mTimes[4] = period * 7;
- mTimes[5] = period * 8;
- mTimes[6] = period * 10;
- mTimes[7] = period * 12;
- mNumSamples = 8;
- int64_t a, b, err;
- fit(0, period * 12 / 7, 8, &a, &b, &err);
- // a = 0.8(5)+
- // b = -0.14097(2)+
- // err = 0.2750578(703)+
- ALOGD("a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
- (long long)a, (a / (float)(1 << kPrecision)),
- (long long)b, (b / (float)(1 << kPrecision)),
- (long long)err, (err / (float)(1 << (kPrecision * 2))));
-}
-
-#endif
-
-bool VideoFrameScheduler::PLL::fit(
- nsecs_t phase, nsecs_t period, size_t numSamplesToUse,
- int64_t *a, int64_t *b, int64_t *err) {
- if (numSamplesToUse > mNumSamples) {
- numSamplesToUse = mNumSamples;
- }
-
- if ((period >> kPrecision) == 0 ) {
- ALOGW("Period is 0, or after including precision is 0 - would cause div0, returning");
- return false;
- }
-
- int64_t sumX = 0;
- int64_t sumXX = 0;
- int64_t sumXY = 0;
- int64_t sumYY = 0;
- int64_t sumY = 0;
-
- int64_t x = 0; // x usually is in [0..numSamplesToUse)
- nsecs_t lastTime;
- for (size_t i = 0; i < numSamplesToUse; i++) {
- size_t ix = (mNumSamples - numSamplesToUse + i) % kHistorySize;
- nsecs_t time = mTimes[ix];
- if (i > 0) {
- x += divRound(time - lastTime, period);
- }
- // y is usually in [-numSamplesToUse..numSamplesToUse+kRefitRefreshPeriod/kMinPeriod) << kPrecision
- // ideally in [0..numSamplesToUse), but shifted by -numSamplesToUse during
- // priming, and possibly shifted by up to kRefitRefreshPeriod/kMinPeriod
- // while we are not refitting.
- int64_t y = divRound(time - phase, period >> kPrecision);
- sumX += x;
- sumY += y;
- sumXX += x * x;
- sumXY += x * y;
- sumYY += y * y;
- lastTime = time;
- }
-
- int64_t div = (int64_t)numSamplesToUse * sumXX - sumX * sumX;
- if (div == 0) {
- return false;
- }
-
- int64_t a_nom = (int64_t)numSamplesToUse * sumXY - sumX * sumY;
- int64_t b_nom = sumXX * sumY - sumX * sumXY;
- *a = divRound(a_nom, div);
- *b = divRound(b_nom, div);
- // don't use a and b directly as the rounding error is significant
- *err = sumYY - divRound(a_nom * sumXY + b_nom * sumY, div);
- ALOGV("fitting[%zu] a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
- numSamplesToUse,
- (long long)*a, (*a / (float)(1 << kPrecision)),
- (long long)*b, (*b / (float)(1 << kPrecision)),
- (long long)*err, (*err / (float)(1 << (kPrecision * 2))));
- return true;
-}
-
-void VideoFrameScheduler::PLL::prime(size_t numSamplesToUse) {
- if (numSamplesToUse > mNumSamples) {
- numSamplesToUse = mNumSamples;
- }
- CHECK(numSamplesToUse >= 3); // must have at least 3 samples
-
- // estimate video framerate from deltas between timestamps, and
- // 2nd order deltas
- Vector<nsecs_t> deltas;
- nsecs_t lastTime, firstTime;
- for (size_t i = 0; i < numSamplesToUse; ++i) {
- size_t index = (mNumSamples - numSamplesToUse + i) % kHistorySize;
- nsecs_t time = mTimes[index];
- if (i > 0) {
- if (time - lastTime > kMinPeriod) {
- //ALOGV("delta: %lld", (long long)(time - lastTime));
- deltas.push(time - lastTime);
- }
- } else {
- firstTime = time;
- }
- lastTime = time;
- }
- deltas.sort(compare<nsecs_t>);
- size_t numDeltas = deltas.size();
- if (numDeltas > 1) {
- nsecs_t deltaMinLimit = max(deltas[0] / kMultiplesThresholdDiv, kMinPeriod);
- nsecs_t deltaMaxLimit = deltas[numDeltas / 2] * kMultiplesThresholdDiv;
- for (size_t i = numDeltas / 2 + 1; i < numDeltas; ++i) {
- if (deltas[i] > deltaMaxLimit) {
- deltas.resize(i);
- numDeltas = i;
- break;
- }
- }
- for (size_t i = 1; i < numDeltas; ++i) {
- nsecs_t delta2nd = deltas[i] - deltas[i - 1];
- if (delta2nd >= deltaMinLimit) {
- //ALOGV("delta2: %lld", (long long)(delta2nd));
- deltas.push(delta2nd);
- }
- }
- }
-
- // use the one that yields the best match
- int64_t bestScore;
- for (size_t i = 0; i < deltas.size(); ++i) {
- nsecs_t delta = deltas[i];
- int64_t score = 0;
-#if 1
- // simplest score: number of deltas that are near multiples
- size_t matches = 0;
- for (size_t j = 0; j < deltas.size(); ++j) {
- nsecs_t err = periodicError(deltas[j], delta);
- if (err < delta / kMultiplesThresholdDiv) {
- ++matches;
- }
- }
- score = matches;
-#if 0
- // could be weighed by the (1 - normalized error)
- if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
- int64_t a, b, err;
- fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
- err = (1 << (2 * kPrecision)) - err;
- score *= max(0, err);
- }
-#endif
-#else
- // or use the error as a negative score
- if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
- int64_t a, b, err;
- fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
- score = -delta * err;
- }
-#endif
- if (i == 0 || score > bestScore) {
- bestScore = score;
- mPeriod = delta;
- mPhase = firstTime;
- }
- }
- ALOGV("priming[%zu] phase:%lld period:%lld",
- numSamplesToUse, (long long)mPhase, (long long)mPeriod);
-}
-
-nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) {
- if (mLastTime >= 0
- // if time goes backward, or we skipped rendering
- && (time > mLastTime + kMaxAllowedFrameSkip || time < mLastTime)) {
- restart();
- }
-
- mLastTime = time;
- mTimes[mNumSamples % kHistorySize] = time;
- ++mNumSamples;
-
- bool doFit = time > mRefitAt;
- if ((mPeriod <= 0 || !mPrimed) && mNumSamples >= kMinSamplesToStartPrime) {
- prime(kMinSamplesToStopPrime);
- ++mSamplesUsedForPriming;
- doFit = true;
- }
- if (mPeriod > 0 && mNumSamples >= kMinSamplesToEstimatePeriod) {
- if (mPhase < 0) {
- // initialize phase to the current render time
- mPhase = time;
- doFit = true;
- } else if (!doFit) {
- int64_t err = periodicError(time - mPhase, mPeriod);
- doFit = err > mPeriod / kReFitThresholdDiv;
- }
-
- if (doFit) {
- int64_t a, b, err;
- if (!fit(mPhase, mPeriod, kMaxSamplesToEstimatePeriod, &a, &b, &err)) {
- // samples are not suitable for fitting. this means they are
- // also not suitable for priming.
- ALOGV("could not fit - keeping old period:%lld", (long long)mPeriod);
- return mPeriod;
- }
-
- mRefitAt = time + kRefitRefreshPeriod;
-
- mPhase += (mPeriod * b) >> kPrecision;
- mPeriod = (mPeriod * a) >> kPrecision;
- ALOGV("new phase:%lld period:%lld", (long long)mPhase, (long long)mPeriod);
-
- if (err < kErrorThreshold) {
- if (!mPrimed && mSamplesUsedForPriming >= kMinSamplesToStopPrime) {
- mPrimed = true;
- }
- } else {
- mPrimed = false;
- mSamplesUsedForPriming = 0;
- }
- }
- }
- return mPeriod;
-}
-
-nsecs_t VideoFrameScheduler::PLL::getPeriod() const {
- return mPrimed ? mPeriod : 0;
-}
-
-/* ======================================================================= */
-/* Frame Scheduler */
-/* ======================================================================= */
-
-static const nsecs_t kDefaultVsyncPeriod = kNanosIn1s / 60; // 60Hz
-static const nsecs_t kVsyncRefreshPeriod = kNanosIn1s; // 1 sec
-
-VideoFrameScheduler::VideoFrameScheduler()
- : mVsyncTime(0),
- mVsyncPeriod(0),
- mVsyncRefreshAt(0),
- mLastVsyncTime(-1),
- mTimeCorrection(0) {
+VideoFrameScheduler::VideoFrameScheduler() : VideoFrameSchedulerBase() {
}
void VideoFrameScheduler::updateVsync() {
mVsyncRefreshAt = systemTime(SYSTEM_TIME_MONOTONIC) + kVsyncRefreshPeriod;
- mVsyncPeriod = 0;
mVsyncTime = 0;
+ mVsyncPeriod = 0;
- // TODO: schedule frames for the destination surface
- // For now, surface flinger only schedules frames on the primary display
if (mComposer == NULL) {
String16 name("SurfaceFlinger");
sp<IServiceManager> sm = defaultServiceManager();
@@ -368,136 +60,6 @@
}
}
-void VideoFrameScheduler::init(float videoFps) {
- updateVsync();
-
- mLastVsyncTime = -1;
- mTimeCorrection = 0;
-
- mPll.reset(videoFps);
-}
-
-void VideoFrameScheduler::restart() {
- mLastVsyncTime = -1;
- mTimeCorrection = 0;
-
- mPll.restart();
-}
-
-nsecs_t VideoFrameScheduler::getVsyncPeriod() {
- if (mVsyncPeriod > 0) {
- return mVsyncPeriod;
- }
- return kDefaultVsyncPeriod;
-}
-
-float VideoFrameScheduler::getFrameRate() {
- nsecs_t videoPeriod = mPll.getPeriod();
- if (videoPeriod > 0) {
- return 1e9 / videoPeriod;
- }
- return 0.f;
-}
-
-nsecs_t VideoFrameScheduler::schedule(nsecs_t renderTime) {
- nsecs_t origRenderTime = renderTime;
-
- nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
- if (now >= mVsyncRefreshAt) {
- updateVsync();
- }
-
- // without VSYNC info, there is nothing to do
- if (mVsyncPeriod == 0) {
- ALOGV("no vsync: render=%lld", (long long)renderTime);
- return renderTime;
- }
-
- // ensure vsync time is well before (corrected) render time
- if (mVsyncTime > renderTime - 4 * mVsyncPeriod) {
- mVsyncTime -=
- ((mVsyncTime - renderTime) / mVsyncPeriod + 5) * mVsyncPeriod;
- }
-
- // Video presentation takes place at the VSYNC _after_ renderTime. Adjust renderTime
- // so this effectively becomes a rounding operation (to the _closest_ VSYNC.)
- renderTime -= mVsyncPeriod / 2;
-
- const nsecs_t videoPeriod = mPll.addSample(origRenderTime);
- if (videoPeriod > 0) {
- // Smooth out rendering
- size_t N = 12;
- nsecs_t fiveSixthDev =
- abs(((videoPeriod * 5 + mVsyncPeriod) % (mVsyncPeriod * 6)) - mVsyncPeriod)
- / (mVsyncPeriod / 100);
- // use 20 samples if we are doing 5:6 ratio +- 1% (e.g. playing 50Hz on 60Hz)
- if (fiveSixthDev < 12) { /* 12% / 6 = 2% */
- N = 20;
- }
-
- nsecs_t offset = 0;
- nsecs_t edgeRemainder = 0;
- for (size_t i = 1; i <= N; i++) {
- offset +=
- (renderTime + mTimeCorrection + videoPeriod * i - mVsyncTime) % mVsyncPeriod;
- edgeRemainder += (videoPeriod * i) % mVsyncPeriod;
- }
- mTimeCorrection += mVsyncPeriod / 2 - offset / (nsecs_t)N;
- renderTime += mTimeCorrection;
- nsecs_t correctionLimit = mVsyncPeriod * 3 / 5;
- edgeRemainder = abs(edgeRemainder / (nsecs_t)N - mVsyncPeriod / 2);
- if (edgeRemainder <= mVsyncPeriod / 3) {
- correctionLimit /= 2;
- }
-
- // estimate how many VSYNCs a frame will spend on the display
- nsecs_t nextVsyncTime =
- renderTime + mVsyncPeriod - ((renderTime - mVsyncTime) % mVsyncPeriod);
- if (mLastVsyncTime >= 0) {
- size_t minVsyncsPerFrame = videoPeriod / mVsyncPeriod;
- size_t vsyncsForLastFrame = divRound(nextVsyncTime - mLastVsyncTime, mVsyncPeriod);
- bool vsyncsPerFrameAreNearlyConstant =
- periodicError(videoPeriod, mVsyncPeriod) / (mVsyncPeriod / 20) == 0;
-
- if (mTimeCorrection > correctionLimit &&
- (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame > minVsyncsPerFrame)) {
- // remove a VSYNC
- mTimeCorrection -= mVsyncPeriod / 2;
- renderTime -= mVsyncPeriod / 2;
- nextVsyncTime -= mVsyncPeriod;
- if (vsyncsForLastFrame > 0)
- --vsyncsForLastFrame;
- } else if (mTimeCorrection < -correctionLimit &&
- (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame == minVsyncsPerFrame)) {
- // add a VSYNC
- mTimeCorrection += mVsyncPeriod / 2;
- renderTime += mVsyncPeriod / 2;
- nextVsyncTime += mVsyncPeriod;
- if (vsyncsForLastFrame < ULONG_MAX)
- ++vsyncsForLastFrame;
- } else if (mTimeCorrection < -correctionLimit * 2
- || mTimeCorrection > correctionLimit * 2) {
- ALOGW("correction beyond limit: %lld vs %lld (vsyncs for last frame: %zu, min: %zu)"
- " restarting. render=%lld",
- (long long)mTimeCorrection, (long long)correctionLimit,
- vsyncsForLastFrame, minVsyncsPerFrame, (long long)origRenderTime);
- restart();
- return origRenderTime;
- }
-
- ATRACE_INT("FRAME_VSYNCS", vsyncsForLastFrame);
- }
- mLastVsyncTime = nextVsyncTime;
- }
-
- // align rendertime to the center between VSYNC edges
- renderTime -= (renderTime - mVsyncTime) % mVsyncPeriod;
- renderTime += mVsyncPeriod / 2;
- ALOGV("adjusting render: %lld => %lld", (long long)origRenderTime, (long long)renderTime);
- ATRACE_INT("FRAME_FLIP_IN(ms)", (renderTime - now) / 1000000);
- return renderTime;
-}
-
void VideoFrameScheduler::release() {
mComposer.clear();
}
@@ -507,4 +69,3 @@
}
} // namespace android
-
diff --git a/media/libstagefright/VideoFrameScheduler2.cpp b/media/libstagefright/VideoFrameScheduler2.cpp
new file mode 100644
index 0000000..e02ae7d
--- /dev/null
+++ b/media/libstagefright/VideoFrameScheduler2.cpp
@@ -0,0 +1,285 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoFrameScheduler2"
+#include <utils/Log.h>
+#define ATRACE_TAG ATRACE_TAG_VIDEO
+#include <utils/Mutex.h>
+#include <utils/Thread.h>
+#include <utils/Trace.h>
+
+#include <algorithm>
+#include <jni.h>
+#include <math.h>
+
+#include <android/choreographer.h>
+#include <android/looper.h>
+#include <media/stagefright/VideoFrameScheduler2.h>
+#include <mediaplayer2/JavaVMHelper.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
+
+namespace android {
+
+static void getVsyncOffset(long* appVsyncOffsetPtr, long* sfVsyncOffsetPtr);
+
+/* ======================================================================= */
+/* VsyncTracker */
+/* ======================================================================= */
+
+class VsyncTracker : public RefBase {
+public:
+ VsyncTracker();
+ ~VsyncTracker() {}
+ long getVsyncPeriod();
+ long getVsyncTime(long periodOffset);
+ void addSample(long timestamp);
+
+private:
+ static const int kMaxSamples = 32;
+ static const int kMinSamplesForUpdate = 6;
+ int mNumSamples;
+ int mFirstSample;
+ long mReferenceTime;
+ long mPhase;
+ long mPeriod;
+ long mTimestampSamples[kMaxSamples];
+ Mutex mLock;
+
+ void updateModelLocked();
+};
+
+VsyncTracker::VsyncTracker()
+ : mNumSamples(0),
+ mFirstSample(0),
+ mReferenceTime(0),
+ mPhase(0),
+ mPeriod(0) {
+ for (int i = 0; i < kMaxSamples; i++) {
+ mTimestampSamples[i] = 0;
+ }
+}
+
+long VsyncTracker::getVsyncPeriod() {
+ Mutex::Autolock dataLock(mLock);
+ return mPeriod;
+}
+
+long VsyncTracker::getVsyncTime(long periodOffset) {
+ Mutex::Autolock dataLock(mLock);
+ if (mPeriod == 0) {
+ // No model yet; callers treat a zero period/time as "no vsync info".
+ return 0;
+ }
+ const long now = systemTime();
+ long phase = mReferenceTime + mPhase;
+ return (((now - phase) / mPeriod) + periodOffset + 1) * mPeriod + phase;
+}
+
+void VsyncTracker::addSample(long timestamp) {
+ Mutex::Autolock dataLock(mLock);
+ if (mNumSamples == 0) {
+ mPhase = 0;
+ mReferenceTime = timestamp;
+ }
+ int idx = (mFirstSample + mNumSamples) % kMaxSamples;
+ mTimestampSamples[idx] = timestamp;
+ if (mNumSamples < kMaxSamples) {
+ mNumSamples++;
+ } else {
+ mFirstSample = (mFirstSample + 1) % kMaxSamples;
+ }
+ updateModelLocked();
+}
+
+void VsyncTracker::updateModelLocked() {
+ if (mNumSamples < kMinSamplesForUpdate) {
+ return;
+ }
+ long durationSum = 0;
+ long minDuration = LONG_MAX;
+ long maxDuration = 0;
+
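+ // Average the inter-sample durations, dropping the smallest and the
+ // largest as outliers (hence the division by mNumSamples - 3 below).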
+ for (int i = 1; i < mNumSamples; i++) {
+ int idx = (mFirstSample + i) % kMaxSamples;
+ int prev = (idx + kMaxSamples - 1) % kMaxSamples;
+ long duration = mTimestampSamples[idx] - mTimestampSamples[prev];
+ durationSum += duration;
+ minDuration = min(minDuration, duration);
+ maxDuration = max(maxDuration, duration);
+ }
+
+ durationSum -= (minDuration + maxDuration);
+ mPeriod = durationSum / (mNumSamples - 3);
+
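+ // Estimate the phase as the circular mean of each sample's offset within
+ // one period, via atan2 of the averaged unit-circle projections.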
+ double sampleAvgX = 0.0;
+ double sampleAvgY = 0.0;
+ double scale = 2.0 * M_PI / (double) mPeriod;
+
+ for (int i = 1; i < mNumSamples; i++) {
+ int idx = (mFirstSample + i) % kMaxSamples;
+ long sample = mTimestampSamples[idx] - mReferenceTime;
+ double samplePhase = (double) (sample % mPeriod) * scale;
+ sampleAvgX += cos(samplePhase);
+ sampleAvgY += sin(samplePhase);
+ }
+
+ sampleAvgX /= (double) mNumSamples - 1.0;
+ sampleAvgY /= (double) mNumSamples - 1.0;
+ mPhase = (long) (atan2(sampleAvgY, sampleAvgX) / scale);
+}
+
+static void frameCallback(long frameTimeNanos, void* data) {
+ if (data == NULL) {
+ return;
+ }
+ sp<VsyncTracker> vsyncTracker(static_cast<VsyncTracker*>(data));
+ vsyncTracker->addSample(frameTimeNanos);
+ AChoreographer_postFrameCallback(AChoreographer_getInstance(),
+ frameCallback, static_cast<void*>(vsyncTracker.get()));
+}
+
+/* ======================================================================= */
+/* JNI */
+/* ======================================================================= */
+
+static void getVsyncOffset(long* appVsyncOffsetPtr, long* sfVsyncOffsetPtr) {
+ static const long kOneMillisecInNanosec = 1000000;
+ static const long kOneSecInNanosec = kOneMillisecInNanosec * 1000;
+
+ JNIEnv *env = JavaVMHelper::getJNIEnv();
+ jclass jDisplayManagerGlobalCls = env->FindClass(
+ "android/hardware/display/DisplayManagerGlobal");
+ jclass jDisplayCls = env->FindClass("android/view/Display");
+
+ jmethodID jGetInstance = env->GetStaticMethodID(jDisplayManagerGlobalCls,
+ "getInstance", "()Landroid/hardware/display/DisplayManagerGlobal;");
+ jobject javaDisplayManagerGlobalObj = env->CallStaticObjectMethod(
+ jDisplayManagerGlobalCls, jGetInstance);
+
+ jfieldID jDEFAULT_DISPLAY = env->GetStaticFieldID(jDisplayCls, "DEFAULT_DISPLAY", "I");
+ jint DEFAULT_DISPLAY = env->GetStaticIntField(jDisplayCls, jDEFAULT_DISPLAY);
+
+ jmethodID jgetRealDisplay = env->GetMethodID(jDisplayManagerGlobalCls,
+ "getRealDisplay", "(I)Landroid/view/Display;");
+ jobject javaDisplayObj = env->CallObjectMethod(
+ javaDisplayManagerGlobalObj, jgetRealDisplay, DEFAULT_DISPLAY);
+
+ jmethodID jGetRefreshRate = env->GetMethodID(jDisplayCls, "getRefreshRate", "()F");
+ jfloat javaRefreshRate = env->CallFloatMethod(javaDisplayObj, jGetRefreshRate);
+ long vsyncPeriod = (long) (kOneSecInNanosec / (float) javaRefreshRate);
+
+ jmethodID jGetAppVsyncOffsetNanos = env->GetMethodID(
+ jDisplayCls, "getAppVsyncOffsetNanos", "()J");
+ jlong javaAppVsyncOffset = env->CallLongMethod(javaDisplayObj, jGetAppVsyncOffsetNanos);
+ *appVsyncOffsetPtr = (long) javaAppVsyncOffset;
+
+ jmethodID jGetPresentationDeadlineNanos = env->GetMethodID(
+ jDisplayCls, "getPresentationDeadlineNanos", "()J");
+ jlong javaPresentationDeadline = env->CallLongMethod(
+ javaDisplayObj, jGetPresentationDeadlineNanos);
+
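+ // The display reports presentationDeadline = vsyncPeriod - sfVsyncOffset
+ // + 1ms, so recover the SurfaceFlinger vsync offset from it.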
+ *sfVsyncOffsetPtr = vsyncPeriod - ((long) javaPresentationDeadline - kOneMillisecInNanosec);
+}
+
+/* ======================================================================= */
+/* Choreographer Thread */
+/* ======================================================================= */
+
+struct ChoreographerThread : public Thread {
+ ChoreographerThread(bool canCallJava);
+ status_t init(void* data);
+ virtual status_t readyToRun() override;
+ virtual bool threadLoop() override;
+
+protected:
+ virtual ~ChoreographerThread() {}
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(ChoreographerThread);
+ void* mData;
+};
+
+ChoreographerThread::ChoreographerThread(bool canCallJava) : Thread(canCallJava) {
+}
+
+status_t ChoreographerThread::init(void* data) {
+ if (data == NULL) {
+ return NO_INIT;
+ }
+ mData = data;
+ return OK;
+}
+
+status_t ChoreographerThread::readyToRun() {
+ ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
+ if (AChoreographer_getInstance() == NULL) {
+ return NO_INIT;
+ }
+ AChoreographer_postFrameCallback(AChoreographer_getInstance(), frameCallback, mData);
+ return OK;
+}
+
+bool ChoreographerThread::threadLoop() {
+ ALooper_pollOnce(-1, nullptr, nullptr, nullptr);
+ return true;
+}
+
+/* ======================================================================= */
+/* Frame Scheduler */
+/* ======================================================================= */
+
+VideoFrameScheduler2::VideoFrameScheduler2() : VideoFrameSchedulerBase() {
+
+ getVsyncOffset(&mAppVsyncOffset, &mSfVsyncOffset);
+
+ Mutex::Autolock threadLock(mLock);
+ mChoreographerThread = new ChoreographerThread(true);
+
+ mVsyncTracker = new VsyncTracker();
+ if (mChoreographerThread->init(static_cast<void*>(mVsyncTracker.get())) != OK) {
+ mChoreographerThread.clear();
+ }
+ if (mChoreographerThread != NULL && mChoreographerThread->run("Choreographer") != OK) {
+ mChoreographerThread.clear();
+ }
+}
+
+void VideoFrameScheduler2::updateVsync() {
+ mVsyncTime = 0;
+ mVsyncPeriod = 0;
+
+ if (mVsyncTracker != NULL) {
+ mVsyncPeriod = mVsyncTracker->getVsyncPeriod();
+ mVsyncTime = mVsyncTracker->getVsyncTime(mSfVsyncOffset - mAppVsyncOffset);
+ }
+ mVsyncRefreshAt = systemTime(SYSTEM_TIME_MONOTONIC) + kVsyncRefreshPeriod;
+}
+
+void VideoFrameScheduler2::release() {
+ // Do not change order
+ {
+ Mutex::Autolock threadLock(mLock);
+ mChoreographerThread->requestExitAndWait();
+ mChoreographerThread.clear();
+ }
+
+ mVsyncTracker.clear();
+}
+
+VideoFrameScheduler2::~VideoFrameScheduler2() {
+ release();
+}
+
+} // namespace android
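A minimal usage sketch of the scheduler above, assuming a caller that owns its
own frame queue (haveFrames(), nextFrameTimeNs() and queueBufferAt() are
hypothetical stand-ins, not real APIs):

#include <media/stagefright/VideoFrameScheduler2.h>

using namespace android;

// Stand-ins for the caller's own plumbing; hypothetical.
bool haveFrames();
nsecs_t nextFrameTimeNs();
void queueBufferAt(nsecs_t whenNs);

// Hypothetical render loop driving VideoFrameScheduler2.
void renderLoop(float nominalFps) {
    sp<VideoFrameScheduler2> scheduler = new VideoFrameScheduler2();
    scheduler->init(nominalFps);  // primes the PLL with the nominal frame rate
    while (haveFrames()) {
        // schedule() snaps each render time to the middle of a VSYNC interval
        queueBufferAt(scheduler->schedule(nextFrameTimeNs()));
    }
    scheduler->release();  // joins the Choreographer thread, clears the tracker
}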
diff --git a/media/libstagefright/VideoFrameSchedulerBase.cpp b/media/libstagefright/VideoFrameSchedulerBase.cpp
new file mode 100644
index 0000000..77107ff
--- /dev/null
+++ b/media/libstagefright/VideoFrameSchedulerBase.cpp
@@ -0,0 +1,465 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoFrameSchedulerBase"
+#include <utils/Log.h>
+#define ATRACE_TAG ATRACE_TAG_VIDEO
+#include <utils/Trace.h>
+#include <utils/Vector.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/VideoFrameSchedulerBase.h>
+
+namespace android {
+
+template<class T>
+static int compare(const T *lhs, const T *rhs) {
+ if (*lhs < *rhs) {
+ return -1;
+ } else if (*lhs > *rhs) {
+ return 1;
+ } else {
+ return 0;
+ }
+}
+
+/* ======================================================================= */
+/* PLL */
+/* ======================================================================= */
+
+static const size_t kMinSamplesToStartPrime = 3;
+static const size_t kMinSamplesToStopPrime = VideoFrameSchedulerBase::kHistorySize;
+static const size_t kMinSamplesToEstimatePeriod = 3;
+static const size_t kMaxSamplesToEstimatePeriod = VideoFrameSchedulerBase::kHistorySize;
+
+static const size_t kPrecision = 12;
+static const int64_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
+static const int64_t kMultiplesThresholdDiv = 4; // 25%
+static const int64_t kReFitThresholdDiv = 100; // 1%
+static const nsecs_t kMaxAllowedFrameSkip = VideoFrameSchedulerBase::kNanosIn1s; // 1 sec
+static const nsecs_t kMinPeriod = VideoFrameSchedulerBase::kNanosIn1s / 120; // 120Hz
+static const nsecs_t kRefitRefreshPeriod = 10 * VideoFrameSchedulerBase::kNanosIn1s; // 10 sec
+
+VideoFrameSchedulerBase::PLL::PLL()
+ : mPeriod(-1),
+ mPhase(0),
+ mPrimed(false),
+ mSamplesUsedForPriming(0),
+ mLastTime(-1),
+ mNumSamples(0) {
+}
+
+void VideoFrameSchedulerBase::PLL::reset(float fps) {
+ //test();
+
+ mSamplesUsedForPriming = 0;
+ mLastTime = -1;
+
+ // set up or reset video PLL
+ if (fps <= 0.f) {
+ mPeriod = -1;
+ mPrimed = false;
+ } else {
+ ALOGV("reset at %.1f fps", fps);
+ mPeriod = (nsecs_t)(1e9 / fps + 0.5);
+ mPrimed = true;
+ }
+
+ restart();
+}
+
+// reset PLL but keep previous period estimate
+void VideoFrameSchedulerBase::PLL::restart() {
+ mNumSamples = 0;
+ mPhase = -1;
+}
+
+#if 0
+
+void VideoFrameSchedulerBase::PLL::test() {
+ nsecs_t period = VideoFrameSchedulerBase::kNanosIn1s / 60;
+ mTimes[0] = 0;
+ mTimes[1] = period;
+ mTimes[2] = period * 3;
+ mTimes[3] = period * 4;
+ mTimes[4] = period * 7;
+ mTimes[5] = period * 8;
+ mTimes[6] = period * 10;
+ mTimes[7] = period * 12;
+ mNumSamples = 8;
+ int64_t a, b, err;
+ fit(0, period * 12 / 7, 8, &a, &b, &err);
+ // a = 0.8(5)+
+ // b = -0.14097(2)+
+ // err = 0.2750578(703)+
+ ALOGD("a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
+ (long long)a, (a / (float)(1 << kPrecision)),
+ (long long)b, (b / (float)(1 << kPrecision)),
+ (long long)err, (err / (float)(1 << (kPrecision * 2))));
+}
+
+#endif
+
+bool VideoFrameSchedulerBase::PLL::fit(
+ nsecs_t phase, nsecs_t period, size_t numSamplesToUse,
+ int64_t *a, int64_t *b, int64_t *err) {
+ if (numSamplesToUse > mNumSamples) {
+ numSamplesToUse = mNumSamples;
+ }
+
+ if ((period >> kPrecision) == 0) {
+ ALOGW("Period is 0, or becomes 0 after the precision shift - would cause div0, returning");
+ return false;
+ }
+
+ int64_t sumX = 0;
+ int64_t sumXX = 0;
+ int64_t sumXY = 0;
+ int64_t sumYY = 0;
+ int64_t sumY = 0;
+
+ int64_t x = 0; // x usually is in [0..numSamplesToUse)
+ nsecs_t lastTime;
+ for (size_t i = 0; i < numSamplesToUse; i++) {
+ size_t ix = (mNumSamples - numSamplesToUse + i) % kHistorySize;
+ nsecs_t time = mTimes[ix];
+ if (i > 0) {
+ x += divRound(time - lastTime, period);
+ }
+ // y is usually in [-numSamplesToUse..numSamplesToUse+kRefitRefreshPeriod/kMinPeriod) << kPrecision
+ // ideally in [0..numSamplesToUse), but shifted by -numSamplesToUse during
+ // priming, and possibly shifted by up to kRefitRefreshPeriod/kMinPeriod
+ // while we are not refitting.
+ int64_t y = divRound(time - phase, period >> kPrecision);
+ sumX += x;
+ sumY += y;
+ sumXX += x * x;
+ sumXY += x * y;
+ sumYY += y * y;
+ lastTime = time;
+ }
+
+ int64_t div = (int64_t)numSamplesToUse * sumXX - sumX * sumX;
+ if (div == 0) {
+ return false;
+ }
+
+ int64_t a_nom = (int64_t)numSamplesToUse * sumXY - sumX * sumY;
+ int64_t b_nom = sumXX * sumY - sumX * sumXY;
+ *a = divRound(a_nom, div);
+ *b = divRound(b_nom, div);
+ // don't use a and b directly as the rounding error is significant
+ *err = sumYY - divRound(a_nom * sumXY + b_nom * sumY, div);
+ ALOGV("fitting[%zu] a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
+ numSamplesToUse,
+ (long long)*a, (*a / (float)(1 << kPrecision)),
+ (long long)*b, (*b / (float)(1 << kPrecision)),
+ (long long)*err, (*err / (float)(1 << (kPrecision * 2))));
+ return true;
+}
+
+void VideoFrameSchedulerBase::PLL::prime(size_t numSamplesToUse) {
+ if (numSamplesToUse > mNumSamples) {
+ numSamplesToUse = mNumSamples;
+ }
+ CHECK(numSamplesToUse >= 3); // must have at least 3 samples
+
+ // estimate video framerate from deltas between timestamps, and
+ // 2nd order deltas
+ Vector<nsecs_t> deltas;
+ nsecs_t lastTime, firstTime;
+ for (size_t i = 0; i < numSamplesToUse; ++i) {
+ size_t index = (mNumSamples - numSamplesToUse + i) % kHistorySize;
+ nsecs_t time = mTimes[index];
+ if (i > 0) {
+ if (time - lastTime > kMinPeriod) {
+ //ALOGV("delta: %lld", (long long)(time - lastTime));
+ deltas.push(time - lastTime);
+ }
+ } else {
+ firstTime = time;
+ }
+ lastTime = time;
+ }
+ deltas.sort(compare<nsecs_t>);
+ size_t numDeltas = deltas.size();
+ if (numDeltas > 1) {
+ nsecs_t deltaMinLimit = max(deltas[0] / kMultiplesThresholdDiv, kMinPeriod);
+ nsecs_t deltaMaxLimit = deltas[numDeltas / 2] * kMultiplesThresholdDiv;
+ for (size_t i = numDeltas / 2 + 1; i < numDeltas; ++i) {
+ if (deltas[i] > deltaMaxLimit) {
+ deltas.resize(i);
+ numDeltas = i;
+ break;
+ }
+ }
+ for (size_t i = 1; i < numDeltas; ++i) {
+ nsecs_t delta2nd = deltas[i] - deltas[i - 1];
+ if (delta2nd >= deltaMinLimit) {
+ //ALOGV("delta2: %lld", (long long)(delta2nd));
+ deltas.push(delta2nd);
+ }
+ }
+ }
+
+ // use the one that yields the best match
+ int64_t bestScore;
+ for (size_t i = 0; i < deltas.size(); ++i) {
+ nsecs_t delta = deltas[i];
+ int64_t score = 0;
+#if 1
+ // simplest score: number of deltas that are near multiples
+ size_t matches = 0;
+ for (size_t j = 0; j < deltas.size(); ++j) {
+ nsecs_t err = periodicError(deltas[j], delta);
+ if (err < delta / kMultiplesThresholdDiv) {
+ ++matches;
+ }
+ }
+ score = matches;
+#if 0
+ // could be weighed by the (1 - normalized error)
+ if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
+ int64_t a, b, err;
+ fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
+ err = (1 << (2 * kPrecision)) - err;
+ score *= max(0, err);
+ }
+#endif
+#else
+ // or use the error as a negative score
+ if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
+ int64_t a, b, err;
+ fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
+ score = -delta * err;
+ }
+#endif
+ if (i == 0 || score > bestScore) {
+ bestScore = score;
+ mPeriod = delta;
+ mPhase = firstTime;
+ }
+ }
+ ALOGV("priming[%zu] phase:%lld period:%lld",
+ numSamplesToUse, (long long)mPhase, (long long)mPeriod);
+}
+
+nsecs_t VideoFrameSchedulerBase::PLL::addSample(nsecs_t time) {
+ if (mLastTime >= 0
+ // if time goes backward, or we skipped rendering
+ && (time > mLastTime + kMaxAllowedFrameSkip || time < mLastTime)) {
+ restart();
+ }
+
+ mLastTime = time;
+ mTimes[mNumSamples % kHistorySize] = time;
+ ++mNumSamples;
+
+ bool doFit = time > mRefitAt;
+ if ((mPeriod <= 0 || !mPrimed) && mNumSamples >= kMinSamplesToStartPrime) {
+ prime(kMinSamplesToStopPrime);
+ ++mSamplesUsedForPriming;
+ doFit = true;
+ }
+ if (mPeriod > 0 && mNumSamples >= kMinSamplesToEstimatePeriod) {
+ if (mPhase < 0) {
+ // initialize phase to the current render time
+ mPhase = time;
+ doFit = true;
+ } else if (!doFit) {
+ int64_t err = periodicError(time - mPhase, mPeriod);
+ doFit = err > mPeriod / kReFitThresholdDiv;
+ }
+
+ if (doFit) {
+ int64_t a, b, err;
+ if (!fit(mPhase, mPeriod, kMaxSamplesToEstimatePeriod, &a, &b, &err)) {
+ // samples are not suitable for fitting. this means they are
+ // also not suitable for priming.
+ ALOGV("could not fit - keeping old period:%lld", (long long)mPeriod);
+ return mPeriod;
+ }
+
+ mRefitAt = time + kRefitRefreshPeriod;
+
+ mPhase += (mPeriod * b) >> kPrecision;
+ mPeriod = (mPeriod * a) >> kPrecision;
+ ALOGV("new phase:%lld period:%lld", (long long)mPhase, (long long)mPeriod);
+
+ if (err < kErrorThreshold) {
+ if (!mPrimed && mSamplesUsedForPriming >= kMinSamplesToStopPrime) {
+ mPrimed = true;
+ }
+ } else {
+ mPrimed = false;
+ mSamplesUsedForPriming = 0;
+ }
+ }
+ }
+ return mPeriod;
+}
+
+nsecs_t VideoFrameSchedulerBase::PLL::getPeriod() const {
+ return mPrimed ? mPeriod : 0;
+}
+
+/* ======================================================================= */
+/* Frame Scheduler */
+/* ======================================================================= */
+
+VideoFrameSchedulerBase::VideoFrameSchedulerBase()
+ : mVsyncTime(0),
+ mVsyncPeriod(0),
+ mVsyncRefreshAt(0),
+ mLastVsyncTime(-1),
+ mTimeCorrection(0) {
+}
+
+void VideoFrameSchedulerBase::init(float videoFps) {
+ updateVsync();
+
+ mLastVsyncTime = -1;
+ mTimeCorrection = 0;
+
+ mPll.reset(videoFps);
+}
+
+void VideoFrameSchedulerBase::restart() {
+ mLastVsyncTime = -1;
+ mTimeCorrection = 0;
+
+ mPll.restart();
+}
+
+nsecs_t VideoFrameSchedulerBase::getVsyncPeriod() {
+ if (mVsyncPeriod > 0) {
+ return mVsyncPeriod;
+ }
+ return kDefaultVsyncPeriod;
+}
+
+float VideoFrameSchedulerBase::getFrameRate() {
+ nsecs_t videoPeriod = mPll.getPeriod();
+ if (videoPeriod > 0) {
+ return 1e9 / videoPeriod;
+ }
+ return 0.f;
+}
+
+nsecs_t VideoFrameSchedulerBase::schedule(nsecs_t renderTime) {
+ nsecs_t origRenderTime = renderTime;
+
+ nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
+ if (now >= mVsyncRefreshAt) {
+ updateVsync();
+ }
+
+ // without VSYNC info, there is nothing to do
+ if (mVsyncPeriod == 0) {
+ ALOGV("no vsync: render=%lld", (long long)renderTime);
+ return renderTime;
+ }
+
+ // ensure vsync time is well before (corrected) render time
+ if (mVsyncTime > renderTime - 4 * mVsyncPeriod) {
+ mVsyncTime -=
+ ((mVsyncTime - renderTime) / mVsyncPeriod + 5) * mVsyncPeriod;
+ }
+
+ // Video presentation takes place at the VSYNC _after_ renderTime. Adjust renderTime
+ // so this effectively becomes a rounding operation (to the _closest_ VSYNC.)
+ renderTime -= mVsyncPeriod / 2;
+
+ const nsecs_t videoPeriod = mPll.addSample(origRenderTime);
+ if (videoPeriod > 0) {
+ // Smooth out rendering
+ size_t N = 12;
+ nsecs_t fiveSixthDev =
+ abs(((videoPeriod * 5 + mVsyncPeriod) % (mVsyncPeriod * 6)) - mVsyncPeriod)
+ / (mVsyncPeriod / 100);
+ // use 20 samples if we are doing 5:6 ratio +- 1% (e.g. playing 50Hz on 60Hz)
+ if (fiveSixthDev < 12) { /* 12% / 6 = 2% */
+ N = 20;
+ }
+
+ nsecs_t offset = 0;
+ nsecs_t edgeRemainder = 0;
+ for (size_t i = 1; i <= N; i++) {
+ offset +=
+ (renderTime + mTimeCorrection + videoPeriod * i - mVsyncTime) % mVsyncPeriod;
+ edgeRemainder += (videoPeriod * i) % mVsyncPeriod;
+ }
+ mTimeCorrection += mVsyncPeriod / 2 - offset / (nsecs_t)N;
+ renderTime += mTimeCorrection;
+ nsecs_t correctionLimit = mVsyncPeriod * 3 / 5;
+ edgeRemainder = abs(edgeRemainder / (nsecs_t)N - mVsyncPeriod / 2);
+ if (edgeRemainder <= mVsyncPeriod / 3) {
+ correctionLimit /= 2;
+ }
+
+ // estimate how many VSYNCs a frame will spend on the display
+ nsecs_t nextVsyncTime =
+ renderTime + mVsyncPeriod - ((renderTime - mVsyncTime) % mVsyncPeriod);
+ if (mLastVsyncTime >= 0) {
+ size_t minVsyncsPerFrame = videoPeriod / mVsyncPeriod;
+ size_t vsyncsForLastFrame = divRound(nextVsyncTime - mLastVsyncTime, mVsyncPeriod);
+ bool vsyncsPerFrameAreNearlyConstant =
+ periodicError(videoPeriod, mVsyncPeriod) / (mVsyncPeriod / 20) == 0;
+
+ if (mTimeCorrection > correctionLimit &&
+ (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame > minVsyncsPerFrame)) {
+ // remove a VSYNC
+ mTimeCorrection -= mVsyncPeriod / 2;
+ renderTime -= mVsyncPeriod / 2;
+ nextVsyncTime -= mVsyncPeriod;
+ if (vsyncsForLastFrame > 0)
+ --vsyncsForLastFrame;
+ } else if (mTimeCorrection < -correctionLimit &&
+ (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame == minVsyncsPerFrame)) {
+ // add a VSYNC
+ mTimeCorrection += mVsyncPeriod / 2;
+ renderTime += mVsyncPeriod / 2;
+ nextVsyncTime += mVsyncPeriod;
+ if (vsyncsForLastFrame < ULONG_MAX)
+ ++vsyncsForLastFrame;
+ } else if (mTimeCorrection < -correctionLimit * 2
+ || mTimeCorrection > correctionLimit * 2) {
+ ALOGW("correction beyond limit: %lld vs %lld (vsyncs for last frame: %zu, min: %zu)"
+ " restarting. render=%lld",
+ (long long)mTimeCorrection, (long long)correctionLimit,
+ vsyncsForLastFrame, minVsyncsPerFrame, (long long)origRenderTime);
+ restart();
+ return origRenderTime;
+ }
+
+ ATRACE_INT("FRAME_VSYNCS", vsyncsForLastFrame);
+ }
+ mLastVsyncTime = nextVsyncTime;
+ }
+
+ // align rendertime to the center between VSYNC edges
+ renderTime -= (renderTime - mVsyncTime) % mVsyncPeriod;
+ renderTime += mVsyncPeriod / 2;
+ ALOGV("adjusting render: %lld => %lld", (long long)origRenderTime, (long long)renderTime);
+ ATRACE_INT("FRAME_FLIP_IN(ms)", (renderTime - now) / 1000000);
+ return renderTime;
+}
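// Worked example of the 5:6 check above: for 50Hz video on a 60Hz display,
// videoPeriod = 20ms and mVsyncPeriod ~= 16.67ms, so
//   (videoPeriod * 5 + mVsyncPeriod) % (mVsyncPeriod * 6)
//       = (100ms + 16.67ms) % 100ms = 16.67ms,
// and subtracting mVsyncPeriod leaves 0: fiveSixthDev = 0 < 12, so the
// smoothing window grows to N = 20 samples.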
+
+VideoFrameSchedulerBase::~VideoFrameSchedulerBase() {}
+
+} // namespace android
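For reference, a standalone sketch of the closed-form regression that fit()
performs, with the Q12 (kPrecision = 12) fixed-point scaling spelled out.
divRound below is a local stand-in assumed to round like the AUtils.h helper;
with ideal, equally spaced samples the slope comes out as 1 << kPrecision and
the intercept as 0, so the update mPeriod = (mPeriod * a) >> kPrecision leaves
the estimate unchanged:

#include <cstdint>
#include <cstdio>

// Rounding division; local stand-in assumed to behave like AUtils.h divRound.
static int64_t divRound(int64_t num, int64_t den) {
    return (num + (num < 0 ? -den / 2 : den / 2)) / den;
}

int main() {
    const int kPrecision = 12;        // Q12 fixed point, as in fit() above
    const int64_t period = 16666667;  // ~60Hz in nanoseconds
    const int64_t phase = 0;
    int64_t sumX = 0, sumY = 0, sumXX = 0, sumXY = 0;
    const int n = 4;                  // four ideal, equally spaced samples
    for (int i = 0; i < n; ++i) {
        int64_t t = phase + i * period;  // sample time
        int64_t x = i;                   // elapsed periods, as in fit()
        int64_t y = divRound(t - phase, period >> kPrecision);  // Q12 units
        sumX += x; sumY += y; sumXX += x * x; sumXY += x * y;
    }
    int64_t div = n * sumXX - sumX * sumX;
    int64_t a = divRound(n * sumXY - sumX * sumY, div);      // slope in Q12
    int64_t b = divRound(sumXX * sumY - sumX * sumXY, div);  // intercept in Q12
    // Ideal samples give a == 1 << kPrecision (slope 1.0) and b == 0.
    printf("a=%lld b=%lld\n", (long long)a, (long long)b);
    return 0;
}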
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
index 379d41e..e0f2683 100644
--- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
+++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
@@ -1058,8 +1058,8 @@
}
}
-OMX_ERRORTYPE SoftAVC::setConfig(
- OMX_INDEXTYPE index, const OMX_PTR _params) {
+OMX_ERRORTYPE SoftAVC::internalSetConfig(
+ OMX_INDEXTYPE index, const OMX_PTR _params, bool *frameConfig) {
switch ((int)index) {
case OMX_IndexConfigVideoIntraVOPRefresh:
{
@@ -1125,7 +1125,7 @@
}
default:
- return SimpleSoftOMXComponent::setConfig(index, _params);
+ return SimpleSoftOMXComponent::internalSetConfig(index, _params, frameConfig);
}
}
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
index a43cdf1..8253b7d 100644
--- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
+++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
@@ -219,8 +219,8 @@
OMX_ERRORTYPE internalSetBitrateParams(
const OMX_VIDEO_PARAM_BITRATETYPE *bitrate);
- OMX_ERRORTYPE setConfig(
- OMX_INDEXTYPE index, const OMX_PTR _params);
+ OMX_ERRORTYPE internalSetConfig(
+ OMX_INDEXTYPE index, const OMX_PTR _params, bool *frameConfig);
OMX_ERRORTYPE getConfig(
OMX_INDEXTYPE index, const OMX_PTR _params);
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
index 8d5f3e7..0f2ff17 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
@@ -33,6 +33,7 @@
{ OMX_VIDEO_VP9Profile0, OMX_VIDEO_VP9Level5 },
{ OMX_VIDEO_VP9Profile2, OMX_VIDEO_VP9Level5 },
{ OMX_VIDEO_VP9Profile2HDR, OMX_VIDEO_VP9Level5 },
+ { OMX_VIDEO_VP9Profile2HDR10Plus, OMX_VIDEO_VP9Level5 },
};
SoftVPX::SoftVPX(
@@ -84,6 +85,10 @@
return true;
}
+bool SoftVPX::supportDescribeHdr10PlusInfo() {
+ return true;
+}
+
status_t SoftVPX::initDecoder() {
mCtx = new vpx_codec_ctx_t;
vpx_codec_err_t vpx_err;
@@ -167,7 +172,12 @@
outHeader->nOffset = 0;
outHeader->nFlags = 0;
outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * bpp * 3) / 2;
- outHeader->nTimeStamp = *(OMX_TICKS *)mImg->user_priv;
+ PrivInfo *privInfo = (PrivInfo *)mImg->user_priv;
+ outHeader->nTimeStamp = privInfo->mTimeStamp;
+ if (privInfo->mHdr10PlusInfo != nullptr) {
+ queueOutputFrameConfig(privInfo->mHdr10PlusInfo);
+ }
+
if (outputBufferSafe(outHeader)) {
uint8_t *dst = outHeader->pBuffer;
const uint8_t *srcY = (const uint8_t *)mImg->planes[VPX_PLANE_Y];
@@ -275,7 +285,13 @@
}
}
- mTimeStamps[mTimeStampIdx] = inHeader->nTimeStamp;
+ mPrivInfo[mTimeStampIdx].mTimeStamp = inHeader->nTimeStamp;
+
+ if (inInfo->mFrameConfig) {
+ mPrivInfo[mTimeStampIdx].mHdr10PlusInfo = dequeueInputFrameConfig();
+ } else {
+ mPrivInfo[mTimeStampIdx].mHdr10PlusInfo.clear();
+ }
if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
mEOSStatus = INPUT_EOS_SEEN;
@@ -285,7 +301,7 @@
if (inHeader->nFilledLen > 0) {
vpx_codec_err_t err = vpx_codec_decode(
(vpx_codec_ctx_t *)mCtx, inHeader->pBuffer + inHeader->nOffset,
- inHeader->nFilledLen, &mTimeStamps[mTimeStampIdx], 0);
+ inHeader->nFilledLen, &mPrivInfo[mTimeStampIdx], 0);
if (err == VPX_CODEC_OK) {
inInfo->mOwnedByUs = false;
inQueue.erase(inQueue.begin());
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h
index b62b526..0aa8e9c 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.h
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.h
@@ -26,6 +26,8 @@
namespace android {
+struct ABuffer;
+
struct SoftVPX : public SoftVideoDecoderOMXComponent {
SoftVPX(const char *name,
const char *componentRole,
@@ -41,6 +43,7 @@
virtual void onPortFlushCompleted(OMX_U32 portIndex);
virtual void onReset();
virtual bool supportDescribeHdrStaticInfo();
+ virtual bool supportDescribeHdr10PlusInfo();
private:
enum {
@@ -60,7 +63,11 @@
void *mCtx;
bool mFrameParallelMode; // Frame parallel is only supported by VP9 decoder.
- OMX_TICKS mTimeStamps[kNumBuffers];
+ struct PrivInfo {
+ OMX_TICKS mTimeStamp;
+ sp<ABuffer> mHdr10PlusInfo;
+ };
+ PrivInfo mPrivInfo[kNumBuffers];
uint8_t mTimeStampIdx;
vpx_image_t *mImg;
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index 2dfba13..d0cb071 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -401,8 +401,8 @@
}
}
-OMX_ERRORTYPE SoftVPXEncoder::setConfig(
- OMX_INDEXTYPE index, const OMX_PTR _params) {
+OMX_ERRORTYPE SoftVPXEncoder::internalSetConfig(
+ OMX_INDEXTYPE index, const OMX_PTR _params, bool *frameConfig) {
switch (index) {
case OMX_IndexConfigVideoIntraVOPRefresh:
{
@@ -442,7 +442,7 @@
}
default:
- return SimpleSoftOMXComponent::setConfig(index, _params);
+ return SimpleSoftOMXComponent::internalSetConfig(index, _params, frameConfig);
}
}
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
index dd86d36..263d134 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
@@ -84,8 +84,8 @@
virtual OMX_ERRORTYPE internalSetParameter(
OMX_INDEXTYPE index, const OMX_PTR param);
- virtual OMX_ERRORTYPE setConfig(
- OMX_INDEXTYPE index, const OMX_PTR params);
+ virtual OMX_ERRORTYPE internalSetConfig(
+ OMX_INDEXTYPE index, const OMX_PTR params, bool *frameConfig);
// OMX callback when buffers available
// Note that both an input and output buffer
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 80125d4..9b2853e 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -237,6 +237,8 @@
android_native_rect_t mLastNativeWindowCrop;
int32_t mLastNativeWindowDataSpace;
HDRStaticInfo mLastHDRStaticInfo;
+ sp<ABuffer> mHdr10PlusScratchBuffer;
+ sp<ABuffer> mLastHdr10PlusBuffer;
sp<AMessage> mConfigFormat;
sp<AMessage> mInputFormat;
sp<AMessage> mOutputFormat;
@@ -290,6 +292,7 @@
OMX_INDEXTYPE mDescribeColorAspectsIndex;
OMX_INDEXTYPE mDescribeHDRStaticInfoIndex;
+ OMX_INDEXTYPE mDescribeHDR10PlusInfoIndex;
std::shared_ptr<ACodecBufferChannel> mBufferChannel;
@@ -424,6 +427,11 @@
// unspecified values.
void onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects);
+ // notifies the codec that the config with |configIndex| has changed; the new
+ // value can be queried via OMX getConfig, and the config should be applied to
+ // the next output buffer notified after this callback.
+ void onConfigUpdate(OMX_INDEXTYPE configIndex);
+
// gets index or sets it to 0 on error. Returns error from codec.
status_t initDescribeHDRStaticInfoIndex();
@@ -435,12 +443,22 @@
// sets |params|. Returns the codec error.
status_t setHDRStaticInfo(const DescribeHDRStaticInfoParams &params);
+ // sets |hdr10PlusInfo|. Returns the codec error.
+ status_t setHdr10PlusInfo(const sp<ABuffer> &hdr10PlusInfo);
+
// gets |params|. Returns the codec error.
status_t getHDRStaticInfo(DescribeHDRStaticInfoParams &params);
// gets HDR static information for the video encoder/decoder port and sets them into |format|.
status_t getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format);
+ // gets DescribeHDR10PlusInfoParams params. If |paramSizeUsed| is zero, the
+ // returned DescribeHDR10PlusInfoParams may have only its nParamSizeUsed field
+ // updated, because the provided storage was insufficient. In that case,
+ // getHDR10PlusInfo() should be called again with |paramSizeUsed| set to the
+ // previously returned value.
+ DescribeHDR10PlusInfoParams* getHDR10PlusInfo(size_t paramSizeUsed = 0);
+
typedef struct drcParams {
int32_t drcCut;
int32_t drcBoost;
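A sketch of the size-query retry that the getHDR10PlusInfo() comment above
describes, as it might look at a call site inside ACodec (fragment only; error
paths elided, field names taken from the getConfig() handling later in this
change):

// Fragment sketching the size-query retry documented above; hypothetical.
DescribeHDR10PlusInfoParams *config = getHDR10PlusInfo();  // default-sized query
if (config != nullptr && config->nParamSizeUsed > config->nParamSize) {
    // The storage was too small, so only nParamSizeUsed is valid; ask again
    // with the size the codec reported.
    config = getHDR10PlusInfo(config->nParamSizeUsed);
}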
diff --git a/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h b/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h
index 9d97dfd..fcfcbec 100644
--- a/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h
+++ b/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h
@@ -1,5 +1,5 @@
/*
- * Copyright 2014, The Android Open Source Project
+ * Copyright 2018, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,87 +17,24 @@
#ifndef VIDEO_FRAME_SCHEDULER_H_
#define VIDEO_FRAME_SCHEDULER_H_
-#include <utils/RefBase.h>
-#include <utils/Timers.h>
-
-#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/VideoFrameSchedulerBase.h>
namespace android {
class ISurfaceComposer;
-struct VideoFrameScheduler : public RefBase {
+struct VideoFrameScheduler : public VideoFrameSchedulerBase {
VideoFrameScheduler();
-
- // (re)initialize scheduler
- void init(float videoFps = -1);
- // use in case of video render-time discontinuity, e.g. seek
- void restart();
- // get adjusted nanotime for a video frame render at renderTime
- nsecs_t schedule(nsecs_t renderTime);
-
- // returns the vsync period for the main display
- nsecs_t getVsyncPeriod();
-
- // returns the current frames-per-second, or 0.f if not primed
- float getFrameRate();
-
- void release();
-
- static const size_t kHistorySize = 8;
+ void release() override;
protected:
virtual ~VideoFrameScheduler();
private:
- struct PLL {
- PLL();
-
- // reset PLL to new PLL
- void reset(float fps = -1);
- // keep current estimate, but restart phase
- void restart();
- // returns period or 0 if not yet primed
- nsecs_t addSample(nsecs_t time);
- nsecs_t getPeriod() const;
-
- private:
- nsecs_t mPeriod;
- nsecs_t mPhase;
-
- bool mPrimed; // have an estimate for the period
- size_t mSamplesUsedForPriming;
-
- nsecs_t mLastTime; // last input time
- nsecs_t mRefitAt; // next input time to fit at
-
- size_t mNumSamples; // can go past kHistorySize
- nsecs_t mTimes[kHistorySize];
-
- void test();
- // returns whether fit was successful
- bool fit(nsecs_t phase, nsecs_t period, size_t numSamples,
- int64_t *a, int64_t *b, int64_t *err);
- void prime(size_t numSamples);
- };
-
- void updateVsync();
-
- nsecs_t mVsyncTime; // vsync timing from display
- nsecs_t mVsyncPeriod;
- nsecs_t mVsyncRefreshAt; // next time to refresh timing info
-
- nsecs_t mLastVsyncTime; // estimated vsync time for last frame
- nsecs_t mTimeCorrection; // running adjustment
-
- PLL mPll; // PLL for video frame rate based on render time
-
+ void updateVsync() override;
sp<ISurfaceComposer> mComposer;
-
- DISALLOW_EVIL_CONSTRUCTORS(VideoFrameScheduler);
};
} // namespace android
#endif // VIDEO_FRAME_SCHEDULER_H_
-
diff --git a/media/libstagefright/include/media/stagefright/VideoFrameScheduler2.h b/media/libstagefright/include/media/stagefright/VideoFrameScheduler2.h
new file mode 100644
index 0000000..be911cc
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/VideoFrameScheduler2.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_FRAME_SCHEDULER_2_H_
+#define VIDEO_FRAME_SCHEDULER_2_H_
+
+#include <media/stagefright/VideoFrameSchedulerBase.h>
+
+namespace android {
+
+class VsyncTracker;
+struct ChoreographerThread;
+
+struct VideoFrameScheduler2 : public VideoFrameSchedulerBase {
+ VideoFrameScheduler2();
+ void release() override;
+
+protected:
+ virtual ~VideoFrameScheduler2();
+
+private:
+ void updateVsync() override;
+
+ long mAppVsyncOffset;
+ long mSfVsyncOffset;
+ sp<VsyncTracker> mVsyncTracker;
+ sp<ChoreographerThread> mChoreographerThread;
+ Mutex mLock;
+};
+
+} // namespace android
+
+#endif // VIDEO_FRAME_SCHEDULER_2_H_
diff --git a/media/libstagefright/include/media/stagefright/VideoFrameSchedulerBase.h b/media/libstagefright/include/media/stagefright/VideoFrameSchedulerBase.h
new file mode 100644
index 0000000..ff5f716
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/VideoFrameSchedulerBase.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2018, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_FRAME_SCHEDULER_BASE_H_
+#define VIDEO_FRAME_SCHEDULER_BASE_H_
+
+#include <utils/RefBase.h>
+#include <utils/Timers.h>
+
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+struct VideoFrameSchedulerBase : public RefBase {
+ VideoFrameSchedulerBase();
+
+ // (re)initialize scheduler
+ void init(float videoFps = -1);
+ // use in case of video render-time discontinuity, e.g. seek
+ void restart();
+ // get adjusted nanotime for a video frame render at renderTime
+ nsecs_t schedule(nsecs_t renderTime);
+
+ // returns the vsync period for the main display
+ nsecs_t getVsyncPeriod();
+
+ // returns the current frames-per-second, or 0.f if not primed
+ float getFrameRate();
+
+ virtual void release() = 0;
+
+ static const size_t kHistorySize = 8;
+ static const nsecs_t kNanosIn1s = 1000000000;
+ static const nsecs_t kDefaultVsyncPeriod = kNanosIn1s / 60; // 60Hz
+ static const nsecs_t kVsyncRefreshPeriod = kNanosIn1s; // 1 sec
+
+protected:
+ virtual ~VideoFrameSchedulerBase();
+
+ nsecs_t mVsyncTime; // vsync timing from display
+ nsecs_t mVsyncPeriod;
+ nsecs_t mVsyncRefreshAt; // next time to refresh timing info
+
+private:
+ struct PLL {
+ PLL();
+
+ // reset PLL to new PLL
+ void reset(float fps = -1);
+ // keep current estimate, but restart phase
+ void restart();
+ // returns period or 0 if not yet primed
+ nsecs_t addSample(nsecs_t time);
+ nsecs_t getPeriod() const;
+
+ private:
+ nsecs_t mPeriod;
+ nsecs_t mPhase;
+
+ bool mPrimed; // have an estimate for the period
+ size_t mSamplesUsedForPriming;
+
+ nsecs_t mLastTime; // last input time
+ nsecs_t mRefitAt; // next input time to fit at
+
+ size_t mNumSamples; // can go past kHistorySize
+ nsecs_t mTimes[kHistorySize];
+
+ void test();
+ // returns whether fit was successful
+ bool fit(nsecs_t phase, nsecs_t period, size_t numSamples,
+ int64_t *a, int64_t *b, int64_t *err);
+ void prime(size_t numSamples);
+ };
+
+ virtual void updateVsync() = 0;
+
+ nsecs_t mLastVsyncTime; // estimated vsync time for last frame
+ nsecs_t mTimeCorrection; // running adjustment
+ PLL mPll; // PLL for video frame rate based on render time
+
+ DISALLOW_EVIL_CONSTRUCTORS(VideoFrameSchedulerBase);
+};
+
+} // namespace android
+
+#endif // VIDEO_FRAME_SCHEDULER_BASE_H_
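The contract those pure virtuals impose on subclasses, as a minimal
hypothetical sketch (a real implementation queries the display the way
VideoFrameScheduler's ISurfaceComposer path or VideoFrameScheduler2's
Choreographer path does):

#include <media/stagefright/VideoFrameSchedulerBase.h>

namespace android {

// Hypothetical minimal subclass; for illustration only.
struct FixedVsyncScheduler : public VideoFrameSchedulerBase {
    void release() override {}  // nothing to tear down in this sketch

protected:
    virtual ~FixedVsyncScheduler() {}

private:
    void updateVsync() override {
        // Pretend the display runs at the default 60Hz cadence.
        mVsyncPeriod = kDefaultVsyncPeriod;
        mVsyncTime = systemTime(SYSTEM_TIME_MONOTONIC);
        mVsyncRefreshAt = mVsyncTime + kVsyncRefreshPeriod;
    }
};

}  // namespace android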
diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
index 55afe04..ddb459f 100644
--- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
@@ -34,7 +34,8 @@
mLooper(new ALooper),
mHandler(new AHandlerReflector<SimpleSoftOMXComponent>(this)),
mState(OMX_StateLoaded),
- mTargetState(OMX_StateLoaded) {
+ mTargetState(OMX_StateLoaded),
+ mFrameConfig(false) {
mLooper->setName(name);
mLooper->registerHandler(mHandler);
@@ -204,6 +205,21 @@
}
}
+OMX_ERRORTYPE SimpleSoftOMXComponent::internalSetConfig(
+ OMX_INDEXTYPE index, const OMX_PTR params, bool *frameConfig) {
+ return OMX_ErrorUndefined;
+}
+
+OMX_ERRORTYPE SimpleSoftOMXComponent::setConfig(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ bool frameConfig = mFrameConfig;
+ OMX_ERRORTYPE err = internalSetConfig(index, params, &frameConfig);
+ if (err == OMX_ErrorNone) {
+ mFrameConfig = frameConfig;
+ }
+ return err;
+}
+
OMX_ERRORTYPE SimpleSoftOMXComponent::useBuffer(
OMX_BUFFERHEADERTYPE **header,
OMX_U32 portIndex,
@@ -336,6 +352,10 @@
OMX_BUFFERHEADERTYPE *buffer) {
sp<AMessage> msg = new AMessage(kWhatEmptyThisBuffer, mHandler);
msg->setPointer("header", buffer);
+ if (mFrameConfig) {
+ msg->setInt32("frame-config", mFrameConfig);
+ mFrameConfig = false;
+ }
msg->post();
return OMX_ErrorNone;
@@ -378,6 +398,10 @@
{
OMX_BUFFERHEADERTYPE *header;
CHECK(msg->findPointer("header", (void **)&header));
+ int32_t frameConfig;
+ if (!msg->findInt32("frame-config", &frameConfig)) {
+ frameConfig = 0;
+ }
CHECK(mState == OMX_StateExecuting && mTargetState == mState);
@@ -393,6 +417,7 @@
CHECK(!buffer->mOwnedByUs);
buffer->mOwnedByUs = true;
+ buffer->mFrameConfig = (bool)frameConfig;
CHECK((msgType == kWhatEmptyThisBuffer
&& port->mDef.eDir == OMX_DirInput)
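Taken together, the additions to this file pin down the ordering between a
per-frame setConfig and the input buffer it applies to; a summary of the
sequence implemented above:

// 1. Client calls setConfig(). The subclass's internalSetConfig() sets
//    *frameConfig = true, and setConfig() latches it into mFrameConfig.
// 2. The next emptyThisBuffer() sees mFrameConfig, attaches "frame-config" to
//    the kWhatEmptyThisBuffer message, and clears mFrameConfig, so exactly one
//    input buffer is tagged.
// 3. onMessageReceived() copies the flag into BufferInfo::mFrameConfig, which
//    decoders then test (e.g. inInfo->mFrameConfig in SoftVPX above).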
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index f9f7ec2..e853da9 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -602,13 +602,40 @@
return OMX_ErrorNone;
}
+ case kDescribeHdr10PlusInfoIndex:
+ {
+ if (!supportDescribeHdr10PlusInfo()) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ if (mHdr10PlusOutputs.size() > 0) {
+ auto it = mHdr10PlusOutputs.begin();
+
+ auto info = (*it).get();
+
+ DescribeHDR10PlusInfoParams* outParams =
+ (DescribeHDR10PlusInfoParams *)params;
+
+ outParams->nParamSizeUsed = info->size();
+
+ // If the buffer provided by the client does not have enough
+ // storage, return the size only and do not remove the param yet.
+ if (outParams->nParamSize >= info->size()) {
+ memcpy(outParams->nValue, info->data(), info->size());
+ mHdr10PlusOutputs.erase(it);
+ }
+ return OMX_ErrorNone;
+ }
+ return OMX_ErrorUnderflow;
+ }
+
default:
return OMX_ErrorUnsupportedIndex;
}
}
-OMX_ERRORTYPE SoftVideoDecoderOMXComponent::setConfig(
- OMX_INDEXTYPE index, const OMX_PTR params){
+OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalSetConfig(
+ OMX_INDEXTYPE index, const OMX_PTR params, bool *frameConfig) {
switch ((int)index) {
case kDescribeColorAspectsIndex:
{
@@ -658,11 +685,55 @@
return OMX_ErrorNone;
}
+ case kDescribeHdr10PlusInfoIndex:
+ {
+ if (!supportDescribeHdr10PlusInfo()) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ const DescribeHDR10PlusInfoParams* inParams =
+ (DescribeHDR10PlusInfoParams *)params;
+
+ if (*frameConfig) {
+ // This is a request to append to the current frame config set.
+ // For now, we only support kDescribeHdr10PlusInfoIndex, for which
+ // we simply replace the last set value.
+ if (mHdr10PlusInputs.size() > 0) {
+ *(--mHdr10PlusInputs.end()) = ABuffer::CreateAsCopy(
+ inParams->nValue, inParams->nParamSizeUsed);
+ } else {
+ ALOGW("Ignoring kDescribeHdr10PlusInfoIndex: append to "
+ "frame config while no frame config is present");
+ }
+ } else {
+ // This is a frame config; set *frameConfig to true so that
+ // the next queued input frame is marked to carry it.
+ *frameConfig = true;
+ mHdr10PlusInputs.push_back(ABuffer::CreateAsCopy(
+ inParams->nValue, inParams->nParamSizeUsed));
+ }
+ return OMX_ErrorNone;
+ }
+
default:
return OMX_ErrorUnsupportedIndex;
}
}
+sp<ABuffer> SoftVideoDecoderOMXComponent::dequeueInputFrameConfig() {
+ auto it = mHdr10PlusInputs.begin();
+ sp<ABuffer> info = *it;
+ mHdr10PlusInputs.erase(it);
+ return info;
+}
+
+void SoftVideoDecoderOMXComponent::queueOutputFrameConfig(const sp<ABuffer> &info) {
+ mHdr10PlusOutputs.push_back(info);
+ notify(OMX_EventConfigUpdate,
+ kOutputPortIndex,
+ kDescribeHdr10PlusInfoIndex,
+ NULL);
+}
OMX_ERRORTYPE SoftVideoDecoderOMXComponent::getExtensionIndex(
const char *name, OMX_INDEXTYPE *index) {
@@ -677,6 +748,10 @@
&& supportDescribeHdrStaticInfo()) {
*(int32_t*)index = kDescribeHdrStaticInfoIndex;
return OMX_ErrorNone;
+ } else if (!strcmp(name, "OMX.google.android.index.describeHDR10PlusInfo")
+ && supportDescribeHdr10PlusInfo()) {
+ *(int32_t*)index = kDescribeHdr10PlusInfoIndex;
+ return OMX_ErrorNone;
}
return SimpleSoftOMXComponent::getExtensionIndex(name, index);
@@ -694,6 +769,10 @@
return false;
}
+bool SoftVideoDecoderOMXComponent::supportDescribeHdr10PlusInfo() {
+ return false;
+}
+
void SoftVideoDecoderOMXComponent::onReset() {
mOutputPortSettingsChange = NONE;
}
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/SimpleSoftOMXComponent.h b/media/libstagefright/omx/include/media/stagefright/omx/SimpleSoftOMXComponent.h
index 1d1f2bd..6bbedda 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/SimpleSoftOMXComponent.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/SimpleSoftOMXComponent.h
@@ -20,6 +20,7 @@
#include "SoftOMXComponent.h"
+#include <atomic>
#include <media/stagefright/foundation/AHandlerReflector.h>
#include <utils/RefBase.h>
#include <utils/threads.h>
@@ -28,6 +29,7 @@
namespace android {
struct ALooper;
+struct ABuffer;
struct CodecProfileLevel {
OMX_U32 mProfile;
@@ -49,6 +51,7 @@
struct BufferInfo {
OMX_BUFFERHEADERTYPE *mHeader;
bool mOwnedByUs;
+ bool mFrameConfig;
};
struct PortInfo {
@@ -76,6 +79,9 @@
virtual OMX_ERRORTYPE internalSetParameter(
OMX_INDEXTYPE index, const OMX_PTR params);
+ virtual OMX_ERRORTYPE internalSetConfig(
+ OMX_INDEXTYPE index, const OMX_PTR params, bool *frameConfig);
+
virtual void onQueueFilled(OMX_U32 portIndex);
List<BufferInfo *> &getPortQueue(OMX_U32 portIndex);
@@ -101,6 +107,7 @@
OMX_STATETYPE mTargetState;
Vector<PortInfo> mPorts;
+ std::atomic_bool mFrameConfig;
bool isSetParameterAllowed(
OMX_INDEXTYPE index, const OMX_PTR params) const;
@@ -114,6 +121,9 @@
virtual OMX_ERRORTYPE setParameter(
OMX_INDEXTYPE index, const OMX_PTR params);
+ virtual OMX_ERRORTYPE setConfig(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
virtual OMX_ERRORTYPE useBuffer(
OMX_BUFFERHEADERTYPE **buffer,
OMX_U32 portIndex,
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/SoftVideoDecoderOMXComponent.h b/media/libstagefright/omx/include/media/stagefright/omx/SoftVideoDecoderOMXComponent.h
index 56fc691..3b381ce 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/SoftVideoDecoderOMXComponent.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/SoftVideoDecoderOMXComponent.h
@@ -20,6 +20,7 @@
#include "SimpleSoftOMXComponent.h"
+#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AHandlerReflector.h>
#include <media/stagefright/foundation/ColorUtils.h>
#include <media/IOMX.h>
@@ -28,6 +29,7 @@
#include <utils/RefBase.h>
#include <utils/threads.h>
#include <utils/Vector.h>
+#include <utils/List.h>
namespace android {
@@ -48,6 +50,7 @@
enum {
kDescribeColorAspectsIndex = kPrepareForAdaptivePlaybackIndex + 1,
kDescribeHdrStaticInfoIndex = kPrepareForAdaptivePlaybackIndex + 2,
+ kDescribeHdr10PlusInfoIndex = kPrepareForAdaptivePlaybackIndex + 3,
};
enum {
@@ -68,8 +71,8 @@
virtual OMX_ERRORTYPE getConfig(
OMX_INDEXTYPE index, OMX_PTR params);
- virtual OMX_ERRORTYPE setConfig(
- OMX_INDEXTYPE index, const OMX_PTR params);
+ virtual OMX_ERRORTYPE internalSetConfig(
+ OMX_INDEXTYPE index, const OMX_PTR params, bool *frameConfig);
virtual OMX_ERRORTYPE getExtensionIndex(
const char *name, OMX_INDEXTYPE *index);
@@ -80,6 +83,8 @@
virtual bool supportDescribeHdrStaticInfo();
+ virtual bool supportDescribeHdr10PlusInfo();
+
// This function sets both minimum buffer count and actual buffer count of
// input port to be |numInputBuffers|. It will also set both minimum buffer
// count and actual buffer count of output port to be |numOutputBuffers|.
@@ -166,6 +171,9 @@
// Helper function to dump the ColorAspects.
void dumpColorAspects(const ColorAspects &colorAspects);
+ sp<ABuffer> dequeueInputFrameConfig();
+ void queueOutputFrameConfig(const sp<ABuffer> &info);
+
private:
uint32_t mMinInputBufferSize;
uint32_t mMinCompressionRatio;
@@ -174,6 +182,9 @@
OMX_VIDEO_CODINGTYPE mCodingType;
const CodecProfileLevel *mProfileLevels;
size_t mNumProfileLevels;
+ typedef List<sp<ABuffer> > Hdr10PlusInfoList;
+ Hdr10PlusInfoList mHdr10PlusInputs;
+ Hdr10PlusInfoList mHdr10PlusOutputs;
DISALLOW_EVIL_CONSTRUCTORS(SoftVideoDecoderOMXComponent);
};
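Putting this header together with the SoftVPX and SimpleSoftOMXComponent
changes above, the HDR10+ metadata round trip through a software decoder can
be summarized as:

// Input side:
//   setConfig(kDescribeHdr10PlusInfoIndex, params)
//     -> internalSetConfig(): mHdr10PlusInputs.push_back(copy of nValue),
//        *frameConfig = true
//   the next queued input buffer is tagged (inInfo->mFrameConfig), and the
//   decoder pairs dequeueInputFrameConfig() with that frame's timestamp
//   (SoftVPX::PrivInfo).
// Output side:
//   when the matching decoded frame is produced:
//     -> queueOutputFrameConfig(info): mHdr10PlusOutputs.push_back(info), then
//        notify(OMX_EventConfigUpdate, kOutputPortIndex,
//               kDescribeHdr10PlusInfoIndex, NULL)
//   the client reads it back via getConfig(kDescribeHdr10PlusInfoIndex, ...),
//   retrying with a larger buffer when nParamSize was too small.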
diff --git a/packages/MediaComponents/apex/Android.bp b/packages/MediaComponents/apex/Android.bp
index e797e14..d89eb77 100644
--- a/packages/MediaComponents/apex/Android.bp
+++ b/packages/MediaComponents/apex/Android.bp
@@ -9,6 +9,8 @@
// "Refusing to generate code with unstructured parcelables."
"java/android/media/MediaDescription.aidl",
"java/android/media/MediaMetadata.aidl",
+ // TODO(insun): check why MediaParceledListSlice.aidl should be added here
+ "java/android/media/MediaParceledListSlice.aidl",
"java/android/media/Rating.aidl",
"java/android/media/browse/MediaBrowser.aidl",
"java/android/media/session/MediaSession.aidl",
diff --git a/packages/MediaComponents/apex/java/android/media/MediaParceledListSlice.aidl b/packages/MediaComponents/apex/java/android/media/MediaParceledListSlice.aidl
new file mode 100644
index 0000000..228ea9c
--- /dev/null
+++ b/packages/MediaComponents/apex/java/android/media/MediaParceledListSlice.aidl
@@ -0,0 +1,19 @@
+/* Copyright (C) 2018, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/** @hide */
+parcelable MediaParceledListSlice;
\ No newline at end of file
diff --git a/packages/MediaComponents/apex/java/android/media/MediaParceledListSlice.java b/packages/MediaComponents/apex/java/android/media/MediaParceledListSlice.java
new file mode 100644
index 0000000..ec3fdb7
--- /dev/null
+++ b/packages/MediaComponents/apex/java/android/media/MediaParceledListSlice.java
@@ -0,0 +1,202 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.UnsupportedAppUsage;
+import android.os.Binder;
+import android.os.Build;
+import android.os.IBinder;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.os.RemoteException;
+import android.util.Log;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Transfer a large list of objects across an IPC. Splits into multiple transactions if needed.
+ * Note: Only use classes declared final in order to avoid subclasses overriding reading/writing
+ * parcel logic.
+ *
+ * TODO: Add test for sending large data
+ * @hide
+ */
+public class MediaParceledListSlice<T extends Parcelable> implements Parcelable {
+ private static final String TAG = "MediaParceledListSlice";
+ private static final boolean DEBUG = false;
+
+ private static final int MAX_IPC_SIZE = 64 * 1024; // IBinder.MAX_IPC_SIZE
+
+ final List<T> mList;
+
+ public MediaParceledListSlice(List<T> list) {
+ if (list == null) {
+ throw new IllegalArgumentException("list shouldn't be null");
+ }
+ mList = list;
+ }
+
+ MediaParceledListSlice(Parcel p) {
+ final int itemCount = p.readInt();
+ mList = new ArrayList<>(itemCount);
+ if (DEBUG) {
+ Log.d(TAG, "Retrieving " + itemCount + " items");
+ }
+ if (itemCount <= 0) {
+ return;
+ }
+
+ int i = 0;
+ while (i < itemCount) {
+ if (p.readInt() == 0) {
+ break;
+ }
+
+ final T parcelable = p.readParcelable(null);
+ mList.add(parcelable);
+
+ if (DEBUG) {
+ Log.d(TAG, "Read inline #" + i + ": " + mList.get(mList.size() - 1));
+ }
+ i++;
+ }
+ if (i >= itemCount) {
+ return;
+ }
+ final IBinder retriever = p.readStrongBinder();
+ while (i < itemCount) {
+ if (DEBUG) {
+ Log.d(TAG, "Reading more @" + i + " of " + itemCount + ": retriever=" + retriever);
+ }
+ Parcel data = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+ data.writeInt(i);
+ try {
+ retriever.transact(IBinder.FIRST_CALL_TRANSACTION, data, reply, 0);
+ } catch (RemoteException e) {
+ Log.w(TAG, "Failure retrieving array; only received " + i + " of " + itemCount, e);
+ return;
+ }
+ while (i < itemCount && reply.readInt() != 0) {
+ final T parcelable = reply.readParcelable(null);
+ mList.add(parcelable);
+
+ if (DEBUG) {
+ Log.d(TAG, "Read extra #" + i + ": " + mList.get(mList.size() - 1));
+ }
+ i++;
+ }
+ reply.recycle();
+ data.recycle();
+ }
+ }
+
+ public List<T> getList() {
+ return mList;
+ }
+
+ /**
+ * Write this to another Parcel. Items that fit within MAX_IPC_SIZE are
+ * written inline; the remainder is served lazily through a Binder retriever
+ * that the reading side calls to fetch the rest of the list in chunks.
+ */
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ final int itemCount = mList.size();
+ dest.writeInt(itemCount);
+ if (DEBUG) {
+ Log.d(TAG, "Writing " + itemCount + " items");
+ }
+ if (itemCount > 0) {
+ int i = 0;
+ while (i < itemCount && dest.dataSize() < MAX_IPC_SIZE) {
+ dest.writeInt(1);
+
+ final T parcelable = mList.get(i);
+ dest.writeParcelable(parcelable, flags);
+
+ if (DEBUG) {
+ Log.d(TAG, "Wrote inline #" + i + ": " + mList.get(i));
+ }
+ i++;
+ }
+ if (i < itemCount) {
+ dest.writeInt(0);
+ Binder retriever = new Binder() {
+ @Override
+ protected boolean onTransact(int code, Parcel data, Parcel reply, int flags)
+ throws RemoteException {
+ if (code != FIRST_CALL_TRANSACTION) {
+ return super.onTransact(code, data, reply, flags);
+ }
+ int i = data.readInt();
+ if (DEBUG) {
+ Log.d(TAG, "Writing more @" + i + " of " + itemCount);
+ }
+ while (i < itemCount && reply.dataSize() < MAX_IPC_SIZE) {
+ reply.writeInt(1);
+
+ final T parcelable = mList.get(i);
+ reply.writeParcelable(parcelable, flags);
+
+ if (DEBUG) {
+ Log.d(TAG, "Wrote extra #" + i + ": " + mList.get(i));
+ }
+ i++;
+ }
+ if (i < itemCount) {
+ if (DEBUG) {
+ Log.d(TAG, "Breaking @" + i + " of " + itemCount);
+ }
+ reply.writeInt(0);
+ }
+ return true;
+ }
+ };
+ if (DEBUG) {
+ Log.d(TAG, "Breaking @" + i + " of " + itemCount + ": retriever=" + retriever);
+ }
+ dest.writeStrongBinder(retriever);
+ }
+ }
+ }
+
+ @Override
+ public int describeContents() {
+ int contents = 0;
+ final List<T> list = getList();
+ for (int i = 0; i < list.size(); i++) {
+ contents |= list.get(i).describeContents();
+ }
+ return contents;
+ }
+
+ public static final Parcelable.Creator<MediaParceledListSlice> CREATOR =
+ new Parcelable.Creator<MediaParceledListSlice>() {
+ @Override
+ public MediaParceledListSlice createFromParcel(Parcel in) {
+ return new MediaParceledListSlice(in);
+ }
+
+ @Override
+ public MediaParceledListSlice[] newArray(int size) {
+ return new MediaParceledListSlice[size];
+ }
+ };
+}
diff --git a/packages/MediaComponents/apex/java/android/media/browse/MediaBrowser.java b/packages/MediaComponents/apex/java/android/media/browse/MediaBrowser.java
index 4e091ad..b1b14c6 100644
--- a/packages/MediaComponents/apex/java/android/media/browse/MediaBrowser.java
+++ b/packages/MediaComponents/apex/java/android/media/browse/MediaBrowser.java
@@ -23,8 +23,8 @@
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
-//import android.content.pm.ParceledListSlice;
import android.media.MediaDescription;
+import android.media.MediaParceledListSlice;
import android.media.session.MediaController;
import android.media.session.MediaSession;
import android.os.Binder;
@@ -652,10 +652,8 @@
});
}
- //TODO:(b/119750807) Resolve hidden API usage ParceledListSlice.
- /*
private final void onLoadChildren(final IMediaBrowserServiceCallbacks callback,
- final String parentId, final ParceledListSlice list, final Bundle options) {
+ final String parentId, final MediaParceledListSlice list, final Bundle options) {
mHandler.post(new Runnable() {
@Override
public void run() {
@@ -699,7 +697,6 @@
}
});
}
- */
/**
* Return true if {@code callback} is the current ServiceCallbacks. Also logs if it's not.
@@ -1109,22 +1106,19 @@
}
}
- //TODO:(b/119750807) Resolve hidden API usage ParceledListSlice.
- /*
@Override
- public void onLoadChildren(String parentId, ParceledListSlice list) {
+ public void onLoadChildren(String parentId, MediaParceledListSlice list) {
onLoadChildrenWithOptions(parentId, list, null);
}
@Override
- public void onLoadChildrenWithOptions(String parentId, ParceledListSlice list,
+ public void onLoadChildrenWithOptions(String parentId, MediaParceledListSlice list,
final Bundle options) {
MediaBrowser mediaBrowser = mMediaBrowser.get();
if (mediaBrowser != null) {
mediaBrowser.onLoadChildren(this, parentId, list, options);
}
}
- */
}
private static class Subscription {
diff --git a/packages/MediaComponents/apex/java/android/media/session/ISession.aidl b/packages/MediaComponents/apex/java/android/media/session/ISession.aidl
index 6363ed0..14b1c64 100644
--- a/packages/MediaComponents/apex/java/android/media/session/ISession.aidl
+++ b/packages/MediaComponents/apex/java/android/media/session/ISession.aidl
@@ -16,9 +16,9 @@
package android.media.session;
import android.app.PendingIntent;
-import android.content.pm.ParceledListSlice;
//import android.media.AudioAttributes;
import android.media.MediaMetadata;
+import android.media.MediaParceledListSlice;
import android.media.session.ISessionController;
import android.media.session.PlaybackState;
import android.media.session.MediaSession;
@@ -41,8 +41,7 @@
// These commands are for the TransportPerformer
void setMetadata(in MediaMetadata metadata, long duration, String metadataDescription);
void setPlaybackState(in PlaybackState state);
- //TODO(b/119750807): Resolve hidden API usage ParceledListSlice.
- //void setQueue(in ParceledListSlice queue);
+ void setQueue(in MediaParceledListSlice queue);
void setQueueTitle(CharSequence title);
void setExtras(in Bundle extras);
void setRatingType(int type);
diff --git a/packages/MediaComponents/apex/java/android/media/session/ISessionController.aidl b/packages/MediaComponents/apex/java/android/media/session/ISessionController.aidl
index 031a388..74897f7 100644
--- a/packages/MediaComponents/apex/java/android/media/session/ISessionController.aidl
+++ b/packages/MediaComponents/apex/java/android/media/session/ISessionController.aidl
@@ -17,8 +17,8 @@
import android.app.PendingIntent;
import android.content.Intent;
-//import android.content.pm.ParceledListSlice;
import android.media.MediaMetadata;
+import android.media.MediaParceledListSlice;
import android.media.Rating;
import android.media.session.ISessionControllerCallback;
import android.media.session.MediaSession;
@@ -81,8 +81,7 @@
String action, in Bundle args);
MediaMetadata getMetadata();
PlaybackState getPlaybackState();
- //TODO:(b/119750807) Resolve hidden API usage ParceledListSlice.
- //ParceledListSlice getQueue();
+ MediaParceledListSlice getQueue();
CharSequence getQueueTitle();
Bundle getExtras();
int getRatingType();
diff --git a/packages/MediaComponents/apex/java/android/media/session/ISessionControllerCallback.aidl b/packages/MediaComponents/apex/java/android/media/session/ISessionControllerCallback.aidl
index 173504b..f5cc4f6 100644
--- a/packages/MediaComponents/apex/java/android/media/session/ISessionControllerCallback.aidl
+++ b/packages/MediaComponents/apex/java/android/media/session/ISessionControllerCallback.aidl
@@ -15,8 +15,8 @@
package android.media.session;
-//import android.content.pm.ParceledListSlice;
import android.media.MediaMetadata;
+import android.media.MediaParceledListSlice;
import android.media.session.ParcelableVolumeInfo;
import android.media.session.PlaybackState;
import android.media.session.MediaSession;
@@ -32,8 +32,7 @@
// These callbacks are for the TransportController
void onPlaybackStateChanged(in PlaybackState state);
void onMetadataChanged(in MediaMetadata metadata);
- //TODO:(b/119750807) Resolve hidden API usage ParceledListSlice.
- //void onQueueChanged(in ParceledListSlice queue);
+ void onQueueChanged(in MediaParceledListSlice queue);
void onQueueTitleChanged(CharSequence title);
void onExtrasChanged(in Bundle extras);
void onVolumeInfoChanged(in ParcelableVolumeInfo info);
diff --git a/packages/MediaComponents/apex/java/android/media/session/MediaController.java b/packages/MediaComponents/apex/java/android/media/session/MediaController.java
index 60f74ab..8c3a013 100644
--- a/packages/MediaComponents/apex/java/android/media/session/MediaController.java
+++ b/packages/MediaComponents/apex/java/android/media/session/MediaController.java
@@ -21,10 +21,10 @@
import android.annotation.UnsupportedAppUsage;
import android.app.PendingIntent;
import android.content.Context;
-//import android.content.pm.ParceledListSlice;
import android.media.AudioAttributes;
import android.media.AudioManager;
import android.media.MediaMetadata;
+import android.media.MediaParceledListSlice;
import android.media.Rating;
import android.media.VolumeProvider;
import android.net.Uri;
@@ -243,17 +243,14 @@
* @return The current play queue or null.
*/
public @Nullable List<MediaSession.QueueItem> getQueue() {
- //TODO:(b/119750807) Resolve hidden API usage ParceledListSlice.
- /*
try {
- ParceledListSlice queue = mSessionBinder.getQueue();
+ MediaParceledListSlice queue = mSessionBinder.getQueue();
if (queue != null) {
return queue.getList();
}
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling getQueue.", e);
}
- */
return null;
}
@@ -1102,10 +1099,8 @@
}
}
- //TODO:(b/119750807) Resolve hidden API usage ParceledListSlice.
- /*
@Override
- public void onQueueChanged(ParceledListSlice parceledQueue) {
+ public void onQueueChanged(MediaParceledListSlice parceledQueue) {
List<MediaSession.QueueItem> queue = parceledQueue == null ? null : parceledQueue
.getList();
MediaController controller = mController.get();
@@ -1113,7 +1108,6 @@
controller.postMessage(MSG_UPDATE_QUEUE, queue, null);
}
}
- */
@Override
public void onQueueTitleChanged(CharSequence title) {
diff --git a/packages/MediaComponents/apex/java/android/media/session/MediaSession.java b/packages/MediaComponents/apex/java/android/media/session/MediaSession.java
index 4ebfb8e..943843d 100644
--- a/packages/MediaComponents/apex/java/android/media/session/MediaSession.java
+++ b/packages/MediaComponents/apex/java/android/media/session/MediaSession.java
@@ -24,10 +24,10 @@
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
-//import android.content.pm.ParceledListSlice;
import android.media.AudioAttributes;
import android.media.MediaDescription;
import android.media.MediaMetadata;
+import android.media.MediaParceledListSlice;
import android.media.Rating;
import android.media.VolumeProvider;
import android.media.session.MediaSessionManager.RemoteUserInfo;
@@ -471,14 +471,11 @@
* @param queue A list of items in the play queue.
*/
public void setQueue(@Nullable List<QueueItem> queue) {
- //TODO:(b/119750807) Resolve hidden API usage ParceledListSlice.
- /*
try {
- mBinder.setQueue(queue == null ? null : new ParceledListSlice<QueueItem>(queue));
+ mBinder.setQueue(queue == null ? null : new MediaParceledListSlice<QueueItem>(queue));
} catch (RemoteException e) {
Log.wtf("Dead object in setQueue.", e);
}
- */
}
/**
diff --git a/packages/MediaComponents/apex/java/android/service/media/IMediaBrowserServiceCallbacks.aidl b/packages/MediaComponents/apex/java/android/service/media/IMediaBrowserServiceCallbacks.aidl
index bcc2826..8dc480d 100644
--- a/packages/MediaComponents/apex/java/android/service/media/IMediaBrowserServiceCallbacks.aidl
+++ b/packages/MediaComponents/apex/java/android/service/media/IMediaBrowserServiceCallbacks.aidl
@@ -2,8 +2,8 @@
package android.service.media;
-//import android.content.pm.ParceledListSlice;
import android.graphics.Bitmap;
+import android.media.MediaParceledListSlice;
import android.media.session.MediaSession;
import android.os.Bundle;
@@ -22,7 +22,7 @@
*/
void onConnect(String root, in MediaSession.Token session, in Bundle extras);
void onConnectFailed();
- //TODO:(b/119750807) Resolve hidden API usage ParceledListSlice.
- //void onLoadChildren(String mediaId, in ParceledListSlice list);
- //void onLoadChildrenWithOptions(String mediaId, in ParceledListSlice list, in Bundle options);
+ void onLoadChildren(String mediaId, in MediaParceledListSlice list);
+ void onLoadChildrenWithOptions(String mediaId, in MediaParceledListSlice list,
+ in Bundle options);
}
diff --git a/packages/MediaComponents/apex/java/android/service/media/MediaBrowserService.java b/packages/MediaComponents/apex/java/android/service/media/MediaBrowserService.java
index fa7696e..a66ec35 100644
--- a/packages/MediaComponents/apex/java/android/service/media/MediaBrowserService.java
+++ b/packages/MediaComponents/apex/java/android/service/media/MediaBrowserService.java
@@ -25,7 +25,7 @@
import android.app.Service;
import android.content.Intent;
import android.content.pm.PackageManager;
-//import android.content.pm.ParceledListSlice;
+import android.media.MediaParceledListSlice;
import android.media.browse.MediaBrowser;
import android.media.browse.MediaBrowserUtils;
import android.media.session.MediaSession;
@@ -687,10 +687,8 @@
List<MediaBrowser.MediaItem> filteredList =
(flag & RESULT_FLAG_OPTION_NOT_HANDLED) != 0
? applyOptions(list, options) : list;
- //TODO:(b/119750807) Resolve hidden API usage ParceledListSlice.
- /*
- final ParceledListSlice<MediaBrowser.MediaItem> pls =
- filteredList == null ? null : new ParceledListSlice<>(filteredList);
+ final MediaParceledListSlice<MediaBrowser.MediaItem> pls =
+ filteredList == null ? null : new MediaParceledListSlice<>(filteredList);
try {
connection.callbacks.onLoadChildrenWithOptions(parentId, pls, options);
} catch (RemoteException ex) {
@@ -698,7 +696,6 @@
Log.w(TAG, "Calling onLoadChildren() failed for id=" + parentId
+ " package=" + connection.pkg);
}
- */
}
};
diff --git a/services/audiopolicy/Android.mk b/services/audiopolicy/Android.mk
index 02ab8ad..bfa1b5e 100644
--- a/services/audiopolicy/Android.mk
+++ b/services/audiopolicy/Android.mk
@@ -25,7 +25,11 @@
libmedia_helper \
libmediametrics \
libmediautils \
- libeffectsconfig
+ libeffectsconfig \
+ libsensorprivacy
+
+LOCAL_EXPORT_SHARED_LIBRARY_HEADERS := \
+ libsensorprivacy
LOCAL_STATIC_LIBRARIES := \
libaudiopolicycomponents
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index ee5d6ff..f233971 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -38,6 +38,7 @@
#include <media/AudioEffect.h>
#include <media/AudioParameter.h>
#include <mediautils/ServiceUtilities.h>
+#include <sensorprivacy/SensorPrivacyManager.h>
#include <system/audio.h>
#include <system/audio_policy.h>
@@ -84,6 +85,9 @@
mUidPolicy = new UidPolicy(this);
mUidPolicy->registerSelf();
+
+ mSensorPrivacyPolicy = new SensorPrivacyPolicy(this);
+ mSensorPrivacyPolicy->registerSelf();
}
AudioPolicyService::~AudioPolicyService()
@@ -99,6 +103,9 @@
mUidPolicy->unregisterSelf();
mUidPolicy.clear();
+
+ mSensorPrivacyPolicy->unregisterSelf();
+ mSensorPrivacyPolicy.clear();
}
// A notification client is always registered by AudioSystem when the client process
@@ -375,6 +382,12 @@
bool isAssistantOnTop = false;
bool isSensitiveActive = false;
+ // If sensor privacy is enabled, all recordings should be silenced.
+ if (mSensorPrivacyPolicy->isSensorPrivacyEnabled()) {
+ silenceAllRecordings_l();
+ return;
+ }
+
for (size_t i = 0; i < mAudioRecordClients.size(); i++) {
sp<AudioRecordClient> current = mAudioRecordClients[i];
if (!current->active) continue;
@@ -445,6 +458,13 @@
}
}
+void AudioPolicyService::silenceAllRecordings_l() {
+ for (size_t i = 0; i < mAudioRecordClients.size(); i++) {
+ sp<AudioRecordClient> current = mAudioRecordClients[i];
+ setAppState_l(current->uid, APP_STATE_IDLE);
+ }
+}
+
/* static */
app_state_t AudioPolicyService::apmStatFromAmState(int amState) {
switch (amState) {
@@ -858,6 +878,31 @@
return it != mA11yUids.end();
}
+// ----------- AudioPolicyService::SensorPrivacyPolicy implementation ----------
+void AudioPolicyService::SensorPrivacyPolicy::registerSelf() {
+ SensorPrivacyManager spm;
+ mSensorPrivacyEnabled = spm.isSensorPrivacyEnabled();
+ spm.addSensorPrivacyListener(this);
+}
+
+void AudioPolicyService::SensorPrivacyPolicy::unregisterSelf() {
+ SensorPrivacyManager spm;
+ spm.removeSensorPrivacyListener(this);
+}
+
+bool AudioPolicyService::SensorPrivacyPolicy::isSensorPrivacyEnabled() {
+ return mSensorPrivacyEnabled;
+}
+
+binder::Status AudioPolicyService::SensorPrivacyPolicy::onSensorPrivacyChanged(bool enabled) {
+ mSensorPrivacyEnabled = enabled;
+ sp<AudioPolicyService> service = mService.promote();
+ if (service != nullptr) {
+ service->updateUidStates();
+ }
+ return binder::Status::ok();
+}
+
// ----------- AudioPolicyService::AudioCommandThread implementation ----------
AudioPolicyService::AudioCommandThread::AudioCommandThread(String8 name,
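The listener implementation above follows a common binder-callback pattern: onSensorPrivacyChanged() stores the new flag into an atomic, then promotes a weak reference to the service before re-running the policy update, so the callback never extends the service's lifetime and isSensorPrivacyEnabled() needs no lock. A minimal standalone C++ sketch of that pattern, using std::weak_ptr/std::shared_ptr in place of Android's wp<>/sp<> and a plain class in place of BnSensorPrivacyListener (all names here are illustrative, not part of the patch):

#include <atomic>
#include <cstdio>
#include <memory>
#include <utility>

// Stand-in for the service whose state depends on the privacy flag.
struct Service {
    void updateUidStates() { std::puts("re-evaluating recording clients"); }
};

// Stand-in for the binder listener: holds only a weak reference so the
// callback cannot keep the service alive, and caches the flag atomically.
class PrivacyListener {
public:
    explicit PrivacyListener(std::weak_ptr<Service> service)
        : mService(std::move(service)) {}

    bool isSensorPrivacyEnabled() const { return mEnabled.load(); }

    void onSensorPrivacyChanged(bool enabled) {
        mEnabled.store(enabled);
        if (auto service = mService.lock()) {  // same role as wp<>::promote()
            service->updateUidStates();
        }
    }

private:
    std::weak_ptr<Service> mService;
    std::atomic<bool> mEnabled{false};
};

int main() {
    auto service = std::make_shared<Service>();
    PrivacyListener listener(service);
    listener.onSensorPrivacyChanged(true);   // promotes and triggers an update
    service.reset();
    listener.onSensorPrivacyChanged(false);  // service gone; promotion fails, no dangling call
    return 0;
}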
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 23c3daa..45d37dc 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -33,6 +33,7 @@
#include <media/AudioPolicy.h>
#include "AudioPolicyEffects.h"
#include "managerdefault/AudioPolicyManager.h"
+#include <android/hardware/BnSensorPrivacyListener.h>
#include <unordered_map>
@@ -279,6 +280,8 @@
void updateUidStates();
void updateUidStates_l();
+ void silenceAllRecordings_l();
+
static bool isPrivacySensitive(audio_source_t source);
// If recording we need to make sure the UID is allowed to do that. If the UID is idle
@@ -334,6 +337,27 @@
std::vector<uid_t> mA11yUids;
};
+ // If sensor privacy is enabled then all apps, including those that are active, should be
+ // prevented from recording. This is handled similarly to idle UIDs: any app that attempts
+ // to record while sensor privacy is enabled receives buffers filled with zeros. As soon as
+ // sensor privacy is disabled, active apps again receive the expected data when recording.
+ class SensorPrivacyPolicy : public hardware::BnSensorPrivacyListener {
+ public:
+ explicit SensorPrivacyPolicy(wp<AudioPolicyService> service)
+ : mService(service) {}
+
+ void registerSelf();
+ void unregisterSelf();
+
+ bool isSensorPrivacyEnabled();
+
+ binder::Status onSensorPrivacyChanged(bool enabled);
+
+ private:
+ wp<AudioPolicyService> mService;
+ std::atomic_bool mSensorPrivacyEnabled = false;
+ };
+
// Thread used to send audio config commands to audio flinger
// For audio config commands, it is necessary because audio flinger requires that the calling
// process (user) has permission to modify audio settings.
@@ -718,6 +742,8 @@
audio_mode_t mPhoneState;
sp<UidPolicy> mUidPolicy;
+ sp<SensorPrivacyPolicy> mSensorPrivacyPolicy;
+
DefaultKeyedVector< audio_port_handle_t, sp<AudioRecordClient> > mAudioRecordClients;
DefaultKeyedVector< audio_port_handle_t, sp<AudioPlaybackClient> > mAudioPlaybackClients;
};
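The class comment above describes the silencing model: clients are not disconnected, they simply read zeros while the flag is set, exactly as idle UIDs do. A rough standalone sketch of that idea (illustrative only; in the real system the zero-fill happens inside audioserver's capture path once setAppState_l() marks the client APP_STATE_IDLE, not in this class):

#include <cstdint>
#include <cstdio>
#include <cstring>

enum class AppState { Normal, Idle };

// When a client is Idle (and sensor privacy marks every client Idle),
// the capture path hands back silence instead of microphone data.
void readCaptureBuffer(AppState state, const int16_t *source,
                       int16_t *dest, size_t frames) {
    if (state == AppState::Idle) {
        std::memset(dest, 0, frames * sizeof(int16_t));   // buffers of zeros
    } else {
        std::memcpy(dest, source, frames * sizeof(int16_t));
    }
}

int main() {
    const int16_t mic[4] = {100, -200, 300, -400};
    int16_t out[4];
    readCaptureBuffer(AppState::Idle, mic, out, 4);
    std::printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);  // prints: 0 0 0 0
    return 0;
}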
diff --git a/services/mediacodec/main_swcodecservice.cpp b/services/mediacodec/main_swcodecservice.cpp
index 386abb2..79fea25 100644
--- a/services/mediacodec/main_swcodecservice.cpp
+++ b/services/mediacodec/main_swcodecservice.cpp
@@ -37,6 +37,12 @@
static const char kVendorSeccompPolicyPath[] =
"/vendor/etc/seccomp_policy/mediacodec.policy";
+// Disable Scudo's deallocation-type mismatch check, as it is being
+// triggered by some third-party code.
+extern "C" const char *__scudo_default_options() {
+ return "DeallocationTypeMismatch=false";
+}
+
int main(int argc __unused, char** /*argv*/)
{
LOG(INFO) << "media swcodec service starting";
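For context on the override above: Scudo's DeallocationTypeMismatch option makes the allocator abort the process when memory is released through a different allocation family than created it (e.g. `new` paired with free(), or `new[]` paired with plain `delete`). A minimal standalone sketch of both the offending pattern and the same weak-symbol hook (only meaningful when the binary is actually linked against Scudo; not part of the patch):

#include <cstdlib>

// Same hook as in main_swcodecservice.cpp: Scudo calls this weak symbol at
// startup to pick up process-wide default options.
extern "C" const char *__scudo_default_options() {
    return "DeallocationTypeMismatch=false";
}

int main() {
    int *p = new int(42);
    // Mismatched pair: memory obtained with `new` released with free().
    // With DeallocationTypeMismatch=true (the hardened default) Scudo
    // aborts here; with the override above it lets the call through.
    free(p);  // undefined behavior in portable C++; shown only to illustrate the check
    return 0;
}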