Capture HDR thumbnail from surface control
bug: 113609172
bug: 135717526
test:
- Push HDR clips to /sdcard/ and reboot so the media scanner picks
  them up, then check in the Files and Photos apps that the
  generated thumbnails show HDR color.
- Run local tests that use MediaMetadataRetriever to extract
  thumbnails of the HDR clips and verify that the output bitmap is
  HDR (see the sketch below).
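A minimal native-layer sketch of the second test above, assuming the
libmedia C++ MediaMetadataRetriever API (setDataSource/getFrameAtTime)
and a hypothetical clip path; it only illustrates how the HDR path in
FrameDecoder gets exercised and is not part of this change:

    // Hypothetical test driver (not part of this change).
    #include <fcntl.h>
    #include <unistd.h>
    #include <binder/IMemory.h>
    #include <binder/ProcessState.h>
    #include <media/mediametadataretriever.h>
    #include <private/media/VideoFrame.h>
    #include <system/graphics.h>
    #include <utils/Log.h>

    using namespace android;

    int main() {
        // Needed so the retriever can talk to the media server over binder.
        ProcessState::self()->startThreadPool();

        // Hypothetical HDR (BT.2020 / ST2084) test clip pushed to /sdcard/.
        int fd = open("/sdcard/hdr_clip.mp4", O_RDONLY);
        if (fd < 0) return 1;
        int64_t length = lseek(fd, 0, SEEK_END);

        MediaMetadataRetriever retriever;
        retriever.setDataSource(fd, 0 /*offset*/, length);

        // Request an RGBA_8888 thumbnail at the clip's thumbnail time. For an
        // HDR clip, FrameDecoder renders to its internal SurfaceControl and
        // captures the composed frame from the layer.
        sp<IMemory> mem = retriever.getFrameAtTime(
                -1 /*timeUs: use thumbnail time*/,
                0 /*option: OPTION_PREVIOUS_SYNC*/,
                HAL_PIXEL_FORMAT_RGBA_8888,
                false /*metaOnly*/);
        if (mem == nullptr) { close(fd); return 1; }

        VideoFrame *frame = static_cast<VideoFrame*>(mem->pointer());
        ALOGI("thumbnail %ux%u, bpp %u",
              frame->mWidth, frame->mHeight, frame->mBytesPerPixel);
        close(fd);
        return 0;
    }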
Change-Id: I5814e05fd723d23994c2d6961021a1f911db7056
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 7c620a0..c6ec6de 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -21,6 +21,7 @@
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
#include <inttypes.h>
#include <media/ICrypto.h>
#include <media/IMediaSource.h>
@@ -28,6 +29,7 @@
#include <media/stagefright/foundation/avc_utils.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ColorUtils.h>
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
@@ -44,7 +46,7 @@
sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
- int32_t dstBpp, bool metaOnly = false) {
+ int32_t dstBpp, bool allocRotated, bool metaOnly) {
int32_t rotationAngle;
if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
rotationAngle = 0; // By default, no rotation
@@ -74,6 +76,14 @@
displayHeight = height;
}
+ if (allocRotated && (rotationAngle == 90 || rotationAngle == 270)) {
+ int32_t tmp;
+ tmp = width; width = height; height = tmp;
+ tmp = displayWidth; displayWidth = displayHeight; displayHeight = tmp;
+ tmp = tileWidth; tileWidth = tileHeight; tileHeight = tmp;
+ rotationAngle = 0;
+ }
+
VideoFrame frame(width, height, displayWidth, displayHeight,
tileWidth, tileHeight, rotationAngle, dstBpp, !metaOnly, iccSize);
@@ -94,6 +104,20 @@
return frameMem;
}
+sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
+ int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
+ int32_t dstBpp, bool allocRotated = false) {
+ return allocVideoFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp,
+ allocRotated, false /*metaOnly*/);
+}
+
+sp<IMemory> allocMetaFrame(const sp<MetaData>& trackMeta,
+ int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
+ int32_t dstBpp) {
+ return allocVideoFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp,
+ false /*allocRotated*/, true /*metaOnly*/);
+}
+
bool findThumbnailInfo(
const sp<MetaData> &trackMeta, int32_t *width, int32_t *height,
uint32_t *type = NULL, const void **data = NULL, size_t *size = NULL) {
@@ -117,23 +141,27 @@
bool getDstColorFormat(
android_pixel_format_t colorFormat,
OMX_COLOR_FORMATTYPE *dstFormat,
+ ui::PixelFormat *captureFormat,
int32_t *dstBpp) {
switch (colorFormat) {
case HAL_PIXEL_FORMAT_RGB_565:
{
*dstFormat = OMX_COLOR_Format16bitRGB565;
+ *captureFormat = ui::PixelFormat::RGB_565;
*dstBpp = 2;
return true;
}
case HAL_PIXEL_FORMAT_RGBA_8888:
{
*dstFormat = OMX_COLOR_Format32BitRGBA8888;
+ *captureFormat = ui::PixelFormat::RGBA_8888;
*dstBpp = 4;
return true;
}
case HAL_PIXEL_FORMAT_BGRA_8888:
{
*dstFormat = OMX_COLOR_Format32bitBGRA8888;
+ *captureFormat = ui::PixelFormat::BGRA_8888;
*dstBpp = 4;
return true;
}
@@ -150,9 +178,10 @@
sp<IMemory> FrameDecoder::getMetadataOnly(
const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail) {
OMX_COLOR_FORMATTYPE dstFormat;
+ ui::PixelFormat captureFormat;
int32_t dstBpp;
- if (!getDstColorFormat(
- (android_pixel_format_t)colorFormat, &dstFormat, &dstBpp)) {
+ if (!getDstColorFormat((android_pixel_format_t)colorFormat,
+ &dstFormat, &captureFormat, &dstBpp)) {
return NULL;
}
@@ -170,8 +199,7 @@
tileWidth = tileHeight = 0;
}
}
- return allocVideoFrame(trackMeta,
- width, height, tileWidth, tileHeight, dstBpp, true /*metaOnly*/);
+ return allocMetaFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp);
}
FrameDecoder::FrameDecoder(
@@ -194,15 +222,30 @@
}
}
+bool isHDR(const sp<AMessage> &format) {
+ uint32_t standard, range, transfer;
+ if (!format->findInt32("color-standard", (int32_t*)&standard)) {
+ standard = 0;
+ }
+ if (!format->findInt32("color-range", (int32_t*)&range)) {
+ range = 0;
+ }
+ if (!format->findInt32("color-transfer", (int32_t*)&transfer)) {
+ transfer = 0;
+ }
+ return standard == ColorUtils::kColorStandardBT2020 &&
+ transfer == ColorUtils::kColorTransferST2084;
+}
+
status_t FrameDecoder::init(
int64_t frameTimeUs, int option, int colorFormat) {
- if (!getDstColorFormat(
- (android_pixel_format_t)colorFormat, &mDstFormat, &mDstBpp)) {
+ if (!getDstColorFormat((android_pixel_format_t)colorFormat,
+ &mDstFormat, &mCaptureFormat, &mDstBpp)) {
return ERROR_UNSUPPORTED;
}
sp<AMessage> videoFormat = onGetFormatAndSeekOptions(
- frameTimeUs, option, &mReadOptions);
+ frameTimeUs, option, &mReadOptions, &mSurface);
if (videoFormat == NULL) {
ALOGE("video format or seek mode not supported");
return ERROR_UNSUPPORTED;
@@ -219,7 +262,7 @@
}
err = decoder->configure(
- videoFormat, NULL /* surface */, NULL /* crypto */, 0 /* flags */);
+ videoFormat, mSurface, NULL /* crypto */, 0 /* flags */);
if (err != OK) {
ALOGW("configure returned error %d (%s)", err, asString(err));
decoder->release();
@@ -367,8 +410,13 @@
ALOGE("failed to get output buffer %zu", index);
break;
}
- err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
- mDecoder->releaseOutputBuffer(index);
+ if (mSurface != nullptr) {
+ mDecoder->renderOutputBufferAndRelease(index);
+ err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
+ } else {
+ err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
+ mDecoder->releaseOutputBuffer(index);
+ }
} else {
ALOGW("Received error %d (%s) instead of output", err, asString(err));
done = true;
@@ -399,7 +447,9 @@
}
sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
- int64_t frameTimeUs, int seekMode, MediaSource::ReadOptions *options) {
+ int64_t frameTimeUs, int seekMode,
+ MediaSource::ReadOptions *options,
+ sp<Surface> *window) {
mSeekMode = static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
if (mSeekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
mSeekMode > MediaSource::ReadOptions::SEEK_FRAME_INDEX) {
@@ -446,6 +496,16 @@
videoFormat->setInt32("android._num-input-buffers", 1);
videoFormat->setInt32("android._num-output-buffers", 1);
}
+
+ if (isHDR(videoFormat)) {
+ *window = initSurfaceControl();
+ if (*window == NULL) {
+ ALOGE("Failed to init surface control for HDR, fallback to non-hdr");
+ } else {
+ videoFormat->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
+ }
+ }
+
return videoFormat;
}
@@ -490,13 +550,22 @@
int32_t width, height, stride, srcFormat;
if (!outputFormat->findInt32("width", &width) ||
!outputFormat->findInt32("height", &height) ||
- !outputFormat->findInt32("stride", &stride) ||
!outputFormat->findInt32("color-format", &srcFormat)) {
ALOGE("format missing dimension or color: %s",
outputFormat->debugString().c_str());
return ERROR_MALFORMED;
}
+ if (!outputFormat->findInt32("stride", &stride)) {
+ if (mSurfaceControl == NULL) {
+ ALOGE("format must have stride for byte buffer mode: %s",
+ outputFormat->debugString().c_str());
+ return ERROR_MALFORMED;
+ }
+ // for surface output, set stride to width; the actual value is not used.
+ stride = width;
+ }
+
int32_t crop_left, crop_top, crop_right, crop_bottom;
if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
crop_left = crop_top = 0;
@@ -511,12 +580,17 @@
(crop_bottom - crop_top + 1),
0,
0,
- dstBpp());
+ dstBpp(),
+ mSurfaceControl != nullptr /*allocRotated*/);
mFrame = static_cast<VideoFrame*>(frameMem->pointer());
setFrame(frameMem);
}
+ if (mSurfaceControl != nullptr) {
+ return captureSurfaceControl();
+ }
+
ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
uint32_t standard, range, transfer;
@@ -547,6 +621,101 @@
return ERROR_UNSUPPORTED;
}
+sp<Surface> VideoFrameDecoder::initSurfaceControl() {
+ sp<SurfaceComposerClient> client = new SurfaceComposerClient();
+ if (client->initCheck() != NO_ERROR) {
+ ALOGE("failed to get SurfaceComposerClient");
+ return NULL;
+ }
+
+ // create a container layer to hold the capture layer, so that we can
+ // use a full-frame crop when capturing. Without the container, the crop
+ // would be limited to the display size.
+ sp<SurfaceControl> parent = client->createSurface(
+ String8("parent"),
+ 0 /* width */, 0 /* height */,
+ PIXEL_FORMAT_RGBA_8888,
+ ISurfaceComposerClient::eFXSurfaceContainer);
+
+ if (!parent) {
+ ALOGE("failed to get surface control parent");
+ return NULL;
+ }
+
+ // create the surface with a placeholder 1x1 size for now; the real size
+ // will be set before the capture, once the output format is known.
+ sp<SurfaceControl> surfaceControl = client->createSurface(
+ String8("thumbnail"),
+ 1 /* width */, 1 /* height */,
+ PIXEL_FORMAT_RGBA_8888,
+ ISurfaceComposerClient::eFXSurfaceBufferQueue,
+ parent.get());
+
+ if (!surfaceControl) {
+ ALOGE("failed to get surface control");
+ return NULL;
+ }
+
+ SurfaceComposerClient::Transaction t;
+ t.hide(parent)
+ .show(surfaceControl)
+ .apply(true);
+
+ mSurfaceControl = surfaceControl;
+ mParent = parent;
+
+ return surfaceControl->getSurface();
+}
+
+status_t VideoFrameDecoder::captureSurfaceControl() {
+ // set the layer size to the output size before the capture
+ SurfaceComposerClient::Transaction()
+ .setSize(mSurfaceControl, mFrame->mWidth, mFrame->mHeight)
+ .apply(true);
+
+ sp<GraphicBuffer> outBuffer;
+ status_t err = ScreenshotClient::captureChildLayers(
+ mParent->getHandle(),
+ ui::Dataspace::V0_SRGB,
+ captureFormat(),
+ Rect(0, 0, mFrame->mWidth, mFrame->mHeight),
+ {},
+ 1.0f /*frameScale*/,
+ &outBuffer);
+
+ if (err != OK) {
+ ALOGE("failed to captureLayers: err %d", err);
+ return err;
+ }
+
+ ALOGV("capture: %dx%d, format %d, stride %d",
+ outBuffer->getWidth(),
+ outBuffer->getHeight(),
+ outBuffer->getPixelFormat(),
+ outBuffer->getStride());
+
+ uint8_t *base;
+ int32_t outBytesPerPixel, outBytesPerStride;
+ err = outBuffer->lock(
+ GraphicBuffer::USAGE_SW_READ_OFTEN,
+ reinterpret_cast<void**>(&base),
+ &outBytesPerPixel,
+ &outBytesPerStride);
+ if (err != OK) {
+ ALOGE("failed to lock graphic buffer: err %d", err);
+ return err;
+ }
+
+ uint8_t *dst = mFrame->getFlattenedData();
+ for (size_t y = 0 ; y < fmin(mFrame->mHeight, outBuffer->getHeight()) ; y++) {
+ memcpy(dst, base, fmin(mFrame->mWidth, outBuffer->getWidth()) * mFrame->mBytesPerPixel);
+ dst += mFrame->mRowBytes;
+ base += outBuffer->getStride() * mFrame->mBytesPerPixel;
+ }
+ outBuffer->unlock();
+ return OK;
+}
+
////////////////////////////////////////////////////////////////////////
ImageDecoder::ImageDecoder(
@@ -566,7 +735,8 @@
}
sp<AMessage> ImageDecoder::onGetFormatAndSeekOptions(
- int64_t frameTimeUs, int /*seekMode*/, MediaSource::ReadOptions *options) {
+ int64_t frameTimeUs, int /*seekMode*/,
+ MediaSource::ReadOptions *options, sp<Surface> * /*window*/) {
sp<MetaData> overrideMeta;
if (frameTimeUs < 0) {
uint32_t type;
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index ce37ae8..1af6276 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -24,15 +24,17 @@
#include <media/stagefright/foundation/ABase.h>
#include <media/MediaSource.h>
#include <media/openmax/OMX_Video.h>
-#include <system/graphics-base.h>
+#include <ui/GraphicTypes.h>
namespace android {
struct AMessage;
-class MediaCodecBuffer;
-class IMediaSource;
-class VideoFrame;
struct MediaCodec;
+class IMediaSource;
+class MediaCodecBuffer;
+class Surface;
+class SurfaceControl;
+class VideoFrame;
struct FrameRect {
int32_t left, top, right, bottom;
@@ -57,7 +59,8 @@
virtual sp<AMessage> onGetFormatAndSeekOptions(
int64_t frameTimeUs,
int seekMode,
- MediaSource::ReadOptions *options) = 0;
+ MediaSource::ReadOptions *options,
+ sp<Surface> *window) = 0;
virtual status_t onExtractRect(FrameRect *rect) = 0;
@@ -75,6 +78,7 @@
sp<MetaData> trackMeta() const { return mTrackMeta; }
OMX_COLOR_FORMATTYPE dstFormat() const { return mDstFormat; }
+ ui::PixelFormat captureFormat() const { return mCaptureFormat; }
int32_t dstBpp() const { return mDstBpp; }
void setFrame(const sp<IMemory> &frameMem) { mFrameMemory = frameMem; }
@@ -83,6 +87,7 @@
sp<MetaData> mTrackMeta;
sp<IMediaSource> mSource;
OMX_COLOR_FORMATTYPE mDstFormat;
+ ui::PixelFormat mCaptureFormat;
int32_t mDstBpp;
sp<IMemory> mFrameMemory;
MediaSource::ReadOptions mReadOptions;
@@ -90,6 +95,7 @@
sp<AMessage> mOutputFormat;
bool mHaveMoreInputs;
bool mFirstSample;
+ sp<Surface> mSurface;
status_t extractInternal();
@@ -106,7 +112,8 @@
virtual sp<AMessage> onGetFormatAndSeekOptions(
int64_t frameTimeUs,
int seekMode,
- MediaSource::ReadOptions *options) override;
+ MediaSource::ReadOptions *options,
+ sp<Surface> *window) override;
virtual status_t onExtractRect(FrameRect *rect) override {
// Rect extraction for sequences is not supported for now.
@@ -126,10 +133,15 @@
bool *done) override;
private:
+ sp<SurfaceControl> mSurfaceControl;
+ sp<SurfaceControl> mParent;
VideoFrame *mFrame;
bool mIsAvcOrHevc;
MediaSource::ReadOptions::SeekMode mSeekMode;
int64_t mTargetTimeUs;
+
+ sp<Surface> initSurfaceControl();
+ status_t captureSurfaceControl();
};
struct ImageDecoder : public FrameDecoder {
@@ -142,7 +154,8 @@
virtual sp<AMessage> onGetFormatAndSeekOptions(
int64_t frameTimeUs,
int seekMode,
- MediaSource::ReadOptions *options) override;
+ MediaSource::ReadOptions *options,
+ sp<Surface> *window) override;
virtual status_t onExtractRect(FrameRect *rect) override;