Merge "DO NOT MERGE SoftAvcEnc:Configured constrained_intra_pred flag in internalSetParameter" into nyc-dev
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index bd633f7..2365323 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -242,8 +242,10 @@
IOMX::node_id mNode;
sp<MemoryDealer> mDealer[2];
+ bool mUsingNativeWindow;
sp<ANativeWindow> mNativeWindow;
int mNativeWindowUsageBits;
+ sp<AMessage> mConfigFormat;
sp<AMessage> mInputFormat;
sp<AMessage> mOutputFormat;
sp<AMessage> mBaseOutputFormat;
@@ -343,21 +345,72 @@
status_t setSupportedOutputFormat(bool getLegacyFlexibleFormat);
status_t setupVideoDecoder(
- const char *mime, const sp<AMessage> &msg, bool usingNativeBuffers,
+ const char *mime, const sp<AMessage> &msg, bool usingNativeBuffers, bool haveSwRenderer,
sp<AMessage> &outputformat);
status_t setupVideoEncoder(
- const char *mime, const sp<AMessage> &msg, sp<AMessage> &outputformat);
+ const char *mime, const sp<AMessage> &msg,
+ sp<AMessage> &outputformat, sp<AMessage> &inputformat);
status_t setVideoFormatOnPort(
OMX_U32 portIndex,
int32_t width, int32_t height,
OMX_VIDEO_CODINGTYPE compressionFormat, float frameRate = -1.0);
- status_t setColorAspects(
- OMX_U32 portIndex, int32_t width, int32_t height, const sp<AMessage> &msg,
- sp<AMessage> &format);
- status_t getColorAspects(OMX_U32 portIndex, sp<AMessage> &format);
+ // gets the describeColorAspects extension index, or sets it to 0 on error. Returns the
+ // error from the codec.
+ status_t initDescribeColorAspectsIndex();
+
+ // sets |params|. If |readBack| is true, it reads the values back afterwards when the set succeeded.
+ // returns the codec error.
+ status_t setCodecColorAspects(DescribeColorAspectsParams &params, bool readBack = false);
+
+ // gets |params|; returns the codec error. |params| should not change on error.
+ status_t getCodecColorAspects(DescribeColorAspectsParams &params);
+
+ // gets dataspace guidance from codec and platform. |params| should be set up with the color
+ // aspects to use. If |tryCodec| is true, the codec is queried first. If it succeeds, we
+ // return OK. Otherwise, we fall back to the platform guidance and return the codec error,
+ // except that a codec failure with UNSUPPORTED is treated as OK, as codec guidance is optional.
+ status_t getDataSpace(
+ DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
+ bool tryCodec);
+
+ // sets color aspects for the video decoder for certain |width/height| based on |configFormat|,
+ // and sets the resulting color config into |outputFormat|. If |usingNativeWindow| is true, we
+ // use video defaults if the config is unspecified. Returns the error from the codec.
+ status_t setColorAspectsForVideoDecoder(
+ int32_t width, int32_t height, bool usingNativeWindow,
+ const sp<AMessage> &configFormat, sp<AMessage> &outputFormat);
+
+ // gets color aspects for the video decoder for certain |width/height| based on |configFormat|,
+ // and sets the resulting color config into |outputFormat|. If |dataSpace| is non-null, it
+ // requests dataspace guidance from the codec and platform and sets it into |dataSpace|.
+ // Returns the error from the codec.
+ status_t getColorAspectsAndDataSpaceForVideoDecoder(
+ int32_t width, int32_t height, const sp<AMessage> &configFormat,
+ sp<AMessage> &outputFormat, android_dataspace *dataSpace);
+
+ // sets color aspects for the video encoder in bytebuffer mode for certain |configFormat|,
+ // and sets the resulting color config into |outputFormat|. For mediarecorder, it also sets
+ // the dataspace into |inputFormat|. Returns the error from the codec.
+ status_t setColorAspectsForVideoEncoder(
+ const sp<AMessage> &configFormat,
+ sp<AMessage> &outputFormat, sp<AMessage> &inputFormat);
+
+ // sets color aspects for the video encoder in surface mode. This basically sets the default
+ // video values for unspecified aspects and sets the dataspace to use in the input format.
+ // Also sets the dataspace into |dataSpace|.
+ // Returns any codec errors during this configuration, except for optional steps.
+ status_t setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(
+ android_dataspace *dataSpace /* nonnull */);
+
+ // gets color aspects for the video encoder input port and sets them into the |format|.
+ // Returns any codec errors.
+ status_t getInputColorAspectsForVideoEncoder(sp<AMessage> &format);
+
+ // updates the encoder output format with |aspects|, defaulting to the |dataSpace| fields
+ // for unspecified values.
+ void onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects);
typedef struct drcParams {
int32_t drcCut;
diff --git a/include/media/stagefright/MediaCodecSource.h b/include/media/stagefright/MediaCodecSource.h
index e3f3f5e..5f10487 100644
--- a/include/media/stagefright/MediaCodecSource.h
+++ b/include/media/stagefright/MediaCodecSource.h
@@ -115,8 +115,8 @@
bool mStopping;
bool mDoMoreWorkPending;
bool mSetEncoderFormat;
- int mEncoderFormat;
- int mEncoderDataSpace;
+ int32_t mEncoderFormat;
+ int32_t mEncoderDataSpace;
sp<AMessage> mEncoderActivityNotify;
sp<IGraphicBufferProducer> mGraphicBufferProducer;
sp<IGraphicBufferConsumer> mGraphicBufferConsumer;
diff --git a/include/media/stagefright/foundation/ColorUtils.h b/include/media/stagefright/foundation/ColorUtils.h
index b95c80b..c4971bf 100644
--- a/include/media/stagefright/foundation/ColorUtils.h
+++ b/include/media/stagefright/foundation/ColorUtils.h
@@ -129,11 +129,37 @@
static status_t convertCodecColorAspectsToPlatformAspects(
const ColorAspects &aspects, int32_t *range, int32_t *standard, int32_t *transfer);
- // updates unspecified range, standard and transfer values to their defaults
- static void setDefaultPlatformColorAspectsIfNeeded(
- int32_t &range, int32_t &standard, int32_t &transfer, int32_t width, int32_t height);
+ // updates Unspecified color aspects to their defaults based on the video size
static void setDefaultCodecColorAspectsIfNeeded(
ColorAspects &aspects, int32_t width, int32_t height);
+
+ // returns the closest dataSpace for the given color |aspects|. If |mayExpand| is true, it allows
+ // returning a larger dataSpace that contains the color space given by |aspects| and is better
+ // suited to blending. This requires implicit color space conversion on the part of the device.
+ static android_dataspace getDataSpaceForColorAspects(ColorAspects &aspects, bool mayExpand);
+
+ // converts |dataSpace| to a V0 enum, and returns true if dataSpace is an aspect-only value
+ static bool convertDataSpaceToV0(android_dataspace &dataSpace);
+
+ // compares |aspects| to |orig|. Returns |true| if any aspects have changed, except if they
+ // changed to Unspecified. It also sets the changed values to Unspecified in |aspects|.
+ static bool checkIfAspectsChangedAndUnspecifyThem(
+ ColorAspects &aspects, const ColorAspects &orig, bool usePlatformAspects = false);
+
+ // finds the color config in |format|, defaulting missing entries to 0.
+ static void getColorConfigFromFormat(
+ const sp<AMessage> &format, int32_t *range, int32_t *standard, int32_t *transfer);
+
+ // copies existing color config from |source| to |target|.
+ static void copyColorConfig(const sp<AMessage> &source, sp<AMessage> &target);
+
+ // finds the color config in |format| as ColorAspects, defaulting missing entries to Unspecified.
+ static void getColorAspectsFromFormat(const sp<AMessage> &format, ColorAspects &aspects);
+
+ // writes |aspects| into |format|. If |force| is false, Unspecified values are not
+ // written.
+ static void setColorAspectsIntoFormat(
+ const ColorAspects &aspects, sp<AMessage> &format, bool force = false);
};
inline static const char *asString(android::ColorUtils::ColorStandard i, const char *def = "??") {
diff --git a/media/libmedia/IMediaSource.cpp b/media/libmedia/IMediaSource.cpp
index 1770fb8..8376c0a 100644
--- a/media/libmedia/IMediaSource.cpp
+++ b/media/libmedia/IMediaSource.cpp
@@ -18,8 +18,6 @@
#define LOG_TAG "BpMediaSource"
#include <utils/Log.h>
-#include <utils/CallStack.h>
-
#include <inttypes.h>
#include <stdint.h>
#include <sys/types.h>
@@ -111,16 +109,9 @@
BpMediaSource(const sp<IBinder>& impl)
: BpInterface<IMediaSource>(impl)
{
- mStarted = false;
}
virtual status_t start(MetaData *params) {
- if (mStarted) {
- ALOGD("Source was started previously from:");
- mStartStack.log(LOG_TAG);
- ALOGD("Now from:");
- CallStack stack(LOG_TAG);
- }
ALOGV("start");
Parcel data, reply;
data.writeInterfaceToken(BpMediaSource::getInterfaceDescriptor());
@@ -128,10 +119,6 @@
params->writeToParcel(data);
}
status_t ret = remote()->transact(START, data, &reply);
- if (ret == NO_ERROR) {
- mStarted = true;
- mStartStack.update();
- }
if (ret == NO_ERROR && params) {
ALOGW("ignoring potentially modified MetaData from start");
ALOGW("input:");
@@ -144,7 +131,6 @@
}
virtual status_t stop() {
- mStarted = false;
ALOGV("stop");
Parcel data, reply;
data.writeInterfaceToken(BpMediaSource::getInterfaceDescriptor());
@@ -219,8 +205,7 @@
// NuPlayer passes pointers-to-metadata around, so we use this to keep the metadata alive
// XXX: could we use this for caching, or does metadata change on the fly?
sp<MetaData> mMetaData;
- bool mStarted;
- CallStack mStartStack;
};
IMPLEMENT_META_INTERFACE(MediaSource, "android.media.IMediaSource");
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 26362ec..1b248db 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -1571,6 +1571,9 @@
if (cameraSource == NULL) {
flags |= MediaCodecSource::FLAG_USE_SURFACE_INPUT;
+ } else {
+ // require dataspace setup even if not using surface input
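+ // (ACodec reads this key in setColorAspectsForVideoEncoder and publishes the derived
+ // dataspace back as "android._dataspace" in the encoder's input format)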
+ format->setInt32("android._using-recorder", 1);
}
sp<MediaCodecSource> encoder = MediaCodecSource::Create(
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 9eab66f..d7c8faa 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -488,6 +488,7 @@
ACodec::ACodec()
: mQuirks(0),
mNode(0),
+ mUsingNativeWindow(false),
mNativeWindowUsageBits(0),
mIsVideo(false),
mIsEncoder(false),
@@ -1647,6 +1648,7 @@
sp<AMessage> inputFormat = new AMessage;
sp<AMessage> outputFormat = new AMessage;
+ mConfigFormat = msg;
mIsEncoder = encoder;
@@ -1764,6 +1766,7 @@
sp<RefBase> obj;
bool haveNativeWindow = msg->findObject("native-window", &obj)
&& obj != NULL && video && !encoder;
+ mUsingNativeWindow = haveNativeWindow;
mLegacyAdaptiveExperiment = false;
if (video && !encoder) {
inputFormat->setInt32("adaptive-playback", false);
@@ -1940,9 +1943,9 @@
}
if (encoder) {
- err = setupVideoEncoder(mime, msg, outputFormat);
+ err = setupVideoEncoder(mime, msg, outputFormat, inputFormat);
} else {
- err = setupVideoDecoder(mime, msg, haveNativeWindow, outputFormat);
+ err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat);
}
if (err != OK) {
@@ -2003,7 +2006,8 @@
// fallback is not supported for protected playback
err = PERMISSION_DENIED;
} else if (err == OK) {
- err = setupVideoDecoder(mime, msg, false, outputFormat);
+ err = setupVideoDecoder(
+ mime, msg, haveNativeWindow, usingSwRenderer, outputFormat);
}
}
}
@@ -3013,7 +3017,7 @@
status_t ACodec::setupVideoDecoder(
const char *mime, const sp<AMessage> &msg, bool haveNativeWindow,
- sp<AMessage> &outputFormat) {
+ bool usingSwRenderer, sp<AMessage> &outputFormat) {
int32_t width, height;
if (!msg->findInt32("width", &width)
|| !msg->findInt32("height", &height)) {
@@ -3076,113 +3080,262 @@
return err;
}
- err = setColorAspects(
- kPortIndexOutput, width, height, msg, outputFormat);
- if (err != OK) {
- ALOGI("Falling back to presets as component does not describe color aspects.");
+ err = setColorAspectsForVideoDecoder(
+ width, height, haveNativeWindow || usingSwRenderer, msg, outputFormat);
+ if (err == ERROR_UNSUPPORTED) { // support is optional
err = OK;
}
+ return err;
+}
+
+status_t ACodec::initDescribeColorAspectsIndex() {
+ status_t err = mOMX->getExtensionIndex(
+ mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
+ if (err != OK) {
+ mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
+ }
+ return err;
+}
+
+status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams &params, bool verify) {
+ status_t err = ERROR_UNSUPPORTED;
+ if (mDescribeColorAspectsIndex) {
+ err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
+ }
+ ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
+ mComponentName.c_str(),
+ params.sAspects.mRange, asString(params.sAspects.mRange),
+ params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
+ params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
+ params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
+ err, asString(err));
+
+ if (verify && err == OK) {
+ err = getCodecColorAspects(params);
+ }
+
+ ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
+ "[%s] getting color aspects failed even though codec advertises support",
+ mComponentName.c_str());
+ return err;
+}
+
+status_t ACodec::setColorAspectsForVideoDecoder(
+ int32_t width, int32_t height, bool usingNativeWindow,
+ const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
+ DescribeColorAspectsParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
+
+ getColorAspectsFromFormat(configFormat, params.sAspects);
+ if (usingNativeWindow) {
+ setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
+ // The default aspects will be set back to the output format during the
+ // getFormat phase of configure(). Set non-Unspecified values back into the
+ // format, in case component does not support this enumeration.
+ setColorAspectsIntoFormat(params.sAspects, outputFormat);
+ }
+
+ (void)initDescribeColorAspectsIndex();
+
+ // communicate color aspects to codec
+ return setCodecColorAspects(params);
+}
+
+status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
+ status_t err = ERROR_UNSUPPORTED;
+ if (mDescribeColorAspectsIndex) {
+ err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
+ }
+ ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
+ mComponentName.c_str(),
+ params.sAspects.mRange, asString(params.sAspects.mRange),
+ params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
+ params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
+ params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
+ err, asString(err));
+ if (params.bRequestingDataSpace) {
+ ALOGV("for dataspace %#x", params.nDataSpace);
+ }
+ if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
+ && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
+ ALOGW("[%s] getting color aspects failed even though codec advertises support",
+ mComponentName.c_str());
+ }
+ return err;
+}
+
+status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
+ DescribeColorAspectsParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexInput;
+ status_t err = getCodecColorAspects(params);
+ if (err == OK) {
+ // we only set encoder input aspects if codec supports them
+ setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
+ }
+ return err;
+}
+
+status_t ACodec::getDataSpace(
+ DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
+ bool tryCodec) {
+ status_t err = OK;
+ if (tryCodec) {
+ // request dataspace guidance from codec.
+ params.bRequestingDataSpace = OMX_TRUE;
+ err = getCodecColorAspects(params);
+ params.bRequestingDataSpace = OMX_FALSE;
+ if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
+ *dataSpace = (android_dataspace)params.nDataSpace;
+ return err;
+ } else if (err == ERROR_UNSUPPORTED) {
+ // ignore not-implemented error for dataspace requests
+ err = OK;
+ }
+ }
+
+ // this returns legacy versions if available
+ *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
+ ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
+ "and dataspace %#x",
+ mComponentName.c_str(),
+ params.sAspects.mRange, asString(params.sAspects.mRange),
+ params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
+ params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
+ params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
+ *dataSpace);
+ return err;
+}
+
+
+status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
+ int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
+ android_dataspace *dataSpace) {
+ DescribeColorAspectsParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
+
+ // reset default format and get resulting format
+ getColorAspectsFromFormat(configFormat, params.sAspects);
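+ // defaults are only filled in when a dataspace is requested, i.e. when rendering to a
+ // native window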
+ if (dataSpace != NULL) {
+ setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
+ }
+ status_t err = setCodecColorAspects(params, true /* readBack */);
+
+ // we always set specified aspects for decoders
+ setColorAspectsIntoFormat(params.sAspects, outputFormat);
+
+ if (dataSpace != NULL) {
+ status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
+ if (err == OK) {
+ err = res;
+ }
+ }
return err;
}
-status_t ACodec::setColorAspects(
- OMX_U32 portIndex, int32_t width, int32_t height, const sp<AMessage> &msg,
- sp<AMessage> &format) {
- DescribeColorAspectsParams params;
- InitOMXParams(&params);
- params.nPortIndex = portIndex;
-
- // 0 values are unspecified
- int32_t range = 0, standard = 0, transfer = 0;
- if (portIndex == kPortIndexInput) {
- // Encoders allow overriding default aspects with 0 if specified by format. Decoders do not.
- setDefaultPlatformColorAspectsIfNeeded(range, standard, transfer, width, height);
- }
- (void)msg->findInt32("color-range", &range);
- (void)msg->findInt32("color-standard", &standard);
- (void)msg->findInt32("color-transfer", &transfer);
-
- if (convertPlatformColorAspectsToCodecAspects(
- range, standard, transfer, params.sAspects) != OK) {
- ALOGW("[%s] Ignoring illegal color aspects(range=%d, standard=%d, transfer=%d)",
- mComponentName.c_str(), range, standard, transfer);
- // Invalid values were converted to unspecified !params!, but otherwise were not changed
- // For encoders, we leave these as is. For decoders, we will use default values.
- }
-
- // set defaults for decoders.
- if (portIndex != kPortIndexInput) {
- setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
- convertCodecColorAspectsToPlatformAspects(params.sAspects, &range, &standard, &transfer);
- }
-
- // save updated values to base output format (encoder input format will read back actually
- // supported values by the codec)
- if (range != 0) {
- format->setInt32("color-range", range);
- }
- if (standard != 0) {
- format->setInt32("color-standard", standard);
- }
- if (transfer != 0) {
- format->setInt32("color-transfer", transfer);
- }
-
- // communicate color aspects to codec
- status_t err = mOMX->getExtensionIndex(
- mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
- if (err != OK) {
- mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
- return err;
- }
-
- return mOMX->setConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
-}
-
-status_t ACodec::getColorAspects(OMX_U32 portIndex, sp<AMessage> &format) {
- if (!mDescribeColorAspectsIndex) {
- return ERROR_UNSUPPORTED;
- }
+// initial video encoder setup for bytebuffer mode
+status_t ACodec::setColorAspectsForVideoEncoder(
+ const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
+ // copy config to output format as this is not exposed via getFormat
+ copyColorConfig(configFormat, outputFormat);
DescribeColorAspectsParams params;
InitOMXParams(&params);
- params.nPortIndex = portIndex;
- ColorAspects &aspects = params.sAspects;
- aspects.mRange = ColorAspects::RangeUnspecified;
- aspects.mPrimaries = ColorAspects::PrimariesUnspecified;
- aspects.mMatrixCoeffs = ColorAspects::MatrixUnspecified;
- aspects.mTransfer = ColorAspects::TransferUnspecified;
+ params.nPortIndex = kPortIndexInput;
+ getColorAspectsFromFormat(configFormat, params.sAspects);
- status_t err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
- if (err != OK) {
- return err;
+ (void)initDescribeColorAspectsIndex();
+
+ int32_t usingRecorder;
+ if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
+ android_dataspace dataSpace = HAL_DATASPACE_BT709;
+ int32_t width, height;
+ if (configFormat->findInt32("width", &width)
+ && configFormat->findInt32("height", &height)) {
+ setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
+ status_t err = getDataSpace(
+ params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */);
+ if (err != OK) {
+ return err;
+ }
+ setColorAspectsIntoFormat(params.sAspects, outputFormat);
+ }
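+ // publish the chosen dataspace in the input format; MediaCodecSource reads it back as
+ // "android._dataspace" when configuring the source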
+ inputFormat->setInt32("android._dataspace", (int32_t)dataSpace);
}
- // keep non-standard codec values in extension ranges
- int32_t range, standard, transfer;
- if (convertCodecColorAspectsToPlatformAspects(
- params.sAspects, &range, &standard, &transfer) != OK) {
- ALOGW("[%s] Ignoring invalid color aspects(range=%u, primaries=%u, coeffs=%u, transfer=%u)",
- mComponentName.c_str(),
- aspects.mRange, aspects.mPrimaries, aspects.mMatrixCoeffs, aspects.mTransfer);
- }
-
- // save specified values to format
- if (range != 0) {
- format->setInt32("color-range", range);
- }
- if (standard != 0) {
- format->setInt32("color-standard", standard);
- }
- if (transfer != 0) {
- format->setInt32("color-transfer", transfer);
+ // communicate color aspects to codec, but do not allow change of the platform aspects
+ ColorAspects origAspects = params.sAspects;
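+ // try at most twice: if the codec rewrites a requested aspect, unspecify that aspect and
+ // retry once so the remaining aspects can still be applied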
+ for (int triesLeft = 2; --triesLeft >= 0; ) {
+ status_t err = setCodecColorAspects(params, true /* readBack */);
+ if (err != OK
+ || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
+ params.sAspects, origAspects, true /* usePlatformAspects */)) {
+ return err;
+ }
+ ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
+ mComponentName.c_str());
}
return OK;
}
+// additional initial video encoder setup for surface mode, done when the input surface is created or set
+status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(
+ android_dataspace *dataSpace /* nonnull */) {
+ DescribeColorAspectsParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexInput;
+ ColorAspects &aspects = params.sAspects;
+
+ // reset default format and store resulting format into both input and output formats
+ getColorAspectsFromFormat(mConfigFormat, aspects);
+ int32_t width, height;
+ if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) {
+ setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
+ }
+ setColorAspectsIntoFormat(aspects, mInputFormat);
+ setColorAspectsIntoFormat(aspects, mOutputFormat);
+
+ // communicate color aspects to codec, but do not allow any change
+ ColorAspects origAspects = aspects;
+ status_t err = OK;
+ for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) {
+ err = setCodecColorAspects(params, true /* readBack */);
+ if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) {
+ break;
+ }
+ ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
+ mComponentName.c_str());
+ }
+
+ *dataSpace = HAL_DATASPACE_BT709;
+ aspects = origAspects; // restore desired color aspects
+ status_t res = getDataSpace(
+ params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */);
+ if (err == OK) {
+ err = res;
+ }
+ mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace);
+ mInputFormat->setBuffer(
+ "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects)));
+
+ // update input format with codec supported color aspects (basically set unsupported
+ // aspects to Unspecified)
+ if (err == OK) {
+ (void)getInputColorAspectsForVideoEncoder(mInputFormat);
+ }
+
+ ALOGV("set default color aspects, updated input format to %s, output format to %s",
+ mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str());
+
+ return err;
+}
+
status_t ACodec::setupVideoEncoder(
- const char *mime, const sp<AMessage> &msg, sp<AMessage> &outputFormat) {
+ const char *mime, const sp<AMessage> &msg,
+ sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
int32_t tmp;
if (!msg->findInt32("color-format", &tmp)) {
return INVALID_OPERATION;
@@ -3355,9 +3508,8 @@
// Set up color aspects on input, but propagate them to the output format, as they will
// not be read back from encoder.
- err = setColorAspects(
- kPortIndexInput, width, height, msg, outputFormat);
- if (err != OK) {
+ err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat);
+ if (err == ERROR_UNSUPPORTED) {
ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str());
err = OK;
}
@@ -4330,47 +4482,58 @@
}
}
- if (portIndex != kPortIndexOutput) {
- // TODO: also get input crop
- break;
+ int32_t width = (int32_t)videoDef->nFrameWidth;
+ int32_t height = (int32_t)videoDef->nFrameHeight;
+
+ if (portIndex == kPortIndexOutput) {
+ OMX_CONFIG_RECTTYPE rect;
+ InitOMXParams(&rect);
+ rect.nPortIndex = portIndex;
+
+ if (mOMX->getConfig(
+ mNode,
+ (portIndex == kPortIndexOutput ?
+ OMX_IndexConfigCommonOutputCrop :
+ OMX_IndexConfigCommonInputCrop),
+ &rect, sizeof(rect)) != OK) {
+ rect.nLeft = 0;
+ rect.nTop = 0;
+ rect.nWidth = videoDef->nFrameWidth;
+ rect.nHeight = videoDef->nFrameHeight;
+ }
+
+ if (rect.nLeft < 0 ||
+ rect.nTop < 0 ||
+ rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
+ rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
+ ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
+ rect.nLeft, rect.nTop,
+ rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
+ videoDef->nFrameWidth, videoDef->nFrameHeight);
+ return BAD_VALUE;
+ }
+
+ notify->setRect(
+ "crop",
+ rect.nLeft,
+ rect.nTop,
+ rect.nLeft + rect.nWidth - 1,
+ rect.nTop + rect.nHeight - 1);
+
+ width = rect.nWidth;
+ height = rect.nHeight;
+
+ android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
+ (void)getColorAspectsAndDataSpaceForVideoDecoder(
+ width, height, mConfigFormat, notify,
+ mUsingNativeWindow ? &dataSpace : NULL);
+ if (mUsingNativeWindow) {
+ notify->setInt32("android._dataspace", dataSpace);
+ }
+ } else {
+ (void)getInputColorAspectsForVideoEncoder(notify);
}
- (void)getColorAspects(portIndex, notify);
-
- OMX_CONFIG_RECTTYPE rect;
- InitOMXParams(&rect);
- rect.nPortIndex = portIndex;
-
- if (mOMX->getConfig(
- mNode,
- (portIndex == kPortIndexOutput ?
- OMX_IndexConfigCommonOutputCrop :
- OMX_IndexConfigCommonInputCrop),
- &rect, sizeof(rect)) != OK) {
- rect.nLeft = 0;
- rect.nTop = 0;
- rect.nWidth = videoDef->nFrameWidth;
- rect.nHeight = videoDef->nFrameHeight;
- }
-
- if (rect.nLeft < 0 ||
- rect.nTop < 0 ||
- rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
- rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
- ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
- rect.nLeft, rect.nTop,
- rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
- videoDef->nFrameWidth, videoDef->nFrameHeight);
- return BAD_VALUE;
- }
-
- notify->setRect(
- "crop",
- rect.nLeft,
- rect.nTop,
- rect.nLeft + rect.nWidth - 1,
- rect.nTop + rect.nHeight - 1);
-
break;
}
@@ -4703,6 +4866,45 @@
return OK;
}
+void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) {
+ // aspects are normally communicated in ColorAspects
+ int32_t range, standard, transfer;
+ convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer);
+
+ // if some aspects are unspecified, fill them in from the dataspace fields
+ if (range == 0) {
+ range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
+ }
+ if (standard == 0) {
+ standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
+ }
+ if (transfer == 0) {
+ transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
+ }
+
+ mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event
+ if (range != 0) {
+ mOutputFormat->setInt32("color-range", range);
+ }
+ if (standard != 0) {
+ mOutputFormat->setInt32("color-standard", standard);
+ }
+ if (transfer != 0) {
+ mOutputFormat->setInt32("color-transfer", transfer);
+ }
+
+ ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
+ "(R:%d(%s), S:%d(%s), T:%d(%s))",
+ dataSpace,
+ aspects.mRange, asString(aspects.mRange),
+ aspects.mPrimaries, asString(aspects.mPrimaries),
+ aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs),
+ aspects.mTransfer, asString(aspects.mTransfer),
+ range, asString((ColorRange)range),
+ standard, asString((ColorStandard)standard),
+ transfer, asString((ColorTransfer)transfer));
+}
+
void ACodec::onOutputFormatChanged() {
// store new output format
mOutputFormat = mBaseOutputFormat->dup();
@@ -4717,17 +4919,22 @@
}
}
-void ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &reply) {
+void ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &notify) {
AString mime;
CHECK(mOutputFormat->findString("mime", &mime));
- int32_t left, top, right, bottom;
- if (mime == MEDIA_MIMETYPE_VIDEO_RAW &&
- mNativeWindow != NULL &&
- mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
- // notify renderer of the crop change
+ if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) {
+ // notify renderer of the crop change and dataspace change
// NOTE: native window uses extended right-bottom coordinate
- reply->setRect("crop", left, top, right + 1, bottom + 1);
+ int32_t left, top, right, bottom;
+ if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
+ notify->setRect("crop", left, top, right + 1, bottom + 1);
+ }
+
+ int32_t dataSpace;
+ if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
+ notify->setInt32("dataspace", dataSpace);
+ }
}
}
@@ -5054,6 +5261,17 @@
bool ACodec::BaseState::onOMXEvent(
OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+ if (event == OMX_EventDataSpaceChanged) {
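+ // data1 carries the new android_dataspace; data2 packs the codec-resolved color aspects
+ // one byte each: (Range << 24) | (Primaries << 16) | (MatrixCoeffs << 8) | Transfer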
+ ColorAspects aspects;
+ aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF);
+ aspects.mPrimaries = (ColorAspects::Primaries)((data2 >> 16) & 0xFF);
+ aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF);
+ aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF);
+
+ mCodec->onDataSpaceChanged((android_dataspace)data1, aspects);
+ return true;
+ }
+
if (event != OMX_EventError) {
ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",
mCodec->mComponentName.c_str(), event, data1, data2);
@@ -5559,6 +5777,13 @@
ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
}
+ int32_t dataSpace;
+ if (msg->findInt32("dataspace", &dataSpace)) {
+ status_t err = native_window_set_buffers_data_space(
+ mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
+ ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
+ }
+
int32_t render;
if (mCodec->mNativeWindow != NULL
&& msg->findInt32("render", &render) && render != 0
@@ -5682,6 +5907,7 @@
mDeathNotifier.clear();
}
+ mCodec->mUsingNativeWindow = false;
mCodec->mNativeWindow.clear();
mCodec->mNativeWindowUsageBits = 0;
mCodec->mNode = 0;
@@ -6114,6 +6340,17 @@
"using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
}
+ sp<ABuffer> colorAspectsBuffer;
+ if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
+ err = mCodec->mOMX->setInternalOption(
+ mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS,
+ colorAspectsBuffer->base(), colorAspectsBuffer->capacity());
+ if (err != OK) {
+ ALOGE("[%s] Unable to configure color aspects (err %d)",
+ mCodec->mComponentName.c_str(), err);
+ return err;
+ }
+ }
return OK;
}
@@ -6124,11 +6361,17 @@
sp<AMessage> notify = mCodec->mNotify->dup();
notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);
- android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
+ android_dataspace dataSpace;
+ status_t err =
+ mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
+ notify->setMessage("input-format", mCodec->mInputFormat);
+ notify->setMessage("output-format", mCodec->mOutputFormat);
sp<IGraphicBufferProducer> bufferProducer;
- status_t err = mCodec->mOMX->createInputSurface(
- mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, &mCodec->mInputMetadataType);
+ if (err == OK) {
+ err = mCodec->mOMX->createInputSurface(
+ mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, &mCodec->mInputMetadataType);
+ }
if (err == OK) {
err = setupInputSurface();
@@ -6159,11 +6402,20 @@
CHECK(msg->findObject("input-surface", &obj));
sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());
- status_t err = mCodec->mOMX->setInputSurface(
- mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
- &mCodec->mInputMetadataType);
+ android_dataspace dataSpace;
+ status_t err =
+ mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
+ notify->setMessage("input-format", mCodec->mInputFormat);
+ notify->setMessage("output-format", mCodec->mOutputFormat);
if (err == OK) {
+ err = mCodec->mOMX->setInputSurface(
+ mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
+ &mCodec->mInputMetadataType);
+ }
+
+ if (err == OK) {
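+ // unlike createInputSurface(), setInputSurface() takes no dataspace, so set the default
+ // on the persistent surface's consumer explicitly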
+ surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace);
err = setupInputSurface();
}
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 39f73c3..59f839c 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -774,10 +774,10 @@
// apply encoder color format if specified
if (meta->findInt32(kKeyPixelFormat, &mEncoderFormat)) {
- ALOGV("Using encoder format: %#x", mEncoderFormat);
+ ALOGI("Using encoder format: %#x", mEncoderFormat);
}
if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
- ALOGV("Using encoder data space: %#x", mEncoderDataSpace);
+ ALOGI("Using encoder data space: %#x", mEncoderDataSpace);
}
}
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 84ccd2d..e8cd58a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -1266,7 +1266,10 @@
CHECK(msg->findMessage("input-format", &mInputFormat));
CHECK(msg->findMessage("output-format", &mOutputFormat));
-
+ ALOGV("[%s] configured as input format: %s, output format: %s",
+ mComponentName.c_str(),
+ mInputFormat->debugString(4).c_str(),
+ mOutputFormat->debugString(4).c_str());
int32_t usingSwRenderer;
if (mOutputFormat->findInt32("using-sw-renderer", &usingSwRenderer)
&& usingSwRenderer) {
@@ -1285,6 +1288,12 @@
if (!msg->findInt32("err", &err)) {
sp<RefBase> obj;
msg->findObject("input-surface", &obj);
+ CHECK(msg->findMessage("input-format", &mInputFormat));
+ CHECK(msg->findMessage("output-format", &mOutputFormat));
+ ALOGV("[%s] input surface created as input format: %s, output format: %s",
+ mComponentName.c_str(),
+ mInputFormat->debugString(4).c_str(),
+ mOutputFormat->debugString(4).c_str());
CHECK(obj != NULL);
response->setObject("input-surface", obj);
mHaveInputSurface = true;
@@ -1398,16 +1407,29 @@
case CodecBase::kWhatOutputFormatChanged:
{
- ALOGV("codec output format changed");
-
CHECK(msg->findMessage("format", &mOutputFormat));
+ ALOGV("[%s] output format changed to: %s",
+ mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
+
if (mSoftRenderer == NULL &&
mSurface != NULL &&
(mFlags & kFlagUsesSoftwareRenderer)) {
AString mime;
CHECK(mOutputFormat->findString("mime", &mime));
+ // TODO: propagate color aspects to software renderer to allow better
+ // color conversion to RGB. For now, just mark dataspace for YUV
+ // rendering.
+ int32_t dataSpace;
+ if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
+ ALOGD("[%s] setting dataspace on output surface to #%x",
+ mComponentName.c_str(), dataSpace);
+ int err = native_window_set_buffers_data_space(
+ mSurface.get(), (android_dataspace)dataSpace);
+ ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
+ }
+
if (mime.startsWithIgnoreCase("video/")) {
mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
}
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 5f9a1c0..772a5c4 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -40,8 +40,9 @@
namespace android {
-const int kDefaultSwVideoEncoderFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
-const int kDefaultSwVideoEncoderDataSpace = HAL_DATASPACE_BT709;
+const int32_t kDefaultSwVideoEncoderFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
+const int32_t kDefaultHwVideoEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+const int32_t kDefaultVideoEncoderDataSpace = HAL_DATASPACE_BT709;
const int kStopTimeoutUs = 300000; // allow 300 ms for shutting down the encoder
@@ -515,13 +516,19 @@
sp<AMessage> inputFormat;
int32_t usingSwReadOften;
mSetEncoderFormat = false;
- if (mEncoder->getInputFormat(&inputFormat) == OK
- && inputFormat->findInt32("using-sw-read-often", &usingSwReadOften)
- && usingSwReadOften) {
- // this is a SW encoder; signal source to allocate SW readable buffers
+ if (mEncoder->getInputFormat(&inputFormat) == OK) {
mSetEncoderFormat = true;
- mEncoderFormat = kDefaultSwVideoEncoderFormat;
- mEncoderDataSpace = kDefaultSwVideoEncoderDataSpace;
+ if (inputFormat->findInt32("using-sw-read-often", &usingSwReadOften)
+ && usingSwReadOften) {
+ // this is a SW encoder; signal source to allocate SW readable buffers
+ mEncoderFormat = kDefaultSwVideoEncoderFormat;
+ } else {
+ mEncoderFormat = kDefaultHwVideoEncoderFormat;
+ }
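+ // use the dataspace negotiated by ACodec (published as "android._dataspace" in the input
+ // format), falling back to BT.709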
+ if (!inputFormat->findInt32("android._dataspace", &mEncoderDataSpace)) {
+ mEncoderDataSpace = kDefaultVideoEncoderDataSpace;
+ }
+ ALOGV("setting dataspace %#x, format %#x", mEncoderDataSpace, mEncoderFormat);
}
err = mEncoder->start();
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index 45fb785..7027780 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -215,6 +215,7 @@
if (err != OK) {
ALOGW("failed to get input buffers: %d (%s)", err, asString(err));
decoder->release();
+ source->stop();
return NULL;
}
@@ -223,6 +224,7 @@
if (err != OK) {
ALOGW("failed to get output buffers: %d (%s)", err, asString(err));
decoder->release();
+ source->stop();
return NULL;
}
@@ -328,7 +330,6 @@
if (err != OK || size <= 0 || outputFormat == NULL) {
ALOGE("Failed to decode thumbnail frame");
source->stop();
- decoder->stop();
decoder->release();
return NULL;
}
@@ -401,7 +402,6 @@
videoFrameBuffer.clear();
source->stop();
decoder->releaseOutputBuffer(index);
- decoder->stop();
decoder->release();
if (err != OK) {
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index e92c192..bbc4d26 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -363,6 +363,16 @@
}
}
+ // TODO: propagate color aspects to software renderer to allow better
+ // color conversion to RGB. For now, just mark dataspace for YUV rendering.
+ android_dataspace dataSpace;
+ if (format->findInt32("android._dataspace", (int32_t *)&dataSpace) && dataSpace != mDataSpace) {
+ ALOGD("setting dataspace on output surface to #%x", dataSpace);
+ if ((err = native_window_set_buffers_data_space(mNativeWindow.get(), dataSpace))) {
+ ALOGW("failed to set dataspace on surface (%d)", err);
+ }
+ mDataSpace = dataSpace;
+ }
if ((err = mNativeWindow->queueBuffer(mNativeWindow.get(), buf, -1)) != 0) {
ALOGW("Surface::queueBuffer returned error %d", err);
} else {
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index 2b86b0e..30d5b45 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -244,33 +244,6 @@
}
// static
-void ColorUtils::setDefaultPlatformColorAspectsIfNeeded(
- int32_t &range, int32_t &standard, int32_t &transfer,
- int32_t width, int32_t height) {
- if (range == ColorUtils::kColorRangeUnspecified) {
- range = ColorUtils::kColorRangeLimited;
- }
-
- if (standard == ColorUtils::kColorStandardUnspecified) {
- // Default to BT2020, BT709 or BT601 based on size. Allow 2.35:1 aspect ratio. Limit BT601
- // to PAL or smaller, BT2020 to 4K or larger, leaving BT709 for all resolutions in between.
- if (width >= 3840 || height >= 3840 || width * (int64_t)height >= 3840 * 1634) {
- standard = ColorUtils::kColorStandardBT2020;
- } else if ((width <= 720 && height > 480) || (height <= 720 && width > 480)) {
- standard = ColorUtils::kColorStandardBT601_625;
- } else if ((width <= 720 && height <= 480) || (height <= 720 && width <= 480)) {
- standard = ColorUtils::kColorStandardBT601_525;
- } else {
- standard = ColorUtils::kColorStandardBT709;
- }
- }
-
- if (transfer == ColorUtils::kColorTransferUnspecified) {
- transfer = ColorUtils::kColorTransferSMPTE_170M;
- }
-}
-
-// static
void ColorUtils::setDefaultCodecColorAspectsIfNeeded(
ColorAspects &aspects, int32_t width, int32_t height) {
ColorAspects::MatrixCoeffs coeffs;
@@ -308,5 +281,208 @@
}
}
+// TODO: move this into a Video HAL
+ALookup<CU::ColorStandard, std::pair<CA::Primaries, CA::MatrixCoeffs>> sStandardFallbacks {
+ {
+ { CU::kColorStandardBT601_625, { CA::PrimariesBT709_5, CA::MatrixBT470_6M } },
+ { CU::kColorStandardBT601_625, { CA::PrimariesBT709_5, CA::MatrixBT601_6 } },
+ { CU::kColorStandardBT709, { CA::PrimariesBT709_5, CA::MatrixSMPTE240M } },
+ { CU::kColorStandardBT709, { CA::PrimariesBT709_5, CA::MatrixBT2020 } },
+ { CU::kColorStandardBT601_525, { CA::PrimariesBT709_5, CA::MatrixBT2020Constant } },
+
+ { CU::kColorStandardBT2020Constant,
+ { CA::PrimariesBT470_6M, CA::MatrixBT2020Constant } },
+
+ { CU::kColorStandardBT601_625, { CA::PrimariesBT601_6_625, CA::MatrixBT470_6M } },
+ { CU::kColorStandardBT601_525, { CA::PrimariesBT601_6_625, CA::MatrixBT2020Constant } },
+
+ { CU::kColorStandardBT601_525, { CA::PrimariesBT601_6_525, CA::MatrixBT470_6M } },
+ { CU::kColorStandardBT601_525, { CA::PrimariesBT601_6_525, CA::MatrixBT2020Constant } },
+
+ { CU::kColorStandardBT2020Constant,
+ { CA::PrimariesGenericFilm, CA::MatrixBT2020Constant } },
+ }
+};
+
+ALookup<CU::ColorStandard, CA::Primaries> sStandardPrimariesFallbacks {
+ {
+ { CU::kColorStandardFilm, CA::PrimariesGenericFilm },
+ { CU::kColorStandardBT470M, CA::PrimariesBT470_6M },
+ { CU::kColorStandardBT2020, CA::PrimariesBT2020 },
+ { CU::kColorStandardBT601_525_Unadjusted, CA::PrimariesBT601_6_525 },
+ { CU::kColorStandardBT601_625_Unadjusted, CA::PrimariesBT601_6_625 },
+ }
+};
+
+static ALookup<android_dataspace, android_dataspace> sLegacyDataSpaceToV0 {
+ {
+ { HAL_DATASPACE_SRGB, HAL_DATASPACE_V0_SRGB },
+ { HAL_DATASPACE_BT709, HAL_DATASPACE_V0_BT709 },
+ { HAL_DATASPACE_SRGB_LINEAR, HAL_DATASPACE_V0_SRGB_LINEAR },
+ { HAL_DATASPACE_BT601_525, HAL_DATASPACE_V0_BT601_525 },
+ { HAL_DATASPACE_BT601_625, HAL_DATASPACE_V0_BT601_625 },
+ { HAL_DATASPACE_JFIF, HAL_DATASPACE_V0_JFIF },
+ }
+};
+
+bool ColorUtils::convertDataSpaceToV0(android_dataspace &dataSpace) {
+ (void)sLegacyDataSpaceToV0.lookup(dataSpace, &dataSpace);
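+ // aspect-only means only the STANDARD/TRANSFER/RANGE fields are set, i.e. the legacy
+ // low 16 bits and the two reserved top bits are clear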
+ return (dataSpace & 0xC000FFFF) == 0;
+}
+
+bool ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
+ ColorAspects &aspects, const ColorAspects &orig, bool usePlatformAspects) {
+ // remove changed aspects (change them to Unspecified)
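+ // with usePlatformAspects, primaries and matrix coeffs map jointly to the platform
+ // "color-standard", so a change in either unspecifies both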
+ bool changed = false;
+ if (aspects.mRange && aspects.mRange != orig.mRange) {
+ aspects.mRange = ColorAspects::RangeUnspecified;
+ changed = true;
+ }
+ if (aspects.mPrimaries && aspects.mPrimaries != orig.mPrimaries) {
+ aspects.mPrimaries = ColorAspects::PrimariesUnspecified;
+ if (usePlatformAspects) {
+ aspects.mMatrixCoeffs = ColorAspects::MatrixUnspecified;
+ }
+ changed = true;
+ }
+ if (aspects.mMatrixCoeffs && aspects.mMatrixCoeffs != orig.mMatrixCoeffs) {
+ aspects.mMatrixCoeffs = ColorAspects::MatrixUnspecified;
+ if (usePlatformAspects) {
+ aspects.mPrimaries = ColorAspects::PrimariesUnspecified;
+ }
+ changed = true;
+ }
+ if (aspects.mTransfer && aspects.mTransfer != orig.mTransfer) {
+ aspects.mTransfer = ColorAspects::TransferUnspecified;
+ changed = true;
+ }
+ return changed;
+}
+
+// static
+android_dataspace ColorUtils::getDataSpaceForColorAspects(ColorAspects &aspects, bool mayExpand) {
+ // This platform implementation never expands color space (e.g. returns an expanded
+ // dataspace to use where the codec does in-the-background color space conversion)
+ mayExpand = false;
+
+ if (aspects.mRange == ColorAspects::RangeUnspecified
+ || aspects.mPrimaries == ColorAspects::PrimariesUnspecified
+ || aspects.mMatrixCoeffs == ColorAspects::MatrixUnspecified
+ || aspects.mTransfer == ColorAspects::TransferUnspecified) {
+ ALOGW("expected specified color aspects (%u:%u:%u:%u)",
+ aspects.mRange, aspects.mPrimaries, aspects.mMatrixCoeffs, aspects.mTransfer);
+ }
+
+ // default to video range and transfer
+ ColorRange range = kColorRangeLimited;
+ ColorTransfer transfer = kColorTransferSMPTE_170M;
+ (void)sRanges.map(aspects.mRange, &range);
+ (void)sTransfers.map(aspects.mTransfer, &transfer);
+
+ ColorStandard standard = kColorStandardBT709;
+ auto pair = std::make_pair(aspects.mPrimaries, aspects.mMatrixCoeffs);
+ if (!sStandards.map(pair, &standard)) {
+ if (!sStandardFallbacks.map(pair, &standard)) {
+ (void)sStandardPrimariesFallbacks.map(aspects.mPrimaries, &standard);
+
+ if (aspects.mMatrixCoeffs == ColorAspects::MatrixBT2020Constant) {
+ range = kColorRangeFull;
+ }
+ }
+ }
+
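+ // compose the V0 dataspace from the individual fields, then map it to a legacy constant
+ // where one exists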
+ android_dataspace dataSpace = (android_dataspace)(
+ (range << HAL_DATASPACE_RANGE_SHIFT) | (standard << HAL_DATASPACE_STANDARD_SHIFT) |
+ (transfer << HAL_DATASPACE_TRANSFER_SHIFT));
+ (void)sLegacyDataSpaceToV0.rlookup(dataSpace, &dataSpace);
+
+ if (!mayExpand) {
+ // update codec aspects based on dataspace
+ convertPlatformColorAspectsToCodecAspects(range, standard, transfer, aspects);
+ }
+ return dataSpace;
+}
+
+// static
+void ColorUtils::getColorConfigFromFormat(
+ const sp<AMessage> &format, int32_t *range, int32_t *standard, int32_t *transfer) {
+ if (!format->findInt32("color-range", range)) {
+ *range = kColorRangeUnspecified;
+ }
+ if (!format->findInt32("color-standard", standard)) {
+ *standard = kColorStandardUnspecified;
+ }
+ if (!format->findInt32("color-transfer", transfer)) {
+ *transfer = kColorTransferUnspecified;
+ }
+}
+
+// static
+void ColorUtils::copyColorConfig(const sp<AMessage> &source, sp<AMessage> &target) {
+ // 0 values are unspecified
+ int32_t value;
+ if (source->findInt32("color-range", &value)) {
+ target->setInt32("color-range", value);
+ }
+ if (source->findInt32("color-standard", &value)) {
+ target->setInt32("color-standard", value);
+ }
+ if (source->findInt32("color-transfer", &value)) {
+ target->setInt32("color-transfer", value);
+ }
+}
+
+// static
+void ColorUtils::getColorAspectsFromFormat(const sp<AMessage> &format, ColorAspects &aspects) {
+ int32_t range, standard, transfer;
+ getColorConfigFromFormat(format, &range, &standard, &transfer);
+
+ if (convertPlatformColorAspectsToCodecAspects(
+ range, standard, transfer, aspects) != OK) {
+ ALOGW("Ignoring illegal color aspects(R:%d(%s), S:%d(%s), T:%d(%s))",
+ range, asString((ColorRange)range),
+ standard, asString((ColorStandard)standard),
+ transfer, asString((ColorTransfer)transfer));
+ // Invalid values were converted to Unspecified in |aspects|, but otherwise were not changed.
+ // For encoders, we leave these as is. For decoders, we will use default values.
+ }
+ ALOGV("Got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
+ "from format (out:R:%d(%s), S:%d(%s), T:%d(%s))",
+ aspects.mRange, asString(aspects.mRange),
+ aspects.mPrimaries, asString(aspects.mPrimaries),
+ aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs),
+ aspects.mTransfer, asString(aspects.mTransfer),
+ range, asString((ColorRange)range),
+ standard, asString((ColorStandard)standard),
+ transfer, asString((ColorTransfer)transfer));
+}
+
+// static
+void ColorUtils::setColorAspectsIntoFormat(
+ const ColorAspects &aspects, sp<AMessage> &format, bool force) {
+ int32_t range = 0, standard = 0, transfer = 0;
+ convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer);
+ // save set values to base output format
+ // (encoder input format will read back actually supported values by the codec)
+ if (range != 0 || force) {
+ format->setInt32("color-range", range);
+ }
+ if (standard != 0 || force) {
+ format->setInt32("color-standard", standard);
+ }
+ if (transfer != 0 || force) {
+ format->setInt32("color-transfer", transfer);
+ }
+ ALOGV("Setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
+ "into format (out:R:%d(%s), S:%d(%s), T:%d(%s))",
+ aspects.mRange, asString(aspects.mRange),
+ aspects.mPrimaries, asString(aspects.mPrimaries),
+ aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs),
+ aspects.mTransfer, asString(aspects.mTransfer),
+ range, asString((ColorRange)range),
+ standard, asString((ColorStandard)standard),
+ transfer, asString((ColorTransfer)transfer));
+}
+
} // namespace android
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index 4220358..25c3773 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -94,6 +94,8 @@
status_t signalEndOfInputStream();
+ void signalEvent(OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2);
+
status_t allocateSecureBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
void **buffer_data, native_handle_t **native_handle);
diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h
index 757b308..258511a 100644
--- a/media/libstagefright/include/SoftwareRenderer.h
+++ b/media/libstagefright/include/SoftwareRenderer.h
@@ -54,6 +54,7 @@
int32_t mCropLeft, mCropTop, mCropRight, mCropBottom;
int32_t mCropWidth, mCropHeight;
int32_t mRotationDegrees;
+ android_dataspace mDataSpace;
FrameRenderTracker mRenderTracker;
SoftwareRenderer(const SoftwareRenderer &);
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index a7c2279..995e50e 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -20,12 +20,16 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
+#define STRINGIFY_ENUMS // for asString in HardwareAPI.h/VideoAPI.h
+
#include "GraphicBufferSource.h"
+#include "OMXUtils.h"
#include <OMX_Core.h>
#include <OMX_IndexExt.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ColorUtils.h>
#include <media/hardware/MetadataBufferType.h>
#include <ui/GraphicBuffer.h>
@@ -39,6 +43,8 @@
static const bool EXTRA_CHECK = true;
+static const OMX_U32 kPortIndexInput = 0;
+
GraphicBufferSource::PersistentProxyListener::PersistentProxyListener(
const wp<IGraphicBufferConsumer> &consumer,
const wp<ConsumerListener>& consumerListener) :
@@ -218,6 +224,8 @@
mNumFramesAvailable, mCodecBuffers.size());
CHECK(!mExecuting);
mExecuting = true;
+ mLastDataSpace = HAL_DATASPACE_UNKNOWN;
+ ALOGV("clearing last dataSpace");
// Start by loading up as many buffers as possible. We want to do this,
// rather than just submit the first buffer, to avoid a degenerate case:
@@ -498,6 +506,76 @@
}
}
+void GraphicBufferSource::onDataSpaceChanged_l(
+ android_dataspace dataSpace, android_pixel_format pixelFormat) {
+ ALOGD("got buffer with new dataSpace #%x", dataSpace);
+ mLastDataSpace = dataSpace;
+
+ if (ColorUtils::convertDataSpaceToV0(dataSpace)) {
+ ColorAspects aspects = mColorAspects; // initially requested aspects
+
+ // request color aspects to encode
+ OMX_INDEXTYPE index;
+ status_t err = mNodeInstance->getExtensionIndex(
+ "OMX.google.android.index.describeColorAspects", &index);
+ if (err == OK) {
+ // V0 dataspace
+ DescribeColorAspectsParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexInput;
+ params.nDataSpace = mLastDataSpace;
+ params.nPixelFormat = pixelFormat;
+ params.bDataSpaceChanged = OMX_TRUE;
+ params.sAspects = mColorAspects;
+
+ err = mNodeInstance->getConfig(index, &params, sizeof(params));
+ if (err == OK) {
+ aspects = params.sAspects;
+ ALOGD("Codec resolved it to (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
+ params.sAspects.mRange, asString(params.sAspects.mRange),
+ params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
+ params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
+ params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
+ err, asString(err));
+ } else {
+ params.sAspects = aspects;
+ err = OK;
+ }
+ params.bDataSpaceChanged = OMX_FALSE;
+ for (int triesLeft = 2; --triesLeft >= 0; ) {
+ status_t err = mNodeInstance->setConfig(index, &params, sizeof(params));
+ if (err == OK) {
+ err = mNodeInstance->getConfig(index, &params, sizeof(params));
+ }
+ if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
+ params.sAspects, aspects)) {
+ // if we can't set or get color aspects, still communicate dataspace to client
+ break;
+ }
+
+ ALOGW_IF(triesLeft == 0, "Codec repeatedly changed requested ColorAspects.");
+ }
+ }
+
+ ALOGV("Set color aspects to (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
+ aspects.mRange, asString(aspects.mRange),
+ aspects.mPrimaries, asString(aspects.mPrimaries),
+ aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs),
+ aspects.mTransfer, asString(aspects.mTransfer),
+ err, asString(err));
+
+ // signal client that the dataspace has changed; this will update the output format
+ // TODO: we should tie this to an output buffer somehow, and signal the change
+ // just before the output buffer is returned to the client, but there are many
+ // ways this could fail (e.g. flushing), and we are not yet supporting this scenario.
+
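+ // data2 packs the resolved aspects one byte each (range, primaries, matrix coeffs,
+ // transfer); data1 carries the dataspace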
+ mNodeInstance->signalEvent(
+ OMX_EventDataSpaceChanged, dataSpace,
+ (aspects.mRange << 24) | (aspects.mPrimaries << 16)
+ | (aspects.mMatrixCoeffs << 8) | aspects.mTransfer);
+ }
+}
+
bool GraphicBufferSource::fillCodecBuffer_l() {
CHECK(mExecuting && mNumFramesAvailable > 0);
@@ -537,6 +615,12 @@
mBufferSlot[item.mSlot] = item.mGraphicBuffer;
}
+ if (item.mDataSpace != mLastDataSpace) {
+ onDataSpaceChanged_l(
+ item.mDataSpace, (android_pixel_format)mBufferSlot[item.mSlot]->getPixelFormat());
+ }
+
err = UNKNOWN_ERROR;
// only submit sample if start time is unspecified, or sample
@@ -929,6 +1013,7 @@
}
void GraphicBufferSource::setDefaultDataSpace(android_dataspace dataSpace) {
+ // no need for mutex as we are not yet running
ALOGD("setting dataspace: %#x", dataSpace);
mConsumer->setDefaultBufferDataSpace(dataSpace);
mLastDataSpace = dataSpace;
@@ -999,6 +1084,11 @@
void GraphicBufferSource::setColorAspects(const ColorAspects &aspects) {
Mutex::Autolock autoLock(mMutex);
mColorAspects = aspects;
+ ALOGD("requesting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s))",
+ aspects.mRange, asString(aspects.mRange),
+ aspects.mPrimaries, asString(aspects.mPrimaries),
+ aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs),
+ aspects.mTransfer, asString(aspects.mTransfer));
}
void GraphicBufferSource::onMessageReceived(const sp<AMessage> &msg) {
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index 87f4a60..c8b0e62 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -248,6 +248,9 @@
bool repeatLatestBuffer_l();
int64_t getTimestamp(const BufferItem &item);
+ // called when the data space of the input buffer changes
+ void onDataSpaceChanged_l(android_dataspace dataSpace, android_pixel_format pixelFormat);
+
// Lock, covers all member variables.
mutable Mutex mMutex;
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 278d23c..6b7a871 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -991,6 +991,10 @@
return createGraphicBufferSource(portIndex, bufferConsumer, type);
}
+void OMXNodeInstance::signalEvent(OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2) {
+ mOwner->OnEvent(mNodeID, event, arg1, arg2, NULL);
+}
+
status_t OMXNodeInstance::signalEndOfInputStream() {
// For non-Surface input, the MediaCodec should convert the call to a
// pair of requests (dequeue input buffer, queue input buffer with EOS