Merge "Mpeg4Extractor: Fix flac parsing in mp4"
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index c7985ca..39bbe1c 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -588,10 +588,9 @@
const uint16_t *srcV = (const uint16_t *)img->planes[AOM_PLANE_V];
if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
- convertYUV420Planar16ToY410((uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2,
- dstYStride / sizeof(uint32_t),
- mWidth, mHeight);
+ convertYUV420Planar16ToY410OrRGBA1010102(
+ (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
+ srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight);
} else {
convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride,
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 335dfc1..1eec8f9 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -137,7 +137,118 @@
dst += dstStride * 2;
}
}
+#define CLIP3(min, v, max) (((v) < (min)) ? (min) : (((max) > (v)) ? (v) : (max)))
+void convertYUV420Planar16ToRGBA1010102(uint32_t *dst, const uint16_t *srcY, const uint16_t *srcU,
+ const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
+ size_t srcVStride, size_t dstStride, size_t width,
+ size_t height) {
+ // Converting two lines at a time, slightly faster
+ for (size_t y = 0; y < height; y += 2) {
+ uint32_t *dstTop = (uint32_t *)dst;
+ uint32_t *dstBot = (uint32_t *)(dst + dstStride);
+ uint16_t *ySrcTop = (uint16_t *)srcY;
+ uint16_t *ySrcBot = (uint16_t *)(srcY + srcYStride);
+ uint16_t *uSrc = (uint16_t *)srcU;
+ uint16_t *vSrc = (uint16_t *)srcV;
+ // BT.2020 Limited Range conversion
+
+ // B = 1.168 *(Y - 64) + 2.148 *(U - 512)
+ // G = 1.168 *(Y - 64) - 0.652 *(V - 512) - 0.188 *(U - 512)
+ // R = 1.168 *(Y - 64) + 1.683 *(V - 512)
+
+ // B = 1196/1024 *(Y - 64) + 2200/1024 *(U - 512)
+ // G = .................... - 668/1024 *(V - 512) - 192/1024 *(U - 512)
+ // R = .................... + 1723/1024 *(V - 512)
+
+ // min_B = (1196 *(- 64) + 2200 *(- 512)) / 1024 = -1175
+ // min_G = (1196 *(- 64) - 668 *(1023 - 512) - 192 *(1023 - 512)) / 1024 = -504
+ // min_R = (1196 *(- 64) + 1723 *(- 512)) / 1024 = -937
+
+ // max_B = (1196 *(1023 - 64) + 2200 *(1023 - 512)) / 1024 = 2218
+ // max_G = (1196 *(1023 - 64) - 668 *(- 512) - 192 *(- 512)) / 1024 = 1551
+ // max_R = (1196 *(1023 - 64) + 1723 *(1023 - 512)) / 1024 = 1980
+
+ int32_t mY = 1196, mU_B = 2200, mV_G = -668, mV_R = 1723, mU_G = -192;
+ for (size_t x = 0; x < width; x += 2) {
+ int32_t u, v, y00, y01, y10, y11;
+ u = *uSrc - 512;
+ uSrc += 1;
+ v = *vSrc - 512;
+ vSrc += 1;
+
+ y00 = *ySrcTop - 64;
+ ySrcTop += 1;
+ y01 = *ySrcTop - 64;
+ ySrcTop += 1;
+ y10 = *ySrcBot - 64;
+ ySrcBot += 1;
+ y11 = *ySrcBot - 64;
+ ySrcBot += 1;
+
+ int32_t u_b = u * mU_B;
+ int32_t u_g = u * mU_G;
+ int32_t v_g = v * mV_G;
+ int32_t v_r = v * mV_R;
+
+ int32_t yMult, b, g, r;
+ yMult = y00 * mY;
+ b = (yMult + u_b) / 1024;
+ g = (yMult + v_g + u_g) / 1024;
+ r = (yMult + v_r) / 1024;
+ b = CLIP3(0, b, 1023);
+ g = CLIP3(0, g, 1023);
+ r = CLIP3(0, r, 1023);
+ *dstTop++ = 3 << 30 | (b << 20) | (g << 10) | r;
+
+ yMult = y01 * mY;
+ b = (yMult + u_b) / 1024;
+ g = (yMult + v_g + u_g) / 1024;
+ r = (yMult + v_r) / 1024;
+ b = CLIP3(0, b, 1023);
+ g = CLIP3(0, g, 1023);
+ r = CLIP3(0, r, 1023);
+ *dstTop++ = 3 << 30 | (b << 20) | (g << 10) | r;
+
+ yMult = y10 * mY;
+ b = (yMult + u_b) / 1024;
+ g = (yMult + v_g + u_g) / 1024;
+ r = (yMult + v_r) / 1024;
+ b = CLIP3(0, b, 1023);
+ g = CLIP3(0, g, 1023);
+ r = CLIP3(0, r, 1023);
+ *dstBot++ = 3 << 30 | (b << 20) | (g << 10) | r;
+
+ yMult = y11 * mY;
+ b = (yMult + u_b) / 1024;
+ g = (yMult + v_g + u_g) / 1024;
+ r = (yMult + v_r) / 1024;
+ b = CLIP3(0, b, 1023);
+ g = CLIP3(0, g, 1023);
+ r = CLIP3(0, r, 1023);
+ *dstBot++ = 3 << 30 | (b << 20) | (g << 10) | r;
+ }
+
+ srcY += srcYStride * 2;
+ srcU += srcUStride;
+ srcV += srcVStride;
+ dst += dstStride * 2;
+ }
+}
+
+void convertYUV420Planar16ToY410OrRGBA1010102(uint32_t *dst, const uint16_t *srcY,
+ const uint16_t *srcU, const uint16_t *srcV,
+ size_t srcYStride, size_t srcUStride,
+ size_t srcVStride, size_t dstStride, size_t width,
+ size_t height) {
+ if (isAtLeastT()) {
+ convertYUV420Planar16ToRGBA1010102(dst, srcY, srcU, srcV, srcYStride, srcUStride,
+ srcVStride, dstStride, width, height);
+ } else {
+ convertYUV420Planar16ToY410(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+ dstStride, width, height);
+ }
+}
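
For reference, a standalone sketch of the per-pixel fixed-point math in convertYUV420Planar16ToRGBA1010102 above (not part of the patch; packRGBA1010102 and the test values are illustrative only). It shows that limited-range white (Y=940, U=V=512) clamps to 1023 in every channel and packs to 0xFFFFFFFF, while limited-range black (Y=64) packs to 0xC0000000 (alpha bits only).

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Illustrative re-derivation of the BT.2020 limited-range math used above.
static uint32_t packRGBA1010102(int32_t y10, int32_t u10, int32_t v10) {
    const int32_t mY = 1196, mU_B = 2200, mV_G = -668, mV_R = 1723, mU_G = -192;
    const int32_t y = y10 - 64, u = u10 - 512, v = v10 - 512;
    const int32_t b = std::clamp((y * mY + u * mU_B) / 1024, 0, 1023);
    const int32_t g = std::clamp((y * mY + v * mV_G + u * mU_G) / 1024, 0, 1023);
    const int32_t r = std::clamp((y * mY + v * mV_R) / 1024, 0, 1023);
    return 3u << 30 | (uint32_t)b << 20 | (uint32_t)g << 10 | (uint32_t)r;
}

int main() {
    printf("white: %08x\n", packRGBA1010102(940, 512, 512));  // ffffffff
    printf("black: %08x\n", packRGBA1010102(64, 512, 512));   // c0000000
}
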
void convertYUV420Planar16ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint16_t *srcY,
const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
size_t srcUStride, size_t srcVStride, size_t dstYStride,
@@ -774,8 +885,7 @@
// Save supported hal pixel formats for bit depth of 10, the first time this is called
if (!mBitDepth10HalPixelFormats.size()) {
std::vector<int> halPixelFormats;
- // TODO(b/229387180) Enable once P010 is fully supported
- if (false && isAtLeastT()) {
+ if (isAtLeastT()) {
halPixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
}
// since allowRGBA1010102 can change in each call, but mBitDepth10HalPixelFormats
@@ -800,10 +910,15 @@
// Add YV12 in the end as a fall-back option
mBitDepth10HalPixelFormats.push_back(HAL_PIXEL_FORMAT_YV12);
}
- // When RGBA1010102 is not allowed and if the first supported hal pixel is format is
- // HAL_PIXEL_FORMAT_RGBA_1010102, then return HAL_PIXEL_FORMAT_YV12
- if (!allowRGBA1010102 && mBitDepth10HalPixelFormats[0] == HAL_PIXEL_FORMAT_RGBA_1010102) {
- return HAL_PIXEL_FORMAT_YV12;
+ // From Android T onwards, HAL_PIXEL_FORMAT_RGBA_1010102 corresponds to true
+ // RGBA 1010102 format unlike earlier versions where it was used to represent
+ // YUVA 1010102 data
+ if (!isAtLeastT()) {
+ // When RGBA1010102 is not allowed and if the first supported hal pixel format is
+ // HAL_PIXEL_FORMAT_RGBA_1010102, then return HAL_PIXEL_FORMAT_YV12
+ if (!allowRGBA1010102 && mBitDepth10HalPixelFormats[0] == HAL_PIXEL_FORMAT_RGBA_1010102) {
+ return HAL_PIXEL_FORMAT_YV12;
+ }
}
// Return the first entry from supported formats
return mBitDepth10HalPixelFormats[0];
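
In summary, the selection above prefers P010 on Android T and later, then RGBA1010102 where the client allows it, with YV12 as the unconditional fall-back; before T a leading RGBA1010102 entry is skipped when disallowed, because on those releases the format actually carried YUVA 10:10:10:2. A minimal standalone sketch of that policy, using placeholder format constants rather than the real HAL values:

#include <cstdint>
#include <vector>

// Placeholder identifiers for the sketch; the real constants live in the HAL headers.
enum Format : uint32_t { FMT_P010 = 1, FMT_RGBA1010102 = 2, FMT_YV12 = 3 };

static uint32_t pickBitDepth10Format(bool isAtLeastT, bool p010Supported,
                                     bool rgba1010102Supported, bool allowRGBA1010102) {
    std::vector<uint32_t> formats;
    if (isAtLeastT && p010Supported) formats.push_back(FMT_P010);
    if (rgba1010102Supported) formats.push_back(FMT_RGBA1010102);
    formats.push_back(FMT_YV12);  // always available as the last resort
    if (!isAtLeastT && !allowRGBA1010102 && formats.front() == FMT_RGBA1010102) {
        return FMT_YV12;  // pre-T, RGBA_1010102 actually carried YUVA 10:10:10:2
    }
    return formats.front();
}

int main() { return pickBitDepth10Format(true, true, true, false) == FMT_P010 ? 0 : 1; }
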
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index 52ae3b8..3172f29 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -33,9 +33,11 @@
size_t srcUStride, size_t srcVStride, size_t dstYStride,
size_t dstUVStride, uint32_t width, uint32_t height,
bool isMonochrome = false);
-void convertYUV420Planar16ToY410(uint32_t *dst, const uint16_t *srcY, const uint16_t *srcU,
- const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
- size_t srcVStride, size_t dstStride, size_t width, size_t height);
+void convertYUV420Planar16ToY410OrRGBA1010102(uint32_t *dst, const uint16_t *srcY,
+ const uint16_t *srcU, const uint16_t *srcV,
+ size_t srcYStride, size_t srcUStride,
+ size_t srcVStride, size_t dstStride, size_t width,
+ size_t height);
void convertYUV420Planar16ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint16_t *srcY,
const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
size_t srcUStride, size_t srcVStride, size_t dstYStride,
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 7d36d80..701c22c 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -190,8 +190,7 @@
.build());
std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
- // TODO(b/229387180) Enable once P010 is fully supported
- if (false && isAtLeastT()) {
+ if (isAtLeastT()) {
pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
}
// TODO: support more formats?
@@ -713,9 +712,9 @@
const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
- convertYUV420Planar16ToY410((uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2, dstYStride / sizeof(uint32_t),
- mWidth, mHeight);
+ convertYUV420Planar16ToY410OrRGBA1010102((uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
+ srcUStride / 2, srcVStride / 2,
+ dstYStride / sizeof(uint32_t), mWidth, mHeight);
} else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
srcYStride / 2, srcUStride / 2, srcVStride / 2, dstYStride / 2,
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 0607461..c2ccfa0 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -219,8 +219,7 @@
// TODO: support more formats?
std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
#ifdef VP9
- // TODO(b/229387180) Enable once P010 is fully supported
- if (false && isAtLeastT()) {
+ if (isAtLeastT()) {
pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
}
#endif
@@ -761,7 +760,7 @@
[dstY, srcY, srcU, srcV,
srcYStride, srcUStride, srcVStride, dstYStride,
width = mWidth, height = std::min(mHeight - i, kHeight)] {
- convertYUV420Planar16ToY410(
+ convertYUV420Planar16ToY410OrRGBA1010102(
(uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
srcUStride / 2, srcVStride / 2, dstYStride / sizeof(uint32_t),
width, height);
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 70e742c..fca8f4f 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -60,6 +60,7 @@
enum drc_effect_type_t : int32_t; ///< DRC effect type
enum drc_album_mode_t : int32_t; ///< DRC album mode
enum hdr_dynamic_metadata_type_t : uint32_t; ///< HDR dynamic metadata type
+ enum hdr_format_t : uint32_t; ///< HDR format
enum intra_refresh_mode_t : uint32_t; ///< intra refresh modes
enum level_t : uint32_t; ///< coding level
enum ordinal_key_t : uint32_t; ///< work ordering keys
@@ -192,10 +193,9 @@
kParamIndexPictureType,
// deprecated
kParamIndexHdr10PlusMetadata,
-
kParamIndexPictureQuantization,
-
kParamIndexHdrDynamicMetadata,
+ kParamIndexHdrFormat,
/* ------------------------------------ video components ------------------------------------ */
@@ -1664,6 +1664,34 @@
constexpr char C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO[] = "input.hdr-dynamic-info";
constexpr char C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO[] = "output.hdr-dynamic-info";
+/**
+ * HDR Format
+ */
+C2ENUM(C2Config::hdr_format_t, uint32_t,
+ UNKNOWN, ///< HDR format not known (default)
+ SDR, ///< not HDR (SDR)
+ HLG, ///< HLG
+ HDR10, ///< HDR10
+ HDR10_PLUS, ///< HDR10+
+);
+
+/**
+ * HDR Format Info
+ *
+ * This information may be present during configuration to allow encoders to
+ * prepare for encoding certain HDR formats. When this information is not present
+ * before start, encoders should determine the HDR format based on the available
+ * HDR metadata on the first input frame.
+ *
+ * While this information is optional, it is not a hint. When present, encoders
+ * that do not support dynamic reconfiguration do not need to switch to the HDR
+ * format based on the metadata on the first input frame.
+ */
+typedef C2StreamParam<C2Info, C2SimpleValueStruct<C2EasyEnum<C2Config::hdr_format_t>>,
+ kParamIndexHdrFormat>
+ C2StreamHdrFormatInfo;
+constexpr char C2_PARAMKEY_HDR_FORMAT[] = "coded.hdr-format";
+
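
Per the doc comment above, when C2StreamHdrFormatInfo is not configured before start, an encoder is expected to derive the HDR format from the metadata on the first input frame. A hedged standalone sketch of one such fallback; the struct and field names are assumptions, not Codec2 API:

#include <cstdint>

enum class HdrFormat : uint32_t { UNKNOWN, SDR, HLG, HDR10, HDR10_PLUS };  // mirrors hdr_format_t

struct FirstFrameHdrInfo {       // hypothetical summary of the first input frame
    bool hasStaticMetadata;      // e.g. mastering display / CLL info attached
    bool hasDynamicMetadata;     // e.g. SMPTE 2094-40 blob attached
    bool transferIsPQ;           // transfer characteristics are SMPTE ST 2084
    bool transferIsHLG;          // transfer characteristics are HLG
};

// One way an encoder could fall back when the HDR format was not configured.
static HdrFormat deriveHdrFormat(const FirstFrameHdrInfo &info) {
    if (info.hasDynamicMetadata) return HdrFormat::HDR10_PLUS;
    if (info.hasStaticMetadata || info.transferIsPQ) return HdrFormat::HDR10;
    if (info.transferIsHLG) return HdrFormat::HLG;
    return HdrFormat::SDR;
}

int main() { return deriveHdrFormat({false, false, false, true}) == HdrFormat::HLG ? 0 : 1; }
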
/* ------------------------------------ block-based coding ----------------------------------- */
/**
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 582bc98..81c441b 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -400,10 +400,10 @@
// Rotation
// Note: SDK rotation is clock-wise, while C2 rotation is counter-clock-wise
add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_VUI_ROTATION, "value")
- .limitTo(D::VIDEO & D::CODED)
+ .limitTo((D::VIDEO | D::IMAGE) & D::CODED)
.withMappers(negate, negate));
add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_ROTATION, "value")
- .limitTo(D::VIDEO & D::RAW)
+ .limitTo((D::VIDEO | D::IMAGE) & D::RAW)
.withMappers(negate, negate));
// android 'video-scaling'
@@ -513,6 +513,9 @@
add(ConfigMapper("cta861.max-fall", C2_PARAMKEY_HDR_STATIC_INFO, "max-fall")
.limitTo((D::VIDEO | D::IMAGE) & D::RAW));
+ add(ConfigMapper(C2_PARAMKEY_HDR_FORMAT, C2_PARAMKEY_HDR_FORMAT, "value")
+ .limitTo((D::VIDEO | D::IMAGE) & D::CODED & D::CONFIG));
+
add(ConfigMapper(std::string(KEY_FEATURE_) + FEATURE_SecurePlayback,
C2_PARAMKEY_SECURE_MODE, "value"));
@@ -960,7 +963,23 @@
.limitTo(D::ENCODER & D::VIDEO & D::READ));
add(ConfigMapper(KEY_PICTURE_TYPE, C2_PARAMKEY_PICTURE_TYPE, "value")
- .limitTo(D::ENCODER & D::VIDEO & D::READ));
+ .limitTo(D::ENCODER & D::VIDEO & D::READ)
+ .withMappers([](C2Value v) -> C2Value {
+ int32_t sdk;
+ C2Config::picture_type_t c2;
+ if (v.get(&sdk) && C2Mapper::map(sdk, &c2)) {
+ return C2Value(c2);
+ }
+ return C2Value();
+ }, [](C2Value v) -> C2Value {
+ C2Config::picture_type_t c2;
+ int32_t sdk = PICTURE_TYPE_UNKNOWN;
+ using C2ValueType=typename _c2_reduce_enum_to_underlying_type<decltype(c2)>::type;
+ if (v.get((C2ValueType*)&c2) && C2Mapper::map(c2, &sdk)) {
+ return sdk;
+ }
+ return C2Value();
+ }));
/* still to do
not yet used by MediaCodec, but defined as MediaFormat
@@ -1618,6 +1637,27 @@
params->setFloat(C2_PARAMKEY_INPUT_TIME_STRETCH, captureRate / frameRate);
}
}
+
+ // add HDR format for video encoding
+ if (configDomain == IS_CONFIG) {
+ // don't assume here that transfer is set for HDR, only require it for HLG
+ int transfer = 0;
+ params->findInt32(KEY_COLOR_TRANSFER, &transfer);
+
+ int profile;
+ if (params->findInt32(KEY_PROFILE, &profile)) {
+ std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
+ C2Mapper::GetProfileLevelMapper(mCodingMediaType);
+ C2Config::hdr_format_t c2 = C2Config::hdr_format_t::UNKNOWN;
+ if (mapper && mapper->mapHdrFormat(profile, &c2)) {
+ if (c2 == C2Config::hdr_format_t::HLG &&
+ transfer != COLOR_TRANSFER_HLG) {
+ c2 = C2Config::hdr_format_t::UNKNOWN;
+ }
+ params->setInt32(C2_PARAMKEY_HDR_FORMAT, c2);
+ }
+ }
+ }
}
{ // reflect temporal layering into a binary blob
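
The block added above derives the HDR format from the requested encoder profile and only trusts HLG when the colour transfer is actually HLG. A standalone sketch of that decision for HEVC, assuming placeholder constants for the SDK profile and transfer values and mirroring the sHevcHdrFormats table added in Codec2Mapper.cpp below:

#include <cstdint>
#include <map>

enum class HdrFormat { UNKNOWN, SDR, HLG, HDR10, HDR10_PLUS };

// Illustrative placeholders for the SDK profile / transfer constants.
constexpr int32_t kHEVCProfileMain            = 0x01;
constexpr int32_t kHEVCProfileMain10          = 0x02;
constexpr int32_t kHEVCProfileMain10HDR10     = 0x1000;
constexpr int32_t kHEVCProfileMain10HDR10Plus = 0x2000;
constexpr int32_t kColorTransferHLG           = 7;

// Sketch of the decision above: map the profile to an HDR format, then demote
// HLG to UNKNOWN unless the colour transfer really is HLG.
static HdrFormat hdrFormatForHevc(int32_t profile, int32_t transfer) {
    static const std::map<int32_t, HdrFormat> kByProfile = {
        {kHEVCProfileMain,            HdrFormat::SDR},
        {kHEVCProfileMain10,          HdrFormat::HLG},
        {kHEVCProfileMain10HDR10,     HdrFormat::HDR10},
        {kHEVCProfileMain10HDR10Plus, HdrFormat::HDR10_PLUS},
    };
    const auto it = kByProfile.find(profile);
    if (it == kByProfile.end()) return HdrFormat::UNKNOWN;
    HdrFormat f = it->second;
    if (f == HdrFormat::HLG && transfer != kColorTransferHLG) return HdrFormat::UNKNOWN;
    return f;
}

int main() {
    return (hdrFormatForHevc(kHEVCProfileMain10, kColorTransferHLG) == HdrFormat::HLG &&
            hdrFormatForHevc(kHEVCProfileMain10, 0) == HdrFormat::UNKNOWN) ? 0 : 1;
}
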
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 93f29ca..c606d6f 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -276,6 +276,13 @@
{ C2Config::PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10Plus },
};
+ALookup<C2Config::hdr_format_t, int32_t> sHevcHdrFormats = {
+ { C2Config::hdr_format_t::SDR, HEVCProfileMain },
+ { C2Config::hdr_format_t::HLG, HEVCProfileMain10 },
+ { C2Config::hdr_format_t::HDR10, HEVCProfileMain10HDR10 },
+ { C2Config::hdr_format_t::HDR10_PLUS, HEVCProfileMain10HDR10Plus },
+};
+
ALookup<C2Config::level_t, int32_t> sMpeg2Levels = {
{ C2Config::LEVEL_MP2V_LOW, MPEG2LevelLL },
{ C2Config::LEVEL_MP2V_MAIN, MPEG2LevelML },
@@ -365,6 +372,17 @@
{ C2Config::PROFILE_VP9_3, VP9Profile3HDR10Plus },
};
+ALookup<C2Config::hdr_format_t, int32_t> sVp9HdrFormats = {
+ { C2Config::hdr_format_t::SDR, VP9Profile0 },
+ { C2Config::hdr_format_t::SDR, VP9Profile1 },
+ { C2Config::hdr_format_t::HLG, VP9Profile2 },
+ { C2Config::hdr_format_t::HLG, VP9Profile3 },
+ { C2Config::hdr_format_t::HDR10, VP9Profile2HDR },
+ { C2Config::hdr_format_t::HDR10, VP9Profile3HDR },
+ { C2Config::hdr_format_t::HDR10_PLUS, VP9Profile2HDR10Plus },
+ { C2Config::hdr_format_t::HDR10_PLUS, VP9Profile3HDR10Plus },
+};
+
ALookup<C2Config::level_t, int32_t> sAv1Levels = {
{ C2Config::LEVEL_AV1_2, AV1Level2 },
{ C2Config::LEVEL_AV1_2_1, AV1Level21 },
@@ -411,6 +429,13 @@
{ C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10Plus },
};
+ALookup<C2Config::hdr_format_t, int32_t> sAv1HdrFormats = {
+ { C2Config::hdr_format_t::SDR, AV1ProfileMain8 },
+ { C2Config::hdr_format_t::HLG, AV1ProfileMain10 },
+ { C2Config::hdr_format_t::HDR10, AV1ProfileMain10HDR10 },
+ { C2Config::hdr_format_t::HDR10_PLUS, AV1ProfileMain10HDR10Plus },
+};
+
// HAL_PIXEL_FORMAT_* -> COLOR_Format*
ALookup<uint32_t, int32_t> sPixelFormats = {
{ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, COLOR_FormatSurface },
@@ -435,6 +460,13 @@
{ HAL_PIXEL_FORMAT_RGBA_FP16, COLOR_Format64bitABGRFloat },
};
+ALookup<C2Config::picture_type_t, int32_t> sPictureType = {
+ { C2Config::picture_type_t::SYNC_FRAME, PICTURE_TYPE_I },
+ { C2Config::picture_type_t::I_FRAME, PICTURE_TYPE_I },
+ { C2Config::picture_type_t::P_FRAME, PICTURE_TYPE_P },
+ { C2Config::picture_type_t::B_FRAME, PICTURE_TYPE_B },
+};
+
/**
* A helper that passes through vendor extension profile and level values.
*/
@@ -487,6 +519,10 @@
virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
return sAacProfiles.map(from, to);
}
+ // AAC does not have HDR format
+ virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t*) override {
+ return false;
+ }
};
struct AvcProfileLevelMapper : ProfileLevelMapperHelper {
@@ -517,6 +553,12 @@
virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
return sDolbyVisionProfiles.map(from, to);
}
+ // Dolby Vision is always HDR and the profile is fully expressive so use unknown
+ // HDR format
+ virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t *to) override {
+ *to = C2Config::hdr_format_t::UNKNOWN;
+ return true;
+ }
};
struct H263ProfileLevelMapper : ProfileLevelMapperHelper {
@@ -555,6 +597,9 @@
mIsHdr ? sHevcHdrProfiles.map(from, to) :
sHevcProfiles.map(from, to);
}
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sHevcHdrFormats.map(from, to);
+ }
private:
bool mIsHdr;
@@ -633,6 +678,9 @@
mIsHdr ? sVp9HdrProfiles.map(from, to) :
sVp9Profiles.map(from, to);
}
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sVp9HdrFormats.map(from, to);
+ }
private:
bool mIsHdr;
@@ -662,6 +710,9 @@
mIsHdr ? sAv1HdrProfiles.map(from, to) :
sAv1Profiles.map(from, to);
}
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sAv1HdrFormats.map(from, to);
+ }
private:
bool mIsHdr;
@@ -671,6 +722,13 @@
} // namespace
+// the default mapper is used for media types that do not support HDR
+bool C2Mapper::ProfileLevelMapper::mapHdrFormat(int32_t, C2Config::hdr_format_t *to) {
+ // by default map all (including vendor) profiles to SDR
+ *to = C2Config::hdr_format_t::SDR;
+ return true;
+}
+
// static
std::shared_ptr<C2Mapper::ProfileLevelMapper>
C2Mapper::GetProfileLevelMapper(std::string mediaType) {
@@ -1024,3 +1082,13 @@
}
return true;
}
+
+// static
+bool C2Mapper::map(C2Config::picture_type_t from, int32_t *to) {
+ return sPictureType.map(from, to);
+}
+
+// static
+bool C2Mapper::map(int32_t from, C2Config::picture_type_t *to) {
+ return sPictureType.map(from, to);
+}
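
The lookup tables added above are only queried in fixed directions, so entries that share a value (for example SYNC_FRAME and I_FRAME both pairing with PICTURE_TYPE_I) are intentional. A standalone illustration of the picture-type mapping using a simple first-match-wins lookup (an assumption about ALookup's behaviour; the enum values are placeholders, not the real C2Config / SDK constants):

#include <cstdint>
#include <utility>
#include <vector>

// Placeholder values for C2Config::picture_type_t and the SDK PICTURE_TYPE_* constants.
enum class PicC2 : uint32_t { SYNC_FRAME = 1, I_FRAME = 2, P_FRAME = 4, B_FRAME = 8 };
enum PicSdk : int32_t { PICTURE_TYPE_UNKNOWN = 0, PICTURE_TYPE_I = 1, PICTURE_TYPE_P = 2, PICTURE_TYPE_B = 3 };

// Both SYNC_FRAME and I_FRAME map to PICTURE_TYPE_I; PICTURE_TYPE_I maps back to
// the first entry, SYNC_FRAME.
static const std::vector<std::pair<PicC2, PicSdk>> kPictureType = {
    {PicC2::SYNC_FRAME, PICTURE_TYPE_I},
    {PicC2::I_FRAME,    PICTURE_TYPE_I},
    {PicC2::P_FRAME,    PICTURE_TYPE_P},
    {PicC2::B_FRAME,    PICTURE_TYPE_B},
};

static bool mapToSdk(PicC2 from, PicSdk *to) {
    for (const auto &e : kPictureType) if (e.first == from) { *to = e.second; return true; }
    return false;
}

static bool mapToC2(PicSdk from, PicC2 *to) {
    for (const auto &e : kPictureType) if (e.second == from) { *to = e.first; return true; }
    return false;
}

int main() {
    PicSdk sdk = PICTURE_TYPE_UNKNOWN;
    PicC2 c2 = PicC2::SYNC_FRAME;
    return (mapToSdk(PicC2::I_FRAME, &sdk) && sdk == PICTURE_TYPE_I &&
            mapToC2(PICTURE_TYPE_P, &c2) && c2 == PicC2::P_FRAME) ? 0 : 1;
}
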
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.h b/media/codec2/sfplugin/utils/Codec2Mapper.h
index 33d305e..c8e9e13 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.h
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.h
@@ -34,6 +34,16 @@
virtual bool mapProfile(int32_t, C2Config::profile_t*) = 0;
virtual bool mapLevel(C2Config::level_t, int32_t*) = 0;
virtual bool mapLevel(int32_t, C2Config::level_t*) = 0;
+
+ /**
+ * Mapper method that maps a MediaCodec profile to the supported
+ * HDR format for that profile. Since 10-bit profiles are used for
+ * HLG, this method will return HLG for all 10-bit profiles, but
+ * the caller should also verify that the transfer function is
+ * indeed HLG.
+ */
+ // not an abstract method as we have a default implementation for SDR
+ virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t *hdr);
virtual ~ProfileLevelMapper() = default;
};
diff --git a/media/codec2/tests/Android.bp b/media/codec2/tests/Android.bp
index b858fa5..68db7b2 100644
--- a/media/codec2/tests/Android.bp
+++ b/media/codec2/tests/Android.bp
@@ -39,6 +39,8 @@
cc_test {
name: "codec2_vndk_test",
test_suites: ["device-tests"],
+ // This test doesn't seem to support running isolated with the current assumptions
+ isolated: false,
srcs: [
"C2_test.cpp",
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 212a787..4ebb530 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -40,7 +40,7 @@
/**
* This is used to represent a value that has not been specified.
* For example, an application could use {@link #AAUDIO_UNSPECIFIED} to indicate
- * that is did not not care what the specific value of a parameter was
+ * that it did not care what the specific value of a parameter was
* and would accept whatever it was given.
*/
#define AAUDIO_UNSPECIFIED 0
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index ad00bdb..81aa823 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -572,11 +572,13 @@
// (b) we can support re-creation of offloaded tracks
if (offloadInfo != NULL) {
mOffloadInfoCopy = *offloadInfo;
- mOffloadInfo = &mOffloadInfoCopy;
} else {
- mOffloadInfo = NULL;
memset(&mOffloadInfoCopy, 0, sizeof(audio_offload_info_t));
mOffloadInfoCopy = AUDIO_INFO_INITIALIZER;
+ mOffloadInfoCopy.format = format;
+ mOffloadInfoCopy.sample_rate = sampleRate;
+ mOffloadInfoCopy.channel_mask = channelMask;
+ mOffloadInfoCopy.stream_type = streamType;
}
mVolume[AUDIO_INTERLEAVE_LEFT] = 1.0f;
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index fa21265..285a28a 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1182,7 +1182,6 @@
sp<IMemory> mSharedBuffer;
transfer_type mTransfer;
audio_offload_info_t mOffloadInfoCopy;
- const audio_offload_info_t* mOffloadInfo;
audio_attributes_t mAttributes;
size_t mFrameSize; // frame size in bytes
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 5da32c9..2d29853 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -348,6 +348,10 @@
status_t err = OK;
bool done = false;
size_t retriesLeft = kRetryCount;
+ if (!mDecoder) {
+ ALOGE("decoder is not initialized");
+ return NO_INIT;
+ }
do {
size_t index;
int64_t ptsUs = 0LL;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 382f4a7..6f1974e 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -4099,26 +4099,29 @@
break;
}
- if (asyncNotify != nullptr) {
- if (mSurface != NULL) {
- if (!mReleaseSurface) {
- uint64_t usage = 0;
- if (mSurface->getConsumerUsage(&usage) != OK) {
- usage = 0;
- }
- mReleaseSurface.reset(new ReleaseSurface(usage));
+ bool forceSync = false;
+ if (asyncNotify != nullptr && mSurface != NULL) {
+ if (!mReleaseSurface) {
+ uint64_t usage = 0;
+ if (mSurface->getConsumerUsage(&usage) != OK) {
+ usage = 0;
}
- if (mSurface != mReleaseSurface->getSurface()) {
- status_t err = connectToSurface(mReleaseSurface->getSurface());
- ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
- if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
- err = mCodec->setSurface(mReleaseSurface->getSurface());
- ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
- }
- if (err == OK) {
- (void)disconnectFromSurface();
- mSurface = mReleaseSurface->getSurface();
- }
+ mReleaseSurface.reset(new ReleaseSurface(usage));
+ }
+ if (mSurface != mReleaseSurface->getSurface()) {
+ status_t err = connectToSurface(mReleaseSurface->getSurface());
+ ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
+ if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
+ err = mCodec->setSurface(mReleaseSurface->getSurface());
+ ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
+ }
+ if (err == OK) {
+ (void)disconnectFromSurface();
+ mSurface = mReleaseSurface->getSurface();
+ } else {
+ // We were not able to switch the surface, so force
+ // synchronous release.
+ forceSync = true;
}
}
}
@@ -4142,8 +4145,10 @@
}
if (asyncNotify != nullptr) {
- mResourceManagerProxy->markClientForPendingRemoval();
- postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+ if (!forceSync) {
+ mResourceManagerProxy->markClientForPendingRemoval();
+ postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+ }
asyncNotifyPost.clear();
mAsyncReleaseCompleteNotification = asyncNotify;
}
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 4b6470a..a443ed9 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -126,14 +126,10 @@
}
}
-static bool isHdr(const sp<AMessage> &format) {
- // if CSD specifies HDR transfer(s), we assume HDR. Otherwise, if it specifies non-HDR
- // transfers, we must assume non-HDR. This is because CSD trumps any color-transfer key
- // in the format.
- int32_t isHdr;
- if (format->findInt32("android._is-hdr", &isHdr)) {
- return isHdr;
- }
+/**
+ * Returns true if, and only if, the given format corresponds to HDR10 or HDR10+.
+ */
+static bool isHdr10or10Plus(const sp<AMessage> &format) {
// if user/container supplied HDR static info without transfer set, assume true
if ((format->contains("hdr-static-info") || format->contains("hdr10-plus-info"))
@@ -143,8 +139,7 @@
// otherwise, verify that an HDR transfer function is set
int32_t transfer;
if (format->findInt32("color-transfer", &transfer)) {
- return transfer == ColorUtils::kColorTransferST2084
- || transfer == ColorUtils::kColorTransferHLG;
+ return transfer == ColorUtils::kColorTransferST2084;
}
return false;
}
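
The renamed check above now treats only ST 2084 (and container-supplied HDR10/HDR10+ info without a transfer) as HDR, so HLG content is no longer profile-bumped. A standalone sketch of the same predicate, with assumed transfer constant values and a hypothetical FormatView standing in for the AMessage queries:

#include <cstdint>
#include <optional>

// Assumed value for illustration; the real constant is ColorUtils::kColorTransferST2084.
constexpr int32_t kColorTransferST2084 = 6;

struct FormatView {                  // hypothetical stand-in for the format message
    bool hasHdrStaticInfo = false;   // "hdr-static-info" present
    bool hasHdr10PlusInfo = false;   // "hdr10-plus-info" present
    std::optional<int32_t> transfer; // "color-transfer", if set
};

// Sketch of the narrowed check: HLG no longer counts, so plain 10-bit HLG content
// keeps its base 10-bit profile instead of being bumped to an HDR10 profile.
static bool isHdr10or10Plus(const FormatView &f) {
    if ((f.hasHdrStaticInfo || f.hasHdr10PlusInfo) && !f.transfer.has_value()) return true;
    return f.transfer.has_value() && *f.transfer == kColorTransferST2084;
}

int main() {
    FormatView hlg;  hlg.transfer = 7;   // HLG transfer (assumed value 7)
    FormatView pq;   pq.transfer = 6;    // ST 2084 transfer
    return (!isHdr10or10Plus(hlg) && isHdr10or10Plus(pq)) ? 0 : 1;
}
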
@@ -419,8 +414,12 @@
}
// bump to HDR profile
- if (isHdr(format) && codecProfile == HEVCProfileMain10) {
- codecProfile = HEVCProfileMain10HDR10;
+ if (isHdr10or10Plus(format) && codecProfile == HEVCProfileMain10) {
+ if (format->contains("hdr10-plus-info")) {
+ codecProfile = HEVCProfileMain10HDR10Plus;
+ } else {
+ codecProfile = HEVCProfileMain10HDR10;
+ }
}
format->setInt32("profile", codecProfile);
@@ -615,16 +614,25 @@
{ 3, VP9Profile3 },
};
- const static ALookup<int32_t, int32_t> toHdr {
+ const static ALookup<int32_t, int32_t> toHdr10 {
{ VP9Profile2, VP9Profile2HDR },
{ VP9Profile3, VP9Profile3HDR },
};
+ const static ALookup<int32_t, int32_t> toHdr10Plus {
+ { VP9Profile2, VP9Profile2HDR10Plus },
+ { VP9Profile3, VP9Profile3HDR10Plus },
+ };
+
int32_t profile;
if (profiles.map(data[0], &profile)) {
// convert to HDR profile
- if (isHdr(format)) {
- toHdr.lookup(profile, &profile);
+ if (isHdr10or10Plus(format)) {
+ if (format->contains("hdr10-plus-info")) {
+ toHdr10Plus.lookup(profile, &profile);
+ } else {
+ toHdr10.lookup(profile, &profile);
+ }
}
format->setInt32("profile", profile);
@@ -684,7 +692,7 @@
int32_t profile;
if (profiles.map(std::make_pair(highBitDepth, profileData), &profile)) {
// bump to HDR profile
- if (isHdr(format) && profile == AV1ProfileMain10) {
+ if (isHdr10or10Plus(format) && profile == AV1ProfileMain10) {
if (format->contains("hdr10-plus-info")) {
profile = AV1ProfileMain10HDR10Plus;
} else {
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
index 810ae95..2b2692f 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
@@ -65,8 +65,8 @@
for (size_t i = 0; i < extractor->countTracks(); ++i) {
sp<MetaData> meta = extractor->getTrackMetaData(i);
- const char *trackMime;
- if (!strcasecmp(mime.c_str(), trackMime)) {
+ std::string trackMime = dataProvider->PickValueInArray(kTestedMimeTypes);
+ if (!strcasecmp(mime.c_str(), trackMime.c_str())) {
sp<IMediaSource> track = extractor->getTrack(i);
if (track == NULL) {
return NULL;
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
index 98bfb94..6856ac0 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
@@ -42,6 +42,51 @@
kMaxValue = MPEG2TS,
};
+static std::string kTestedMimeTypes[] = {"audio/3gpp",
+ "audio/amr-wb",
+ "audio/vorbis",
+ "audio/opus",
+ "audio/mp4a-latm",
+ "audio/mpeg",
+ "audio/mpeg-L1",
+ "audio/mpeg-L2",
+ "audio/midi",
+ "audio/qcelp",
+ "audio/g711-alaw",
+ "audio/g711-mlaw",
+ "audio/flac",
+ "audio/aac-adts",
+ "audio/gsm",
+ "audio/ac3",
+ "audio/eac3",
+ "audio/eac3-joc",
+ "audio/ac4",
+ "audio/scrambled",
+ "audio/alac",
+ "audio/x-ms-wma",
+ "audio/x-adpcm-ms",
+ "audio/x-adpcm-dvi-ima",
+ "video/avc",
+ "video/hevc",
+ "video/mp4v-es",
+ "video/3gpp",
+ "video/x-vnd.on2.vp8",
+ "video/x-vnd.on2.vp9",
+ "video/av01",
+ "video/mpeg2",
+ "video/dolby-vision",
+ "video/scrambled",
+ "video/divx",
+ "video/divx3",
+ "video/xvid",
+ "video/x-motion-jpeg",
+ "text/3gpp-tt",
+ "application/x-subrip",
+ "text/vtt",
+ "text/cea-608",
+ "text/cea-708",
+ "application/x-id3v4"};
+
std::string genMimeType(FuzzedDataProvider *dataProvider);
sp<IMediaExtractor> genMediaExtractor(FuzzedDataProvider *dataProvider, uint16_t dataAmount);
sp<MediaSource> genMediaSource(FuzzedDataProvider *dataProvider, uint16_t maxMediaBlobSize);
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
index 22ff954..d34cca0 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
@@ -11,17 +11,7 @@
channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
</mixPort>
<!-- Le Audio Audio Ports -->
- <mixPort name="le audio output" role="source">
- <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
- samplingRates="8000,16000,24000,32000,44100,48000"
- channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
- <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
- samplingRates="8000,16000,24000,32000,44100,48000"
- channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
- <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
- samplingRates="8000,16000,24000,32000,44100,48000"
- channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
- </mixPort>
+ <mixPort name="le audio output" role="source"/>
<mixPort name="le audio input" role="sink">
<profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
samplingRates="8000,16000,24000,32000,44100,48000"
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
index aad00d6..ef92d08 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
@@ -11,17 +11,7 @@
channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
</mixPort>
<!-- Le Audio Audio Ports -->
- <mixPort name="le audio output" role="source">
- <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
- samplingRates="8000 16000 24000 32000 44100 48000"
- channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
- <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
- samplingRates="8000 16000 24000 32000 44100 48000"
- channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
- <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
- samplingRates="8000 16000 24000 32000 44100 48000"
- channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
- </mixPort>
+ <mixPort name="le audio output" role="source"/>
<mixPort name="le audio input" role="sink">
<profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
samplingRates="8000 16000 24000 32000 44100 48000"